From 9e570b827516a219e18d9d80cffcc93f2971795c Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Fri, 22 Dec 2023 14:57:41 +0700
Subject: [PATCH 01/14] Add: request_id on result

---
 cope2n-api/fwd_api/api/ctel_view.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/cope2n-api/fwd_api/api/ctel_view.py b/cope2n-api/fwd_api/api/ctel_view.py
index 27f0847..2ef48b6 100755
--- a/cope2n-api/fwd_api/api/ctel_view.py
+++ b/cope2n-api/fwd_api/api/ctel_view.py
@@ -273,6 +273,7 @@ class CtelViewSet(viewsets.ViewSet):
 
             if serializer.data[0]["data"].get("status", 200) != 200:
                 continue
+            serializer.data[0]["request_id"] = rq_id
             return Response(status=status.HTTP_200_OK, data=serializer.data[0])
 
         raise ServiceTimeoutException(excArgs=f"{rq_id}")

From a0396c7e7c79f2f88f44394f6e8246db4da08c54 Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Mon, 25 Dec 2023 10:32:09 +0700
Subject: [PATCH 02/14] Add: S3 status

---
 .env_prod                                     | 40 ++++++++++
 cope2n-api/fwd/settings.py                    | 12 +++
 cope2n-api/fwd_api/api/ctel_view.py           |  1 -
 cope2n-api/fwd_api/bg_tasks/__init__.py       |  0
 .../fwd_api/bg_tasks/clean_local_files.py     | 19 +++++
 cope2n-api/fwd_api/bg_tasks/cron_job.py       | 31 +++++++
 .../fwd_api/celery_worker/internal_task.py    | 12 ++-
 cope2n-api/fwd_api/celery_worker/worker.py    | 11 +++
 .../fwd_api/models/SubscriptionRequest.py     |  3 +-
 cope2n-api/fwd_api/utils/file.py              |  3 +-
 cope2n-api/requirements.txt                   |  3 +-
 deploy_images.sh                              |  2 +-
 docker-compose-dev.yml                        |  4 +-
 docker-compose.yml                            |  2 -
 speedtest_sync.py                             | 80 +++++++++++--------
 15 files changed, 177 insertions(+), 46 deletions(-)
 create mode 100644 .env_prod
 create mode 100644 cope2n-api/fwd_api/bg_tasks/__init__.py
 create mode 100644 cope2n-api/fwd_api/bg_tasks/clean_local_files.py
 create mode 100644 cope2n-api/fwd_api/bg_tasks/cron_job.py

diff --git a/.env_prod b/.env_prod
new file mode 100644
index 0000000..453c5a3
--- /dev/null
+++ b/.env_prod
@@ -0,0 +1,40 @@
+MEDIA_ROOT=/app/media
+# DATABASE django setup
+DB_ENGINE=django.db.backends.postgresql_psycopg2
+DB_SCHEMA=sbt2
+DB_USER=sbt
+DB_PASSWORD=sbtCH240
+DB_HOST=sbt.cxetpslawu4p.ap-southeast-1.rds.amazonaws.com
+DB_PUBLIC_PORT=5432
+DB_INTERNAL_PORT=5432
+
+DEBUG=TRUE
+CORS_ALLOWED_ORIGINS=*
+CTEL_KEY=fTjWnZr4u7x!A%D*G-KaPdRgUkXp2s5v
+DB_INTERNAL_KEY=7LYk-iaWTFPqsZHIE5GHuv41S0c_Vlb0ZVc-BnsEZqQ=
+ALLOWED_HOSTS='*'
+BROKER_URL=amqp://test:test@rabbitmq-manulife-sbt:5672
+BASE_URL=http://be-ctel-sbt:9000
+BASE_UI_URL=http://fe-sbt:9801
+HOST_MEDIA_FOLDER=./media
+GID=1000
+UID=198
+SECRET_KEY=999999999999999999999999999999999999999999999999999999999999999999
+RABBITMQ_DEFAULT_USER=test
+RABBITMQ_DEFAULT_PASS=test
+BASE_PORT=9000
+S3_ACCESS_KEY=AKIA3AFPFVWZD77UACHE
+S3_SECRET_KEY=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
+S3_BUCKET_NAME=ocr-sds
+
+AUTH_TOKEN_LIFE_TIME=168
+IMAGE_TOKEN_LIFE_TIME=168
+INTERNAL_SDS_KEY=TannedCung
+FI_USER_NAME=sbt
+FI_PASSWORD=7Eg4AbWIXDnufgn
+
+# Front end env variables
+# VITE_PORT=80
+# VITE_PROXY=http://0.0.0.0
+# VITE_API_BASE_URL=http://0.0.0.0:8000
+# PORT=8002
\ No newline at end of file
diff --git a/cope2n-api/fwd/settings.py b/cope2n-api/fwd/settings.py
index 514ff37..13740da 100755
--- a/cope2n-api/fwd/settings.py
+++ b/cope2n-api/fwd/settings.py
@@ -59,6 +59,7 @@ INSTALLED_APPS = [
     'drf_spectacular',
     'drf_spectacular_sidecar',  # required for Django collectstatic discovery
     'corsheaders',
+    "django_celery_beat",
 ]
 
 
@@ -207,6 +208,17 @@ BROKER_URL = env.str("BROKER_URL", default="amqp://test:test@107.120.70.226:5672
 CELERY_TASK_TRACK_STARTED = True
 CELERY_TASK_TIME_LIMIT = 30 * 60
+CELERY_BEAT_SCHEDULE = {
+    'clean_local_file': {
+        'task': 'fwd_api.celery_worker.internal_task.clean_local_files',
+        'schedule': 3600.0,
+        'args': (),
+        'options': {
+            'expires': 120.0,
+        },
+    },
+}
+
 
 MAX_UPLOAD_SIZE_OF_A_FILE = 100 * 1024 * 1024  # 100 MB
 MAX_UPLOAD_FILE_SIZE_OF_A_REQUEST = 100 * 1024 * 1024  # 100 MB
 MAX_UPLOAD_FILES_IN_A_REQUEST = 5
diff --git a/cope2n-api/fwd_api/api/ctel_view.py b/cope2n-api/fwd_api/api/ctel_view.py
index 2ef48b6..4de39e3 100755
--- a/cope2n-api/fwd_api/api/ctel_view.py
+++ b/cope2n-api/fwd_api/api/ctel_view.py
@@ -236,7 +236,6 @@ class CtelViewSet(viewsets.ViewSet):
             compact_files.append(this_file)
         c_connector.do_pdf((rq_id, sub.id, p_type, user.id, compact_files))
 
-        time_limit = 120
         start_time = time.time()
         while True:
 
diff --git a/cope2n-api/fwd_api/bg_tasks/__init__.py b/cope2n-api/fwd_api/bg_tasks/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/cope2n-api/fwd_api/bg_tasks/clean_local_files.py b/cope2n-api/fwd_api/bg_tasks/clean_local_files.py
new file mode 100644
index 0000000..5fe41a1
--- /dev/null
+++ b/cope2n-api/fwd_api/bg_tasks/clean_local_files.py
@@ -0,0 +1,19 @@
+from django.conf import settings
+from django.utils import timezone
+
+from fwd_api.models import SubscriptionRequest
+from .cron_job import CronJob
+from fwd_api.celery_worker.worker import app as celery_app
+
+class FileCleaningWorker(CronJob):
+    """Clean old files to save disk space"""
+
+    def run(self):
+        print("-----------")
+
+
+
+@celery_app.task(time_limit=5000)
+def clean_media():
+    worker = FileCleaningWorker("file_cleaning_worker")
+    worker.run()
\ No newline at end of file
diff --git a/cope2n-api/fwd_api/bg_tasks/cron_job.py b/cope2n-api/fwd_api/bg_tasks/cron_job.py
new file mode 100644
index 0000000..639462a
--- /dev/null
+++ b/cope2n-api/fwd_api/bg_tasks/cron_job.py
@@ -0,0 +1,31 @@
+import traceback
+
+from celery.utils.log import get_task_logger
+
+
+class CronJob:
+    def __init__(
+        self,
+        name,
+    ):
+        self.name = name
+        self.logger = self.get_logger()
+
+    def get_logger(self):
+        """Create/Get the logger for this task"""
+        logger = get_task_logger(self.name)
+        return logger
+
+    def safe_run(self):
+        """Create a logger and execute run()
+        in a try/except block to prevent crashing
+        """
+        try:
+            self.run()
+        except Exception as e:
+            self.logger.error("Failed to run cron job in safe mode.")
+            self.logger.error(e)
+            traceback.print_exc()
+
+    def run(self):
+        raise NotImplementedError("Not implemented error")
\ No newline at end of file
diff --git a/cope2n-api/fwd_api/celery_worker/internal_task.py b/cope2n-api/fwd_api/celery_worker/internal_task.py
index e3615f5..f9c54cf 100755
--- a/cope2n-api/fwd_api/celery_worker/internal_task.py
+++ b/cope2n-api/fwd_api/celery_worker/internal_task.py
@@ -107,11 +107,15 @@ def process_pdf(rq_id, sub_id, p_type, user_id, files):
 
 
 @app.task(name='upload_file_to_s3')
-def upload_file_to_s3(local_file_path, s3_key):
+def upload_file_to_s3(local_file_path, s3_key, request_id):
     if s3_client.s3_client is not None:
-        res = s3_client.upload_file(local_file_path, s3_key)
-        if res != None and res["ResponseMetadata"]["HTTPStatusCode"] == 200:
-            os.remove(local_file_path)
+        try:
+            s3_client.upload_file(local_file_path, s3_key)
+            sub_request = SubscriptionRequest.objects.filter(request_id=request_id)
+            sub_request.S3_uploaded = True
+            sub_request.save()
+        except Exception as e:
+            return
     else:
         logger.info(f"S3 is not available, skipping,...")
 
diff --git a/cope2n-api/fwd_api/celery_worker/worker.py b/cope2n-api/fwd_api/celery_worker/worker.py
index 70d302c..b8530fc 100755
--- a/cope2n-api/fwd_api/celery_worker/worker.py
+++ b/cope2n-api/fwd_api/celery_worker/worker.py
@@ -16,6 +16,17 @@ app: Celery = Celery(
     broker_transport_options={'confirm_publish': False},
 )
 
+app.config_from_object("django.conf:settings", namespace="CELERY")
+app.autodiscover_tasks()
+
+@app.on_after_finalize.connect
+def setup_periodic_tasks(sender, **kwargs):
+    from fwd_api.bg_tasks.clean_local_files import clean_media
+    sender.add_periodic_task(
+        10.0, clean_media.s(), expires=120.0
+    )
+
+
 app.conf.update({
     'task_queues': [
diff --git a/cope2n-api/fwd_api/models/SubscriptionRequest.py b/cope2n-api/fwd_api/models/SubscriptionRequest.py
index ea6d44c..1073cfe 100755
--- a/cope2n-api/fwd_api/models/SubscriptionRequest.py
+++ b/cope2n-api/fwd_api/models/SubscriptionRequest.py
@@ -18,4 +18,5 @@ class SubscriptionRequest(models.Model):
     status = models.IntegerField() # 1: Processing(Pending) 2: PredictCompleted 3: ReturnCompleted
     subscription = models.ForeignKey(Subscription, on_delete=models.CASCADE)
     created_at = models.DateTimeField(default=timezone.now, db_index=True)
-    updated_at = models.DateTimeField(auto_now=True)
\ No newline at end of file
+    updated_at = models.DateTimeField(auto_now=True)
+    S3_uploaded = models.BooleanField(default=False)
\ No newline at end of file
diff --git a/cope2n-api/fwd_api/utils/file.py b/cope2n-api/fwd_api/utils/file.py
index 29c15b9..5e83d0a 100644
--- a/cope2n-api/fwd_api/utils/file.py
+++ b/cope2n-api/fwd_api/utils/file.py
@@ -156,9 +156,10 @@ def resize_and_save_file(file_name: str, rq: SubscriptionRequest, file: Temporar
 def save_to_S3(file_name, rq, local_file_path):
     try:
         file_path = get_folder_path(rq)
+        request_id = rq.request_id
         assert len(file_path.split("/")) >= 2, "file_path must have at least process type and request id"
         s3_key = os.path.join(file_path.split("/")[-2], file_path.split("/")[-1], file_name)
-        c_connector.upload_file_to_s3((local_file_path, s3_key))
+        c_connector.upload_file_to_s3((local_file_path, s3_key, request_id))
         return s3_key
     except Exception as e:
         print(f"[ERROR]: {e}")
diff --git a/cope2n-api/requirements.txt b/cope2n-api/requirements.txt
index 327bea3..c204228 100755
--- a/cope2n-api/requirements.txt
+++ b/cope2n-api/requirements.txt
@@ -49,4 +49,5 @@ djangorestframework-xml==2.0.0
 boto3==1.29.7
 imagesize==1.4.1
 pdf2image==1.16.3
-redis==5.0.1
\ No newline at end of file
+redis==5.0.1
+django-celery-beat==2.5.0
\ No newline at end of file
diff --git a/deploy_images.sh b/deploy_images.sh
index 688822a..9be2183 100755
--- a/deploy_images.sh
+++ b/deploy_images.sh
@@ -21,4 +21,4 @@ docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
 
 cp ./docker-compose.yml ./docker-compose_${tag}.yml
 sed -i "s/{{tag}}/$tag/g" ./docker-compose_${tag}.yml
-cp .env .env_${tag}
+cp .env_prod .env_${tag}
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index 96e890a..9b0f3e4 100755
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -76,7 +76,7 @@ services:
     volumes:
       - ${HOST_MEDIA_FOLDER}:${MEDIA_ROOT}
       - BE_static:/app/static
-      # - ./cope2n-api:/app
+      - ./cope2n-api:/app
     working_dir: /app
     depends_on:
       db-sbt:
@@ -162,7 +162,7 @@ services:
         condition: service_started
     volumes:
       - ${HOST_MEDIA_FOLDER}:${MEDIA_ROOT}
-      # - ./cope2n-api:/app
+      - ./cope2n-api:/app
     working_dir: /app
     command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
diff --git a/docker-compose.yml b/docker-compose.yml
index 963434c..33afaa1 100755
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -135,8 +135,6 @@ services:
     networks:
       - ctel-sbt
     depends_on:
-      db-sbt:
-        condition: service_started
       rabbitmq-sbt:
         condition: service_started
     volumes:
diff --git a/speedtest_sync.py b/speedtest_sync.py
index 449f451..a24b839 100644
--- a/speedtest_sync.py
+++ b/speedtest_sync.py
@@ -1,10 +1,12 @@
 import requests
 import time
+import random
 import argparse
 import multiprocessing
 import tqdm
-import random
 import traceback
+from requests_toolbelt import MultipartEncoderMonitor
+import requests
 
 
 parser = argparse.ArgumentParser()
@@ -30,23 +32,48 @@ try:
 except:
     print("Failed to login")
     print(response.content)
+
 # After the login, store the token in the memory (RAM) or DB
 # Re-login to issue a new token after 6 days.
 # =================================================================
 
 
 def process_file(data):
-    files, token = data
+    _, token = data
+    files = []
+    if random.random() < 0.2:
+        files = [
+            ('invoice_file', ("invoice.jpg", open("test_samples/sbt/invoice.jpg", "rb"), 'application/octet-stream')),
+            ('imei_files', ("imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb"), 'application/octet-stream')),
+            ('imei_files', ("imei1.jpg", open("test_samples/sbt/imei2.jpg", "rb"), 'application/octet-stream')),
+        ]
+    elif random.random() < 0.6:
+        files = [
+            ('imei_files', ("imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb"), 'application/octet-stream')),
+        ]
+    else:
+        files = [
+            ('imei_files', ("imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb"), 'application/octet-stream')),
+            ('imei_files', ("imei1.jpg", open("test_samples/sbt/imei2.jpg", "rb"), 'application/octet-stream')),
+        ]
     num_files = len(files)
-    files.append(
-        ('processType', (None, 12)),
-    )
+    files.append(('processType', '12'))
 # =================================================================
 
     # UPLOAD THE FILE
     start_time = time.time()
+    end_of_upload_time = 0
+    def my_callback(monitor):
+        nonlocal end_of_upload_time
+        if monitor.bytes_read == monitor.len:
+            end_of_upload_time = time.time()
+    m = MultipartEncoderMonitor.from_fields(
+        fields=files,
+        callback=my_callback
+    )
    try:
        response = requests.post(f'{args.host}/api/ctel/images/process_sync/', headers={
            'Authorization': token,
-        }, files=files, timeout=300)
+            'Content-Type': m.content_type
+        }, data=m, timeout=300)
    except requests.exceptions.Timeout:
        print("Timeout occurred while uploading")
        return {
@@ -68,11 +95,19 @@ def process_file(data):
             "num_files": 0,
         }
     end_time = time.time()
-    upload_time = end_time - start_time
+    upload_time = end_of_upload_time - start_time
 # =================================================================
 
     try:
         data = response.json()
+        if len(data["files"]) != num_files:
+            return {
+                "success": False,
+                "status": "missing_file",
+                "upload_time": 0,
+                "process_time": 0,
+                "num_files": 0,
+            }
         data.pop("files", None)
         print(data)
     except:
@@ -88,36 +123,14 @@ def process_file(data):
         "success": True,
         "status": 200,
         "upload_time": upload_time,
-        "process_time": upload_time,
+        "process_time": time.time() - start_time - upload_time,
         "num_files": num_files,
     }
 
-
-invoice_files = [
-    ('invoice_file', ('invoice.pdf', open("test_samples/20220303025923NHNE_20220222_Starhub_Order_Confirmation_by_Email.pdf", "rb").read())),
-]
-# invoice_files = [
-#     ('invoice_file', ('invoice.jpg', open("test_samples/sbt/invoice.jpg", "rb").read())),
-# ]
-imei_files = [
-    ('imei_files', ("test_samples/sbt/imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb").read())),
-    ('imei_files', ("test_samples/sbt/imei2.jpg", open("test_samples/sbt/imei2.jpg", "rb").read())),
open("test_samples/sbt/imei2.jpg", "rb").read())), - ('imei_files', ("test_samples/sbt/imei3.jpg", open("test_samples/sbt/imei3.jpg", "rb").read())), - ('imei_files', ("test_samples/sbt/imei4.jpeg", open("test_samples/sbt/imei4.jpeg", "rb").read())), - ('imei_files', ("test_samples/sbt/imei5.jpg", open("test_samples/sbt/imei5.jpg", "rb").read())), -] -def get_imei_files(): - num_files = random.randint(1, len(imei_files) + 1) - print("Num imeis", num_files) - files = imei_files[:num_files] - # print("Num of imei files:", len(files)) - return files -def get_files(): - return invoice_files + get_imei_files() def gen_input(num_input): for _ in range(num_input): - yield (get_files(), token) + yield (None, token) pool = multiprocessing.Pool(processes=args.num_workers) results = [] for result in tqdm.tqdm(pool.imap_unordered(process_file, gen_input(num_input=args.num_requests)), total=args.num_requests): @@ -126,7 +139,6 @@ for result in tqdm.tqdm(pool.imap_unordered(process_file, gen_input(num_input=ar print("## TEST REPORT #################################") print("Number of requests: {}".format(args.num_requests)) print("Number of concurrent requests: {}".format(args.num_workers)) -print("Number of files: 1 invoice, 1-5 imei files (random)") print("--------------------------------------") print("SUCCESS RATE") counter = {} @@ -142,7 +154,8 @@ if len(uploading_time) == 0: print("No valid uploading time") print("Check the results!") processing_time = [x["process_time"] for x in results if x["success"]] -print("Uploading + Processing time (Avg / Min / Max): {:.3f}s {:.3f}s {:.3f}s".format(sum(processing_time) / len(processing_time), min(processing_time), max(processing_time))) +print("Uploading time (Avg / Min / Max): {:.3f}s {:.3f}s {:.3f}s".format(sum(uploading_time) / len(uploading_time), min(uploading_time), max(uploading_time))) +print("Processing time (Avg / Min / Max): {:.3f}s {:.3f}s {:.3f}s".format(sum(processing_time) / len(processing_time), min(processing_time), max(processing_time))) print("--------------------------------------") print("TIME BY IMAGE") uploading_time = [x["upload_time"] for x in results if x["success"]] @@ -151,3 +164,4 @@ num_images = sum(x["num_files"] for x in results if x["success"]) print("Total images:", num_images) print("Uploading + Processing time: {:.3f}s".format(sum(processing_time) / num_images)) print("--------------------------------------") + From 6fb304a49d70fc871fa3fd3c9bd61ab4fbdce3be Mon Sep 17 00:00:00 2001 From: dx-tan Date: Tue, 26 Dec 2023 10:39:03 +0700 Subject: [PATCH 03/14] Update: Docker base --- .gitignore | 5 ++- cope2n-ai-fi/Dockerfile | 68 ++++++++++++++++++++++++++++------------- 2 files changed, 50 insertions(+), 23 deletions(-) diff --git a/.gitignore b/.gitignore index 06e3308..de72e23 100644 --- a/.gitignore +++ b/.gitignore @@ -28,4 +28,7 @@ curl.md cope2n-api/fwd_api/commands/init_database.py /data backup -demo-ocr/ \ No newline at end of file +demo-ocr/ +logs/ +docker-compose_.yml +cope2n-ai-fi/Dockerfile_old_work diff --git a/cope2n-ai-fi/Dockerfile b/cope2n-ai-fi/Dockerfile index 405796c..1bb583f 100755 --- a/cope2n-ai-fi/Dockerfile +++ b/cope2n-ai-fi/Dockerfile @@ -1,40 +1,64 @@ -FROM thucpd2408/env-deskew +FROM pytorch/pytorch:1.13.1-cuda11.6-cudnn8-runtime -COPY ./packages/cudnn-linux*.tar.xz /tmp/cudnn-linux*.tar.xz +RUN apt-get update && \ + apt-get install -y git gcc g++ ffmpeg libsm6 libxext6 && \ + apt-get -y autoremove && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* -RUN tar -xvf /tmp/cudnn-linux*.tar.xz -C 
-    && cp /tmp/cudnn-*-archive/include/cudnn*.h /usr/local/cuda/include \
-    && cp -P /tmp/cudnn-*-archive/lib/libcudnn* /usr/local/cuda/lib64 \
-    && chmod a+r /usr/local/cuda/include/cudnn*.h /usr/local/cuda/lib64/libcudnn* \
-    && rm -rf /tmp/cudnn-*-archive
+# RUN pip install torch==1.13.1+cu116 torchvision==0.14.1+cu116 torchaudio==0.13.1 --extra-index-url https://download.pytorch.org/whl/cu116 --no-cache-dir
 
-RUN apt-get update && apt-get install -y gcc g++ ffmpeg libsm6 libxext6 poppler-utils
+RUN pip install -U openmim==0.3.7 --no-cache-dir
+RUN mim install mmcv-full==1.7.1
+RUN pip install mmcv==1.6.0 -f https://download.openmmlab.com/mmcv/dist/cu116/torch1.13/index.html --no-cache-dir
 
-WORKDIR /workspace
+RUN ln -s /opt/conda/lib/python3.10/site-packages/torch/lib/libcudnn.so.8 /usr/lib/libcudnn.so && \
+    ln -s /opt/conda/lib/libcublas.so /usr/lib/libcublas.so
 
+RUN python -m pip install paddlepaddle-gpu==2.4.2.post116 -f https://www.paddlepaddle.org.cn/whl/linux/mkl/avx/stable.html --no-cache-dir
 
-# COPY ./modules/ocr_engine/externals/ /workspace/cope2n-ai-fi/modules/ocr_engine/externals/
-# COPY ./modules/ocr_engine/requirements.txt /workspace/cope2n-ai-fi/modules/ocr_engine/requirements.txt
-# COPY ./modules/sdsvkvu/ /workspace/cope2n-ai-fi/modules/sdsvkvu/
-# COPY ./requirements.txt /workspace/cope2n-ai-fi/requirements.txt
+RUN python -m pip install 'git+https://github.com/facebookresearch/detectron2.git' --no-cache-dir
 
+# Install SDSV packages
 COPY . /workspace/cope2n-ai-fi
 
-RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsv_dewarp && pip3 install -v -e .
-RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtd && pip3 install -v -e .
-RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtr && pip3 install -v -e .
+RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsv_dewarp && pip3 install -v -e . --no-cache-dir
+RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtd && pip3 install -v -e . --no-cache-dir
+RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtr && pip3 install -v -e . --no-cache-dir
 
-RUN cd /workspace/cope2n-ai-fi/modules/sdsvkvu && pip3 install -v -e .
-RUN cd /workspace/cope2n-ai-fi && pip3 install -r requirements.txt
+# COPY ./modules/sdsvkvu/sdsvkvu/externals/sdsvocr/externals/sdsv_dewarp /tmp/sdsv_dewarp
+# RUN cd /tmp/sdsv_dewarp && pip install -v -e . --no-cache-dir
+
+# COPY ./modules/sdsvkvu/sdsvkvu/externals/sdsvocr/externals/sdsvtd /tmp/sdsvtd
+# RUN cd /tmp/sdsvtd && pip install -v -e . --no-cache-dir
+
+# COPY ./modules/sdsvkvu/sdsvkvu/externals/sdsvocr/externals/sdsvtr /tmp/sdsvtr
+# RUN cd /tmp/sdsvtr && pip install -v -e . --no-cache-dir
+
+# COPY ./modules/sdsvkvu/sdsvkvu/externals/sdsvocr/requirements.txt /tmp/sdsvocr/requirements.txt
+# RUN pip install -r /tmp/sdsvocr/requirements.txt
+
+# ENV PIP_DEFAULT_TIMEOUT 100
+# COPY ./modules/sdsvkvu /tmp/sdsvkvu
+# RUN cd /tmp/sdsvkvu && pip install -v -e . --no-cache-dir
+
+# COPY ./requirements.txt /tmp/requirements.txt
+# RUN pip install -r /tmp/requirements.txt --no-cache-dir
+
+RUN cd /workspace/cope2n-ai-fi/modules/sdsvkvu && pip3 install -v -e . --no-cache-dir
+RUN cd /workspace/cope2n-ai-fi && pip3 install -r requirements.txt --no-cache-dir
 
 RUN rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
     rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
     rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11 && \
-    ln -s /usr/local/cuda-11.8/targets/x86_64-linux/lib/libcublasLt.so.11 /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
-    ln -s /usr/local/cuda-11.8/targets/x86_64-linux/lib/libcublas.so.11 /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
-    ln -s /usr/local/cuda-11.8/targets/x86_64-linux/lib/libnvblas.so.11 /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11
+    ln -s /opt/conda/lib/libcublasLt.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
+    ln -s /opt/conda/lib/libcublas.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
+    ln -s /opt/conda/lib/libnvblas.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11
+
+
+WORKDIR /workspace
 
 ENV PYTHONPATH="."
 ENV TZ="Asia/Ho_Chi_Minh"
 
-CMD [ "sh", "run.sh"]
+CMD [ "sh", "run.sh"]
\ No newline at end of file

From e824a1c1515c6cb087d1fea4722b5706c94a1c9b Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Mon, 25 Dec 2023 10:32:09 +0700
Subject: [PATCH 04/14] Add: S3 status

---
 cope2n-api/fwd_api/models/SubscriptionRequest.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/cope2n-api/fwd_api/models/SubscriptionRequest.py b/cope2n-api/fwd_api/models/SubscriptionRequest.py
index 4197c2e..780df18 100755
--- a/cope2n-api/fwd_api/models/SubscriptionRequest.py
+++ b/cope2n-api/fwd_api/models/SubscriptionRequest.py
@@ -20,6 +20,7 @@ class SubscriptionRequest(models.Model):
     updated_at = models.DateTimeField(auto_now=True)
     S3_uploaded = models.BooleanField(default=False)
     is_test_request = models.BooleanField(default=False)
+    S3_uploaded = models.BooleanField(default=False)
 
     preprocessing_time = models.FloatField(default=-1)
     ai_inference_start_time = models.FloatField(default=0)

From 3ddc3dace03bd255c9b53e03bdf458ccfecc86b7 Mon Sep 17 00:00:00 2001
From: Viet Anh Nguyen
Date: Thu, 21 Dec 2023 17:31:55 +0700
Subject: [PATCH 05/14] Add preprocessing time to response

---
 speedtest_sync.py | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/speedtest_sync.py b/speedtest_sync.py
index 1f5d84b..3477d3b 100644
--- a/speedtest_sync.py
+++ b/speedtest_sync.py
@@ -127,6 +127,27 @@ def process_file(data):
     }
 
 
+
+# invoice_files = [
+#     ('invoice_file', ('invoice.pdf', open("test_samples/20220303025923NHNE_20220222_Starhub_Order_Confirmation_by_Email.pdf", "rb").read())),
+# ]
+invoice_files = [
+    ('invoice_file', ('invoice.jpg', open("test_samples/sbt/invoice.jpg", "rb").read())),
+]
+imei_files = [
+    ('imei_files', ("test_samples/sbt/imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb").read())),
+    # ('imei_files', ("test_samples/sbt/imei2.jpg", open("test_samples/sbt/imei2.jpg", "rb").read())),
+    # ('imei_files', ("test_samples/sbt/imei3.jpg", open("test_samples/sbt/imei3.jpg", "rb").read())),
+    # ('imei_files', ("test_samples/sbt/imei4.jpeg", open("test_samples/sbt/imei4.jpeg", "rb").read())),
+    # ('imei_files', ("test_samples/sbt/imei5.jpg", open("test_samples/sbt/imei5.jpg", "rb").read())),
+]
+def get_imei_files():
+    num_files = random.randint(1, len(imei_files) + 1)
+    files = imei_files[:num_files]
+    # print("Num of imei files:", len(files))
+    return files
+def get_files():
+    return invoice_files + imei_files # get_imei_files()
 def gen_input(num_input):
     for _ in range(num_input):
         yield (None, token)

From 857e0d61ac0f7522fe465366de3c774141c0f753 Mon Sep 17 00:00:00 2001
From: Viet Anh Nguyen
Date: Fri, 22 Dec 2023 14:03:56 +0700
Subject: [PATCH 06/14] Add AI processing time

---
 speedtest_sync.py | 13 ++++++++++---
 1 file changed, 10 insertions(+), 3 deletions(-)

diff --git a/speedtest_sync.py b/speedtest_sync.py
index 3477d3b..b7c4b19 100644
--- a/speedtest_sync.py
+++ b/speedtest_sync.py
@@ -135,8 +135,8 @@ invoice_files = [
     ('invoice_file', ('invoice.jpg', open("test_samples/sbt/invoice.jpg", "rb").read())),
 ]
 imei_files = [
-    ('imei_files', ("test_samples/sbt/imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb").read())),
-    # ('imei_files', ("test_samples/sbt/imei2.jpg", open("test_samples/sbt/imei2.jpg", "rb").read())),
+    ('imei_files', ("test_samples/sbt/imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb").read(), 'application/octet-stream')),
+    ('imei_files', ("test_samples/sbt/imei2.jpg", open("test_samples/sbt/imei1.jpg", "rb").read(), 'application/octet-stream')),
     # ('imei_files', ("test_samples/sbt/imei3.jpg", open("test_samples/sbt/imei3.jpg", "rb").read())),
     # ('imei_files', ("test_samples/sbt/imei4.jpeg", open("test_samples/sbt/imei4.jpeg", "rb").read())),
     # ('imei_files', ("test_samples/sbt/imei5.jpg", open("test_samples/sbt/imei5.jpg", "rb").read())),
@@ -146,8 +146,15 @@ def get_imei_files():
     files = imei_files[:num_files]
     # print("Num of imei files:", len(files))
     return files
+# def get_files():
+#     return imei_files
+    # return invoice_files + get_imei_files()
 def get_files():
-    return invoice_files + imei_files # get_imei_files()
+    return {
+        'invoice_file': ('invoice.jpg', open("test_samples/sbt/invoice.jpg", "rb").read()),
+        'imei_files': ("test_samples/sbt/imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb").read(), 'application/octet-stream'),
+        'processType': 12,
+    }
 def gen_input(num_input):
     for _ in range(num_input):
         yield (None, token)

From 18a457c9bd78d919edc1241dea11ce53f0f9dbbe Mon Sep 17 00:00:00 2001
From: Viet Anh Nguyen
Date: Mon, 25 Dec 2023 10:07:58 +0700
Subject: [PATCH 07/14] Update speedtest script

---
 speedtest_sync.py | 28 ----------------------------
 1 file changed, 28 deletions(-)

diff --git a/speedtest_sync.py b/speedtest_sync.py
index b7c4b19..1f5d84b 100644
--- a/speedtest_sync.py
+++ b/speedtest_sync.py
@@ -127,34 +127,6 @@ def process_file(data):
     }
 
 
-
-# invoice_files = [
-#     ('invoice_file', ('invoice.pdf', open("test_samples/20220303025923NHNE_20220222_Starhub_Order_Confirmation_by_Email.pdf", "rb").read())),
-# ]
-invoice_files = [
-    ('invoice_file', ('invoice.jpg', open("test_samples/sbt/invoice.jpg", "rb").read())),
-]
-imei_files = [
-    ('imei_files', ("test_samples/sbt/imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb").read(), 'application/octet-stream')),
-    ('imei_files', ("test_samples/sbt/imei2.jpg", open("test_samples/sbt/imei1.jpg", "rb").read(), 'application/octet-stream')),
-    # ('imei_files', ("test_samples/sbt/imei3.jpg", open("test_samples/sbt/imei3.jpg", "rb").read())),
-    # ('imei_files', ("test_samples/sbt/imei4.jpeg", open("test_samples/sbt/imei4.jpeg", "rb").read())),
-    # ('imei_files', ("test_samples/sbt/imei5.jpg", open("test_samples/sbt/imei5.jpg", "rb").read())),
-]
-def get_imei_files():
-    num_files = random.randint(1, len(imei_files) + 1)
-    files = imei_files[:num_files]
-    # print("Num of imei files:", len(files))
-    return files
-# def get_files():
-#     return imei_files
-    # return invoice_files + get_imei_files()
-def get_files():
-    return {
-        'invoice_file': ('invoice.jpg', open("test_samples/sbt/invoice.jpg", "rb").read()),
-        'imei_files': ("test_samples/sbt/imei1.jpg", open("test_samples/sbt/imei1.jpg", "rb").read(), 'application/octet-stream'),
-        'processType': 12,
-    }
 def gen_input(num_input):
     for _ in range(num_input):
         yield (None, token)

From 3db520d47785526e49a285d35679b08777a4ba32 Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Mon, 25 Dec 2023 14:14:05 +0700
Subject: [PATCH 08/14] Update: build flow

---
 deploy_images.sh        |   2 +-
 docker-compose-dev.yml  |  10 ++-
 docker-compose-prod.yml | 193 ++++++++++++++++++++++++++++++++++++++++
 docker-compose.yml      |  11 +--
 4 files changed, 204 insertions(+), 12 deletions(-)
 create mode 100644 docker-compose-prod.yml

diff --git a/deploy_images.sh b/deploy_images.sh
index 9be2183..8048a3b 100755
--- a/deploy_images.sh
+++ b/deploy_images.sh
@@ -19,6 +19,6 @@ docker compose -f docker-compose-dev.yml build fe-sbt
 docker tag sidp/cope2n-fe-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
 docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
 
-cp ./docker-compose.yml ./docker-compose_${tag}.yml
+cp ./docker-compose-prod.yml ./docker-compose_${tag}.yml
 sed -i "s/{{tag}}/$tag/g" ./docker-compose_${tag}.yml
 cp .env_prod .env_${tag}
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index 9b0f3e4..db1120c 100755
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -81,10 +81,12 @@ services:
     depends_on:
       db-sbt:
         condition: service_started
-    command: sh -c "chmod -R 777 /app/static; sleep 5; python manage.py collectstatic --no-input &&
-      python manage.py migrate &&
-      python manage.py compilemessages &&
-      gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
+    # command: sh -c "chmod -R 777 /app/static; sleep 5; python manage.py collectstatic --no-input &&
+    #   python manage.py migrate &&
+    #   python manage.py compilemessages &&
+    #   gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
+    command: bash -c "tail -f > /dev/null"
+
   minio:
     image: minio/minio
     environment:
diff --git a/docker-compose-prod.yml b/docker-compose-prod.yml
new file mode 100644
index 0000000..280499a
--- /dev/null
+++ b/docker-compose-prod.yml
@@ -0,0 +1,193 @@
+
+# TODO: use docker-compose extend: for compact purpose
+version: '3.0'
+networks:
+  ctel-sbt:
+    driver: bridge
+
+services:
+  cope2n-fi-sbt:
+    shm_size: 10gb
+    mem_limit: 10gb
+    restart: always
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:{{tag}}
+    networks:
+      - ctel-sbt
+    privileged: true
+    environment:
+      - CELERY_BROKER=amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq-sbt:5672
+    working_dir: /workspace/cope2n-ai-fi
+    command: bash run.sh
+    deploy:
+      mode: replicated
+      replicas: 2
+
+  # Back-end services
+  be-ctel-sbt:
+    environment:
+      - MEDIA_ROOT=${MEDIA_ROOT}
+      - DB_ENGINE=${DB_ENGINE}
+      - DB_SCHEMA=${DB_SCHEMA}
+      - DB_USER=${DB_USER}
+      - DB_PASSWORD=${DB_PASSWORD}
+      - DB_HOST=${DB_HOST}
+      - DB_PORT=${DB_PUBLIC_PORT}
+      - DEBUG=${DEBUG}
+      - CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS}
+      - BASE_PORT=${BASE_PORT}
+      - CTEL_KEY=${CTEL_KEY}
+      - SECRET_KEY=${SECRET_KEY}
+      - DB_INTERNAL_KEY=${DB_INTERNAL_KEY}
+      - ALLOWED_HOSTS=${ALLOWED_HOSTS}
+      - BROKER_URL=amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq-sbt:5672
+      - BASE_URL=http://be-ctel-sbt:${BASE_PORT}
+      - BASE_UI_URL=http://fe:${VITE_PORT}
+      - AUTH_TOKEN_LIFE_TIME=${AUTH_TOKEN_LIFE_TIME}
+      - IMAGE_TOKEN_LIFE_TIME=${IMAGE_TOKEN_LIFE_TIME}
+      - INTERNAL_SDS_KEY=${INTERNAL_SDS_KEY}
+      - FI_USER_NAME=${FI_USER_NAME}
+      - FI_PASSWORD=${FI_PASSWORD}
+      - S3_ENDPOINT=${S3_ENDPOINT}
+      - S3_ACCESS_KEY=${S3_ACCESS_KEY}
+      - S3_SECRET_KEY=${S3_SECRET_KEY}
+      - S3_BUCKET_NAME=${S3_BUCKET_NAME}
+    restart: always
+    mem_limit: 10gb
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
+    networks:
+      - ctel-sbt
+    volumes:
+      - ${HOST_MEDIA_FOLDER}:${MEDIA_ROOT}
+      - BE_static:/app/static
+    working_dir: /app
+    command: sh -c "chmod -R 777 /app/static; sleep 5; python manage.py collectstatic --no-input &&
+      python manage.py migrate &&
+      python manage.py compilemessages &&
+      gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
+
+  minio:
+    image: minio/minio
+    environment:
+      - MINIO_ROOT_USER=${S3_ACCESS_KEY}
+      - MINIO_ROOT_PASSWORD=${S3_SECRET_KEY}
+      - MINIO_ACCESS_KEY=${S3_ACCESS_KEY}
+      - MINIO_SECRET_KEY=${S3_SECRET_KEY}
+    volumes:
+      - ./data/minio_data:/data
+    networks:
+      - ctel-sbt
+    restart: always
+    command: server --address :9884 --console-address :9885 /data
+    profiles: ["local"]
+
+  createbuckets:
+    image: minio/mc
+    depends_on:
+      - minio
+    entrypoint: >
+      /bin/sh -c "
+      /usr/bin/mc alias set myminio http://minio:9884 ${S3_ACCESS_KEY} ${S3_SECRET_KEY};
+      /usr/bin/mc mb myminio/${S3_BUCKET_NAME};
+      /usr/bin/mc policy set public myminio/${S3_BUCKET_NAME};
+      exit 0;
+      "
+    networks:
+      - ctel-sbt
+    profiles: ["local"]
+
+  result-cache:
+    image: redis:6.2-alpine
+    restart: always
+    mem_limit: 10gb
+    command: redis-server --save 20 1 --loglevel warning
+    networks:
+      - ctel-sbt
+
+  be-celery-sbt:
+    environment:
+      - MEDIA_ROOT=${MEDIA_ROOT}
+      - PYTHONPATH=${PYTHONPATH}:/app # For import module
+      - PYTHONUNBUFFERED=1 # For show print log
+      - DB_ENGINE=${DB_ENGINE}
+      - DB_SCHEMA=${DB_SCHEMA}
+      - DB_USER=${DB_USER}
+      - DB_PASSWORD=${DB_PASSWORD}
+      - DB_HOST=${DB_HOST}
+      - DB_PORT=${DB_PUBLIC_PORT}
+      - BROKER_URL=amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq-sbt:5672
+      - BASE_UI_URL=http://fe:${VITE_PORT}
+      - DEBUG=${DEBUG}
+      - DB_INTERNAL_KEY=${DB_INTERNAL_KEY}
+      - IMAGE_TOKEN_LIFE_TIME=${IMAGE_TOKEN_LIFE_TIME}
+      - CTEL_KEY=${CTEL_KEY}
+      - SECRET_KEY=${SECRET_KEY}
+      - ALLOWED_HOSTS=${ALLOWED_HOSTS}
+      - S3_ENDPOINT=${S3_ENDPOINT}
+      - S3_ACCESS_KEY=${S3_ACCESS_KEY}
+      - S3_SECRET_KEY=${S3_SECRET_KEY}
+      - S3_BUCKET_NAME=${S3_BUCKET_NAME}
+      - BASE_URL=http://be-ctel-sbt:${BASE_PORT}
+      - REDIS_HOST=result-cache
+      - REDIS_PORT=6379
+    restart: always
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
+    networks:
+      - ctel-sbt
+    depends_on:
+      rabbitmq-sbt:
+        condition: service_started
+    volumes:
+      - ${HOST_MEDIA_FOLDER}:${MEDIA_ROOT}
+
+    working_dir: /app
+    command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
+
+  # Back-end persistent
+  db-sbt:
+    restart: always
+    mem_reservation: 500m
+    image: postgres:14.7-alpine
+    volumes:
+      - ./data/postgres_data:/var/lib/postgresql/data
+    networks:
+      - ctel-sbt
+    environment:
+      - POSTGRES_USER=${DB_USER}
+      - POSTGRES_PASSWORD=${DB_PASSWORD}
+      - POSTGRES_DB=${DB_SCHEMA}
+    profiles: ["local"]
+
+  rabbitmq-sbt:
+    mem_reservation: 600m
+    restart: always
+    image: rabbitmq:3.10-alpine
+    working_dir: /workspace/cope2n-api
+    networks:
+      - ctel-sbt
+    environment:
+      - RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER}
+      - RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS}
+
+  # Front-end services
+  fe-sbt:
+    restart: always
+    mem_limit: 4gb
+    shm_size: 10gb
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:{{tag}}
+    privileged: true
+    ports:
+      - 80:80
+    depends_on:
+      be-ctel-sbt:
+        condition: service_started
+      be-celery-sbt:
+        condition: service_started
+    environment:
+      - VITE_PROXY=http://be-ctel-sbt:${BASE_PORT}
+      - VITE_API_BASE_URL=http://fe-sbt:80
+    volumes:
+      - BE_static:/backend-static
+    networks:
+      - ctel-sbt
+
+volumes:
+  db_data:
+  BE_static:
diff --git a/docker-compose.yml b/docker-compose.yml
index 7205573..50a9ef7 100755
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -10,7 +10,7 @@ services:
     shm_size: 10gb
     mem_limit: 10gb
     restart: always
-    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt
     networks:
      - ctel-sbt
     privileged: true
@@ -53,7 +53,7 @@ services:
       - S3_BUCKET_NAME=${S3_BUCKET_NAME}
     restart: always
     mem_limit: 10gb
-    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:
     networks:
       - ctel-sbt
     volumes:
@@ -62,9 +62,6 @@ services:
 
       - ./cope2n-api:/app
     working_dir: /app
-    depends_on:
-      db-sbt:
-        condition: service_started
     command: sh -c "chmod -R 777 /app/static; sleep 5; python manage.py collectstatic --no-input &&
       python manage.py migrate &&
       python manage.py compilemessages &&
@@ -131,7 +131,7 @@ services:
       - REDIS_HOST=result-cache
       - REDIS_PORT=6379
     restart: always
-    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:
     networks:
       - ctel-sbt
     depends_on:
@@ -174,7 +174,7 @@ services:
     restart: always
     mem_limit: 4gb
     shm_size: 10gb
-    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt
     privileged: true
     ports:
       - ${SIDP_SERVICE_PORT:-9881}:80

From 4a2e73c3c5885ec9a74ae50ae92408fd7398225f Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Mon, 25 Dec 2023 18:48:50 +0700
Subject: [PATCH 09/14] Add: delay task for local file removal

---
 .gitignore                                    |  1 +
 cope2n-api/fwd/settings.py                    | 12 ------
 cope2n-api/fwd_api/bg_tasks/__init__.py       |  0
 .../fwd_api/bg_tasks/clean_local_files.py     | 19 ---------
 cope2n-api/fwd_api/bg_tasks/cron_job.py       | 31 --------------
 .../fwd_api/celery_worker/client_connector.py | 11 +++--
 .../fwd_api/celery_worker/internal_task.py    | 12 +++++-
 cope2n-api/fwd_api/celery_worker/worker.py    |  4 ++
 .../0158_subscriptionrequest_s3_uploaded.py   | 18 +++++++++
 .../migrations/0162_merge_20231225_1439.py    | 14 +++++++
 cope2n-api/fwd_api/utils/file.py              |  1 +
 docker-compose-dev.yml                        | 10 +++--
 12 files changed, 60 insertions(+), 73 deletions(-)
 delete mode 100644 cope2n-api/fwd_api/bg_tasks/__init__.py
 delete mode 100644 cope2n-api/fwd_api/bg_tasks/clean_local_files.py
 delete mode 100644 cope2n-api/fwd_api/bg_tasks/cron_job.py
 create mode 100644 cope2n-api/fwd_api/migrations/0158_subscriptionrequest_s3_uploaded.py
 create mode 100644 cope2n-api/fwd_api/migrations/0162_merge_20231225_1439.py

diff --git a/.gitignore b/.gitignore
index de72e23..d1da6cb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -32,3 +32,4 @@ demo-ocr/
 logs/
 docker-compose_.yml
 cope2n-ai-fi/Dockerfile_old_work
+*.sql
diff --git a/cope2n-api/fwd/settings.py b/cope2n-api/fwd/settings.py
index 13740da..514ff37 100755
--- a/cope2n-api/fwd/settings.py
+++ b/cope2n-api/fwd/settings.py
@@ -59,7 +59,6 @@ INSTALLED_APPS = [
     'drf_spectacular',
     'drf_spectacular_sidecar',  # required for Django collectstatic discovery
     'corsheaders',
-    "django_celery_beat",
 ]
 
 
@@ -208,17 +207,6 @@ BROKER_URL = env.str("BROKER_URL", default="amqp://test:test@107.120.70.226:5672
 CELERY_TASK_TRACK_STARTED = True
 CELERY_TASK_TIME_LIMIT = 30 * 60
 
-CELERY_BEAT_SCHEDULE = {
-    'clean_local_file': {
-        'task': 'fwd_api.celery_worker.internal_task.clean_local_files',
-        'schedule': 3600.0,
-        'args': (),
-        'options': {
-            'expires': 120.0,
-        },
-    },
-}
-
 MAX_UPLOAD_SIZE_OF_A_FILE = 100 * 1024 * 1024  # 100 MB
 MAX_UPLOAD_FILE_SIZE_OF_A_REQUEST = 100 * 1024 * 1024  # 100 MB
 MAX_UPLOAD_FILES_IN_A_REQUEST = 5
diff --git a/cope2n-api/fwd_api/bg_tasks/__init__.py b/cope2n-api/fwd_api/bg_tasks/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/cope2n-api/fwd_api/bg_tasks/clean_local_files.py b/cope2n-api/fwd_api/bg_tasks/clean_local_files.py
deleted file mode 100644
index 5fe41a1..0000000
--- a/cope2n-api/fwd_api/bg_tasks/clean_local_files.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from django.conf import settings
-from django.utils import timezone
-
-from fwd_api.models import SubscriptionRequest
-from .cron_job import CronJob
-from fwd_api.celery_worker.worker import app as celery_app
-
-class FileCleaningWorker(CronJob):
-    """Clean old files to save disk space"""
-
-    def run(self):
-        print("-----------")
-
-
-
-@celery_app.task(time_limit=5000)
-def clean_media():
-    worker = FileCleaningWorker("file_cleaning_worker")
-    worker.run()
\ No newline at end of file
diff --git a/cope2n-api/fwd_api/bg_tasks/cron_job.py b/cope2n-api/fwd_api/bg_tasks/cron_job.py
deleted file mode 100644
index 639462a..0000000
--- a/cope2n-api/fwd_api/bg_tasks/cron_job.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import traceback
-
-from celery.utils.log import get_task_logger
-
-
-class CronJob:
-    def __init__(
-        self,
-        name,
-    ):
-        self.name = name
-        self.logger = self.get_logger()
-
-    def get_logger(self):
-        """Create/Get the logger for this task"""
-        logger = get_task_logger(self.name)
-        return logger
-
-    def safe_run(self):
-        """Create a logger and execute run()
-        in a try/except block to prevent crashing
-        """
-        try:
-            self.run()
-        except Exception as e:
-            self.logger.error("Failed to run cron job in safe mode.")
-            self.logger.error(e)
-            traceback.print_exc()
-
-    def run(self):
-        raise NotImplementedError("Not implemented error")
\ No newline at end of file
diff --git a/cope2n-api/fwd_api/celery_worker/client_connector.py b/cope2n-api/fwd_api/celery_worker/client_connector.py
index 10557ed..db411a6 100755
--- a/cope2n-api/fwd_api/celery_worker/client_connector.py
+++ b/cope2n-api/fwd_api/celery_worker/client_connector.py
@@ -29,9 +29,9 @@ class CeleryConnector:
         'process_manulife_invoice': {'queue': "invoice_manulife"},
         'process_sbt_invoice': {'queue': "invoice_sbt"},
         'do_pdf': {'queue': "do_pdf"},
-        'upload_file_to_s3': {'queue': "upload_file_to_s3"},
+        'upload_file_to_s3': {'queue': "upload_file_to_s3"},
         'upload_obj_to_s3': {'queue': "upload_obj_to_s3"},
-
+        'remove_local_file': {'queue': "remove_local_file"},
     }
     app = Celery(
@@ -45,6 +45,8 @@ class CeleryConnector:
         return self.send_task('upload_file_to_s3', args)
     def upload_obj_to_s3(self, args):
         return self.send_task('upload_obj_to_s3', args)
+    def remove_local_file(self, args):
+        return self.send_task('remove_local_file', args, countdown=600) # nearest execution of this task in 10 minutes
     def process_fi(self, args):
         return self.send_task('process_fi_invoice', args)
     def process_fi_result(self, args):
@@ -86,10 +88,9 @@ class CeleryConnector:
     def process_invoice_sbt(self, args):
         return self.send_task('process_sbt_invoice', args)
 
-    def send_task(self, name=None, args=None):
+    def send_task(self, name=None, args=None, countdown=None):
         if name not in self.task_routes or 'queue' not in self.task_routes[name]:
             raise GeneralException("System")
-        return self.app.send_task(name, args, queue=self.task_routes[name]['queue'], expires=300)
-
+        return self.app.send_task(name, args, queue=self.task_routes[name]['queue'], expires=300, countdown=countdown)
 
 c_connector = CeleryConnector()
diff --git a/cope2n-api/fwd_api/celery_worker/internal_task.py b/cope2n-api/fwd_api/celery_worker/internal_task.py
index 7ae29ac..d1376d5 100755
--- a/cope2n-api/fwd_api/celery_worker/internal_task.py
+++ b/cope2n-api/fwd_api/celery_worker/internal_task.py
@@ -123,14 +123,24 @@ def upload_file_to_s3(local_file_path, s3_key, request_id):
     if s3_client.s3_client is not None:
         try:
             s3_client.upload_file(local_file_path, s3_key)
-            sub_request = SubscriptionRequest.objects.filter(request_id=request_id)
+            sub_request = SubscriptionRequest.objects.filter(request_id=request_id)[0]
             sub_request.S3_uploaded = True
             sub_request.save()
         except Exception as e:
+            logger.error(f"Unable to set S3: {e}")
+            print(f"Unable to set S3: {e}")
             return
     else:
         logger.info(f"S3 is not available, skipping,...")
 
+@app.task(name='remove_local_file')
+def remove_local_file(local_file_path, request_id):
+    print(f"[INFO] Removing local file: {local_file_path}, ...")
+    try:
+        os.remove(local_file_path)
+    except Exception as e:
+        logger.info(f"Unable to remove local file: {e}")
+
 @app.task(name='upload_obj_to_s3')
 def upload_obj_to_s3(byte_obj, s3_key):
     if s3_client.s3_client is not None:
diff --git a/cope2n-api/fwd_api/celery_worker/worker.py b/cope2n-api/fwd_api/celery_worker/worker.py
index b8530fc..ee497cd 100755
--- a/cope2n-api/fwd_api/celery_worker/worker.py
+++ b/cope2n-api/fwd_api/celery_worker/worker.py
@@ -37,6 +37,8 @@ app.conf.update({
         Queue('do_pdf'),
         Queue('upload_file_to_s3'),
         Queue('upload_obj_to_s3'),
+        Queue('remove_local_file'),
+
     ],
     'task_routes': {
@@ -51,6 +53,8 @@ app.conf.update({
         'do_pdf': {'queue': "do_pdf"},
         'upload_file_to_s3': {'queue': "upload_file_to_s3"},
         'upload_obj_to_s3': {'queue': "upload_obj_to_s3"},
+        'upload_file_to_s3': {'queue': "upload_file_to_s3"},
+        'remove_local_file': {'queue': "remove_local_file"},
     }
 })
diff --git a/cope2n-api/fwd_api/migrations/0158_subscriptionrequest_s3_uploaded.py b/cope2n-api/fwd_api/migrations/0158_subscriptionrequest_s3_uploaded.py
new file mode 100644
index 0000000..97b3810
--- /dev/null
+++ b/cope2n-api/fwd_api/migrations/0158_subscriptionrequest_s3_uploaded.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.3 on 2023-12-22 10:10
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('fwd_api', '0157_alter_subscriptionrequest_created_at'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='subscriptionrequest',
+            name='S3_uploaded',
+            field=models.BooleanField(default=False),
+        ),
+    ]
diff --git a/cope2n-api/fwd_api/migrations/0162_merge_20231225_1439.py b/cope2n-api/fwd_api/migrations/0162_merge_20231225_1439.py
new file mode 100644
index 0000000..b8d5f4b
--- /dev/null
+++ b/cope2n-api/fwd_api/migrations/0162_merge_20231225_1439.py
@@ -0,0 +1,14 @@
+# Generated by Django 4.1.3 on 2023-12-25 07:39
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('fwd_api', '0158_subscriptionrequest_s3_uploaded'),
+        ('fwd_api', '0161_alter_subscriptionrequest_ai_inference_start_time_and_more'),
+    ]
+
+    operations = [
+    ]
diff --git a/cope2n-api/fwd_api/utils/file.py b/cope2n-api/fwd_api/utils/file.py
index 5e83d0a..4a94b39 100644
--- a/cope2n-api/fwd_api/utils/file.py
+++ b/cope2n-api/fwd_api/utils/file.py
@@ -160,6 +160,7 @@ def save_to_S3(file_name, rq, local_file_path):
         assert len(file_path.split("/")) >= 2, "file_path must have at least process type and request id"
         s3_key = os.path.join(file_path.split("/")[-2], file_path.split("/")[-1], file_name)
         c_connector.upload_file_to_s3((local_file_path, s3_key, request_id))
+        c_connector.remove_local_file((local_file_path, request_id))
         return s3_key
     except Exception as e:
         print(f"[ERROR]: {e}")
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index db1120c..45edf08 100755
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -81,11 +81,11 @@ services:
     depends_on:
       db-sbt:
         condition: service_started
-    # command: sh -c "chmod -R 777 /app/static; sleep 5; python manage.py collectstatic --no-input &&
-    #   python manage.py migrate &&
-    #   python manage.py compilemessages &&
-    #   gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
-    command: bash -c "tail -f > /dev/null"
+    command: sh -c "chmod -R 777 /app/static; sleep 5; python manage.py collectstatic --no-input &&
+      python manage.py migrate &&
+      python manage.py compilemessages &&
+      gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
+    # command: bash -c "tail -f > /dev/null"
 
   minio:
     image: minio/minio

From f17485c4c6ca3c3c3e0c3dc52d25f110e19bb08f Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Tue, 26 Dec 2023 10:40:18 +0700
Subject: [PATCH 10/14] Update: TR models path

---
 cope2n-ai-fi/configs/sdsap_sbt/configs.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cope2n-ai-fi/configs/sdsap_sbt/configs.py b/cope2n-ai-fi/configs/sdsap_sbt/configs.py
index 8d40ba0..e2d43b2 100755
--- a/cope2n-ai-fi/configs/sdsap_sbt/configs.py
+++ b/cope2n-ai-fi/configs/sdsap_sbt/configs.py
@@ -6,7 +6,7 @@ ocr_engine = {
         "device": device
     },
     "recognizer": {
-        "version": "/workspace/cope2n-ai-fi/weights/models/sdsvtr/hub/sbt_20231218_e116_sdstr.pth",
+        "version": "/workspace/cope2n-ai-fi/weights/models/ocr_engine/sdsvtr/hub/sbt_20231218_e116_sdstr.pth",
         "device": device
     },
     "deskew": {

From da35ba418abc5683d76466e2153715abe84a5072 Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Tue, 26 Dec 2023 10:41:36 +0700
Subject: [PATCH 11/14] Update: Build

---
 .gitignore                                           | 2 ++
 cope2n-ai-fi/.dockerignore                           | 1 +
 cope2n-api/fwd_api/celery_worker/client_connector.py | 2 +-
 3 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index d1da6cb..7cffb73 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,3 +33,5 @@ logs/
 docker-compose_.yml
 cope2n-ai-fi/Dockerfile_old_work
 *.sql
+*.sql
+.env_prod
diff --git a/cope2n-ai-fi/.dockerignore b/cope2n-ai-fi/.dockerignore
index d38e621..b2ac9a7 100755
--- a/cope2n-ai-fi/.dockerignore
+++ b/cope2n-ai-fi/.dockerignore
@@ -1,6 +1,7 @@
 .github
 .git
 .vscode
+packages/
 __pycache__
 DataBase/image_temp/
 DataBase/json_temp/
diff --git a/cope2n-api/fwd_api/celery_worker/client_connector.py b/cope2n-api/fwd_api/celery_worker/client_connector.py
index db411a6..3ff8f88 100755
--- a/cope2n-api/fwd_api/celery_worker/client_connector.py
+++ b/cope2n-api/fwd_api/celery_worker/client_connector.py
@@ -46,7 +46,7 @@ class CeleryConnector:
     def upload_obj_to_s3(self, args):
         return self.send_task('upload_obj_to_s3', args)
     def remove_local_file(self, args):
-        return self.send_task('remove_local_file', args, countdown=600) # nearest execution of this task in 10 minutes
+        return self.send_task('remove_local_file', args, countdown=280) # nearest execution of this task in 280 seconds
     def process_fi(self, args):
         return self.send_task('process_fi_invoice', args)
     def process_fi_result(self, args):

From 683df276a919cb277d1597a962eb1bbcc7cd8340 Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Tue, 26 Dec 2023 10:42:08 +0700
Subject: [PATCH 12/14] Remove env file

---
 .env_prod | 40 ----------------------------------------
 1 file changed, 40 deletions(-)
 delete mode 100644 .env_prod

diff --git a/.env_prod b/.env_prod
deleted file mode 100644
index 453c5a3..0000000
--- a/.env_prod
+++ /dev/null
@@ -1,40 +0,0 @@
-MEDIA_ROOT=/app/media
-# DATABASE django setup
-DB_ENGINE=django.db.backends.postgresql_psycopg2
-DB_SCHEMA=sbt2
-DB_USER=sbt
-DB_PASSWORD=sbtCH240
-DB_HOST=sbt.cxetpslawu4p.ap-southeast-1.rds.amazonaws.com
-DB_PUBLIC_PORT=5432
-DB_INTERNAL_PORT=5432
-
-DEBUG=TRUE
-CORS_ALLOWED_ORIGINS=*
-CTEL_KEY=fTjWnZr4u7x!A%D*G-KaPdRgUkXp2s5v
-DB_INTERNAL_KEY=7LYk-iaWTFPqsZHIE5GHuv41S0c_Vlb0ZVc-BnsEZqQ=
-ALLOWED_HOSTS='*'
-BROKER_URL=amqp://test:test@rabbitmq-manulife-sbt:5672
-BASE_URL=http://be-ctel-sbt:9000
-BASE_UI_URL=http://fe-sbt:9801
-HOST_MEDIA_FOLDER=./media
-GID=1000
-UID=198
-SECRET_KEY=999999999999999999999999999999999999999999999999999999999999999999
-RABBITMQ_DEFAULT_USER=test
-RABBITMQ_DEFAULT_PASS=test
-BASE_PORT=9000
-S3_ACCESS_KEY=AKIA3AFPFVWZD77UACHE
-S3_SECRET_KEY=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
-S3_BUCKET_NAME=ocr-sds
-
-AUTH_TOKEN_LIFE_TIME=168
-IMAGE_TOKEN_LIFE_TIME=168
-INTERNAL_SDS_KEY=TannedCung
-FI_USER_NAME=sbt
-FI_PASSWORD=7Eg4AbWIXDnufgn
-
-# Front end env variables
-# VITE_PORT=80
-# VITE_PROXY=http://0.0.0.0
-# VITE_API_BASE_URL=http://0.0.0.0:8000
-# PORT=8002
\ No newline at end of file

From a59d4c51f4d3d44530baeaca40dcbcc2db5db284 Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Tue, 26 Dec 2023 11:07:04 +0700
Subject: [PATCH 13/14] Remove: redundant submodules

---
 cope2n-ai-fi/modules/sdsvkie_    | 1 -
 cope2n-ai-fi/modules/sdsvkv_oldu | 1 -
 cope2n-ai-fi/modules/sdsvtd_     | 1 -
 cope2n-ai-fi/modules/sdsvtr_     | 1 -
 4 files changed, 4 deletions(-)
 delete mode 160000 cope2n-ai-fi/modules/sdsvkie_
 delete mode 160000 cope2n-ai-fi/modules/sdsvkv_oldu
 delete mode 160000 cope2n-ai-fi/modules/sdsvtd_
 delete mode 160000 cope2n-ai-fi/modules/sdsvtr_

diff --git a/cope2n-ai-fi/modules/sdsvkie_ b/cope2n-ai-fi/modules/sdsvkie_
deleted file mode 160000
index 8349a89..0000000
--- a/cope2n-ai-fi/modules/sdsvkie_
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 8349a89de7fd8c9e6958907047d16bdc23a77adf
diff --git a/cope2n-ai-fi/modules/sdsvkv_oldu b/cope2n-ai-fi/modules/sdsvkv_oldu
deleted file mode 160000
index bdba044..0000000
--- a/cope2n-ai-fi/modules/sdsvkv_oldu
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit bdba044bb2eacac7c7cfe0e0f321196d03b681f6
diff --git a/cope2n-ai-fi/modules/sdsvtd_ b/cope2n-ai-fi/modules/sdsvtd_
deleted file mode 160000
index a9a796f..0000000
--- a/cope2n-ai-fi/modules/sdsvtd_
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit a9a796f843f6ad99977a8dcba02d65fe75225574
diff --git a/cope2n-ai-fi/modules/sdsvtr_ b/cope2n-ai-fi/modules/sdsvtr_
deleted file mode 160000
index 2c788e9..0000000
--- a/cope2n-ai-fi/modules/sdsvtr_
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 2c788e9e2d7fe369869d474fbb22426a4ca84590
From d3708468b04669f72e18d780bae1c741b79f9e4a Mon Sep 17 00:00:00 2001
From: dx-tan
Date: Tue, 26 Dec 2023 14:10:59 +0700
Subject: [PATCH 14/14] Update Build

---
 cope2n-ai-fi/Dockerfile       | 12 ++++++------
 cope2n-ai-fi/requirements.txt |  3 ++-
 deploy_images.py              |  7 ++++---
 deploy_images.sh              | 25 +++++++++++++------------
 docker-compose-dev.yml        |  2 +-
 docker-compose-prod.yml       |  8 +++++---
 6 files changed, 31 insertions(+), 26 deletions(-)

diff --git a/cope2n-ai-fi/Dockerfile b/cope2n-ai-fi/Dockerfile
index 1bb583f..13ba941 100755
--- a/cope2n-ai-fi/Dockerfile
+++ b/cope2n-ai-fi/Dockerfile
@@ -48,12 +48,12 @@ RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtr && pip3 insta
 RUN cd /workspace/cope2n-ai-fi/modules/sdsvkvu && pip3 install -v -e . --no-cache-dir
 RUN cd /workspace/cope2n-ai-fi && pip3 install -r requirements.txt --no-cache-dir
 
-RUN rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
-    rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
-    rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11 && \
-    ln -s /opt/conda/lib/libcublasLt.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
-    ln -s /opt/conda/lib/libcublas.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
-    ln -s /opt/conda/lib/libnvblas.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11
+# RUN rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
+#     rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
+#     rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11 && \
+#     ln -s /opt/conda/lib/libcublasLt.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
+#     ln -s /opt/conda/lib/libcublas.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
+#     ln -s /opt/conda/lib/libnvblas.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11
 
 
 WORKDIR /workspace
diff --git a/cope2n-ai-fi/requirements.txt b/cope2n-ai-fi/requirements.txt
index 7c388a4..faf39a9 100755
--- a/cope2n-ai-fi/requirements.txt
+++ b/cope2n-ai-fi/requirements.txt
@@ -10,4 +10,5 @@ easydict
 imagesize==1.4.1
 pdf2image==1.16.3
-redis==5.0.1
\ No newline at end of file
+redis==5.0.1
+celery==5.3.6
\ No newline at end of file
diff --git a/deploy_images.py b/deploy_images.py
index 5f1804f..2de1774 100644
--- a/deploy_images.py
+++ b/deploy_images.py
@@ -8,8 +8,8 @@ from datetime import datetime
 BASH_FILE = './deploy_images.sh'
 S3_ENDPOINT = ""
-S3_ACCESS_KEY = "AKIA3AFPFVWZD77UACHE"
-S3_SECRET_KEY = "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"
+S3_ACCESS_KEY = "secret"
+S3_SECRET_KEY = "secret"
 S3_BUCKET = "ocr-deployment-config"
 
 class MinioS3Client:
@@ -77,7 +77,8 @@ def deploy():
     # Define the variable
     tag = str(random_hash()[:8])
     now = datetime.now()
-    tag = tag + "_" + str(now.strftime("%d%m%y%H%M%S"))
+    # tag = tag + "_" + str(now.strftime("%d%m%y%H%M%S"))
+    tag = "4cae5134_261223123256"
     print(tag)
 
     # Execute the Bash script with the variable as a command-line argument
diff --git a/deploy_images.sh b/deploy_images.sh
index 8048a3b..b0b8195 100755
--- a/deploy_images.sh
+++ b/deploy_images.sh
@@ -1,23 +1,24 @@
 #!/bin/bash
+set -e
 
 tag=$1
 echo "[INFO] Tag received from Python: $tag"
 
-echo "[INFO] Pushing AI image with tag: $tag..."
-docker compose -f docker-compose-dev.yml build cope2n-fi-sbt
-docker tag sidp/cope2n-ai-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:${tag}
-docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:${tag}
+# echo "[INFO] Pushing AI image with tag: $tag..."
+# docker compose -f docker-compose-dev.yml build cope2n-fi-sbt
+# docker tag sidp/cope2n-ai-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:${tag}
+# docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:${tag}
 
-echo "[INFO] Pushing BE image with tag: $tag..."
-docker compose -f docker-compose-dev.yml build be-ctel-sbt
-docker tag sidp/cope2n-be-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:${tag}
-docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:${tag}
+# echo "[INFO] Pushing BE image with tag: $tag..."
+# docker compose -f docker-compose-dev.yml build be-ctel-sbt
+# docker tag sidp/cope2n-be-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:${tag}
+# docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:${tag}
 
-echo "[INFO] Pushing FE image with tag: $tag..."
-docker compose -f docker-compose-dev.yml build fe-sbt
-docker tag sidp/cope2n-fe-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
-docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
+# echo "[INFO] Pushing FE image with tag: $tag..."
+# docker compose -f docker-compose-dev.yml build fe-sbt
+# docker tag sidp/cope2n-fe-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
+# docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
 
 cp ./docker-compose-prod.yml ./docker-compose_${tag}.yml
 sed -i "s/{{tag}}/$tag/g" ./docker-compose_${tag}.yml
diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml
index 45edf08..45b7e4f 100755
--- a/docker-compose-dev.yml
+++ b/docker-compose-dev.yml
@@ -173,7 +173,7 @@ services:
   db-sbt:
     restart: always
     mem_reservation: 500m
-    image: postgres:14.7-alpine
+    image: postgres:15.4-alpine
     volumes:
       - ./data/postgres_data:/var/lib/postgresql/data
     networks:
diff --git a/docker-compose-prod.yml b/docker-compose-prod.yml
index 280499a..35106c7 100644
--- a/docker-compose-prod.yml
+++ b/docker-compose-prod.yml
@@ -51,15 +51,16 @@ services:
       - S3_SECRET_KEY=${S3_SECRET_KEY}
       - S3_BUCKET_NAME=${S3_BUCKET_NAME}
     restart: always
+    privileged: true # for chmod
     mem_limit: 10gb
     image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
     networks:
       - ctel-sbt
     volumes:
-      - ${HOST_MEDIA_FOLDER}:${MEDIA_ROOT}
+      - BE_media:${MEDIA_ROOT}
       - BE_static:/app/static
     working_dir: /app
-    command: sh -c "chmod -R 777 /app/static; sleep 5; python manage.py collectstatic --no-input &&
+    command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
       python manage.py migrate &&
       python manage.py compilemessages &&
       gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
@@ -135,7 +136,7 @@ services:
       rabbitmq-sbt:
         condition: service_started
     volumes:
-      - ${HOST_MEDIA_FOLDER}:${MEDIA_ROOT}
+      - BE_media:${MEDIA_ROOT}
 
     working_dir: /app
     command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
@@ -191,3 +192,4 @@ services:
 volumes:
   db_data:
   BE_static:
+  BE_media:
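
Taken together, patches 02, 09, and 11 split file handling across two Celery queues: save_to_S3 (cope2n-api/fwd_api/utils/file.py) enqueues upload_file_to_s3 immediately and remove_local_file with a countdown, so the local copy is deleted only after the upload worker has had time to run. Below is a minimal standalone sketch of that pattern, not the project code itself: the "demo" app name, broker URL, and task bodies are illustrative assumptions; only the queue names, the expires/countdown options, and the 280-second delay mirror the patches.

    # deferred_cleanup_sketch.py - sketch only; broker URL and app name are assumptions
    import os
    from celery import Celery

    app = Celery("demo", broker="amqp://test:test@localhost:5672")

    @app.task(name="upload_file_to_s3")
    def upload_file_to_s3(local_file_path, s3_key, request_id):
        # placeholder: the real task pushes the file to S3 and then marks
        # SubscriptionRequest.S3_uploaded = True for this request_id
        print(f"uploading {local_file_path} -> {s3_key} for {request_id}")

    @app.task(name="remove_local_file")
    def remove_local_file(local_file_path, request_id):
        try:
            os.remove(local_file_path)  # executes roughly 280s after being enqueued
        except FileNotFoundError:
            pass

    def save_to_s3_async(local_file_path, s3_key, request_id):
        # enqueue the upload right away ...
        app.send_task("upload_file_to_s3", (local_file_path, s3_key, request_id),
                      queue="upload_file_to_s3", expires=300)
        # ... and the local cleanup with a delay, as in PATCH 09/11
        app.send_task("remove_local_file", (local_file_path, request_id),
                      queue="remove_local_file", countdown=280)

The countdown trades safety for simplicity: if an upload ever takes longer than the delay, the file can vanish before it is read, which is why the upload task also records S3_uploaded so stragglers can be detected afterwards.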