Merge branch 'main' of https://code.sdsdev.co.kr/dx-tan/SBT-IDP into feature/tests

commit 4444c3c96f

.gitignore (vendored, 8 changes)
@@ -28,4 +28,10 @@ curl.md
 cope2n-api/fwd_api/commands/init_database.py
 /data
 backup
 demo-ocr/
+logs/
+docker-compose_.yml
+cope2n-ai-fi/Dockerfile_old_work
+*.sql
+*.sql
+.env_prod
@@ -1,6 +1,7 @@
 .github
 .git
 .vscode
+packages/
 __pycache__
 DataBase/image_temp/
 DataBase/json_temp/
@@ -1,40 +1,64 @@
-FROM thucpd2408/env-deskew
+FROM pytorch/pytorch:1.13.1-cuda11.6-cudnn8-runtime
 
-COPY ./packages/cudnn-linux*.tar.xz /tmp/cudnn-linux*.tar.xz
+RUN apt-get update && \
+    apt-get install -y git gcc g++ ffmpeg libsm6 libxext6 && \
+    apt-get -y autoremove && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
 
-RUN tar -xvf /tmp/cudnn-linux*.tar.xz -C /tmp/ \
-    && cp /tmp/cudnn-*-archive/include/cudnn*.h /usr/local/cuda/include \
-    && cp -P /tmp/cudnn-*-archive/lib/libcudnn* /usr/local/cuda/lib64 \
-    && chmod a+r /usr/local/cuda/include/cudnn*.h /usr/local/cuda/lib64/libcudnn* \
-    && rm -rf /tmp/cudnn-*-archive
+# RUN pip install torch==1.13.1+cu116 torchvision==0.14.1+cu116 torchaudio==0.13.1 --extra-index-url https://download.pytorch.org/whl/cu116 --no-cache-dir
 
-RUN apt-get update && apt-get install -y gcc g++ ffmpeg libsm6 libxext6 poppler-utils
+RUN pip install -U openmim==0.3.7 --no-cache-dir
+RUN mim install mmcv-full==1.7.1
+RUN pip install mmcv==1.6.0 -f https://download.openmmlab.com/mmcv/dist/cu116/torch1.13/index.html --no-cache-dir
 
-WORKDIR /workspace
+RUN ln -s /opt/conda/lib/python3.10/site-packages/torch/lib/libcudnn.so.8 /usr/lib/libcudnn.so && \
+    ln -s /opt/conda/lib/libcublas.so /usr/lib/libcublas.so
+
+RUN python -m pip install paddlepaddle-gpu==2.4.2.post116 -f https://www.paddlepaddle.org.cn/whl/linux/mkl/avx/stable.html --no-cache-dir
 
-# COPY ./modules/ocr_engine/externals/ /workspace/cope2n-ai-fi/modules/ocr_engine/externals/
-# COPY ./modules/ocr_engine/requirements.txt /workspace/cope2n-ai-fi/modules/ocr_engine/requirements.txt
-# COPY ./modules/sdsvkvu/ /workspace/cope2n-ai-fi/modules/sdsvkvu/
-# COPY ./requirements.txt /workspace/cope2n-ai-fi/requirements.txt
+RUN python -m pip install 'git+https://github.com/facebookresearch/detectron2.git' --no-cache-dir
 
+# Install SDSV packages
 COPY . /workspace/cope2n-ai-fi
 
-RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsv_dewarp && pip3 install -v -e .
-RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtd && pip3 install -v -e .
-RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtr && pip3 install -v -e .
+RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsv_dewarp && pip3 install -v -e . --no-cache-dir
+RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtd && pip3 install -v -e . --no-cache-dir
+RUN cd /workspace/cope2n-ai-fi/modules/ocr_engine/externals/sdsvtr && pip3 install -v -e . --no-cache-dir
 
-RUN cd /workspace/cope2n-ai-fi/modules/sdsvkvu && pip3 install -v -e .
-RUN cd /workspace/cope2n-ai-fi && pip3 install -r requirements.txt
-
-RUN rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
-    rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
-    rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11 && \
-    ln -s /usr/local/cuda-11.8/targets/x86_64-linux/lib/libcublasLt.so.11 /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
-    ln -s /usr/local/cuda-11.8/targets/x86_64-linux/lib/libcublas.so.11 /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
-    ln -s /usr/local/cuda-11.8/targets/x86_64-linux/lib/libnvblas.so.11 /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11
+# COPY ./modules/sdsvkvu/sdsvkvu/externals/sdsvocr/externals/sdsv_dewarp /tmp/sdsv_dewarp
+# RUN cd /tmp/sdsv_dewarp && pip install -v -e . --no-cache-dir
+# COPY ./modules/sdsvkvu/sdsvkvu/externals/sdsvocr/externals/sdsvtd /tmp/sdsvtd
+# RUN cd /tmp/sdsvtd && pip install -v -e . --no-cache-dir
+# COPY ./modules/sdsvkvu/sdsvkvu/externals/sdsvocr/externals/sdsvtr /tmp/sdsvtr
+# RUN cd /tmp/sdsvtr && pip install -v -e . --no-cache-dir
+# COPY ./modules/sdsvkvu/sdsvkvu/externals/sdsvocr/requirements.txt /tmp/sdsvocr/requirements.txt
+# RUN pip install -r /tmp/sdsvocr/requirements.txt
+
+# ENV PIP_DEFAULT_TIMEOUT 100
+# COPY ./modules/sdsvkvu /tmp/sdsvkvu
+# RUN cd /tmp/sdsvkvu && pip install -v -e . --no-cache-dir
+
+# COPY ./requirements.txt /tmp/requirements.txt
+# RUN pip install -r /tmp/requirements.txt --no-cache-dir
+
+RUN cd /workspace/cope2n-ai-fi/modules/sdsvkvu && pip3 install -v -e . --no-cache-dir
+RUN cd /workspace/cope2n-ai-fi && pip3 install -r requirements.txt --no-cache-dir
+
+# RUN rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
+#     rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
+#     rm -f /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11 && \
+#     ln -s /opt/conda/lib/libcublasLt.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublasLt.so.11 && \
+#     ln -s /opt/conda/lib/libcublas.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libcublas.so.11 && \
+#     ln -s /opt/conda/lib/libnvblas.so /usr/local/lib/python3.10/dist-packages/nvidia/cublas/lib/libnvblas.so.11
 
+WORKDIR /workspace
 
 ENV PYTHONPATH="."
 ENV TZ="Asia/Ho_Chi_Minh"
 
 CMD [ "sh", "run.sh"]
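The two RUN ln -s steps above point /usr/lib/libcudnn.so and /usr/lib/libcublas.so at conda's copies instead of unpacking a cuDNN tarball. A quick way to confirm the links resolve inside the built image; a minimal sketch, not part of the repo:

    # sanity_check_libs.py -- run inside the image to verify the symlinked
    # CUDA libraries load; prints FAILED instead of raising if one is missing.
    import ctypes

    for lib in ("/usr/lib/libcudnn.so", "/usr/lib/libcublas.so"):
        try:
            ctypes.CDLL(lib)
            print(f"{lib}: OK")
        except OSError as exc:
            print(f"{lib}: FAILED ({exc})")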
@@ -6,7 +6,7 @@ ocr_engine = {
        "device": device
    },
    "recognizer": {
-        "version": "/workspace/cope2n-ai-fi/weights/models/sdsvtr/hub/sbt_20231218_e116_sdstr.pth",
+        "version": "/workspace/cope2n-ai-fi/weights/models/ocr_engine/sdsvtr/hub/sbt_20231218_e116_sdstr.pth",
        "device": device
    },
    "deskew": {
@@ -1 +0,0 @@
-Subproject commit 8349a89de7fd8c9e6958907047d16bdc23a77adf

@@ -1 +0,0 @@
-Subproject commit bdba044bb2eacac7c7cfe0e0f321196d03b681f6

@@ -1 +0,0 @@
-Subproject commit a9a796f843f6ad99977a8dcba02d65fe75225574

@@ -1 +0,0 @@
-Subproject commit 2c788e9e2d7fe369869d474fbb22426a4ca84590
@@ -10,4 +10,5 @@ easydict
 
 imagesize==1.4.1
 pdf2image==1.16.3
 redis==5.0.1
+celery==5.3.6
@@ -292,6 +292,7 @@ class CtelViewSet(viewsets.ViewSet):
                 if serializer.data[0]["data"].get("status", 200) != 200:
                     continue
 
+                serializer.data[0]["request_id"] = rq_id
                 return Response(status=status.HTTP_200_OK, data=serializer.data[0])
         raise ServiceTimeoutException(excArgs=f"{rq_id}")
 
@@ -29,9 +29,9 @@ class CeleryConnector:
        'process_manulife_invoice': {'queue': "invoice_manulife"},
        'process_sbt_invoice': {'queue': "invoice_sbt"},
        'do_pdf': {'queue': "do_pdf"},
        'upload_file_to_s3': {'queue': "upload_file_to_s3"},
        'upload_obj_to_s3': {'queue': "upload_obj_to_s3"},
+        'remove_local_file': {'queue': "remove_local_file"},
 
    }
    app = Celery(
@@ -45,6 +45,8 @@ class CeleryConnector:
        return self.send_task('upload_file_to_s3', args)
    def upload_obj_to_s3(self, args):
        return self.send_task('upload_obj_to_s3', args)
+    def remove_local_file(self, args):
+        return self.send_task('remove_local_file', args, countdown=280)  # earliest execution of this task in 280 seconds
    def process_fi(self, args):
        return self.send_task('process_fi_invoice', args)
    def process_fi_result(self, args):
@@ -86,10 +88,9 @@ class CeleryConnector:
    def process_invoice_sbt(self, args):
        return self.send_task('process_sbt_invoice', args)
 
-    def send_task(self, name=None, args=None):
+    def send_task(self, name=None, args=None, countdown=None):
        if name not in self.task_routes or 'queue' not in self.task_routes[name]:
            raise GeneralException("System")
-        return self.app.send_task(name, args, queue=self.task_routes[name]['queue'], expires=300)
+        return self.app.send_task(name, args, queue=self.task_routes[name]['queue'], expires=300, countdown=countdown)
 
 
 c_connector = CeleryConnector()
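Note the interaction of the new countdown with the hardcoded expires=300: per Celery's documented semantics, the message is delivered no earlier than t+280 seconds and revoked if not started by t+300, which leaves remove_local_file roughly a 20-second execution window. A minimal, self-contained sketch of the same call; the broker URL and arguments are illustrative, not from this repo:

    # Sketch of the countdown/expires interaction; uses an in-memory broker so
    # it runs without RabbitMQ. Values mirror the connector above.
    from celery import Celery

    app = Celery("sketch", broker="memory://")

    # Delivered no earlier than 280 s from now, dropped if not started by 300 s.
    app.send_task(
        "remove_local_file",
        args=["/tmp/example.jpg", "REQ-1"],  # illustrative arguments
        queue="remove_local_file",
        countdown=280,
        expires=300,
    )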
@@ -119,14 +119,28 @@ def process_pdf(rq_id, sub_id, p_type, user_id, files):
 
 
 @app.task(name='upload_file_to_s3')
-def upload_file_to_s3(local_file_path, s3_key):
+def upload_file_to_s3(local_file_path, s3_key, request_id):
     if s3_client.s3_client is not None:
-        res = s3_client.upload_file(local_file_path, s3_key)
-        if res != None and res["ResponseMetadata"]["HTTPStatusCode"] == 200:
-            os.remove(local_file_path)
+        try:
+            s3_client.upload_file(local_file_path, s3_key)
+            sub_request = SubscriptionRequest.objects.filter(request_id=request_id)[0]
+            sub_request.S3_uploaded = True
+            sub_request.save()
+        except Exception as e:
+            logger.error(f"Unable to set S3: {e}")
+            print(f"Unable to set S3: {e}")
+            return
     else:
         logger.info(f"S3 is not available, skipping,...")
 
+@app.task(name='remove_local_file')
+def remove_local_file(local_file_path, request_id):
+    print(f"[INFO] Removing local file: {local_file_path}, ...")
+    try:
+        os.remove(local_file_path)
+    except Exception as e:
+        logger.info(f"Unable to remove local file: {e}")
 
 @app.task(name='upload_obj_to_s3')
 def upload_obj_to_s3(byte_obj, s3_key):
     if s3_client.s3_client is not None:
@@ -16,6 +16,17 @@ app: Celery = Celery(
     broker_transport_options={'confirm_publish': False},
 )
 
+app.config_from_object("django.conf:settings", namespace="CELERY")
+app.autodiscover_tasks()
+
+@app.on_after_finalize.connect
+def setup_periodic_tasks(sender, **kwargs):
+    from fwd_api.bg_tasks.clean_local_files import clean_media
+    sender.add_periodic_task(
+        10.0, clean_media.s(), expires=120.0
+    )
+
 app.conf.update({
     'task_queues':
         [
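The on_after_finalize hook above registers clean_media as a periodic schedule entry; it only fires when a beat scheduler runs alongside the workers (django-celery-beat==2.5.0 is added to requirements in this same commit). A stand-alone sketch of the pattern, with an illustrative task body:

    # Sketch of add_periodic_task registration; in-memory broker, task is illustrative.
    from celery import Celery

    app = Celery("sketch", broker="memory://")

    @app.task
    def clean_media():
        print("cleaning stale media files ...")

    @app.on_after_finalize.connect
    def setup_periodic_tasks(sender, **kwargs):
        # Same numbers as the diff: enqueue every 10 s, drop entries after 120 s.
        sender.add_periodic_task(10.0, clean_media.s(), expires=120.0)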
@@ -26,6 +37,8 @@ app.conf.update({
             Queue('do_pdf'),
             Queue('upload_file_to_s3'),
             Queue('upload_obj_to_s3'),
+            Queue('remove_local_file'),
+
         ],
     'task_routes': {
@@ -40,6 +53,8 @@ app.conf.update({
         'do_pdf': {'queue': "do_pdf"},
         'upload_file_to_s3': {'queue': "upload_file_to_s3"},
         'upload_obj_to_s3': {'queue': "upload_obj_to_s3"},
+        'upload_file_to_s3': {'queue': "upload_file_to_s3"},
+        'remove_local_file': {'queue': "remove_local_file"},
     }
 })
 
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.3 on 2023-12-22 10:10
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('fwd_api', '0157_alter_subscriptionrequest_created_at'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='subscriptionrequest',
+            name='S3_uploaded',
+            field=models.BooleanField(default=False),
+        ),
+    ]
cope2n-api/fwd_api/migrations/0162_merge_20231225_1439.py (new file, 14 lines)
@@ -0,0 +1,14 @@
+# Generated by Django 4.1.3 on 2023-12-25 07:39
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('fwd_api', '0158_subscriptionrequest_s3_uploaded'),
+        ('fwd_api', '0161_alter_subscriptionrequest_ai_inference_start_time_and_more'),
+    ]
+
+    operations = [
+    ]
@@ -18,7 +18,9 @@ class SubscriptionRequest(models.Model):
     subscription = models.ForeignKey(Subscription, on_delete=models.CASCADE)
     created_at = models.DateTimeField(default=timezone.now, db_index=True)
     updated_at = models.DateTimeField(auto_now=True)
+    S3_uploaded = models.BooleanField(default=False)
     is_test_request = models.BooleanField(default=False)
+    S3_uploaded = models.BooleanField(default=False)
 
     preprocessing_time = models.FloatField(default=-1)
     ai_inference_start_time = models.FloatField(default=0)
@@ -156,9 +156,11 @@ def resize_and_save_file(file_name: str, rq: SubscriptionRequest, file: Temporar
 def save_to_S3(file_name, rq, local_file_path):
     try:
         file_path = get_folder_path(rq)
+        request_id = rq.request_id
         assert len(file_path.split("/")) >= 2, "file_path must have at least process type and request id"
         s3_key = os.path.join(file_path.split("/")[-2], file_path.split("/")[-1], file_name)
-        c_connector.upload_file_to_s3((local_file_path, s3_key))
+        c_connector.upload_file_to_s3((local_file_path, s3_key, request_id))
+        c_connector.remove_local_file((local_file_path, request_id))
         return s3_key
     except Exception as e:
         print(f"[ERROR]: {e}")
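For reference, the s3_key built above keeps only the last two path components (process type and request id) plus the file name. A tiny sketch with assumed values, since get_folder_path's layout is not shown in this diff:

    # Sketch of the s3_key derivation in save_to_S3; the folder layout is assumed.
    import os

    file_path = "media/users/sbt_invoice/REQ-1"   # illustrative get_folder_path(rq) result
    file_name = "page_0.jpg"
    assert len(file_path.split("/")) >= 2, "file_path must have at least process type and request id"
    s3_key = os.path.join(file_path.split("/")[-2], file_path.split("/")[-1], file_name)
    print(s3_key)  # -> sbt_invoice/REQ-1/page_0.jpg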
@@ -49,4 +49,5 @@ djangorestframework-xml==2.0.0
 boto3==1.29.7
 imagesize==1.4.1
 pdf2image==1.16.3
 redis==5.0.1
+django-celery-beat==2.5.0
@@ -8,8 +8,8 @@ from datetime import datetime
 
 BASH_FILE = './deploy_images.sh'
 S3_ENDPOINT = ""
-S3_ACCESS_KEY = "AKIA3AFPFVWZD77UACHE"
-S3_SECRET_KEY = "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"
+S3_ACCESS_KEY = "secret"
+S3_SECRET_KEY = "secret"
 S3_BUCKET = "ocr-deployment-config"
 
 class MinioS3Client:
@@ -77,7 +77,8 @@ def deploy():
     # Define the variable
     tag = str(random_hash()[:8])
     now = datetime.now()
-    tag = tag + "_" + str(now.strftime("%d%m%y%H%M%S"))
+    # tag = tag + "_" + str(now.strftime("%d%m%y%H%M%S"))
+    tag = "4cae5134_261223123256"
     print(tag)
 
     # Execute the Bash script with the variable as a command-line argument
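The commented-out line above is the tag scheme the script normally uses (8 random hex characters plus a ddmmyyHHMMSS timestamp); this commit pins a fixed tag instead. A sketch of the normal scheme, with secrets.token_hex standing in for the repo-local random_hash helper:

    # Sketch of the timestamped tag scheme; secrets.token_hex(4) is a stand-in
    # for random_hash()[:8], which is defined elsewhere in this repo.
    import secrets
    from datetime import datetime

    tag = secrets.token_hex(4)                              # e.g. "4cae5134"
    tag = tag + "_" + str(datetime.now().strftime("%d%m%y%H%M%S"))
    print(tag)                                              # e.g. "4cae5134_261223123256"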
@@ -1,24 +1,25 @@
 #!/bin/bash
+set -e
 
 tag=$1
 
 echo "[INFO] Tag received from Python: $tag"
 
-echo "[INFO] Pushing AI image with tag: $tag..."
-docker compose -f docker-compose-dev.yml build cope2n-fi-sbt
-docker tag sidp/cope2n-ai-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:${tag}
-docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:${tag}
+# echo "[INFO] Pushing AI image with tag: $tag..."
+# docker compose -f docker-compose-dev.yml build cope2n-fi-sbt
+# docker tag sidp/cope2n-ai-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:${tag}
+# docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:${tag}
 
-echo "[INFO] Pushing BE image with tag: $tag..."
-docker compose -f docker-compose-dev.yml build be-ctel-sbt
-docker tag sidp/cope2n-be-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:${tag}
-docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:${tag}
+# echo "[INFO] Pushing BE image with tag: $tag..."
+# docker compose -f docker-compose-dev.yml build be-ctel-sbt
+# docker tag sidp/cope2n-be-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:${tag}
+# docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:${tag}
 
-echo "[INFO] Pushing FE image with tag: $tag..."
-docker compose -f docker-compose-dev.yml build fe-sbt
-docker tag sidp/cope2n-fe-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
-docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
+# echo "[INFO] Pushing FE image with tag: $tag..."
+# docker compose -f docker-compose-dev.yml build fe-sbt
+# docker tag sidp/cope2n-fe-fi-sbt:latest public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
+# docker push public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:${tag}
 
-cp ./docker-compose.yml ./docker-compose_${tag}.yml
+cp ./docker-compose-prod.yml ./docker-compose_${tag}.yml
 sed -i "s/{{tag}}/$tag/g" ./docker-compose_${tag}.yml
-cp .env .env_${tag}
+cp .env_prod .env_${tag}
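The cp + sed pair at the end stamps the image tag into a per-release compose file. The same substitution expressed in Python, in case the sed syntax is unclear (file names as in the script):

    # Sketch: Python equivalent of `cp docker-compose-prod.yml ...` followed by
    # `sed -i "s/{{tag}}/$tag/g"`; assumes the files sit in the working directory.
    from pathlib import Path

    tag = "4cae5134_261223123256"  # illustrative tag
    text = Path("docker-compose-prod.yml").read_text()
    Path(f"docker-compose_{tag}.yml").write_text(text.replace("{{tag}}", tag))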
@@ -76,7 +76,7 @@ services:
     volumes:
       - ${HOST_MEDIA_FOLDER}:${MEDIA_ROOT}
       - BE_static:/app/static
-      # - ./cope2n-api:/app
+      - ./cope2n-api:/app
     working_dir: /app
     depends_on:
       db-sbt:
@@ -85,6 +85,8 @@ services:
       python manage.py migrate &&
       python manage.py compilemessages &&
       gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
+    # command: bash -c "tail -f > /dev/null"
+
   minio:
     image: minio/minio
     environment:
@@ -162,7 +164,7 @@ services:
         condition: service_started
     volumes:
       - ${HOST_MEDIA_FOLDER}:${MEDIA_ROOT}
-      # - ./cope2n-api:/app
+      - ./cope2n-api:/app
 
     working_dir: /app
     command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
@@ -171,7 +173,7 @@ services:
   db-sbt:
     restart: always
     mem_reservation: 500m
-    image: postgres:14.7-alpine
+    image: postgres:15.4-alpine
     volumes:
       - ./data/postgres_data:/var/lib/postgresql/data
     networks:
docker-compose-prod.yml (new file, 195 lines)
@@ -0,0 +1,195 @@
+
+# TODO: use docker-compose extend: for compact purpose
+version: '3.0'
+networks:
+  ctel-sbt:
+    driver: bridge
+
+services:
+  cope2n-fi-sbt:
+    shm_size: 10gb
+    mem_limit: 10gb
+    restart: always
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:{{tag}}
+    networks:
+      - ctel-sbt
+    privileged: true
+    environment:
+      - CELERY_BROKER=amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq-sbt:5672
+    working_dir: /workspace/cope2n-ai-fi
+    command: bash run.sh
+    deploy:
+      mode: replicated
+      replicas: 2
+  # Back-end services
+  be-ctel-sbt:
+    environment:
+      - MEDIA_ROOT=${MEDIA_ROOT}
+      - DB_ENGINE=${DB_ENGINE}
+      - DB_SCHEMA=${DB_SCHEMA}
+      - DB_USER=${DB_USER}
+      - DB_PASSWORD=${DB_PASSWORD}
+      - DB_HOST=${DB_HOST}
+      - DB_PORT=${DB_PUBLIC_PORT}
+      - DEBUG=${DEBUG}
+      - CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS}
+      - BASE_PORT=${BASE_PORT}
+      - CTEL_KEY=${CTEL_KEY}
+      - SECRET_KEY=${SECRET_KEY}
+      - DB_INTERNAL_KEY=${DB_INTERNAL_KEY}
+      - ALLOWED_HOSTS=${ALLOWED_HOSTS}
+      - BROKER_URL=amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq-sbt:5672
+      - BASE_URL=http://be-ctel-sbt:${BASE_PORT}
+      - BASE_UI_URL=http://fe:${VITE_PORT}
+      - AUTH_TOKEN_LIFE_TIME=${AUTH_TOKEN_LIFE_TIME}
+      - IMAGE_TOKEN_LIFE_TIME=${IMAGE_TOKEN_LIFE_TIME}
+      - INTERNAL_SDS_KEY=${INTERNAL_SDS_KEY}
+      - FI_USER_NAME=${FI_USER_NAME}
+      - FI_PASSWORD=${FI_PASSWORD}
+      - S3_ENDPOINT=${S3_ENDPOINT}
+      - S3_ACCESS_KEY=${S3_ACCESS_KEY}
+      - S3_SECRET_KEY=${S3_SECRET_KEY}
+      - S3_BUCKET_NAME=${S3_BUCKET_NAME}
+    restart: always
+    privileged: true # for chmod
+    mem_limit: 10gb
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
+    networks:
+      - ctel-sbt
+    volumes:
+      - BE_media:${MEDIA_ROOT}
+      - BE_static:/app/static
+    working_dir: /app
+    command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
+      python manage.py migrate &&
+      python manage.py compilemessages &&
+      gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
+  minio:
+    image: minio/minio
+    environment:
+      - MINIO_ROOT_USER=${S3_ACCESS_KEY}
+      - MINIO_ROOT_PASSWORD=${S3_SECRET_KEY}
+      - MINIO_ACCESS_KEY=${S3_ACCESS_KEY}
+      - MINIO_SECRET_KEY=${S3_SECRET_KEY}
+    volumes:
+      - ./data/minio_data:/data
+    networks:
+      - ctel-sbt
+    restart: always
+    command: server --address :9884 --console-address :9885 /data
+    profiles: ["local"]
+
+  createbuckets:
+    image: minio/mc
+    depends_on:
+      - minio
+    entrypoint: >
+      /bin/sh -c "
+      /usr/bin/mc alias set myminio http://minio:9884 ${S3_ACCESS_KEY} ${S3_SECRET_KEY};
+      /usr/bin/mc mb myminio/${S3_BUCKET_NAME};
+      /usr/bin/mc policy set public myminio/${S3_BUCKET_NAME};
+      exit 0;
+      "
+    networks:
+      - ctel-sbt
+    profiles: ["local"]
+
+  result-cache:
+    image: redis:6.2-alpine
+    restart: always
+    mem_limit: 10gb
+    command: redis-server --save 20 1 --loglevel warning
+    networks:
+      - ctel-sbt
+
+  be-celery-sbt:
+    environment:
+      - MEDIA_ROOT=${MEDIA_ROOT}
+      - PYTHONPATH=${PYTHONPATH}:/app # For import module
+      - PYTHONUNBUFFERED=1 # For show print log
+      - DB_ENGINE=${DB_ENGINE}
+      - DB_SCHEMA=${DB_SCHEMA}
+      - DB_USER=${DB_USER}
+      - DB_PASSWORD=${DB_PASSWORD}
+      - DB_HOST=${DB_HOST}
+      - DB_PORT=${DB_PUBLIC_PORT}
+      - BROKER_URL=amqp://${RABBITMQ_DEFAULT_USER}:${RABBITMQ_DEFAULT_PASS}@rabbitmq-sbt:5672
+      - BASE_UI_URL=http://fe:${VITE_PORT}
+      - DEBUG=${DEBUG}
+      - DB_INTERNAL_KEY=${DB_INTERNAL_KEY}
+      - IMAGE_TOKEN_LIFE_TIME=${IMAGE_TOKEN_LIFE_TIME}
+      - CTEL_KEY=${CTEL_KEY}
+      - SECRET_KEY=${SECRET_KEY}
+      - ALLOWED_HOSTS=${ALLOWED_HOSTS}
+      - S3_ENDPOINT=${S3_ENDPOINT}
+      - S3_ACCESS_KEY=${S3_ACCESS_KEY}
+      - S3_SECRET_KEY=${S3_SECRET_KEY}
+      - S3_BUCKET_NAME=${S3_BUCKET_NAME}
+      - BASE_URL=http://be-ctel-sbt:${BASE_PORT}
+      - REDIS_HOST=result-cache
+      - REDIS_PORT=6379
+    restart: always
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
+    networks:
+      - ctel-sbt
+    depends_on:
+      rabbitmq-sbt:
+        condition: service_started
+    volumes:
+      - BE_media:${MEDIA_ROOT}
+
+    working_dir: /app
+    command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
+
+  # Back-end persistent
+  db-sbt:
+    restart: always
+    mem_reservation: 500m
+    image: postgres:14.7-alpine
+    volumes:
+      - ./data/postgres_data:/var/lib/postgresql/data
+    networks:
+      - ctel-sbt
+    environment:
+      - POSTGRES_USER=${DB_USER}
+      - POSTGRES_PASSWORD=${DB_PASSWORD}
+      - POSTGRES_DB=${DB_SCHEMA}
+    profiles: ["local"]
+
+  rabbitmq-sbt:
+    mem_reservation: 600m
+    restart: always
+    image: rabbitmq:3.10-alpine
+    working_dir: /workspace/cope2n-api
+    networks:
+      - ctel-sbt
+    environment:
+      - RABBITMQ_DEFAULT_USER=${RABBITMQ_DEFAULT_USER}
+      - RABBITMQ_DEFAULT_PASS=${RABBITMQ_DEFAULT_PASS}
+
+  # Front-end services
+  fe-sbt:
+    restart: always
+    mem_limit: 4gb
+    shm_size: 10gb
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:{{tag}}
+    privileged: true
+    ports:
+      - 80:80
+    depends_on:
+      be-ctel-sbt:
+        condition: service_started
+      be-celery-sbt:
+        condition: service_started
+    environment:
+      - VITE_PROXY=http://be-ctel-sbt:${BASE_PORT}
+      - VITE_API_BASE_URL=http://fe-sbt:80
+    volumes:
+      - BE_static:/backend-static
+    networks:
+      - ctel-sbt
+
+volumes:
+  db_data:
+  BE_static:
+  BE_media:
@@ -10,7 +10,7 @@ services:
     shm_size: 10gb
     mem_limit: 10gb
     restart: always
-    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-ai-fi-sbt
     networks:
       - ctel-sbt
     privileged: true
@@ -53,7 +53,7 @@ services:
       - S3_BUCKET_NAME=${S3_BUCKET_NAME}
     restart: always
     mem_limit: 10gb
-    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:
     networks:
       - ctel-sbt
     volumes:
@@ -62,9 +62,6 @@ services:
       - ./cope2n-api:/app
 
     working_dir: /app
-    depends_on:
-      db-sbt:
-        condition: service_started
     command: sh -c "chmod -R 777 /app/static; sleep 5; python manage.py collectstatic --no-input &&
       python manage.py migrate &&
       python manage.py compilemessages &&
@@ -134,12 +131,10 @@ services:
       - REDIS_HOST=result-cache
       - REDIS_PORT=6379
     restart: always
-    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-be-fi-sbt:
     networks:
       - ctel-sbt
     depends_on:
-      db-sbt:
-        condition: service_started
       rabbitmq-sbt:
         condition: service_started
     volumes:
@@ -179,7 +174,7 @@ services:
     restart: always
     mem_limit: 4gb
     shm_size: 10gb
-    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/cope2n-fe-fi-sbt
     privileged: true
     ports:
       - ${SIDP_SERVICE_PORT:-9881}:80
@@ -163,3 +163,4 @@ num_images = sum(x["num_files"] for x in results if x["success"])
 print("Total images:", num_images)
 print("Uploading + Processing time: {:.3f}s".format(sum(processing_time) / num_images))
 print("--------------------------------------")
+