Fix: false aggregate results

dx-tan 2023-12-26 18:44:03 +07:00
parent ef53803c24
commit e2d576b83a
8 changed files with 28 additions and 6 deletions

View File

@@ -111,6 +111,7 @@ DATABASES = {
         'PASSWORD': env.str("DB_PASSWORD", None),
         'HOST': env.str("DB_HOST", None),
         'PORT': env.str("DB_PORT", None),
+        'CONN_MAX_AGE': None,
     }
 }
@@ -207,7 +208,7 @@ BROKER_URL = env.str("BROKER_URL", default="amqp://test:test@107.120.70.226:5672
 CELERY_TASK_TRACK_STARTED = True
 CELERY_TASK_TIME_LIMIT = 30 * 60
-MAX_UPLOAD_SIZE_OF_A_FILE = 100 * 1024 * 1024  # 100 MB
+MAX_UPLOAD_SIZE_OF_A_FILE = 5 * 1024 * 1024  # 5 MB
 MAX_UPLOAD_FILE_SIZE_OF_A_REQUEST = 100 * 1024 * 1024  # 100 MB
 MAX_UPLOAD_FILES_IN_A_REQUEST = 5
 MAX_PIXEL_IN_A_FILE = 5000
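
Note: setting CONN_MAX_AGE to None makes Django keep each database connection open with no age limit instead of recycling it per request, which can matter for the long-running Celery workers that poll the database in the handler below; MAX_UPLOAD_SIZE_OF_A_FILE is also tightened from 100 MB to 5 MB per file while the 100 MB per-request cap stays. A minimal sketch of the CONN_MAX_AGE semantics (standard Django behaviour; the engine and database name are placeholders, not taken from this repository):

    # CONN_MAX_AGE controls how long a DB connection may be reused:
    #   0      -> close the connection at the end of every request (Django default)
    #   None   -> keep the connection open with no age limit (persistent connections)
    #   N > 0  -> reuse the connection for at most N seconds
    DATABASES = {
        "default": {
            "ENGINE": "django.db.backends.postgresql",  # placeholder
            "NAME": "example_db",                        # placeholder
            "CONN_MAX_AGE": None,                        # as added in this commit
        }
    }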

View File

@@ -1,6 +1,7 @@
 import traceback
 import time
 import uuid
+import logging
 from copy import deepcopy
@@ -146,6 +147,21 @@ def process_invoice_sbt_result(rq_id, result):
     rq_id = rq_id.split("_sub_")[0]
     rq: SubscriptionRequest = \
         SubscriptionRequest.objects.filter(request_id=rq_id, process_type=ProcessType.SBT_INVOICE.value)[0]
+    for i in range(10):
+        if rq.ai_inference_start_time == 0:
+            logging.warn(f"ai_inference_start_time = 0, looks like database is lagging, attemp {i} in 0.2 second ...")
+            rq.refresh_from_db()
+            time.sleep(0.2)
+            if i == 9:  # return an error
+                logging.warn("Unable to retrieve rq, exiting")
+                rq.status = 404  # stop waiting
+                rq.predict_result = result
+                rq.save()
+                update_user(rq)
+                return "FailInvoice"
+        else:
+            break
+
     # status = to_status(result)
     status = result.get("status", 200)
     redis_client.set_cache(rq_id, page_index, result)
@@ -156,13 +172,11 @@ def process_invoice_sbt_result(rq_id, result):
         results = redis_client.get_all_cache(rq_id)
         rq.predict_result = aggregate_result(results, rq.doc_type)
         # print(f"[DEBUG]: rq.predict_result: {rq.predict_result}")
-        redis_client.remove_cache(rq_id)
         rq.save()
     else:
         rq.status = 404  # stop waiting
         rq.predict_result = result
-        redis_client.remove_cache(rq_id)
         rq.save()
     rq.ai_inference_time = time.time() - rq.ai_inference_start_time
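
Note: the new loop works around read-after-write lag between the worker that stamps ai_inference_start_time and the worker handling the result: it re-reads the row up to 10 times at 0.2 s intervals and, if the field is still 0 after the last attempt, marks the request failed. The explicit remove_cache calls are dropped here, presumably because deleting the per-request hash while sibling pages were still pending could drop pages from the aggregation; the hash now expires via the TTL set in RedisUtils below. A simplified, self-contained sketch of the polling pattern (the helper name and signature are hypothetical; the model field follows the diff):

    import logging
    import time

    def wait_for_start_time(rq, attempts=10, delay=0.2):
        # Poll the database until ai_inference_start_time is populated, or give up.
        for i in range(attempts):
            if rq.ai_inference_start_time != 0:
                return True
            logging.warning("ai_inference_start_time is 0, retry %d/%d", i + 1, attempts)
            time.sleep(delay)
            rq.refresh_from_db()  # re-read the row written by the other worker
        return False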

View File

@@ -18,7 +18,6 @@ class SubscriptionRequest(models.Model):
     subscription = models.ForeignKey(Subscription, on_delete=models.CASCADE)
     created_at = models.DateTimeField(default=timezone.now, db_index=True)
     updated_at = models.DateTimeField(auto_now=True)
-    S3_uploaded = models.BooleanField(default=False)
     is_test_request = models.BooleanField(default=False)
     S3_uploaded = models.BooleanField(default=False)

View File

@@ -31,7 +31,7 @@ def validate_list_file(files, max_file_num=settings.MAX_UPLOAD_FILES_IN_A_REQUES
             raise InvalidException(excArgs="files")
         extension = f.name.split(".")[-1].lower() in allowed_file_extensions
         if not extension or "." not in f.name:
-            raise FileFormatInvalidException(excArgs=allowed_file_extensions)
+            raise FileFormatInvalidException(excArgs=list(allowed_file_extensions))
         if f.size > settings.MAX_UPLOAD_SIZE_OF_A_FILE:
             raise LimitReachedException(excArgs=('A file', str(settings.MAX_UPLOAD_SIZE_OF_A_FILE / 1024 / 1024), 'MB'))
         total_file_size += f.size

View File

@@ -1,5 +1,6 @@
 import redis
 import json
+from datetime import datetime, timedelta
 from django.conf import settings
@@ -14,6 +15,7 @@ class RedisUtils:
             image_index: int
         """
         self.redis_client.hset(request_id, image_index, json.dumps(data))
+        self.redis_client.expire(request_id, 3600)

     def get_all_cache(self, request_id):
         resutlt = {}
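
Note: instead of deleting a request's cached pages eagerly, every write now refreshes a one-hour TTL on the whole hash, so Redis cleans up finished requests on its own and partial results stay available until aggregation completes. A minimal sketch of the pattern with redis-py (the connection and helper shown here are illustrative, not the project's RedisUtils API):

    import json
    import redis

    r = redis.Redis()  # placeholder connection

    def set_cache(request_id, image_index, data, ttl=3600):
        # Store one page's result in the request's hash and (re)arm the TTL;
        # expiry replaces explicit deletion of the hash.
        r.hset(request_id, image_index, json.dumps(data))
        r.expire(request_id, ttl)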

View File

@@ -94,6 +94,12 @@ msgstr "Số lần yêu cầu"
 msgid "Number of template"
 msgstr "Số mẫu tài liệu"

+msgid "Number of imei_file"
+msgstr "Số lượng file IMEI"
+
+msgid "Number of invoice_file"
+msgstr "Số lượng file Invoice"
+
 msgid "times"
 msgstr "lượt"

View File

@@ -2,7 +2,7 @@ server {
    # listen {{port}};
    # listen [::]:{{port}};
    server_name localhost;
-   client_max_body_size 10M;
+   client_max_body_size 100M;
    #access_log  /var/log/nginx/host.access.log  main;