diff --git a/cope2n-api/fwd_api/api/ctel_view.py b/cope2n-api/fwd_api/api/ctel_view.py
index e2664e0..c10af80 100755
--- a/cope2n-api/fwd_api/api/ctel_view.py
+++ b/cope2n-api/fwd_api/api/ctel_view.py
@@ -214,14 +214,6 @@ class CtelViewSet(viewsets.ViewSet):
             "imei": imei_file_objs,
             "invoice": invoice_file_objs
         }
-        total_page = len(files.keys())
-        p_type = validated_data['type']
-        new_request: SubscriptionRequest = SubscriptionRequest(pages=total_page,
-                                                               pages_left=total_page,
-                                                               process_type=p_type, status=1, request_id=rq_id,
-                                                               provider_code=provider_code,
-                                                               subscription=sub)
-        new_request.save()
 
         count = 0
         doc_files_with_type = []
@@ -236,6 +228,15 @@ class CtelViewSet(viewsets.ViewSet):
             ))
             count += 1
 
+        total_page = len(doc_files_with_type)
+        p_type = validated_data['type']
+        new_request: SubscriptionRequest = SubscriptionRequest(pages=total_page,
+                                                               pages_left=total_page,
+                                                               process_type=p_type, status=1, request_id=rq_id,
+                                                               provider_code=provider_code,
+                                                               subscription=sub)
+        new_request.save()
+
         # Run file processing in a pool of 2 threads. TODO: Convert to Celery worker when possible
         compact_files = [None] * len(doc_files_with_type)
         pool = ThreadPool(processes=2)
diff --git a/cope2n-api/fwd_api/celery_worker/internal_task.py b/cope2n-api/fwd_api/celery_worker/internal_task.py
index d1376d5..eeefc5c 100755
--- a/cope2n-api/fwd_api/celery_worker/internal_task.py
+++ b/cope2n-api/fwd_api/celery_worker/internal_task.py
@@ -13,6 +13,8 @@ from fwd_api.models import SubscriptionRequestFile
 from ..utils import file as FileUtils
 from ..utils import process as ProcessUtil
 from ..utils import s3 as S3Util
+from fwd_api.constant.common import ProcessType
+
 from celery.utils.log import get_task_logger
 
 from fwd import settings
@@ -113,6 +115,24 @@ def process_pdf(rq_id, sub_id, p_type, user_id, files):
     new_request.ai_inference_start_time = time.time()
     new_request.save()
 
+    trials = 0
+    while True:
+        rq: SubscriptionRequest = \
+            SubscriptionRequest.objects.filter(request_id=rq_id).first()
+        if rq.ai_inference_start_time != 0:
+            break
+        time.sleep(0.1)
+        trials += 1
+        if trials > 5:
+            rq.preprocessing_time = time.time() - start_time
+            rq.doc_type = doc_type_string
+            rq.ai_inference_start_time = time.time()
+            rq.save()
+        if trials > 10:
+            rq.status = 404
+            rq.save()
+            return
+
     # Send to next queue
     for sub_rq_id, sub_id, urls, user_id, p_type in to_queue:
         ProcessUtil.send_to_queue2(sub_rq_id, sub_id, urls, user_id, p_type)
diff --git a/cope2n-api/fwd_api/celery_worker/process_result_tasks.py b/cope2n-api/fwd_api/celery_worker/process_result_tasks.py
index 5fe7466..b86d2f5 100755
--- a/cope2n-api/fwd_api/celery_worker/process_result_tasks.py
+++ b/cope2n-api/fwd_api/celery_worker/process_result_tasks.py
@@ -145,8 +145,7 @@ def process_invoice_sbt_result(rq_id, result):
     try:
         page_index = int(rq_id.split("_sub_")[1])
         rq_id = rq_id.split("_sub_")[0]
-        rq: SubscriptionRequest = \
-            SubscriptionRequest.objects.filter(request_id=rq_id, process_type=ProcessType.SBT_INVOICE.value)[0]
+        rq: SubscriptionRequest = SubscriptionRequest.objects.filter(request_id=rq_id).first()
         for i in range(10):
             if rq.ai_inference_start_time == 0:
                 logging.warn(f"ai_inference_start_time = 0, looks like database is lagging, attemp {i} in 0.2 second ...")
diff --git a/cope2n-api/fwd_api/utils/redis.py b/cope2n-api/fwd_api/utils/redis.py
index 7a9b54e..ff65035 100644
--- a/cope2n-api/fwd_api/utils/redis.py
+++ b/cope2n-api/fwd_api/utils/redis.py
@@ -24,8 +24,7 @@ class RedisUtils:
         return resutlt
 
     def get_size(self, request_id):
-        return 
-        self.redis_client.hlen(request_id)
+        return self.redis_client.hlen(request_id)
 
     def remove_cache(self, request_id):
         self.redis_client.delete(request_id)
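
Editor's note on the two substantive hunks. In ctel_view.py, len(files.keys()) was always 2, because the files dict only ever has the keys "imei" and "invoice"; creating the SubscriptionRequest after doc_files_with_type is built lets pages count the actual uploaded files. In internal_task.py, process_pdf gains the same defensive polling that process_invoice_sbt_result already performs: re-read the request row until ai_inference_start_time is non-zero, since a result can arrive before the web process's write is visible (the existing log message calls this "database is lagging"). The sketch below restates that polling pattern as a standalone helper, purely for illustration; the name wait_for_visible_request and its defaults are assumptions, not part of this diff.

    import time

    from fwd_api.models import SubscriptionRequest

    def wait_for_visible_request(request_id, max_trials=10, delay=0.1):
        # Hypothetical helper mirroring the loop added to process_pdf:
        # re-read the row until the ai_inference_start_time write from the
        # web process becomes visible, sleeping a fixed delay between tries.
        for _ in range(max_trials):
            rq = SubscriptionRequest.objects.filter(request_id=request_id).first()
            if rq is not None and rq.ai_inference_start_time != 0:
                return rq
            time.sleep(delay)
        return None  # timed out; the loop in this diff instead marks the request status 404

A caller would treat None as the failure path, returning early when no visible row appears, much as the diff's loop gives up after its trial budget is exhausted.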