Workaround to fix missing db fields
parent 29762d7413
commit a131ed7580
@@ -214,14 +214,6 @@ class CtelViewSet(viewsets.ViewSet):
             "imei": imei_file_objs,
             "invoice": invoice_file_objs
         }
-        total_page = len(files.keys())
-        p_type = validated_data['type']
-        new_request: SubscriptionRequest = SubscriptionRequest(pages=total_page,
-                                                               pages_left=total_page,
-                                                               process_type=p_type, status=1, request_id=rq_id,
-                                                               provider_code=provider_code,
-                                                               subscription=sub)
-        new_request.save()

         count = 0
         doc_files_with_type = []
@@ -236,6 +228,15 @@ class CtelViewSet(viewsets.ViewSet):
             ))
             count += 1

+        total_page = len(doc_files_with_type)
+        p_type = validated_data['type']
+        new_request: SubscriptionRequest = SubscriptionRequest(pages=total_page,
+                                                               pages_left=total_page,
+                                                               process_type=p_type, status=1, request_id=rq_id,
+                                                               provider_code=provider_code,
+                                                               subscription=sub)
+        new_request.save()
+
         # Run file processing in a pool of 2 threads. TODO: Convert to Celery worker when possible
         compact_files = [None] * len(doc_files_with_type)
         pool = ThreadPool(processes=2)
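The two hunks above move the SubscriptionRequest creation from before the upload loop to after it, so pages and pages_left are derived from the files actually collected (len(doc_files_with_type)) rather than from the number of file-type keys (len(files.keys())). A minimal sketch of the new ordering, assuming files maps a doc type to its uploaded file objects (the loop body is elided in the diff, and the module path for SubscriptionRequest is inferred from the imports shown below):

    from fwd_api.models import SubscriptionRequest  # assumed path, as with SubscriptionRequestFile

    def build_request(files, p_type, rq_id, provider_code, sub):
        # Collect (doc_type, file) pairs first, mirroring the loop in the diff.
        doc_files_with_type = []
        for doc_type, file_objs in files.items():
            for f in file_objs:
                doc_files_with_type.append((doc_type, f))

        # Only now create the DB row: pages counts files, not file types.
        total_page = len(doc_files_with_type)
        new_request = SubscriptionRequest(pages=total_page, pages_left=total_page,
                                          process_type=p_type, status=1,
                                          request_id=rq_id, provider_code=provider_code,
                                          subscription=sub)
        new_request.save()
        return new_request, doc_files_with_type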
@@ -13,6 +13,8 @@ from fwd_api.models import SubscriptionRequestFile
 from ..utils import file as FileUtils
 from ..utils import process as ProcessUtil
 from ..utils import s3 as S3Util
+from fwd_api.constant.common import ProcessType
+

 from celery.utils.log import get_task_logger
 from fwd import settings
@@ -113,6 +115,24 @@ def process_pdf(rq_id, sub_id, p_type, user_id, files):
     new_request.ai_inference_start_time = time.time()
     new_request.save()

+    trials = 0
+    while True:
+        rq: SubscriptionRequest = \
+            SubscriptionRequest.objects.filter(request_id=rq_id).first()
+        if rq.ai_inference_start_time != 0:
+            break
+        time.sleep(0.1)
+        trials += 1
+        if trials > 5:
+            rq.preprocessing_time = time.time() - start_time
+            rq.doc_type = doc_type_string
+            rq.ai_inference_start_time = time.time()
+            rq.save()
+        if trials > 10:
+            rq.status = 404
+            rq.save()
+            return
+
     # Send to next queue
     for sub_rq_id, sub_id, urls, user_id, p_type in to_queue:
         ProcessUtil.send_to_queue2(sub_rq_id, sub_id, urls, user_id, p_type)
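The block added to process_pdf is the workaround itself, a polling backstop: re-read the row until the earlier write to ai_inference_start_time becomes visible, re-write the missing fields after 5 trials, and mark the request failed (status 404) and bail out after 10. The same pattern as a standalone helper, with hypothetical names (wait_for_field, fetch, is_set, repair, give_up are illustrations, not part of this commit):

    import time

    def wait_for_field(fetch, is_set, repair, give_up,
                       interval=0.1, repair_after=5, abort_after=10):
        trials = 0
        while True:
            row = fetch()          # re-read the row on every trial
            if is_set(row):
                return row         # the write became visible; proceed
            time.sleep(interval)
            trials += 1
            if trials > repair_after:
                repair(row)        # write the missing fields ourselves
            if trials > abort_after:
                give_up(row)       # mark the row failed and stop waiting
                return None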
|
@ -145,8 +145,7 @@ def process_invoice_sbt_result(rq_id, result):
|
|||||||
try:
|
try:
|
||||||
page_index = int(rq_id.split("_sub_")[1])
|
page_index = int(rq_id.split("_sub_")[1])
|
||||||
rq_id = rq_id.split("_sub_")[0]
|
rq_id = rq_id.split("_sub_")[0]
|
||||||
rq: SubscriptionRequest = \
|
rq: SubscriptionRequest = SubscriptionRequest.objects.filter(request_id=rq_id).first()
|
||||||
SubscriptionRequest.objects.filter(request_id=rq_id, process_type=ProcessType.SBT_INVOICE.value)[0]
|
|
||||||
for i in range(10):
|
for i in range(10):
|
||||||
if rq.ai_inference_start_time == 0:
|
if rq.ai_inference_start_time == 0:
|
||||||
logging.warn(f"ai_inference_start_time = 0, looks like database is lagging, attemp {i} in 0.2 second ...")
|
logging.warn(f"ai_inference_start_time = 0, looks like database is lagging, attemp {i} in 0.2 second ...")
|
||||||
|