Merge branch 'main' of https://code.sdsdev.co.kr/SDSRV-IDP/sbt-idp into dev/20240308

dx-tan 2024-03-11 17:01:40 +07:00
commit d028b233dd
2 changed files with 51 additions and 5 deletions

View File

@@ -193,11 +193,13 @@ def process_pdf(rq_id, sub_id, p_type, user_id, files):
            raise FileContentInvalidException
        for j in range(len(_b_urls)):
            _b_urls[j]["doc_type"] = file["file_type"]
            _b_urls[j]["page_number"] = idx
            _b_urls[j]["page_number"] = idx
            _b_urls[j]["index_to_image_type"] = file["index_in_request"]
        return idx, _b_urls[0]
    elif extension in image_extensions:
        this_url = ProcessUtil.process_image_local_file(file["file_name"], file["file_path"], new_request, user, file["file_type"], file["index_in_request"])[0]
        this_url["page_number"] = idx
        this_url["index_to_image_type"] = file["index_in_request"]
        if file["file_type"]:
            this_url["doc_type"] = file["file_type"]
        return idx, this_url
@@ -222,6 +224,7 @@ def process_pdf(rq_id, sub_id, p_type, user_id, files):
file_meta["ai_inference_profile"] = {}
file_meta["index_in_request"] = i
file_meta["preprocessing_time"] = preprocessing_time
file_meta["index_to_image_type"] = b_url["index_to_image_type"]
to_queue.append((fractorized_request_id, sub_id, [b_url], user_id, p_type, file_meta))
# Send to next queue
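For context, this first file's change threads the originating file's index through every page it produces, so the worker that receives a page can later find its source file. Below is a minimal, hedged sketch of that data flow; `build_queue_entries` and the sample values are illustrative stand-ins, not the real `process_pdf`, which builds `_b_urls` via ProcessUtil and adds more metadata.

# Hedged sketch: how "index_to_image_type" travels from a per-file entry into the
# per-page file_meta that gets queued for inference. The "_sub_{i}" request-id
# pattern mirrors the split done in the result handler in the second file.
def build_queue_entries(rq_id, sub_id, p_type, user_id, files):
    b_urls = []
    for idx, f in enumerate(files):
        # Each page keeps a pointer back to the file it came from.
        b_urls.append({
            "doc_type": f["file_type"],
            "page_number": idx,
            "index_to_image_type": f["index_in_request"],
        })

    to_queue = []
    for i, b_url in enumerate(b_urls):
        file_meta = {
            "index_in_request": i,
            "preprocessing_time": 0.0,
            "index_to_image_type": b_url["index_to_image_type"],  # field added by this commit
        }
        to_queue.append((f"{rq_id}_sub_{i}", sub_id, [b_url], user_id, p_type, file_meta))
    return to_queue

entries = build_queue_entries("rq-1", "sub-1", "sbt", "user-1", [
    {"file_type": "invoice", "index_in_request": 0},
    {"file_type": "imei", "index_in_request": 1},
])
print(entries[1][-1]["index_to_image_type"])  # 1: the page points back at its source file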

View File

@@ -8,7 +8,7 @@ from copy import deepcopy
from fwd_api.celery_worker.worker import app
from fwd_api.models import SubscriptionRequest
from fwd_api.exception.exceptions import InvalidException
from fwd_api.models import SubscriptionRequest
from fwd_api.models import SubscriptionRequest, SubscriptionRequestFile
from fwd_api.constant.common import ProcessType
from fwd_api.utils.redis import RedisUtils
from fwd_api.utils import process as ProcessUtil
@@ -146,10 +146,12 @@ def process_invoice_sbt_result(rq_id, result, metadata):
page_index = int(rq_id.split("_sub_")[1])
rq_id = rq_id.split("_sub_")[0]
rq: SubscriptionRequest = SubscriptionRequest.objects.filter(request_id=rq_id).first()
image_type = metadata["doc_type"]
index_in_request = metadata.pop("index_to_image_type", 0)
result["metadata"] = metadata
# status = to_status(result)
_update_subscription_rq_file(request_id=rq, index_in_request=index_in_request, doc_type=image_type, result=result)
status = result.get("status", 200)
redis_client.set_cache(rq_id, page_index, result)
done = rq.pages == redis_client.get_size(rq_id)
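Each sub-request writes its page result into a shared cache keyed by the parent request id, and the request is considered finished once the number of cached pages equals rq.pages. A hedged, standalone sketch of that fan-in pattern follows; `InMemoryCache` is a stand-in for RedisUtils, of which only the set_cache/get_size calls are taken from the diff, their exact semantics are assumed.

# Hedged sketch: per-page results fan in to one cache entry per parent request.
class InMemoryCache:
    def __init__(self):
        self._store = {}

    def set_cache(self, rq_id, page_index, result):
        # Store one result per page under the parent request id.
        self._store.setdefault(rq_id, {})[page_index] = result

    def get_size(self, rq_id):
        # Number of pages that have reported back so far.
        return len(self._store.get(rq_id, {}))

cache = InMemoryCache()
total_pages = 3
for page_index in range(total_pages):
    cache.set_cache("request-123", page_index, {"status": 200})
    done = total_pages == cache.get_size("request-123")
print(done)  # True once every page result has arrived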
@@ -175,7 +177,6 @@ def process_invoice_sbt_result(rq_id, result, metadata):
        rq.status = 404 # stop waiting
        rq.predict_result = result
        rq.save()
        update_user(rq)
    except IndexError as e:
        print(e)
@@ -189,4 +190,46 @@ def process_invoice_sbt_result(rq_id, result, metadata):
        rq.ai_inference_time = 0
        rq.save()
        return "FailInvoice"

def _update_subscription_rq_file(request_id, index_in_request, doc_type, result):
    image = SubscriptionRequestFile.objects.filter(request=request_id, index_in_request=index_in_request, doc_type=doc_type).first()
    retailer_name = None
    sold_to_party = None
    purchase_date = []
    imei_number = []
    predicted_res = __get_actual_predict_result(result=result)
    if len(predicted_res)!=0:
        for elem in predicted_res:
            if elem["label"] == "retailername":
                retailer_name = elem['value']
            elif elem["label"] == "sold_to_party":
                sold_to_party = elem['value']
            elif elem["label"] == "purchase_date":
                purchase_date = elem['value']
            else:
                imei_number = elem['value']
    if doc_type=='invoice':
        _predict_result = {
            "retailername": retailer_name,
            "sold_to_party": sold_to_party,
            "purchase_date": purchase_date,
            "imei_number": []
        }
    else:
        _predict_result = {
            "retailername": None,
            "sold_to_party": None,
            "purchase_date": [],
            "imei_number": imei_number
        }
    image.predict_result = _predict_result
    image.save()


def __get_actual_predict_result(result: dict):
    predicted_res = result.get('content', {}).get('document', [])
    if len(predicted_res)==0:
        return []
    predicted_res = predicted_res[0].get('content', [])
    return predicted_res
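To illustrate what these new helpers expect, here is a hedged, standalone sketch with no Django models: the sample payload is an assumption inferred from the content -> document -> [0] -> content traversal in __get_actual_predict_result, and the hypothetical extract_fields mirrors the label mapping in _update_subscription_rq_file without touching the database.

# Standalone sketch of the payload walk and the per-doc-type field mapping.
# The sample_result shape below is assumed from the keys used in the diff.
sample_result = {
    "content": {
        "document": [
            {
                "content": [
                    {"label": "retailername", "value": "Some Retailer"},
                    {"label": "sold_to_party", "value": "Some Buyer"},
                    {"label": "purchase_date", "value": ["2024-03-08"]},
                    {"label": "imei_number", "value": ["123456789012345"]},
                ]
            }
        ]
    }
}

def get_actual_predict_result(result: dict):
    # Same traversal as the new private helper: content -> document[0] -> content.
    docs = result.get("content", {}).get("document", [])
    return docs[0].get("content", []) if docs else []

def extract_fields(predicted, doc_type):
    # Collect labelled values, then keep invoice fields or IMEI numbers
    # depending on the document type, as the new helper does.
    fields = {"retailername": None, "sold_to_party": None, "purchase_date": [], "imei_number": []}
    for elem in predicted:
        if elem["label"] in ("retailername", "sold_to_party", "purchase_date"):
            fields[elem["label"]] = elem["value"]
        else:
            fields["imei_number"] = elem["value"]
    if doc_type == "invoice":
        fields["imei_number"] = []
    else:
        fields = {"retailername": None, "sold_to_party": None,
                  "purchase_date": [], "imei_number": fields["imei_number"]}
    return fields

print(extract_fields(get_actual_predict_result(sample_result), "invoice"))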