Merge branch 'main' of https://code.sdsdev.co.kr/SDSRV-IDP/sbt-idp into main
commit 39e18f45d7
@@ -29,6 +29,7 @@ import copy
 
 
+redis_client = RedisUtils()
 
 
 class AccuracyViewSet(viewsets.ViewSet):
     lookup_field = "username"
@@ -117,13 +118,16 @@ class AccuracyViewSet(viewsets.ViewSet):
         include_test = request.GET.get('includes_test', False)
         subsidiary = request.GET.get("subsidiary", "all")
         max_accuracy = float(request.GET.get("max_accuracy", 100))
-        subsidiary = map_subsidiary_long_to_short(subsidiary)
+        # subsidiary = map_subsidiary_long_to_short(subsidiary)
 
         base_query = Q(status=200)
         if start_date_str or end_date_str:
             try:
-                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d')  # We care only about day precision only
-                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d')
+                # We care only about day precision only
+                start_date = timezone.datetime.strptime(
+                    start_date_str, '%Y-%m-%d')
+                end_date = timezone.datetime.strptime(
+                    end_date_str, '%Y-%m-%d')
                 end_date = end_date + timezone.timedelta(days=1)
                 # Round:
                 # end_date_str to the beginning of the next day
@@ -131,7 +135,8 @@ class AccuracyViewSet(viewsets.ViewSet):
                 start_date = timezone.make_aware(start_date)
                 end_date = timezone.make_aware(end_date)
 
-                start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')  # inside logic will include second precision with timezone for calculation
+                # inside logic will include second precision with timezone for calculation
+                start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
                 end_date_str = end_date.strftime('%Y-%m-%dT%H:%M:%S%z')
                 base_query &= Q(created_at__range=(start_date, end_date))
             except Exception as e:
@@ -169,12 +174,15 @@ class AccuracyViewSet(viewsets.ViewSet):
         if subsidiary not in list(settings.SUBS.keys()):
             raise InvalidException(excArgs="subsidiary")
         if subsidiary and subsidiary.lower().replace(" ", "") != "all":
-            base_query &= Q(redemption_id__startswith=map_subsidiary_long_to_short(subsidiary))
+            base_query &= Q(
+                redemption_id__startswith=map_subsidiary_long_to_short(subsidiary))
 
         if isinstance(max_accuracy, float):
-            base_query &= Q(raw_accuracy__lt=(max_accuracy/100)) | Q(raw_accuracy__isnull=True)
+            base_query &= Q(raw_accuracy__lt=(
+                max_accuracy/100)) | Q(raw_accuracy__isnull=True)
 
-        subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')
+        subscription_requests = SubscriptionRequest.objects.filter(
+            base_query).order_by('created_at')
 
         request_count = subscription_requests.count()
 
@@ -189,17 +197,22 @@ class AccuracyViewSet(viewsets.ViewSet):
             try:
                 if rq.reviewed_result is not None:
                     imeis = rq.reviewed_result.get("imei_number", [])
-                    purchase_date = rq.reviewed_result.get("purchase_date", [])
+                    purchase_date = rq.reviewed_result.get(
+                        "purchase_date", [])
                     retailer = rq.reviewed_result.get("retailername", "")
                 elif rq.feedback_result is not None:
                     imeis = rq.feedback_result.get("imei_number", [])
-                    purchase_date = rq.feedback_result.get("purchase_date", [])
+                    purchase_date = rq.feedback_result.get(
+                        "purchase_date", [])
                     retailer = rq.feedback_result.get("retailername", "")
                 elif rq.predict_result is not None:
                     if rq.predict_result.get("status", 404) == 200:
-                        imeis = rq.predict_result.get("content", {}).get("document", [])[0].get("content", [])[3].get("value", [])
-                        purchase_date = rq.predict_result.get("content", {}).get("document", [])[0].get("content", [])[2].get("value", [])
-                        retailer = rq.predict_result.get("content", {}).get("document", [])[0].get("content", [])[0].get("value", [])
+                        imeis = rq.predict_result.get("content", {}).get("document", [])[
+                            0].get("content", [])[3].get("value", [])
+                        purchase_date = rq.predict_result.get("content", {}).get(
+                            "document", [])[0].get("content", [])[2].get("value", [])
+                        retailer = rq.predict_result.get("content", {}).get("document", [])[
+                            0].get("content", [])[0].get("value", [])
             except Exception as e:
                 print(f"[ERROR]: {e}")
                 print(f"[ERROR]: {rq}")
@@ -246,7 +259,8 @@ class AccuracyViewSet(viewsets.ViewSet):
         include_test = request.data.get('include_test', False)
         subsidiary = request.data.get("subsidiary", "all")
         is_daily_report = request.data.get('is_daily_report', False)
-        report_overview_duration = request.data.get("report_overview_duration", "")
+        report_overview_duration = request.data.get(
+            "report_overview_duration", "")
         report_type = request.data.get("report_type", "accuracy")
         subsidiary = map_subsidiary_long_to_short(subsidiary)
 
@@ -261,21 +275,28 @@ class AccuracyViewSet(viewsets.ViewSet):
                 start_date = end_date - timezone.timedelta(days=30)
             else:
                 start_date = end_date - timezone.timedelta(days=7)
-            start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
-            start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')  # inside logic will include second precision with timezone for calculation
+            start_date = start_date.replace(
+                hour=0, minute=0, second=0, microsecond=0)
+            # inside logic will include second precision with timezone for calculation
+            start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
             end_date_str = end_date.strftime('%Y-%m-%dT%H:%M:%S%z')
         else:
             try:
-                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d')  # We care only about day precision only
-                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d')
+                # We care only about day precision only
+                start_date = timezone.datetime.strptime(
+                    start_date_str, '%Y-%m-%d')
+                end_date = timezone.datetime.strptime(
+                    end_date_str, '%Y-%m-%d')
                 # Round:
                 # end_date_str to the beginning of the next day
                 # start_date_str to the start of the date
                 start_date = timezone.make_aware(start_date)
                 end_date = timezone.make_aware(end_date)
 
-                start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')  # inside logic will include second precision with timezone for calculation
-                end_date_str = (end_date + timezone.timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%S%z')
+                # inside logic will include second precision with timezone for calculation
+                start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
+                end_date_str = (
+                    end_date + timezone.timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%S%z')
             except ValueError:
                 raise InvalidException(excArgs="Date format")
 
@@ -291,7 +312,8 @@ class AccuracyViewSet(viewsets.ViewSet):
             "report_type": report_type,
         }
 
-        report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex
+        report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + \
+            "_" + uuid.uuid4().hex
         new_report: Report = Report(
             report_id=report_id,
             is_daily_report=is_daily_report,
@@ -407,7 +429,8 @@ class AccuracyViewSet(viewsets.ViewSet):
     @action(detail=False, url_path="report_list", methods=["GET"])
     def get_report_list(self, request):
         if request.method == 'GET':
-            exclude_daily_report = request.GET.get('exclude_daily_report', True)
+            exclude_daily_report = request.GET.get(
+                'exclude_daily_report', True)
             start_date_str = request.GET.get('start_date', "")
            end_date_str = request.GET.get('end_date', "")
             page_number = int(request.GET.get('page', 1))
@@ -418,8 +441,10 @@ class AccuracyViewSet(viewsets.ViewSet):
                 reports = Report.objects
             else:
                 try:
-                    start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
-                    end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
+                    start_date = timezone.datetime.strptime(
+                        start_date_str, '%Y-%m-%dT%H:%M:%S%z')
+                    end_date = timezone.datetime.strptime(
+                        end_date_str, '%Y-%m-%dT%H:%M:%S%z')
                 except ValueError:
                     raise InvalidException(excArgs="Date format")
                 base_query = Q(created_at__range=(start_date, end_date))
@@ -434,16 +459,21 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             data = []
             for report in page:
-                acc_keys = ["purchase_date", "retailername", "invoice_no", "imei_number", "avg"]
+                acc_keys = ["purchase_date", "retailername",
+                            "invoice_no", "imei_number", "avg"]
                 acc = {}
                 for key in acc_keys:
-                    fb = report.feedback_accuracy.get(key, 0) if report.feedback_accuracy else 0
-                    rv = report.reviewed_accuracy.get(key, 0) if report.reviewed_accuracy else 0
+                    fb = report.feedback_accuracy.get(
+                        key, 0) if report.feedback_accuracy else 0
+                    rv = report.reviewed_accuracy.get(
+                        key, 0) if report.reviewed_accuracy else 0
                     if report.report_type not in ["BILLING", "billing"]:
-                        acc[key] = report.combined_accuracy.get(key, 0) if report.combined_accuracy else max([fb, rv])
+                        acc[key] = report.combined_accuracy.get(
+                            key, 0) if report.combined_accuracy else max([fb, rv])
                     else:
                         acc[key] = None
-                processing_time = report.average_OCR_time.get("avg", None) if report.average_OCR_time else None
+                processing_time = report.average_OCR_time.get(
+                    "avg", None) if report.average_OCR_time else None
                 if processing_time and processing_time == 0:
                     processing_time = None
                 data.append({
@@ -550,23 +580,28 @@ class AccuracyViewSet(viewsets.ViewSet):
             try:
                 this_overview = get_cache(key).get("data", [])
                 if sub != "seao":
-                    this_overview = [d for d in this_overview if d.get("subs") != "+"]
+                    this_overview = [
+                        d for d in this_overview if d.get("subs") != "+"]
                 else:
                     for item in this_overview:
                         if item.get("subs") == "+":
-                            item["extraction_date"] = item["extraction_date"].replace("Subtotal ", "").replace("(", "").replace(")", "") + "-32"
+                            item["extraction_date"] = item["extraction_date"].replace(
+                                "Subtotal ", "").replace("(", "").replace(")", "") + "-32"
                 subsidiary_overview_reports += this_overview
 
             except Exception as e:
-                print(f"[WARM]: Unable to retrive data {key} from Redis, skipping...")
+                print(
+                    f"[WARM]: Unable to retrive data {key} from Redis, skipping...")
         data = aggregate_overview(subsidiary_overview_reports)
         for item in data:
             if item.get("subs") == "+":
-                item["extraction_date"] = "Subtotal (" + item["extraction_date"].replace("-32", "") + ")"
+                item["extraction_date"] = "Subtotal (" + item["extraction_date"].replace(
+                    "-32", "") + ")"
         # Do the saving process
         report_fine_data = copy.deepcopy(data)
         for i, dat in enumerate(report_fine_data):
-            keys = [x for x in list(dat.keys()) if "accuracy" in x.lower()]
+            keys = [x for x in list(dat.keys())
+                    if "accuracy" in x.lower()]
             keys_percent = "images_quality"
             for x_key in report_fine_data[i][keys_percent].keys():
                 if "percent" not in x_key:
@@ -575,11 +610,13 @@ class AccuracyViewSet(viewsets.ViewSet):
             for key in keys:
                 if report_fine_data[i][key]:
                     for x_key in report_fine_data[i][key].keys():
-                        report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key]*100 if report_fine_data[i][key][x_key] is not None else None
+                        report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key] * \
+                            100 if report_fine_data[i][key][x_key] is not None else None
         overview_filename = _subsidiary + "_" + duration + ".xlsx"
         data_workbook = dict2xlsx(report_fine_data, _type='report')
 
-        folder_path = os.path.join(settings.MEDIA_ROOT, "report", settings.OVERVIEW_REPORT_ROOT)
+        folder_path = os.path.join(
+            settings.MEDIA_ROOT, "report", settings.OVERVIEW_REPORT_ROOT)
         os.makedirs(folder_path, exist_ok=True)
         file_path = os.path.join(folder_path, overview_filename)
         data_workbook.save(file_path)
@@ -626,13 +663,15 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             tmp_file = "/tmp/" + s3_key
             os.makedirs("/tmp", exist_ok=True)
-            download_from_S3("report/" + settings.OVERVIEW_REPORT_ROOT + "/" + s3_key, tmp_file)
+            download_from_S3(
+                "report/" + settings.OVERVIEW_REPORT_ROOT + "/" + s3_key, tmp_file)
             file = open(tmp_file, 'rb')
             response = FileResponse(file, status=200)
 
             # Set the content type and content disposition headers
             response['Content-Type'] = 'application/octet-stream'
-            response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file))
+            response['Content-Disposition'] = 'attachment; filename="{0}"'.format(
+                os.path.basename(tmp_file))
             return response
 
         return JsonResponse({'error': 'Invalid request method.'}, status=405)
@@ -659,8 +698,10 @@ class AccuracyViewSet(viewsets.ViewSet):
             if not report.S3_dashboard_file_name and request.query_params["report_expression"] != "detail":
                 raise NotFoundException(excArgs="S3 dashboard file name")
 
-            file_name = report.S3_file_name if request.query_params["report_expression"] == "detail" else report.S3_dashboard_file_name
-            tmp_file = "/tmp/" + request.query_params["report_expression"] + "_" + report.subsidiary + "_" + report.start_at.astimezone(target_timezone).strftime("%Y%m%d") + "_" + report.end_at.astimezone(target_timezone).strftime("%Y%m%d") + "_created_on_" + report.created_at.astimezone(target_timezone).strftime("%Y%m%d") + ".xlsx"
+            file_name = report.S3_file_name if request.query_params[
+                "report_expression"] == "detail" else report.S3_dashboard_file_name
+            tmp_file = "/tmp/" + request.query_params["report_expression"] + "_" + report.subsidiary + "_" + report.start_at.astimezone(target_timezone).strftime(
+                "%Y%m%d") + "_" + report.end_at.astimezone(target_timezone).strftime("%Y%m%d") + "_created_on_" + report.created_at.astimezone(target_timezone).strftime("%Y%m%d") + ".xlsx"
             os.makedirs("/tmp", exist_ok=True)
 
             download_from_S3(file_name, tmp_file)
@@ -669,7 +710,8 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             # Set the content type and content disposition headers
             response['Content-Type'] = 'application/octet-stream'
-            response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file))
+            response['Content-Disposition'] = 'attachment; filename="{0}"'.format(
+                os.path.basename(tmp_file))
             return response
 
         return JsonResponse({'error': 'Invalid request method.'}, status=405)
@@ -694,7 +736,8 @@ class AccuracyViewSet(viewsets.ViewSet):
         if request.method == 'GET':
             base_query = Q(request_id=request_id)
 
-            subscription_request = SubscriptionRequest.objects.filter(base_query)
+            subscription_request = SubscriptionRequest.objects.filter(
+                base_query)
 
             if subscription_request.count() == 0:
                 raise NotFoundException(excArgs=request_id)
@@ -713,7 +756,8 @@ class AccuracyViewSet(viewsets.ViewSet):
             data = []
             files = []
 
-            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id, file_category=FileCategory.Origin.value)
+            subscription_request_files = SubscriptionRequestFile.objects.filter(
+                request=subscription_request.id, file_category=FileCategory.Origin.value)
 
             for subscription_request_file in subscription_request_files:
                 sub = subscription_request.subscription
@@ -726,10 +770,12 @@ class AccuracyViewSet(viewsets.ViewSet):
 
                 if not reviewed_result:
                     reviewed_result = copy.deepcopy(sample_result)
-                    reviewed_result["imei_number"] = [None for _ in range(subscription_request.doc_type.split(",").count("imei"))]
+                    reviewed_result["imei_number"] = [None for _ in range(
+                        subscription_request.doc_type.split(",").count("imei"))]
                 if not feedback_result:
                     feedback_result = copy.deepcopy(sample_result)
-                    feedback_result["imei_number"] = [None for _ in range(subscription_request.doc_type.split(",").count("imei"))]
+                    feedback_result["imei_number"] = [None for _ in range(
+                        subscription_request.doc_type.split(",").count("imei"))]
                 if not predicted_result:
                     predicted_result = copy.deepcopy(sample_result)
 
@@ -755,14 +801,17 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             reviewed_result = subscription_request.reviewed_result
             feedback_result = subscription_request.feedback_result
-            predicted_result = predict_result_to_ready(subscription_request.predict_result)
+            predicted_result = predict_result_to_ready(
+                subscription_request.predict_result)
 
             if not reviewed_result:
                 reviewed_result = copy.deepcopy(sample_result)
-                reviewed_result["imei_number"] = [None for _ in range(subscription_request.doc_type.split(",").count("imei"))]
+                reviewed_result["imei_number"] = [None for _ in range(
+                    subscription_request.doc_type.split(",").count("imei"))]
             if not feedback_result:
                 feedback_result = copy.deepcopy(sample_result)
-                feedback_result["imei_number"] = [None for _ in range(subscription_request.doc_type.split(",").count("imei"))]
+                feedback_result["imei_number"] = [None for _ in range(
+                    subscription_request.doc_type.split(",").count("imei"))]
             if not predicted_result:
                 predicted_result = copy.deepcopy(sample_result)
 
@@ -797,13 +846,15 @@ class AccuracyViewSet(viewsets.ViewSet):
         elif request.method == 'POST':
             data = request.data
             base_query = Q(request_id=request_id)
-            subscription_request = SubscriptionRequest.objects.filter(base_query)
+            subscription_request = SubscriptionRequest.objects.filter(
+                base_query)
 
             if subscription_request.count() == 0:
                 raise NotFoundException(excArgs=request_id)
 
             subscription_request = subscription_request.first()
-            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id)
+            subscription_request_files = SubscriptionRequestFile.objects.filter(
+                request=subscription_request.id)
 
             if "reviewed_result" not in data:
                 raise InvalidException(excArgs=f'reviewed_result')
@@ -811,13 +862,16 @@ class AccuracyViewSet(viewsets.ViewSet):
             reviewed_result = data["reviewed_result"]
             if not subscription_request.predict_result:
                 raise InvalidException(excArgs=f'request_id')
-            validate_review(reviewed_result, len(subscription_request.predict_result.get("content", {}).get("document", [{}])[0].get("content", [{}, {}, {}, {}])[3].get("value", [])))
+            validate_review(reviewed_result, len(subscription_request.predict_result.get(
+                "content", {}).get("document", [{}])[0].get("content", [{}, {}, {}, {}])[3].get("value", [])))
             reviewed_result['request_id'] = request_id
 
             for subscription_request_file in subscription_request_files:
                 if subscription_request_file.doc_type == 'invoice':
-                    subscription_request_file.reviewed_result = copy.deepcopy(reviewed_result)
-                    subscription_request_file.reviewed_result['imei_number'] = []
+                    subscription_request_file.reviewed_result = copy.deepcopy(
+                        reviewed_result)
+                    subscription_request_file.reviewed_result['imei_number'] = [
+                    ]
                 elif subscription_request_file.doc_type == 'imei':
                     subscription_request_file.reviewed_result = {
                         "retailername": None,
@@ -826,7 +880,8 @@ class AccuracyViewSet(viewsets.ViewSet):
                         "purchase_date": [],
                         "imei_number": []}
                     if len(reviewed_result["imei_number"])-1 >= subscription_request_file.index_in_request:
-                        subscription_request_file.reviewed_result["imei_number"] = reviewed_result["imei_number"][subscription_request_file.index_in_request]
+                        subscription_request_file.reviewed_result["imei_number"] = [reviewed_result[
+                            "imei_number"][subscription_request_file.index_in_request]]
                 subscription_request_file.save()
 
             subscription_request.reviewed_result = reviewed_result
@@ -864,14 +919,16 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             base_query = Q(request_id=request_id)
 
-            subscription_request = SubscriptionRequest.objects.filter(base_query)
+            subscription_request = SubscriptionRequest.objects.filter(
+                base_query)
 
             if subscription_request.count() == 0:
                 raise NotFoundException(excArgs=request_id)
 
             subscription_request = subscription_request.first()
 
-            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id)
+            subscription_request_files = SubscriptionRequestFile.objects.filter(
+                request=subscription_request.id)
 
             if "reason" not in data:
                 raise InvalidException(excArgs=f'reason')
@@ -889,7 +946,8 @@ class AccuracyViewSet(viewsets.ViewSet):
                     subscription_request_file.save()
                     is_available = True
             if not is_available:
-                raise NotFoundException(excArgs=request_id + "/" + request_image_id)
+                raise NotFoundException(
+                    excArgs=request_id + "/" + request_image_id)
             return JsonResponse({'message': 'success.'}, status=200)
 
         else:
@@ -110,7 +110,7 @@ def create_accuracy_report(report_id, **kwargs):
         if request.status != 200 or not (request.reviewed_result or request.feedback_result):
             # Failed requests or lack of reviewed_result/feedback_result
             continue
-        request_att, _report_files, _att = calculate_a_request(report, request)
+        request_att, _report_files, _atts = calculate_a_request(report, request)
         report_files += _report_files
         report_engine.add(request, _report_files, report)
         request.feedback_accuracy = {"imei_number": mean_list(request_att["acc"]["feedback"].get("imei_number", [None])),
@@ -126,6 +126,7 @@ def create_accuracy_report(report_id, **kwargs):
         rq_accuracy = {"feedback": [],
                        "reviewed": []}
 
+        for _att in _atts:
             for t in _att["acc"].keys():
                 for cl in _att["acc"][t].keys():
                     rq_accuracy[t] += _att["acc"][t][cl]
@@ -797,7 +797,7 @@ def calculate_a_request(report, request):
                          0: "No",
                          1: "Yes"}
         return review_status.get(input, "N/A")
-
+    atts = []
     request_att = {"acc": {"feedback": {"imei_number": [],
                                         "purchase_date": [],
                                         "retailername": [],
@@ -829,6 +829,7 @@ def calculate_a_request(report, request):
     for image in images:
 
         status, att = calculate_subcription_file(image)
+        atts.append(att)
         att["acc"]["feedback"], fb_max_indexes = acc_maximize_list_values(att["acc"]["feedback"])
         att["acc"]["reviewed"], rv_max_indexes = acc_maximize_list_values(att["acc"]["reviewed"])
 
@@ -945,7 +946,7 @@ def calculate_a_request(report, request):
             print(f"[ERROR]: failed to calculate request: {request.request_id} - request_file: {image.file_name} because of {e}")
             continue
 
-    return request_att, report_files, att
+    return request_att, report_files, atts
 
 def calculate_subcription_file(subcription_request_file):
     att = {"acc": {"feedback": {},
@@ -44,6 +44,7 @@
     "pdfjs-dist": "^3.11.174",
     "process": "^0.11.10",
     "react": "^18.2.0",
+    "react-awesome-lightbox": "^1.8.1",
     "react-chartjs-2": "^5.2.0",
     "react-dom": "^18.2.0",
     "react-hotkeys-hook": "^4.5.0",
@@ -51,6 +52,7 @@
     "react-office-viewer": "^1.0.4",
     "react-router-dom": "^6.6.1",
     "styled-components": "^5.3.6",
+    "ts-node": "^10.9.2",
     "uuid": "^9.0.0"
   },
   "devDependencies": {
@@ -119,7 +119,7 @@ export const MainLayout = ({ children }: { children: React.ReactNode }) => {
           style={{
             height: '100%',
             overflow: 'auto',
-            padding: 32,
+            padding: 16,
             background: colorBgContainer,
           }}
         >
@@ -31,6 +31,7 @@ const FileCard = ({ file, isSelected, onClick, setIsReasonModalOpen }) => {
         >
           {file['Doc Type'].toUpperCase()}
         </span>
+        <br/>
         <span
           style={{
             fontSize: '12px',
cope2n-fe/src/pages/reviews2/FileCard.tsx (new file, 77 lines)
@@ -0,0 +1,77 @@
+import { DownloadOutlined } from '@ant-design/icons';
+import { Button } from 'antd';
+
+const FileCard = ({ file, isSelected, onClick, setIsReasonModalOpen }) => {
+  const fileName = file['File Name'];
+
+  return (
+    <div
+      style={{
+        border: '1px solid #ccc',
+        backgroundColor: isSelected ? '#d4ecff' : '#fff',
+        padding: '4px 8px',
+        margin: '0 0 4px',
+        cursor: 'pointer',
+      }}
+      onClick={onClick}
+    >
+      <div>
+        <p
+          style={{
+            fontSize: '12px',
+            color: '#333',
+            fontWeight: 'bold',
+            cursor: 'default',
+            margin: '4px',
+          }}
+        >
+          {file['Doc Type'].toUpperCase()}
+        </p>
+        <span
+          style={{
+            fontSize: '12px',
+            color: '#aaa',
+            fontWeight: 'bold',
+            cursor: 'default',
+            maxWidth: '40px',
+            overflow: 'hidden',
+            textOverflow: 'ellipsis',
+          }}
+        >
+          {fileName ? fileName.substring(0, 25).replace('temp_', '') : fileName}
+        </span>
+      </div>
+      <div
+        style={{
+          display: 'flex',
+          justifyContent: 'center',
+          alignItems: 'center',
+        }}
+      >
+        <Button
+          style={{
+            margin: '4px 2px',
+          }}
+          onClick={() => {
+            setIsReasonModalOpen(true);
+          }}
+        >
+          Review
+        </Button>
+        <Button
+          style={{
+            margin: '4px 2px',
+          }}
+          onClick={() => {
+            const downloadUrl = file['File URL'];
+            window.open(downloadUrl, '_blank');
+          }}
+        >
+          <DownloadOutlined />
+        </Button>
+      </div>
+    </div>
+  );
+};
+
+export default FileCard;
cope2n-fe/src/pages/reviews2/api.ts (new file, 79 lines)
@@ -0,0 +1,79 @@
+import { baseURL } from 'request/api';
+
+export const fetchAllRequests = async (
+  filterDateRange,
+  filterSubsidiaries,
+  filterReviewState,
+  filterIncludeTests,
+  page = 1,
+  page_size = 20,
+  max_accuracy = 100
+) => {
+  const startDate =
+    filterDateRange && filterDateRange[0] ? filterDateRange[0] : '';
+  const endDate =
+    filterDateRange && filterDateRange[1] ? filterDateRange[1] : '';
+  let filterStr = '';
+  filterStr += `page=${page}&page_size=${page_size}&`;
+  if (filterSubsidiaries) {
+    filterStr += `subsidiary=${filterSubsidiaries}&`;
+  }
+  if (filterReviewState) {
+    filterStr += `is_reviewed=${filterReviewState}&`;
+  }
+  if (filterIncludeTests) {
+    filterStr += `includes_test=${filterIncludeTests}&`;
+  }
+  if (startDate && endDate) {
+    filterStr += `start_date=${startDate}&end_date=${endDate}&`;
+  }
+  filterStr += `max_accuracy=${max_accuracy}`
+  const token = localStorage.getItem('sbt-token') || '';
+  const data = await fetch(`${baseURL}/ctel/request_list/?${filterStr}`, {
+    method: 'GET',
+    headers: {
+      Authorization: `${JSON.parse(token)}`,
+    },
+  }).then(async (res) => {
+    const data = await res.json();
+    return data;
+  });
+  return data;
+};
+
+export const updateRevisedData = async (
+  requestID: any,
+  newRevisedData: any,
+) => {
+  // const requestID = ;
+  const token = localStorage.getItem('sbt-token') || '';
+  const result = await fetch(`${baseURL}/ctel/request/${requestID}/`, {
+    method: 'POST',
+    headers: {
+      Authorization: `${JSON.parse(token)}`,
+      'Content-Type': 'application/json',
+    },
+    body: JSON.stringify({
+      reviewed_result: newRevisedData,
+    }),
+  }).catch((error) => {
+    console.log(error);
+    throw error;
+  });
+  if (result.status != 200) {
+    throw new Error('Could not update revised data');
+  }
+};
+
+export const fetchRequest = async (id) => {
+  const token = localStorage.getItem('sbt-token') || '';
+  const response = await fetch(`${baseURL}/ctel/request/${id}/`, {
+    method: 'GET',
+    headers: {
+      Authorization: `${JSON.parse(token)}`,
+    },
+  });
+  return await (
+    await response.json()
+  ).subscription_requests[0];
+};
cope2n-fe/src/pages/reviews2/const.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
+import { t } from "@lingui/macro";
+
+export const counter_measure_map = {
+  invalid_image: 'Remove this image from the evaluation report',
+  missing_information: 'Remove this image from the evaluation report',
+  too_blurry_text: 'Remove this image from the evaluation report',
+  too_small_text: 'Remove this image from the evaluation report',
+  ocr_cannot_extract: 'Improve OCR',
+  wrong_feedback: 'Update revised result and re-calculate accuracy',
+  handwritten: 'Remove this image from the evaluation report',
+  other: 'other',
+};
+
+export const REASON_BAD_QUALITY = [
+  { value: 'invalid_image', label: t`Invalid image` },
+  {
+    value: 'missing_information',
+    label: t`Missing information`,
+  },
+  { value: 'too_blurry_text', label: t`Too blurry text` },
+  { value: 'too_small_text', label: t`Too small text` },
+  { value: 'handwritten', label: t`Handwritten` },
+  { value: 'wrong_feedback', label: t`Wrong Feedback` },
+  { value: 'ocr_cannot_extract', label: t`Ocr cannot extract` },
+  { value: 'other', label: t`Other` },
+]
+
+export const SOLUTION_BAD_QUALITY = [
+  {
+    value: 'Remove this image from the evaluation report',
+    label: t`Remove this image from the evaluation report`,
+  },
+  { value: 'Improve OCR', label: t`Improve OCR` },
+  {
+    value: 'Update revised result and re-calculate accuracy',
+    label: t`Update revised result and re-calculate accuracy`,
+  },
+  { value: 'other', label: t`Other` },
+]
+
+export const SUBSIDIARIES = [
+  { value: 'SEAO', label: 'SEAO' },
+  { value: 'SEAU', label: 'SEAU' },
+  { value: 'SESP', label: 'SESP' },
+  { value: 'SME', label: 'SME' },
+  { value: 'SEPCO', label: 'SEPCO' },
+  { value: 'TSE', label: 'TSE' },
+  { value: 'SEIN', label: 'SEIN' },
+]
cope2n-fe/src/pages/reviews2/index.tsx (new file, 1009 lines)
File diff suppressed because it is too large
@@ -11,7 +11,7 @@ const environment = process.env.NODE_ENV;
 const AXIOS_TIMEOUT_MS = 30 * 60 * 1000; // This config sastified long-live upload file request
 const EXPIRED_PASSWORD_SIGNAL = 'expired_password';
 
-export const baseURL = environment === 'development' ? 'http://107.120.133.27:9881/api' : '/api';
+export const baseURL = environment === 'development' ? 'http://107.120.133.27:9000/api' : '/api';
 // export const baseURL = '/api';
 
 
@@ -12,6 +12,7 @@ const DashboardPage = React.lazy(() => import('pages/dashboard'));
 const InferencePage = React.lazy(() => import('pages/inference/index'));
 
 const ReviewsPage = React.lazy(() => import('pages/reviews'));
+const ReviewsPage2 = React.lazy(() => import('pages/reviews2'));
 const ReportsPage = React.lazy(() => import('pages/reports'));
 const ReportDetailPage = React.lazy(
   () => import('pages/reports/report_detail'),
@@ -65,6 +66,11 @@ export function useAppRouter() {
       path: '/reviews',
       element: <PrivateRoute element={<ReviewsPage />} />,
     },
+    {
+      path: '/reviews2',
+      element: <PrivateRoute element={<ReviewsPage2 />} />,
+    },
+
     {
       path: '/users',
       element: <PrivateRoute element={<UsersPage />} />,
@@ -21,6 +21,9 @@ const normalizeData = (key, value) => {
   if (["imei_number", "purchase_date"].includes(key) && typeof(value) === "string") {
     value = value.split(",");
   }
+  if(key === 'imei_number' && value === null){
+    value = [null]
+  }
   if (typeof (value) === "object" && value?.length > 0) {
     for (let i = 0; i < value.length; i++) {
       value[i] = normalizeData("text", value[i]);