diff --git a/cope2n-api/fwd_api/api/accuracy_view.py b/cope2n-api/fwd_api/api/accuracy_view.py
index 8f2c408..54f0a3c 100755
--- a/cope2n-api/fwd_api/api/accuracy_view.py
+++ b/cope2n-api/fwd_api/api/accuracy_view.py
@@ -29,80 +29,81 @@ import copy
 
 redis_client = RedisUtils()
 
+
 class AccuracyViewSet(viewsets.ViewSet):
     lookup_field = "username"
 
     @extend_schema(
-    parameters=[
-        OpenApiParameter(
-            name='start_date',
-            location=OpenApiParameter.QUERY,
-            description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
-            type=OpenApiTypes.DATE,
-            default='2023-01-02T00:00:00+0700',
-        ),
-        OpenApiParameter(
-            name='end_date',
-            location=OpenApiParameter.QUERY,
-            description='End date (YYYY-mm-DDTHH:MM:SSZ)',
-            type=OpenApiTypes.DATE,
-            default='2024-01-10T00:00:00+0700',
-        ),
-        OpenApiParameter(
-            name='includes_test',
-            location=OpenApiParameter.QUERY,
-            description='Whether to include test record or not',
-            type=OpenApiTypes.BOOL,
-        ),
-        OpenApiParameter(
-            name='is_reviewed',
-            location=OpenApiParameter.QUERY,
-            description='Which records to be query',
-            type=OpenApiTypes.STR,
-            enum=['reviewed', 'not_reviewed', 'all'],
-        ),
-        OpenApiParameter(
-            name='subsidiary',
-            location=OpenApiParameter.QUERY,
-            description='Which subsidiary to be included',
-            type=OpenApiTypes.STR,
-            enum=list(settings.SUBS.keys()),
-        ),
-        OpenApiParameter(
-            name='request_id',
-            location=OpenApiParameter.QUERY,
-            description='Specific request id',
-            type=OpenApiTypes.STR,
-        ),
-        OpenApiParameter(
-            name='with_redemption_id',
-            location=OpenApiParameter.QUERY,
-            description='Specific redemption id',
-            type=OpenApiTypes.BOOL,
-        ),
-        OpenApiParameter(
-            name='page',
-            location=OpenApiParameter.QUERY,
-            description='Page number',
-            type=OpenApiTypes.INT,
-            required=False
-        ),
-        OpenApiParameter(
-            name='page_size',
-            location=OpenApiParameter.QUERY,
-            description='Number of items per page',
-            type=OpenApiTypes.INT,
-            required=False
-        ),
-        OpenApiParameter(
-            name='max_accuracy',
-            location=OpenApiParameter.QUERY,
-            description='Return requests with acccuracy smaller than this number',
-            type=OpenApiTypes.FLOAT,
-            required=False
-        ),
-    ],
-    responses=None, tags=['Accuracy']
+        parameters=[
+            OpenApiParameter(
+                name='start_date',
+                location=OpenApiParameter.QUERY,
+                description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
+                type=OpenApiTypes.DATE,
+                default='2023-01-02T00:00:00+0700',
+            ),
+            OpenApiParameter(
+                name='end_date',
+                location=OpenApiParameter.QUERY,
+                description='End date (YYYY-mm-DDTHH:MM:SSZ)',
+                type=OpenApiTypes.DATE,
+                default='2024-01-10T00:00:00+0700',
+            ),
+            OpenApiParameter(
+                name='includes_test',
+                location=OpenApiParameter.QUERY,
+                description='Whether to include test records or not',
+                type=OpenApiTypes.BOOL,
+            ),
+            OpenApiParameter(
+                name='is_reviewed',
+                location=OpenApiParameter.QUERY,
+                description='Which records to query',
+                type=OpenApiTypes.STR,
+                enum=['reviewed', 'not_reviewed', 'all'],
+            ),
+            OpenApiParameter(
+                name='subsidiary',
+                location=OpenApiParameter.QUERY,
+                description='Which subsidiary to include',
+                type=OpenApiTypes.STR,
+                enum=list(settings.SUBS.keys()),
+            ),
+            OpenApiParameter(
+                name='request_id',
+                location=OpenApiParameter.QUERY,
+                description='Specific request id',
+                type=OpenApiTypes.STR,
+            ),
+            OpenApiParameter(
+                name='with_redemption_id',
+                location=OpenApiParameter.QUERY,
+                description='Whether to require a redemption id',
+                type=OpenApiTypes.BOOL,
+            ),
+            OpenApiParameter(
+                name='page',
+                location=OpenApiParameter.QUERY,
+                description='Page number',
+                type=OpenApiTypes.INT,
+                required=False
+            ),
+            OpenApiParameter(
+                name='page_size',
+                location=OpenApiParameter.QUERY,
+                description='Number of items per page',
+                type=OpenApiTypes.INT,
+                required=False
+            ),
+            OpenApiParameter(
+                name='max_accuracy',
+                location=OpenApiParameter.QUERY,
+                description='Return requests with accuracy smaller than this number',
+                type=OpenApiTypes.FLOAT,
+                required=False
+            ),
+        ],
+        responses=None, tags=['Accuracy']
     )
     @action(detail=False, url_path="request_list", methods=["GET"])
     def get_request_list(self, request):
@@ -117,13 +118,16 @@ class AccuracyViewSet(viewsets.ViewSet):
             include_test = request.GET.get('includes_test', False)
             subsidiary = request.GET.get("subsidiary", "all")
             max_accuracy = float(request.GET.get("max_accuracy", 100))
-            subsidiary = map_subsidiary_long_to_short(subsidiary)
+            # subsidiary = map_subsidiary_long_to_short(subsidiary)
             base_query = Q(status=200)
 
             if start_date_str or end_date_str:
                 try:
-                    start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d') # We care only about day precision only
-                    end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d')
+                    # We only care about day precision here
+                    start_date = timezone.datetime.strptime(
+                        start_date_str, '%Y-%m-%d')
+                    end_date = timezone.datetime.strptime(
+                        end_date_str, '%Y-%m-%d')
                     end_date = end_date + timezone.timedelta(days=1)
                     # Round:
                     # end_date_str to the beginning of the next day
@@ -131,16 +135,17 @@ class AccuracyViewSet(viewsets.ViewSet):
                     start_date = timezone.make_aware(start_date)
                     end_date = timezone.make_aware(end_date)
 
-                    start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z') # inside logic will include second precision with timezone for calculation
+                    # downstream logic works at second precision with timezone
+                    start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
                     end_date_str = end_date.strftime('%Y-%m-%dT%H:%M:%S%z')
                     base_query &= Q(created_at__range=(start_date, end_date))
                 except Exception as e:
                     raise InvalidException(excArgs="Date format")
-
-            if request_id:
+
+            if request_id:
                 base_query &= Q(request_id=request_id)
             if isinstance(with_redemption_id, str):
-                with_redemption_id = True if with_redemption_id=="true" else False
+                with_redemption_id = True if with_redemption_id == "true" else False
             if with_redemption_id:
                 base_query &= Q(redemption_id__isnull=False)
             else:
@@ -151,7 +156,7 @@ class AccuracyViewSet(viewsets.ViewSet):
             else:
                 base_query &= Q(redemption_id__isnull=True)
             if isinstance(include_test, str):
-                include_test = True if include_test=="true" else False
+                include_test = True if include_test == "true" else False
                 if not include_test:
                     base_query &= Q(is_test_request=False)
             elif isinstance(include_test, bool):
@@ -168,19 +173,22 @@ class AccuracyViewSet(viewsets.ViewSet):
             if subsidiary.lower() != "seao":
                 if subsidiary not in list(settings.SUBS.keys()):
                     raise InvalidException(excArgs="subsidiary")
-            if subsidiary and subsidiary.lower().replace(" ", "")!="all":
-                base_query &= Q(redemption_id__startswith=map_subsidiary_long_to_short(subsidiary))
+            if subsidiary and subsidiary.lower().replace(" ", "") != "all":
+                base_query &= Q(
+                    redemption_id__startswith=map_subsidiary_long_to_short(subsidiary))
             if isinstance(max_accuracy, float):
-                base_query &= Q(raw_accuracy__lt=(max_accuracy/100)) | Q(raw_accuracy__isnull=True)
+                base_query &= Q(raw_accuracy__lt=(
+                    max_accuracy/100)) | Q(raw_accuracy__isnull=True)
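+            # raw_accuracy is stored as a fraction in [0, 1], hence the /100;
+            # requests with no computed accuracy yet (null) are also returned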
 
-            subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')
+            subscription_requests = SubscriptionRequest.objects.filter(
+                base_query).order_by('created_at')
 
             request_count = subscription_requests.count()
 
             paginator = Paginator(subscription_requests, page_size)
             page = paginator.get_page(page_number)
-
+
             data = []
             for rq in page:
                 imeis = []
@@ -189,17 +197,22 @@ class AccuracyViewSet(viewsets.ViewSet):
                 try:
                     if rq.reviewed_result is not None:
                         imeis = rq.reviewed_result.get("imei_number", [])
-                        purchase_date = rq.reviewed_result.get("purchase_date", [])
+                        purchase_date = rq.reviewed_result.get(
+                            "purchase_date", [])
                         retailer = rq.reviewed_result.get("retailername", "")
-                    elif rq.feedback_result is not None :
+                    elif rq.feedback_result is not None:
                        imeis = rq.feedback_result.get("imei_number", [])
-                        purchase_date = rq.feedback_result.get("purchase_date", [])
+                        purchase_date = rq.feedback_result.get(
+                            "purchase_date", [])
                        retailer = rq.feedback_result.get("retailername", "")
                    elif rq.predict_result is not None:
                        if rq.predict_result.get("status", 404) == 200:
-                            imeis = rq.predict_result.get("content", {}).get("document", [])[0].get("content", [])[3].get("value", [])
-                            purchase_date = rq.predict_result.get("content", {}).get("document", [])[0].get("content", [])[2].get("value", [])
-                            retailer = rq.predict_result.get("content", {}).get("document", [])[0].get("content", [])[0].get("value", [])
+                            # document content positions: 0 = retailername, 2 = purchase_date, 3 = imei_number
+                            imeis = rq.predict_result.get("content", {}).get("document", [])[
+                                0].get("content", [])[3].get("value", [])
+                            purchase_date = rq.predict_result.get("content", {}).get(
+                                "document", [])[0].get("content", [])[2].get("value", [])
+                            retailer = rq.predict_result.get("content", {}).get("document", [])[
+                                0].get("content", [])[0].get("value", [])
                except Exception as e:
                    print(f"[ERROR]: {e}")
                    print(f"[ERROR]: {rq}")
@@ -230,10 +243,10 @@ class AccuracyViewSet(viewsets.ViewSet):
             return JsonResponse(response)
 
         return JsonResponse({'error': 'Invalid request method.'}, status=405)
-
+
     @extend_schema(
-    request=ReportCreationSerializer(),
-    responses=None, tags=['Accuracy']
+        request=ReportCreationSerializer(),
+        responses=None, tags=['Accuracy']
     )
     @action(detail=False, url_path="make_report", methods=["POST"])
     def make_report(self, request):
@@ -246,11 +259,12 @@ class AccuracyViewSet(viewsets.ViewSet):
         include_test = request.data.get('include_test', False)
         subsidiary = request.data.get("subsidiary", "all")
         is_daily_report = request.data.get('is_daily_report', False)
-        report_overview_duration = request.data.get("report_overview_duration", "")
+        report_overview_duration = request.data.get(
+            "report_overview_duration", "")
         report_type = request.data.get("report_type", "accuracy")
         subsidiary = map_subsidiary_long_to_short(subsidiary)
-
-        if report_type=="billing" and subsidiary.lower().replace(" ", "") not in settings.SUB_FOR_BILLING:
+
+        if report_type == "billing" and subsidiary.lower().replace(" ", "") not in settings.SUB_FOR_BILLING:
             raise InvalidException(excArgs="Subsidiary for billing report")
 
         if is_daily_report:
@@ -261,37 +275,45 @@ class AccuracyViewSet(viewsets.ViewSet):
                 start_date = end_date - timezone.timedelta(days=30)
             else:
                 start_date = end_date - timezone.timedelta(days=7)
-            start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
-            start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z') # inside logic will include second precision with timezone for calculation
+            start_date = start_date.replace(
+                hour=0, minute=0, second=0, microsecond=0)
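+            # round the window start down to midnight so the whole first
+            # day is included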
+            # downstream logic works at second precision with timezone
+            start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
             end_date_str = end_date.strftime('%Y-%m-%dT%H:%M:%S%z')
         else:
             try:
-                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d') # We care only about day precision only
-                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d')
+                # We only care about day precision here
+                start_date = timezone.datetime.strptime(
+                    start_date_str, '%Y-%m-%d')
+                end_date = timezone.datetime.strptime(
+                    end_date_str, '%Y-%m-%d')
                 # Round:
                 # end_date_str to the beginning of the next day
                 # start_date_str to the start of the date
                 start_date = timezone.make_aware(start_date)
                 end_date = timezone.make_aware(end_date)
-                start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z') # inside logic will include second precision with timezone for calculation
-                end_date_str = (end_date + timezone.timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%S%z')
+                # downstream logic works at second precision with timezone
+                start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
+                end_date_str = (
+                    end_date + timezone.timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%S%z')
             except ValueError:
                 raise InvalidException(excArgs="Date format")
 
         query_set = {"start_date_str": start_date_str,
-                    "end_date_str": end_date_str,
-                    "request_id": request_id,
-                    "redemption_id": redemption_id,
-                    "is_reviewed": is_reviewed,
-                    "include_test": include_test,
-                    "subsidiary": subsidiary,
-                    "is_daily_report": is_daily_report,
-                    "report_overview_duration": report_overview_duration,
-                    "report_type": report_type,
-                    }
+                     "end_date_str": end_date_str,
+                     "request_id": request_id,
+                     "redemption_id": redemption_id,
+                     "is_reviewed": is_reviewed,
+                     "include_test": include_test,
+                     "subsidiary": subsidiary,
+                     "is_daily_report": is_daily_report,
+                     "report_overview_duration": report_overview_duration,
+                     "report_type": report_type,
+                     }
 
-        report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex
+        report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + \
+            "_" + uuid.uuid4().hex
         new_report: Report = Report(
             report_id=report_id,
             is_daily_report=is_daily_report,
@@ -311,29 +333,29 @@ class AccuracyViewSet(viewsets.ViewSet):
 
     # Redundant, will be removed by 19 March 2024
     @extend_schema(
-    parameters=[
-        OpenApiParameter(
-            name='report_id',
-            location=OpenApiParameter.QUERY,
-            description='Specific report id',
-            type=OpenApiTypes.STR,
-        ),
-        OpenApiParameter(
-            name='page',
-            location=OpenApiParameter.QUERY,
-            description='Page number',
-            type=OpenApiTypes.INT,
-            required=False
-        ),
-        OpenApiParameter(
-            name='page_size',
-            location=OpenApiParameter.QUERY,
-            description='Number of items per page',
-            type=OpenApiTypes.INT,
-            required=False
-        ),
-    ],
-    responses=None, tags=['Accuracy']
+        parameters=[
+            OpenApiParameter(
+                name='report_id',
+                location=OpenApiParameter.QUERY,
+                description='Specific report id',
+                type=OpenApiTypes.STR,
+            ),
+            OpenApiParameter(
+                name='page',
+                location=OpenApiParameter.QUERY,
+                description='Page number',
+                type=OpenApiTypes.INT,
+                required=False
+            ),
+            OpenApiParameter(
+                name='page_size',
+                location=OpenApiParameter.QUERY,
+                description='Number of items per page',
+                type=OpenApiTypes.INT,
+                required=False
+            ),
+        ],
+        responses=None, tags=['Accuracy']
     )
     @action(detail=False, url_path="report_detail_list", methods=["GET"])
     def get_report_detail_list(self, request):
@@ -349,7 +371,7 @@ class AccuracyViewSet(viewsets.ViewSet):
             page = paginator.get_page(page_number)
 
             data = extract_report_detail_list(page, in_percent=False)
-
+
             response = {
                 'report_detail': data,
{"subsidiary": map_subsidiary_short_to_long(report.subsidiary), @@ -366,48 +388,49 @@ class AccuracyViewSet(viewsets.ViewSet): return JsonResponse({'error': 'Invalid request method.'}, status=405) @extend_schema( - parameters=[ - OpenApiParameter( - name='start_date', - location=OpenApiParameter.QUERY, - description='Start date (YYYY-mm-DDTHH:MM:SSZ)', - type=OpenApiTypes.DATE, - default='2024-01-02T00:00:00+0700', - ), - OpenApiParameter( - name='end_date', - location=OpenApiParameter.QUERY, - description='End date (YYYY-mm-DDTHH:MM:SSZ)', - type=OpenApiTypes.DATE, - default='2024-01-10T00:00:00+0700', - ), - OpenApiParameter( - name='daily_report_only', - location=OpenApiParameter.QUERY, - description='Specific report id', - type=OpenApiTypes.BOOL, - ), - OpenApiParameter( - name='page', - location=OpenApiParameter.QUERY, - description='Page number', - type=OpenApiTypes.INT, - required=False - ), - OpenApiParameter( - name='page_size', - location=OpenApiParameter.QUERY, - description='Number of items per page', - type=OpenApiTypes.INT, - required=False - ), - ], - responses=None, tags=['Accuracy'] + parameters=[ + OpenApiParameter( + name='start_date', + location=OpenApiParameter.QUERY, + description='Start date (YYYY-mm-DDTHH:MM:SSZ)', + type=OpenApiTypes.DATE, + default='2024-01-02T00:00:00+0700', + ), + OpenApiParameter( + name='end_date', + location=OpenApiParameter.QUERY, + description='End date (YYYY-mm-DDTHH:MM:SSZ)', + type=OpenApiTypes.DATE, + default='2024-01-10T00:00:00+0700', + ), + OpenApiParameter( + name='daily_report_only', + location=OpenApiParameter.QUERY, + description='Specific report id', + type=OpenApiTypes.BOOL, + ), + OpenApiParameter( + name='page', + location=OpenApiParameter.QUERY, + description='Page number', + type=OpenApiTypes.INT, + required=False + ), + OpenApiParameter( + name='page_size', + location=OpenApiParameter.QUERY, + description='Number of items per page', + type=OpenApiTypes.INT, + required=False + ), + ], + responses=None, tags=['Accuracy'] ) @action(detail=False, url_path="report_list", methods=["GET"]) def get_report_list(self, request): if request.method == 'GET': - exclude_daily_report = request.GET.get('exclude_daily_report', True) + exclude_daily_report = request.GET.get( + 'exclude_daily_report', True) start_date_str = request.GET.get('start_date', "") end_date_str = request.GET.get('end_date', "") page_number = int(request.GET.get('page', 1)) @@ -418,32 +441,39 @@ class AccuracyViewSet(viewsets.ViewSet): reports = Report.objects else: try: - start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z') - end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z') + start_date = timezone.datetime.strptime( + start_date_str, '%Y-%m-%dT%H:%M:%S%z') + end_date = timezone.datetime.strptime( + end_date_str, '%Y-%m-%dT%H:%M:%S%z') except ValueError: raise InvalidException(excArgs="Date format") base_query = Q(created_at__range=(start_date, end_date)) reports = Report.objects.filter(base_query) - if exclude_daily_report: + if exclude_daily_report: reports = Report.objects.filter(is_daily_report=False) reports = reports.order_by('created_at').reverse() - + paginator = Paginator(reports, page_size) page = paginator.get_page(page_number) data = [] for report in page: - acc_keys = ["purchase_date", "retailername", "invoice_no", "imei_number", "avg"] + acc_keys = ["purchase_date", "retailername", + "invoice_no", "imei_number", "avg"] acc = {} for key in acc_keys: - fb = report.feedback_accuracy.get(key, 0) if 
                     if report.report_type not in ["BILLING", "billing"]:
-                        acc[key] = report.combined_accuracy.get(key, 0) if report.combined_accuracy else max([fb, rv])
+                        acc[key] = report.combined_accuracy.get(
+                            key, 0) if report.combined_accuracy else max([fb, rv])
                     else:
                         acc[key] = None
 
-                processing_time = report.average_OCR_time.get("avg", None) if report.average_OCR_time else None
+                processing_time = report.average_OCR_time.get(
+                    "avg", None) if report.average_OCR_time else None
                 if processing_time and processing_time == 0:
                     processing_time = None
                 data.append({
@@ -478,22 +508,22 @@ class AccuracyViewSet(viewsets.ViewSet):
         return JsonResponse({'error': 'Invalid request method.'}, status=405)
 
     @extend_schema(
-    parameters=[
-        OpenApiParameter(
-            name='duration',
-            location=OpenApiParameter.QUERY,
-            description='one of [30d, 7d]',
-            type=OpenApiTypes.STR,
-            default='30d',
-        ),
-        OpenApiParameter(
-            name='subsidiary',
-            location=OpenApiParameter.QUERY,
-            description='Subsidiary',
-            type=OpenApiTypes.STR,
-        )
-    ],
-    responses=None, tags=['Accuracy']
+        parameters=[
+            OpenApiParameter(
+                name='duration',
+                location=OpenApiParameter.QUERY,
+                description='one of [30d, 7d]',
+                type=OpenApiTypes.STR,
+                default='30d',
+            ),
+            OpenApiParameter(
+                name='subsidiary',
+                location=OpenApiParameter.QUERY,
+                description='Subsidiary',
+                type=OpenApiTypes.STR,
+            )
+        ],
+        responses=None, tags=['Accuracy']
     )
     @action(detail=False, url_path="overview_sumary", methods=["GET"])
     def overview_sumary(self, request):
@@ -504,7 +534,7 @@ class AccuracyViewSet(viewsets.ViewSet):
             subsidiary = map_subsidiary_long_to_short(_subsidiary)
 
             # Retrive data from Redis
-            key = f"{subsidiary}_{duration}"
+            key = f"{subsidiary}_{duration}"
             data = get_cache(key).get("data", [])
             response = {
                 'overview_data': data,
@@ -514,22 +544,22 @@ class AccuracyViewSet(viewsets.ViewSet):
         return JsonResponse({'error': 'Invalid request method.'}, status=405)
 
     @extend_schema(
-    parameters=[
-        OpenApiParameter(
-            name='duration',
-            location=OpenApiParameter.QUERY,
-            description='one of [30d, 7d]',
-            type=OpenApiTypes.STR,
-            default='30d',
-        ),
-        OpenApiParameter(
-            name='subsidiary',
-            location=OpenApiParameter.QUERY,
-            description='Subsidiary',
-            type=OpenApiTypes.STR,
-        )
-    ],
-    responses=None, tags=['Accuracy']
+        parameters=[
+            OpenApiParameter(
+                name='duration',
+                location=OpenApiParameter.QUERY,
+                description='one of [30d, 7d]',
+                type=OpenApiTypes.STR,
+                default='30d',
+            ),
+            OpenApiParameter(
+                name='subsidiary',
+                location=OpenApiParameter.QUERY,
+                description='Subsidiary',
+                type=OpenApiTypes.STR,
+            )
+        ],
+        responses=None, tags=['Accuracy']
     )
     @action(detail=False, url_path="overview", methods=["GET"])
     def overview(self, request):
@@ -538,7 +568,7 @@ class AccuracyViewSet(viewsets.ViewSet):
             duration = request.GET.get('duration', "")
 
             subsidiary = map_subsidiary_long_to_short(_subsidiary)
-
+
             if _subsidiary == "ALL":
                 # aggregate_overview from subsibdiaries
                 subsidiaries_to_include = list(settings.SUBS.values())
@@ -546,27 +576,32 @@ class AccuracyViewSet(viewsets.ViewSet):
                 # subsidiaries_to_include.remove("seao")
                 subsidiary_overview_reports = []
                 for sub in subsidiaries_to_include:
-                    key = f"{sub}_{duration}"
+                    key = f"{sub}_{duration}"
                     try:
                         this_overview = get_cache(key).get("data", [])
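+                        # rows tagged subs == "+" are subtotal rows: dropped for
+                        # non-seao subsidiaries, re-tagged with a "-32" suffix for
+                        # seao (stripped again after aggregation), presumably so
+                        # they sort after any real day of the month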
!= "seao": - this_overview = [d for d in this_overview if d.get("subs") != "+"] + this_overview = [ + d for d in this_overview if d.get("subs") != "+"] else: for item in this_overview: if item.get("subs") == "+": - item["extraction_date"] = item["extraction_date"].replace("Subtotal ", "").replace("(", "").replace(")", "") + "-32" + item["extraction_date"] = item["extraction_date"].replace( + "Subtotal ", "").replace("(", "").replace(")", "") + "-32" subsidiary_overview_reports += this_overview except Exception as e: - print(f"[WARM]: Unable to retrive data {key} from Redis, skipping...") + print( + f"[WARM]: Unable to retrive data {key} from Redis, skipping...") data = aggregate_overview(subsidiary_overview_reports) for item in data: if item.get("subs") == "+": - item["extraction_date"] = "Subtotal (" + item["extraction_date"].replace("-32", "") + ")" + item["extraction_date"] = "Subtotal (" + item["extraction_date"].replace( + "-32", "") + ")" # Do the saving process report_fine_data = copy.deepcopy(data) for i, dat in enumerate(report_fine_data): - keys = [x for x in list(dat.keys()) if "accuracy" in x.lower()] + keys = [x for x in list(dat.keys()) + if "accuracy" in x.lower()] keys_percent = "images_quality" for x_key in report_fine_data[i][keys_percent].keys(): if "percent" not in x_key: @@ -575,44 +610,46 @@ class AccuracyViewSet(viewsets.ViewSet): for key in keys: if report_fine_data[i][key]: for x_key in report_fine_data[i][key].keys(): - report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key]*100 if report_fine_data[i][key][x_key] is not None else None + report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key] * \ + 100 if report_fine_data[i][key][x_key] is not None else None overview_filename = _subsidiary + "_" + duration + ".xlsx" data_workbook = dict2xlsx(report_fine_data, _type='report') - - folder_path = os.path.join(settings.MEDIA_ROOT, "report", settings.OVERVIEW_REPORT_ROOT) - os.makedirs(folder_path, exist_ok = True) + + folder_path = os.path.join( + settings.MEDIA_ROOT, "report", settings.OVERVIEW_REPORT_ROOT) + os.makedirs(folder_path, exist_ok=True) file_path = os.path.join(folder_path, overview_filename) data_workbook.save(file_path) - s3_key=save_report_to_S3(None, file_path) - # redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data)) + s3_key = save_report_to_S3(None, file_path) + # redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data)) else: # Retrive data from Redis - key = f"{subsidiary}_{duration}" + key = f"{subsidiary}_{duration}" data = get_cache(key).get("data", []) response = { 'overview_data': data, } return JsonResponse(response, status=200) return JsonResponse({'error': 'Invalid request method.'}, status=405) - + @extend_schema( - parameters=[ - OpenApiParameter( - name='duration', - location=OpenApiParameter.QUERY, - description='one of [30d, 7d]', - type=OpenApiTypes.STR, - default='30d', - ), - OpenApiParameter( - name='subsidiary', - location=OpenApiParameter.QUERY, - description='Subsidiary', - type=OpenApiTypes.STR, - ) - ], - responses=None, tags=['Accuracy'] + parameters=[ + OpenApiParameter( + name='duration', + location=OpenApiParameter.QUERY, + description='one of [30d, 7d]', + type=OpenApiTypes.STR, + default='30d', + ), + OpenApiParameter( + name='subsidiary', + location=OpenApiParameter.QUERY, + description='Subsidiary', + type=OpenApiTypes.STR, + ) + ], + responses=None, tags=['Accuracy'] ) 
@action(detail=False, url_path="overview_download_file", methods=["GET"]) def overview_download_file(self, request): @@ -626,20 +663,22 @@ class AccuracyViewSet(viewsets.ViewSet): tmp_file = "/tmp/" + s3_key os.makedirs("/tmp", exist_ok=True) - download_from_S3("report/" + settings.OVERVIEW_REPORT_ROOT + "/" + s3_key, tmp_file) + download_from_S3( + "report/" + settings.OVERVIEW_REPORT_ROOT + "/" + s3_key, tmp_file) file = open(tmp_file, 'rb') response = FileResponse(file, status=200) # Set the content type and content disposition headers response['Content-Type'] = 'application/octet-stream' - response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file)) + response['Content-Disposition'] = 'attachment; filename="{0}"'.format( + os.path.basename(tmp_file)) return response return JsonResponse({'error': 'Invalid request method.'}, status=405) @extend_schema( - parameters=[], - responses=None, tags=['Accuracy'] + parameters=[], + responses=None, tags=['Accuracy'] ) @action(detail=False, url_path=r"get_report_file/(?P[\w\-]+)", methods=["GET"]) def get_report_file(self, request, report_id): @@ -659,8 +698,10 @@ class AccuracyViewSet(viewsets.ViewSet): if not report.S3_dashboard_file_name and request.query_params["report_expression"] != "detail": raise NotFoundException(excArgs="S3 dashboard file name") - file_name = report.S3_file_name if request.query_params["report_expression"] == "detail" else report.S3_dashboard_file_name - tmp_file = "/tmp/" + request.query_params["report_expression"] + "_" + report.subsidiary + "_" + report.start_at.astimezone(target_timezone).strftime("%Y%m%d") + "_" + report.end_at.astimezone(target_timezone).strftime("%Y%m%d") + "_created_on_" + report.created_at.astimezone(target_timezone).strftime("%Y%m%d") + ".xlsx" + file_name = report.S3_file_name if request.query_params[ + "report_expression"] == "detail" else report.S3_dashboard_file_name + tmp_file = "/tmp/" + request.query_params["report_expression"] + "_" + report.subsidiary + "_" + report.start_at.astimezone(target_timezone).strftime( + "%Y%m%d") + "_" + report.end_at.astimezone(target_timezone).strftime("%Y%m%d") + "_created_on_" + report.created_at.astimezone(target_timezone).strftime("%Y%m%d") + ".xlsx" os.makedirs("/tmp", exist_ok=True) download_from_S3(file_name, tmp_file) @@ -669,7 +710,8 @@ class AccuracyViewSet(viewsets.ViewSet): # Set the content type and content disposition headers response['Content-Type'] = 'application/octet-stream' - response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file)) + response['Content-Disposition'] = 'attachment; filename="{0}"'.format( + os.path.basename(tmp_file)) return response return JsonResponse({'error': 'Invalid request method.'}, status=405) @@ -694,11 +736,12 @@ class AccuracyViewSet(viewsets.ViewSet): if request.method == 'GET': base_query = Q(request_id=request_id) - subscription_request = SubscriptionRequest.objects.filter(base_query) + subscription_request = SubscriptionRequest.objects.filter( + base_query) if subscription_request.count() == 0: raise NotFoundException(excArgs=request_id) - + subscription_request = subscription_request.first() sample_result = { @@ -709,11 +752,12 @@ class AccuracyViewSet(viewsets.ViewSet): "purchase_date": None, "imei_number": [] } - + data = [] files = [] - subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id, file_category=FileCategory.Origin.value) + subscription_request_files = 
 
             for subscription_request_file in subscription_request_files:
                 sub = subscription_request.subscription
@@ -726,10 +770,12 @@ class AccuracyViewSet(viewsets.ViewSet):
 
                 if not reviewed_result:
                     reviewed_result = copy.deepcopy(sample_result)
-                    reviewed_result["imei_number"] = [None for _ in range(subscription_request.doc_type.split(",").count("imei"))]
+                    reviewed_result["imei_number"] = [None for _ in range(
+                        subscription_request.doc_type.split(",").count("imei"))]
                 if not feedback_result:
                     feedback_result = copy.deepcopy(sample_result)
-                    feedback_result["imei_number"] = [None for _ in range(subscription_request.doc_type.split(",").count("imei"))]
+                    feedback_result["imei_number"] = [None for _ in range(
+                        subscription_request.doc_type.split(",").count("imei"))]
                 if not predicted_result:
                     predicted_result = copy.deepcopy(sample_result)
@@ -755,14 +801,17 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             reviewed_result = subscription_request.reviewed_result
             feedback_result = subscription_request.feedback_result
-            predicted_result = predict_result_to_ready(subscription_request.predict_result)
+            predicted_result = predict_result_to_ready(
+                subscription_request.predict_result)
 
             if not reviewed_result:
                 reviewed_result = copy.deepcopy(sample_result)
-                reviewed_result["imei_number"] = [None for _ in range(subscription_request.doc_type.split(",").count("imei"))]
+                reviewed_result["imei_number"] = [None for _ in range(
+                    subscription_request.doc_type.split(",").count("imei"))]
             if not feedback_result:
                 feedback_result = copy.deepcopy(sample_result)
-                feedback_result["imei_number"] = [None for _ in range(subscription_request.doc_type.split(",").count("imei"))]
+                feedback_result["imei_number"] = [None for _ in range(
+                    subscription_request.doc_type.split(",").count("imei"))]
             if not predicted_result:
                 predicted_result = copy.deepcopy(sample_result)
@@ -782,7 +831,7 @@ class AccuracyViewSet(viewsets.ViewSet):
                 'Server Processing Time (ms)': subscription_request.preprocessing_time + subscription_request.ai_inference_time,
                 'Is Reviewed': subscription_request.is_reviewed,
                 'Feedback Accuracy': subscription_request.feedback_accuracy,
-                'Reviewed Accuracy': subscription_request.reviewed_accuracy,
+                'Reviewed Accuracy': subscription_request.reviewed_accuracy,
                 'Created At': subscription_request.created_at.isoformat(),
                 'Updated At': subscription_request.updated_at.isoformat(),
                 'raw_accuracy': subscription_request.raw_accuracy*100 if isinstance(subscription_request.raw_accuracy, float) else None,
@@ -793,31 +842,36 @@ class AccuracyViewSet(viewsets.ViewSet):
             }
 
             return JsonResponse(response)
-
+
         elif request.method == 'POST':
             data = request.data
 
             base_query = Q(request_id=request_id)
 
-            subscription_request = SubscriptionRequest.objects.filter(base_query)
+            subscription_request = SubscriptionRequest.objects.filter(
+                base_query)
 
             if subscription_request.count() == 0:
                 raise NotFoundException(excArgs=request_id)
-
+
             subscription_request = subscription_request.first()
-            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id)
+            subscription_request_files = SubscriptionRequestFile.objects.filter(
+                request=subscription_request.id)
 
             if "reviewed_result" not in data:
                 raise InvalidException(excArgs=f'reviewed_result')
-
+
             reviewed_result = data["reviewed_result"]
             if not subscription_request.predict_result:
                 raise InvalidException(excArgs=f'request_id')
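+            # index 3 of the document content is the imei field (cf. the
+            # position-based lookups in get_request_list above)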
-            validate_review(reviewed_result, len(subscription_request.predict_result.get("content", {}).get("document", [{}])[0].get("content", [{}, {}, {}, {}])[3].get("value", [])))
+            validate_review(reviewed_result, len(subscription_request.predict_result.get(
+                "content", {}).get("document", [{}])[0].get("content", [{}, {}, {}, {}])[3].get("value", [])))
             reviewed_result['request_id'] = request_id
 
             for subscription_request_file in subscription_request_files:
                 if subscription_request_file.doc_type == 'invoice':
-                    subscription_request_file.reviewed_result = copy.deepcopy(reviewed_result)
-                    subscription_request_file.reviewed_result['imei_number'] = []
+                    subscription_request_file.reviewed_result = copy.deepcopy(
+                        reviewed_result)
+                    subscription_request_file.reviewed_result['imei_number'] = [
+                    ]
                 elif subscription_request_file.doc_type == 'imei':
                     subscription_request_file.reviewed_result = {
                         "retailername": None,
@@ -825,8 +879,9 @@ class AccuracyViewSet(viewsets.ViewSet):
                         "invoice_no": None,
                         "purchase_date": [],
                         "imei_number": []}
-                    if len(reviewed_result["imei_number"]) - 1 >= subscription_request_file.index_in_request:
-                        subscription_request_file.reviewed_result["imei_number"] = reviewed_result["imei_number"][subscription_request_file.index_in_request]
+                    if len(reviewed_result["imei_number"])-1 >= subscription_request_file.index_in_request:
+                        subscription_request_file.reviewed_result["imei_number"] = [reviewed_result[
+                            "imei_number"][subscription_request_file.index_in_request]]
                 subscription_request_file.save()
 
             subscription_request.reviewed_result = reviewed_result
@@ -837,7 +892,7 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             return JsonResponse({'message': 'success.'}, status=200)
         else:
             return JsonResponse({'error': 'Invalid request method.'}, status=405)
-
+
     @extend_schema(
         request={
             'multipart/form-data': {
@@ -861,23 +916,25 @@ class AccuracyViewSet(viewsets.ViewSet):
     def request_image(self, request, request_id=None, request_image_id=None):
         if request.method == 'POST':
             data = request.data
-
+
             base_query = Q(request_id=request_id)
 
-            subscription_request = SubscriptionRequest.objects.filter(base_query)
+            subscription_request = SubscriptionRequest.objects.filter(
+                base_query)
 
             if subscription_request.count() == 0:
                 raise NotFoundException(excArgs=request_id)
-
+
             subscription_request = subscription_request.first()
-            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id)
+            subscription_request_files = SubscriptionRequestFile.objects.filter(
+                request=subscription_request.id)
 
             if "reason" not in data:
                 raise InvalidException(excArgs=f'reason')
 
             if "solution" not in data:
                 raise InvalidException(excArgs=f'solution')
-
+
             reason = data["reason"]
             solution = data["solution"]
@@ -889,7 +946,8 @@ class AccuracyViewSet(viewsets.ViewSet):
                     subscription_request_file.save()
                     is_available = True
 
             if not is_available:
-                raise NotFoundException(excArgs=request_id + "/" + request_image_id)
+                raise NotFoundException(
+                    excArgs=request_id + "/" + request_image_id)
 
             return JsonResponse({'message': 'success.'}, status=200)
         else: