from rest_framework import status, viewsets from rest_framework.decorators import action from rest_framework.response import Response from django.core.paginator import Paginator from django.http import JsonResponse, FileResponse, HttpResponse from django.utils import timezone from django.db.models import Q import uuid import os import pytz from fwd import settings from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes # from drf_spectacular.types import OpenApiString import json from ..exception.exceptions import InvalidException, RequiredFieldException, NotFoundException from ..models import SubscriptionRequest, Report, ReportFile, SubscriptionRequestFile from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list, extract_report_detail_list, IterAvg from ..utils.file import download_from_S3, convert_date_string from ..utils.redis import RedisUtils from ..utils.process import string_to_boolean from ..request.ReportCreationSerializer import ReportCreationSerializer from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long redis_client = RedisUtils() class AccuracyViewSet(viewsets.ViewSet): lookup_field = "username" @extend_schema( parameters=[ OpenApiParameter( name='start_date', location=OpenApiParameter.QUERY, description='Start date (YYYY-mm-DDTHH:MM:SSZ)', type=OpenApiTypes.DATE, default='2023-01-02T00:00:00+0700', ), OpenApiParameter( name='end_date', location=OpenApiParameter.QUERY, description='End date (YYYY-mm-DDTHH:MM:SSZ)', type=OpenApiTypes.DATE, default='2024-01-10T00:00:00+0700', ), OpenApiParameter( name='include_test', location=OpenApiParameter.QUERY, description='Whether to include test record or not', type=OpenApiTypes.BOOL, ), OpenApiParameter( name='is_reviewed', location=OpenApiParameter.QUERY, description='Which records to be query', type=OpenApiTypes.STR, enum=['reviewed', 'not reviewed', 'all'], ), OpenApiParameter( name='request_id', location=OpenApiParameter.QUERY, description='Specific request id', type=OpenApiTypes.STR, ), OpenApiParameter( name='redemption_id', location=OpenApiParameter.QUERY, description='Specific redemption id', type=OpenApiTypes.STR, ), OpenApiParameter( name='page', location=OpenApiParameter.QUERY, description='Page number', type=OpenApiTypes.INT, required=False ), OpenApiParameter( name='page_size', location=OpenApiParameter.QUERY, description='Number of items per page', type=OpenApiTypes.INT, required=False ), ], responses=None, tags=['Accuracy'] ) @action(detail=False, url_path="request_list", methods=["GET"]) def get_request_list(self, request): if request.method == 'GET': start_date_str = request.GET.get('start_date') end_date_str = request.GET.get('end_date') page_number = int(request.GET.get('page', 1)) page_size = int(request.GET.get('page_size', 10)) request_id = request.GET.get('request_id', None) redemption_id = request.GET.get('redemption_id', None) is_reviewed = request.GET.get('is_reviewed', None) include_test = request.GET.get('include_test', False) try: start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z') end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z') except ValueError: raise InvalidException(excArgs="Date format") base_query = Q(created_at__range=(start_date, end_date)) if request_id: base_query &= Q(request_id=request_id) if redemption_id: base_query &= Q(redemption_id=redemption_id) base_query &= Q(is_test_request=False) if isinstance(include_test, str): include_test = True if include_test=="true" else False if include_test: # base_query = ~base_query base_query.children = base_query.children[:-1] elif isinstance(include_test, bool): if include_test: base_query = ~base_query if isinstance(is_reviewed, str): if is_reviewed == "reviewed": base_query &= Q(is_reviewed=True) elif is_reviewed == "not reviewed": base_query &= Q(is_reviewed=False) elif is_reviewed == "all": pass subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at') paginator = Paginator(subscription_requests, page_size) page = paginator.get_page(page_number) data = [] for request in page: imeis = [] purchase_date = [] retailer = "" try: if request.reviewed_result is not None: imeis = request.reviewed_result.get("imei_number", []) purchase_date = request.reviewed_result.get("purchase_date", []) retailer = request.reviewed_result.get("retailername", "") elif request.feedback_result is not None : imeis = request.feedback_result.get("imei_number", []) purchase_date = request.feedback_result.get("purchase_date", []) retailer = request.feedback_result.get("retailername", "") elif request.predict_result is not None: if request.predict_result.get("status", 404) == 200: imeis = request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[3].get("value", []) purchase_date = request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[2].get("value", []) retailer = request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[0].get("value", []) except Exception as e: print(f"[ERROR]: {e}") print(f"[ERROR]: {request}") data.append({ 'RequestID': request.request_id, 'RedemptionID': request.redemption_id, 'IMEIs': imeis, 'Purchase Date': purchase_date, 'Retailer': retailer, 'Client Request Time (ms)': request.client_request_time, 'Server Processing Time (ms)': request.preprocessing_time + request.ai_inference_time, 'Is Reviewed': request.is_reviewed, # 'Is Bad Quality': request.is_bad_image_quality, 'created_at': request.created_at.isoformat() }) response = { 'subscription_requests': data, 'page': { 'number': page.number, 'total_pages': page.paginator.num_pages, 'count': page.paginator.count, } } return JsonResponse(response) return JsonResponse({'error': 'Invalid request method.'}, status=405) @extend_schema( request=ReportCreationSerializer(), responses=None, tags=['Accuracy'] ) @action(detail=False, url_path="make_report", methods=["POST"]) def make_report(self, request): if request.method == 'POST': start_date_str = request.data.get('start_date') end_date_str = request.data.get('end_date') request_id = request.data.get('request_id', None) redemption_id = request.data.get('redemption_id', None) is_reviewed = request.data.get('is_reviewed', False) include_test = request.data.get('include_test', False) subsidiary = request.data.get("subsidiary", "all") is_daily_report = request.data.get('is_daily_report', False) report_overview_duration = request.data.get("report_overview_duration", "") subsidiary = map_subsidiary_long_to_short(subsidiary) if is_daily_report: if report_overview_duration not in settings.OVERVIEW_REPORT_DURATION: raise InvalidException(excArgs="overview duration") end_date = timezone.now() if report_overview_duration == "30d": start_date = end_date - timezone.timedelta(days=30) else: start_date = end_date - timezone.timedelta(days=7) start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0) start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z') # inside logic will include second precision with timezone for calculation end_date_str = end_date.strftime('%Y-%m-%dT%H:%M:%S%z') else: try: start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d') # We care only about day precision only end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d') # Round: # end_date_str to the beginning of the next day # start_date_str to the start of the date start_date = timezone.make_aware(start_date) end_date = timezone.make_aware(end_date) start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z') # inside logic will include second precision with timezone for calculation end_date_str = (end_date + timezone.timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%S%z') except ValueError: raise InvalidException(excArgs="Date format") query_set = {"start_date_str": start_date_str, "end_date_str": end_date_str, "request_id": request_id, "redemption_id": redemption_id, "is_reviewed": is_reviewed, "include_test": include_test, "subsidiary": subsidiary, "is_daily_report": is_daily_report, "report_overview_duration": report_overview_duration } # if is_daily_report: # if (end_date-start_date) > timezone.timedelta(days=1): # raise InvalidException(excArgs="Date range") report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex new_report: Report = Report( report_id=report_id, is_daily_report=is_daily_report, subsidiary=subsidiary.lower().replace(" ", ""), include_test=include_test, include_reviewed=is_reviewed, start_at=start_date, end_at=end_date, status="Processing", ) new_report.save() # Background job to calculate accuracy shadow_report(report_id, query_set) return JsonResponse(status=status.HTTP_200_OK, data={"report_id": report_id}) @extend_schema( parameters=[ OpenApiParameter( name='report_id', location=OpenApiParameter.QUERY, description='Specific report id', type=OpenApiTypes.STR, ), OpenApiParameter( name='page', location=OpenApiParameter.QUERY, description='Page number', type=OpenApiTypes.INT, required=False ), OpenApiParameter( name='page_size', location=OpenApiParameter.QUERY, description='Number of items per page', type=OpenApiTypes.INT, required=False ), ], responses=None, tags=['Accuracy'] ) @action(detail=False, url_path="report_detail_list", methods=["GET"]) def get_report_detail_list(self, request): if request.method == 'GET': report_id = request.GET.get('report_id', None) page_number = int(request.GET.get('page', 1)) page_size = int(request.GET.get('page_size', 10)) report = Report.objects.filter(report_id=report_id).first() report_files = ReportFile.objects.filter(report=report) paginator = Paginator(report_files, page_size) page = paginator.get_page(page_number) data = extract_report_detail_list(page, in_percent=False) response = { 'report_detail': data, 'metadata': {"subsidiary": map_subsidiary_short_to_long(report.subsidiary), "start_at": report.start_at, "end_at": report.end_at}, 'page': { 'number': page.number, 'total_pages': page.paginator.num_pages, 'count': page.paginator.count, } } return JsonResponse(response, status=200) return JsonResponse({'error': 'Invalid request method.'}, status=405) @extend_schema( parameters=[ OpenApiParameter( name='start_date', location=OpenApiParameter.QUERY, description='Start date (YYYY-mm-DDTHH:MM:SSZ)', type=OpenApiTypes.DATE, default='2023-01-02T00:00:00+0700', ), OpenApiParameter( name='end_date', location=OpenApiParameter.QUERY, description='End date (YYYY-mm-DDTHH:MM:SSZ)', type=OpenApiTypes.DATE, default='2024-01-10T00:00:00+0700', ), OpenApiParameter( name='daily_report_only', location=OpenApiParameter.QUERY, description='Specific report id', type=OpenApiTypes.BOOL, ), OpenApiParameter( name='page', location=OpenApiParameter.QUERY, description='Page number', type=OpenApiTypes.INT, required=False ), OpenApiParameter( name='page_size', location=OpenApiParameter.QUERY, description='Number of items per page', type=OpenApiTypes.INT, required=False ), ], responses=None, tags=['Accuracy'] ) @action(detail=False, url_path="report_list", methods=["GET"]) def get_report_list(self, request): if request.method == 'GET': exclude_daily_report = request.GET.get('exclude_daily_report', True) start_date_str = request.GET.get('start_date', "") end_date_str = request.GET.get('end_date', "") page_number = int(request.GET.get('page', 1)) page_size = int(request.GET.get('page_size', 10)) reports = None if not start_date_str or not end_date_str: reports = Report.objects else: try: start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z') end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z') except ValueError: raise InvalidException(excArgs="Date format") base_query = Q(created_at__range=(start_date, end_date)) reports = Report.objects.filter(base_query) if exclude_daily_report: reports = Report.objects.filter(is_daily_report=False) reports = reports.order_by('created_at').reverse() paginator = Paginator(reports, page_size) page = paginator.get_page(page_number) data = [] for report in page: acc_keys = ["purchase_date", "retailername", "imei_number", "avg"] acc = {} for key in acc_keys: fb = report.feedback_accuracy.get(key, 0) if report.feedback_accuracy else 0 rv = report.reviewed_accuracy.get(key, 0) if report.reviewed_accuracy else 0 acc[key] = max([fb, rv]) data.append({ "ID": report.id, "Created Date": report.created_at, "Start Date": report.start_at, "End Date": report.end_at, "No. Requests": report.number_request, "Status": report.status, "Purchase Date Acc": acc["purchase_date"], "Retailer Acc": acc["retailername"], "IMEI Acc": acc["imei_number"], "Avg. Accuracy": acc["avg"], "Avg. Client Request Time": report.average_client_time.get("avg", 0) if report.average_client_time else 0, "Avg. OCR Processing Time": report.average_OCR_time.get("avg", 0) if report.average_OCR_time else 0, "report_id": report.report_id, "Subsidiary": map_subsidiary_short_to_long(report.subsidiary), }) response = { 'report_detail': data, 'page': { 'number': page.number, 'total_pages': page.paginator.num_pages, 'count': page.paginator.count, } } return JsonResponse(response, status=200) return JsonResponse({'error': 'Invalid request method.'}, status=405) @extend_schema( parameters=[ OpenApiParameter( name='duration', location=OpenApiParameter.QUERY, description='one of [30d, 7d]', type=OpenApiTypes.STR, default='30d', ), OpenApiParameter( name='subsidiary', location=OpenApiParameter.QUERY, description='Subsidiary', type=OpenApiTypes.STR, ) ], responses=None, tags=['Accuracy'] ) @action(detail=False, url_path="overview", methods=["GET"]) def overview(self, request): if request.method == 'GET': subsidiary = request.GET.get('subsidiary', "ALL") duration = request.GET.get('duration', "") subsidiary = map_subsidiary_long_to_short(subsidiary) # Retrive data from Redis key = f"{subsidiary}_{duration}" data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", []) response = { 'overview_data': data, } return JsonResponse(response, status=200) return JsonResponse({'error': 'Invalid request method.'}, status=405) @extend_schema( parameters=[ OpenApiParameter( name='duration', location=OpenApiParameter.QUERY, description='one of [30d, 7d]', type=OpenApiTypes.STR, default='30d', ), OpenApiParameter( name='subsidiary', location=OpenApiParameter.QUERY, description='Subsidiary', type=OpenApiTypes.STR, ) ], responses=None, tags=['Accuracy'] ) @action(detail=False, url_path="overview_download_file", methods=["GET"]) def overview_download_file(self, request): if request.method == 'GET': subsidiary = request.GET.get('subsidiary', "ALL") duration = request.GET.get('duration', "") subsidiary = map_subsidiary_long_to_short(subsidiary) s3_key = f"{subsidiary}_{duration}.xlsx" tmp_file = "/tmp/" + s3_key os.makedirs("/tmp", exist_ok=True) download_from_S3("report/" + settings.OVERVIEW_REPORT_ROOT + "/" + s3_key, tmp_file) file = open(tmp_file, 'rb') response = FileResponse(file, status=200) # Set the content type and content disposition headers response['Content-Type'] = 'application/octet-stream' response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file)) return response return JsonResponse({'error': 'Invalid request method.'}, status=405) @extend_schema( parameters=[], responses=None, tags=['Accuracy'] ) @action(detail=False, url_path=r"get_report_file/(?P[\w\-]+)", methods=["GET"]) def get_report_file(self, request, report_id): if request.method == 'GET': # report_id = request.GET.get('report_id', None) if not report_id: raise RequiredFieldException(excArgs="report_id1") report_num = Report.objects.filter(report_id=report_id).count() if report_num == 0: raise NotFoundException(excArgs=f"report: {report_id}") report = Report.objects.filter(report_id=report_id).first() # download from s3 to local target_timezone = pytz.timezone(settings.TIME_ZONE) tmp_file = "/tmp/" + report.subsidiary + "_" + report.start_at.astimezone(target_timezone).strftime("%Y%m%d") + "_" + report.end_at.astimezone(target_timezone).strftime("%Y%m%d") + "_created_on_" + report.created_at.astimezone(target_timezone).strftime("%Y%m%d") + ".xlsx" os.makedirs("/tmp", exist_ok=True) if not report.S3_file_name: raise NotFoundException(excArgs="S3 file name") download_from_S3(report.S3_file_name, tmp_file) file = open(tmp_file, 'rb') response = FileResponse(file, status=200) # Set the content type and content disposition headers response['Content-Type'] = 'application/octet-stream' response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file)) return response return JsonResponse({'error': 'Invalid request method.'}, status=405) class RequestViewSet(viewsets.ViewSet): lookup_field = "username" @extend_schema( request={ 'multipart/form-data': { 'type': 'object', 'properties': { 'reviewed_result': { 'type': 'string', 'default': '''{"request_id": "Sample request_id", "imei_number": ["sample_imei1", "sample_imei2"], "retailername": "Sample Retailer", "purchase_date": "01/01/1970", "sold_to_party": "Sample party"}''', }, }, }, }, responses=None, tags=['Request'] ) @action(detail=False, url_path=r"request/(?P[\w\-]+)", methods=["GET", "POST"]) def get_subscription_request(self, request, request_id=None): if request.method == 'GET': base_query = Q(request_id=request_id) subscription_request = SubscriptionRequest.objects.filter(base_query) if subscription_request.count() == 0: raise NotFoundException(excArgs=request_id) subscription_request = subscription_request.first() data = [] imeis = [] purchase_date = [] retailer = "" try: if subscription_request.reviewed_result is not None: imeis = subscription_request.reviewed_result.get("imei_number", []) purchase_date = subscription_request.reviewed_result.get("purchase_date", []) retailer = subscription_request.reviewed_result.get("retailername", "") elif subscription_request.feedback_result is not None : imeis = subscription_request.feedback_result.get("imei_number", []) purchase_date = subscription_request.feedback_result.get("purchase_date", []) retailer = subscription_request.feedback_result.get("retailername", "") elif subscription_request.predict_result is not None: if subscription_request.predict_result.get("status", 404) == 200: imeis = subscription_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[3].get("value", []) purchase_date = subscription_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[2].get("value", []) retailer = subscription_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[0].get("value", []) except Exception as e: print(f"[ERROR]: {e}") print(f"[ERROR]: {subscription_request}") data.append({ 'Document Type': subscription_request.doc_type, 'RequestID': subscription_request.request_id, 'RedemptionID': subscription_request.redemption_id, 'Process Type': subscription_request.process_type, 'Provider Code': subscription_request.provider_code, 'Status': subscription_request.status, 'IMEIs': imeis, 'Purchase Date': purchase_date, 'Retailer': retailer, 'Reviewed result': subscription_request.reviewed_result, 'Feedback result': subscription_request.feedback_result, 'Is Test Request': subscription_request.is_test_request, 'Client Request Time (ms)': subscription_request.client_request_time, 'Server Processing Time (ms)': subscription_request.preprocessing_time + subscription_request.ai_inference_time, 'Is Reviewed': subscription_request.is_reviewed, # 'Is Bad Quality': subscription_request.is_bad_image_quality, 'created_at': subscription_request.created_at.isoformat(), 'updated_at': subscription_request.updated_at.isoformat() }) response = { 'subscription_requests': data } return JsonResponse(response) elif request.method == 'POST': data = request.data base_query = Q(request_id=request_id) subscription_request = SubscriptionRequest.objects.filter(base_query) if subscription_request.count() == 0: raise NotFoundException(excArgs=request_id) subscription_request = subscription_request.first() file_query = Q(request=request_id) subscription_request_files = SubscriptionRequestFile.objects.filter(file_query) reviewed_result = json.loads(data["reviewed_result"]) for subscription_request_file in subscription_request_files: if subscription_request_file.doc_type == 'invoice': subscription_request_file.reviewed_result = reviewed_result subscription_request_file.reviewed_result['imei_number'] = [] elif subscription_request_file.doc_type == 'imei': subscription_request_file.reviewed_result = {"retailername": None, "sold_to_party": None, "purchase_date": [], "imei_number": [reviewed_result["imei_number"][subscription_request_file.index_in_request]]} subscription_request_file.save() for field in ['retailername', 'sold_to_party', 'purchase_date', 'imei_number']: if not field in reviewed_result.keys(): raise RequiredFieldException(excArgs=f'reviewed_result.{field}') subscription_request.reviewed_result = reviewed_result subscription_request.reviewed_result['request_id'] = request_id subscription_request.is_reviewed = True subscription_request.save() return JsonResponse({'message': 'success.'}, status=200) else: return JsonResponse({'error': 'Invalid request method.'}, status=405)