from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from django.core.paginator import Paginator
from django.http import JsonResponse, FileResponse, HttpResponse
from django.utils import timezone
from django.db.models import Q

import uuid
import os
import json

from fwd import settings
from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes
# from drf_spectacular.types import OpenApiString

from ..exception.exceptions import InvalidException, RequiredFieldException, NotFoundException
from ..models import SubscriptionRequest, Report, ReportFile
from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list, extract_report_detail_list, IterAvg
from ..utils.file import download_from_S3, convert_date_string
from ..utils.process import string_to_boolean
from ..celery_worker.client_connector import c_connector


class AccuracyViewSet(viewsets.ViewSet):
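    """Accuracy endpoints: request listing, report generation, report details, overview, and report-file download."""
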
    lookup_field = "username"

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name='start_date',
                location=OpenApiParameter.QUERY,
                description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
                type=OpenApiTypes.DATE,
                default='2023-01-02T00:00:00+0700',
            ),
            OpenApiParameter(
                name='end_date',
                location=OpenApiParameter.QUERY,
                description='End date (YYYY-mm-DDTHH:MM:SSZ)',
                type=OpenApiTypes.DATE,
                default='2024-01-10T00:00:00+0700',
            ),
            OpenApiParameter(
                name='include_test',
                location=OpenApiParameter.QUERY,
                description='Whether to include test records or not',
                type=OpenApiTypes.BOOL,
            ),
            OpenApiParameter(
                name='is_reviewed',
                location=OpenApiParameter.QUERY,
                description='Which records to query',
                type=OpenApiTypes.STR,
                enum=['reviewed', 'not reviewed', 'all'],
            ),
            OpenApiParameter(
                name='request_id',
                location=OpenApiParameter.QUERY,
                description='Specific request id',
                type=OpenApiTypes.STR,
            ),
            OpenApiParameter(
                name='redemption_id',
                location=OpenApiParameter.QUERY,
                description='Specific redemption id',
                type=OpenApiTypes.STR,
            ),
            OpenApiParameter(
                name='page',
                location=OpenApiParameter.QUERY,
                description='Page number',
                type=OpenApiTypes.INT,
                required=False,
            ),
            OpenApiParameter(
                name='page_size',
                location=OpenApiParameter.QUERY,
                description='Number of items per page',
                type=OpenApiTypes.INT,
                required=False,
            ),
        ],
        responses=None, tags=['Accuracy'],
    )
    @action(detail=False, url_path="request_list", methods=["GET"])
    def get_request_list(self, request):
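        """List SubscriptionRequest records created in a date range, with optional filters and pagination."""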
        if request.method == 'GET':
            start_date_str = request.GET.get('start_date')
            end_date_str = request.GET.get('end_date')
            page_number = int(request.GET.get('page', 1))
            page_size = int(request.GET.get('page_size', 10))
            request_id = request.GET.get('request_id', None)
            redemption_id = request.GET.get('redemption_id', None)
            is_reviewed = request.GET.get('is_reviewed', None)
            include_test = request.GET.get('include_test', False)

            try:
                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
            except ValueError:
                raise InvalidException(excArgs="Date format")

            base_query = Q(created_at__range=(start_date, end_date))
            if request_id:
                base_query &= Q(request_id=request_id)
            if redemption_id:
                base_query &= Q(redemption_id=redemption_id)
            # Query params arrive as strings; normalize include_test and only
            # exclude test requests when test records are not wanted.
            if isinstance(include_test, str):
                include_test = include_test.lower() == "true"
            if not include_test:
                base_query &= Q(is_test_request=False)
            if isinstance(is_reviewed, str):
                if is_reviewed == "reviewed":
                    base_query &= Q(is_reviewed=True)
                elif is_reviewed == "not reviewed":
                    base_query &= Q(is_reviewed=False)
                elif is_reviewed == "all":
                    pass

            subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')

            paginator = Paginator(subscription_requests, page_size)
            page = paginator.get_page(page_number)

            data = []
            for sub_request in page:
                imeis = []
                purchase_date = []
                retailer = ""
                try:
                    if sub_request.reviewed_result is not None:
                        imeis = sub_request.reviewed_result.get("imei_number", [])
                        purchase_date = sub_request.reviewed_result.get("purchase_date", [])
                        retailer = sub_request.reviewed_result.get("retailername", "")
                    elif sub_request.feedback_result is not None:
                        imeis = sub_request.feedback_result.get("imei_number", [])
                        purchase_date = sub_request.feedback_result.get("purchase_date", [])
                        retailer = sub_request.feedback_result.get("retailername", "")
                    elif sub_request.predict_result is not None:
                        if sub_request.predict_result.get("status", 404) == 200:
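                            # The fixed indices below assume the predictor's document layout
                            # (content[0]=retailername, content[2]=purchase_date, content[3]=imei_number).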
                            imeis = sub_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[3].get("value", [])
                            purchase_date = sub_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[2].get("value", [])
                            retailer = sub_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[0].get("value", [])
                except Exception as e:
                    print(f"[ERROR]: {e}")
                    print(f"[ERROR]: {sub_request}")
                data.append({
                    'RequestID': sub_request.request_id,
                    'RedemptionID': sub_request.redemption_id,
                    'IMEIs': imeis,
                    'Purchase Date': purchase_date,
                    'Retailer': retailer,
                    'Client Request Time (ms)': sub_request.client_request_time,
                    'Server Processing Time (ms)': sub_request.preprocessing_time + sub_request.ai_inference_time,
                    'Is Reviewed': sub_request.is_reviewed,
                    # 'Is Bad Quality': sub_request.is_bad_image_quality,
                    'created_at': sub_request.created_at.isoformat(),
                })

            response = {
                'subscription_requests': data,
                'page': {
                    'number': page.number,
                    'total_pages': page.paginator.num_pages,
                    'count': page.paginator.count,
                }
            }

            return JsonResponse(response)

        return JsonResponse({'error': 'Invalid request method.'}, status=405)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name='is_daily_report',
                location=OpenApiParameter.QUERY,
                description='Whether to generate the report as a daily report',
                type=OpenApiTypes.BOOL,
            ),
            OpenApiParameter(
                name='start_date',
                location=OpenApiParameter.QUERY,
                description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
                type=OpenApiTypes.DATE,
                default='2023-01-02T00:00:00+0700',
            ),
            OpenApiParameter(
                name='end_date',
                location=OpenApiParameter.QUERY,
                description='End date (YYYY-mm-DDTHH:MM:SSZ)',
                type=OpenApiTypes.DATE,
                default='2024-01-10T00:00:00+0700',
            ),
            OpenApiParameter(
                name='include_test',
                location=OpenApiParameter.QUERY,
                description='Whether to include test records or not',
                type=OpenApiTypes.BOOL,
            ),
            OpenApiParameter(
                name='is_reviewed',
                location=OpenApiParameter.QUERY,
                description='Which records to query',
                type=OpenApiTypes.STR,
                enum=['reviewed', 'not reviewed', 'all'],
            ),
            OpenApiParameter(
                name='request_id',
                location=OpenApiParameter.QUERY,
                description='Specific request id',
                type=OpenApiTypes.STR,
            ),
            OpenApiParameter(
                name='redemption_id',
                location=OpenApiParameter.QUERY,
                description='Specific redemption id',
                type=OpenApiTypes.STR,
            ),
            OpenApiParameter(
                name='subsidiary',
                location=OpenApiParameter.QUERY,
                description='Subsidiary',
                type=OpenApiTypes.STR,
            ),
        ],
        responses=None, tags=['Accuracy'],
    )
    @action(detail=False, url_path="make_report", methods=["GET"])
    def make_report(self, request):
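        """Create a Report record and trigger the background accuracy calculation (shadow_report)."""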
        if request.method == 'GET':
            start_date_str = request.GET.get('start_date')
            end_date_str = request.GET.get('end_date')
            request_id = request.GET.get('request_id', None)
            redemption_id = request.GET.get('redemption_id', None)
            is_reviewed = string_to_boolean(request.GET.get('is_reviewed', "false"))
            include_test = string_to_boolean(request.GET.get('include_test', "false"))
            subsidiary = request.GET.get("subsidiary", "all")
            is_daily_report = string_to_boolean(request.GET.get('is_daily_report', "false"))

            try:
                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
            except ValueError:
                raise InvalidException(excArgs="Date format")

            query_set = {"start_date_str": start_date_str,
                         "end_date_str": end_date_str,
                         "request_id": request_id,
                         "redemption_id": redemption_id,
                         "is_reviewed": is_reviewed,
                         "include_test": include_test,
                         "subsidiary": subsidiary,
                         "is_daily_report": is_daily_report,
                         }

            report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex
            new_report: Report = Report(
                report_id=report_id,
                is_daily_report=is_daily_report,
                subsidiary=subsidiary.lower().replace(" ", ""),
                include_test=include_test,
                include_reviewed=is_reviewed,
                start_at=start_date,
                end_at=end_date,
                status="Processing",
            )
            if is_daily_report:
                new_report.created_at = end_date
            new_report.save()

            # Background job to calculate accuracy
            shadow_report(report_id, query_set)

            return JsonResponse(status=status.HTTP_200_OK, data={"report_id": report_id})

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name='report_id',
                location=OpenApiParameter.QUERY,
                description='Specific report id',
                type=OpenApiTypes.STR,
            ),
            OpenApiParameter(
                name='page',
                location=OpenApiParameter.QUERY,
                description='Page number',
                type=OpenApiTypes.INT,
                required=False,
            ),
            OpenApiParameter(
                name='page_size',
                location=OpenApiParameter.QUERY,
                description='Number of items per page',
                type=OpenApiTypes.INT,
                required=False,
            ),
        ],
        responses=None, tags=['Accuracy'],
    )
    @action(detail=False, url_path="report_detail_list", methods=["GET"])
    def get_report_detail_list(self, request):
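        """Return paginated ReportFile details for a single report, plus report metadata."""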
        if request.method == 'GET':
            report_id = request.GET.get('report_id', None)
            page_number = int(request.GET.get('page', 1))
            page_size = int(request.GET.get('page_size', 10))

            report = Report.objects.filter(report_id=report_id).first()
            if report is None:
                raise NotFoundException(excArgs=f"report: {report_id}")
            report_files = ReportFile.objects.filter(report=report)

            paginator = Paginator(report_files, page_size)
            page = paginator.get_page(page_number)

            data = extract_report_detail_list(page, in_percent=False)

            response = {
                'report_detail': data,
                'metadata': {"subsidiary": report.subsidiary,
                             "start_at": report.start_at,
                             "end_at": report.end_at},
                'page': {
                    'number': page.number,
                    'total_pages': page.paginator.num_pages,
                    'count': page.paginator.count,
                }
            }
            return JsonResponse(response, status=200)

        return JsonResponse({'error': 'Invalid request method.'}, status=405)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name='start_date',
                location=OpenApiParameter.QUERY,
                description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
                type=OpenApiTypes.DATE,
                default='2023-01-02T00:00:00+0700',
            ),
            OpenApiParameter(
                name='end_date',
                location=OpenApiParameter.QUERY,
                description='End date (YYYY-mm-DDTHH:MM:SSZ)',
                type=OpenApiTypes.DATE,
                default='2024-01-10T00:00:00+0700',
            ),
            OpenApiParameter(
                name='daily_report_only',
                location=OpenApiParameter.QUERY,
                description='Only include daily reports',
                type=OpenApiTypes.BOOL,
            ),
            OpenApiParameter(
                name='page',
                location=OpenApiParameter.QUERY,
                description='Page number',
                type=OpenApiTypes.INT,
                required=False,
            ),
            OpenApiParameter(
                name='page_size',
                location=OpenApiParameter.QUERY,
                description='Number of items per page',
                type=OpenApiTypes.INT,
                required=False,
            ),
        ],
        responses=None, tags=['Accuracy'],
    )
    @action(detail=False, url_path="report_list", methods=["GET"])
    def get_report_list(self, request):
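        """List generated reports in a date range, optionally restricted to daily reports."""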
        if request.method == 'GET':
            daily_report_only = string_to_boolean(request.GET.get('daily_report_only', "false"))
            start_date_str = request.GET.get('start_date', "")
            end_date_str = request.GET.get('end_date', "")
            page_number = int(request.GET.get('page', 1))
            page_size = int(request.GET.get('page_size', 10))

            if not start_date_str or not end_date_str:
                reports = Report.objects.all()
            else:
                try:
                    start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
                    end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
                except ValueError:
                    raise InvalidException(excArgs="Date format")
                base_query = Q(created_at__range=(start_date, end_date))
                if daily_report_only:
                    base_query &= Q(is_daily_report=True)
                reports = Report.objects.filter(base_query).order_by('created_at')

            paginator = Paginator(reports, page_size)
            page = paginator.get_page(page_number)

            data = []
            for report in page:
                data.append({
                    "ID": report.id,
                    "Created Date": report.created_at,
                    "No. Requests": report.number_request,
                    "Status": report.status,
                    "Purchase Date Acc": report.reviewed_accuracy.get("purchase_date", None) if report.reviewed_accuracy else None,
                    "Retailer Acc": report.feedback_accuracy.get("retailername", None) if report.feedback_accuracy else None,
                    "IMEI Acc": report.feedback_accuracy.get("imei_number", None) if report.feedback_accuracy else None,
                    "Avg. Accuracy": report.feedback_accuracy.get("avg", None) if report.feedback_accuracy else None,
                    "Avg. Client Request Time": report.average_client_time.get("avg", 0) if report.average_client_time else 0,
                    "Avg. OCR Processing Time": report.average_OCR_time.get("avg", 0) if report.average_OCR_time else 0,
                    "report_id": report.report_id,
                })

            response = {
                'report_detail': data,
                'page': {
                    'number': page.number,
                    'total_pages': page.paginator.num_pages,
                    'count': page.paginator.count,
                }
            }
            return JsonResponse(response, status=200)

        return JsonResponse({'error': 'Invalid request method.'}, status=405)

    @extend_schema(
        parameters=[
            OpenApiParameter(
                name='start_date',
                location=OpenApiParameter.QUERY,
                description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
                type=OpenApiTypes.DATE,
                default='2023-01-02T00:00:00+0700',
            ),
            OpenApiParameter(
                name='end_date',
                location=OpenApiParameter.QUERY,
                description='End date (YYYY-mm-DDTHH:MM:SSZ)',
                type=OpenApiTypes.DATE,
                default='2024-01-10T00:00:00+0700',
            ),
            OpenApiParameter(
                name='subsidiary',
                location=OpenApiParameter.QUERY,
                description='Subsidiary',
                type=OpenApiTypes.STR,
            ),
            OpenApiParameter(
                name='page',
                location=OpenApiParameter.QUERY,
                description='Page number',
                type=OpenApiTypes.INT,
                required=False,
            ),
            OpenApiParameter(
                name='page_size',
                location=OpenApiParameter.QUERY,
                description='Number of items per page',
                type=OpenApiTypes.INT,
                required=False,
            ),
        ],
        responses=None, tags=['Accuracy'],
    )
    @action(detail=False, url_path="overview", methods=["GET"])
    def overview(self, request):
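        """Aggregate daily reports into monthly overview rows via MonthReportAccumulate."""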
        if request.method == 'GET':
            subsidiary = request.GET.get('subsidiary', None)
            start_date_str = request.GET.get('start_date', "")
            end_date_str = request.GET.get('end_date', "")
            page_number = int(request.GET.get('page', 1))
            page_size = int(request.GET.get('page_size', 10))

            base_query = Q()

            if start_date_str and end_date_str:
                try:
                    start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
                    end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
                except ValueError:
                    raise InvalidException(excArgs="Date format")
                base_query &= Q(created_at__range=(start_date, end_date))

            if subsidiary:
                base_query &= Q(subsidiary=subsidiary)
            base_query &= Q(is_daily_report=True)
            reports = Report.objects.filter(base_query).order_by('created_at').reverse()

            paginator = Paginator(reports, page_size)
            page = paginator.get_page(page_number)

            data = []
            this_month_report = MonthReportAccumulate()
            for report in page:
                res = this_month_report.add(report)
                if not res:
                    # When add() returns falsy, flush the accumulated month and
                    # restart the accumulator with the current report.
                    _, _data, total = this_month_report()
                    data += [total]
                    data += _data
                    this_month_report = MonthReportAccumulate()
                    this_month_report.add(report)
                else:
                    continue
            _, _data, total = this_month_report()
            data += [total]
            data += _data

            # Generate xlsx file
            # workbook = dict2xlsx(data, _type="report")
            # tmp_file = f"/tmp/{str(uuid.uuid4())}.xlsx"
            # os.makedirs(os.path.dirname(tmp_file), exist_ok=True)
            # workbook.save(tmp_file)
            # c_connector.remove_local_file((tmp_file, "fake_request_id"))

            response = {
                # 'file': load_xlsx_file(),
                'overview_data': data,
                'page': {
                    'number': page.number,
                    'total_pages': page.paginator.num_pages,
                    'count': page.paginator.count,
                }
            }
            return JsonResponse(response, status=200)

        return JsonResponse({'error': 'Invalid request method.'}, status=405)

    @extend_schema(
        parameters=[],
        responses=None, tags=['Accuracy'],
    )
    @action(detail=False, url_path=r"get_report_file/(?P<report_id>[\w\-]+)", methods=["GET"])
    def get_report_file(self, request, report_id):
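        """Download a report's xlsx file from S3 and return it as an attachment."""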
        if request.method == 'GET':
            # report_id = request.GET.get('report_id', None)

            if not report_id:
                raise RequiredFieldException(excArgs="report_id")
            report_num = Report.objects.filter(report_id=report_id).count()
            if report_num == 0:
                raise NotFoundException(excArgs=f"report: {report_id}")
            report = Report.objects.filter(report_id=report_id).first()

            # download from S3 to local
            tmp_file = "/tmp/" + report.subsidiary + "_" + report.start_at.strftime("%Y%m%d") + "_" + report.end_at.strftime("%Y%m%d") + "_created_on_" + report.created_at.strftime("%Y%m%d") + ".xlsx"
            os.makedirs("/tmp", exist_ok=True)
            if not report.S3_file_name:
                raise NotFoundException(excArgs="S3 file name")
            download_from_S3(report.S3_file_name, tmp_file)
            file = open(tmp_file, 'rb')
            response = FileResponse(file, status=200)

            # Set the content type and content disposition headers
            response['Content-Type'] = 'application/octet-stream'
            response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file))
            return response

        return JsonResponse({'error': 'Invalid request method.'}, status=405)


class RequestViewSet(viewsets.ViewSet):
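    """Endpoints for inspecting a single request and submitting its reviewed result."""
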
    lookup_field = "username"

    @extend_schema(
        request={
            'multipart/form-data': {
                'type': 'object',
                'properties': {
                    'reviewed_result': {
                        'type': 'string',
                        'default': '''{"request_id": "Sample request_id", "imei_number": ["sample_imei1", "sample_imei2"], "retailername": "Sample Retailer", "purchase_date": "01/01/1970", "sold_to_party": "Sample party"}''',
                    },
                },
            },
        },
        responses=None,
        tags=['Request']
    )
    @action(detail=False, url_path=r"request/(?P<request_id>[\w\-]+)", methods=["GET", "POST"])
    def get_subscription_request(self, request, request_id=None):
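        """GET returns the extracted/feedback/reviewed fields of a request; POST saves a reviewed result and marks the request as reviewed."""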
        if request.method == 'GET':
            base_query = Q(request_id=request_id)

            subscription_request = SubscriptionRequest.objects.filter(base_query)

            if subscription_request.count() == 0:
                raise NotFoundException(excArgs=request_id)

            subscription_request = subscription_request.first()

            data = []

            imeis = []
            purchase_date = []
            retailer = ""
            try:
                if subscription_request.reviewed_result is not None:
                    imeis = subscription_request.reviewed_result.get("imei_number", [])
                    purchase_date = subscription_request.reviewed_result.get("purchase_date", [])
                    retailer = subscription_request.reviewed_result.get("retailername", "")
                elif subscription_request.feedback_result is not None:
                    imeis = subscription_request.feedback_result.get("imei_number", [])
                    purchase_date = subscription_request.feedback_result.get("purchase_date", [])
                    retailer = subscription_request.feedback_result.get("retailername", "")
                elif subscription_request.predict_result is not None:
                    if subscription_request.predict_result.get("status", 404) == 200:
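                        # The fixed indices below assume the predictor's document layout
                        # (content[0]=retailername, content[2]=purchase_date, content[3]=imei_number).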
                        imeis = subscription_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[3].get("value", [])
                        purchase_date = subscription_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[2].get("value", [])
                        retailer = subscription_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[0].get("value", [])
            except Exception as e:
                print(f"[ERROR]: {e}")
                print(f"[ERROR]: {subscription_request}")
            data.append({
                'Document Type': subscription_request.doc_type,
                'RequestID': subscription_request.request_id,
                'RedemptionID': subscription_request.redemption_id,
                'Process Type': subscription_request.process_type,
                'Provider Code': subscription_request.provider_code,
                'Status': subscription_request.status,
                'IMEIs': imeis,
                'Purchase Date': purchase_date,
                'Retailer': retailer,
                'Reviewed result': subscription_request.reviewed_result,
                'Feedback result': subscription_request.feedback_result,
                'Is Test Request': subscription_request.is_test_request,
                'Client Request Time (ms)': subscription_request.client_request_time,
                'Server Processing Time (ms)': subscription_request.preprocessing_time + subscription_request.ai_inference_time,
                'Is Reviewed': subscription_request.is_reviewed,
                # 'Is Bad Quality': subscription_request.is_bad_image_quality,
                'created_at': subscription_request.created_at.isoformat(),
                'updated_at': subscription_request.updated_at.isoformat()
            })

            response = {
                'subscription_requests': data
            }

            return JsonResponse(response)

        elif request.method == 'POST':
            data = request.data

            base_query = Q(request_id=request_id)

            subscription_request = SubscriptionRequest.objects.filter(base_query)

            if subscription_request.count() == 0:
                raise NotFoundException(excArgs=request_id)

            subscription_request = subscription_request.first()

            reviewed_result = json.loads(data["reviewed_result"])
            for field in ['retailername', 'sold_to_party', 'purchase_date', 'imei_number']:
                if field not in reviewed_result:
                    raise RequiredFieldException(excArgs=f'reviewed_result.{field}')
            subscription_request.reviewed_result = reviewed_result
            subscription_request.reviewed_result['request_id'] = request_id
            subscription_request.is_reviewed = True
            subscription_request.save()

            return JsonResponse({'message': 'success.'}, status=200)
        else:
            return JsonResponse({'error': 'Invalid request method.'}, status=405)