sbt-idp/cope2n-api/fwd_api/api/accuracy_view.py

702 lines
31 KiB
Python
Raw Normal View History

2024-01-05 07:18:16 +00:00
from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from django.core.paginator import Paginator
2024-02-01 07:32:20 +00:00
from django.http import JsonResponse, FileResponse, HttpResponse
2024-01-05 07:18:16 +00:00
from django.utils import timezone
from django.db.models import Q
2024-01-31 03:00:18 +00:00
import uuid
2024-02-01 07:32:20 +00:00
import os
import pytz
2024-02-01 07:32:20 +00:00
from fwd import settings
2024-01-05 07:18:16 +00:00
from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes
# from drf_spectacular.types import OpenApiString
2024-01-29 10:43:10 +00:00
import json
2024-02-01 07:32:20 +00:00
from ..exception.exceptions import InvalidException, RequiredFieldException, NotFoundException
2024-02-06 05:52:22 +00:00
from ..models import SubscriptionRequest, Report, ReportFile, SubscriptionRequestFile
2024-02-01 07:32:20 +00:00
from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list, extract_report_detail_list, IterAvg
from ..utils.file import download_from_S3, convert_date_string
2024-02-06 03:14:44 +00:00
from ..utils.redis import RedisUtils
2024-01-31 03:00:18 +00:00
from ..utils.process import string_to_boolean
2024-02-07 05:39:24 +00:00
from ..request.ReportCreationSerializer import ReportCreationSerializer
2024-02-06 03:14:44 +00:00
from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long
# Module-level Redis client, shared by the overview endpoints to read the
# cached dashboard data produced by the daily report job.
redis_client = RedisUtils()
2024-01-05 07:18:16 +00:00
class AccuracyViewSet(viewsets.ViewSet):
    """Accuracy endpoints: request listing, report creation/listing, overview
    dashboards and report-file downloads."""
    lookup_field = "username"
@extend_schema(
2024-01-31 03:00:18 +00:00
parameters=[
OpenApiParameter(
name='start_date',
location=OpenApiParameter.QUERY,
description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
type=OpenApiTypes.DATE,
default='2023-01-02T00:00:00+0700',
),
OpenApiParameter(
name='end_date',
location=OpenApiParameter.QUERY,
description='End date (YYYY-mm-DDTHH:MM:SSZ)',
type=OpenApiTypes.DATE,
default='2024-01-10T00:00:00+0700',
),
OpenApiParameter(
2024-02-21 05:46:41 +00:00
name='includes_test',
2024-01-31 03:00:18 +00:00
location=OpenApiParameter.QUERY,
description='Whether to include test record or not',
type=OpenApiTypes.BOOL,
),
OpenApiParameter(
name='is_reviewed',
location=OpenApiParameter.QUERY,
description='Which records to be query',
type=OpenApiTypes.STR,
2024-02-21 05:46:41 +00:00
enum=['reviewed', 'not_reviewed', 'all'],
),
OpenApiParameter(
name='subsidiary',
location=OpenApiParameter.QUERY,
description='Which subsidiary to be included',
type=OpenApiTypes.STR,
enum=list(settings.SUBS.keys()),
2024-01-31 03:00:18 +00:00
),
OpenApiParameter(
name='request_id',
location=OpenApiParameter.QUERY,
description='Specific request id',
type=OpenApiTypes.STR,
),
OpenApiParameter(
name='redemption_id',
location=OpenApiParameter.QUERY,
description='Specific redemption id',
type=OpenApiTypes.STR,
),
OpenApiParameter(
name='page',
location=OpenApiParameter.QUERY,
description='Page number',
type=OpenApiTypes.INT,
required=False
),
OpenApiParameter(
name='page_size',
location=OpenApiParameter.QUERY,
description='Number of items per page',
type=OpenApiTypes.INT,
required=False
),
],
responses=None, tags=['Accuracy']
2024-01-05 07:18:16 +00:00
)
@action(detail=False, url_path="request_list", methods=["GET"])
2024-01-31 03:00:18 +00:00
def get_request_list(self, request):
2024-01-05 07:18:16 +00:00
if request.method == 'GET':
start_date_str = request.GET.get('start_date')
end_date_str = request.GET.get('end_date')
page_number = int(request.GET.get('page', 1))
page_size = int(request.GET.get('page_size', 10))
request_id = request.GET.get('request_id', None)
redemption_id = request.GET.get('redemption_id', None)
is_reviewed = request.GET.get('is_reviewed', None)
2024-02-21 05:46:41 +00:00
include_test = request.GET.get('includes_test', False)
subsidiary = request.data.get("subsidiary", "all")
subsidiary = map_subsidiary_long_to_short(subsidiary)
base_query = Q()
if start_date_str or end_date_str:
try:
start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d') # We care only about day precision only
end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d')
# Round:
# end_date_str to the beginning of the next day
# start_date_str to the start of the date
start_date = timezone.make_aware(start_date)
end_date = timezone.make_aware(end_date)
2024-01-05 07:18:16 +00:00
2024-02-21 05:46:41 +00:00
start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z') # inside logic will include second precision with timezone for calculation
end_date_str = (end_date + timezone.timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%S%z')
base_query &= Q(created_at__range=(start_date, end_date))
except Exception as e:
raise InvalidException(excArgs="Date format")
2024-01-31 03:00:18 +00:00
2024-01-05 07:18:16 +00:00
if request_id:
base_query &= Q(request_id=request_id)
if redemption_id:
base_query &= Q(redemption_id=redemption_id)
base_query &= Q(is_test_request=False)
if isinstance(include_test, str):
include_test = True if include_test=="true" else False
if include_test:
# base_query = ~base_query
base_query.children = base_query.children[:-1]
elif isinstance(include_test, bool):
if include_test:
base_query = ~base_query
if isinstance(is_reviewed, str):
if is_reviewed == "reviewed":
base_query &= Q(is_reviewed=True)
2024-02-21 05:46:41 +00:00
elif is_reviewed == "not_reviewed":
2024-01-05 07:18:16 +00:00
base_query &= Q(is_reviewed=False)
elif is_reviewed == "all":
pass
2024-02-21 05:46:41 +00:00
if isinstance(subsidiary, str):
if subsidiary and subsidiary.lower().replace(" ", "")!="all":
base_query &= Q(redemption_id__startswith=subsidiary)
2024-01-05 07:18:16 +00:00
subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')
request_count = subscription_requests.count()
2024-01-05 07:18:16 +00:00
paginator = Paginator(subscription_requests, page_size)
page = paginator.get_page(page_number)
2024-01-31 03:00:18 +00:00
2024-01-05 07:18:16 +00:00
data = []
for request in page:
imeis = []
purchase_date = []
retailer = ""
try:
if request.reviewed_result is not None:
imeis = request.reviewed_result.get("imei_number", [])
purchase_date = request.reviewed_result.get("purchase_date", [])
retailer = request.reviewed_result.get("retailername", "")
elif request.feedback_result is not None :
imeis = request.feedback_result.get("imei_number", [])
purchase_date = request.feedback_result.get("purchase_date", [])
retailer = request.feedback_result.get("retailername", "")
elif request.predict_result is not None:
if request.predict_result.get("status", 404) == 200:
imeis = request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[3].get("value", [])
purchase_date = request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[2].get("value", [])
retailer = request.predict_result.get("content", {}).get("document", [])[0].get("content", [])[0].get("value", [])
except Exception as e:
print(f"[ERROR]: {e}")
print(f"[ERROR]: {request}")
data.append({
'RequestID': request.request_id,
'RedemptionID': request.redemption_id,
'IMEIs': imeis,
'Purchase Date': purchase_date,
'Retailer': retailer,
'Client Request Time (ms)': request.client_request_time,
'Server Processing Time (ms)': request.preprocessing_time + request.ai_inference_time,
'Is Reviewed': request.is_reviewed,
2024-01-26 09:37:12 +00:00
# 'Is Bad Quality': request.is_bad_image_quality,
2024-01-05 07:18:16 +00:00
'created_at': request.created_at.isoformat()
})
response = {
'subscription_requests': data,
'page': {
'number': page.number,
'total_pages': page.paginator.num_pages,
'count': page.paginator.count,
'total_requests': request_count
2024-01-05 07:18:16 +00:00
}
}
return JsonResponse(response)
2024-01-26 09:37:12 +00:00
return JsonResponse({'error': 'Invalid request method.'}, status=405)
2024-01-31 03:00:18 +00:00
@extend_schema(
2024-02-07 05:39:24 +00:00
request=ReportCreationSerializer(),
2024-01-31 03:00:18 +00:00
responses=None, tags=['Accuracy']
)
2024-02-07 05:39:24 +00:00
@action(detail=False, url_path="make_report", methods=["POST"])
2024-01-31 03:00:18 +00:00
def make_report(self, request):
2024-02-07 05:39:24 +00:00
if request.method == 'POST':
start_date_str = request.data.get('start_date')
end_date_str = request.data.get('end_date')
request_id = request.data.get('request_id', None)
redemption_id = request.data.get('redemption_id', None)
is_reviewed = request.data.get('is_reviewed', False)
include_test = request.data.get('include_test', False)
subsidiary = request.data.get("subsidiary", "all")
is_daily_report = request.data.get('is_daily_report', False)
report_overview_duration = request.data.get("report_overview_duration", "")
2024-02-06 03:14:44 +00:00
subsidiary = map_subsidiary_long_to_short(subsidiary)
if is_daily_report:
if report_overview_duration not in settings.OVERVIEW_REPORT_DURATION:
raise InvalidException(excArgs="overview duration")
end_date = timezone.now()
if report_overview_duration == "30d":
start_date = end_date - timezone.timedelta(days=30)
else:
start_date = end_date - timezone.timedelta(days=7)
start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
2024-02-07 05:39:24 +00:00
start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z') # inside logic will include second precision with timezone for calculation
2024-02-06 03:14:44 +00:00
end_date_str = end_date.strftime('%Y-%m-%dT%H:%M:%S%z')
else:
try:
2024-02-07 05:39:24 +00:00
start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d') # We care only about day precision only
end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d')
# Round:
# end_date_str to the beginning of the next day
# start_date_str to the start of the date
start_date = timezone.make_aware(start_date)
end_date = timezone.make_aware(end_date)
start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z') # inside logic will include second precision with timezone for calculation
end_date_str = (end_date + timezone.timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%S%z')
2024-02-06 03:14:44 +00:00
except ValueError:
raise InvalidException(excArgs="Date format")
2024-01-31 03:00:18 +00:00
query_set = {"start_date_str": start_date_str,
"end_date_str": end_date_str,
"request_id": request_id,
"redemption_id": redemption_id,
"is_reviewed": is_reviewed,
"include_test": include_test,
"subsidiary": subsidiary,
"is_daily_report": is_daily_report,
2024-02-06 03:14:44 +00:00
"report_overview_duration": report_overview_duration
2024-01-31 03:00:18 +00:00
}
2024-02-05 05:56:51 +00:00
# if is_daily_report:
# if (end_date-start_date) > timezone.timedelta(days=1):
# raise InvalidException(excArgs="Date range")
2024-01-31 03:00:18 +00:00
report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex
new_report: Report = Report(
report_id=report_id,
is_daily_report=is_daily_report,
subsidiary=subsidiary.lower().replace(" ", ""),
include_test=include_test,
include_reviewed=is_reviewed,
start_at=start_date,
end_at=end_date,
2024-02-01 07:32:20 +00:00
status="Processing",
2024-01-31 03:00:18 +00:00
)
new_report.save()
# Background job to calculate accuracy
shadow_report(report_id, query_set)
return JsonResponse(status=status.HTTP_200_OK, data={"report_id": report_id})
@extend_schema(
parameters=[
OpenApiParameter(
name='report_id',
location=OpenApiParameter.QUERY,
description='Specific report id',
type=OpenApiTypes.STR,
),
OpenApiParameter(
name='page',
location=OpenApiParameter.QUERY,
description='Page number',
type=OpenApiTypes.INT,
required=False
),
OpenApiParameter(
name='page_size',
location=OpenApiParameter.QUERY,
description='Number of items per page',
type=OpenApiTypes.INT,
required=False
),
],
responses=None, tags=['Accuracy']
)
@action(detail=False, url_path="report_detail_list", methods=["GET"])
def get_report_detail_list(self, request):
if request.method == 'GET':
report_id = request.GET.get('report_id', None)
page_number = int(request.GET.get('page', 1))
page_size = int(request.GET.get('page_size', 10))
report = Report.objects.filter(report_id=report_id).first()
report_files = ReportFile.objects.filter(report=report)
paginator = Paginator(report_files, page_size)
page = paginator.get_page(page_number)
2024-02-01 07:32:20 +00:00
data = extract_report_detail_list(page, in_percent=False)
2024-01-31 03:00:18 +00:00
response = {
'report_detail': data,
2024-02-06 03:14:44 +00:00
'metadata': {"subsidiary": map_subsidiary_short_to_long(report.subsidiary),
2024-02-01 07:32:20 +00:00
"start_at": report.start_at,
"end_at": report.end_at},
2024-01-31 03:00:18 +00:00
'page': {
'number': page.number,
'total_pages': page.paginator.num_pages,
'count': page.paginator.count,
}
}
return JsonResponse(response, status=200)
return JsonResponse({'error': 'Invalid request method.'}, status=405)
@extend_schema(
parameters=[
OpenApiParameter(
name='start_date',
location=OpenApiParameter.QUERY,
description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
type=OpenApiTypes.DATE,
default='2023-01-02T00:00:00+0700',
),
OpenApiParameter(
name='end_date',
location=OpenApiParameter.QUERY,
description='End date (YYYY-mm-DDTHH:MM:SSZ)',
type=OpenApiTypes.DATE,
default='2024-01-10T00:00:00+0700',
),
OpenApiParameter(
name='daily_report_only',
location=OpenApiParameter.QUERY,
description='Specific report id',
type=OpenApiTypes.BOOL,
),
OpenApiParameter(
name='page',
location=OpenApiParameter.QUERY,
description='Page number',
type=OpenApiTypes.INT,
required=False
),
OpenApiParameter(
name='page_size',
location=OpenApiParameter.QUERY,
description='Number of items per page',
type=OpenApiTypes.INT,
required=False
),
],
responses=None, tags=['Accuracy']
)
@action(detail=False, url_path="report_list", methods=["GET"])
def get_report_list(self, request):
if request.method == 'GET':
2024-02-07 06:38:04 +00:00
exclude_daily_report = request.GET.get('exclude_daily_report', True)
2024-01-31 03:00:18 +00:00
start_date_str = request.GET.get('start_date', "")
end_date_str = request.GET.get('end_date', "")
page_number = int(request.GET.get('page', 1))
page_size = int(request.GET.get('page_size', 10))
2024-02-07 06:38:04 +00:00
reports = None
2024-01-31 03:00:18 +00:00
if not start_date_str or not end_date_str:
2024-02-07 06:38:04 +00:00
reports = Report.objects
2024-01-31 03:00:18 +00:00
else:
try:
start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
except ValueError:
raise InvalidException(excArgs="Date format")
base_query = Q(created_at__range=(start_date, end_date))
2024-02-07 06:38:04 +00:00
reports = Report.objects.filter(base_query)
if exclude_daily_report:
reports = Report.objects.filter(is_daily_report=False)
reports = reports.order_by('created_at').reverse()
2024-01-31 03:00:18 +00:00
paginator = Paginator(reports, page_size)
page = paginator.get_page(page_number)
data = []
for report in page:
2024-02-06 03:14:44 +00:00
acc_keys = ["purchase_date", "retailername", "imei_number", "avg"]
acc = {}
for key in acc_keys:
fb = report.feedback_accuracy.get(key, 0) if report.feedback_accuracy else 0
rv = report.reviewed_accuracy.get(key, 0) if report.reviewed_accuracy else 0
2024-02-18 14:52:23 +00:00
acc[key] = report.combined_accuracy.get(key, 0) if report.combined_accuracy else max([fb, rv])
2024-01-31 03:00:18 +00:00
data.append({
"ID": report.id,
"Created Date": report.created_at,
2024-02-06 03:14:44 +00:00
"Start Date": report.start_at,
"End Date": report.end_at,
2024-01-31 03:00:18 +00:00
"No. Requests": report.number_request,
"Status": report.status,
2024-02-06 03:14:44 +00:00
"Purchase Date Acc": acc["purchase_date"],
"Retailer Acc": acc["retailername"],
"IMEI Acc": acc["imei_number"],
"Avg. Accuracy": acc["avg"],
2024-01-31 03:00:18 +00:00
"Avg. Client Request Time": report.average_client_time.get("avg", 0) if report.average_client_time else 0,
2024-02-01 07:32:20 +00:00
"Avg. OCR Processing Time": report.average_OCR_time.get("avg", 0) if report.average_OCR_time else 0,
2024-01-31 03:00:18 +00:00
"report_id": report.report_id,
2024-02-06 03:14:44 +00:00
"Subsidiary": map_subsidiary_short_to_long(report.subsidiary),
2024-01-31 03:00:18 +00:00
})
response = {
'report_detail': data,
'page': {
'number': page.number,
'total_pages': page.paginator.num_pages,
'count': page.paginator.count,
}
}
return JsonResponse(response, status=200)
return JsonResponse({'error': 'Invalid request method.'}, status=405)
@extend_schema(
parameters=[
OpenApiParameter(
2024-02-06 03:14:44 +00:00
name='duration',
2024-01-31 03:00:18 +00:00
location=OpenApiParameter.QUERY,
2024-02-06 03:14:44 +00:00
description='one of [30d, 7d]',
type=OpenApiTypes.STR,
default='30d',
2024-01-31 03:00:18 +00:00
),
OpenApiParameter(
name='subsidiary',
location=OpenApiParameter.QUERY,
description='Subsidiary',
type=OpenApiTypes.STR,
2024-02-06 03:14:44 +00:00
)
],
responses=None, tags=['Accuracy']
)
@action(detail=False, url_path="overview", methods=["GET"])
def overview(self, request):
if request.method == 'GET':
_subsidiary = request.GET.get('subsidiary', "ALL")
2024-02-06 03:14:44 +00:00
duration = request.GET.get('duration', "")
subsidiary = map_subsidiary_long_to_short(_subsidiary)
2024-02-06 03:14:44 +00:00
# Retrive data from Redis
key = f"{subsidiary}_{duration}"
data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", [])
response = {
'overview_data': data,
}
return JsonResponse(response, status=200)
return JsonResponse({'error': 'Invalid request method.'}, status=405)
@extend_schema(
parameters=[
2024-01-31 03:00:18 +00:00
OpenApiParameter(
2024-02-06 03:14:44 +00:00
name='duration',
2024-01-31 03:00:18 +00:00
location=OpenApiParameter.QUERY,
2024-02-06 03:14:44 +00:00
description='one of [30d, 7d]',
type=OpenApiTypes.STR,
default='30d',
2024-01-31 03:00:18 +00:00
),
OpenApiParameter(
2024-02-06 03:14:44 +00:00
name='subsidiary',
2024-01-31 03:00:18 +00:00
location=OpenApiParameter.QUERY,
2024-02-06 03:14:44 +00:00
description='Subsidiary',
type=OpenApiTypes.STR,
)
2024-01-31 03:00:18 +00:00
],
responses=None, tags=['Accuracy']
)
2024-02-06 03:14:44 +00:00
@action(detail=False, url_path="overview_download_file", methods=["GET"])
def overview_download_file(self, request):
2024-01-31 03:00:18 +00:00
if request.method == 'GET':
2024-02-06 03:14:44 +00:00
subsidiary = request.GET.get('subsidiary', "ALL")
duration = request.GET.get('duration', "")
2024-01-31 03:00:18 +00:00
2024-02-06 03:14:44 +00:00
subsidiary = map_subsidiary_long_to_short(subsidiary)
2024-02-01 08:14:05 +00:00
2024-02-06 03:14:44 +00:00
s3_key = f"{subsidiary}_{duration}.xlsx"
2024-01-31 03:00:18 +00:00
2024-02-06 03:14:44 +00:00
tmp_file = "/tmp/" + s3_key
os.makedirs("/tmp", exist_ok=True)
download_from_S3("report/" + settings.OVERVIEW_REPORT_ROOT + "/" + s3_key, tmp_file)
file = open(tmp_file, 'rb')
response = FileResponse(file, status=200)
2024-01-31 03:00:18 +00:00
2024-02-06 03:14:44 +00:00
# Set the content type and content disposition headers
response['Content-Type'] = 'application/octet-stream'
response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file))
return response
2024-01-31 03:00:18 +00:00
return JsonResponse({'error': 'Invalid request method.'}, status=405)
2024-02-01 07:32:20 +00:00
@extend_schema(
parameters=[],
responses=None, tags=['Accuracy']
)
@action(detail=False, url_path=r"get_report_file/(?P<report_id>[\w\-]+)", methods=["GET"])
def get_report_file(self, request, report_id):
if request.method == 'GET':
# report_id = request.GET.get('report_id', None)
if not report_id:
raise RequiredFieldException(excArgs="report_id1")
report_num = Report.objects.filter(report_id=report_id).count()
if report_num == 0:
raise NotFoundException(excArgs=f"report: {report_id}")
report = Report.objects.filter(report_id=report_id).first()
# download from s3 to local
target_timezone = pytz.timezone(settings.TIME_ZONE)
tmp_file = "/tmp/" + report.subsidiary + "_" + report.start_at.astimezone(target_timezone).strftime("%Y%m%d") + "_" + report.end_at.astimezone(target_timezone).strftime("%Y%m%d") + "_created_on_" + report.created_at.astimezone(target_timezone).strftime("%Y%m%d") + ".xlsx"
2024-02-01 07:32:20 +00:00
os.makedirs("/tmp", exist_ok=True)
if not report.S3_file_name:
raise NotFoundException(excArgs="S3 file name")
download_from_S3(report.S3_file_name, tmp_file)
file = open(tmp_file, 'rb')
response = FileResponse(file, status=200)
# Set the content type and content disposition headers
response['Content-Type'] = 'application/octet-stream'
response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file))
return response
2024-01-26 09:37:12 +00:00
2024-02-01 07:32:20 +00:00
return JsonResponse({'error': 'Invalid request method.'}, status=405)
2024-01-26 09:37:12 +00:00
class RequestViewSet(viewsets.ViewSet):
    """Read a single subscription request (GET) and submit its reviewed result (POST)."""
    lookup_field = "username"
2024-02-01 07:32:20 +00:00
@extend_schema(
request={
2024-01-26 09:37:12 +00:00
'multipart/form-data': {
'type': 'object',
'properties': {
'reviewed_result': {
'type': 'string',
2024-02-01 07:32:20 +00:00
'default': '''{"request_id": "Sample request_id", "imei_number": ["sample_imei1", "sample_imei2"], "retailername": "Sample Retailer", "purchase_date": "01/01/1970", "sold_to_party": "Sample party"}''',
2024-01-26 09:37:12 +00:00
},
2024-02-01 07:32:20 +00:00
},
2024-01-26 09:37:12 +00:00
},
2024-02-01 07:32:20 +00:00
},
responses=None,
tags=['Request']
2024-01-26 09:37:12 +00:00
)
@action(detail=False, url_path=r"request/(?P<request_id>[\w\-]+)", methods=["GET", "POST"])
def get_subscription_request(self, request, request_id=None):
if request.method == 'GET':
base_query = Q(request_id=request_id)
2024-02-01 08:14:05 +00:00
subscription_request = SubscriptionRequest.objects.filter(base_query)
if subscription_request.count() == 0:
raise NotFoundException(excArgs=request_id)
subscription_request = subscription_request.first()
2024-01-26 09:37:12 +00:00
data = []
files = []
file_query = Q(request=request_id)
subscription_request_files = SubscriptionRequestFile.objects.filter(file_query)
for subscription_request_file in subscription_request_files:
files.append({
'File Name': subscription_request_file.file_name,
'File Path': subscription_request_file.file_path,
'File Category': subscription_request_file.file_category,
'Origin_Name': subscription_request_file.origin_name,
'Is Bad Image Quality': subscription_request_file.is_bad_image_quality,
'Doc Type': subscription_request_file.doc_type,
'Processing Time (ms)': subscription_request_file.processing_time,
'Reason': subscription_request_file.reason,
'Counter Measures': subscription_request_file.counter_measures,
'Predicted Result': subscription_request_file.predict_result,
'Feedback Result': subscription_request_file.feedback_result,
'Reviewed Result': subscription_request_file.reviewed_result,
'Feedback Accuracy': subscription_request_file.feedback_accuracy,
'Reviewed Accuracy': subscription_request_file.reviewed_accuracy,
'Created At': subscription_request_file.created_at.isoformat(),
'Updated At': subscription_request_file.updated_at.isoformat()
})
2024-01-26 09:37:12 +00:00
data.append({
2024-02-01 08:14:05 +00:00
'Document Type': subscription_request.doc_type,
2024-01-26 09:37:12 +00:00
'RequestID': subscription_request.request_id,
'RedemptionID': subscription_request.redemption_id,
2024-02-01 08:14:05 +00:00
'Process Type': subscription_request.process_type,
'Provider Code': subscription_request.provider_code,
'Status': subscription_request.status,
'Files': files,
'Reviewed Result': subscription_request.reviewed_result,
'Feedback Result': subscription_request.feedback_result,
'Predicted Result': subscription_request.predict_result,
2024-02-01 08:14:05 +00:00
'Is Test Request': subscription_request.is_test_request,
2024-01-26 09:37:12 +00:00
'Client Request Time (ms)': subscription_request.client_request_time,
'Server Processing Time (ms)': subscription_request.preprocessing_time + subscription_request.ai_inference_time,
'Is Reviewed': subscription_request.is_reviewed,
'Subscription': subscription_request.subscription,
'Feedback Accuracy': subscription_request.feedback_accuracy,
'Reviewed Accuracy': subscription_request.reviewed_accuracy,
'Created At': subscription_request.created_at.isoformat(),
'Updated At': subscription_request.updated_at.isoformat()
2024-01-26 09:37:12 +00:00
})
response = {
'subscription_requests': data
}
return JsonResponse(response)
elif request.method == 'POST':
data = request.data
base_query = Q(request_id=request_id)
2024-02-01 08:14:05 +00:00
subscription_request = SubscriptionRequest.objects.filter(base_query)
if subscription_request.count() == 0:
raise NotFoundException(excArgs=request_id)
subscription_request = subscription_request.first()
2024-01-26 09:37:12 +00:00
2024-02-06 05:52:22 +00:00
file_query = Q(request=request_id)
subscription_request_files = SubscriptionRequestFile.objects.filter(file_query)
2024-02-19 02:18:43 +00:00
reviewed_result = json.loads(data["reviewed_result"])
2024-02-06 05:52:22 +00:00
for subscription_request_file in subscription_request_files:
if subscription_request_file.doc_type == 'invoice':
subscription_request_file.reviewed_result = reviewed_result
subscription_request_file.reviewed_result['imei_number'] = []
elif subscription_request_file.doc_type == 'imei':
subscription_request_file.reviewed_result = {"retailername": None, "sold_to_party": None, "purchase_date": [], "imei_number": [reviewed_result["imei_number"][subscription_request_file.index_in_request]]}
subscription_request_file.save()
2024-01-29 10:43:10 +00:00
for field in ['retailername', 'sold_to_party', 'purchase_date', 'imei_number']:
if not field in reviewed_result.keys():
raise RequiredFieldException(excArgs=f'reviewed_result.{field}')
subscription_request.reviewed_result = reviewed_result
subscription_request.reviewed_result['request_id'] = request_id
subscription_request.is_reviewed = True
subscription_request.save()
2024-01-26 09:37:12 +00:00
return JsonResponse({'message': 'success.'}, status=200)
else:
2024-01-31 03:00:18 +00:00
return JsonResponse({'error': 'Invalid request method.'}, status=405)
class RequestImageViewSet(viewsets.ViewSet):
    """Serve the raw uploaded images belonging to a subscription request."""
    lookup_field = "username"

    @action(detail=False, url_path=r"request_image/(?P<request_id>[\w\-]+)/(?P<image_name>[\w\-]+)", methods=["GET"])
    def get_request_image(self, request, request_id=None, image_name=None):
        """Stream one image of a request, fetched from S3 into a temp file.

        Raises NotFoundException when the request or the named image is unknown.
        """
        if request.method == 'GET':
            base_query = Q(request_id=request_id)
            subscription_request = SubscriptionRequest.objects.filter(base_query)
            if subscription_request.count() == 0:
                raise NotFoundException(excArgs=request_id)
            subscription_request = subscription_request.first()

            file_query = Q(request=request_id)
            subscription_request_files = SubscriptionRequestFile.objects.filter(file_query)
            for subscription_request_file in subscription_request_files:
                if image_name == subscription_request_file.file_name:
                    # BUG FIX: tmp_file was hard-coded to None, so the download
                    # had no target and the subsequent `if tmp_file is not None`
                    # check always failed -- every hit raised NotFoundException.
                    # Download into a real temp path and stream it back instead.
                    tmp_file = "/tmp/" + image_name
                    os.makedirs("/tmp", exist_ok=True)
                    download_from_S3("ocr-sds/ocr_invoice/" + request_id + "/" + image_name, tmp_file)
                    file = open(tmp_file, 'rb')
                    return FileResponse(file, status=200)
            raise NotFoundException(excArgs=image_name)
        else:
            return JsonResponse({'error': 'Invalid request method.'}, status=405)