from rest_framework import status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from django.core.paginator import Paginator
from django.http import JsonResponse, FileResponse, HttpResponse
from django.utils import timezone
from django.db.models import Q
import uuid
import os
import copy
import pytz
from fwd import settings
from drf_spectacular.utils import extend_schema, OpenApiParameter, OpenApiTypes
# from drf_spectacular.types import OpenApiString
import json
from ..exception.exceptions import InvalidException, RequiredFieldException, NotFoundException
from ..models import SubscriptionRequest, Report, ReportFile, SubscriptionRequestFile
from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list, extract_report_detail_list, IterAvg
from ..utils.file import download_from_S3, dict2xlsx, save_report_to_S3, build_S3_url
from ..utils.redis import RedisUtils
from ..utils.process import string_to_boolean
from ..utils.cache import get_cache, set_cache
from fwd_api.constant.common import FileCategory
from ..request.ReportCreationSerializer import ReportCreationSerializer
from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long
from ..utils.report import aggregate_overview
from fwd_api.utils.accuracy import predict_result_to_ready
import copy

# Module-level Redis helper shared by the report/overview caching endpoints below.
redis_client = RedisUtils()

class AccuracyViewSet(viewsets.ViewSet):
    """Accuracy endpoints: request listing, report creation/detail/listing,
    cached overview data and report/overview file downloads."""
    lookup_field = "username"

    @extend_schema(
    parameters=[
        OpenApiParameter(
            name='start_date',
            location=OpenApiParameter.QUERY,
            description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
            type=OpenApiTypes.DATE,
            default='2023-01-02T00:00:00+0700',
        ),
        OpenApiParameter(
            name='end_date',
            location=OpenApiParameter.QUERY,
            description='End date (YYYY-mm-DDTHH:MM:SSZ)',
            type=OpenApiTypes.DATE,
            default='2024-01-10T00:00:00+0700',
        ),
        OpenApiParameter(
            name='includes_test',
            location=OpenApiParameter.QUERY,
            description='Whether to include test record or not',
            type=OpenApiTypes.BOOL,
        ),
        OpenApiParameter(
            name='is_reviewed',
            location=OpenApiParameter.QUERY,
            description='Which records to be query',
            type=OpenApiTypes.STR,
            enum=['reviewed', 'not_reviewed', 'all'],
        ),
        OpenApiParameter(
            name='subsidiary',
            location=OpenApiParameter.QUERY,
            description='Which subsidiary to be included',
            type=OpenApiTypes.STR,
            enum=list(settings.SUBS.keys()),
        ),
        OpenApiParameter(
            name='request_id',
            location=OpenApiParameter.QUERY,
            description='Specific request id',
            type=OpenApiTypes.STR,
        ),
        OpenApiParameter(
            name='redemption_id',
            location=OpenApiParameter.QUERY,
            description='Specific redemption id',
            type=OpenApiTypes.STR,
        ),
        OpenApiParameter(
            name='page',
            location=OpenApiParameter.QUERY,
            description='Page number',
            type=OpenApiTypes.INT,
            required=False
        ),
        OpenApiParameter(
            name='page_size',
            location=OpenApiParameter.QUERY,
            description='Number of items per page',
            type=OpenApiTypes.INT,
            required=False
        ),
    ],
    responses=None, tags=['Accuracy']
    )
    @action(detail=False, url_path="request_list", methods=["GET"])
    def get_request_list(self, request):
        """Return a paginated list of successfully processed (status=200) requests.

        Supports filtering by a day-precision date range, request/redemption id,
        review state, test-record inclusion and subsidiary prefix. For each row
        the best available extraction (reviewed > feedback > predict) is
        summarised into the IMEIs / Purchase Date / Retailer columns.

        Raises:
            InvalidException: when the supplied dates cannot be parsed.
        """
        if request.method != 'GET':
            return JsonResponse({'error': 'Invalid request method.'}, status=405)

        start_date_str = request.GET.get('start_date')
        end_date_str = request.GET.get('end_date')
        page_number = int(request.GET.get('page', 1))
        page_size = int(request.GET.get('page_size', 10))
        request_id = request.GET.get('request_id', None)
        redemption_id = request.GET.get('redemption_id', None)
        is_reviewed = request.GET.get('is_reviewed', None)
        include_test = request.GET.get('includes_test', False)
        # BUGFIX: this is a GET endpoint, so the subsidiary filter must be read
        # from the query string; request.data is empty for GET requests, which
        # made the old code silently fall back to "all" regardless of input.
        subsidiary = request.GET.get('subsidiary', "all")
        subsidiary = map_subsidiary_long_to_short(subsidiary)

        base_query = Q(status=200)
        if start_date_str or end_date_str:
            try:
                # Day precision only: the range is rounded to the start of the
                # start day and the beginning of the day after the end day.
                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d')
                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d')
                end_date = end_date + timezone.timedelta(days=1)
                start_date = timezone.make_aware(start_date)
                end_date = timezone.make_aware(end_date)
                base_query &= Q(created_at__range=(start_date, end_date))
            except Exception as e:
                # Also covers strptime(None, ...) when only one bound is given.
                raise InvalidException(excArgs="Date format")

        if request_id:
            base_query &= Q(request_id=request_id)
        if redemption_id:
            base_query &= Q(redemption_id=redemption_id)
        # Query-string booleans arrive as the strings "true"/"false".
        if isinstance(include_test, str):
            include_test = include_test == "true"
        if not include_test:
            base_query &= Q(is_test_request=False)
        if is_reviewed == "reviewed":
            base_query &= Q(is_reviewed=True)
        elif is_reviewed == "not_reviewed":
            base_query &= Q(is_reviewed=False)
        # "all" (or None) adds no review filter.
        if isinstance(subsidiary, str):
            # Redemption ids are prefixed with the short subsidiary code.
            if subsidiary and subsidiary.lower().replace(" ", "") != "all":
                base_query &= Q(redemption_id__startswith=subsidiary)

        subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')
        request_count = subscription_requests.count()

        paginator = Paginator(subscription_requests, page_size)
        page = paginator.get_page(page_number)

        data = []
        # NOTE: the loop variable used to be named `request`, shadowing the
        # HTTP request argument; renamed to avoid subtle bugs.
        for sub_request in page:
            imeis = []
            purchase_date = []
            retailer = ""
            try:
                if sub_request.reviewed_result is not None:
                    imeis = sub_request.reviewed_result.get("imei_number", [])
                    purchase_date = sub_request.reviewed_result.get("purchase_date", [])
                    retailer = sub_request.reviewed_result.get("retailername", "")
                elif sub_request.feedback_result is not None:
                    imeis = sub_request.feedback_result.get("imei_number", [])
                    purchase_date = sub_request.feedback_result.get("purchase_date", [])
                    retailer = sub_request.feedback_result.get("retailername", "")
                elif sub_request.predict_result is not None:
                    if sub_request.predict_result.get("status", 404) == 200:
                        # Positional layout of the OCR payload (assumed from the
                        # original indices): content[0]=retailer,
                        # content[2]=purchase date, content[3]=IMEIs.
                        content = sub_request.predict_result.get("content", {}).get("document", [])[0].get("content", [])
                        imeis = content[3].get("value", [])
                        purchase_date = content[2].get("value", [])
                        retailer = content[0].get("value", [])
            except Exception as e:
                # Malformed payloads are tolerated; the row is emitted with
                # empty extraction fields.
                print(f"[ERROR]: {e}")
                print(f"[ERROR]: {sub_request}")
            data.append({
                'RequestID': sub_request.request_id,
                'RedemptionID': sub_request.redemption_id,
                'IMEIs': imeis,
                'Purchase Date': purchase_date,
                'Retailer': retailer,
                'Client Request Time (ms)': sub_request.client_request_time,
                'Server Processing Time (ms)': sub_request.preprocessing_time + sub_request.ai_inference_time,
                'Is Reviewed': sub_request.is_reviewed,
                'created_at': sub_request.created_at.isoformat()
            })

        response = {
            'subscription_requests': data,
            'page': {
                'number': page.number,
                'total_pages': page.paginator.num_pages,
                'count': page.paginator.count,
                'total_requests': request_count
            }
        }
        return JsonResponse(response)
        
    @extend_schema(
    request=ReportCreationSerializer(),
    responses=None, tags=['Accuracy']
    )
    @action(detail=False, url_path="make_report", methods=["POST"])
    def make_report(self, request):
        """Create a Report row and launch the background accuracy computation.

        Two modes:
          * daily report: the range is derived from `report_overview_duration`
            ("30d"/"7d"), ending now and starting at midnight N days back;
          * ad-hoc report: explicit `start_date`/`end_date` (YYYY-mm-dd),
            rounded to whole days.

        Returns the generated report_id; progress is tracked on the Report row
        (status starts as "Processing").

        Raises:
            InvalidException: bad overview duration or unparsable/missing dates.
        """
        # Guard clause: previously a non-POST call hit a NameError on
        # `report_id` below (unreachable via the router, but latent).
        if request.method != 'POST':
            return JsonResponse({'error': 'Invalid request method.'}, status=405)

        start_date_str = request.data.get('start_date')
        end_date_str = request.data.get('end_date')
        request_id = request.data.get('request_id', None)
        redemption_id = request.data.get('redemption_id', None)
        is_reviewed = request.data.get('is_reviewed', False)
        include_test = request.data.get('include_test', False)
        subsidiary = request.data.get("subsidiary", "all")
        is_daily_report = request.data.get('is_daily_report', False)
        report_overview_duration = request.data.get("report_overview_duration", "")
        subsidiary = map_subsidiary_long_to_short(subsidiary)

        if is_daily_report:
            if report_overview_duration not in settings.OVERVIEW_REPORT_DURATION:
                raise InvalidException(excArgs="overview duration")
            end_date = timezone.now()
            if report_overview_duration == "30d":
                start_date = end_date - timezone.timedelta(days=30)
            else:
                start_date = end_date - timezone.timedelta(days=7)
            start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
            # Second precision with timezone; consumed by the background job.
            start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
            end_date_str = end_date.strftime('%Y-%m-%dT%H:%M:%S%z')
        else:
            try:
                # Day precision only; the end bound is pushed to the start of
                # the next day so the range includes the requested end date.
                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d')
                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%d')
                start_date = timezone.make_aware(start_date)
                end_date = timezone.make_aware(end_date)
                start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
                end_date_str = (end_date + timezone.timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%S%z')
            except (TypeError, ValueError):
                # BUGFIX: strptime raises TypeError (not ValueError) when a
                # date is missing (None); both now map to a clean 4xx instead
                # of an unhandled 500.
                raise InvalidException(excArgs="Date format")

        # Parameters forwarded verbatim to the background accuracy job.
        query_set = {"start_date_str": start_date_str,
                     "end_date_str": end_date_str,
                     "request_id": request_id,
                     "redemption_id": redemption_id,
                     "is_reviewed": is_reviewed,
                     "include_test": include_test,
                     "subsidiary": subsidiary,
                     "is_daily_report": is_daily_report,
                     "report_overview_duration": report_overview_duration
                     }

        report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex
        new_report: Report = Report(
            report_id=report_id,
            is_daily_report=is_daily_report,
            subsidiary=subsidiary.lower().replace(" ", ""),
            include_test=include_test,
            include_reviewed=is_reviewed,
            start_at=start_date,
            end_at=end_date,
            status="Processing",
        )
        new_report.save()
        # Background job computes the accuracy numbers for this report.
        shadow_report(report_id, query_set)

        return JsonResponse(status=status.HTTP_200_OK, data={"report_id": report_id})

    @extend_schema(
    parameters=[
        OpenApiParameter(
            name='report_id',
            location=OpenApiParameter.QUERY,
            description='Specific report id',
            type=OpenApiTypes.STR,
        ),
        OpenApiParameter(
            name='page',
            location=OpenApiParameter.QUERY,
            description='Page number',
            type=OpenApiTypes.INT,
            required=False
        ),
        OpenApiParameter(
            name='page_size',
            location=OpenApiParameter.QUERY,
            description='Number of items per page',
            type=OpenApiTypes.INT,
            required=False
        ),
    ],
    responses=None, tags=['Accuracy']
    )
    @action(detail=False, url_path="report_detail_list", methods=["GET"])
    def get_report_detail_list(self, request):
        """Return the paginated per-file detail rows of one report plus its metadata.

        Raises:
            NotFoundException: when `report_id` does not match any report.
        """
        if request.method != 'GET':
            return JsonResponse({'error': 'Invalid request method.'}, status=405)

        report_id = request.GET.get('report_id', None)
        page_number = int(request.GET.get('page', 1))
        page_size = int(request.GET.get('page_size', 10))

        report = Report.objects.filter(report_id=report_id).first()
        # BUGFIX: an unknown report_id used to crash with AttributeError (500)
        # when reading report.subsidiary below; return a clean not-found error.
        if report is None:
            raise NotFoundException(excArgs=f"report: {report_id}")
        report_files = ReportFile.objects.filter(report=report)

        paginator = Paginator(report_files, page_size)
        page = paginator.get_page(page_number)

        # Raw (non-percent) values; the UI scales them if needed.
        data = extract_report_detail_list(page, in_percent=False)

        response = {
            'report_detail': data,
            'metadata': {"subsidiary": map_subsidiary_short_to_long(report.subsidiary),
                         "start_at": report.start_at,
                         "end_at": report.end_at},
            'page': {
                'number': page.number,
                'total_pages': page.paginator.num_pages,
                'count': page.paginator.count,
            }
        }
        return JsonResponse(response, status=200)

    @extend_schema(
    parameters=[
        OpenApiParameter(
            name='start_date',
            location=OpenApiParameter.QUERY,
            description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
            type=OpenApiTypes.DATE,
            default='2024-01-02T00:00:00+0700',
        ),
        OpenApiParameter(
            name='end_date',
            location=OpenApiParameter.QUERY,
            description='End date (YYYY-mm-DDTHH:MM:SSZ)',
            type=OpenApiTypes.DATE,
            default='2024-01-10T00:00:00+0700',
        ),
        OpenApiParameter(
            name='daily_report_only',
            location=OpenApiParameter.QUERY,
            description='Specific report id',
            type=OpenApiTypes.BOOL,
        ),
        OpenApiParameter(
            name='page',
            location=OpenApiParameter.QUERY,
            description='Page number',
            type=OpenApiTypes.INT,
            required=False
        ),
        OpenApiParameter(
            name='page_size',
            location=OpenApiParameter.QUERY,
            description='Number of items per page',
            type=OpenApiTypes.INT,
            required=False
        ),
    ],
    responses=None, tags=['Accuracy']
    )
    @action(detail=False, url_path="report_list", methods=["GET"])
    def get_report_list(self, request):
        """Return paginated report summaries, newest first, with headline accuracies.

        Optional filters: a creation-date range (both bounds required to take
        effect) and `exclude_daily_report` (default True).

        Raises:
            InvalidException: when the supplied dates cannot be parsed.
        """
        if request.method != 'GET':
            return JsonResponse({'error': 'Invalid request method.'}, status=405)

        exclude_daily_report = request.GET.get('exclude_daily_report', True)
        # BUGFIX: query-string values are strings, so "false" used to be
        # truthy and daily reports were always excluded; parse explicitly
        # (the default when the parameter is absent remains True).
        if isinstance(exclude_daily_report, str):
            exclude_daily_report = exclude_daily_report.lower() == "true"
        start_date_str = request.GET.get('start_date', "")
        end_date_str = request.GET.get('end_date', "")
        page_number = int(request.GET.get('page', 1))
        page_size = int(request.GET.get('page_size', 10))

        if not start_date_str or not end_date_str:
            reports = Report.objects
        else:
            try:
                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
            except ValueError:
                raise InvalidException(excArgs="Date format")
            reports = Report.objects.filter(Q(created_at__range=(start_date, end_date)))

        if exclude_daily_report:
            # BUGFIX: this used to restart from Report.objects, silently
            # discarding the date filter built above; chain on it instead.
            reports = reports.filter(is_daily_report=False)
        reports = reports.order_by('created_at').reverse()

        paginator = Paginator(reports, page_size)
        page = paginator.get_page(page_number)

        data = []
        for report in page:
            # Per field, prefer the combined accuracy when present, else the
            # better of the feedback vs reviewed accuracies.
            acc_keys = ["purchase_date", "retailername", "imei_number", "avg"]
            acc = {}
            for key in acc_keys:
                fb = report.feedback_accuracy.get(key, 0) if report.feedback_accuracy else 0
                rv = report.reviewed_accuracy.get(key, 0) if report.reviewed_accuracy else 0
                acc[key] = report.combined_accuracy.get(key, 0) if report.combined_accuracy else max([fb, rv])
            data.append({
                "ID": report.id,
                "Created Date": report.created_at,
                "Start Date": report.start_at,
                "End Date": report.end_at,
                "No. Requests": report.number_request,
                "Status": report.status,
                "Purchase Date Acc": acc["purchase_date"],
                "Retailer Acc": acc["retailername"],
                "IMEI Acc": acc["imei_number"],
                "Avg. Accuracy": acc["avg"],
                "Avg. Client Request Time": report.average_client_time.get("avg", 0) if report.average_client_time else 0,
                "Avg. OCR Processing Time": report.average_OCR_time.get("avg", 0) if report.average_OCR_time else 0,
                "report_id": report.report_id,
                "Subsidiary": map_subsidiary_short_to_long(report.subsidiary),
            })

        response = {
            'report_detail': data,
            'page': {
                'number': page.number,
                'total_pages': page.paginator.num_pages,
                'count': page.paginator.count,
            }
        }
        return JsonResponse(response, status=200)

    @extend_schema(
    parameters=[
        OpenApiParameter(
            name='duration',
            location=OpenApiParameter.QUERY,
            description='one of [30d, 7d]',
            type=OpenApiTypes.STR,
            default='30d',
        ),
        OpenApiParameter(
            name='subsidiary',
            location=OpenApiParameter.QUERY,
            description='Subsidiary',
            type=OpenApiTypes.STR,
        )
    ],
    responses=None, tags=['Accuracy']
    )
    @action(detail=False, url_path="overview_sumary", methods=["GET"])
    def overview_sumary(self, request):
        """Serve the cached overview rows for one subsidiary/duration pair.

        Pure cache read: the rows are looked up under the key
        "<short_subsidiary>_<duration>".
        """
        if request.method != 'GET':
            return JsonResponse({'error': 'Invalid request method.'}, status=405)

        duration = request.GET.get('duration', "")
        sub_code = map_subsidiary_long_to_short(request.GET.get('subsidiary', "ALL"))

        cache_key = f"{sub_code}_{duration}"
        cached_rows = get_cache(cache_key).get("data", [])
        return JsonResponse({'overview_data': cached_rows}, status=200)

    @extend_schema(
    parameters=[
        OpenApiParameter(
            name='duration',
            location=OpenApiParameter.QUERY,
            description='one of [30d, 7d]',
            type=OpenApiTypes.STR,
            default='30d',
        ),
        OpenApiParameter(
            name='subsidiary',
            location=OpenApiParameter.QUERY,
            description='Subsidiary',
            type=OpenApiTypes.STR,
        )
    ],
    responses=None, tags=['Accuracy']
    )
    @action(detail=False, url_path="overview", methods=["GET"])
    def overview(self, request):
        """Return overview rows for a subsidiary/duration.

        For a single subsidiary this is a plain cache read. For "ALL" it
        aggregates every subsidiary's cached rows, rebuilds the subtotal rows,
        writes an xlsx snapshot under MEDIA_ROOT and uploads it to S3 (served
        later by `overview_download_file`).
        """
        if request.method == 'GET':
            _subsidiary = request.GET.get('subsidiary', "ALL")
            duration = request.GET.get('duration', "")

            subsidiary = map_subsidiary_long_to_short(_subsidiary)
            
            if _subsidiary == "ALL":
                # aggregate_overview from subsibdiaries
                subsidiaries_to_include = list(settings.SUBS.values())
                subsidiaries_to_include.remove("all")
                # subsidiaries_to_include.remove("seao")
                subsidiary_overview_reports = []
                for sub in subsidiaries_to_include:
                    key =  f"{sub}_{duration}"
                    try:
                        this_overview = get_cache(key).get("data", [])
                        # Rows with subs == "+" look like per-month subtotal
                        # rows. Non-seao subtotals are dropped here; seao's are
                        # re-tagged from "Subtotal (YYYY-MM)" to "YYYY-MM-32" —
                        # presumably a sort key sorting after day 31, restored
                        # to display form after aggregation below. TODO confirm
                        # against aggregate_overview's sorting.
                        if sub != "seao":
                            this_overview = [d for d in this_overview if d.get("subs") != "+"]
                        else:
                            for item in this_overview:
                                if item.get("subs") == "+":
                                    item["extraction_date"] = item["extraction_date"].replace("Subtotal ", "").replace("(", "").replace(")", "") + "-32"
                        subsidiary_overview_reports += this_overview

                    except Exception as e:
                        # Best-effort: a missing/corrupt cache entry for one
                        # subsidiary must not break the whole overview.
                        print(f"[WARM]: Unable to retrive data {key} from Redis, skipping...")
                data = aggregate_overview(subsidiary_overview_reports)
                for item in data:
                    if item.get("subs") == "+":
                        # Restore the "Subtotal (...)" display form (see above).
                        item["extraction_date"] = "Subtotal (" + item["extraction_date"].replace("-32", "") + ")"
                # Do the saving process
                # Scale ratios to percentages on a deep copy only, so the JSON
                # response keeps the raw cached values while the xlsx export
                # gets 0-100 numbers.
                report_fine_data = copy.deepcopy(data)
                for i, dat in enumerate(report_fine_data):
                    # All keys containing "accuracy" hold per-field ratio dicts;
                    # "images_quality" holds counts plus *_percent ratios.
                    keys = [x for x in list(dat.keys()) if "accuracy" in x.lower()]
                    keys_percent = "images_quality"
                    for x_key in report_fine_data[i][keys_percent].keys():
                        if "percent" not in x_key:
                            continue
                        report_fine_data[i][keys_percent][x_key] = report_fine_data[i][keys_percent][x_key]*100
                    for key in keys:
                        if report_fine_data[i][key]:
                            for x_key in report_fine_data[i][key].keys():
                                report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key]*100
                overview_filename = _subsidiary + "_" + duration + ".xlsx"
                data_workbook = dict2xlsx(report_fine_data, _type='report')
                
                folder_path = os.path.join(settings.MEDIA_ROOT, "report", settings.OVERVIEW_REPORT_ROOT)
                os.makedirs(folder_path, exist_ok = True)
                file_path = os.path.join(folder_path, overview_filename)
                data_workbook.save(file_path)

                # Upload the snapshot so it can be downloaded later; the
                # returned S3 key is currently unused.
                s3_key=save_report_to_S3(None, file_path)
                # redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data)) 
            else:
                # Retrive data from Redis
                key =  f"{subsidiary}_{duration}"
                data = get_cache(key).get("data", [])
            response = {
                'overview_data': data,
            }
            return JsonResponse(response, status=200)
        return JsonResponse({'error': 'Invalid request method.'}, status=405)
    
    @extend_schema(
    parameters=[
        OpenApiParameter(
            name='duration',
            location=OpenApiParameter.QUERY,
            description='one of [30d, 7d]',
            type=OpenApiTypes.STR,
            default='30d',
        ),
        OpenApiParameter(
            name='subsidiary',
            location=OpenApiParameter.QUERY,
            description='Subsidiary',
            type=OpenApiTypes.STR,
        )
    ],
    responses=None, tags=['Accuracy']
    )
    @action(detail=False, url_path="overview_download_file", methods=["GET"])
    def overview_download_file(self, request):
        """Download the overview xlsx for a subsidiary/duration from S3.

        Fetches "report/<OVERVIEW_REPORT_ROOT>/<sub>_<duration>.xlsx" into
        /tmp and streams it back as an attachment.
        """
        if request.method != 'GET':
            return JsonResponse({'error': 'Invalid request method.'}, status=405)

        duration = request.GET.get('duration', "")
        subsidiary = map_subsidiary_long_to_short(request.GET.get('subsidiary', "ALL"))

        s3_key = f"{subsidiary}_{duration}.xlsx"
        local_path = "/tmp/" + s3_key
        os.makedirs("/tmp", exist_ok=True)
        download_from_S3("report/" + settings.OVERVIEW_REPORT_ROOT + "/" + s3_key, local_path)

        # FileResponse takes ownership of the handle and closes it when done.
        response = FileResponse(open(local_path, 'rb'), status=200)
        response['Content-Type'] = 'application/octet-stream'
        response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(local_path))
        return response

    @extend_schema(
    parameters=[],
    responses=None, tags=['Accuracy']
    )
    @action(detail=False, url_path=r"get_report_file/(?P<report_id>[\w\-]+)", methods=["GET"])
    def get_report_file(self, request, report_id):
        """Download a generated report workbook from S3 by report id.

        The attachment filename encodes subsidiary, report range and creation
        date in the local TIME_ZONE.

        Raises:
            RequiredFieldException: when report_id is empty.
            NotFoundException: when the report or its S3 file name is missing.
        """
        if request.method != 'GET':
            return JsonResponse({'error': 'Invalid request method.'}, status=405)

        if not report_id:
            raise RequiredFieldException(excArgs="report_id1")
        # Single query instead of the previous count()-then-first() pair.
        report = Report.objects.filter(report_id=report_id).first()
        if report is None:
            raise NotFoundException(excArgs=f"report: {report_id}")

        # Render dates in the configured local timezone for the filename.
        target_timezone = pytz.timezone(settings.TIME_ZONE)
        tmp_file = ("/tmp/" + report.subsidiary
                    + "_" + report.start_at.astimezone(target_timezone).strftime("%Y%m%d")
                    + "_" + report.end_at.astimezone(target_timezone).strftime("%Y%m%d")
                    + "_created_on_" + report.created_at.astimezone(target_timezone).strftime("%Y%m%d")
                    + ".xlsx")
        os.makedirs("/tmp", exist_ok=True)
        if not report.S3_file_name:
            raise NotFoundException(excArgs="S3 file name")
        download_from_S3(report.S3_file_name, tmp_file)

        # FileResponse takes ownership of the handle and closes it when done.
        response = FileResponse(open(tmp_file, 'rb'), status=200)
        response['Content-Type'] = 'application/octet-stream'
        response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file))
        return response

    @extend_schema(
        request={
            'multipart/form-data': {
                'type': 'object',
                'properties': {
                    'reviewed_result': {
                        'type': 'string',
                        'default': '''{"request_id": "Sample request_id", "imei_number": ["sample_imei1", "sample_imei2"], "retailername": "Sample Retailer", "purchase_date": "01/01/1970", "sold_to_party": "Sample party"}''',
                    },
                },
            },
        },
        responses=None,
        tags=['Accuracy']
    )
    @action(detail=False, url_path=r"request/(?P<request_id>[\w\-]+)", methods=["GET", "POST"])
    def get_subscription_request(self, request, request_id=None):
        if request.method == 'GET':
            base_query = Q(request_id=request_id)

            subscription_request = SubscriptionRequest.objects.filter(base_query)

            if subscription_request.count() == 0:
                raise NotFoundException(excArgs=request_id)
            
            subscription_request = subscription_request.first()

            sample_result = {
                "request_id": subscription_request.request_id,
                "retailername": None,
                "sold_to_party": None,
                "purchase_date": None,
                "imei_number": []
            }
          
            data = []
            files = []

            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id, file_category=FileCategory.Origin.value)

            for subscription_request_file in subscription_request_files:
                sub = subscription_request.subscription
                user_id = sub.user.id
                sync_id = sub.user.sync_id
                sub_id = sub.id
                reviewed_result = subscription_request_file.reviewed_result
                feedback_result = subscription_request_file.feedback_result
                predicted_result = subscription_request_file.predict_result

                if not reviewed_result:
                    reviewed_result = copy.deepcopy(sample_result)
                if not feedback_result:
                    feedback_result = copy.deepcopy(sample_result)
                if not predicted_result:
                    predicted_result = copy.deepcopy(sample_result)

                files.append({
                    'File Name': subscription_request_file.file_name,
                    'File Path': subscription_request_file.file_path,
                    'File Category': subscription_request_file.file_category,
                    'File URL': build_S3_url("sbt_invoice/" + subscription_request.request_id + "/" + subscription_request_file.file_name, 600),
                    'Original Name': subscription_request_file.origin_name,
                    'Is Bad Image Quality': subscription_request_file.is_bad_image_quality,
                    'Doc Type': subscription_request_file.doc_type,
                    'Processing Time (ms)': subscription_request_file.processing_time,
                    'Reason': subscription_request_file.reason,
                    'Counter Measures': subscription_request_file.counter_measures,
                    'Predicted Result': predicted_result,
                    'Feedback Result': feedback_result,
                    'Reviewed Result': reviewed_result,
                    'Feedback Accuracy': subscription_request_file.feedback_accuracy,
                    'Reviewed Accuracy': subscription_request_file.reviewed_accuracy,
                    'Created At': subscription_request_file.created_at.isoformat(),
                    'Updated At': subscription_request_file.updated_at.isoformat()
                })

            reviewed_result = subscription_request.reviewed_result
            feedback_result = subscription_request.feedback_result
            predicted_result = predict_result_to_ready(subscription_request.predict_result)

            if not reviewed_result:
                reviewed_result = copy.deepcopy(sample_result)
            if not feedback_result:
                feedback_result = copy.deepcopy(sample_result)
            if not predicted_result:
                predicted_result = copy.deepcopy(sample_result)

            data.append({
                'Document Type': subscription_request.doc_type,
                'RequestID': subscription_request.request_id,
                'RedemptionID': subscription_request.redemption_id,
                'Process Type': subscription_request.process_type,
                'Provider Code': subscription_request.provider_code,
                'Status': subscription_request.status,
                'Files': files,
                'Reviewed Result': reviewed_result,
                'Feedback Result': feedback_result,
                'Predicted Result': predicted_result,
                'Is Test Request': subscription_request.is_test_request,
                'Client Request Time (ms)': subscription_request.client_request_time,
                'Server Processing Time (ms)': subscription_request.preprocessing_time + subscription_request.ai_inference_time,
                'Is Reviewed': subscription_request.is_reviewed,
                'Feedback Accuracy': subscription_request.feedback_accuracy,
                'Reviewed Accuracy': subscription_request.reviewed_accuracy, 
                'Created At': subscription_request.created_at.isoformat(),
                'Updated At': subscription_request.updated_at.isoformat()
            })

            response = {
                'subscription_requests': data
            }

            return JsonResponse(response)
        
        elif request.method == 'POST':
            data = request.data
            
            base_query = Q(request_id=request_id)

            subscription_request = SubscriptionRequest.objects.filter(base_query)

            if subscription_request.count() == 0:
                raise NotFoundException(excArgs=request_id)
            
            subscription_request = subscription_request.first()

            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id)

            if "reviewed_result" not in data:
                raise InvalidException(excArgs=f'reviewed_result')
            
            reviewed_result = data["reviewed_result"]
            for field in ['retailername', 'sold_to_party', 'purchase_date', 'imei_number']:
                if not field in reviewed_result.keys():
                    raise RequiredFieldException(excArgs=f'reviewed_result.{field}')
            reviewed_result['request_id'] = request_id

            for subscription_request_file in subscription_request_files:
                if subscription_request_file.doc_type == 'invoice':
                    subscription_request_file.reviewed_result = reviewed_result
                    subscription_request_file.reviewed_result['imei_number'] = []
                elif subscription_request_file.doc_type == 'imei':
                    subscription_request_file.reviewed_result = {
                        "retailername": None,
                        "sold_to_party": None,
                        "purchase_date": [],
                        "imei_number": []}
                    if len(reviewed_result["imei_number"]) - 1 >= subscription_request_file.index_in_request:
                        subscription_request_file.reviewed_result["imei_number"] = reviewed_result["imei_number"][subscription_request_file.index_in_request]
                subscription_request_file.save()

            subscription_request.reviewed_result = reviewed_result
            subscription_request.reviewed_result['request_id'] = request_id
            subscription_request.is_reviewed = True
            subscription_request.save()

            return JsonResponse({'message': 'success.'}, status=200)
        else:
            return JsonResponse({'error': 'Invalid request method.'}, status=405)
        
    @extend_schema(
        request={
            'multipart/form-data': {
                'type': 'object',
                'properties': {
                    'reason': {
                        'type': 'string',
                        'default': '''"Sample reason"''',
                    },
                },
            },
        },
        responses=None,
        tags=['Accuracy']
    )
    @action(detail=False, url_path=r"request_image/(?P<request_id>[\w\-]+)/(?P<request_image_id>[\w\-]+)", methods=["POST"])
    def request_image(self, request, request_id=None, request_image_id=None):
        """Attach a review ``reason`` to one image of a subscription request.

        Looks up the SubscriptionRequest by ``request_id``, then finds every
        associated file whose file name (without extension) equals
        ``request_image_id`` and stores the posted ``reason`` on it.

        Raises:
            NotFoundException: if no request matches ``request_id``, or no
                file of that request matches ``request_image_id``.
            InvalidException: if ``reason`` is missing from the payload.
        """
        if request.method == 'POST':
            data = request.data

            subscription_requests = SubscriptionRequest.objects.filter(Q(request_id=request_id))
            if subscription_requests.count() == 0:
                raise NotFoundException(excArgs=request_id)
            subscription_request = subscription_requests.first()

            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id)

            if "reason" not in data:
                raise InvalidException(excArgs='reason')
            reason = data["reason"]

            # Match on the file name without its extension; several files may
            # share a base name, so update all matches rather than the first.
            is_available = False
            for subscription_request_file in subscription_request_files:
                if subscription_request_file.file_name.split(".")[0] == request_image_id:
                    subscription_request_file.reason = reason
                    subscription_request_file.save()
                    is_available = True
            if not is_available:
                raise NotFoundException(excArgs=request_id + "/" + request_image_id)
            # BUG FIX: the original fell through here without returning, so the
            # view returned None and Django raised "didn't return an
            # HttpResponse". Mirror the success response of the sibling
            # review-POST handler.
            return JsonResponse({'message': 'success.'}, status=200)
        else:
            # Unreachable in practice (@action restricts to POST), kept as a
            # defensive guard consistent with the other handlers in this class.
            return JsonResponse({'error': 'Invalid request method.'}, status=405)