update billing report

parent 4b5de38b28
commit 0ab18b06b4

BIN  cope2n-api/billing_report.xlsx  (normal file; binary file not shown)

10   cope2n-api/fwd_api/api/accuracy_view.py  (normal file → executable file)

@@ -222,7 +222,11 @@ class AccuracyViewSet(viewsets.ViewSet):
         subsidiary = request.data.get("subsidiary", "all")
         is_daily_report = request.data.get('is_daily_report', False)
         report_overview_duration = request.data.get("report_overview_duration", "")
+        report_type = request.data.get("report_type", "accuracy")
         subsidiary = map_subsidiary_long_to_short(subsidiary)
+
+        if report_type=="billing" and subsidiary.lower().replace(" ", "") not in settings.SUB_FOR_BILLING:
+            raise InvalidException(excArgs="Subsidiary for billing report")
 
         if is_daily_report:
             if report_overview_duration not in settings.OVERVIEW_REPORT_DURATION:

@@ -258,11 +262,9 @@ class AccuracyViewSet(viewsets.ViewSet):
             "include_test": include_test,
             "subsidiary": subsidiary,
             "is_daily_report": is_daily_report,
-            "report_overview_duration": report_overview_duration
+            "report_overview_duration": report_overview_duration,
+            "report_type": report_type,
         }
-        # if is_daily_report:
-        #     if (end_date-start_date) > timezone.timedelta(days=1):
-        #         raise InvalidException(excArgs="Date range")
 
         report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex
         new_report: Report = Report(
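
Note: a request that exercises the new branch only needs the extra report_type field; everything else is unchanged. A minimal sketch of the request body — the subsidiary value and the omitted fields are illustrative, not taken from this diff:

    payload = {
        "subsidiary": "SEAU",              # illustrative; must normalize into settings.SUB_FOR_BILLING for billing
        "report_type": "billing",          # new field, defaults to "accuracy"
        "is_daily_report": False,
        "report_overview_duration": "",    # only validated when is_daily_report is set
        # ... the date range and the other fields accepted by ReportCreationSerializer are omitted here
    }
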

124  cope2n-api/fwd_api/celery_worker/process_report_tasks.py  (normal file → executable file)

@@ -3,7 +3,9 @@ import traceback
 from fwd_api.models import SubscriptionRequest, Report, ReportFile
 from fwd_api.celery_worker.worker import app
 from ..utils import s3 as S3Util
-from ..utils.accuracy import update_temp_accuracy, IterAvg, calculate_and_save_subcription_file, count_transactions, extract_report_detail_list, calculate_a_request, ReportAccumulateByRequest
+from ..utils.accuracy import (update_temp_accuracy, IterAvg, calculate_and_save_subcription_file,
+                              count_transactions, extract_report_detail_list, calculate_a_request,
+                              ReportAccumulateByRequest, create_billing_data)
 from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3, save_images_to_csv_briefly
 from ..utils import time_stuff
 from ..utils.redis import RedisUtils

@@ -162,31 +164,40 @@ def make_a_report(report_id, query_set):
 
 @app.task(name='make_a_report_2')
 def make_a_report_2(report_id, query_set):
+    report_type = query_set.pop("report_type", "accuracy")
+    if report_type == "accuracy":
+        create_accuracy_report(report_id=report_id, **query_set)
+    elif report_type == "billing":
+        create_billing_report(report_id=report_id, **query_set)
+    else:
+        raise TypeError("Invalid report type")
+
+
+def create_accuracy_report(report_id, **kwargs):
     try:
-        start_date = timezone.datetime.strptime(query_set["start_date_str"], '%Y-%m-%dT%H:%M:%S%z')
-        end_date = timezone.datetime.strptime(query_set["end_date_str"], '%Y-%m-%dT%H:%M:%S%z')
+        start_date = timezone.datetime.strptime(kwargs["start_date_str"], '%Y-%m-%dT%H:%M:%S%z')
+        end_date = timezone.datetime.strptime(kwargs["end_date_str"], '%Y-%m-%dT%H:%M:%S%z')
         base_query = Q(created_at__range=(start_date, end_date))
-        if query_set["request_id"]:
-            base_query &= Q(request_id=query_set["request_id"])
-        if query_set["redemption_id"]:
-            base_query &= Q(redemption_id=query_set["redemption_id"])
+        if kwargs["request_id"]:
+            base_query &= Q(request_id=kwargs["request_id"])
+        if kwargs["redemption_id"]:
+            base_query &= Q(redemption_id=kwargs["redemption_id"])
         base_query &= Q(is_test_request=False)
-        if isinstance(query_set["include_test"], str):
-            query_set["include_test"] = True if query_set["include_test"].lower() in ["true", "yes", "1"] else False
-            if query_set["include_test"]:
+        if isinstance(kwargs["include_test"], str):
+            include_test = True if kwargs["include_test"].lower() in ["true", "yes", "1"] else False
+            if include_test:
                 # base_query = ~base_query
                 base_query.children = base_query.children[:-1]
-        elif isinstance(query_set["include_test"], bool):
-            if query_set["include_test"]:
+        elif isinstance(kwargs["include_test"], bool):
+            if kwargs["include_test"]:
                 base_query = ~base_query
-        if isinstance(query_set["subsidiary"], str):
-            if query_set["subsidiary"] and query_set["subsidiary"].lower().replace(" ", "") not in settings.SUB_FOR_BILLING:
-                base_query &= Q(redemption_id__startswith=query_set["subsidiary"])
-        if isinstance(query_set["is_reviewed"], str):
-            if query_set["is_reviewed"] == "reviewed":
+        if isinstance(kwargs["subsidiary"], str):
+            if kwargs["subsidiary"] and kwargs["subsidiary"].lower().replace(" ", "") not in settings.SUB_FOR_BILLING:
+                base_query &= Q(redemption_id__startswith=kwargs["subsidiary"])
+        if isinstance(kwargs["is_reviewed"], str):
+            if kwargs["is_reviewed"] == "reviewed":
                 base_query &= Q(is_reviewed=True)
-            elif query_set["is_reviewed"] == "not reviewed":
+            elif kwargs["is_reviewed"] == "not reviewed":
                 base_query &= Q(is_reviewed=False)
 
         errors = []
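
make_a_report_2 now only dispatches on report_type; the payload built in accuracy_view.py above is passed through unchanged. A sketch of how the task might be enqueued — the actual call site is not part of this diff, and the values are illustrative:

    query_set = {
        "report_type": "billing",
        "start_date_str": "2024-03-01T00:00:00+0000",
        "end_date_str": "2024-03-06T00:00:00+0000",
        "include_test": False, "subsidiary": "seau", "is_daily_report": False,
        "request_id": None, "redemption_id": None, "is_reviewed": None,
        "report_overview_duration": "",
    }
    # Standard Celery usage for the task registered as 'make_a_report_2' (call site assumed, not shown here).
    make_a_report_2.apply_async(args=[report_id, query_set])
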
@@ -213,8 +224,7 @@ def make_a_report_2(report_id, query_set):
         # TODO: Multithreading
         # Calculate accuracy, processing time, ....Then save.
         subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')
-        report: Report = \
-            Report.objects.filter(report_id=report_id).first()
+        report: Report = Report.objects.filter(report_id=report_id).first()
         # TODO: number of transaction by doc type
         num_request = 0
         report_files = []

@@ -226,14 +236,14 @@ def make_a_report_2(report_id, query_set):
             request_att, _report_files = calculate_a_request(report, request)
             report_files += _report_files
             report_engine.add(request, _report_files)
-            request.feedback_accuracy = {"imei_number" : mean_list(request_att["acc"]["feedback"].get("imei_number", [None])),
-                                         "purchase_date" : mean_list(request_att["acc"]["feedback"].get("purchase_date", [None])),
-                                         "retailername" : mean_list(request_att["acc"]["feedback"].get("retailername", [None])),
-                                         "sold_to_party" : mean_list(request_att["acc"]["feedback"].get("sold_to_party", [None]))}
-            request.reviewed_accuracy = {"imei_number" : mean_list(request_att["acc"]["reviewed"].get("imei_number", [None])),
-                                         "purchase_date" : mean_list(request_att["acc"]["reviewed"].get("purchase_date", [None])),
-                                         "retailername" : mean_list(request_att["acc"]["reviewed"].get("retailername", [None])),
-                                         "sold_to_party" : mean_list(request_att["acc"]["reviewed"].get("sold_to_party", [None]))}
+            request.feedback_accuracy = {"imei_number": mean_list(request_att["acc"]["feedback"].get("imei_number", [None])),
+                                         "purchase_date": mean_list(request_att["acc"]["feedback"].get("purchase_date", [None])),
+                                         "retailername": mean_list(request_att["acc"]["feedback"].get("retailername", [None])),
+                                         "sold_to_party": mean_list(request_att["acc"]["feedback"].get("sold_to_party", [None]))}
+            request.reviewed_accuracy = {"imei_number": mean_list(request_att["acc"]["reviewed"].get("imei_number", [None])),
+                                         "purchase_date": mean_list(request_att["acc"]["reviewed"].get("purchase_date", [None])),
+                                         "retailername": mean_list(request_att["acc"]["reviewed"].get("retailername", [None])),
+                                         "sold_to_party": mean_list(request_att["acc"]["reviewed"].get("sold_to_party", [None]))}
             request.save()
             number_images += request_att["total_images"]
             number_bad_images += request_att["bad_images"]

@@ -249,7 +259,7 @@ def make_a_report_2(report_id, query_set):
             num_request += 1
             review_progress += request_att.get("is_reviewed", [])
 
-        report_fine_data, _save_data = report_engine.save(report.report_id, query_set.get("is_daily_report", False), query_set["include_test"])
+        report_fine_data, _save_data = report_engine.save(report.report_id, kwargs.get("is_daily_report", False), kwargs["include_test"])
         transaction_att = count_transactions(start_date, end_date, report.subsidiary)
         # Do saving process
         report.number_request = num_request

@@ -260,27 +270,27 @@ def make_a_report_2(report_id, query_set):
         # FIXME: refactor this data stream for endurability
         report.average_OCR_time = {"invoice": time_cost["invoice"](), "imei": time_cost["imei"](),
                                    "invoice_count": time_cost["invoice"].count, "imei_count": time_cost["imei"].count}
 
-        report.average_OCR_time["avg"] = (report.average_OCR_time["invoice"]*report.average_OCR_time["invoice_count"] + report.average_OCR_time["imei"]*report.average_OCR_time["imei_count"])/(report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) if (report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) > 0 else None
+        report.average_OCR_time["avg"] = (report.average_OCR_time["invoice"]*report.average_OCR_time["invoice_count"] + report.average_OCR_time["imei"]*report.average_OCR_time["imei_count"])/(
+            report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) if (report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) > 0 else None
         report.number_imei_transaction = transaction_att.get("imei", 0)
         report.number_invoice_transaction = transaction_att.get("invoice", 0)
 
         acumulated_acc = {"feedback": {},
                           "reviewed": {},
                           "acumulated": {}}
         for acc_type in ["feedback", "reviewed", "acumulated"]:
             avg_acc = IterAvg()
             for key in ["imei_number", "purchase_date", "retailername", "sold_to_party"]:
                 acumulated_acc[acc_type][key] = accuracy[acc_type][key]()
-                acumulated_acc[acc_type][key+"_count"] = accuracy[acc_type][key].count
+                acumulated_acc[acc_type][key + "_count"] = accuracy[acc_type][key].count
                 avg_acc.add_avg(acumulated_acc[acc_type][key], acumulated_acc[acc_type][key+"_count"])
             acumulated_acc[acc_type]["avg"] = avg_acc()
 
         report.feedback_accuracy = acumulated_acc["feedback"]
         report.reviewed_accuracy = acumulated_acc["reviewed"]
         report.combined_accuracy = acumulated_acc["acumulated"]
 
         report.num_reviewed = review_progress.count(1)
         report.num_not_reviewed = review_progress.count(0)
         report.num_no_reviewed = review_progress.count(-1)

@@ -294,8 +304,8 @@ def make_a_report_2(report_id, query_set):
         data = extract_report_detail_list(report_files, lower=True)
         data_workbook = dict2xlsx(data, _type='report_detail')
         local_workbook = save_workbook_file(report.report_id + ".xlsx", report, data_workbook)
-        s3_key=save_report_to_S3(report.report_id, local_workbook, 5)
-        if query_set["is_daily_report"]:
+        s3_key = save_report_to_S3(report.report_id, local_workbook, 5)
+        if kwargs["is_daily_report"]:
             # Save overview dashboard
             # multiple accuracy by 100
             save_data = copy.deepcopy(_save_data)

@@ -313,10 +323,9 @@ def make_a_report_2(report_id, query_set):
                     for x_key in report_fine_data[i][key].keys():
                         report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key]*100
             data_workbook = dict2xlsx(report_fine_data, _type='report')
-            overview_filename = query_set["subsidiary"] + "_" + query_set["report_overview_duration"] + ".xlsx"
+            overview_filename = kwargs["subsidiary"] + "_" + kwargs["report_overview_duration"] + ".xlsx"
             local_workbook = save_workbook_file(overview_filename, report, data_workbook, settings.OVERVIEW_REPORT_ROOT)
-            s3_key=save_report_to_S3(report.report_id, local_workbook)
-            # redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data))
+            s3_key = save_report_to_S3(report.report_id, local_workbook)
             set_cache(overview_filename.replace(".xlsx", ""), save_data)
 
     except IndexError as e:

@@ -327,3 +336,34 @@ def make_a_report_2(report_id, query_set):
         print("[ERROR]: an error occured while processing report: ", report_id)
         traceback.print_exc()
         return 400
+
+
+def create_billing_report(report_id, **kwargs):
+    try:
+        start_date = timezone.datetime.strptime(
+            kwargs["start_date_str"], '%Y-%m-%dT%H:%M:%S%z')
+        end_date = timezone.datetime.strptime(
+            kwargs["end_date_str"], '%Y-%m-%dT%H:%M:%S%z')
+        base_query = Q(created_at__range=(start_date, end_date))
+        base_query &= Q(is_test_request=False)
+
+        subscription_requests = SubscriptionRequest.objects.filter(
+            base_query).order_by('created_at')
+        report: Report = Report.objects.filter(report_id=report_id).first()
+        billing_data = create_billing_data(subscription_requests)
+        report.number_request = len(subscription_requests)
+        report.number_images = len(billing_data)
+        report.status = "Ready"
+        report.save()
+        data_workbook = dict2xlsx(billing_data, _type='billing_report')
+        local_workbook = save_workbook_file(
+            report.report_id + ".xlsx", report, data_workbook)
+        s3_key = save_report_to_S3(report.report_id, local_workbook)
+    except IndexError as e:
+        print(e)
+        traceback.print_exc()
+        print("NotFound request by report id, %d", report_id)
+    except Exception as e:
+        print("[ERROR]: an error occured while processing report: ", report_id)
+        traceback.print_exc()
+        return 400


18   cope2n-api/fwd_api/migrations/0185_report_report_type.py  (new executable file)

@@ -0,0 +1,18 @@
+# Generated by Django 4.1.3 on 2024-03-06 06:57
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('fwd_api', '0184_caching'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='report',
+            name='report_type',
+            field=models.CharField(choices=[('BILLING', 'billing'), ('ACCURACY', 'accuracy')], default='accuracy', max_length=10),
+        ),
+    ]


5    cope2n-api/fwd_api/models/Report.py  (normal file → executable file)

@@ -2,6 +2,7 @@ from django.db import models
 from django.utils import timezone
 from fwd_api.models.Subscription import Subscription
 
+
 class Report(models.Model):
     # Metadata
     id = models.AutoField(primary_key=True)

@@ -42,4 +43,6 @@ class Report(models.Model):
 
     feedback_accuracy = models.JSONField(null=True)
     reviewed_accuracy = models.JSONField(null=True)
     combined_accuracy = models.JSONField(null=True)
+    report_type = models.CharField(max_length=10, choices=[
+        ("BILLING", "billing"), ("ACCURACY", "accuracy")], default="accuracy")
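
With the field in place, reports can be filtered by type through the normal ORM. Note that the model's choices list upper-case keys while the serializer and the default write lower-case values, so queries should match whatever the API actually stores. A quick sketch (standard Django ORM, values illustrative):

    from fwd_api.models import Report

    # Pre-existing rows pick up the "accuracy" default added by migration 0185.
    billing_report_count = Report.objects.filter(report_type="billing").count()
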

5    cope2n-api/fwd_api/request/ReportCreationSerializer.py  (normal file → executable file)

@@ -36,4 +36,9 @@ class ReportCreationSerializer(serializers.Serializer):
     report_overview_duration = serializers.CharField(
         help_text=f'open of {settings.OVERVIEW_REPORT_DURATION}',
         default=None
+    )
+    report_type = serializers.ChoiceField(
+        help_text='What type of report to create',
+        choices=['billing', 'accuracy'],
+        default="accuracy"
     )
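
The new choice field behaves like the existing serializer fields: an unknown value is rejected and the default fills in when the field is omitted. A sketch (standard DRF usage; other_required_fields is a placeholder for the rest of the payload, which is not shown in this hunk):

    data = {**other_required_fields, "report_type": "billing"}   # other_required_fields: hypothetical placeholder
    serializer = ReportCreationSerializer(data=data)
    if serializer.is_valid():
        report_type = serializer.validated_data["report_type"]   # "billing" or "accuracy"
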

[file header missing from the capture — this hunk adds create_billing_data, which the worker imports from ..utils.accuracy above]

@@ -797,6 +797,53 @@ def acc_maximize_list_values(acc):
         pos[k] = acc[k].index(acc[k][0])
     return acc, pos
 
+
+def create_billing_data(subscription_requests):
+    billing_data = []
+    for request in subscription_requests:
+        if request.status != 200:
+            continue
+        images = SubscriptionRequestFile.objects.filter(request=request, file_category=FileCategory.Origin.value)
+        for image in images:
+            if not image.doc_type:
+                _doc_type = image.file_name.split("_")[1]
+                if _doc_type in ["imei", "invoice"]:
+                    image.doc_type = _doc_type
+                    image.save()
+            else:
+                _doc_type = image.doc_type
+
+            doc_type = "SN/IMEI" if _doc_type == "imei" else "Invoice"
+
+            _sub = ""
+            redemption_id = ""
+            if request.redemption_id:
+                _sub = map_subsidiary_short_to_long(request.redemption_id[:2])
+                redemption_id = request.redemption_id
+
+            format_to_time = '%m/%d/%Y %H:%M'
+            format_to_date = '%m/%d/%Y'
+            format_to_month = '%B %Y'
+
+            rq_created_at = request.created_at
+            print(type(redemption_id))
+            rq_created_at = timezone.make_aware(rq_created_at)
+            print(rq_created_at)
+            rq_month = rq_created_at.strftime(format_to_month)
+            rq_date = rq_created_at.strftime(format_to_date)
+            rq_time = rq_created_at.strftime(format_to_time)
+
+            billing_data.append({
+                "request_month": rq_month,
+                "subsidiary": _sub,
+                "image_type": doc_type,
+                "redemption_number": redemption_id,
+                "request_id": request.request_id,
+                "request_date": rq_date,
+                "request_time_(utc)": rq_time
+            })
+    return billing_data
+
+
 def calculate_a_request(report, request):
     request_att = {"acc": {"feedback": {"imei_number": [],
                                         "purchase_date": [],
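
Each element appended by create_billing_data is a flat dict whose keys line up with the billing_report column mapping added to dict2xlsx below; the values here are illustrative:

    {
        "request_month": "March 2024",             # '%B %Y'
        "subsidiary": "...",                       # long name resolved from the redemption prefix, or "" if absent
        "image_type": "SN/IMEI",                   # or "Invoice"
        "redemption_number": "...",
        "request_id": "...",
        "request_date": "03/06/2024",              # '%m/%d/%Y'
        "request_time_(utc)": "03/06/2024 06:57",  # '%m/%d/%Y %H:%M'
    }
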

73   cope2n-api/fwd_api/utils/file.py  (normal file → executable file)

@@ -521,37 +521,58 @@ def dict2xlsx(input: json, _type='report'):
         }
         start_index = 4
 
+    elif _type == 'billing_report':
+        wb = load_workbook(filename = 'billing_report.xlsx')
+        ws = wb['Sheet1']
+        mapping = {
+            'B': 'request_month',
+            'C': 'subsidiary',
+            'D': 'image_type',
+            'E': 'redemption_number',
+            'F': 'request_id',
+            'G': "request_date",
+            'H': "request_time_(utc)"
+        }
+        start_index = 4
+
     for subtotal in input:
         for key in mapping.keys():
-            value = get_value(subtotal, mapping[key])
-            ws[key + str(start_index)] = value
-            if key in ['C', 'D', 'E'] and value == 0:
-                ws[key + str(start_index)] = "-"
-            ws[key + str(start_index)].border = border
-            ws[key + str(start_index)].font = font_black
-            if 'accuracy' in mapping[key] or 'time' in mapping[key] or 'percent' in mapping[key] or 'speed' in mapping[key] or mapping[key] in ["review_progress"]:
-                ws[key + str(start_index)].number_format = '0.0'
+            if _type!="billing_report":
+                value = get_value(subtotal, mapping[key])
+                ws[key + str(start_index)] = value
+                if key in ['C', 'D', 'E'] and value == 0:
+                    ws[key + str(start_index)] = "-"
+                ws[key + str(start_index)].border = border
+                ws[key + str(start_index)].font = font_black
+                if 'accuracy' in mapping[key] or 'time' in mapping[key] or 'percent' in mapping[key] or 'speed' in mapping[key] or mapping[key] in ["review_progress"]:
+                    ws[key + str(start_index)].number_format = '0.0'
 
             if _type == 'report':
                 if subtotal['subs'] == '+':
                     ws[key + str(start_index)].font = font_black_bold
                     if key in ['A', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R']:
                         ws[key + str(start_index)].fill = fill_gray
                     elif key == 'B':
                         ws[key + str(start_index)].fill = fill_green
                     elif key in ['C', 'D', 'E', 'F', 'G', 'H']:
                         ws[key + str(start_index)].fill = fill_yellow
                 if 'average_accuracy_rate' in mapping[key] and type(value) in [int, float] and value < 98:
                     ws[key + str(start_index)].font = font_red
                 elif 'average_processing_time' in mapping[key] and type(value) in [int, float] and value > 2.0:
                     ws[key + str(start_index)].font = font_red
                 elif 'bad_percent' in mapping[key] and type(value) in [int, float] and value > 10:
                     ws[key + str(start_index)].font = font_red
             elif _type == 'report_detail':
                 if 'accuracy' in mapping[key] and type(value) in [int, float] and value < 75:
                     ws[key + str(start_index)].font = font_red
                 elif 'speed' in mapping[key] and type(value) in [int, float] and value > 2.0:
                     ws[key + str(start_index)].font = font_red
+            else:
+                value = get_value(subtotal, mapping[key])
+                value = "-" if value=="" else value
+                ws[key + str(start_index)] = value
+                ws[key + str(start_index)].border = border
+                ws[key + str(start_index)].font = font_black
 
         start_index += 1
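
The billing branch loads its template by bare filename, which is why the binary billing_report.xlsx is committed at the top of this change; the lookup is resolved against the worker's working directory. A minimal sketch of that assumption (load_workbook here is presumably openpyxl's):

    from openpyxl import load_workbook

    # Relative path: resolved against the process CWD; the template sits at cope2n-api/billing_report.xlsx.
    wb = load_workbook(filename='billing_report.xlsx')
    ws = wb['Sheet1']   # columns B..H are filled starting at row 4
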