update billing report

commit 0ab18b06b4 (parent 4b5de38b28)
cope2n-api/billing_report.xlsx (BIN, new file, binary file not shown)
cope2n-api/fwd_api/api/accuracy_view.py (10 lines changed, Normal file → Executable file)
@@ -222,8 +222,12 @@ class AccuracyViewSet(viewsets.ViewSet):
         subsidiary = request.data.get("subsidiary", "all")
         is_daily_report = request.data.get('is_daily_report', False)
         report_overview_duration = request.data.get("report_overview_duration", "")
+        report_type = request.data.get("report_type", "accuracy")
         subsidiary = map_subsidiary_long_to_short(subsidiary)
+
+        if report_type=="billing" and subsidiary.lower().replace(" ", "") not in settings.SUB_FOR_BILLING:
+            raise InvalidException(excArgs="Subsidiary for billing report")
 
         if is_daily_report:
             if report_overview_duration not in settings.OVERVIEW_REPORT_DURATION:
                 raise InvalidException(excArgs="overview duration")
@@ -258,11 +262,9 @@ class AccuracyViewSet(viewsets.ViewSet):
             "include_test": include_test,
             "subsidiary": subsidiary,
             "is_daily_report": is_daily_report,
-            "report_overview_duration": report_overview_duration
+            "report_overview_duration": report_overview_duration,
+            "report_type": report_type,
         }
-        # if is_daily_report:
-        #     if (end_date-start_date) > timezone.timedelta(days=1):
-        #         raise InvalidException(excArgs="Date range")
 
         report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex
         new_report: Report = Report(
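For reference, a minimal sketch of the request payload this view now accepts; only the key names come from the request.data.get(...) calls above, the route and the values are assumptions. When report_type is "billing", the shortened subsidiary must be listed in settings.SUB_FOR_BILLING or an InvalidException is raised.

```python
# Hypothetical payload for the report-creation endpoint (exact route not shown in this diff).
payload = {
    "subsidiary": "SEAO",            # hypothetical value; passed through map_subsidiary_long_to_short
    "report_type": "billing",        # new field: "billing" or "accuracy" (default)
    "is_daily_report": False,
    "report_overview_duration": "",  # only checked against settings.OVERVIEW_REPORT_DURATION when is_daily_report is truthy
}
```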
cope2n-api/fwd_api/celery_worker/process_report_tasks.py (116 lines changed, Normal file → Executable file)
@@ -3,7 +3,9 @@ import traceback
 from fwd_api.models import SubscriptionRequest, Report, ReportFile
 from fwd_api.celery_worker.worker import app
 from ..utils import s3 as S3Util
-from ..utils.accuracy import update_temp_accuracy, IterAvg, calculate_and_save_subcription_file, count_transactions, extract_report_detail_list, calculate_a_request, ReportAccumulateByRequest
+from ..utils.accuracy import (update_temp_accuracy, IterAvg, calculate_and_save_subcription_file,
+                              count_transactions, extract_report_detail_list, calculate_a_request,
+                              ReportAccumulateByRequest, create_billing_data)
 from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3, save_images_to_csv_briefly
 from ..utils import time_stuff
 from ..utils.redis import RedisUtils
@@ -162,31 +164,40 @@ def make_a_report_2(report_id, query_set):
 
 @app.task(name='make_a_report_2')
 def make_a_report_2(report_id, query_set):
+    report_type = query_set.pop("report_type", "accuracy")
+    if report_type == "accuracy":
+        create_accuracy_report(report_id=report_id, **query_set)
+    elif "billing":
+        create_billing_report(report_id=report_id, **query_set)
+    else:
+        raise TypeError("Invalid report type")
+
+
+def create_accuracy_report(report_id, **kwargs):
     try:
-        start_date = timezone.datetime.strptime(query_set["start_date_str"], '%Y-%m-%dT%H:%M:%S%z')
-        end_date = timezone.datetime.strptime(query_set["end_date_str"], '%Y-%m-%dT%H:%M:%S%z')
+        start_date = timezone.datetime.strptime(kwargs["start_date_str"], '%Y-%m-%dT%H:%M:%S%z')
+        end_date = timezone.datetime.strptime(kwargs["end_date_str"], '%Y-%m-%dT%H:%M:%S%z')
         base_query = Q(created_at__range=(start_date, end_date))
-        if query_set["request_id"]:
-            base_query &= Q(request_id=query_set["request_id"])
-        if query_set["redemption_id"]:
-            base_query &= Q(redemption_id=query_set["redemption_id"])
+        if kwargs["request_id"]:
+            base_query &= Q(request_id=kwargs["request_id"])
+        if kwargs["redemption_id"]:
+            base_query &= Q(redemption_id=kwargs["redemption_id"])
         base_query &= Q(is_test_request=False)
-        if isinstance(query_set["include_test"], str):
-            query_set["include_test"] = True if query_set["include_test"].lower() in ["true", "yes", "1"] else False
-            if query_set["include_test"]:
+        if isinstance(kwargs["include_test"], str):
+            include_test = True if kwargs["include_test"].lower() in ["true", "yes", "1"] else False
+            if include_test:
                 # base_query = ~base_query
                 base_query.children = base_query.children[:-1]
 
-        elif isinstance(query_set["include_test"], bool):
-            if query_set["include_test"]:
+        elif isinstance(kwargs["include_test"], bool):
+            if kwargs["include_test"]:
                 base_query = ~base_query
-        if isinstance(query_set["subsidiary"], str):
-            if query_set["subsidiary"] and query_set["subsidiary"].lower().replace(" ", "") not in settings.SUB_FOR_BILLING:
-                base_query &= Q(redemption_id__startswith=query_set["subsidiary"])
-        if isinstance(query_set["is_reviewed"], str):
-            if query_set["is_reviewed"] == "reviewed":
+        if isinstance(kwargs["subsidiary"], str):
+            if kwargs["subsidiary"] and kwargs["subsidiary"].lower().replace(" ", "") not in settings.SUB_FOR_BILLING:
+                base_query &= Q(redemption_id__startswith=kwargs["subsidiary"])
+        if isinstance(kwargs["is_reviewed"], str):
+            if kwargs["is_reviewed"] == "reviewed":
                 base_query &= Q(is_reviewed=True)
-            elif query_set["is_reviewed"] == "not reviewed":
+            elif kwargs["is_reviewed"] == "not reviewed":
                 base_query &= Q(is_reviewed=False)
 
         errors = []
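Note that `elif "billing":` in the new dispatcher tests the truthiness of the literal string, which is always True, so any report_type other than "accuracy" (including typos) is routed to the billing report and the TypeError branch is unreachable. A minimal sketch of the presumably intended comparison:

```python
report_type = query_set.pop("report_type", "accuracy")
if report_type == "accuracy":
    create_accuracy_report(report_id=report_id, **query_set)
elif report_type == "billing":  # compare the value, not the literal
    create_billing_report(report_id=report_id, **query_set)
else:
    raise TypeError(f"Invalid report type: {report_type}")
```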
@@ -213,8 +224,7 @@ def make_a_report_2(report_id, query_set):
         # TODO: Multithreading
         # Calculate accuracy, processing time, ....Then save.
         subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')
-        report: Report = \
-            Report.objects.filter(report_id=report_id).first()
+        report: Report = Report.objects.filter(report_id=report_id).first()
         # TODO: number of transaction by doc type
         num_request = 0
         report_files = []
@@ -226,14 +236,14 @@ def make_a_report_2(report_id, query_set):
             request_att, _report_files = calculate_a_request(report, request)
             report_files += _report_files
             report_engine.add(request, _report_files)
-            request.feedback_accuracy = {"imei_number" : mean_list(request_att["acc"]["feedback"].get("imei_number", [None])),
-                                         "purchase_date" : mean_list(request_att["acc"]["feedback"].get("purchase_date", [None])),
-                                         "retailername" : mean_list(request_att["acc"]["feedback"].get("retailername", [None])),
-                                         "sold_to_party" : mean_list(request_att["acc"]["feedback"].get("sold_to_party", [None]))}
-            request.reviewed_accuracy = {"imei_number" : mean_list(request_att["acc"]["reviewed"].get("imei_number", [None])),
-                                         "purchase_date" : mean_list(request_att["acc"]["reviewed"].get("purchase_date", [None])),
-                                         "retailername" : mean_list(request_att["acc"]["reviewed"].get("retailername", [None])),
-                                         "sold_to_party" : mean_list(request_att["acc"]["reviewed"].get("sold_to_party", [None]))}
+            request.feedback_accuracy = {"imei_number": mean_list(request_att["acc"]["feedback"].get("imei_number", [None])),
+                                         "purchase_date": mean_list(request_att["acc"]["feedback"].get("purchase_date", [None])),
+                                         "retailername": mean_list(request_att["acc"]["feedback"].get("retailername", [None])),
+                                         "sold_to_party": mean_list(request_att["acc"]["feedback"].get("sold_to_party", [None]))}
+            request.reviewed_accuracy = {"imei_number": mean_list(request_att["acc"]["reviewed"].get("imei_number", [None])),
+                                         "purchase_date": mean_list(request_att["acc"]["reviewed"].get("purchase_date", [None])),
+                                         "retailername": mean_list(request_att["acc"]["reviewed"].get("retailername", [None])),
+                                         "sold_to_party": mean_list(request_att["acc"]["reviewed"].get("sold_to_party", [None]))}
             request.save()
             number_images += request_att["total_images"]
             number_bad_images += request_att["bad_images"]
@@ -249,7 +259,7 @@ def make_a_report_2(report_id, query_set):
             num_request += 1
             review_progress += request_att.get("is_reviewed", [])
 
-        report_fine_data, _save_data = report_engine.save(report.report_id, query_set.get("is_daily_report", False), query_set["include_test"])
+        report_fine_data, _save_data = report_engine.save(report.report_id, kwargs.get("is_daily_report", False), kwargs["include_test"])
         transaction_att = count_transactions(start_date, end_date, report.subsidiary)
         # Do saving process
         report.number_request = num_request
@@ -261,8 +271,8 @@ def make_a_report_2(report_id, query_set):
         report.average_OCR_time = {"invoice": time_cost["invoice"](), "imei": time_cost["imei"](),
                                    "invoice_count": time_cost["invoice"].count, "imei_count": time_cost["imei"].count}
 
-        report.average_OCR_time["avg"] = (report.average_OCR_time["invoice"]*report.average_OCR_time["invoice_count"] + report.average_OCR_time["imei"]*report.average_OCR_time["imei_count"])/(report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) if (report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) > 0 else None
-
+        report.average_OCR_time["avg"] = (report.average_OCR_time["invoice"]*report.average_OCR_time["invoice_count"] + report.average_OCR_time["imei"]*report.average_OCR_time["imei_count"])/(
+            report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) if (report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) > 0 else None
         report.number_imei_transaction = transaction_att.get("imei", 0)
         report.number_invoice_transaction = transaction_att.get("invoice", 0)
 
@@ -273,7 +283,7 @@ def make_a_report_2(report_id, query_set):
             avg_acc = IterAvg()
             for key in ["imei_number", "purchase_date", "retailername", "sold_to_party"]:
                 acumulated_acc[acc_type][key] = accuracy[acc_type][key]()
-                acumulated_acc[acc_type][key+"_count"] = accuracy[acc_type][key].count
+                acumulated_acc[acc_type][key + "_count"] = accuracy[acc_type][key].count
                 avg_acc.add_avg(acumulated_acc[acc_type][key], acumulated_acc[acc_type][key+"_count"])
             acumulated_acc[acc_type]["avg"] = avg_acc()
 
@@ -294,8 +304,8 @@ def make_a_report_2(report_id, query_set):
         data = extract_report_detail_list(report_files, lower=True)
         data_workbook = dict2xlsx(data, _type='report_detail')
         local_workbook = save_workbook_file(report.report_id + ".xlsx", report, data_workbook)
-        s3_key=save_report_to_S3(report.report_id, local_workbook, 5)
-        if query_set["is_daily_report"]:
+        s3_key = save_report_to_S3(report.report_id, local_workbook, 5)
+        if kwargs["is_daily_report"]:
             # Save overview dashboard
             # multiple accuracy by 100
             save_data = copy.deepcopy(_save_data)
@@ -313,10 +323,9 @@ def make_a_report_2(report_id, query_set):
                    for x_key in report_fine_data[i][key].keys():
                        report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key]*100
            data_workbook = dict2xlsx(report_fine_data, _type='report')
-           overview_filename = query_set["subsidiary"] + "_" + query_set["report_overview_duration"] + ".xlsx"
+           overview_filename = kwargs["subsidiary"] + "_" + kwargs["report_overview_duration"] + ".xlsx"
            local_workbook = save_workbook_file(overview_filename, report, data_workbook, settings.OVERVIEW_REPORT_ROOT)
-           s3_key=save_report_to_S3(report.report_id, local_workbook)
-           # redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data))
+           s3_key = save_report_to_S3(report.report_id, local_workbook)
            set_cache(overview_filename.replace(".xlsx", ""), save_data)
 
    except IndexError as e:
@@ -327,3 +336,34 @@ def make_a_report_2(report_id, query_set):
         print("[ERROR]: an error occured while processing report: ", report_id)
         traceback.print_exc()
         return 400
+
+
+def create_billing_report(report_id, **kwargs):
+    try:
+        start_date = timezone.datetime.strptime(
+            kwargs["start_date_str"], '%Y-%m-%dT%H:%M:%S%z')
+        end_date = timezone.datetime.strptime(
+            kwargs["end_date_str"], '%Y-%m-%dT%H:%M:%S%z')
+        base_query = Q(created_at__range=(start_date, end_date))
+        base_query &= Q(is_test_request=False)
+
+        subscription_requests = SubscriptionRequest.objects.filter(
+            base_query).order_by('created_at')
+        report: Report = Report.objects.filter(report_id=report_id).first()
+        billing_data = create_billing_data(subscription_requests)
+        report.number_request = len(subscription_requests)
+        report.number_images = len(billing_data)
+        report.status = "Ready"
+        report.save()
+        data_workbook = dict2xlsx(billing_data, _type='billing_report')
+        local_workbook = save_workbook_file(
+            report.report_id + ".xlsx", report, data_workbook)
+        s3_key = save_report_to_S3(report.report_id, local_workbook)
+    except IndexError as e:
+        print(e)
+        traceback.print_exc()
+        print("NotFound request by report id, %d", report_id)
+    except Exception as e:
+        print("[ERROR]: an error occured while processing report: ", report_id)
+        traceback.print_exc()
+        return 400
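Both except blocks in create_billing_report report failures with print; the first one passes "%d" and report_id as two separate arguments, so the placeholder is never interpolated (and the report id is a string, not an integer). A hedged sketch of the same messages routed through the standard logging module instead:

```python
import logging

logger = logging.getLogger(__name__)

# Inside the except blocks of create_billing_report:
logger.error("No requests found for report id %s", report_id)                 # IndexError case
logger.exception("An error occurred while processing report %s", report_id)   # generic failure, includes traceback
```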
cope2n-api/fwd_api/migrations/0185_report_report_type.py (18 lines added, Executable file)
@@ -0,0 +1,18 @@
+# Generated by Django 4.1.3 on 2024-03-06 06:57
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('fwd_api', '0184_caching'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='report',
+            name='report_type',
+            field=models.CharField(choices=[('BILLING', 'billing'), ('ACCURACY', 'accuracy')], default='accuracy', max_length=10),
+        ),
+    ]
cope2n-api/fwd_api/models/Report.py (3 lines changed, Normal file → Executable file)
@@ -2,6 +2,7 @@ from django.db import models
 from django.utils import timezone
 from fwd_api.models.Subscription import Subscription
 
+
 class Report(models.Model):
     # Metadata
     id = models.AutoField(primary_key=True)
@@ -43,3 +44,5 @@
     feedback_accuracy = models.JSONField(null=True)
     reviewed_accuracy = models.JSONField(null=True)
     combined_accuracy = models.JSONField(null=True)
+    report_type = models.CharField(max_length=10, choices=[
+        ("BILLING", "billing"), ("ACCURACY", "accuracy")], default="accuracy")
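In Django choices, the first element of each tuple is the value stored in the database and the second is the human-readable label, so this field declares "BILLING" and "ACCURACY" as valid stored values while the default "accuracy" (and the lowercase values sent by the serializer and view above) match the labels instead. Plain saves are not rejected, but full_clean() and form validation would be. A hedged sketch using TextChoices, assuming lowercase values are what the rest of the code expects:

```python
from django.db import models


class ReportType(models.TextChoices):
    # stored value first, human-readable label second
    BILLING = "billing", "Billing"
    ACCURACY = "accuracy", "Accuracy"


class Report(models.Model):
    # ... existing fields ...
    report_type = models.CharField(
        max_length=10, choices=ReportType.choices, default=ReportType.ACCURACY
    )
```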
cope2n-api/fwd_api/request/ReportCreationSerializer.py (5 lines changed, Normal file → Executable file)
@@ -37,3 +37,8 @@ class ReportCreationSerializer(serializers.Serializer):
         help_text=f'open of {settings.OVERVIEW_REPORT_DURATION}',
         default=None
     )
+    report_type = serializers.ChoiceField(
+        help_text='What type of report to create',
+        choices=['billing', 'accuracy'],
+        default="accuracy"
+    )
cope2n-api/fwd_api/utils/accuracy.py (file name inferred from the create_billing_data import above)
@@ -797,6 +797,53 @@ def acc_maximize_list_values(acc):
         pos[k] = acc[k].index(acc[k][0])
     return acc, pos
 
+
+def create_billing_data(subscription_requests):
+    billing_data = []
+    for request in subscription_requests:
+        if request.status != 200:
+            continue
+        images = SubscriptionRequestFile.objects.filter(request=request, file_category=FileCategory.Origin.value)
+        for image in images:
+            if not image.doc_type:
+                _doc_type = image.file_name.split("_")[1]
+                if _doc_type in ["imei", "invoice"]:
+                    image.doc_type = _doc_type
+                    image.save()
+            else:
+                _doc_type = image.doc_type
+
+            doc_type = "SN/IMEI" if _doc_type == "imei" else "Invoice"
+
+            _sub = ""
+            redemption_id = ""
+            if request.redemption_id:
+                _sub = map_subsidiary_short_to_long(request.redemption_id[:2])
+                redemption_id = request.redemption_id
+
+            format_to_time = '%m/%d/%Y %H:%M'
+            format_to_date = '%m/%d/%Y'
+            format_to_month = '%B %Y'
+
+            rq_created_at = request.created_at
+            print(type(redemption_id))
+            rq_created_at = timezone.make_aware(rq_created_at)
+            print(rq_created_at)
+            rq_month = rq_created_at.strftime(format_to_month)
+            rq_date = rq_created_at.strftime(format_to_date)
+            rq_time = rq_created_at.strftime(format_to_time)
+
+            billing_data.append({
+                "request_month": rq_month,
+                "subsidiary": _sub,
+                "image_type": doc_type,
+                "redemption_number": redemption_id,
+                "request_id": request.request_id,
+                "request_date": rq_date,
+                "request_time_(utc)": rq_time
+            })
+    return billing_data
+
 def calculate_a_request(report, request):
     request_att = {"acc": {"feedback": {"imei_number": [],
                                         "purchase_date": [],
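create_billing_data calls timezone.make_aware on request.created_at unconditionally (and leaves two debug print calls in place). django.utils.timezone.make_aware raises ValueError when the datetime is already aware, which is the normal case for a DateTimeField read back with USE_TZ=True. A hedged guard, assuming aware values should simply be converted to the current time zone:

```python
from django.utils import timezone

rq_created_at = request.created_at
if timezone.is_naive(rq_created_at):
    rq_created_at = timezone.make_aware(rq_created_at)
else:
    rq_created_at = timezone.localtime(rq_created_at)
```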
cope2n-api/fwd_api/utils/file.py (21 lines changed, Normal file → Executable file)
@@ -521,8 +521,23 @@ def dict2xlsx(input: json, _type='report'):
         }
         start_index = 4
 
+    elif _type == 'billing_report':
+        wb = load_workbook(filename = 'billing_report.xlsx')
+        ws = wb['Sheet1']
+        mapping = {
+            'B': 'request_month',
+            'C': 'subsidiary',
+            'D': 'image_type',
+            'E': 'redemption_number',
+            'F': 'request_id',
+            'G': "request_date",
+            'H': "request_time_(utc)"
+        }
+        start_index = 4
+
     for subtotal in input:
         for key in mapping.keys():
+            if _type!="billing_report":
                 value = get_value(subtotal, mapping[key])
                 ws[key + str(start_index)] = value
                 if key in ['C', 'D', 'E'] and value == 0:
@@ -552,6 +567,12 @@ def dict2xlsx(input: json, _type='report'):
                     ws[key + str(start_index)].font = font_red
                 elif 'speed' in mapping[key] and type(value) in [int, float] and value > 2.0:
                     ws[key + str(start_index)].font = font_red
+            else:
+                value = get_value(subtotal, mapping[key])
+                value = "-" if value=="" else value
+                ws[key + str(start_index)] = value
+                ws[key + str(start_index)].border = border
+                ws[key + str(start_index)].font = font_black
 
         start_index += 1
 
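The billing branch opens the template with load_workbook(filename='billing_report.xlsx'), a bare relative path, so it resolves against the worker process's current working directory; the binary cope2n-api/billing_report.xlsx added at the top of this commit is presumably that template. A sketch that pins the path, assuming a conventional settings.BASE_DIR pointing at the cope2n-api project root:

```python
import os

from django.conf import settings
from openpyxl import load_workbook

# Hypothetical: resolve the billing template relative to the project root
# instead of whatever directory the Celery worker happens to run in.
template_path = os.path.join(settings.BASE_DIR, "billing_report.xlsx")
wb = load_workbook(filename=template_path)
```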