Apply changes
This commit is contained in: parent 793205bb41, commit 3b4ded2f6e
@@ -220,6 +220,9 @@ SIZE_TO_COMPRESS = 2 * 1024 * 1024
MAX_NUMBER_OF_TEMPLATE = 3
MAX_PAGES_OF_PDF_FILE = 50

OVERVIEW_REFRESH_INTERVAL = 2
OVERVIEW_REPORT_KEY = "overview"

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
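Editor's note (not part of the diff): OVERVIEW_REPORT_KEY is the Redis key that ReportAccumulateByRequest.save() in accuracy.py below writes the cached overview JSON to. A minimal sketch of the read side, assuming the same REDIS_HOST/REDIS_PORT settings this commit already uses:

    import json
    import redis
    from fwd import settings

    # hypothetical reader; key and connection settings come from this commit
    client = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, decode_responses=True)
    cached = client.get(settings.OVERVIEW_REPORT_KEY)
    overview = json.loads(cached) if cached else None  # {"file": {...}, "data": [...]}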
@@ -256,6 +256,9 @@ class AccuracyViewSet(viewsets.ViewSet):
            "subsidiary": subsidiary,
            "is_daily_report": is_daily_report,
        }
        # if is_daily_report:
        #     if (end_date-start_date) > timezone.timedelta(days=1):
        #         raise InvalidException(excArgs="Date range")

        report_id = "report" + "_" + timezone.datetime.now().strftime("%Y%m%d%H%M%S%z") + "_" + uuid.uuid4().hex
        new_report: Report = Report(
@@ -268,8 +271,6 @@ class AccuracyViewSet(viewsets.ViewSet):
            end_at=end_date,
            status="Processing",
        )
        if is_daily_report:
            new_report.created_at = end_date
        new_report.save()
        # Background job to calculate accuracy
        shadow_report(report_id, query_set)
@@ -380,7 +381,7 @@ class AccuracyViewSet(viewsets.ViewSet):
        page_size = int(request.GET.get('page_size', 10))

        if not start_date_str or not end_date_str:
            reports = Report.objects.all()
            reports = Report.objects.all().order_by('created_at').reverse()
        else:
            try:
                start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
@@ -390,11 +391,10 @@ class AccuracyViewSet(viewsets.ViewSet):
            base_query = Q(created_at__range=(start_date, end_date))
            if daily_report_only:
                base_query &= Q(is_daily_report=True)
            reports = Report.objects.filter(base_query).order_by('created_at')

            reports = Report.objects.filter(base_query).order_by('created_at').reverse()

        paginator = Paginator(reports, page_size)
        page = paginator.get_page(page_number).order_by('created_at')
        page = paginator.get_page(page_number)

        data = []
        for report in page:
@@ -480,12 +480,15 @@ class AccuracyViewSet(viewsets.ViewSet):
                end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
            except ValueError:
                raise InvalidException(excArgs="Date format")
        else:
            end_date = timezone.datetime.now()
            start_date = end_date - timezone.timedelta(days=30)
            base_query &= Q(created_at__range=(start_date, end_date))

        if subsidiary:
            base_query &= Q(subsidiary=subsidiary)
        base_query &= Q(is_daily_report=True)
        reports = Report.objects.filter(base_query).order_by('created_at').reverse()
        reports = Report.objects.filter(base_query).order_by('start_at').reverse()

        paginator = Paginator(reports, page_size)
        page = paginator.get_page(page_number)
@@ -500,8 +503,6 @@ class AccuracyViewSet(viewsets.ViewSet):
                data += _data
                this_month_report = MonthReportAccumulate()
                this_month_report.add(report)
            else:
                continue
        _, _data, total = this_month_report()
        data += [total]
        data += _data
@@ -36,6 +36,8 @@ class CeleryConnector:
        'remove_local_file': {'queue': "remove_local_file"},
        'csv_feedback': {'queue': "csv_feedback"},
        'make_a_report': {'queue': "report"},
        'make_a_report_2': {'queue': "report_2"},

    }
    app = Celery(
@@ -45,6 +47,10 @@ class CeleryConnector:
    )
    def make_a_report(self, args):
        return self.send_task('make_a_report', args)

    def make_a_report_2(self, args):
        return self.send_task('make_a_report_2', args)

    def csv_feedback(self, args):
        return self.send_task('csv_feedback', args)
    def do_pdf(self, args):
@@ -13,6 +13,7 @@ from fwd_api.models import SubscriptionRequestFile, FeedbackRequest, Report
from ..utils import file as FileUtils
from ..utils import process as ProcessUtil
from ..utils import s3 as S3Util
from ..utils.accuracy import validate_feedback_file
from fwd_api.constant.common import ProcessType
import csv
import json
@@ -117,6 +118,9 @@ def process_csv_feedback(csv_file_path, feedback_id):
        _feedback_result = copy.deepcopy(sub_rq.feedback_result)
        _reviewed_result = copy.deepcopy(sub_rq.reviewed_result)
        image.processing_time = time_cost.get(image.doc_type, [0 for _ in range(image.index_in_request)])[image.index_in_request]
        if not validate_feedback_file(_feedback_result, _predict_result):
            status[request_id] = "Misaligned IMEI numbers between feedback and predict"
            continue
        if image.doc_type == "invoice":
            _predict_result["imei_number"] = []
        if _feedback_result:
@@ -3,8 +3,9 @@ import traceback
from fwd_api.models import SubscriptionRequest, Report, ReportFile
from fwd_api.celery_worker.worker import app
from ..utils import s3 as S3Util
from ..utils.accuracy import update_temp_accuracy, IterAvg, calculate_and_save_subcription_file, count_transactions, extract_report_detail_list
from ..utils.accuracy import update_temp_accuracy, IterAvg, calculate_and_save_subcription_file, count_transactions, extract_report_detail_list, calculate_a_request, ReportAccumulateByRequest
from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3
from ..utils import time_stuff
from django.utils import timezone
from django.db.models import Q

@@ -29,6 +30,7 @@ def mean_list(l):

@app.task(name='make_a_report')
def make_a_report(report_id, query_set):
    # TODO: to be deprecated
    try:
        start_date = timezone.datetime.strptime(query_set["start_date_str"], '%Y-%m-%dT%H:%M:%S%z')
        end_date = timezone.datetime.strptime(query_set["end_date_str"], '%Y-%m-%dT%H:%M:%S%z')
@@ -152,3 +154,134 @@ def make_a_report(report_id, query_set):
        print("[ERROR]: an error occurred while processing report: ", report_id)
        traceback.print_exc()
        return 400

@app.task(name='make_a_report_2')
def make_a_report_2(report_id, query_set):
    try:
        start_date = timezone.datetime.strptime(query_set["start_date_str"], '%Y-%m-%dT%H:%M:%S%z')
        end_date = timezone.datetime.strptime(query_set["end_date_str"], '%Y-%m-%dT%H:%M:%S%z')
        base_query = Q(created_at__range=(start_date, end_date))
        if query_set["request_id"]:
            base_query &= Q(request_id=query_set["request_id"])
        if query_set["redemption_id"]:
            base_query &= Q(redemption_id=query_set["redemption_id"])
        base_query &= Q(is_test_request=False)
        if isinstance(query_set["include_test"], str):
            query_set["include_test"] = True if query_set["include_test"].lower() in ["true", "yes", "1"] else False
            if query_set["include_test"]:
                # base_query = ~base_query
                base_query.children = base_query.children[:-1]
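                # Editor's note: Q objects keep their combined clauses in .children in
                # insertion order, so dropping the last entry undoes the
                # Q(is_test_request=False) filter appended just above, letting test
                # requests back into the result set.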

        elif isinstance(query_set["include_test"], bool):
            if query_set["include_test"]:
                base_query = ~base_query
        if isinstance(query_set["subsidiary"], str):
            if query_set["subsidiary"] and query_set["subsidiary"].lower().replace(" ", "") != "all":
                base_query &= Q(redemption_id__startswith=query_set["subsidiary"])
        if isinstance(query_set["is_reviewed"], str):
            if query_set["is_reviewed"] == "reviewed":
                base_query &= Q(is_reviewed=True)
            elif query_set["is_reviewed"] == "not reviewed":
                base_query &= Q(is_reviewed=False)
            # elif query_set["is_reviewed"] == "all":
            #     pass

        errors = []
        # Create a placeholder to fill
        accuracy = {"feedback": {"imei_number": IterAvg(),
                                 "purchase_date": IterAvg(),
                                 "retailername": IterAvg(),
                                 "sold_to_party": IterAvg(),},
                    "reviewed": {"imei_number": IterAvg(),
                                 "purchase_date": IterAvg(),
                                 "retailername": IterAvg(),
                                 "sold_to_party": IterAvg(),}
                    }  # {"imei": {"acc": 0.1, count: 1}, ...}
        time_cost = {"invoice": IterAvg(),
                     "imei": IterAvg()}
        number_images = 0
        number_bad_images = 0
        # TODO: Multithreading
        # Calculate accuracy, processing time, ... then save.
        subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')
        report: Report = \
            Report.objects.filter(report_id=report_id).first()
        # TODO: number of transactions by doc type
        num_request = 0
        report_files = []
        report_engine = ReportAccumulateByRequest(report.subsidiary)
        for request in subscription_requests:
            if request.status != 200 or not (request.reviewed_result or request.feedback_result):
                # Failed requests, or requests lacking reviewed_result/feedback_result
                continue
            request_att, _report_files = calculate_a_request(report, request)
            report_files += _report_files
            report_engine.add(request, _report_files)
            request.feedback_accuracy = {"imei_number": mean_list(request_att["acc"]["feedback"].get("imei_number", [None])),
                                         "purchase_date": mean_list(request_att["acc"]["feedback"].get("purchase_date", [None])),
                                         "retailername": mean_list(request_att["acc"]["feedback"].get("retailername", [None])),
                                         "sold_to_party": mean_list(request_att["acc"]["feedback"].get("sold_to_party", [None]))}
            request.reviewed_accuracy = {"imei_number": mean_list(request_att["acc"]["reviewed"].get("imei_number", [None])),
                                         "purchase_date": mean_list(request_att["acc"]["reviewed"].get("purchase_date", [None])),
                                         "retailername": mean_list(request_att["acc"]["reviewed"].get("retailername", [None])),
                                         "sold_to_party": mean_list(request_att["acc"]["reviewed"].get("sold_to_party", [None]))}
            request.save()
            number_images += request_att["total_images"]
            number_bad_images += request_att["bad_images"]
            update_temp_accuracy(accuracy["feedback"], request_att["acc"]["feedback"], keys=["imei_number", "purchase_date", "retailername", "sold_to_party"])
            update_temp_accuracy(accuracy["reviewed"], request_att["acc"]["reviewed"], keys=["imei_number", "purchase_date", "retailername", "sold_to_party"])

            time_cost["imei"].add(request_att["time_cost"].get("imei", []))
            time_cost["invoice"].add(request_att["time_cost"].get("invoice", []))

            errors += request_att["err"]
            num_request += 1

        report_engine.save(query_set.get("is_daily_report", False), query_set["include_test"])
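        # Editor's note: as defined in accuracy.py below,
        # ReportAccumulateByRequest.save() takes (root_report_id, is_daily_report,
        # include_test); this call passes only two arguments, so is_daily_report
        # lands in the root_report_id slot.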
        transaction_att = count_transactions(start_date, end_date)
        # Do saving process
        report.number_request = num_request
        report.number_images = number_images
        report.number_imei = time_cost["imei"].count
        report.number_invoice = time_cost["invoice"].count
        report.number_bad_images = number_bad_images
        # FIXME: refactor this data stream for durability
        report.average_OCR_time = {"invoice": time_cost["invoice"](), "imei": time_cost["imei"](),
                                   "invoice_count": time_cost["invoice"].count, "imei_count": time_cost["imei"].count}

        report.average_OCR_time["avg"] = (report.average_OCR_time["invoice"]*report.average_OCR_time["invoice_count"] + report.average_OCR_time["imei"]*report.average_OCR_time["imei_count"])/(report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) if (report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) > 0 else None
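        # Editor's note: this is the image-count-weighted mean,
        # avg = (t_invoice*n_invoice + t_imei*n_imei) / (n_invoice + n_imei),
        # falling back to None when no timed images exist so the division
        # never sees a zero denominator.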

        report.number_imei_transaction = transaction_att.get("imei", 0)
        report.number_invoice_transaction = transaction_att.get("invoice", 0)

        acumulated_acc = {"feedback": {},
                          "reviewed": {}}
        for acc_type in ["feedback", "reviewed"]:
            avg_acc = IterAvg()
            for key in ["imei_number", "purchase_date", "retailername", "sold_to_party"]:
                acumulated_acc[acc_type][key] = accuracy[acc_type][key]()
                acumulated_acc[acc_type][key+"_count"] = accuracy[acc_type][key].count
                avg_acc.add_avg(acumulated_acc[acc_type][key], acumulated_acc[acc_type][key+"_count"])
            acumulated_acc[acc_type]["avg"] = avg_acc()

        report.feedback_accuracy = acumulated_acc["feedback"]
        report.reviewed_accuracy = acumulated_acc["reviewed"]

        report.errors = "|".join(errors)
        report.status = "Ready"
        report.save()
        # Save an xlsx file
        report_files = ReportFile.objects.filter(report=report)
        data = extract_report_detail_list(report_files, lower=True)
        data_workbook = dict2xlsx(data, _type='report_detail')
        local_workbook = save_workbook_file(report.report_id + ".xlsx", report, data_workbook)
        s3_key = save_report_to_S3(report.report_id, local_workbook)

    except IndexError as e:
        print(e)
        traceback.print_exc()
        print("NotFound request by report id, %d", report_id)
    except Exception as e:
        print("[ERROR]: an error occurred while processing report: ", report_id)
        traceback.print_exc()
        return 400
@@ -42,7 +42,7 @@ app.conf.update({
        Queue('remove_local_file'),
        Queue('csv_feedback'),
        Queue('report'),

        Queue('report_2'),
    ],
    'task_routes': {
        'process_sap_invoice_result': {'queue': 'invoice_sap_rs'},
@@ -61,6 +61,7 @@ app.conf.update({
        'remove_local_file': {'queue': "remove_local_file"},
        'csv_feedback': {'queue': "csv_feedback"},
        'make_a_report': {'queue': "report"},
        'make_a_report_2': {'queue': "report_2"},
    }
})
@@ -0,0 +1,18 @@
# Generated by Django 4.1.3 on 2024-02-04 23:32

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('fwd_api', '0178_alter_reportfile_acc'),
    ]

    operations = [
        migrations.AddField(
            model_name='reportfile',
            name='is_bad_image',
            field=models.BooleanField(default=False),
        ),
    ]
@@ -0,0 +1,18 @@
# Generated by Django 4.1.3 on 2024-02-05 02:44

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('fwd_api', '0179_reportfile_is_bad_image'),
    ]

    operations = [
        migrations.AlterField(
            model_name='reportfile',
            name='time_cost',
            field=models.FloatField(default=None, null=True),
        ),
    ]
@@ -16,6 +16,7 @@ class ReportFile(models.Model):
    # Data
    S3_uploaded = models.BooleanField(default=False)
    doc_type = models.CharField(max_length=200)
    is_bad_image = models.BooleanField(default=False)

    predict_result = models.JSONField(null=True)
    feedback_result = models.JSONField(null=True)
@@ -25,7 +26,7 @@ class ReportFile(models.Model):
    reviewed_accuracy = models.JSONField(null=True)
    acc = models.FloatField(default=0, null=True)

    time_cost = models.FloatField(default=0)
    time_cost = models.FloatField(default=None, null=True)
    is_reviewed = models.CharField(default="NA", max_length=5)  # NA, No, Yes
    bad_image_reason = models.TextField(default="")
    counter_measures = models.TextField(default="")
@@ -5,14 +5,301 @@ import copy
from typing import Any
from .ocr_utils.ocr_metrics import eval_ocr_metric
from .ocr_utils.sbt_report import post_processing_str
import uuid
from fwd_api.models import SubscriptionRequest, SubscriptionRequestFile, ReportFile
from ..celery_worker.client_connector import c_connector
from django.db.models import Q
from django.utils import timezone
import redis
from fwd import settings
from ..models import SubscriptionRequest, Report, ReportFile
import json

BAD_THRESHOLD = 0.75

valid_keys = ["retailername", "sold_to_party", "purchase_date", "imei_number"]

class ReportAccumulateByRequest:
    def __init__(self, sub):
        self.redis_client = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, decode_responses=True)
        self.sub = sub
        self.current_time = None
        self.data = {}  # {"month": [total, {"day": day_data}]}
        self.total_format = {
            'subs': "+",
            'extraction_date': "Subtotal ()",
            'total_images': 0,
            'images_quality': {
                'successful': 0,
                'successful_percent': 0,
                'bad': 0,
                'bad_percent': 0
            },
            'average_accuracy_rate': {
                'imei': IterAvg(),
                'purchase_date': IterAvg(),
                'retailer_name': IterAvg(),
                'sold_to_party': IterAvg()
            },
            'average_processing_time': {
                'imei': IterAvg(),
                'invoice': IterAvg()
            },
            'usage': {
                'imei': 0,
                'invoice': 0,
                'request': 0
            },
            'feedback_accuracy': {
                'imei_number': IterAvg(),
                'purchase_date': IterAvg(),
                'retailername': IterAvg(),
                'sold_to_party': IterAvg()
            },
            'reviewed_accuracy': {
                'imei_number': IterAvg(),
                'purchase_date': IterAvg(),
                'retailername': IterAvg(),
                'sold_to_party': IterAvg()
            },
            'num_request': 0
        }
        self.day_format = {
            'subs': sub,
            'extraction_date': "",
            'num_imei': 0,
            'num_invoice': 0,
            'total_images': 0,
            'images_quality': {
                'successful': 0,
                'successful_percent': 0,
                'bad': 0,
                'bad_percent': 0
            },
            'average_accuracy_rate': {
                'imei': IterAvg(),
                'purchase_date': IterAvg(),
                'retailer_name': IterAvg(),
                'sold_to_party': IterAvg()
            },
            'average_processing_time': {
                'imei': IterAvg(),
                'invoice': IterAvg()
            },
            'usage': {
                'imei': 0,
                'invoice': 0,
                'request': 0
            },
            'feedback_accuracy': {
                'imei_number': IterAvg(),
                'purchase_date': IterAvg(),
                'retailername': IterAvg(),
                'sold_to_party': IterAvg()
            },
            'reviewed_accuracy': {
                'imei_number': IterAvg(),
                'purchase_date': IterAvg(),
                'retailername': IterAvg(),
                'sold_to_party': IterAvg()
            },
            "report_files": [],
            'num_request': 0
        },

    @staticmethod
    def update_total(total, report_file):
        total["total_images"] += 1
        total["images_quality"]["successful"] += 1 if not report_file.is_bad_image else 0
        total["images_quality"]["bad"] += 1 if report_file.is_bad_image else 0

        print(f"[DEBUG]: report_file.reviewed_accuracy: {report_file.reviewed_accuracy}")
        print(f"[DEBUG]: report_file.feedback_accuracy: {report_file.feedback_accuracy}")

        if sum([len(report_file.reviewed_accuracy[x]) for x in report_file.reviewed_accuracy.keys() if "_count" not in x]) > 0:
            total["average_accuracy_rate"]["imei"].add(report_file.reviewed_accuracy.get("imei_number", []))
            total["average_accuracy_rate"]["purchase_date"].add(report_file.reviewed_accuracy.get("purchase_date", []))
            total["average_accuracy_rate"]["retailer_name"].add(report_file.reviewed_accuracy.get("retailername", []))
            total["average_accuracy_rate"]["sold_to_party"].add(report_file.reviewed_accuracy.get("sold_to_party", []))
        elif sum([len(report_file.feedback_accuracy[x]) for x in report_file.feedback_accuracy.keys() if "_count" not in x]) > 0:
            total["average_accuracy_rate"]["imei"].add(report_file.feedback_accuracy.get("imei_number", []))
            total["average_accuracy_rate"]["purchase_date"].add(report_file.feedback_accuracy.get("purchase_date", []))
            total["average_accuracy_rate"]["retailer_name"].add(report_file.feedback_accuracy.get("retailername", []))
            total["average_accuracy_rate"]["sold_to_party"].add(report_file.feedback_accuracy.get("sold_to_party", []))

        for key in ["imei_number", "purchase_date", "retailername", "sold_to_party"]:
            total["feedback_accuracy"][key].add(report_file.feedback_accuracy.get(key, []))
        for key in ["imei_number", "purchase_date", "retailername", "sold_to_party"]:
            total["reviewed_accuracy"][key].add(report_file.reviewed_accuracy.get(key, []))

        if not total["average_processing_time"].get(report_file.doc_type, None):
            print(f"[WARN]: Weird doc_type: {report_file.doc_type}")
            total["average_processing_time"] = IterAvg()
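            # Editor's note: this branch replaces the whole per-doc-type dict with a
            # single IterAvg, so the indexed .add_avg() on the next line would still
            # fail for the unknown doc_type; it only triggers on unexpected doc types.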
total["average_processing_time"][report_file.doc_type].add_avg(report_file.time_cost, 1) if report_file.time_cost else 0
|
||||
|
||||
total["usage"]["imei"] += 1 if report_file.doc_type == "imei" else 0
|
||||
total["usage"]["invoice"] += 1 if report_file.doc_type == "invoice" else 0
|
||||
|
||||
return total
|
||||
|
||||
@staticmethod
|
||||
def update_day(day_data, report_file):
|
||||
day_data["total_images"] += 1
|
||||
day_data["images_quality"]["successful"] += 1 if not report_file.is_bad_image else 0
|
||||
day_data["images_quality"]["bad"] += 1 if report_file.is_bad_image else 0
|
||||
day_data["num_imei"] += 1 if report_file.doc_type == "imei" else 0
|
||||
day_data["num_invoice"] += 1 if report_file.doc_type == "invoice" else 0
|
||||
|
||||
if sum([len(report_file.reviewed_accuracy[x]) for x in report_file.reviewed_accuracy.keys() if "_count" not in x]) > 0 :
|
||||
day_data["average_accuracy_rate"]["imei"].add(report_file.reviewed_accuracy.get("imei_number", 0))
|
||||
day_data["average_accuracy_rate"]["purchase_date"].add(report_file.reviewed_accuracy.get("purchase_date", 0))
|
||||
day_data["average_accuracy_rate"]["retailer_name"].add(report_file.reviewed_accuracy.get("retailername", 0))
|
||||
day_data["average_accuracy_rate"]["sold_to_party"].add(report_file.reviewed_accuracy.get("sold_to_party", 0))
|
||||
elif sum([len(report_file.feedback_accuracy[x]) for x in report_file.feedback_accuracy.keys() if "_count" not in x]) > 0:
|
||||
day_data["average_accuracy_rate"]["imei"].add(report_file.feedback_accuracy.get("imei_number", 0))
|
||||
day_data["average_accuracy_rate"]["purchase_date"].add(report_file.feedback_accuracy.get("purchase_date", 0))
|
||||
day_data["average_accuracy_rate"]["retailer_name"].add(report_file.feedback_accuracy.get("retailername", 0))
|
||||
day_data["average_accuracy_rate"]["sold_to_party"].add(report_file.feedback_accuracy.get("sold_to_party", 0))
|
||||
|
||||
for key in ["imei_number", "purchase_date", "retailername", "sold_to_party"]:
|
||||
day_data["feedback_accuracy"][key].add(report_file.feedback_accuracy.get(key, 0))
|
||||
for key in ["imei_number", "purchase_date", "retailername", "sold_to_party"]:
|
||||
day_data["reviewed_accuracy"][key].add(report_file.reviewed_accuracy.get(key, 0))
|
||||
|
||||
if not day_data["average_processing_time"].get(report_file.doc_type, None):
|
||||
print(f"[WARM]: Weird doctype: {report_file.doc_type}")
|
||||
day_data["average_processing_time"] = IterAvg()
|
||||
day_data["average_processing_time"][report_file.doc_type].add_avg(report_file.time_cost, 1) if report_file.time_cost else 0
|
||||
|
||||
return day_data
|
||||
|
||||
def add(self, request, report_files):
|
||||
this_month = request.created_at.strftime("%Y%m")
|
||||
this_day = request.created_at.strftime("%Y%m%d")
|
||||
if not self.data.get(this_month, None):
|
||||
self.data[this_month] = [copy.deepcopy(self.total_format), {}]
|
||||
if not self.data[this_month][1].get(this_day, None):
|
||||
self.data[this_month][1][this_day] = copy.deepcopy(self.day_format)[0]
|
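            # Editor's note: the trailing comma after day_format's closing brace in
            # __init__ makes self.day_format a one-element tuple, hence the [0]
            # here to unwrap the dict before copying it.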
            self.data[this_month][1][this_day]['extraction_date'] = request.created_at.strftime("%Y-%m-%d")
            usage = self.count_transactions_within_day(this_day)
            self.data[this_month][1][this_day]["usage"]["imei"] = usage.get("imei", 0)
            self.data[this_month][1][this_day]["usage"]["invoice"] = usage.get("invoice", 0)
            self.data[this_month][1][this_day]["usage"]["request"] = usage.get("request", 0)

        self.data[this_month][1][this_day]['num_request'] += 1
        self.data[this_month][0]['num_request'] += 1
        for report_file in report_files:
            self.data[this_month][0] = self.update_total(self.data[this_month][0], report_file)  # Update the subtotal within the month
            self.data[this_month][1][this_day] = self.update_day(self.data[this_month][1][this_day], report_file)  # Update the subtotal of the day
        # save report detail

    def count_transactions_within_day(self, date_string):
        # convert this day into timezone.datetime at UTC
        start_date = datetime.strptime(date_string, "%Y%m%d")
        start_date_utc = timezone.make_aware(start_date, timezone=timezone.utc)
        end_date_utc = start_date_utc + timezone.timedelta(days=1)
        return count_transactions(start_date_utc, end_date_utc)
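    # Editor's note: day boundaries here are midnight-to-midnight UTC, so requests
    # near local midnight can be counted under the neighboring day; this also
    # assumes `from datetime import datetime` is in scope in this module (it is
    # not among the imports shown in this hunk).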

    def save(self, root_report_id, is_daily_report=False, include_test=False):
        report_data = self.get()
        fine_data = []
        save_data = {"file": {"overview": f"{root_report_id}/{root_report_id}.xlsx"},
                     "data": fine_data}  # {"sub_report_id": "S3 location", "data": fine_data}
        # extract data
        for month in report_data.keys():
            fine_data.append(report_data[month][0])
            for day in report_data[month][1].keys():
                fine_data.append(report_data[month][1][day])
                # save daily reports
                report_id = root_report_id + "_" + day
                start_date = datetime.strptime(day, "%Y%m%d")
                start_date_utc = timezone.make_aware(start_date, timezone=timezone.utc)
                end_date_utc = start_date_utc + timezone.timedelta(days=1)
                _average_OCR_time = {"invoice": self.data[month][1][day]["average_processing_time"]["invoice"](), "imei": self.data[month][1][day]["average_processing_time"]["imei"](),
                                     "invoice_count": self.data[month][1][day]["average_processing_time"]["invoice"].count, "imei_count": self.data[month][1][day]["average_processing_time"]["imei"].count}

                _average_OCR_time["avg"] = (_average_OCR_time["invoice"]*_average_OCR_time["invoice_count"] + _average_OCR_time["imei"]*_average_OCR_time["imei_count"])/(_average_OCR_time["imei_count"] + _average_OCR_time["invoice_count"]) if (_average_OCR_time["imei_count"] + _average_OCR_time["invoice_count"]) > 0 else None
                acumulated_acc = {"feedback_accuracy": {},
                                  "reviewed_accuracy": {}}
                for acc_type in ["feedback_accuracy", "reviewed_accuracy"]:
                    avg_acc = IterAvg()
                    for key in ["imei_number", "purchase_date", "retailername", "sold_to_party"]:
                        acumulated_acc[acc_type][key] = self.data[month][1][day][acc_type][key]()
                        acumulated_acc[acc_type][key+"_count"] = self.data[month][1][day][acc_type][key].count
                        avg_acc.add_avg(acumulated_acc[acc_type][key], acumulated_acc[acc_type][key+"_count"])
                    acumulated_acc[acc_type]["avg"] = avg_acc()
                    acumulated_acc[acc_type]["avg_count"] = avg_acc.count
                new_report: Report = Report(
                    report_id=report_id,
                    is_daily_report=is_daily_report,
                    subsidiary=self.sub.lower().replace(" ", ""),
                    include_test=include_test,
                    start_at=start_date_utc,
                    end_at=end_date_utc,
                    status="Ready",
                    number_request=report_data[month][1][day]["num_request"],
                    number_images=report_data[month][1][day]["total_images"],
                    number_imei=report_data[month][1][day]["num_imei"],
                    number_invoice=report_data[month][1][day]["num_invoice"],
                    number_bad_images=report_data[month][1][day]["images_quality"]["bad"],
                    average_OCR_time=_average_OCR_time,
                    number_imei_transaction=report_data[month][1][day]["usage"]["imei"],
                    number_invoice_transaction=report_data[month][1][day]["usage"]["invoice"],
                    feedback_accuracy=acumulated_acc["feedback_accuracy"],
                    reviewed_accuracy=acumulated_acc["reviewed_accuracy"],
                )
                new_report.save()
        # save data to redis for overview retrieval
        self.redis_client.set(settings.OVERVIEW_REPORT_KEY, json.dumps(save_data))
        print(f'[DEBUG]: fine_data: {fine_data}')

    def get(self) -> Any:
        # FIXME: this looks like junk
        _data = copy.deepcopy(self.data)
        for month in _data.keys():
            num_transaction_imei = 0
            num_transaction_invoice = 0
            for day in _data[month][1].keys():
                num_transaction_imei += _data[month][1][day]["usage"].get("imei", 0)
                num_transaction_invoice += _data[month][1][day]["usage"].get("invoice", 0)
                _data[month][1][day]["average_accuracy_rate"]["imei"] = _data[month][1][day]["average_accuracy_rate"]["imei"]()
                _data[month][1][day]["average_accuracy_rate"]["purchase_date"] = _data[month][1][day]["average_accuracy_rate"]["purchase_date"]()
                _data[month][1][day]["average_accuracy_rate"]["retailer_name"] = _data[month][1][day]["average_accuracy_rate"]["retailer_name"]()
                _data[month][1][day]["average_accuracy_rate"]["sold_to_party"] = _data[month][1][day]["average_accuracy_rate"]["sold_to_party"]()
                _data[month][1][day]["average_processing_time"]["imei"] = _data[month][1][day]["average_processing_time"]["imei"]()
                _data[month][1][day]["average_processing_time"]["invoice"] = _data[month][1][day]["average_processing_time"]["invoice"]()

                _data[month][1][day]["feedback_accuracy"]["imei_number"] = _data[month][1][day]["feedback_accuracy"]["imei_number"]()
                _data[month][1][day]["feedback_accuracy"]["purchase_date"] = _data[month][1][day]["feedback_accuracy"]["purchase_date"]()
                _data[month][1][day]["feedback_accuracy"]["retailername"] = _data[month][1][day]["feedback_accuracy"]["retailername"]()
                _data[month][1][day]["feedback_accuracy"]["sold_to_party"] = _data[month][1][day]["feedback_accuracy"]["sold_to_party"]()
                _data[month][1][day]["reviewed_accuracy"]["imei_number"] = _data[month][1][day]["reviewed_accuracy"]["imei_number"]()
                _data[month][1][day]["reviewed_accuracy"]["purchase_date"] = _data[month][1][day]["reviewed_accuracy"]["purchase_date"]()
                _data[month][1][day]["reviewed_accuracy"]["retailername"] = _data[month][1][day]["reviewed_accuracy"]["retailername"]()
                _data[month][1][day]["reviewed_accuracy"]["sold_to_party"] = _data[month][1][day]["reviewed_accuracy"]["sold_to_party"]()

            _data[month][0]["usage"]["imei"] = num_transaction_imei
            _data[month][0]["usage"]["invoice"] = num_transaction_invoice
            _data[month][0]["average_accuracy_rate"]["imei"] = _data[month][0]["average_accuracy_rate"]["imei"]()
            _data[month][0]["average_accuracy_rate"]["purchase_date"] = _data[month][0]["average_accuracy_rate"]["purchase_date"]()
            _data[month][0]["average_accuracy_rate"]["retailer_name"] = _data[month][0]["average_accuracy_rate"]["retailer_name"]()
            _data[month][0]["average_accuracy_rate"]["sold_to_party"] = _data[month][0]["average_accuracy_rate"]["sold_to_party"]()
            _data[month][0]["average_processing_time"]["imei"] = _data[month][0]["average_processing_time"]["imei"]()
            _data[month][0]["average_processing_time"]["invoice"] = _data[month][0]["average_processing_time"]["invoice"]()

            _data[month][0]["feedback_accuracy"]["imei_number"] = _data[month][0]["feedback_accuracy"]["imei_number"]()
            _data[month][0]["feedback_accuracy"]["purchase_date"] = _data[month][0]["feedback_accuracy"]["purchase_date"]()
            _data[month][0]["feedback_accuracy"]["retailername"] = _data[month][0]["feedback_accuracy"]["retailername"]()
            _data[month][0]["feedback_accuracy"]["sold_to_party"] = _data[month][0]["feedback_accuracy"]["sold_to_party"]()
            _data[month][0]["reviewed_accuracy"]["imei_number"] = _data[month][0]["reviewed_accuracy"]["imei_number"]()
            _data[month][0]["reviewed_accuracy"]["purchase_date"] = _data[month][0]["reviewed_accuracy"]["purchase_date"]()
            _data[month][0]["reviewed_accuracy"]["retailername"] = _data[month][0]["reviewed_accuracy"]["retailername"]()
            _data[month][0]["reviewed_accuracy"]["sold_to_party"] = _data[month][0]["reviewed_accuracy"]["sold_to_party"]()

        return _data


class MonthReportAccumulate:
    def __init__(self):
        self.month = None
@@ -89,7 +376,7 @@ class MonthReportAccumulate:
        self.total["usage"]["invoice"] += report.number_invoice_transaction

    def add(self, report):
        report_month = report.created_at.month
        report_month = report.start_at.month

        if self.month is None:
            self.month = report_month
@@ -103,7 +390,7 @@ class MonthReportAccumulate:
            new_data = copy.deepcopy(self.data_format)[0]
            new_data["num_imei"] = report.number_imei
            new_data["subs"] = report.subsidiary
            new_data["extraction_date"] = report.created_at
            new_data["extraction_date"] = report.start_at
            new_data["num_invoice"] = report.number_invoice
            new_data["total_images"] = report.number_images
            new_data["images_quality"]["successful"] = report.number_images - report.number_bad_images
@@ -195,6 +482,16 @@ class IterAvg:
    def __call__(self):
        return self.avg
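Editor's sketch (not part of the diff): from its usage throughout this commit, IterAvg behaves as a streaming mean — add() folds in a list of raw values, add_avg() folds in a precomputed (mean, count) pair, .count tracks samples, and calling the instance returns the running mean. A minimal version consistent with that usage:

    class IterAvg:
        def __init__(self, default=0):
            self.avg = default   # running mean
            self.count = 0       # samples folded in so far

        def add(self, values):
            # fold a list of raw values into the running mean
            for v in values:
                if v is None:
                    continue
                self.count += 1
                self.avg += (v - self.avg) / self.count

        def add_avg(self, avg, count):
            # fold a precomputed (mean, count) pair into the running mean
            if avg is None or not count:
                return
            total = self.avg * self.count + avg * count
            self.count += count
            self.avg = total / self.count

        def __call__(self):
            return self.avg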

def validate_feedback_file(feedback, predict):
    if feedback:
        imei_feedback = feedback.get("imei_number", [])
        imei_feedback = [x for x in imei_feedback if x != ""]
        num_imei_feedback = len(imei_feedback)
        num_imei_predict = len(predict.get("imei_number", []))
        if num_imei_feedback != num_imei_predict:
            return False
    return True

def first_of_list(the_list):
    if not the_list:
        return None
@@ -254,6 +551,10 @@ def count_transactions(start_date, end_date):
            transaction_att[doc_type] = 1
        else:
            transaction_att[doc_type] += 1
        if not transaction_att.get("request", None):
            transaction_att["request"] = 1
        else:
            transaction_att["request"] += 1
    return transaction_att
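# Editor's note: transaction_att ends up shaped like
# {"imei": <count>, "invoice": <count>, "request": <count>}, which is what
# make_a_report_2 reads back via transaction_att.get("imei", 0) above.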

def convert_datetime_format(date_string: str, is_gt=False) -> str:
@@ -387,6 +688,7 @@ def calculate_and_save_subcription_file(report, request):
            reviewed_accuracy=att["acc"]["reviewed"],
            acc=att["avg_acc"],
            time_cost=image.processing_time,
            is_bad_image=att["is_bad_image"],
            bad_image_reason=image.reason,
            counter_measures=image.counter_measures,
            error="|".join(att["err"])
@@ -416,6 +718,72 @@ def calculate_and_save_subcription_file(report, request):

    return request_att

def calculate_a_request(report, request):
    request_att = {"acc": {"feedback": {"imei_number": [],
                                        "purchase_date": [],
                                        "retailername": [],
                                        "sold_to_party": [],
                                        },
                           "reviewed": {"imei_number": [],
                                        "purchase_date": [],
                                        "retailername": [],
                                        "sold_to_party": [],
                                        }},
                   "err": [],
                   "time_cost": {},
                   "total_images": 0,
                   "bad_images": 0}
    images = SubscriptionRequestFile.objects.filter(request=request)
    report_files = []
    for image in images:
        status, att = calculate_subcription_file(image)
        if status != 200:
            continue
        image.feedback_accuracy = att["acc"]["feedback"]
        image.reviewed_accuracy = att["acc"]["reviewed"]
        image.is_bad_image_quality = att["is_bad_image"]
        image.save()
        new_report_file = ReportFile(report=report,
                                     correspond_request_id=request.request_id,
                                     correspond_redemption_id=request.redemption_id,
                                     doc_type=image.doc_type,
                                     predict_result=image.predict_result,
                                     feedback_result=image.feedback_result,
                                     reviewed_result=image.reviewed_result,
                                     feedback_accuracy=att["acc"]["feedback"],
                                     reviewed_accuracy=att["acc"]["reviewed"],
                                     acc=att["avg_acc"],
                                     is_bad_image=att["is_bad_image"],
                                     time_cost=image.processing_time,
                                     bad_image_reason=image.reason,
                                     counter_measures=image.counter_measures,
                                     error="|".join(att["err"])
                                     )
        report_files.append(new_report_file)
        if request_att["time_cost"].get(image.doc_type, None):
            request_att["time_cost"][image.doc_type].append(image.processing_time)
        else:
            request_att["time_cost"][image.doc_type] = [image.processing_time]
        try:
            request_att["acc"]["feedback"]["imei_number"] += att["acc"]["feedback"]["imei_number"]
            request_att["acc"]["feedback"]["purchase_date"] += att["acc"]["feedback"]["purchase_date"]
            request_att["acc"]["feedback"]["retailername"] += att["acc"]["feedback"]["retailername"]
            request_att["acc"]["feedback"]["sold_to_party"] += att["acc"]["feedback"]["sold_to_party"]

            request_att["acc"]["reviewed"]["imei_number"] += att["acc"]["reviewed"]["imei_number"]
            request_att["acc"]["reviewed"]["purchase_date"] += att["acc"]["reviewed"]["purchase_date"]
            request_att["acc"]["reviewed"]["retailername"] += att["acc"]["reviewed"]["retailername"]
            request_att["acc"]["reviewed"]["sold_to_party"] += att["acc"]["reviewed"]["sold_to_party"]

            request_att["bad_images"] += int(att["is_bad_image"])
            request_att["total_images"] += 1
            request_att["err"] += att["err"]
        except Exception as e:
            print(e)
            continue

    return request_att, report_files


def calculate_subcription_file(subcription_request_file):
    att = {"acc": {"feedback": {},
@@ -518,5 +886,5 @@ def calculate_attributions(request):  # for one request, return in order

    return acc, data, time_cost, image_quality_num, error

def shadow_report(report_id, query):
    c_connector.make_a_report(
    c_connector.make_a_report_2(
        (report_id, query))
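Editor's note (not part of the diff): with this change shadow_report dispatches to the new make_a_report_2 Celery task, which CeleryConnector's task_routes above send to the report_2 queue. Roughly equivalent direct dispatch, for illustration:

    # hypothetical: mirrors what CeleryConnector.send_task does with its routing table
    c_connector.app.send_task('make_a_report_2', (report_id, query), queue='report_2')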
@@ -13,8 +13,8 @@ class RedisUtils:
        request_id: str
        data: dict
        image_index: int
        """
        self.redis_client.hset(request_id, image_index, json.dumps(data))
        self.redis_client.expire(request_id, 3600)
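        # Editor's note: the added expire() gives each cached request hash a
        # one-hour TTL (3600 s), so per-image results no longer accumulate in
        # Redis indefinitely.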

    def get_all_cache(self, request_id):
cope2n-api/fwd_api/utils/time_stuff.py (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
def is_the_same_day(first_day, second_day):
|
||||
if first_day.day == second_day.day and first_day.month == second_day.month and first_day.year == second_day.year:
|
||||
return True
|
||||
return False
|
||||
|
||||
def is_the_same_month(first_day, second_day):
|
||||
if first_day.month == second_day.month and first_day.year == second_day.year:
|
||||
return True
|
||||
return False
|
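Editor's usage sketch (not part of the diff), assuming naive datetime inputs:

    from datetime import datetime

    a = datetime(2024, 2, 4, 1, 0)
    b = datetime(2024, 2, 4, 23, 59)
    assert is_the_same_day(a, b)                          # same calendar day
    assert is_the_same_month(a, datetime(2024, 2, 28))    # same month and year
    assert not is_the_same_day(a, datetime(2024, 3, 4))   # same day-of-month, different month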
@@ -1,24 +1,11 @@
###################
# BUILD FOR LOCAL DEVELOPMENT
###################
FROM node:16-alpine AS development
WORKDIR /app/
COPY --chown=node:node package*.json ./
RUN npm ci
COPY --chown=node:node . .
USER node
FROM node:21-alpine AS build

###################
# BUILD FOR PRODUCTION
###################
FROM node:16-alpine AS build
WORKDIR /app/
ENV NODE_ENV production
COPY --chown=node:node package*.json ./
COPY --chown=node:node --from=development /app/node_modules ./node_modules
RUN npm install
COPY --chown=node:node . .
RUN npm run build
RUN npm ci --only=production && npm cache clean --force
RUN npm cache clean --force
USER node

###################
@@ -1,61 +0,0 @@
server {
    # listen {{port}};
    # listen [::]:{{port}};
    # server_name localhost;
    client_max_body_size 100M;

    #access_log /var/log/nginx/host.access.log main;

    location ~ ^/api {
        proxy_pass {{proxy_server}};
        proxy_read_timeout 300;
        proxy_connect_timeout 300;
        proxy_send_timeout 300;
    }

    location /static/drf_spectacular_sidecar/ {
        alias /backend-static/drf_spectacular_sidecar/;
    }

    location / {
        root /usr/share/nginx/html;
        index index.html index.htm;
        try_files $uri /index.html;
    }

    location ~ ^/static/drf_spectacular_sidecar/swagger-ui-dist {
        proxy_pass {{proxy_server}};
    }

    #error_page 404 /404.html;

    # redirect server error pages to the static page /50x.html
    #
    error_page 500 502 503 504 /50x.html;
    location = /50x.html {
        root /usr/share/nginx/html;
    }

    # proxy the PHP scripts to Apache listening on 127.0.0.1:80
    #
    #location ~ \.php$ {
    #    proxy_pass http://127.0.0.1;
    #}

    # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
    #
    #location ~ \.php$ {
    #    root html;
    #    fastcgi_pass 127.0.0.1:9000;
    #    fastcgi_index index.php;
    #    fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
    #    include fastcgi_params;
    #}

    # deny access to .htaccess files, if Apache's document root
    # concurs with nginx's one
    #
    #location ~ /\.ht {
    #    deny all;
    #}
}
@@ -2,7 +2,7 @@
  "name": "sbt-ui",
  "version": "0.1.0",
  "scripts": {
    "start": "NODE_ENV=development npm run extract && npm run compile && vite --host",
    "start": "NODE_ENV=development vite --host",
    "build": "NODE_ENV=production npm run extract && npm run compile && tsc && vite build",
    "serve": "vite preview",
    "extract": "lingui extract --clean",
@@ -1,5 +0,0 @@
#!/bin/sh
# update port and BD proxy
sed "s#{{proxy_server}}#$VITE_PROXY#g" /configs/nginx.conf > /etc/nginx/conf.d/default.conf
# run up
nginx -g 'daemon off;'
@@ -5,7 +5,7 @@ import React from 'react';
interface DataType {
  key: React.Key;
  subSidiaries: string;
  extractionDate: string | Date;
  extractionDate: string;
  snOrImeiNumber: number;
  invoiceNumber: number;
  totalImages: number;
@@ -28,12 +28,33 @@ const columns: TableColumnsType<DataType> = [
    dataIndex: 'subSidiaries',
    key: 'subSidiaries',
    width: '100px',
    render: (_, record) => {
      if (record.subSidiaries === '+') return '';
      return record.subSidiaries;
    },
    filters: [
      { text: 'all', value: 'all' },
      { text: 'sesp', value: 'sesp' },
      { text: 'seau', value: 'seau' },
    ],
    filterMode: 'menu',
    onFilter: (value: string, record) => record.subSidiaries.includes(value),
  },
  {
    title: 'OCR extraction date',
    dataIndex: 'extractionDate',
    key: 'extractionDate',
    width: '130px',
    render: (_, record) => {
      if (record.extractionDate.includes('Subtotal'))
        return (
          <span style={{ fontWeight: 'bold' }}>{record.extractionDate}</span>
        );
      return record.extractionDate;
    },
    filters: [{ text: 'Subtotal', value: 'Subtotal' }],
    filterMode: 'menu',
    onFilter: (value: string, record) => record.extractionDate.includes(value),
  },
  {
    title: 'OCR Images',
@@ -73,7 +94,7 @@ const columns: TableColumnsType<DataType> = [
    key: 'successfulPercentage',
    width: '120px',
    render: (_, record) => {
      return <span>{(record.successfulPercentage * 100).toFixed(2)}</span>;
      return <span>{(record.successfulPercentage * 100)?.toFixed(2)}</span>;
    },
  },
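  // Editor's note: the ?. added in these renders guards missing metrics, but for
  // the multiplied forms (x * 100)?.toFixed(2), an undefined x yields NaN (not
  // undefined), so those cells render "NaN" rather than blank; only the bare
  // record?.field?.toFixed(2) forms below actually short-circuit.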
  {
@@ -91,7 +112,7 @@ const columns: TableColumnsType<DataType> = [
      const isAbnormal = record.badPercentage * 100 > 10;
      return (
        <span style={{ color: isAbnormal ? 'red' : '' }}>
          {(record.badPercentage * 100).toFixed(2)}
          {(record.badPercentage * 100)?.toFixed(2)}
        </span>
      );
    },
@@ -111,7 +132,7 @@ const columns: TableColumnsType<DataType> = [
      const isAbnormal = record.snImeiAAR * 100 < 98;
      return (
        <span style={{ color: isAbnormal ? 'red' : '' }}>
          {(record.snImeiAAR * 100).toFixed(2)}
          {(record.snImeiAAR * 100)?.toFixed(2)}
        </span>
      );
    },
@@ -139,7 +160,7 @@ const columns: TableColumnsType<DataType> = [
      const isAbnormal = record.retailerNameAAR * 100 < 98;
      return (
        <span style={{ color: isAbnormal ? 'red' : '' }}>
          {(record.retailerNameAAR * 100).toFixed(2)}
          {(record.retailerNameAAR * 100)?.toFixed(2)}
        </span>
      );
    },
@@ -157,7 +178,7 @@ const columns: TableColumnsType<DataType> = [
      const isAbnormal = record.snImeiAPT > 2;
      return (
        <span style={{ color: isAbnormal ? 'red' : '' }}>
          {record.snImeiAPT.toFixed(2)}
          {record?.snImeiAPT?.toFixed(2)}
        </span>
      );
    },
@@ -170,7 +191,7 @@ const columns: TableColumnsType<DataType> = [
      const isAbnormal = record.invoiceAPT > 2;
      return (
        <span style={{ color: isAbnormal ? 'red' : '' }}>
          {record.invoiceAPT.toFixed(2)}
          {record?.invoiceAPT?.toFixed(2)}
        </span>
      );
    },
@@ -215,49 +236,9 @@ const ReportOverViewTable: React.FC<ReportOverViewTableProps> = ({
  isLoading,
  data,
}) => {
  // const [pagination, setPagination] = useState({
  //   page: 1,
  //   page_size: 10,
  // });
  // const { isLoading, data } = useOverViewReport({
  //   page: pagination.page,
  // });

  console.log('check >>>', pagination, isLoading, data);

  const overviewDataResponse = data as any;
  const dataSubsRows = overviewDataResponse?.overview_data
    .map((item, index) => {
      if (item.subs.includes('+')) {
        return {
          key: index,
          subSidiaries: '',
          extractionDate: item.extraction_date,
          snOrImeiNumber: '',
          invoiceNumber: '',
          totalImages: item.total_images,
          successfulNumber: item.images_quality.successful,
          successfulPercentage: item.images_quality.successful_percent,
          badNumber: item.images_quality.bad,
          badPercentage: item.images_quality.bad_percent,
          snImeiAAR: item.average_accuracy_rate.imei,
          purchaseDateAAR: item.average_accuracy_rate.purchase_date,
          retailerNameAAR: item.average_accuracy_rate.retailer_name,
          snImeiAPT: item.average_processing_time.imei,
          invoiceAPT: item.average_processing_time.invoice,
          snImeiTC: item.usage.imei,
          invoiceTC: item.usage.invoice,
        };
      } else {
        return null;
      }
    })
    .filter((item) => item);

  const expandedRowRender = () => {
    const subData = overviewDataResponse?.overview_data
      .map((item, index) => {
        if (!item.subs.includes('+')) {
    const dataSubsRows = overviewDataResponse?.overview_data.map(
      (item, index) => {
        return {
          key: index,
          subSidiaries: item.subs,
@@ -277,172 +258,9 @@ const ReportOverViewTable: React.FC<ReportOverViewTableProps> = ({
          snImeiTC: item.usage.imei,
          invoiceTC: item.usage.invoice,
        };
      } else {
        return null;
      }
    })
    .filter((item) => item);
      },
    );

  const subColumns: TableColumnsType<DataType> = [
    {
      title: 'Subs',
      dataIndex: 'subSidiaries',
      key: 'subSidiaries',
      width: '100px',
    },
    {
      title: 'OCR extraction date',
      dataIndex: 'extractionDate',
      key: 'extractionDate',
      width: '130px',
      render: (_, record) => {
        return <span>{record?.extractionDate.toString().split('T')[0]}</span>;
      },
    },
    {
      title: 'SN/IMEI',
      dataIndex: 'snOrImeiNumber',
      key: 'snOrImeiNumber',
      width: '50px',
    },
    {
      title: 'Invoice',
      dataIndex: 'invoiceNumber',
      key: 'invoiceNumber',
      width: '50px',
    },
    {
      title: 'Total Images',
      dataIndex: 'totalImages',
      key: 'totalImages',
      width: '130px',
    },
    {
      title: 'Successful',
      dataIndex: 'successfulNumber',
      key: 'successfulNumber',
      width: '50px',
    },
    {
      title: '% Successful',
      dataIndex: 'successfulPercentage',
      key: 'successfulPercentage',
      width: '120px',
      render: (_, record) => {
        return <span>{(record.successfulPercentage * 100).toFixed(2)}</span>;
      },
    },
    {
      title: 'Bad',
      dataIndex: 'badNumber',
      key: 'badNumber',
      width: '30px',
    },
    {
      title: '% Bad',
      dataIndex: 'badPercentage',
      key: 'badPercentage',
      width: '60px',
      render: (_, record) => {
        const isAbnormal = record.badPercentage * 100 > 10;
        return (
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {(record.badPercentage * 100).toFixed(2)}
          </span>
        );
      },
    },

    {
      title: 'IMEI / Serial no.',
      dataIndex: 'snImeiAAR',
      key: 'snImeiAAR',
      width: '130px',
      render: (_, record) => {
        const isAbnormal = record.snImeiAAR * 100 < 98;
        return (
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {(record.snImeiAAR * 100).toFixed(2)}
          </span>
        );
      },
    },
    {
      title: 'Purchase date',
      dataIndex: 'purchaseDateAAR',
      key: 'purchaseDateAAR',
      width: '130px',
      render: (_, record) => {
        const isAbnormal = record.purchaseDateAAR * 100 < 98;
        return (
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {(record.purchaseDateAAR * 100).toFixed(2)}
          </span>
        );
      },
    },
    {
      title: 'Retailer name',
      dataIndex: 'retailerNameAAR',
      key: 'retailerNameAAR',
      width: '130px',
      render: (_, record) => {
        const isAbnormal = record.retailerNameAAR * 100 < 98;
        return (
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {(record.retailerNameAAR * 100).toFixed(2)}
          </span>
        );
      },
    },

    {
      title: 'SN/IMEI',
      dataIndex: 'snImeiAPT',
      key: 'snImeiAPT',
      render: (_, record) => {
        const isAbnormal = record.snImeiAPT > 2;
        return (
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {record.snImeiAPT.toFixed(2)}
          </span>
        );
      },
    },
    {
      title: 'Invoice',
      dataIndex: 'invoiceAPT',
      key: 'invoiceAPT',
      render: (_, record) => {
        const isAbnormal = record.invoiceAPT > 2;
        return (
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {record.invoiceAPT.toFixed(2)}
          </span>
        );
      },
    },
    {
      title: 'SN/IMEI',
      dataIndex: 'snImeiTC',
      key: 'snImeiTC',
    },
    {
      title: 'Invoice',
      dataIndex: 'invoiceTC',
      key: 'invoiceTC',
    },
  ];
  return (
    <Table
      columns={subColumns}
      dataSource={subData}
      pagination={false}
      bordered
      // showHeader={false}
    />
  );
};
  return (
    <div>
      <Table
@@ -451,7 +269,6 @@ const ReportOverViewTable: React.FC<ReportOverViewTableProps> = ({
        dataSource={dataSubsRows}
        bordered
        size='small'
        expandable={{ expandedRowRender, defaultExpandedRowKeys: [0, 1] }}
        scroll={{ x: 2000 }}
        pagination={{
          current: pagination.page,
@@ -65,9 +65,12 @@ const ReportTable: React.FC = () => {
      dataIndex: 'Purchase Date Acc',
      key: 'Purchase Date Acc',
      render: (_, record) => {
        const isAbnormal = record['Purchase Date Acc'] * 100 < 98;
        return (
          record['Purchase Date Acc'] &&
          Number(record['Purchase Date Acc']).toFixed(2)
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {record['Purchase Date Acc'] &&
              (Number(record['Purchase Date Acc']) * 100)?.toFixed(2)}
          </span>
        );
      },
    },
@@ -77,8 +80,12 @@ const ReportTable: React.FC = () => {
      dataIndex: 'Retailer Acc',
      key: 'Retailer Acc',
      render: (_, record) => {
        const isAbnormal = record['Retailer Acc'] * 100 < 98;
        return (
          record['Retailer Acc'] && Number(record['Retailer Acc']).toFixed(2)
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {record['Retailer Acc'] &&
              (Number(record['Retailer Acc']) * 100)?.toFixed(2)}
          </span>
        );
      },
    },
@@ -87,7 +94,13 @@ const ReportTable: React.FC = () => {
      dataIndex: 'IMEI Acc',
      key: 'IMEI Acc',
      render: (_, record) => {
        return record['IMEI Acc'] && Number(record['IMEI Acc']).toFixed(2);
        const isAbnormal = record['IMEI Acc'] * 100 < 98;
        return (
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {record['IMEI Acc'] &&
              (Number(record['IMEI Acc']) * 100)?.toFixed(2)}
          </span>
        );
      },
    },
    {
@@ -95,8 +108,12 @@ const ReportTable: React.FC = () => {
      dataIndex: 'Avg Accuracy',
      key: 'Avg Accuracy',
      render: (_, record) => {
        const isAbnormal = record['Avg Accuracy'] * 100 < 98;
        return (
          record['Avg Accuracy'] && Number(record['Avg Accuracy']).toFixed(2)
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {record['Avg Accuracy'] &&
              (Number(record['Avg Accuracy']) * 100)?.toFixed(2)}
          </span>
        );
      },
    },
@@ -105,9 +122,12 @@ const ReportTable: React.FC = () => {
      dataIndex: 'Avg. Client Request Time',
      key: 'Avg. Client Request Time',
      render: (_, record) => {
        const isAbnormal = record['Avg Client Request Time'] > 2;
        return (
          record['Avg Client Request Time'] &&
          Number(record['Avg Client Request Time']).toFixed(2)
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {record['Avg Client Request Time'] &&
              Number(record['Avg Client Request Time'])?.toFixed(2)}
          </span>
        );
      },
    },
@@ -116,9 +136,12 @@ const ReportTable: React.FC = () => {
      dataIndex: 'Avg. OCR Processing Time',
      key: 'Avg. OCR Processing Time',
      render: (_, record) => {
        const isAbnormal = record['Avg. OCR Processing Time'] > 2;
        return (
          record['Avg. OCR Processing Time'] &&
          Number(record['Avg. OCR Processing Time']).toFixed(2)
          <span style={{ color: isAbnormal ? 'red' : '' }}>
            {record['Avg. OCR Processing Time'] &&
              Number(record['Avg. OCR Processing Time'])?.toFixed(2)}
          </span>
        );
      },
    },
@@ -96,10 +96,8 @@ export async function downloadReport(report_id: string) {
  let filename = "report.xlsx";
  try {
    let basename = response.headers['content-disposition'].split('filename=')[1].split('.')[0];
    if (basename.charAt(0) === '_') {
      basename = basename.substring(1);
    }
    filename = `${basename}.xlsx`
    let extension = response.headers['content-disposition'].split('.')[1].split(';')[0];
    filename = `${basename}.${extension}`
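    // Editor's note: this assumes a header of the form
    // `content-disposition: attachment; filename=<name>.<ext>;` — the piece
    // before the first '.' is taken as the basename and the piece after it
    // (up to ';') as the extension.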
  } catch (err) {
    console.log(err);
  }
@@ -84,12 +84,12 @@ services:
    depends_on:
      db-sbt:
        condition: service_started
    # command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
    #   python manage.py makemigrations &&
    #   python manage.py migrate &&
    #   python manage.py compilemessages &&
    #   gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
    command: bash -c "tail -f > /dev/null"
    command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
      python manage.py makemigrations &&
      python manage.py migrate &&
      python manage.py compilemessages &&
      gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
    # command: bash -c "tail -f > /dev/null"

  minio:
    image: minio/minio
junk_tests/date_compare.py (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
from datetime import datetime
|
||||
|
||||
# Assuming you have two datetime objects for the same day in different months
|
||||
date_jan = datetime(2022, 2, 15, 12, 30, 0)
|
||||
date_feb = datetime(2022, 2, 15, 8, 45, 0)
|
||||
|
||||
# Check if they are the same day
|
||||
if date_jan.day == date_feb.day and date_jan.month == date_feb.month and date_jan.year == date_feb.year:
|
||||
print("They are the same day")
|
||||
else:
|
||||
print("They are different days")
|
||||
|
||||
# Check if they are the same month
|
||||
if date_jan.month == date_feb.month and date_jan.year == date_feb.year:
|
||||
print("They are the same month")
|
||||
else:
|
||||
print("They are different months")
|