Merge pull request #68 from SDSRV-IDP/dev/20240308

Fix: #67
Đỗ Xuân Tân 2024-03-06 16:46:41 +07:00 committed by GitHub Enterprise
commit b5263e903e
6 changed files with 89 additions and 81 deletions

View File

@@ -20,6 +20,7 @@ from ..utils.file import download_from_S3, dict2xlsx, save_report_to_S3, build_S
 from ..utils.redis import RedisUtils
 from ..utils.process import string_to_boolean
 from ..utils.cache import get_cache, set_cache
+from fwd_api.constant.common import FileCategory
 from ..request.ReportCreationSerializer import ReportCreationSerializer
 from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long
 from ..utils.report import aggregate_overview
@@ -110,7 +111,7 @@ class AccuracyViewSet(viewsets.ViewSet):
         subsidiary = request.data.get("subsidiary", "all")
         subsidiary = map_subsidiary_long_to_short(subsidiary)
-        base_query = Q()
+        base_query = Q(status=200)
         if start_date_str or end_date_str:
             try:
                 start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%d')  # we only care about day precision
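The base query now starts from Q(status=200) rather than an empty Q(), so requests that did not finish successfully are excluded before any date filtering is applied. A minimal sketch of how such a base query composes, using plain Django Q objects; the created_at field name is an illustrative assumption, not taken from the diff:

    from datetime import datetime
    from django.db.models import Q

    base_query = Q(status=200)  # was Q(); failed requests now drop out up front
    start_date = datetime.strptime("2024-03-01", '%Y-%m-%d')
    end_date = datetime.strptime("2024-03-08", '%Y-%m-%d')
    base_query &= Q(created_at__range=(start_date, end_date))  # hypothetical date field
    # later: SubscriptionRequest.objects.filter(base_query)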
@@ -670,7 +671,7 @@ class AccuracyViewSet(viewsets.ViewSet):
             data = []
             files = []
-            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id)
+            subscription_request_files = SubscriptionRequestFile.objects.filter(request=subscription_request.id, file_category=FileCategory.Origin.value)
             for subscription_request_file in subscription_request_files:
                 sub = subscription_request.subscription
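With the file_category filter, the loop now visits only the original uploads of a request and skips derived artifacts. A standalone illustration of the same narrowing; the enum member values, and any member other than Origin, are assumptions:

    from enum import Enum

    class FileCategory(Enum):
        Origin = "Origin"  # original upload (member value assumed)
        Break = "Break"    # hypothetical derived/cropped file category

    request_files = [
        {"file_name": "a.jpg", "file_category": "Origin"},
        {"file_name": "a_crop_0.jpg", "file_category": "Break"},
    ]
    originals = [f for f in request_files if f["file_category"] == FileCategory.Origin.value]
    assert [f["file_name"] for f in originals] == ["a.jpg"]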

View File

@@ -261,6 +261,8 @@ def make_a_report_2(report_id, query_set):
         report.average_OCR_time = {"invoice": time_cost["invoice"](), "imei": time_cost["imei"](),
                                    "invoice_count": time_cost["invoice"].count, "imei_count": time_cost["imei"].count}
+        report.average_OCR_time["invoice"] = 0 if report.average_OCR_time["invoice"] is None else report.average_OCR_time["invoice"]
+        report.average_OCR_time["imei"] = 0 if report.average_OCR_time["imei"] is None else report.average_OCR_time["imei"]
         report.average_OCR_time["avg"] = (report.average_OCR_time["invoice"]*report.average_OCR_time["invoice_count"] + report.average_OCR_time["imei"]*report.average_OCR_time["imei_count"])/(report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) if (report.average_OCR_time["imei_count"] + report.average_OCR_time["invoice_count"]) > 0 else None
         report.number_imei_transaction = transaction_att.get("imei", 0)
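The two added lines coerce a missing per-type average to 0 so the weighted mean on the next line cannot raise a TypeError on None, while the existing denominator check still yields None when there were no requests of either type. A worked example in plain Python:

    ocr_time = {"invoice": None, "imei": 1.5, "invoice_count": 0, "imei_count": 4}

    # the two added guard lines
    ocr_time["invoice"] = 0 if ocr_time["invoice"] is None else ocr_time["invoice"]
    ocr_time["imei"] = 0 if ocr_time["imei"] is None else ocr_time["imei"]

    total_count = ocr_time["invoice_count"] + ocr_time["imei_count"]
    ocr_time["avg"] = (
        (ocr_time["invoice"] * ocr_time["invoice_count"]
         + ocr_time["imei"] * ocr_time["imei_count"]) / total_count
        if total_count > 0
        else None
    )
    assert ocr_time["avg"] == 1.5  # (0*0 + 1.5*4) / 4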
@@ -311,7 +313,7 @@ def make_a_report_2(report_id, query_set):
         for key in keys:
             if report_fine_data[i][key]:
                 for x_key in report_fine_data[i][key].keys():
-                    report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key]*100
+                    report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key]*100 if report_fine_data[i][key][x_key] is not None else None
     data_workbook = dict2xlsx(report_fine_data, _type='report')
     overview_filename = query_set["subsidiary"] + "_" + query_set["report_overview_duration"] + ".xlsx"
     local_workbook = save_workbook_file(overview_filename, report, data_workbook, settings.OVERVIEW_REPORT_ROOT)
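Scaling the accuracy fields to percentages now propagates None instead of crashing on cells that had no measurable value, so dict2xlsx can render those cells as empty. The same guard on a stand-in row; the field names are illustrative:

    row = {"feedback_accuracy": {"imei_number": 0.98, "purchase_date": None}}
    for key in ["feedback_accuracy"]:
        if row[key]:
            for x_key in row[key].keys():
                row[key][x_key] = row[key][x_key] * 100 if row[key][x_key] is not None else None
    assert row["feedback_accuracy"] == {"imei_number": 98.0, "purchase_date": None}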

View File

@@ -115,9 +115,10 @@ class ReportAccumulateByRequest:
     @staticmethod
     def update_total(total, report_file):
-        total["total_images"] += 1
+        if report_file.bad_image_reason not in settings.ACC_EXCLUDE_RESEASONS:
             total["images_quality"]["successful"] += 1 if not report_file.is_bad_image else 0
             total["images_quality"]["bad"] += 1 if report_file.is_bad_image else 0
+        total["total_images"] += 1
         doc_type = "imei"
         if report_file.doc_type in ["imei", "invoice", "all"]:
             doc_type = report_file.doc_type
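The new guard means an image whose bad_image_reason is on the exclusion list still counts toward total_images but no longer skews the quality split; the update_day hunk below applies the same rule per day. A self-contained sketch: ACC_EXCLUDE_RESEASONS is stubbed locally with assumed contents (in the project it comes from Django settings), and the real method receives a report_file object rather than loose arguments:

    ACC_EXCLUDE_RESEASONS = ["invalid_image", "missing_information"]  # assumed contents

    def update_total(total, bad_image_reason, is_bad_image):
        if bad_image_reason not in ACC_EXCLUDE_RESEASONS:
            total["images_quality"]["successful"] += 1 if not is_bad_image else 0
            total["images_quality"]["bad"] += 1 if is_bad_image else 0
        total["total_images"] += 1

    total = {"total_images": 0, "images_quality": {"successful": 0, "bad": 0}}
    update_total(total, "invalid_image", True)  # excluded reason: counted, not judged
    update_total(total, None, False)            # ordinary good image
    assert total == {"total_images": 2, "images_quality": {"successful": 1, "bad": 0}}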
@@ -160,9 +161,10 @@ class ReportAccumulateByRequest:
     @staticmethod
     def update_day(day_data, report_file):
-        day_data["total_images"] += 1
+        if report_file.bad_image_reason not in settings.ACC_EXCLUDE_RESEASONS:
             day_data["images_quality"]["successful"] += 1 if not report_file.is_bad_image else 0
             day_data["images_quality"]["bad"] += 1 if report_file.is_bad_image else 0
+        day_data["total_images"] += 1
         doc_type = "imei"
         if report_file.doc_type in ["imei", "invoice", "all"]:
             doc_type = report_file.doc_type
@@ -255,6 +257,8 @@ class ReportAccumulateByRequest:
             _average_OCR_time = {"invoice": self.data[month][1][day]["average_processing_time"]["invoice"](), "imei": self.data[month][1][day]["average_processing_time"]["imei"](),
                                  "invoice_count": self.data[month][1][day]["average_processing_time"]["invoice"].count, "imei_count": self.data[month][1][day]["average_processing_time"]["imei"].count}
+            _average_OCR_time["invoice"] = 0 if _average_OCR_time["invoice"] is None else _average_OCR_time["invoice"]
+            _average_OCR_time["imei"] = 0 if _average_OCR_time["imei"] is None else _average_OCR_time["imei"]
             _average_OCR_time["avg"] = (_average_OCR_time["invoice"]*_average_OCR_time["invoice_count"] + _average_OCR_time["imei"]*_average_OCR_time["imei_count"])/(_average_OCR_time["imei_count"] + _average_OCR_time["invoice_count"]) if (_average_OCR_time["imei_count"] + _average_OCR_time["invoice_count"]) > 0 else None
             acumulated_acc = {"feedback_accuracy": {},
                               "reviewed_accuracy": {}}
@@ -527,6 +531,8 @@ class IterAvg:
         self.avg = (self.avg*(self.count-count) + avg*count)/(self.count)

     def __call__(self):
+        if self.count == 0:
+            return None
         return self.avg

 def validate_feedback_file(feedback, predict):
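IterAvg keeps a running weighted mean; with the added branch, reading it before any sample has been merged returns None, which the formatPercent fix below then renders as a dash. A self-contained version for illustration; the name of the merge method is an assumption, since the diff only shows its update line and __call__:

    class IterAvg:
        def __init__(self):
            self.count = 0
            self.avg = 0

        def add(self, avg, count=1):
            # incremental weighted mean, as in the update line above
            self.count += count
            self.avg = (self.avg * (self.count - count) + avg * count) / self.count

        def __call__(self):
            if self.count == 0:
                return None  # no data yet: report "unknown", not 0
            return self.avg

    t = IterAvg()
    assert t() is None
    t.add(2.0, count=3)
    t.add(4.0, count=1)
    assert t() == 2.5  # (2.0*3 + 4.0*1) / 4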

View File

@@ -10,7 +10,6 @@ def set_cache(key, value):
     this_cache.save()
     return this_cache

 def get_cache(key):
-    value = {}
     cache = Caching.objects.filter(key=key)

View File

@@ -1,5 +1,5 @@
 export const formatPercent = (value: number, floatingPoint: number = 1) => {
-  if (value === 0) {
+  if (value === null || value === undefined) {
     return '-';
   }
   if (value < 100.0) {
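The old check treated a genuine 0% the same as missing data and rendered both as '-'; now only null and undefined map to the dash (which also suggests the value parameter's TypeScript type should admit null). A Python analogue of the fixed behaviour; the >= 100 branch is an assumption, since the diff truncates before it:

    def format_percent(value, floating_point=1):
        if value is None:  # only missing data renders as a dash now
            return '-'
        if value < 100.0:
            return f"{value:.{floating_point}f}"
        return "100"  # assumed clamp; this branch is not shown in the diff

    assert format_percent(None) == '-'
    assert format_percent(0) == '0.0'      # previously this rendered as '-'
    assert format_percent(98.76) == '98.8'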

View File

@@ -89,12 +89,12 @@ services:
     depends_on:
       db-sbt:
         condition: service_started
-    command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
-      python manage.py makemigrations &&
-      python manage.py migrate &&
-      python manage.py compilemessages &&
-      gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
-    # command: bash -c "tail -f > /dev/null"
+    # command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
+    #   python manage.py makemigrations &&
+    #   python manage.py migrate &&
+    #   python manage.py compilemessages &&
+    #   gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
+    command: bash -c "tail -f > /dev/null"

   minio:
     image: minio/minio
@@ -179,8 +179,8 @@ services:
       - ./cope2n-api:/app
     working_dir: /app
-    command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
-    # command: bash -c "tail -f > /dev/null"
+    # command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
+    command: bash -c "tail -f > /dev/null"

   # Back-end persistent
   db-sbt: