Update: issues on 29 Feb

dx-tan 2024-02-29 13:12:50 +07:00
parent 615e980aea
commit 72a8e05b5a
12 changed files with 37 additions and 35 deletions

@@ -1 +1 @@
Subproject commit d01de312ab86db554ffa2f1b01396ef8d56b78ed
Subproject commit 671d7917c657ad185a06772e0b707b45fe59788a

View File

@@ -505,7 +505,7 @@ class AccuracyViewSet(viewsets.ViewSet):
# aggregate_overview from subsibdiaries
subsidiaries_to_include = list(settings.SUBS.values())
subsidiaries_to_include.remove("all")
subsidiaries_to_include.remove("seao")
# subsidiaries_to_include.remove("seao")
subsidiary_overview_reports = []
for sub in subsidiaries_to_include:
key = f"{sub}_{duration}"
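For context on the hunk above: with the second remove() commented out, only the "all" placeholder is dropped from settings.SUBS, so the "seao" subsidiary is aggregated into the overview again. A minimal sketch of the resulting list, using a hypothetical SUBS mapping (the real values live in Django settings):

    # Hypothetical SUBS mapping for illustration; real values come from settings.SUBS.
    SUBS = {"ALL": "all", "SEAO": "seao", "SUB_A": "sub_a", "SUB_B": "sub_b"}

    subsidiaries_to_include = list(SUBS.values())
    subsidiaries_to_include.remove("all")   # the aggregate placeholder is still dropped
    # "seao" is no longer removed, so its overview report is included as well
    print(subsidiaries_to_include)          # ['seao', 'sub_a', 'sub_b']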

View File

@@ -208,7 +208,7 @@ def make_a_report_2(report_id, query_set):
bad_image_list = []
number_images = 0
number_bad_images = 0
review_process = []
review_progress = []
# TODO: Multithreading
# Calculate accuracy, processing time, ....Then save.
subscription_requests = SubscriptionRequest.objects.filter(base_query).order_by('created_at')
@@ -246,7 +246,7 @@ def make_a_report_2(report_id, query_set):
errors += request_att["err"]
num_request += 1
review_process += request_att.get("is_reviewed", [])
review_progress += request_att.get("is_reviewed", [])
report_fine_data, _save_data = report_engine.save(report.report_id, query_set.get("is_daily_report", False), query_set["include_test"])
transaction_att = count_transactions(start_date, end_date, report.subsidiary)
@@ -280,9 +280,9 @@ def make_a_report_2(report_id, query_set):
report.reviewed_accuracy = acumulated_acc["reviewed"]
report.combined_accuracy = acumulated_acc["acumulated"]
report.num_reviewed = review_process.count(1)
report.num_not_reviewed = review_process.count(0)
report.num_no_reviewed = review_process.count(-1)
report.num_reviewed = review_progress.count(1)
report.num_not_reviewed = review_progress.count(0)
report.num_no_reviewed = review_progress.count(-1)
report.errors = "|".join(errors)
report.status = "Ready"
@@ -298,7 +298,7 @@ def make_a_report_2(report_id, query_set):
# Save overview dashboard
# multiple accuracy by 100
save_data = copy.deepcopy(_save_data)
review_key = "review_process"
review_key = "review_progress"
for i, dat in enumerate(report_fine_data):
report_fine_data[i][review_key] = report_fine_data[i][review_key]*100
keys = [x for x in list(dat.keys()) if "accuracy" in x.lower()]
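The review_process -> review_progress rename above also covers the per-report counters, which tally a 1/0/-1 flag per request. A minimal sketch of that tallying, under the assumption (inferred from the count() calls, not confirmed by the repository) that 1 means reviewed, 0 means awaiting review, and -1 means review not applicable; the helper name is invented:

    from typing import List

    def tally_review_progress(review_progress: List[int]) -> dict:
        # Flag semantics assumed: 1 = reviewed, 0 = awaiting review, -1 = not applicable.
        return {
            "num_reviewed": review_progress.count(1),
            "num_not_reviewed": review_progress.count(0),
            "num_no_reviewed": review_progress.count(-1),
        }

    # Example: three requests reviewed, one pending, one exempt.
    print(tally_review_progress([1, 1, 0, -1, 1]))
    # {'num_reviewed': 3, 'num_not_reviewed': 1, 'num_no_reviewed': 1}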

View File

@@ -66,7 +66,7 @@ class ReportAccumulateByRequest:
'sold_to_party': IterAvg()
},
'num_request': 0,
"review_process": []
"review_progress": []
}
self.day_format = {
'subs': sub,
@@ -110,7 +110,7 @@ class ReportAccumulateByRequest:
},
"report_files": [],
"num_request": 0,
"review_process": []
"review_progress": []
},
@staticmethod
@@ -155,7 +155,7 @@ class ReportAccumulateByRequest:
total["usage"]["imei"] += 1 if doc_type == "imei" else 0
total["usage"]["invoice"] += 1 if doc_type == "invoice" else 0
total["usage"]["total_images"] += 1
total["review_process"].append(report_file.review_status)
total["review_progress"].append(report_file.review_status)
return total
@staticmethod
@@ -192,7 +192,7 @@ class ReportAccumulateByRequest:
print(f"[WARM]: Weird doctype: {report_file.doc_type}")
day_data["average_processing_time"][report_file.doc_type] = IterAvg()
day_data["average_processing_time"][report_file.doc_type].add_avg(report_file.time_cost, 1) if report_file.time_cost else 0
day_data["review_process"].append(report_file.review_status)
day_data["review_progress"].append(report_file.review_status)
return day_data
def add(self, request, report_files):
@@ -319,7 +319,7 @@ class ReportAccumulateByRequest:
_data[month][1][day]["reviewed_accuracy"]["purchase_date"] = _data[month][1][day]["reviewed_accuracy"]["purchase_date"]()
_data[month][1][day]["reviewed_accuracy"]["retailername"] = _data[month][1][day]["reviewed_accuracy"]["retailername"]()
_data[month][1][day]["reviewed_accuracy"]["sold_to_party"] = _data[month][1][day]["reviewed_accuracy"]["sold_to_party"]()
_data[month][1][day]["review_process"] = _data[month][1][day]["review_process"].count(1)/(_data[month][1][day]["review_process"].count(0)+ _data[month][1][day]["review_process"].count(1)) if (_data[month][1][day]["review_process"].count(0)+ _data[month][1][day]["review_process"].count(1)) >0 else 0
_data[month][1][day]["review_progress"] = _data[month][1][day]["review_progress"].count(1)/(_data[month][1][day]["review_progress"].count(0)+ _data[month][1][day]["review_progress"].count(1)) if (_data[month][1][day]["review_progress"].count(0)+ _data[month][1][day]["review_progress"].count(1)) >0 else 0
_data[month][1][day].pop("report_files")
_data[month][1][day]["images_quality"]["successful_percent"] = _data[month][1][day]["images_quality"]["successful"]/_data[month][1][day]["total_images"] if _data[month][1][day]["total_images"] > 0 else 0
@@ -343,7 +343,7 @@ class ReportAccumulateByRequest:
_data[month][0]["reviewed_accuracy"]["purchase_date"] = _data[month][0]["reviewed_accuracy"]["purchase_date"]()
_data[month][0]["reviewed_accuracy"]["retailername"] = _data[month][0]["reviewed_accuracy"]["retailername"]()
_data[month][0]["reviewed_accuracy"]["sold_to_party"] = _data[month][0]["reviewed_accuracy"]["sold_to_party"]()
_data[month][0]["review_process"] = _data[month][0]["review_process"].count(1)/(_data[month][0]["review_process"].count(0)+ _data[month][0]["review_process"].count(1)) if (_data[month][0]["review_process"].count(0)+ _data[month][0]["review_process"].count(1)) >0 else 0
_data[month][0]["review_progress"] = _data[month][0]["review_progress"].count(1)/(_data[month][0]["review_progress"].count(0)+ _data[month][0]["review_progress"].count(1)) if (_data[month][0]["review_progress"].count(0)+ _data[month][0]["review_progress"].count(1)) >0 else 0
return _data
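The aggregation above collapses each review_progress list into a completion ratio: reviewed items divided by reviewed plus still-pending items, with -1 entries excluded from the denominator and a guard for the empty case. A standalone sketch of that formula (the function name is mine, and the same 1/0/-1 flag assumption applies):

    from typing import List

    def review_progress_ratio(flags: List[int]) -> float:
        reviewed = flags.count(1)
        pending = flags.count(0)
        denominator = reviewed + pending     # -1 (not applicable) is left out
        return reviewed / denominator if denominator > 0 else 0

    print(review_progress_ratio([1, 1, 0, -1]))  # 0.666...
    print(review_progress_ratio([-1, -1]))       # 0 (guarded division by zero)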

View File

@@ -529,7 +529,7 @@ def dict2xlsx(input: json, _type='report'):
ws[key + str(start_index)] = "-"
ws[key + str(start_index)].border = border
ws[key + str(start_index)].font = font_black
if 'accuracy' in mapping[key] or 'time' in mapping[key] or 'percent' in mapping[key] or 'speed' in mapping[key] or mapping[key] in ["review_process"]:
if 'accuracy' in mapping[key] or 'time' in mapping[key] or 'percent' in mapping[key] or 'speed' in mapping[key] or mapping[key] in ["review_progress"]:
ws[key + str(start_index)].number_format = '0.0'
if _type == 'report':
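Because review_progress is stored as a 0-1 ratio and multiplied by 100 before export (see the report_fine_data loop earlier in this commit), dict2xlsx now formats it like the other accuracy/percent columns. A minimal openpyxl sketch of that formatting rule; the workbook and field names here are stand-ins, not the project's real mapping:

    from openpyxl import Workbook

    PERCENT_LIKE_TOKENS = ("accuracy", "time", "percent", "speed")

    def needs_decimal_format(field_name: str) -> bool:
        lowered = field_name.lower()
        return any(token in lowered for token in PERCENT_LIKE_TOKENS) or field_name == "review_progress"

    wb = Workbook()
    ws = wb.active
    ws["A1"] = 66.7                      # e.g. a review_progress ratio already scaled by 100
    if needs_decimal_format("review_progress"):
        ws["A1"].number_format = "0.0"   # one decimal place, matching the export above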

@@ -1 +1 @@
Subproject commit d01de312ab86db554ffa2f1b01396ef8d56b78ed
Subproject commit 671d7917c657ad185a06772e0b707b45fe59788a

View File

@@ -0,0 +1,2 @@
python manage.py migrate-csv-revert reviewed/0131-0206-Mai-.csv
python manage.py migrate-database-010224 2024-01-28T00:00:00+0700 2024-02-07T00:00:00+0700

View File

@@ -15,8 +15,8 @@ login_token = None
# Define the login credentials
login_credentials = {
'username': 'sbt',
# 'password': '7Eg4AbWIXDnufgn'
'password': 'abc'
'password': '7Eg4AbWIXDnufgn'
# 'password': 'abc'
}
# Define the command to call the update API

View File

@@ -21,7 +21,7 @@ interface DataType {
invoiceAPT: number;
snImeiTC: number; // TC: transaction count
invoiceTC: number;
reviewProcess: number;
reviewProgress: number;
}
const columns: TableColumnsType<DataType> = [
@@ -214,7 +214,7 @@ const columns: TableColumnsType<DataType> = [
],
},
{
title: 'Average Processing Per Image (Seconds)',
title: 'Average Processing Time Per Image (Seconds)',
children: [
{
title: 'SN/IMEI',
@@ -245,14 +245,14 @@ const columns: TableColumnsType<DataType> = [
],
},
{
title: 'Review Process',
dataIndex: 'review_process',
key: 'review_process',
title: 'Review Progress',
dataIndex: 'review_progress',
key: 'review_progress',
width: '100px',
render: (_, record) => {
return (
<span>
{formatPercent(record.reviewProcess)==='-'? 0:formatPercent(record.reviewProcess)}
{formatPercent(record.reviewProgress)==='-'? 0:formatPercent(record.reviewProgress)}
</span>
);
},
@@ -289,7 +289,7 @@ const ReportOverViewTable: React.FC<ReportOverViewTableProps> = ({
invoiceAPT: item.average_processing_time.invoice,
snImeiTC: item.usage.imei,
invoiceTC: item.usage.invoice,
reviewProcess:item.review_process,
reviewProgress:item.review_progress,
};
},
);

View File

@@ -91,7 +91,7 @@ const ReportTable: React.FC = () => {
},
},
{
title: 'Purchase Date Acc',
title: 'Purchase Date Accuracy',
dataIndex: 'Purchase Date Acc',
key: 'Purchase Date Acc',
render: (_, record) => {
@@ -105,7 +105,7 @@ const ReportTable: React.FC = () => {
},
{
title: 'Retailer Acc',
title: 'Retailer Accuracy',
dataIndex: 'Retailer Acc',
key: 'Retailer Acc',
render: (_, record) => {
@@ -118,7 +118,7 @@ const ReportTable: React.FC = () => {
},
},
{
title: 'IMEI Acc',
title: 'IMEI Accuracy',
dataIndex: 'IMEI Acc',
key: 'IMEI Acc',
render: (_, record) => {

View File

@@ -6,8 +6,8 @@ tag=$1
echo "[INFO] Tag received from Python: $tag"
echo "[INFO] Updating everything the remote..."
git submodule update --recursive --remote
# echo "[INFO] Updating everything the remote..."
# git submodule update --recursive --remote
echo "[INFO] Pushing AI image with tag: $tag..."
docker compose -f docker-compose-dev.yml build cope2n-fi-sbt

View File

@@ -73,7 +73,7 @@ services:
- S3_BUCKET_NAME=${S3_BUCKET_NAME}
restart: always
ports:
- 6000:9000
- 9000:9000
networks:
- ctel-sbt
volumes:
@@ -101,8 +101,8 @@ services:
volumes:
- ./data/minio_data:/data
ports:
- 6884:9884
- 6885:9885
- 9884:9884
- 9885:9885
networks:
- ctel-sbt
restart: always
@@ -191,7 +191,7 @@ services:
- POSTGRES_PASSWORD=${DB_PASSWORD}
- POSTGRES_DB=${DB_SCHEMA}
ports:
- 54321:5432
- 5432:5432
rabbitmq-sbt:
mem_reservation: 600m
@@ -215,7 +215,7 @@ services:
shm_size: 10gb
privileged: true
ports:
- 6881:80
- 9881:80
depends_on:
be-ctel-sbt:
condition: service_started