Update: revert report list order, rename xlsx file
parent 93aee28494
commit af03dd55e8
@@ -14,7 +14,7 @@ import json
 from ..exception.exceptions import InvalidException, RequiredFieldException, NotFoundException
 from ..models import SubscriptionRequest, Report, ReportFile
 from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list, extract_report_detail_list, IterAvg
-from ..utils.file import download_from_S3
+from ..utils.file import download_from_S3, convert_date_string
 from ..utils.process import string_to_boolean
 from ..celery_worker.client_connector import c_connector
 
@@ -485,7 +485,7 @@ class AccuracyViewSet(viewsets.ViewSet):
         if subsidiary:
             base_query &= Q(subsidiary=subsidiary)
         base_query &= Q(is_daily_report=True)
-        reports = Report.objects.filter(base_query).order_by('created_at')
+        reports = Report.objects.filter(base_query).order_by('created_at').reverse()
 
         paginator = Paginator(reports, page_size)
         page = paginator.get_page(page_number)
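
This hunk restores newest-first ordering for the report list, matching the commit title. A minimal sketch of the equivalence (standard Django QuerySet API, as used above):

    # Both queries return reports newest-first; the commit uses the first form.
    reports = Report.objects.filter(base_query).order_by('created_at').reverse()
    reports = Report.objects.filter(base_query).order_by('-created_at')  # same result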
@@ -541,7 +541,7 @@ class AccuracyViewSet(viewsets.ViewSet):
             raise NotFoundException(excArgs=f"report: {report_id}")
         report = Report.objects.filter(report_id=report_id).first()
         # download from s3 to local
-        tmp_file = "/tmp/" + "report_" + uuid.uuid4().hex + ".xlsx"
+        tmp_file = "/tmp/" + report.subsidiary + "_" + report.start_at.strftime("%Y%m%d") + "_" + report.end_at.strftime("%Y%m%d") + "_created_on_" + report.created_at.strftime("%Y%m%d") + ".xlsx"
         os.makedirs("/tmp", exist_ok=True)
         if not report.S3_file_name:
             raise NotFoundException(excArgs="S3 file name")
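
The rename replaces the opaque UUID-based temp filename with a self-describing one built from report metadata. Illustrative only, with hypothetical field values (not taken from this repository):

    # Assuming report.subsidiary == "SEAU", start_at == 2023-12-01,
    # end_at == 2023-12-31, created_at == 2024-01-02, the new scheme yields:
    #   /tmp/SEAU_20231201_20231231_created_on_20240102.xlsx
    # where the old scheme produced /tmp/report_<32 hex chars>.xlsx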
@@ -49,8 +49,8 @@ class CeleryConnector:
         return self.send_task('csv_feedback', args)
     def do_pdf(self, args):
         return self.send_task('do_pdf', args)
-    def upload_file_to_s3(self, args):
-        return self.send_task('upload_file_to_s3', args)
+    def upload_feedback_to_s3(self, args):
+        return self.send_task('upload_feedback_to_s3', args)
     def upload_file_to_s3(self, args):
         return self.send_task('upload_file_to_s3', args)
     def upload_report_to_s3(self, args):
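
Before this hunk, upload_file_to_s3 appears to have been defined twice in the class body; in Python the later definition silently wins, so the first copy was dead code. Renaming it to upload_feedback_to_s3 gives the feedback upload its own task entry point. A tiny demonstration of the shadowing behavior:

    class Demo:
        def f(self):
            return "first"
        def f(self):            # redefinition replaces the one above
            return "second"

    print(Demo().f())  # prints "second"; the first definition is unreachable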
@@ -16,7 +16,9 @@ from ..utils import s3 as S3Util
 from fwd_api.constant.common import ProcessType
 import csv
 import json
+import copy
 
+from fwd_api.utils.accuracy import predict_result_to_ready
 from celery.utils.log import get_task_logger
 from fwd import settings
 
@@ -79,6 +81,7 @@ def process_csv_feedback(csv_file_path, feedback_id):
                 continue
             else:
                 sub_rq = sub_rqs[0]
+            images = SubscriptionRequestFile.objects.filter(request=sub_rq)
             fb = {}
             # update user result (with validate)
             redemption_id = row.get('redemptionNumber')
@@ -99,6 +102,39 @@ def process_csv_feedback(csv_file_path, feedback_id):
             if len(redemption_id) > 0:
                 sub_rq.redemption_id = redemption_id
                 sub_rq.save()
+            # Update files
+            time_cost = {"imei": [], "invoice": [], "all": []}
+            imei_count = 0
+            if sub_rq.ai_inference_profile is None:
+                time_cost["imei"] = [-1 for _ in range(len(images))]
+                time_cost["invoice"] = [-1]
+                time_cost["all"] = [-1]
+            else:
+                for k, v in sub_rq.ai_inference_profile.items():
+                    time_cost[k.split("_")[0]].append(v["inference"][1][0] - v["inference"][0] + (v["postprocess"][1]-v["postprocess"][0]))
+            for i, image in enumerate(images):
+                _predict_result = copy.deepcopy(predict_result_to_ready(sub_rq.predict_result))
+                _feedback_result = copy.deepcopy(sub_rq.feedback_result)
+                _reviewed_result = copy.deepcopy(sub_rq.reviewed_result)
+                image.processing_time = time_cost.get(image.doc_type, [0 for _ in range(image.index_in_request)])[image.index_in_request]
+                if image.doc_type == "invoice":
+                    _predict_result["imei_number"] = []
+                    if _feedback_result:
+                        _feedback_result["imei_number"] = []
+                    else:
+                        None
+                    if _reviewed_result:
+                        _reviewed_result["imei_number"] = []
+                    else:
+                        None
+                else:
+                    _predict_result = {"retailername": None, "sold_to_party": None, "purchase_date": [], "imei_number": [_predict_result["imei_number"][image.index_in_request]]}
+                    _feedback_result = {"retailername": None, "sold_to_party": None, "purchase_date": None, "imei_number": [_feedback_result["imei_number"][image.index_in_request]]} if _feedback_result else None
+                    _reviewed_result = {"retailername": None, "sold_to_party": None, "purchase_date": None, "imei_number": [_reviewed_result["imei_number"][image.index_in_request]]} if _reviewed_result else None
+                image.predict_result = _predict_result
+                image.feedback_result = _feedback_result
+                image.reviewed_result = _reviewed_result
+                image.save()
             # update log into database
             feedback_rq = FeedbackRequest.objects.filter(feedback_id=feedback_id).first()
             feedback_rq.error_status = status
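
For orientation: the new timing block distributes per-request inference profiles onto individual images. The profile layout is inferred from the indexing above (an assumption, not documented in the diff): keys look like "imei_0" or "invoice_0", "inference" holds a start timestamp followed by a list whose first element is the end timestamp, and "postprocess" holds a start/end pair. A self-contained sketch:

    # Hypothetical profile shaped the way the indexing implies
    profile = {
        "imei_0": {"inference": [100.0, [102.5]], "postprocess": [102.5, 103.0]},
        "invoice_0": {"inference": [103.0, [105.0]], "postprocess": [105.0, 105.8]},
    }
    time_cost = {"imei": [], "invoice": [], "all": []}
    for k, v in profile.items():
        # inference duration + postprocess duration, grouped by doc type
        time_cost[k.split("_")[0]].append(
            v["inference"][1][0] - v["inference"][0]
            + (v["postprocess"][1] - v["postprocess"][0])
        )
    print(time_cost)  # {'imei': [3.0], 'invoice': [2.8], 'all': []}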
@@ -7,6 +7,7 @@ import json
 from PIL import Image, ExifTags
 from django.core.files.uploadedfile import TemporaryUploadedFile
 from django.utils import timezone
+from datetime import datetime
 
 from fwd import settings
 from ..utils import s3 as S3Util
@@ -30,6 +31,16 @@ s3_client = S3Util.MinioS3Client(
     bucket_name=settings.S3_BUCKET_NAME
 )
 
+def convert_date_string(date_string):
+    # Parse the input date string
+    date_format = "%Y-%m-%d %H:%M:%S.%f %z"
+    parsed_date = datetime.strptime(date_string, date_format)
+
+    # Format the date as "YYYYMMDD"
+    formatted_date = parsed_date.strftime("%Y%m%d")
+
+    return formatted_date
+
 def validate_report_list(request):
     start_date_str = request.GET.get('start_date')
     end_date_str = request.GET.get('end_date')
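
A usage sketch for the new helper; the expected input must match the "%Y-%m-%d %H:%M:%S.%f %z" format exactly, including the space before the UTC offset:

    convert_date_string("2024-01-02 10:30:45.123456 +0700")  # -> "20240102"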
cope2n-fe/Dockerfile (new file, 34 lines)
@@ -0,0 +1,34 @@
+###################
+# BUILD FOR LOCAL DEVELOPMENT
+###################
+FROM node:16-alpine AS development
+WORKDIR /app/
+COPY --chown=node:node package*.json ./
+RUN npm ci
+COPY --chown=node:node . .
+USER node
+
+###################
+# BUILD FOR PRODUCTION
+###################
+FROM node:16-alpine AS build
+WORKDIR /app/
+ENV NODE_ENV production
+COPY --chown=node:node package*.json ./
+COPY --chown=node:node --from=development /app/node_modules ./node_modules
+COPY --chown=node:node . .
+RUN npm run build
+RUN npm ci --only=production && npm cache clean --force
+USER node
+
+###################
+# PRODUCTION
+###################
+FROM nginx:stable-alpine AS nginx
+
+COPY --from=build /app/dist/ /usr/share/nginx/html/
+COPY --from=build /app/run.sh /app/
+COPY --from=build /app/nginx.conf /configs/
+RUN chmod +x /app/run.sh
+
+CMD ["/app/run.sh"]
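
The new Dockerfile follows the common three-stage Node pattern: a development stage installs all dependencies, a build stage compiles the app with npm run build and then prunes to production-only dependencies, and the final stage serves the compiled assets from nginx. Presumably it is built with a plain docker build (for just the final image, docker build --target nginx .); the exact invocation is not documented in this commit.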
cope2n-fe/nginx.conf (new file, 61 lines)
@@ -0,0 +1,61 @@
+server {
+    # listen {{port}};
+    # listen [::]:{{port}};
+    # server_name localhost;
+    client_max_body_size 100M;
+
+    #access_log /var/log/nginx/host.access.log main;
+
+    location ~ ^/api {
+        proxy_pass {{proxy_server}};
+        proxy_read_timeout 300;
+        proxy_connect_timeout 300;
+        proxy_send_timeout 300;
+    }
+
+    location /static/drf_spectacular_sidecar/ {
+        alias /backend-static/drf_spectacular_sidecar/;
+    }
+
+    location / {
+        root /usr/share/nginx/html;
+        index index.html index.htm;
+        try_files $uri /index.html;
+    }
+
+    location ~ ^/static/drf_spectacular_sidecar/swagger-ui-dist {
+        proxy_pass {{proxy_server}};
+    }
+
+    #error_page 404 /404.html;
+
+    # redirect server error pages to the static page /50x.html
+    #
+    error_page 500 502 503 504 /50x.html;
+    location = /50x.html {
+        root /usr/share/nginx/html;
+    }
+
+    # proxy the PHP scripts to Apache listening on 127.0.0.1:80
+    #
+    #location ~ \.php$ {
+    #    proxy_pass http://127.0.0.1;
+    #}
+
+    # pass the PHP scripts to FastCGI server listening on 127.0.0.1:9000
+    #
+    #location ~ \.php$ {
+    #    root html;
+    #    fastcgi_pass 127.0.0.1:9000;
+    #    fastcgi_index index.php;
+    #    fastcgi_param SCRIPT_FILENAME /scripts$fastcgi_script_name;
+    #    include fastcgi_params;
+    #}
+
+    # deny access to .htaccess files, if Apache's document root
+    # concurs with nginx's one
+    #
+    #location ~ /\.ht {
+    #    deny all;
+    #}
+}
cope2n-fe/run.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
+#!/bin/sh
+# update port and BD proxy
+sed "s#{{proxy_server}}#$VITE_PROXY#g" /configs/nginx.conf > /etc/nginx/conf.d/default.conf
+# run up
+nginx -g 'daemon off;'
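
run.sh is a small entrypoint templating step: sed replaces every {{proxy_server}} placeholder in /configs/nginx.conf with the VITE_PROXY environment variable and writes the result to nginx's default site config, then starts nginx in the foreground. For example, with a hypothetical VITE_PROXY=http://backend:9000, the line "proxy_pass {{proxy_server}};" is rendered as "proxy_pass http://backend:9000;". Note the {{port}} placeholders in nginx.conf are commented out and are not substituted by this script.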
@@ -84,12 +84,12 @@ services:
     depends_on:
       db-sbt:
         condition: service_started
-    # command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
-    #                 python manage.py makemigrations &&
-    #                 python manage.py migrate &&
-    #                 python manage.py compilemessages &&
-    #                 gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
-    command: bash -c "tail -f > /dev/null"
+    command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
+                    python manage.py makemigrations &&
+                    python manage.py migrate &&
+                    python manage.py compilemessages &&
+                    gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
+    # command: bash -c "tail -f > /dev/null"
 
   minio:
     image: minio/minio
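
This hunk swaps the debug placeholder for the real startup command: instead of idling with tail -f, the backend container now collects static files, runs migrations, compiles messages, and serves the ASGI app through gunicorn with uvicorn workers on port 9000. The tail -f variant is kept as a comment for future debugging.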