Update

parent 3115665bb0
commit 2e0a369f84
api-cronjob/Dockerfile (new file, +9 lines)
@@ -0,0 +1,9 @@
+FROM python:3.9-slim
+
+WORKDIR /app
+
+COPY script.py .
+
+RUN apt-get update && apt-get -y install curl
+
+CMD [ "python", "script.py" ]
@@ -143,8 +143,8 @@ LANGUAGE_CODE = "en-us"
 USE_I18N = True
 
 CELERY_ENABLE_UTC = False
-CELERY_TIMEZONE = "Asia/Ho_Chi_Minh"
+CELERY_TIMEZONE = "Asia/Singapore"
-TIME_ZONE = "Asia/Ho_Chi_Minh"
+TIME_ZONE = "Asia/Singapore"
 USE_TZ = True
 
 # Static files (CSS, JavaScript, Images)
@@ -221,7 +221,18 @@ MAX_NUMBER_OF_TEMPLATE = 3
 MAX_PAGES_OF_PDF_FILE = 50
 
 OVERVIEW_REFRESH_INTERVAL = 2
-OVERVIEW_REPORT_KEY = "overview"
+OVERVIEW_REPORT_ROOT = "overview"
+OVERVIEW_REPORT_DURATION = ["30d", "7d"]
+
+SUBS = {
+    "SEAU": "AU",
+    "SESP": "SG",
+    "SME": "MY",
+    "SEPCO": "PH",
+    "TSE": "TH",
+    "SEIN": "ID",
+    "ALL": "all"
+}
 
 CACHES = {
     'default': {
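The settings above drive the new overview report cache: OVERVIEW_REPORT_ROOT names the Redis hash and S3 prefix, OVERVIEW_REPORT_DURATION lists the allowed windows, and SUBS maps long subsidiary codes to the short codes stored on reports. A minimal sketch (not part of this commit) of how the view and worker hunks below combine them into a cache key:

# Sketch only: cache-key composition implied by the hunks below.
# A long subsidiary code is first mapped to its short form, then joined
# with the duration; the result is the field name under the "overview"
# hash and the stem of the exported xlsx file.
OVERVIEW_REPORT_ROOT = "overview"
OVERVIEW_REPORT_DURATION = ["30d", "7d"]
SUBS = {"SEAU": "AU", "SESP": "SG", "SME": "MY", "SEPCO": "PH", "TSE": "TH", "SEIN": "ID", "ALL": "all"}

def overview_cache_key(subsidiary: str, duration: str) -> str:
    if duration not in OVERVIEW_REPORT_DURATION:
        raise ValueError("duration must be one of OVERVIEW_REPORT_DURATION")
    short = SUBS.get(subsidiary.upper(), "all").upper()
    return f"{short}_{duration}"  # e.g. "AU_30d"

print(overview_cache_key("SEAU", "30d"))  # AU_30d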
@@ -15,8 +15,11 @@ from ..exception.exceptions import InvalidException, RequiredFieldException, Not
 from ..models import SubscriptionRequest, Report, ReportFile
 from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list, extract_report_detail_list, IterAvg
 from ..utils.file import download_from_S3, convert_date_string
+from ..utils.redis import RedisUtils
 from ..utils.process import string_to_boolean
-from ..celery_worker.client_connector import c_connector
+from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long
 
+redis_client = RedisUtils()
+
 class AccuracyViewSet(viewsets.ViewSet):
     lookup_field = "username"
@@ -226,6 +229,12 @@ class AccuracyViewSet(viewsets.ViewSet):
                 description='Subsidiary',
                 type=OpenApiTypes.STR,
             ),
+            OpenApiParameter(
+                name='report_overview_duration',
+                location=OpenApiParameter.QUERY,
+                description=f'open of {settings.OVERVIEW_REPORT_DURATION}',
+                type=OpenApiTypes.STR,
+            ),
         ],
         responses=None, tags=['Accuracy']
     )
@@ -240,7 +249,21 @@ class AccuracyViewSet(viewsets.ViewSet):
             include_test = string_to_boolean(request.GET.get('include_test', "false"))
             subsidiary = request.GET.get("subsidiary", "all")
             is_daily_report = string_to_boolean(request.GET.get('is_daily_report', "false"))
+            report_overview_duration = request.GET.get("report_overview_duration", "")
+            subsidiary = map_subsidiary_long_to_short(subsidiary)
+
+            if is_daily_report:
+                if report_overview_duration not in settings.OVERVIEW_REPORT_DURATION:
+                    raise InvalidException(excArgs="overview duration")
+                end_date = timezone.now()
+                if report_overview_duration == "30d":
+                    start_date = end_date - timezone.timedelta(days=30)
+                else:
+                    start_date = end_date - timezone.timedelta(days=7)
+                start_date = start_date.replace(hour=0, minute=0, second=0, microsecond=0)
+                start_date_str = start_date.strftime('%Y-%m-%dT%H:%M:%S%z')
+                end_date_str = end_date.strftime('%Y-%m-%dT%H:%M:%S%z')
+            else:
                 try:
                     start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
                     end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
@@ -255,6 +278,7 @@ class AccuracyViewSet(viewsets.ViewSet):
                 "include_test": include_test,
                 "subsidiary": subsidiary,
                 "is_daily_report": is_daily_report,
+                "report_overview_duration": report_overview_duration
             }
             # if is_daily_report:
             #     if (end_date-start_date) > timezone.timedelta(days=1):
@@ -319,7 +343,7 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             response = {
                 'report_detail': data,
-                'metadata': {"subsidiary": report.subsidiary,
+                'metadata': {"subsidiary": map_subsidiary_short_to_long(report.subsidiary),
                              "start_at": report.start_at,
                              "end_at": report.end_at},
                 'page': {
@@ -396,20 +420,30 @@ class AccuracyViewSet(viewsets.ViewSet):
             paginator = Paginator(reports, page_size)
             page = paginator.get_page(page_number)
 
+
             data = []
             for report in page:
+                acc_keys = ["purchase_date", "retailername", "imei_number", "avg"]
+                acc = {}
+                for key in acc_keys:
+                    fb = report.feedback_accuracy.get(key, 0) if report.feedback_accuracy else 0
+                    rv = report.reviewed_accuracy.get(key, 0) if report.reviewed_accuracy else 0
+                    acc[key] = max([fb, rv])
                 data.append({
                     "ID": report.id,
                     "Created Date": report.created_at,
+                    "Start Date": report.start_at,
+                    "End Date": report.end_at,
                     "No. Requests": report.number_request,
                     "Status": report.status,
-                    "Purchase Date Acc": report.reviewed_accuracy.get("purchase_date", None) if report.reviewed_accuracy else None,
+                    "Purchase Date Acc": acc["purchase_date"],
-                    "Retailer Acc": report.feedback_accuracy.get("retailername", None) if report.reviewed_accuracy else None,
+                    "Retailer Acc": acc["retailername"],
-                    "IMEI Acc": report.feedback_accuracy.get("imei_number", None) if report.reviewed_accuracy else None,
+                    "IMEI Acc": acc["imei_number"],
-                    "Avg. Accuracy": report.feedback_accuracy.get("avg", None) if report.reviewed_accuracy else None,
+                    "Avg. Accuracy": acc["avg"],
                     "Avg. Client Request Time": report.average_client_time.get("avg", 0) if report.average_client_time else 0,
                     "Avg. OCR Processing Time": report.average_OCR_time.get("avg", 0) if report.average_OCR_time else 0,
                     "report_id": report.report_id,
+                    "Subsidiary": map_subsidiary_short_to_long(report.subsidiary),
                 })
 
             response = {
@@ -427,105 +461,80 @@ class AccuracyViewSet(viewsets.ViewSet):
     @extend_schema(
         parameters=[
             OpenApiParameter(
-                name='start_date',
+                name='duration',
                 location=OpenApiParameter.QUERY,
-                description='Start date (YYYY-mm-DDTHH:MM:SSZ)',
+                description='one of [30d, 7d]',
-                type=OpenApiTypes.DATE,
+                type=OpenApiTypes.STR,
-                default='2023-01-02T00:00:00+0700',
+                default='30d',
-            ),
-            OpenApiParameter(
-                name='end_date',
-                location=OpenApiParameter.QUERY,
-                description='End date (YYYY-mm-DDTHH:MM:SSZ)',
-                type=OpenApiTypes.DATE,
-                default='2024-01-10T00:00:00+0700',
             ),
             OpenApiParameter(
                 name='subsidiary',
                 location=OpenApiParameter.QUERY,
                 description='Subsidiary',
                 type=OpenApiTypes.STR,
-            ),
+            )
-            OpenApiParameter(
-                name='page',
-                location=OpenApiParameter.QUERY,
-                description='Page number',
-                type=OpenApiTypes.INT,
-                required=False
-            ),
-            OpenApiParameter(
-                name='page_size',
-                location=OpenApiParameter.QUERY,
-                description='Number of items per page',
-                type=OpenApiTypes.INT,
-                required=False
-            ),
         ],
         responses=None, tags=['Accuracy']
     )
     @action(detail=False, url_path="overview", methods=["GET"])
     def overview(self, request):
         if request.method == 'GET':
-            subsidiary = request.GET.get('subsidiary', None)
+            subsidiary = request.GET.get('subsidiary', "ALL")
-            start_date_str = request.GET.get('start_date', "")
+            duration = request.GET.get('duration', "")
-            end_date_str = request.GET.get('end_date', "")
-            page_number = int(request.GET.get('page', 1))
-            page_size = int(request.GET.get('page_size', 10))
-
-            base_query = Q()
+            subsidiary = map_subsidiary_long_to_short(subsidiary)
-
-            if start_date_str and end_date_str:
-                try:
-                    start_date = timezone.datetime.strptime(start_date_str, '%Y-%m-%dT%H:%M:%S%z')
-                    end_date = timezone.datetime.strptime(end_date_str, '%Y-%m-%dT%H:%M:%S%z')
-                except ValueError:
-                    raise InvalidException(excArgs="Date format")
-            else:
-                end_date = timezone.datetime.now()
-                start_date = end_date - timezone.timedelta(days=30)
-            base_query &= Q(created_at__range=(start_date, end_date))
-
-            if subsidiary:
-                base_query &= Q(subsidiary=subsidiary)
-            base_query &= Q(is_daily_report=True)
-            reports = Report.objects.filter(base_query).order_by('start_at').reverse()
-
-            paginator = Paginator(reports, page_size)
-            page = paginator.get_page(page_number)
-
-            data = []
-            this_month_report = MonthReportAccumulate()
-            for report in page:
-                res = this_month_report.add(report)
-                if not(res):
-                    _, _data, total = this_month_report()
-                    data += [total]
-                    data += _data
-                    this_month_report = MonthReportAccumulate()
-                    this_month_report.add(report)
-            _, _data, total = this_month_report()
-            data += [total]
-            data += _data
-            # Generate xlsx file
-            # workbook = dict2xlsx(data, _type="report")
-            # tmp_file = f"/tmp/{str(uuid.uuid4())}.xlsx"
-            # os.makedirs(os.path.dirname(tmp_file), exist_ok=True)
-            # workbook.save(tmp_file)
-            # c_connector.remove_local_file((tmp_file, "fake_request_id"))
 
+            # Retrive data from Redis
+            key = f"{subsidiary}_{duration}"
+            data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", [])
             response = {
-                # 'file': load_xlsx_file(),
                 'overview_data': data,
-                'page': {
-                    'number': page.number,
-                    'total_pages': page.paginator.num_pages,
-                    'count': page.paginator.count,
-                }
             }
             return JsonResponse(response, status=200)
 
         return JsonResponse({'error': 'Invalid request method.'}, status=405)
 
+    @extend_schema(
+        parameters=[
+            OpenApiParameter(
+                name='duration',
+                location=OpenApiParameter.QUERY,
+                description='one of [30d, 7d]',
+                type=OpenApiTypes.STR,
+                default='30d',
+            ),
+            OpenApiParameter(
+                name='subsidiary',
+                location=OpenApiParameter.QUERY,
+                description='Subsidiary',
+                type=OpenApiTypes.STR,
+            )
+        ],
+        responses=None, tags=['Accuracy']
+    )
+    @action(detail=False, url_path="overview_download_file", methods=["GET"])
+    def overview_download_file(self, request):
+        if request.method == 'GET':
+            subsidiary = request.GET.get('subsidiary', "ALL")
+            duration = request.GET.get('duration', "")
+
+            subsidiary = map_subsidiary_long_to_short(subsidiary)
+
+            s3_key = f"{subsidiary}_{duration}.xlsx"
+
+            tmp_file = "/tmp/" + s3_key
+            os.makedirs("/tmp", exist_ok=True)
+            download_from_S3("report/" + settings.OVERVIEW_REPORT_ROOT + "/" + s3_key, tmp_file)
+            file = open(tmp_file, 'rb')
+            response = FileResponse(file, status=200)
+
+            # Set the content type and content disposition headers
+            response['Content-Type'] = 'application/octet-stream'
+            response['Content-Disposition'] = 'attachment; filename="{0}"'.format(os.path.basename(tmp_file))
+            return response
+
+        return JsonResponse({'error': 'Invalid request method.'}, status=405)
+
     @extend_schema(
         parameters=[],
         responses=None, tags=['Accuracy']
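The overview() and overview_download_file() actions above now serve pre-computed data keyed by subsidiary and duration. A hedged client-side sketch of calling them follows; the host and the "/api/ctel/accuracy/" route prefix are assumptions (the diff only shows url_path="overview" and url_path="overview_download_file"), and the Authorization header follows the pattern used by scripts/script.py later in this commit.

# Sketch only: querying the new endpoints. BASE and the route prefix are assumed.
import requests

BASE = "http://localhost"                      # assumed host
headers = {"Authorization": "<login token>"}   # same header scheme as scripts/script.py
params = {"duration": "30d", "subsidiary": "SEAU"}

overview = requests.get(f"{BASE}/api/ctel/accuracy/overview/", params=params, headers=headers)
print(overview.json()["overview_data"])

xlsx = requests.get(f"{BASE}/api/ctel/accuracy/overview_download_file/", params=params, headers=headers)
with open("SEAU_30d.xlsx", "wb") as f:
    f.write(xlsx.content)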
@@ -65,6 +65,7 @@ class CeleryConnector:
         return self.send_task('upload_obj_to_s3', args)
     def remove_local_file(self, args):
         return self.send_task('remove_local_file', args, countdown=280) # nearest execution of this task in 280 seconds
+
     def process_fi(self, args):
         return self.send_task('process_fi_invoice', args)
     def process_fi_result(self, args):
@@ -6,12 +6,16 @@ from ..utils import s3 as S3Util
 from ..utils.accuracy import update_temp_accuracy, IterAvg, calculate_and_save_subcription_file, count_transactions, extract_report_detail_list, calculate_a_request, ReportAccumulateByRequest
 from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3
 from ..utils import time_stuff
+from ..utils.redis import RedisUtils
 from django.utils import timezone
 from django.db.models import Q
+import json
+import copy
 
 from celery.utils.log import get_task_logger
 from fwd import settings
 
+redis_client = RedisUtils()
+
 logger = get_task_logger(__name__)
 
|
|||||||
|
|
||||||
errors += request_att["err"]
|
errors += request_att["err"]
|
||||||
num_request += 1
|
num_request += 1
|
||||||
transaction_att = count_transactions(start_date, end_date)
|
transaction_att = count_transactions(start_date, end_date, report.subsidiary)
|
||||||
# Do saving process
|
# Do saving process
|
||||||
report.number_request = num_request
|
report.number_request = num_request
|
||||||
report.number_images = number_images
|
report.number_images = number_images
|
||||||
@@ -237,8 +241,8 @@ def make_a_report_2(report_id, query_set):
         errors += request_att["err"]
         num_request += 1
 
-    report_engine.save(query_set.get("is_daily_report", False), query_set["include_test"])
+    report_fine_data, _save_data = report_engine.save(report.report_id, query_set.get("is_daily_report", False), query_set["include_test"])
-    transaction_att = count_transactions(start_date, end_date)
+    transaction_att = count_transactions(start_date, end_date, report.subsidiary)
     # Do saving process
     report.number_request = num_request
     report.number_images = number_images
@@ -276,6 +280,26 @@ def make_a_report_2(report_id, query_set):
         data_workbook = dict2xlsx(data, _type='report_detail')
         local_workbook = save_workbook_file(report.report_id + ".xlsx", report, data_workbook)
         s3_key=save_report_to_S3(report.report_id, local_workbook)
+        if query_set["is_daily_report"]:
+            # Save overview dashboard
+            # multiple accuracy by 100
+            save_data = copy.deepcopy(_save_data)
+            for i, dat in enumerate(report_fine_data):
+                keys = [x for x in list(dat.keys()) if "accuracy" in x.lower()]
+                keys_percent = "images_quality"
+                for x_key in report_fine_data[i][keys_percent].keys():
+                    if "percent" not in x_key:
+                        continue
+                    report_fine_data[i][keys_percent][x_key] = report_fine_data[i][keys_percent][x_key]*100
+                for key in keys:
+                    if report_fine_data[i][key]:
+                        for x_key in report_fine_data[i][key].keys():
+                            report_fine_data[i][key][x_key] = report_fine_data[i][key][x_key]*100
+            data_workbook = dict2xlsx(report_fine_data, _type='report')
+            overview_filename = query_set["subsidiary"] + "_" + query_set["report_overview_duration"] + ".xlsx"
+            local_workbook = save_workbook_file(overview_filename, report, data_workbook, settings.OVERVIEW_REPORT_ROOT)
+            s3_key=save_report_to_S3(report.report_id, local_workbook)
+            redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data))
 
     except IndexError as e:
         print(e)
@@ -8,6 +8,7 @@ from .ocr_utils.sbt_report import post_processing_str
 import uuid
 from fwd_api.models import SubscriptionRequest, SubscriptionRequestFile, ReportFile
 from ..celery_worker.client_connector import c_connector
+from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3
 from django.db.models import Q
 from django.utils import timezone
 import redis
@@ -21,7 +22,7 @@ valid_keys = ["retailername", "sold_to_party", "purchase_date", "imei_number"]
 
 class ReportAccumulateByRequest:
     def __init__(self, sub):
-        self.redis_client = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, decode_responses=True)
+        # self.redis_client = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, decode_responses=True)
         self.sub = sub
         self.current_time = None
         self.data = {} # {"month": [total, {"day": day_data}]}
@@ -112,10 +113,7 @@ class ReportAccumulateByRequest:
         total["total_images"] += 1
         total["images_quality"]["successful"] += 1 if not report_file.is_bad_image else 0
         total["images_quality"]["bad"] += 1 if report_file.is_bad_image else 0
+        # total["report_files"].append(report_file)
-        print(f"[DEBUG]: report_file.reviewed_accuracy: {report_file.reviewed_accuracy}")
-        print(f"[DEBUG]: report_file.feedback_accuracy: {report_file.feedback_accuracy}")
-
-
 
         if sum([len(report_file.reviewed_accuracy[x]) for x in report_file.reviewed_accuracy.keys() if "_count" not in x]) > 0 :
             total["average_accuracy_rate"]["imei"].add(report_file.reviewed_accuracy.get("imei_number", []))
@@ -150,6 +148,7 @@ class ReportAccumulateByRequest:
         day_data["images_quality"]["bad"] += 1 if report_file.is_bad_image else 0
         day_data["num_imei"] += 1 if report_file.doc_type == "imei" else 0
         day_data["num_invoice"] += 1 if report_file.doc_type == "invoice" else 0
+        day_data["report_files"].append(report_file)
 
         if sum([len(report_file.reviewed_accuracy[x]) for x in report_file.reviewed_accuracy.keys() if "_count" not in x]) > 0 :
             day_data["average_accuracy_rate"]["imei"].add(report_file.reviewed_accuracy.get("imei_number", 0))
@@ -192,14 +191,13 @@ class ReportAccumulateByRequest:
         for report_file in report_files:
             self.data[this_month][0] = self.update_total(self.data[this_month][0], report_file) # Update the subtotal within the month
             self.data[this_month][1][this_day] = self.update_day(self.data[this_month][1][this_day], report_file) # Update the subtotal of the day
-        # save repot detail
 
     def count_transactions_within_day(self, date_string):
         # convert this day into timezone.datetime at UTC
         start_date = datetime.strptime(date_string, "%Y%m%d")
-        start_date_utc = timezone.make_aware(start_date, timezone=timezone.utc)
+        start_date_with_timezone = timezone.make_aware(start_date)
-        end_date_utc = start_date_utc + timezone.timedelta(days=1)
+        end_date_with_timezone = start_date_with_timezone + timezone.timedelta(days=1)
-        return count_transactions(start_date_utc, end_date_utc)
+        return count_transactions(start_date_with_timezone, end_date_with_timezone, self.sub)
 
     def save(self, root_report_id, is_daily_report=False, include_test=False):
         report_data = self.get()
@@ -214,8 +212,8 @@ class ReportAccumulateByRequest:
             # save daily reports
             report_id = root_report_id + "_" + day
             start_date = datetime.strptime(day, "%Y%m%d")
-            start_date_utc = timezone.make_aware(start_date, timezone=timezone.utc)
+            start_date_with_timezone = timezone.make_aware(start_date)
-            end_date_utc = start_date_utc + timezone.timedelta(days=1)
+            end_date_with_timezone = start_date_with_timezone + timezone.timedelta(days=1)
             _average_OCR_time = {"invoice": self.data[month][1][day]["average_processing_time"]["invoice"](), "imei": self.data[month][1][day]["average_processing_time"]["imei"](),
                                  "invoice_count": self.data[month][1][day]["average_processing_time"]["invoice"].count, "imei_count": self.data[month][1][day]["average_processing_time"]["imei"].count}
 
@@ -235,8 +233,8 @@ class ReportAccumulateByRequest:
                 is_daily_report=is_daily_report,
                 subsidiary=self.sub.lower().replace(" ", ""),
                 include_test=include_test,
-                start_at=start_date_utc,
+                start_at=start_date_with_timezone,
-                end_at=end_date_utc,
+                end_at=end_date_with_timezone,
                 status="Ready",
                 number_request=report_data[month][1][day]["num_request"],
                 number_images=report_data[month][1][day]["total_images"],
@@ -250,14 +248,18 @@ class ReportAccumulateByRequest:
                 reviewed_accuracy=acumulated_acc["reviewed_accuracy"],
             )
             new_report.save()
-            # save data to redis for overview retrieval
+            data = extract_report_detail_list(self.data[month][1][day]["report_files"], lower=True)
-            self.redis_client.set(settings.OVERVIEW_REPORT_KEY, json.dumps(save_data))
+            data_workbook = dict2xlsx(data, _type='report_detail')
-        print(f'[DEBUG]: fine_data: {fine_data}')
+            local_workbook = save_workbook_file(report_id + ".xlsx", new_report, data_workbook)
+            s3_key=save_report_to_S3(report_id, local_workbook)
+        return fine_data, save_data
 
     def get(self) -> Any:
         # FIXME: This looks like a junk
         _data = copy.deepcopy(self.data)
         for month in _data.keys():
+            _data[month][0]["images_quality"]["successful_percent"] = _data[month][0]["images_quality"]["successful"]/_data[month][0]["total_images"] if _data[month][0]["total_images"] > 0 else 0
+            _data[month][0]["images_quality"]["bad_percent"] = _data[month][0]["images_quality"]["bad"]/_data[month][0]["total_images"] if _data[month][0]["total_images"] > 0 else 0
             num_transaction_imei = 0
             num_transaction_invoice = 0
             for day in _data[month][1].keys():
@@ -278,6 +280,10 @@ class ReportAccumulateByRequest:
                 _data[month][1][day]["reviewed_accuracy"]["purchase_date"] = _data[month][1][day]["reviewed_accuracy"]["purchase_date"]()
                 _data[month][1][day]["reviewed_accuracy"]["retailername"] = _data[month][1][day]["reviewed_accuracy"]["retailername"]()
                 _data[month][1][day]["reviewed_accuracy"]["sold_to_party"] = _data[month][1][day]["reviewed_accuracy"]["sold_to_party"]()
+                _data[month][1][day].pop("report_files")
+
+                _data[month][1][day]["images_quality"]["successful_percent"] = _data[month][1][day]["images_quality"]["successful"]/_data[month][1][day]["total_images"] if _data[month][1][day]["total_images"] > 0 else 0
+                _data[month][1][day]["images_quality"]["bad_percent"] = _data[month][1][day]["images_quality"]["bad"]/_data[month][1][day]["total_images"] if _data[month][1][day]["total_images"] > 0 else 0
 
             _data[month][0]["usage"]["imei"] = num_transaction_imei
             _data[month][0]["usage"]["invoice"] = num_transaction_invoice
@@ -535,9 +541,11 @@ def extract_report_detail_list(report_detail_list, lower=False, in_percent=True)
             data[i][key] = data[i][key]*100
     return data
 
-def count_transactions(start_date, end_date):
+def count_transactions(start_date, end_date, subsidiary="all"):
     base_query = Q(created_at__range=(start_date, end_date))
     base_query &= Q(is_test_request=False)
+    if subsidiary and subsidiary.lower().replace(" ", "")!="all":
+        base_query &= Q(redemption_id__startswith=subsidiary)
     transaction_att = {}
 
     print(f"[DEBUG]: atracting transactions attribute...")
@@ -201,10 +201,13 @@ def save_feedback_file(file_name: str, rq: FeedbackRequest, uploaded_file: dict)
         csvfile.write(file_contents)
     return file_path
 
-def save_workbook_file(file_name: str, rp: Report, workbook):
+def save_workbook_file(file_name: str, rp: Report, workbook, prefix=""):
     report_id = str(rp.report_id)
 
+    if not prefix:
         folder_path = os.path.join(settings.MEDIA_ROOT, "report", report_id)
+    else:
+        folder_path = os.path.join(settings.MEDIA_ROOT, "report", prefix)
     os.makedirs(folder_path, exist_ok = True)
 
     file_path = os.path.join(folder_path, file_name)
@@ -399,11 +402,16 @@ def build_media_url_v2(media_id: str, user_id: int, sub_id: int, u_sync_id: str)
 def get_value(_dict, keys):
     keys = keys.split('.')
     value = _dict
+    try:
         for key in keys:
             if not key in value.keys():
                 return "-"
             else:
                 value = value.get(key, {})
+    except Exception as e:
+        print(f"[ERROR]: {e}")
+        print(f"[ERROR]: value: {value}")
+        print(f"[ERROR]: keys: {keys}")
 
     if not value:
         return "-"
@@ -486,6 +494,7 @@ def dict2xlsx(input: json, _type='report'):
             ws[key + str(start_index)].border = border
 
             if _type == 'report':
+                if subtotal['subs'] == '+':
                     ws[key + str(start_index)].font = font_black_bold
                     if key_index == 0 or (key_index >= 9 and key_index <= 15):
                         ws[key + str(start_index)].fill = fill_gray
@@ -493,6 +502,15 @@ def dict2xlsx(input: json, _type='report'):
                         ws[key + str(start_index)].fill = fill_green
                     elif key_index >= 4 and key_index <= 8:
                         ws[key + str(start_index)].fill = fill_yellow
+                else:
+                    if 'average_accuracy_rate' in mapping[key] and type(value) in [int, float] and value < 95:
+                        ws[key + str(start_index)].style = normal_cell_red
+                    elif 'average_processing_time' in mapping[key] and type(value) in [int, float] and value > 2.0:
+                        ws[key + str(start_index)].style = normal_cell_red
+                    elif 'bad_percent' in mapping[key] and type(value) in [int, float] and value > 10:
+                        ws[key + str(start_index)].style = normal_cell_red
+                    else :
+                        ws[key + str(start_index)].style = normal_cell
             elif _type == 'report_detail':
                 if 'accuracy' in mapping[key] and type(value) in [int, float] and value < 75:
                     ws[key + str(start_index)].style = normal_cell_red
@@ -503,20 +521,4 @@ def dict2xlsx(input: json, _type='report'):
 
         start_index += 1
 
-        if 'data' in subtotal.keys():
-            for record in subtotal['data']:
-                for key in mapping.keys():
-                    value = get_value(record, mapping[key])
-                    ws[key + str(start_index)] = value
-                    if 'average_accuracy_rate' in mapping[key] and type(value) in [int, float] and value < 95:
-                        ws[key + str(start_index)].style = normal_cell_red
-                    elif 'average_processing_time' in mapping[key] and type(value) in [int, float] and value > 2.0:
-                        ws[key + str(start_index)].style = normal_cell_red
-                    elif 'bad_percent' in mapping[key] and type(value) in [int, float] and value > 10:
-                        ws[key + str(start_index)].style = normal_cell_red
-                    else :
-                        ws[key + str(start_index)].style = normal_cell
-
-                start_index += 1
-
     return wb
@@ -13,8 +13,8 @@ class RedisUtils:
         request_id: str
         data: dict
         image_index: int
-        """request_id
+        """
-        self.redis_client.hset(, image_index, json.dumps(data))
+        self.redis_client.hset(request_id, image_index, json.dumps(data))
         self.redis_client.expire(request_id, 3600)
 
     def get_all_cache(self, request_id):
@@ -23,6 +23,9 @@ class RedisUtils:
             resutlt[key] = json.loads(value)
         return resutlt
 
+    def get_specific_cache(self, request_id, key):
+        return json.loads(self.redis_client.hget(request_id, key))
+
     def get_size(self, request_id):
         return self.redis_client.hlen(request_id)
 
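The new get_specific_cache() is the read side of the overview cache: make_a_report_2() writes the serialized dashboard with set_cache, and AccuracyViewSet.overview() reads it back per "<SUB>_<duration>" key. A minimal round-trip sketch mirroring those calls; the example key and payload are illustrative only.

# Sketch only: overview-cache round trip through RedisUtils, mirroring the
# writer (celery worker) and reader (overview view) added in this commit.
import json
from fwd import settings
from fwd_api.utils.redis import RedisUtils

redis_client = RedisUtils()
key = "AU_30d"  # f"{subsidiary}_{duration}", subsidiary already in short form

# writer side: store the dashboard JSON under the "overview" hash
redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, key, json.dumps({"data": []}))

# reader side: fetch the cached entry for one subsidiary/duration pair
cached = redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)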
cope2n-api/fwd_api/utils/subsidiary.py (new file, +11 lines)
@@ -0,0 +1,11 @@
+from fwd.settings import SUBS
+
+def map_subsidiary_long_to_short(long_sub):
+    short_sub = SUBS.get(long_sub.upper(), "all")
+    return short_sub.upper()
+
+def map_subsidiary_short_to_long(short_sub):
+    for k, v in SUBS.items():
+        if v == short_sub.upper():
+            return k
+    return "ALL"
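subsidiary.py translates between the long subsidiary codes accepted by the API ("SEAU", "SESP", ...) and the short codes stored on reports ("AU", "SG", ...), using the SUBS dict added to settings. A short usage sketch of the expected behaviour:

# Sketch only: expected behaviour of the new helpers given
# SUBS = {"SEAU": "AU", "SESP": "SG", ..., "ALL": "all"}.
from fwd_api.utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long

assert map_subsidiary_long_to_short("seau") == "AU"      # case-insensitive lookup
assert map_subsidiary_long_to_short("unknown") == "ALL"  # falls back to "all", then upper-cased
assert map_subsidiary_short_to_long("AU") == "SEAU"      # reverse lookup over SUBS
assert map_subsidiary_short_to_long("XX") == "ALL"       # unknown short codes map to "ALL"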
cope2n-api/scripts/script.py (new file, +68 lines)
@@ -0,0 +1,68 @@
+import os
+import time
+import requests
+from datetime import datetime
+
+# Get the proxy URL from the environment variable
+interval = 60*60*1 # 1 minute
+update_cost = 60*3
+proxy_url = os.getenv('PROXY', "localhost")
+
+# Define the login API URL
+login_url = f'{proxy_url}/api/ctel/login/'
+login_token = None
+
+# Define the login credentials
+login_credentials = {
+    'username': 'sbt',
+    'password': '7Eg4AbWIXDnufgn'
+}
+
+# Define the command to call the update API
+update_url = f'{proxy_url}/api/ctel/make_report/'
+update_params = {
+    'is_daily_report': 'true',
+    'report_overview_duration': '',
+    'subsidiary': None
+}
+
+"report_overview_duration"
+
+def update_report(login_token, report_overview_duration=["30d", "7d"], subsidiary=["all", "SEAU", "SESP", "SME", "SEPCO", "TSE", "SEIN"]):
+    headers = {'Authorization': login_token}
+    for dur in report_overview_duration:
+        for sub in subsidiary:
+            update_params["report_overview_duration"] = dur
+            update_params["subsidiary"] = sub
+            update_response = requests.get(update_url, params=update_params, headers=headers)
+            print("[INFO]: update_response at {} by {} - {} with status {}".format(datetime.now(), dur, sub, update_response.status_code))
+            update_response.raise_for_status()
+            time.sleep(update_cost)
+
+# Define the interval in seconds between API calls
+# time.sleep(60)
+
+while True:
+    # Call the login API and retrieve the login token
+    if not login_token:
+        login_response = requests.post(login_url, data=login_credentials)
+        # login_response.raise_for_status()
+        if login_response.status_code == 200:
+            login_token = login_response.json()['token']
+            print("[INFO] relogged in at {}".format(datetime.now()))
+
+    # Call the update API
+    try:
+        update_report(login_token)
+    except Exception as e:
+        print(f"[ERROR]: {e}")
+        print(f"[ERROR]: Failed to update_response, retrying...")
+        login_response = requests.post(login_url, data=login_credentials)
+        # login_response.raise_for_status()
+        if login_response.status_code == 200:
+            login_token = login_response.json()['token']
+            print("[INFO] relogged in at {}".format(datetime.now()))
+        update_report(login_token)
+
+    # Wait for the specified interval
+    time.sleep(interval)
@@ -175,6 +175,7 @@ services:
 
     working_dir: /app
     command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
+    # command: bash -c "tail -f > /dev/null"
 
   # Back-end persistent
   db-sbt: