commit 4b5de38b28
@@ -19,6 +19,7 @@ from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list
 from ..utils.file import download_from_S3, dict2xlsx, save_report_to_S3, build_S3_url
 from ..utils.redis import RedisUtils
 from ..utils.process import string_to_boolean
+from ..utils.cache import get_cache, set_cache
 from ..request.ReportCreationSerializer import ReportCreationSerializer
 from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long
 from ..utils.report import aggregate_overview
@@ -467,7 +468,7 @@ class AccuracyViewSet(viewsets.ViewSet):
 
             # Retrive data from Redis
             key = f"{subsidiary}_{duration}"
-            data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", [])
+            data = get_cache(key).get("data", [])
             response = {
                 'overview_data': data,
             }
@@ -510,7 +511,7 @@ class AccuracyViewSet(viewsets.ViewSet):
             for sub in subsidiaries_to_include:
                 key = f"{sub}_{duration}"
                 try:
-                    this_overview = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", [])
+                    this_overview = get_cache(key).get("data", [])
                     if sub != "seao":
                         this_overview = [d for d in this_overview if d.get("subs") != "+"]
                     else:
@@ -551,7 +552,7 @@ class AccuracyViewSet(viewsets.ViewSet):
         else:
             # Retrive data from Redis
             key = f"{subsidiary}_{duration}"
-            data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", [])
+            data = get_cache(key).get("data", [])
             response = {
                 'overview_data': data,
             }
@@ -7,6 +7,7 @@ from ..utils.accuracy import update_temp_accuracy, IterAvg, calculate_and_save_s
 from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3, save_images_to_csv_briefly
 from ..utils import time_stuff
 from ..utils.redis import RedisUtils
+from ..utils.cache import set_cache, get_cache
 from django.utils import timezone
 from django.db.models import Q
 import json
@@ -315,7 +316,8 @@ def make_a_report_2(report_id, query_set):
         overview_filename = query_set["subsidiary"] + "_" + query_set["report_overview_duration"] + ".xlsx"
         local_workbook = save_workbook_file(overview_filename, report, data_workbook, settings.OVERVIEW_REPORT_ROOT)
         s3_key=save_report_to_S3(report.report_id, local_workbook)
-        redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data))
+        # redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data))
+        set_cache(overview_filename.replace(".xlsx", ""), save_data)
 
     except IndexError as e:
         print(e)
@@ -1,5 +1,5 @@
 # myapp/management/commands/mycustomcommand.py
-from io import StringIO
+from fwd_api.constant.common import FileCategory
 from django.core.management.base import BaseCommand
 from tqdm import tqdm
 from fwd_api.models import SubscriptionRequestFile, SubscriptionRequest
@@ -37,7 +37,7 @@ class Command(BaseCommand):
                 "sold_to_party": ""
             }
 
-            images = SubscriptionRequestFile.objects.filter(request=request)
+            images = SubscriptionRequestFile.objects.filter(request=request, file_category=FileCategory.Origin.value)
             is_match = False
             try:
                 for i, image in enumerate(images):
cope2n-api/fwd_api/migrations/0184_caching.py (new file)
@@ -0,0 +1,21 @@
+# Generated by Django 4.1.3 on 2024-03-05 08:10
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('fwd_api', '0183_report_num_no_reviewed_report_num_not_reviewed_and_more'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Caching',
+            fields=[
+                ('id', models.AutoField(primary_key=True, serialize=False)),
+                ('key', models.CharField(max_length=200, unique=True)),
+                ('value', models.JSONField(null=True)),
+            ],
+        ),
+    ]
cope2n-api/fwd_api/models/Caching.py (new file)
@@ -0,0 +1,8 @@
+from django.db import models
+from django.utils import timezone
+from fwd_api.models.Subscription import Subscription
+
+class Caching(models.Model):
+    id = models.AutoField(primary_key=True)
+    key = models.CharField(max_length=200, unique=True)  # Change to request_id
+    value = models.JSONField(null=True)  # Change to request_id
@@ -8,5 +8,4 @@ from .Subscription import Subscription
 from .FeedbackRequest import FeedbackRequest
 from .Report import Report
 from .ReportFile import ReportFile
-
-
+from .Caching import Caching
cope2n-api/fwd_api/utils/cache.py (new file)
@@ -0,0 +1,19 @@
+from fwd_api.models import Caching
+
+def set_cache(key, value):
+    cache = Caching.objects.filter(key=key)
+    if cache.count() == 0:
+        this_cache = Caching(key=key, value=value)
+    else:
+        this_cache = cache.first()
+        this_cache.value = value
+    this_cache.save()
+    return this_cache
+
+
+def get_cache(key):
+    value = {}
+    cache = Caching.objects.filter(key=key)
+    if cache.count() > 0:
+        value = cache.first().value
+    return value
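
Note (not part of the commit): a minimal usage sketch of the new DB-backed cache helpers, matching the call sites changed above. It assumes Django settings are loaded and the 0184_caching migration has been applied; the key string is a hypothetical example of the f"{subsidiary}_{duration}" pattern used in the views.

    from fwd_api.utils.cache import get_cache, set_cache

    key = "seao_30d"  # hypothetical "<subsidiary>_<duration>" cache key

    # make_a_report_2 now stores the plain dict; no json.dumps() is needed,
    # unlike the old redis_client.set_cache() call.
    set_cache(key, {"data": [{"subs": "+"}]})

    # The accuracy views read it back the same way; for a missing key,
    # get_cache() returns {} and .get("data", []) falls back to an empty list.
    overview_data = get_cache(key).get("data", [])
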
@@ -4,6 +4,6 @@ export function datetimeStrToDate(dateTimeStr: string, targetTimeZone: string):
   }
   const options: Intl.DateTimeFormatOptions = { timeZone: targetTimeZone, year: 'numeric', month: '2-digit', day: '2-digit' };
   const date = new Date(dateTimeStr);
-  const convertedDateTimeStr = date.toLocaleDateString('en-US', options).split('/').reverse().join('-');
-  return convertedDateTimeStr;
+  const convertedDateTimeStr = date.toLocaleDateString('en-US', options).split('/').reverse();
+  return convertedDateTimeStr[0] + "-" + convertedDateTimeStr[2] + "-" + convertedDateTimeStr[1];
 }
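
Note (not part of the commit): with the en-US locale and 2-digit month/day options, toLocaleDateString returns "MM/DD/YYYY", so split('/').reverse() yields [YYYY, DD, MM]. The old join('-') therefore produced "YYYY-DD-MM"; the new indexing reorders the parts into "YYYY-MM-DD". A rough transliteration of that string handling in Python, using a hypothetical sample date:

    us_formatted = "02/29/2024"                         # en-US "MM/DD/YYYY" output
    parts = list(reversed(us_formatted.split("/")))     # ["2024", "29", "02"]

    old_result = "-".join(parts)                        # "2024-29-02": day and month swapped
    new_result = f"{parts[0]}-{parts[2]}-{parts[1]}"    # "2024-02-29": intended YYYY-MM-DD

    print(old_result, new_result)
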
@@ -250,7 +250,6 @@ services:
     networks:
       - ctel-sbt
-
 
 volumes:
   db_data:
   BE_static:
@@ -196,10 +196,7 @@ services:
       - ctel-sbt
 
   dashboard_refresh:
-    build:
-      context: api-cronjob
-      dockerfile: Dockerfile
-    image: sidp/api-caller-sbt:{{tag}}
+    image: public.ecr.aws/v4n9y6r8/sidp/api-caller-sbt:{{tag}}
     environment:
       - PROXY=http://be-ctel-sbt:9000
       - ADMIN_USER_NAME=${ADMIN_USER_NAME}
@@ -4,15 +4,20 @@ import boto3
 import os
 from tqdm import tqdm
 from datetime import datetime, timedelta
-from pytz import timezone
+import pytz
+from django.utils import timezone
 
 from dotenv import load_dotenv
 
 load_dotenv("../.env_prod")
 
-OUTPUT_NAME = "all_0226_0304"
-START_DATE = datetime(2024, 2, 26, tzinfo=timezone('Asia/Ho_Chi_Minh'))
-END_DATE = datetime(2024, 3, 4, tzinfo=timezone('Asia/Ho_Chi_Minh'))
+tz = pytz.timezone('Asia/Singapore')
+
+OUTPUT_NAME = "Feb29"
+START_DATE = datetime(2024, 2, 29)
+END_DATE = datetime(2024, 3, 1)
+START_DATE = timezone.make_aware(START_DATE, tz)
+END_DATE = timezone.make_aware(END_DATE, tz)
 
 # Database connection details
 db_host = os.environ.get('DB_HOST', "")
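
Note (not part of the commit): the last hunk replaces direct tzinfo= construction with django.utils.timezone.make_aware(). Attaching a pytz zone via tzinfo= is a known pitfall: it picks up the zone's historical LMT offset rather than the current standard offset, while make_aware() localizes the naive datetime correctly. A small sketch of the difference, assuming Django and pytz are installed:

    from datetime import datetime

    import pytz
    from django.utils import timezone

    tz = pytz.timezone("Asia/Singapore")
    naive = datetime(2024, 2, 29)

    # Attaching the pytz zone directly yields the zone's LMT offset, not +08:00.
    wrong = naive.replace(tzinfo=tz)

    # make_aware() localizes the naive datetime to the expected SGT offset.
    right = timezone.make_aware(naive, tz)

    print(wrong.utcoffset(), right.utcoffset())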