This commit is contained in:
dx-tan 2024-03-05 15:39:51 +07:00
parent 1d6bd417d9
commit 51623c3fa1
8 changed files with 66 additions and 17 deletions

View File

@ -19,6 +19,7 @@ from ..utils.accuracy import shadow_report, MonthReportAccumulate, first_of_list
from ..utils.file import download_from_S3, dict2xlsx, save_report_to_S3, build_S3_url from ..utils.file import download_from_S3, dict2xlsx, save_report_to_S3, build_S3_url
from ..utils.redis import RedisUtils from ..utils.redis import RedisUtils
from ..utils.process import string_to_boolean from ..utils.process import string_to_boolean
from ..utils.cache import get_cache, set_cache
from ..request.ReportCreationSerializer import ReportCreationSerializer from ..request.ReportCreationSerializer import ReportCreationSerializer
from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long from ..utils.subsidiary import map_subsidiary_long_to_short, map_subsidiary_short_to_long
from ..utils.report import aggregate_overview from ..utils.report import aggregate_overview
@ -467,7 +468,7 @@ class AccuracyViewSet(viewsets.ViewSet):
# Retrieve data from Redis # Retrieve data from Redis
key = f"{subsidiary}_{duration}" key = f"{subsidiary}_{duration}"
data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", []) data = get_cache(key).get("data", [])
response = { response = {
'overview_data': data, 'overview_data': data,
} }
@ -510,7 +511,7 @@ class AccuracyViewSet(viewsets.ViewSet):
for sub in subsidiaries_to_include: for sub in subsidiaries_to_include:
key = f"{sub}_{duration}" key = f"{sub}_{duration}"
try: try:
this_overview = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", []) this_overview = get_cache(key).get("data", [])
if sub != "seao": if sub != "seao":
this_overview = [d for d in this_overview if d.get("subs") != "+"] this_overview = [d for d in this_overview if d.get("subs") != "+"]
else: else:
@ -551,7 +552,7 @@ class AccuracyViewSet(viewsets.ViewSet):
else: else:
# Retrieve data from Redis # Retrieve data from Redis
key = f"{subsidiary}_{duration}" key = f"{subsidiary}_{duration}"
data = json.loads(redis_client.get_specific_cache(settings.OVERVIEW_REPORT_ROOT, key)).get("data", []) data = get_cache(key).get("data", [])
response = { response = {
'overview_data': data, 'overview_data': data,
} }

View File

@ -7,6 +7,7 @@ from ..utils.accuracy import update_temp_accuracy, IterAvg, calculate_and_save_s
from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3, save_images_to_csv_briefly from ..utils.file import dict2xlsx, save_workbook_file, save_report_to_S3, save_images_to_csv_briefly
from ..utils import time_stuff from ..utils import time_stuff
from ..utils.redis import RedisUtils from ..utils.redis import RedisUtils
from ..utils.cache import set_cache, get_cache
from django.utils import timezone from django.utils import timezone
from django.db.models import Q from django.db.models import Q
import json import json
@ -315,7 +316,8 @@ def make_a_report_2(report_id, query_set):
overview_filename = query_set["subsidiary"] + "_" + query_set["report_overview_duration"] + ".xlsx" overview_filename = query_set["subsidiary"] + "_" + query_set["report_overview_duration"] + ".xlsx"
local_workbook = save_workbook_file(overview_filename, report, data_workbook, settings.OVERVIEW_REPORT_ROOT) local_workbook = save_workbook_file(overview_filename, report, data_workbook, settings.OVERVIEW_REPORT_ROOT)
s3_key=save_report_to_S3(report.report_id, local_workbook) s3_key=save_report_to_S3(report.report_id, local_workbook)
redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data)) # redis_client.set_cache(settings.OVERVIEW_REPORT_ROOT, overview_filename.replace(".xlsx", ""), json.dumps(save_data))
set_cache(overview_filename.replace(".xlsx", ""), save_data)
except IndexError as e: except IndexError as e:
print(e) print(e)

View File

@ -1,5 +1,5 @@
# myapp/management/commands/mycustomcommand.py # myapp/management/commands/mycustomcommand.py
from io import StringIO from fwd_api.constant.common import FileCategory
from django.core.management.base import BaseCommand from django.core.management.base import BaseCommand
from tqdm import tqdm from tqdm import tqdm
from fwd_api.models import SubscriptionRequestFile, SubscriptionRequest from fwd_api.models import SubscriptionRequestFile, SubscriptionRequest
@ -37,7 +37,7 @@ class Command(BaseCommand):
"sold_to_party": "" "sold_to_party": ""
} }
images = SubscriptionRequestFile.objects.filter(request=request) images = SubscriptionRequestFile.objects.filter(request=request, file_category=FileCategory.Origin.value)
is_match = False is_match = False
try: try:
for i, image in enumerate(images): for i, image in enumerate(images):

View File

@ -0,0 +1,21 @@
# Generated by Django 4.1.3 on 2024-03-05 08:10
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration introducing the Caching key/value table
    # used by fwd_api.utils.cache (set_cache / get_cache).

    dependencies = [
        # Must apply after the preceding report-field migration in the chain.
        ('fwd_api', '0183_report_num_no_reviewed_report_num_not_reviewed_and_more'),
    ]

    operations = [
        migrations.CreateModel(
            name='Caching',
            fields=[
                # Surrogate integer primary key.
                ('id', models.AutoField(primary_key=True, serialize=False)),
                # Cache lookup key; uniqueness enforced at the DB level.
                ('key', models.CharField(max_length=200, unique=True)),
                # Cached JSON payload; nullable so a key may exist without data.
                ('value', models.JSONField(null=True)),
            ],
        ),
    ]

View File

@ -0,0 +1,8 @@
from django.db import models
from django.utils import timezone
from fwd_api.models.Subscription import Subscription
class Caching(models.Model):
    """Generic key/value cache table (JSON payload keyed by a unique string)."""
    # Surrogate integer primary key.
    id = models.AutoField(primary_key=True)
    # Unique cache key — presumably "<subsidiary>_<duration>" per the
    # report-overview callers; TODO confirm against utils.cache usage.
    key = models.CharField(max_length=200, unique=True)
    # Cached JSON payload; nullable so a key can exist with no data yet.
    value = models.JSONField(null=True)

View File

@ -8,5 +8,4 @@ from .Subscription import Subscription
from .FeedbackRequest import FeedbackRequest from .FeedbackRequest import FeedbackRequest
from .Report import Report from .Report import Report
from .ReportFile import ReportFile from .ReportFile import ReportFile
from .Caching import Caching

View File

@ -0,0 +1,19 @@
from fwd_api.models import Caching
def set_cache(key, value):
    """Create or update the cache row stored under *key*.

    Uses a single ``update_or_create`` upsert instead of the original
    ``count()`` / ``first()`` pair, saving a query round-trip and
    narrowing the race window against the unique ``key`` column.

    Args:
        key: Unique cache key string.
        value: JSON-serializable payload stored in ``Caching.value``.

    Returns:
        The saved ``Caching`` instance (same as the original contract).
    """
    this_cache, _created = Caching.objects.update_or_create(
        key=key,
        defaults={"value": value},
    )
    return this_cache
def get_cache(key):
    """Return the cached payload for *key*, or ``{}`` when no row exists.

    Single ``.first()`` query instead of the original ``count()`` +
    ``first()`` double lookup.

    NOTE(review): if a row exists whose ``value`` is NULL this returns
    ``None`` — identical to the original behavior; callers doing
    ``.get("data", [])`` would still break on such a row.
    """
    cached = Caching.objects.filter(key=key).first()
    return cached.value if cached is not None else {}

View File

@ -89,12 +89,12 @@ services:
depends_on: depends_on:
db-sbt: db-sbt:
condition: service_started condition: service_started
command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input && # command: sh -c "chmod -R 777 /app; sleep 5; python manage.py collectstatic --no-input &&
python manage.py makemigrations && # python manage.py makemigrations &&
python manage.py migrate && # python manage.py migrate &&
python manage.py compilemessages && # python manage.py compilemessages &&
gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod # gunicorn fwd.asgi:application -k uvicorn.workers.UvicornWorker --timeout 300 -b 0.0.0.0:9000" # pre-makemigrations on prod
# command: bash -c "tail -f > /dev/null" command: bash -c "tail -f > /dev/null"
minio: minio:
image: minio/minio image: minio/minio
@ -179,8 +179,8 @@ services:
- ./cope2n-api:/app - ./cope2n-api:/app
working_dir: /app working_dir: /app
command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5" # command: sh -c "celery -A fwd_api.celery_worker.worker worker -l INFO -c 5"
# command: bash -c "tail -f > /dev/null" command: bash -c "tail -f > /dev/null"
# Back-end persistent # Back-end persistent
db-sbt: db-sbt:
@ -250,7 +250,6 @@ services:
networks: networks:
- ctel-sbt - ctel-sbt
volumes: volumes:
db_data: db_data:
BE_static: BE_static: