Compare commits

..

No commits in common. "main" and "dev/search_content" have entirely different histories.

5 changed files with 4 additions and 48 deletions

View File

@ -14,7 +14,6 @@ from pathlib import Path
import environ
from django.urls import reverse_lazy
from fwd_api.middleware.logging_request_response_middleware import TraceIDLogFilter
from fwd_api.middleware.response_monitor import ResponseMonitorMiddleware
# Build paths inside the project like this: BASE_DIR / 'subdir'.
@ -50,9 +49,6 @@ S3_SECRET_KEY = env.str("S3_SECRET_KEY", "")
S3_BUCKET_NAME = env.str("S3_BUCKET_NAME", "ocr-data")
REDIS_HOST = env.str("REDIS_HOST", "result-cache")
REDIS_PORT = env.int("REDIS_PORT", 6379)
AWS_REGION = env.str("AWS_REGION", "")
AWS_QUEUE_URL = env.str("AWS_QUEUE_URL", "")
INSTALLED_APPS = [
@ -81,8 +77,7 @@ MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
"whitenoise.middleware.WhiteNoiseMiddleware",
"django.middleware.locale.LocaleMiddleware",
"fwd_api.middleware.logging_request_response_middleware.LoggingMiddleware",
'fwd_api.middleware.response_monitor.ResponseMonitorMiddleware',
"fwd_api.middleware.logging_request_response_middleware.LoggingMiddleware"
]
LOCALE_PATHS = [

View File

@ -20,8 +20,6 @@ from fwd_api.middleware.local_storage import get_current_trace_id
import csv
import json
import copy
import boto3
import datetime
from fwd_api.utils.accuracy import predict_result_to_ready
from celery.utils.log import get_task_logger
@ -35,8 +33,6 @@ s3_client = S3Util.MinioS3Client(
secret_key=settings.S3_SECRET_KEY,
bucket_name=settings.S3_BUCKET_NAME
)
# SQS client used by the error-response monitoring task below.
# Fix: the service name must be exactly 'sqs' — appending a uuid
# ('sqs' + str(uuid.uuid4())) makes boto3.client raise
# UnknownServiceError at import time. AWS credentials are resolved by
# boto3's standard credential chain (env vars / shared credentials file /
# instance profile), not hard-coded here.
sqs_client = boto3.client('sqs', region_name=settings.AWS_REGION)
def process_pdf_file(file_name: str, file_path: str, request, user, doc_type: str, index_in_request: int) -> list:
try:
@ -294,22 +290,3 @@ def upload_obj_to_s3(byte_obj, s3_key):
res = s3_client.update_object(s3_key, obj)
else:
logger.info(f"S3 is not available, skipping,...")
@app.task(base=VerboseTask, name='send_response_to_sqs')
def send_response_to_sqs(response_data, status_code):
    """Send error responses to SQS for monitoring"""
    # Best-effort delivery: monitoring must never raise into the caller,
    # so every failure is logged and swallowed.
    try:
        # Epoch-second timestamp plus the original status and body.
        payload = json.dumps({
            "status_code": status_code,
            "timestamp": int(datetime.datetime.now().timestamp()),
            "message": response_data,
        })
        sqs_client.send_message(
            QueueUrl=settings.AWS_QUEUE_URL,
            MessageBody=payload,
        )
        logger.info(f"Error response sent to SQS: {status_code}")
    except Exception as e:
        logger.error(f"Failed to send to SQS: {str(e)}")

View File

@ -47,7 +47,6 @@ app.conf.update({
Queue('csv_feedback'),
Queue('report'),
Queue('report_2'),
Queue('error_responses'),
],
'task_routes': {
'process_sap_invoice_result': {'queue': 'invoice_sap_rs'},
@ -67,7 +66,6 @@ app.conf.update({
'csv_feedback': {'queue': "csv_feedback"},
'make_a_report': {'queue': "report"},
'make_a_report_2': {'queue': "report_2"},
'send_response_to_sqs': {'queue': 'error_responses'},
}
})

View File

@ -1,14 +0,0 @@
from functools import partial
from django.utils.deprecation import MiddlewareMixin
from fwd_api.celery_worker.internal_task import send_response_to_sqs
class ResponseMonitorMiddleware(MiddlewareMixin):
    """Forward every HTTP error response (status 400-599) to SQS for monitoring."""

    def process_response(self, request, response):
        """Monitor responses and send errors to SQS"""
        if response and (400 <= response.status_code < 600):
            # DRF responses expose parsed `.data`; plain Django responses
            # only carry bytes in `.content`. Fix: the original used
            # str(response.content), which yields the "b'...'" repr of the
            # bytes object rather than the body text — decode it instead.
            if hasattr(response, 'data'):
                body = response.data
            else:
                body = response.content.decode('utf-8', errors='replace')
            # Send async to avoid blocking response
            send_response_to_sqs.delay(body, response.status_code)
        return response

View File

@ -1,4 +1,4 @@
# Generated by Django 4.1.3 on 2024-11-04 10:47
# Generated by Django 4.1.3 on 2024-11-05 02:48
from django.db import migrations, models