import subprocess
import hashlib
import random
import string
import os
import boto3
from datetime import datetime
from dotenv import load_dotenv
# Load production credentials/settings from .env_prod before any os.getenv calls.
load_dotenv(".env_prod")

# Deployment script; invoked with the generated image tag as its only argument.
BASH_FILE = './deploy_images.sh'
# Empty endpoint -> boto3 default (AWS S3); set a URL here to target MinIO instead.
S3_ENDPOINT = ""
# Credentials come from .env_prod / the process environment, never hard-coded.
S3_ACCESS_KEY = os.getenv('S3_ACCESS_KEY')
S3_SECRET_KEY = os.getenv('S3_SECRET_KEY')
# Bucket that archives the per-deployment compose/env/filebeat config files.
S3_BUCKET = "ocr-deployment-config"

class MinioS3Client:
    """Minimal S3 wrapper, usable against AWS S3 or MinIO (custom endpoint).

    Error handling is deliberately best-effort: failures are printed and the
    method returns None instead of raising, so a broken S3 connection does not
    abort a deployment. Callers must therefore tolerate None results.
    """

    def __init__(self, access_key, secret_key, bucket_name, endpoint=""):
        """Create the boto3 client; on failure, s3_client is left as None.

        endpoint: custom S3 endpoint URL (e.g. a MinIO server); empty string
                  means "use the default AWS S3 endpoint".
        """
        self.endpoint = endpoint
        self.access_key = access_key
        self.secret_key = secret_key
        self.bucket_name = bucket_name
        try:
            client_kwargs = {
                'aws_access_key_id': access_key,
                'aws_secret_access_key': secret_key,
            }
            # Only pass endpoint_url when one was given; otherwise boto3
            # resolves the standard AWS endpoint itself.
            if endpoint:
                client_kwargs['endpoint_url'] = endpoint
            self.s3_client = boto3.client('s3', **client_kwargs)
        except Exception as e:
            # BUGFIX: log tag was misspelled "[WARM]".
            print(f"[WARN] Unable to create an s3 client, {e}")
            self.s3_client = None

    def _client_missing(self, action):
        """Return True (after logging) when no client is available.

        BUGFIX: previously a failed __init__ left s3_client = None and every
        method then crashed with AttributeError instead of degrading gracefully.
        """
        if self.s3_client is None:
            print(f"Error {action} S3: no client available")
            return True
        return False

    def update_object(self, s3_key, content):
        """Write `content` (bytes/str/file-like) to `s3_key` in the bucket.

        Returns the put_object response dict, or None on any failure.
        """
        if self._client_missing("updating object in"):
            return None
        try:
            return self.s3_client.put_object(
                Bucket=self.bucket_name,
                Key=s3_key,
                Body=content
            )
        except Exception as e:
            print(f"Error updating object in S3: {str(e)}")

    def upload_file(self, local_file_path, s3_key):
        """Upload a local file to `s3_key`. Returns None (boto3's upload_file
        has no meaningful return value), and also None on failure."""
        if self._client_missing("uploading file to"):
            return None
        try:
            return self.s3_client.upload_file(local_file_path, self.bucket_name, s3_key)
        except Exception as e:
            print(f"Error uploading file to S3: {str(e)}")

    def download_file(self, s3_key, local_file_path):
        """Download `s3_key` from the bucket to a local path. Returns None;
        failures are printed and swallowed."""
        if self._client_missing("downloading file from"):
            return None
        try:
            return self.s3_client.download_file(self.bucket_name, s3_key, local_file_path)
        except Exception as e:
            print(f"Error downloading file from S3: {str(e)}")

def random_hash():
    """Return the hex SHA-256 digest of a random 16-char alphanumeric string.

    The result is a 64-character lowercase hex string; deploy() truncates it
    to build unique image tags.
    """
    alphabet = string.ascii_letters + string.digits
    seed_text = "".join(random.choice(alphabet) for _ in range(16))
    return hashlib.sha256(seed_text.encode()).hexdigest()

def deploy():
    """Generate a unique tag, run the deploy script, archive configs to S3.

    Steps:
      1. Build a tag: <timestamp>_<8 hex chars of a random sha256>.
      2. Run BASH_FILE with the tag; aborts (CalledProcessError) on failure.
      3. Upload the tag-specific compose/env files plus the filebeat configs
         to the S3 bucket, then delete the tag-specific local copies.

    Raises:
        subprocess.CalledProcessError: if the deploy script exits non-zero.
        FileNotFoundError: if an expected generated file is missing.
    """
    # e.g. "20240101120000_4cae5134" — sortable by time, unique per run.
    tag = f"{datetime.now():%Y%m%d%H%M%S}_{random_hash()[:8]}"
    print(tag)

    # Ensure the script is executable, then run it.
    # BUGFIX: subprocess.call ignored the exit status, so a failed build still
    # went on to upload files; check=True now aborts the deployment instead.
    os.chmod(BASH_FILE, 0o755)
    subprocess.run([BASH_FILE, tag], check=True)

    s3_client = MinioS3Client(S3_ACCESS_KEY, S3_SECRET_KEY, S3_BUCKET, S3_ENDPOINT)

    # Files produced by the deploy script, mapped S3 key -> local path.
    local_compose_file = f"./docker-compose_{tag}.yml"
    local_env_file = f"./.env_{tag}"
    uploads = {
        f"docker-yaml-files/docker-compose_{tag}.yml": local_compose_file,
        f"env-files/.env_{tag}": local_env_file,
        "docker_compose_filebeat_file/docker-compose-filebeat.yml": "./docker-compose-filebeat.yml",
        "filebeat_file/filebeat.yml": "./filebeat.yml",
    }

    print("[INFO]: Uploading compose and env file to s3...")
    for s3_key, local_path in uploads.items():
        s3_client.upload_file(local_path, s3_key)

    print("[INFO]: Deleting files...")
    # Only the per-tag files are removed; the filebeat configs are reused
    # across deployments.
    os.remove(local_compose_file)
    os.remove(local_env_file)

    print("[INFO]: Done !!!")
# Script entry point: run the deployment when executed directly.
if __name__ == "__main__":
    deploy()