from datetime import timedelta
from typing import BinaryIO

from minio import Minio
from minio.error import S3Error

from app.core.exceptions import StorageError

from common_logging import get_logger

logger = get_logger(__name__)




class MinioService:
    """Thin wrapper around the MinIO SDK client for object-storage operations.

    Two error conventions are used, by design:

    * Bucket/prefix management (:meth:`ensure_bucket`, :meth:`delete_by_prefix`)
      raises :class:`StorageError` on failure.
    * Per-object helpers (upload/download/delete/list/presign) log the error
      and return a falsy sentinel (``False``, ``None`` or ``[]``) instead of
      raising, so callers can treat them as best-effort.
    """

    def __init__(
        self,
        endpoint: str = "localhost:9000",
        access_key: str = "minioadmin",
        secret_key: str = "minioadmin",
        secure: bool = False,
    ):
        """Create the underlying ``Minio`` client.

        Args:
            endpoint: ``host:port`` of the MinIO server.
            access_key: MinIO access key.
            secret_key: MinIO secret key.
            secure: Use HTTPS when ``True``.

        Raises:
            Exception: Whatever the ``Minio`` constructor raises on failure
                (re-raised unchanged after logging).
        """
        self.endpoint = endpoint
        self.access_key = access_key
        self.secret_key = secret_key
        self.secure = secure
        try:
            self.client = Minio(
                endpoint=endpoint, access_key=access_key, secret_key=secret_key, secure=secure
            )
            logger.info(f"Successfully connected to MinIO: {endpoint}")
        except Exception as e:
            logger.error(f"Failed to connect to MinIO: {e}")
            raise

    def ensure_bucket(self, bucket_name: str) -> None:
        """Create ``bucket_name`` if it does not already exist.

        Raises:
            StorageError: If the existence check or creation fails.
        """
        try:
            if not self.client.bucket_exists(bucket_name):
                self.client.make_bucket(bucket_name)
                logger.info(f"Created bucket: {bucket_name}")
            else:
                logger.info(f"Bucket already exists: {bucket_name}")
        except S3Error as e:
            logger.error(f"Failed to create bucket {bucket_name}: {e}")
            # Chain the original S3Error so the causal traceback is preserved
            # (previously suppressed with `from None`).
            raise StorageError(f"Failed to create bucket: {str(e)}", bucket=bucket_name) from e

    def upload_file(
        self,
        bucket_name: str,
        object_name: str,
        file_data: BinaryIO,
        file_size: int,
        content_type: str = "application/octet-stream",
    ) -> bool:
        """Upload a binary stream as ``bucket_name/object_name``.

        Returns:
            ``True`` on success, ``False`` if the upload itself fails.

        Raises:
            StorageError: If the bucket cannot be created (from
                :meth:`ensure_bucket`; not converted to ``False``).
        """
        try:
            self.ensure_bucket(bucket_name)
            self.client.put_object(
                bucket_name=bucket_name,
                object_name=object_name,
                data=file_data,
                length=file_size,
                content_type=content_type,
            )
            logger.info(f"File uploaded successfully: {bucket_name}/{object_name}")
            return True
        except S3Error as e:
            logger.error(f"File upload failed: {e}")
            return False

    def upload_file_from_path(
        self,
        bucket_name: str,
        object_name: str,
        file_path: str,
        content_type: str = "application/octet-stream",
    ) -> bool:
        """Upload a local file at ``file_path`` as ``bucket_name/object_name``.

        Returns:
            ``True`` on success, ``False`` if the upload itself fails.

        Raises:
            StorageError: If the bucket cannot be created (from
                :meth:`ensure_bucket`; not converted to ``False``).
        """
        try:
            self.ensure_bucket(bucket_name)
            self.client.fput_object(
                bucket_name=bucket_name,
                object_name=object_name,
                file_path=file_path,
                content_type=content_type,
            )
            logger.info(f"File uploaded successfully: {bucket_name}/{object_name}")
            return True
        except S3Error as e:
            logger.error(f"File upload failed: {e}")
            return False

    def download_file(self, bucket_name: str, object_name: str, file_path: str) -> bool:
        """Download ``bucket_name/object_name`` to the local ``file_path``.

        Returns:
            ``True`` on success, ``False`` on S3 failure.
        """
        try:
            self.client.fget_object(
                bucket_name=bucket_name, object_name=object_name, file_path=file_path
            )
            logger.info(f"File downloaded successfully: {bucket_name}/{object_name} -> {file_path}")
            return True
        except S3Error as e:
            logger.error(f"File download failed: {e}")
            return False

    def get_file_data(self, bucket_name: str, object_name: str) -> bytes | None:
        """Read the full contents of ``bucket_name/object_name`` into memory.

        Returns:
            The object bytes, or ``None`` on S3 failure.
        """
        try:
            response = self.client.get_object(bucket_name=bucket_name, object_name=object_name)
            try:
                data = response.read()
            finally:
                # Always return the connection to the pool, even if read()
                # raises mid-transfer; previously a failed read leaked it.
                response.close()
                response.release_conn()
            logger.info(f"File data retrieved successfully: {bucket_name}/{object_name}")
            return data
        except S3Error as e:
            logger.error(f"Failed to get file data: {e}")
            return None

    def delete_file(self, bucket_name: str, object_name: str) -> bool:
        """Delete a single object.

        Returns:
            ``True`` on success, ``False`` on S3 failure.
        """
        try:
            self.client.remove_object(bucket_name=bucket_name, object_name=object_name)
            logger.info(f"File deleted successfully: {bucket_name}/{object_name}")
            return True
        except S3Error as e:
            logger.error(f"File deletion failed: {e}")
            return False

    def file_exists(self, bucket_name: str, object_name: str) -> bool:
        """Return ``True`` if the object exists (any S3 error counts as absent)."""
        try:
            self.client.stat_object(bucket_name=bucket_name, object_name=object_name)
            return True
        except S3Error:
            return False

    def get_presigned_url(
        self, bucket_name: str, object_name: str, expires: timedelta = timedelta(hours=1)
    ) -> str | None:
        """Generate a presigned GET URL for the object.

        Args:
            expires: URL lifetime (default one hour).

        Returns:
            The URL string, or ``None`` on S3 failure.
        """
        try:
            url = self.client.presigned_get_object(
                bucket_name=bucket_name, object_name=object_name, expires=expires
            )
            logger.info(f"Generated presigned URL: {bucket_name}/{object_name}")
            return url
        except S3Error as e:
            logger.error(f"Failed to generate presigned URL: {e}")
            return None

    def list_objects(self, bucket_name: str, prefix: str = "", recursive: bool = True) -> list:
        """List objects under ``prefix``.

        Returns:
            A list of dicts with ``object_name``, ``size``, ``last_modified``
            and ``etag`` keys; ``[]`` on S3 failure.
        """
        try:
            objects = self.client.list_objects(
                bucket_name=bucket_name, prefix=prefix, recursive=recursive
            )
            return [
                {
                    "object_name": obj.object_name,
                    "size": obj.size,
                    "last_modified": obj.last_modified,
                    "etag": obj.etag,
                }
                for obj in objects
            ]
        except S3Error as e:
            logger.error(f"Failed to list objects: {e}")
            return []

    def delete_by_prefix(self, bucket_name: str, prefix: str) -> int:
        """Delete every object whose name starts with ``prefix``.

        Returns:
            The number of objects deleted (0 if none matched or the bucket
            does not exist).

        Raises:
            StorageError: If listing fails or any individual deletion fails.
        """
        # Local import: DeleteObject is only needed for this bulk operation.
        from minio.deleteobjects import DeleteObject

        try:
            objects = self.client.list_objects(
                bucket_name=bucket_name, prefix=prefix, recursive=True
            )
            delete_list = [DeleteObject(obj.object_name) for obj in objects]
            if not delete_list:
                logger.info(f"No objects found with prefix: {bucket_name}/{prefix}")
                return 0
            # remove_objects is lazy; materialize the iterator to execute it.
            errors = list(self.client.remove_objects(bucket_name, delete_list))
            if errors:
                error_names = [e.object_name for e in errors]
                logger.error(f"Failed to delete {len(errors)} objects: {error_names}")
                raise StorageError(
                    f"Failed to delete {len(errors)} objects under prefix {prefix}",
                    bucket=bucket_name,
                )
            deleted_count = len(delete_list)
            logger.info(f"Deleted {deleted_count} objects with prefix: {bucket_name}/{prefix}")
            return deleted_count
        except StorageError:
            raise
        except S3Error as e:
            # A missing bucket trivially has nothing to delete.
            if getattr(e, "code", "") == "NoSuchBucket":
                logger.info(f"Bucket does not exist, skip prefix deletion: {bucket_name}/{prefix}")
                return 0
            logger.error(f"Failed to delete objects by prefix {prefix}: {e}")
            # Chain the original S3Error for a complete traceback.
            raise StorageError(f"Failed to delete objects by prefix: {str(e)}", bucket=bucket_name) from e


# Process-wide lazily-created singleton; see get_minio_service().
_minio_service = None


def get_minio_service() -> MinioService:
    """Return the shared :class:`MinioService`, creating it on first use.

    Configuration is read from ``app.config.settings`` only at creation
    time, so the import stays local to avoid a module-load dependency.

    Returns:
        The process-wide ``MinioService`` instance.
    """
    global _minio_service
    if _minio_service is None:
        from app.config import settings

        # MINIO_SECURE may arrive as a bool (typed settings) or as a string
        # such as "true"/"False" (raw env var); the previous `.lower()` call
        # raised AttributeError on a bool.
        secure = settings.MINIO_SECURE
        if isinstance(secure, str):
            secure = secure.strip().lower() == "true"
        _minio_service = MinioService(
            endpoint=settings.MINIO_ENDPOINT,
            access_key=settings.MINIO_ACCESS_KEY,
            secret_key=settings.MINIO_SECRET_KEY,
            secure=bool(secure),
        )
    return _minio_service
