import logging
import mimetypes
import os
from contextlib import contextmanager
from datetime import datetime, timezone
from functools import wraps
from pathlib import Path
from time import perf_counter, time

from botocore.exceptions import ClientError
from flask import g

from . import config_load as config


class Timer:
    """Profiling proxy around a boto3 S3 client.

    Attribute access is forwarded to the wrapped client; callable attributes
    are wrapped so each call's duration is logged (when ``config.S3_DEBUG``
    is truthy) and accumulated into the per-request ``flask.g.profiler_data``
    under ``total_s3_ms`` / ``s3_ops_count``.
    """

    def __init__(self, s3_conn):
        # The underlying boto3 client every lookup is delegated to.
        self.s3_conn = s3_conn

    def __getattr__(self, name):
        def measure(func):
            @wraps(func)
            def inner(*args, **kwargs):
                # perf_counter is monotonic; time() tracks the wall clock and
                # can jump backwards, producing negative/garbage durations.
                start = perf_counter()

                try:
                    res = func(*args, **kwargs)
                finally:
                    res_time_ms = (perf_counter() - start) * 1000

                    if config.S3_DEBUG:
                        logging.info(f"S3 method {func.__name__} was executed with args: {args}, kwargs: {kwargs} in {round(res_time_ms, 3)} ms")

                    try:
                        # flask.g raises RuntimeError outside an application
                        # context; profiling is then silently skipped.
                        if "profiler_data" not in g:
                            g.profiler_data = dict()

                        g.profiler_data.setdefault("total_s3_ms", 0)
                        g.profiler_data.setdefault("s3_ops_count", 0)

                        g.profiler_data["total_s3_ms"] = round(g.profiler_data["total_s3_ms"] + res_time_ms, 3)
                        g.profiler_data["s3_ops_count"] += 1
                    except RuntimeError:
                        pass

                return res
            return inner

        attr = getattr(self.s3_conn, name)
        if callable(attr):
            return measure(attr)
        return attr


class IOS3:
    """A pathlib/os-flavoured filesystem facade over a single S3 bucket.

    Objects are materialised into a local cache directory for read/write
    access (``io_open``) and uploaded back to S3 once modified.  HEAD
    metadata is memoised per request in ``flask.g`` when an application
    context exists, else in an instance-local dict.  Directories are
    represented as zero-length objects whose key ends with ``/`` and whose
    ContentType is ``application/x-directory``.
    """

    def __init__(self, conn, bucket):
        # Wrap the raw client so every S3 call is timed/profiled.
        self.conn = Timer(conn)

        self.bucket = bucket
        self.cache_dir = "/tmp/s3/cache"

        # Memoised HEAD results: path -> response dict, or None when absent.
        self._metadata = self._init_metadata()

    def metadata(self, path):
        """Return (cached) HEAD metadata for *path*, or None if it is absent.

        A path can live in S3 either as a plain key (file) or as ``key/``
        (directory); both spellings are probed.  Extension-less names (and
        ``.meta``) are probed directory-first to save a round-trip in the
        common case.
        """
        if not self._metadata.get(path, None):
            if path == "/":
                self._metadata[path] = self._get_metadata(path)
            else:
                as_file, as_dir = path.strip("/"), f"{path.strip('/')}/"
                suffix = self.io_suffix(path)

                if not suffix or suffix == ".meta":
                    self._metadata[path] = self._get_metadata(as_dir) or self._get_metadata(as_file)
                else:
                    self._metadata[path] = self._get_metadata(as_file) or self._get_metadata(as_dir)

        return self._metadata[path]

    def _init_metadata(self):
        """Bind the metadata cache to flask.g when possible, else a dict."""
        try:
            if "s3_metadata" not in g:
                g.s3_metadata = dict()

            return g.s3_metadata
        except RuntimeError:
            # No application context: fall back to an instance-local cache.
            return {}

    @staticmethod
    def _root_metadata():
        """Fresh fabricated HEAD response for the bucket root.

        Built per call (not shared) because callers mutate ``Metadata``.
        """
        return {
            "ContentType": "application/x-directory",
            "ContentLength": 0,
            "Metadata": {}
            }

    def _get_metadata(self, key):
        """HEAD *key*; returns None for a missing key, a stub for the root."""
        try:
            return self.conn.head_object(Bucket=self.bucket, Key=key)
        except ClientError as e:
            status = e.response["ResponseMetadata"]["HTTPStatusCode"]

            # HEAD on "/" yields 404 (or 400 on some S3 implementations);
            # the root always "exists", so synthesise a directory response.
            if status in (404, 400):
                if key == "/":
                    return self._root_metadata()
                if status == 404:
                    return None
            raise e

    def _get_cache_path(self, path):
        """Map an S3 posix path onto its local cache file location."""
        return Path(f"{self.cache_dir.rstrip('/')}/{path.lstrip('/')}")

    def io_read_text(self, path, encoding=None, errors=None):
        """Read *path* fully as text."""
        with self.io_open(path, mode='r', encoding=encoding, errors=errors) as file:
            return file.read()

    def io_read_bytes(self, path):
        """Read *path* fully as bytes."""
        with self.io_open(path, mode='rb') as file:
            return file.read()

    def io_write_text(self, path, data, encoding=None, errors=None, newline=None):
        """Write *data* to *path* as text; returns the characters written.

        BUG FIX: the encoding/errors/newline arguments are now forwarded to
        io_open instead of being silently replaced with None.
        """
        with self.io_open(path, mode='w', encoding=encoding, errors=errors, newline=newline) as file:
            return file.write(data)

    def io_write_bytes(self, path, data):
        """Write *data* to *path* as bytes; returns the bytes written."""
        with self.io_open(path, mode='wb') as file:
            return file.write(data)

    @contextmanager
    def io_open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None):
        """open()-style context manager backed by the local cache.

        The object is downloaded into the cache before opening; after the
        handle is closed, a modified cache file is uploaded back to S3 with
        its (possibly fabricated) metadata.

        Raises IsADirectoryError when *path* is a directory.
        """
        if self.io_is_dir(path):
            raise IsADirectoryError(path)

        cache_path = self._get_cache_path(path)

        if not cache_path.exists() or self._is_modified(path, cache_path):
            self._update_cache(path, cache_path)

        # BUG FIX: open outside the try block — if open() itself failed,
        # `file` was unbound in the finally clause and the real error was
        # masked by a NameError.
        file = cache_path.open(mode, buffering, encoding, errors, newline)
        try:
            yield file
        finally:
            file.close()

        if self._is_modified(path, cache_path):
            cache_stat = cache_path.stat()

            if not self.io_exists(path):
                # Brand-new object: fabricate metadata from the cached file.
                self._metadata[path] = {
                    "ContentType": self.io_mimetype(path) or "binary/octet-stream",
                    "ContentLength": cache_stat.st_size,
                    "Metadata": {
                        "ctime": str(cache_stat.st_ctime),
                        "mode": str(0o777)
                        }
                }
            self.metadata(path)["Metadata"]["mtime"] = str(cache_stat.st_mtime)

            with cache_path.open("rb") as file:
                self.conn.upload_fileobj(
                    file,
                    self.bucket,
                    self._as_s3_key(path),
                    ExtraArgs={"Metadata": self.metadata(path)["Metadata"]}
                    )

    def _is_modified(self, path, cache_path):
        """True when the cached copy diverges from S3.

        A cache file with no S3 counterpart counts as modified (it needs to
        be uploaded); now returns an explicit False (was an implicit None)
        when neither side exists.
        """
        s3_exists, cached = self.io_exists(path), cache_path.exists()

        if s3_exists and cached:
            return self.io_stat(path).st_mtime != cache_path.stat().st_mtime

        if s3_exists:
            return False

        return cached

    def _update_cache(self, path, cache_path):
        """(Re)download *path* into *cache_path* and mirror its timestamps."""
        cache_path.parent.mkdir(parents=True, exist_ok=True)

        exists = self.io_exists(path)
        stat = self.io_stat(path) if exists else None

        with cache_path.open("wb") as file:
            if exists and stat.st_size != 0:
                self.conn.download_fileobj(self.bucket, self._as_s3_key(path), file)

        if exists:
            # Keep mtime in sync so _is_modified() timestamp checks work.
            os.utime(str(cache_path), (stat.st_atime, stat.st_mtime))

    def io_suffix(self, path):
        """Last extension of the name, dot included ('' when none)."""
        name = self.io_name(path)
        if "." not in name:
            return ""
        return f".{name.split('.')[-1]}"

    def io_suffixes(self, path):
        """All extensions of the name, dots included (like Path.suffixes)."""
        name = self.io_name(path)
        if "." not in name:
            return []
        return [f".{suffix}" for suffix in name.split('.')[1:] if len(suffix)]

    def io_stem(self, path):
        """Name without its last extension."""
        name = self.io_name(path)
        return name.rsplit(".", 1)[0]

    def io_absolute(self, path):
        """Absolute posix form of *path* (everything is bucket-rooted)."""
        return self._as_posix(path)

    def io_parts(self, path):
        """Path components, root first: '/a/b' -> ['/', 'a', 'b']."""
        return ["/"] + [part for part in path.split("/") if len(part)]

    def io_rename(self, path, target):
        """Move *path* (file or whole directory tree) to *target*.

        Implemented as S3 copy + delete per object; raises FileExistsError /
        IsADirectoryError / NotADirectoryError analogous to os.rename.
        """
        if self.io_exists(target):
            raise FileExistsError

        if self.io_is_file(path) and self.io_is_dir(target):
            raise IsADirectoryError(target)

        if self.io_is_dir(path) and self.io_is_file(target):
            raise NotADirectoryError(target)

        if target.startswith(path):
            raise OSError(22, f"Invalid argument: {target}")

        if path == target:
            return

        if self.io_is_dir(path):
            # Depth of the source root, used to relativise walked dirs.
            src_depth = len(self.io_parts(path))

            for src_dir, dirnames, filenames in self.io_walk(path, top_down=False):
                # BUG FIX: slice off the source-root components; the old
                # `parts[len(parts):]` was always empty, flattening the
                # entire tree into `target`.
                parts = self.io_parts(src_dir)
                tail = "/".join(parts[src_depth:])

                dst_dir = os.path.join(target, *tail.split("/"))
                self.io_mkdir(dst_dir, parents=True, exist_ok=True)

                for filename in filenames:
                    src_file = os.path.join(src_dir, filename)
                    dst_file = os.path.join(dst_dir, filename)

                    if self.io_exists(dst_file):
                        raise FileExistsError(dst_file)

                    self.conn.copy_object(
                        CopySource={'Bucket': self.bucket, 'Key': self._as_s3_key(src_file)},
                        Bucket=self.bucket,
                        Key=self._as_s3_key(dst_file)
                        )

                    # BUG FIX: carry over the *file's* metadata (was the
                    # enclosing directory's), fetching it if uncached.
                    self._metadata[dst_file] = self.metadata(src_file).copy()
                    self.io_unlink(src_file, missing_ok=True)
                self.io_rmdir(src_dir)
        else:
            self.conn.copy_object(
                CopySource={'Bucket': self.bucket, 'Key': self._as_s3_key(path)},
                Bucket=self.bucket,
                Key=self._as_s3_key(target)
                )
            self._metadata[target] = self.metadata(path).copy()
            self.io_unlink(path, missing_ok=True)

    def io_joinpath(self, path, key):
        """Join *key* (possibly multi-segment) under *path*."""
        return os.path.join(path, *str(key).split("/"))

    def io_parent(self, path):
        """Parent of *path*; None for the root itself, '/' for top-level."""
        if path == "/":
            return None

        parent, *_ = self._as_posix(path).rstrip("/").rsplit("/", 1)
        if not len(parent):
            return "/"
        return parent

    def io_parents(self, path):
        """All ancestors of *path*, root first: '/a/b' -> ['/', '/a']."""
        parents, cur_parent = [], self.io_parent(path)

        while cur_parent is not None:
            parents.append(cur_parent)
            cur_parent = self.io_parent(cur_parent)
        return parents[::-1]

    def io_name(self, path):
        """Final path component ('/' for the root)."""
        if path == "/":
            return "/"
        return path.rstrip("/").rsplit("/")[-1]

    def io_mkdir(self, path, mode=0o666, parents=False, exist_ok=False):
        """Create directory *path* as a zero-length 'key/' marker object.

        Raises FileExistsError unless exist_ok; creates missing ancestors
        when parents=True, else raises FileNotFoundError.
        NOTE(review): with exist_ok=True an existing directory's marker is
        re-written, resetting its ctime/mtime — confirm callers expect that.
        """
        content_type, now = "application/x-directory", str(datetime.now().timestamp())
        stat = {
            "mtime": now,
            "ctime": now,
            "mode": str(mode)
            }

        if not exist_ok and self.io_exists(path):
            raise FileExistsError(path)

        parent = self.io_parent(path)
        if parent is not None and not self.io_exists(parent):
            if parents:
                self.io_mkdir(parent, mode, parents, exist_ok)
            else:
                raise FileNotFoundError(path)

        dir_path = self._as_dir_path(path)

        self.conn.put_object(
            Bucket=self.bucket,
            Key=self._as_s3_key(dir_path),
            ContentType=content_type,
            Metadata=stat
            )

        self._metadata[path] = {
            "ContentType": content_type,
            "ContentLength": 0,
            "Metadata": stat
        }

    def io_touch(self, path, mode=0o777, exist_ok=False):
        """Create an empty file at *path*.

        Existing path: returns silently when exist_ok, else FileExistsError.
        NOTE(review): unlike pathlib.Path.touch, an existing file's mtime is
        not refreshed.
        """
        content_type = self.io_mimetype(path) or "binary/octet-stream"
        stat = {
            "mtime": str(datetime.now().timestamp()),
            "mode": str(mode)
            }

        if self.io_exists(path):
            if not exist_ok:
                raise FileExistsError(path)
            return

        stat["ctime"] = str(datetime.now().timestamp())

        self.conn.put_object(
            Bucket=self.bucket,
            Key=self._as_s3_key(path),
            ContentType=content_type,
            Metadata=stat
            )

        self._metadata[path] = {
            "ContentType": content_type,
            "ContentLength": 0,
            "Metadata": stat
        }

    def io_unlink(self, path, missing_ok=False):
        """Delete file *path* from S3 and the local cache."""
        if not self.io_exists(path):
            if missing_ok:
                return
            raise FileNotFoundError(path)

        if not self.io_is_file(path):
            raise IsADirectoryError(path)

        cache_path = self._get_cache_path(path)
        if cache_path.exists():
            cache_path.unlink()

        self.conn.delete_object(Bucket=self.bucket, Key=self._as_s3_key(path))
        if path in self._metadata:
            self._metadata[path] = None

    def io_rmdir(self, path):
        """Remove empty directory *path* (OSError 39 if not empty)."""
        if not self.io_exists(path):
            raise FileNotFoundError(path)

        if not self.io_is_dir(path):
            raise NotADirectoryError(path)

        dir_path = self._as_dir_path(path)

        if len(list(self.io_iterdir(dir_path))) > 0:
            raise OSError(39, f"Directory not empty: {dir_path}")

        cache_path = self._get_cache_path(dir_path)
        if cache_path.exists():
            cache_path.rmdir()

        self.conn.delete_object(Bucket=self.bucket, Key=self._as_s3_key(dir_path))

        # BUG FIX: invalidate the cache under BOTH spellings — io_exists()
        # above caches under the caller's original *path*, which previously
        # stayed stale and made the directory look alive after removal.
        for key in (path, dir_path):
            if key in self._metadata:
                self._metadata[key] = None

    def io_iterdir(self, path):
        """Yield the direct children of directory *path* as posix paths."""
        if not self.io_exists(path):
            raise FileNotFoundError(path)

        if not self.io_is_dir(path):
            raise NotADirectoryError(path)

        path = self._as_dir_path(path)
        prefix_key = self._as_s3_key(path)

        # Page through list_objects_v2; Delimiter="/" keeps results one
        # level deep (sub-directories arrive as CommonPrefixes).
        kwargs = {"Bucket": self.bucket, "Prefix": prefix_key, "Delimiter": "/", "MaxKeys": 1000}
        while True:
            data = self.conn.list_objects_v2(**kwargs)

            for obj in data.get("Contents", []):
                # Skip the directory's own marker object.
                if obj["Key"] == prefix_key:
                    continue
                yield self._as_posix(obj["Key"])

            for common in data.get("CommonPrefixes", []):
                if common["Prefix"] == prefix_key:
                    continue
                yield self._as_posix(common["Prefix"])

            token = data.get("NextContinuationToken", None)
            if not token:
                break

            kwargs["ContinuationToken"] = token

    def io_walk(self, path, top_down=True, on_error=None):
        """Iteratively walk the tree like os.walk, yielding (dir, dirnames, filenames).

        With top_down=False a directory's triple is yielded after its whole
        subtree (io_rename/io_rmtree depend on this for bottom-up deletes).
        BUG FIX: *on_error* was accepted but ignored; listing errors are now
        reported through it os.walk-style (and still raised when it is None,
        preserving the old default behaviour).
        """
        stack = [path]

        while stack:
            cur_path = stack.pop()
            if isinstance(cur_path, tuple):
                # Deferred bottom-up triple whose subtree has been emitted.
                yield cur_path
                continue

            dirnames, dirs = [], []
            filenames = []

            try:
                children = list(self.io_iterdir(cur_path))
            except OSError as e:
                if on_error is not None:
                    on_error(e)
                    continue
                raise

            for child_path in children:
                name = self.io_name(child_path)
                if self.io_is_dir(child_path):
                    dirnames.append(name)
                    dirs.append(child_path)
                else:
                    filenames.append(name)

            if top_down:
                yield cur_path, dirnames, filenames
            else:
                stack.append((cur_path, dirnames, filenames))

            # Reversed so the leftmost child comes off the LIFO stack first.
            stack += dirs[::-1]

    def io_is_dir(self, path):
        """True for existing directory objects or any trailing-slash path."""
        is_dir = False

        if self.io_exists(path):
            is_dir |= self.metadata(path).get("ContentType", None) == "application/x-directory"
        is_dir |= path.endswith("/")

        return is_dir

    def io_is_file(self, path):
        """True when *path* exists and is not a directory."""
        if not self.io_exists(path):
            return False
        return not self.io_is_dir(path)

    def io_exists(self, path):
        """True when *path* resolves to an object (file or directory)."""
        if path in self._metadata:
            return self._metadata[path] is not None
        return self.metadata(path) is not None

    def io_stat(self, path):
        """Return an os.stat_result assembled from the S3 metadata.

        BUG FIX: S3 user metadata values are strings (io_mkdir/io_touch
        store str(mode) etc.); the numeric fields are now coerced so
        st_mode/st_uid/st_gid/st_size come back as ints.
        """
        if not self.io_exists(path):
            raise FileNotFoundError(path)

        meta = self.metadata(path)
        user_meta = meta["Metadata"]

        stat = (
            int(user_meta.get("mode", 0)),  # st_mode
            0,  # st_ino
            0,  # st_dev
            0,  # st_nlink
            int(user_meta.get("uid", 0)),  # st_uid
            int(user_meta.get("gid", 0)),  # st_gid
            int(user_meta.get("size", 0) or meta.get("ContentLength", 0)),  # st_size
            0,  # st_atime
            float(user_meta.get("mtime", 0)),  # st_mtime
            float(user_meta.get("ctime", 0))  # st_ctime
        )
        return os.stat_result(stat)

    def io_rmtree(self, path, *args, **kwargs):
        """Recursively delete directory *path*.

        Extra positional/keyword arguments are accepted for
        shutil.rmtree-style call sites and ignored.
        """
        if not self.io_is_dir(path):
            raise NotADirectoryError

        path = self._as_dir_path(path)

        # Bottom-up walk: children are removed before their parent.
        for root, dirnames, filenames in self.io_walk(path, top_down=False):
            for dirname in dirnames:
                self.io_rmdir(os.path.join(root, dirname))

            for filename in filenames:
                self.io_unlink(os.path.join(root, filename))

        if self.io_name(path) != "/":
            self.io_rmdir(path)

    def io_copy(self, src, dst):
        """Copy file *src* to *dst*; a directory *dst* receives src's name."""
        src, dst = str(src), str(dst)

        if self.io_is_dir(src):
            raise IsADirectoryError

        if self.io_is_dir(dst):
            dst = os.path.join(dst, self.io_name(src))

        self.conn.copy_object(
            CopySource={'Bucket': self.bucket, 'Key': self._as_s3_key(src)},
            Bucket=self.bucket,
            Key=self._as_s3_key(dst)
            )
        # BUG FIX: go through metadata() so an uncached source is fetched
        # instead of raising KeyError on self._metadata[src].
        self._metadata[dst] = self.metadata(src).copy()

    def io_mimetype(self, path):
        """Guess the MIME type from the file name (None when unknown)."""
        # add_type is idempotent, so re-registering per call is harmless.
        mimetypes.add_type('application/vnd.ms-outlook', '.msg')
        mimetypes.add_type('text/plain', '.log')

        mimetype, _ = mimetypes.guess_type(path)
        return mimetype

    def io_disk_usage(self, path):
        """shutil.disk_usage-style stub: S3 capacity is effectively unbounded."""
        return 10_000_000_000, 0, 10_000_000_000

    def io_tmp_readonly_path(self, path):
        """Return a local filesystem path holding a fresh copy of *path*.

        The returned file must be treated as read-only: changes are not
        detected here and will be clobbered by the next cache refresh.
        """
        if not self.io_exists(path):
            raise FileNotFoundError(path)

        cache_path = self._get_cache_path(path)
        if not cache_path.exists() or self._is_modified(path, cache_path):
            self._update_cache(path, cache_path)

        return str(cache_path.absolute())

    def _as_dir_path(self, path):
        """Canonical directory spelling: no leading, one trailing slash."""
        return f"{path.strip('/')}/"

    def _as_s3_key(self, path):
        """S3 object key for *path* (keys never start with '/')."""
        return path.lstrip("/")

    def _as_posix(self, path):
        """Posix spelling of a key: exactly one leading slash."""
        return f"/{path.lstrip('/')}"

