import requests
import ssl
from requests.packages.urllib3.poolmanager import PoolManager
from requests.packages.urllib3.util import ssl_
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
import time
import json
import random
from LoggingFormatter import logger
import urllib3
import InspectLinkValidator
from curl_cffi import requests as cffi

# OpenSSL cipher list handed to urllib3's SSL context (entries are ':'-separated).
# Fix: the original string had a stray space after "ECDHE-RSA-AES256-SHA384:",
# producing the invalid entry " ECDHE-ECDSA-AES256-SHA384" that OpenSSL can never match.
CIPHERS = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:AES256-SHA"


class TlsAdapter(HTTPAdapter):
    """Transport adapter that pins the module-level CIPHERS suite and applies
    extra ``ssl.OP_*`` options (e.g. disabling old TLS versions) to every
    pooled HTTPS connection."""

    def __init__(self, ssl_options=0, **kwargs):
        # Extra ssl.OP_* flags OR-ed into the context built in init_poolmanager.
        self.ssl_options = ssl_options
        super().__init__(**kwargs, pool_maxsize=5, max_retries=3, pool_block=False)

    def init_poolmanager(self, *pool_args, **pool_kwargs):
        # Build a urllib3 SSL context with our cipher list; certificate
        # verification stays mandatory.
        context = ssl_.create_urllib3_context(
            ciphers=CIPHERS,
            cert_reqs=ssl.CERT_REQUIRED,
            options=self.ssl_options,
        )
        self.poolmanager = PoolManager(*pool_args, ssl_context=context, **pool_kwargs)


class RequestManager(object):
    """Routes HTTP GETs through a pool of AWS-backed sessions first, then
    falls back to one randomly selected rotating proxy provider (IPRoyal,
    Webshare, ProxyCheap, 2Captcha, evomi).

    Every response goes through ``InspectLinkValidator.checkIfRequestSuccessfull``
    before being returned; a ``None`` return means the attempted route failed.
    """

    def __init__(self, all_sessions, webshare_ips):
        # Pre-built AWS sessions, rotated at random per request.
        self.all_sessions = all_sessions
        # User-agent entries loaded from user_agents.json.
        self.ua_dict = self.prepareUserAgents()
        # Webshare proxy URLs to pick from at random.
        self.webshare_ips = webshare_ips
        # TLS>=1.2-only adapter, kept for the legacy plain-requests path.
        self.adapter = TlsAdapter(ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1)

    def getAWSSession(self):
        """Return a random session from the AWS session pool."""
        return random.choice(self.all_sessions)

    def prepareUserAgents(self):
        """Load and return the user-agent dictionary from user_agents.json."""
        with open("user_agents.json") as f:
            return json.load(f)

    def getRandomUAEntry(self):
        """Return a random value from the loaded user-agent dictionary."""
        return random.choice(list(self.ua_dict.values()))

    def getNewSession(self):
        """Return a fresh curl_cffi session (browser TLS-fingerprint capable)."""
        return cffi.Session()

    def getRequestAllAvailableProxies(self, url, without_aws=False, skins_or_inspect_links: str = "inspect_links"):
        """GET ``url`` via AWS (two attempts), then via one random proxy provider.

        Fixes over the previous version:
        - ``without_aws`` was accepted but ignored; it now skips the AWS phase.
        - The exhaustive ``match`` on randint(1,5) had an unreachable default
          arm, a dead trailing ``return None``, and five copies of the same
          validate-and-return body; replaced with a dispatch list.

        Returns the validated response, or None if every attempt failed.
        """
        if not without_aws:
            for _ in range(2):
                logger.warning("Trying to get: " + str(url) + " with AWS")
                response = self.getAWSSession().get(url, timeout=15)
                response = InspectLinkValidator.checkIfRequestSuccessfull(
                    response=response, proxy_provider="AWS",
                    skins_or_inspect_links=skins_or_inspect_links)
                if response is not None:
                    return response
                time.sleep(5)  # back off before retrying AWS

        providers = [
            ("IPRoyal", self.getRequestIPRoyal),
            ("Webshare", self.getRequestWebshare),
            ("ProxyCheap", self.getRequestProxyCheap),
            ("2Captcha", self.getRequest2Captcha),
            ("evomi", self.getRequestEvomi),
        ]
        provider_name, fetch = random.choice(providers)
        logger.warning("Trying to get: " + str(url) + " with " + provider_name)
        response = fetch(url=url)
        # checkIfRequestSuccessfull returns None on failure, which we propagate.
        return InspectLinkValidator.checkIfRequestSuccessfull(
            response=response, proxy_provider=provider_name,
            skins_or_inspect_links=skins_or_inspect_links)

    def _getViaProxy(self, url, proxy_url, caller_name):
        """Shared provider helper: GET ``url`` through ``proxy_url`` with Chrome
        impersonation; log (under ``caller_name``) and return None on any error."""
        s = self.getNewSession()
        s.proxies.update({"https": proxy_url, "http": proxy_url})
        try:
            return s.get(url, timeout=10, impersonate="chrome")
        except Exception as e:
            logger.critical("Exception in " + caller_name + ": " + str(e))
        return None

    def getRequestEvomi(self, url):
        """GET ``url`` through the evomi rotating-residential proxy."""
        logger.warning("Trying to get: " + str(url) + " with evomi")
        # NOTE(security): credentials are hard-coded; move to config/env vars.
        evomi_http = "http://kroisalex8:YIcLVEa3mdM4e7KsIW4W@rp-core.evomi.com:1000"
        return self._getViaProxy(url, evomi_http, "getRequestEvomi")

    def getRequest2Captcha(self, url):
        """GET ``url`` through the 2Captcha proxy (https scheme only, as before)."""
        logger.warning("Trying to get: " + str(url) + " with 2Captcha")
        # NOTE(security): credentials are hard-coded; move to config/env vars.
        username = "u9c85bfe1559a05bf-zone-custom"
        password = "u9c85bfe1559a05bf"
        PROXY_DNS = "118.193.59.102:2334"
        proxy = {"https": "http://{}:{}@{}".format(username, password, PROXY_DNS)}
        try:
            return cffi.get(url, proxies=proxy, timeout=10, impersonate="chrome")
        except Exception as e:
            logger.critical("Exception in getRequest2Captcha: " + str(e))
        return None

    def getRequestIPRoyal(self, url):
        """GET ``url`` through IPRoyal with a randomly chosen region/streaming
        password variant (each variant selects a different proxy pool)."""
        iproyal_pwlist = ["mArgare1he_region-europe", "mArgare1he", "mArgare1he_streaming-1", "mArgare1he_region-europe_streaming-1", "mArgare1he_region-northamerica", "mArgare1he_region-northamerica_streaming-1"]
        logger.warning("Trying to get: " + str(url) + " with IPRoyal")
        # NOTE(security): credentials are hard-coded; move to config/env vars.
        iproyal_proxy = "http://alex133769:" + random.choice(iproyal_pwlist) + "@91.239.130.34:12321"
        # (fix: removed unused getRandomUAEntry() call — its result was never used)
        return self._getViaProxy(url, iproyal_proxy, "getRequestIPRoyal")

    def getRequestWebshare(self, url):
        """GET ``url`` through a random Webshare proxy from the configured list."""
        logger.warning("Trying to get: " + str(url) + " with Webshare")
        return self._getViaProxy(url, random.choice(self.webshare_ips), "getRequestWebshare")

    def getRequestProxyCheap(self, url):
        """GET ``url`` through the ProxyCheap endpoint."""
        # NOTE(security): credentials are hard-coded; move to config/env vars.
        proxycheap_proxy = "http://uw0g9g5j:mXC7JRAq3shWe34X@proxy.proxy-cheap.com:31112"
        return self._getViaProxy(url, proxycheap_proxy, "getRequestProxyCheap")

    def getRequestNaked(self, url) -> requests.Response:
        """Plain GET with no proxying or impersonation."""
        s = requests.Session()
        return s.get(url)

    def postRequestNaked(self, url, _json) -> requests.Response:
        """Plain POST of a JSON payload with no proxying or impersonation."""
        s = requests.Session()
        return s.post(url, json=_json)
