from newMarketBot import NewMarketBot
from requests_ip_rotator import ApiGateway, ALL_REGIONS
import time
import random
import requests
import queue
import psycopg2
import threading
from ThreadMonitor import ThreadMonitor
from LoggingFormatter import logger

# Extend requests_ip_rotator's built-in region list with newer AWS regions
# that the library's ALL_REGIONS constant does not yet include.
NEW_ALL_REGIONS = ALL_REGIONS + [
    "ap-south-2",
    "ap-southeast-3",
    "ap-southeast-4",
    "ca-west-1",
    "eu-central-2",
    "eu-south-2",
    "il-central-1",
    "me-central-1",
]
# One requests.Session per AWS region, each routed through its own API
# Gateway endpoint so requests to steamcommunity.com originate from
# rotating AWS IPs.
# SECURITY: AWS credentials are hard-coded below — they should be moved to
# environment variables / an AWS credentials file, and this exposed key
# pair should be rotated.
all_sessions = []
for region in NEW_ALL_REGIONS:
    gateway = ApiGateway(
        "https://steamcommunity.com",
        access_key_id="AKIAVRYQ3XK3I75DLJEH",
        access_key_secret="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",
        regions=[str(region)],
    )
    # Side effect at import time: creates/starts the API Gateway endpoint
    # in this region (network call to AWS).
    gateway.start()
    aws_session = requests.Session()
    # ApiGateway doubles as a transport adapter: mounting it routes all
    # traffic for this prefix through the regional gateway.
    aws_session.mount("https://steamcommunity.com", gateway)
    all_sessions.append(aws_session)

# Global registry of every spawned thread; consumed by ThreadMonitor.
all_threads = []


def handleCsgofloatQueue(csgofloat_queue: queue.Queue, webshare_ips: list):
    """Worker loop draining the CSGOFloat queue.

    Each queue item is a tuple ``(price, market_link, full_item_name,
    inspect_link, bot)``.  Skins whose inspect link is already ranked in the
    ``floats`` table are skipped; the rest are run through a fresh
    NewMarketBot rank check and, if a candidate listing comes back, a
    potential-buy check.

    Runs forever; intended to be executed on a daemon thread.

    Args:
        csgofloat_queue: queue of listing tuples produced by the bots.
        webshare_ips: proxy URLs handed through to NewMarketBot.
    """
    while True:
        # Blocking get() replaces the original qsize()-polling busy-wait.
        single_elem = csgofloat_queue.get()
        # Light throttle so the DB and downstream endpoints aren't hammered.
        time.sleep(random.uniform(0.5, 0.7))
        price, market_link, single_full_item_name, single_inspect_link, m = single_elem

        # Default to "skip": on any DB error we err on the side of not
        # re-processing the skin (matches the original behaviour).
        skin_in_db = True
        postgresql_conn = None  # pre-bind so the finally block can't NameError
        try:
            # SECURITY: database credentials are hard-coded — move them to
            # configuration/environment and rotate the exposed password.
            postgresql_conn = psycopg2.connect(
                database="postgres",
                user="postgres",
                password="Berufsorientierung1!",
                host="23.88.122.57",
                port="5432",
            )
            # Cursor context manager guarantees the cursor is closed on every
            # path (the original leaked it on the "skin found" path).
            with postgresql_conn.cursor() as postgresql_cur:
                postgresql_cur.execute(
                    "SELECT rank FROM floats WHERE inspect_link = %s",
                    (single_inspect_link,),
                )
                row = postgresql_cur.fetchone()
            # Explicit None check instead of letting fetchone()[0] raise
            # TypeError as control flow.
            if row is None:
                logger.error("SKIN NOT IN DB, CONTINUE")
                skin_in_db = False
            else:
                logger.info("SKIN IN DB, SKIP")
        except (Exception, psycopg2.DatabaseError) as error:
            logger.error(str(error))
        finally:
            if postgresql_conn is not None:
                try:
                    postgresql_conn.close()
                except Exception as e:
                    logger.error(str(e))

        if skin_in_db:
            continue

        nm = NewMarketBot(
            "highlow",
            "Queue Thread",
            webshare_ips,
            all_sessions,
            False,
            queue.Queue(),
            queue.Queue(),
        )
        final_listing = nm.singleCheckCsgofloatRank(
            market_link, single_full_item_name, single_inspect_link, m, price
        )
        # None -> check failed; the sentinel string -> no buy candidate
        # (sentinel value comes from NewMarketBot and must match exactly).
        if final_listing is None or final_listing == "neger":
            continue
        nm.singleCheckForPotentialBuy(final_listing, single_inspect_link, 1337)


def handleListingsQueue(listings_queue: queue.Queue, webshare_ips, csgofloat_queue):
    """Worker loop draining the bulk-listings queue.

    Each queue item wraps a batch of listings; the batch is handed to a
    NewMarketBot bulk check, which forwards interesting skins onto
    ``csgofloat_queue``.  Runs forever; intended for a daemon thread.

    Args:
        listings_queue: queue of ``(bulk_list,)``-style items.
        webshare_ips: proxy URLs handed through to NewMarketBot.
        csgofloat_queue: downstream queue consumed by handleCsgofloatQueue.
    """
    while True:
        # Blocking get() replaces the original qsize()-polling busy-wait.
        bulk_list = listings_queue.get()[0]
        # The whole batch shares one high/low mode; read it from the first
        # listing's payload.
        high_low = list(bulk_list[0].values())[0]["high_low"]
        nm = NewMarketBot(
            str(high_low),
            "Best Thread",
            webshare_ips,
            all_sessions,
            False,
            queue.Queue(),
            csgofloat_queue,
        )
        nm.getBestOrWorstSkinsBulk(bulk_list)


def prepareQueue():
    """Download the Webshare proxy list and return it as proxy URLs.

    Returns:
        list of strings of the form ``http://user:password@host:port``;
        empty if the download contains no usable rows.
    """
    # SECURITY: the account download token is embedded in the URL — it
    # should live in configuration, not source control.
    response = requests.get(
        "https://proxy.webshare.io/api/v2/proxy/list/download/hdaovifqwgapnzijunmiptygnyrtyqaeyvvvqgdo/-/any/username/direct/",
        timeout=30,  # the original had no timeout and could hang forever
    )
    return _parseWebshareProxies(response.text)


def _parseWebshareProxies(text: str) -> list:
    """Parse Webshare's ``host:port:user:password`` lines into proxy URLs."""
    ip_addresses = []
    # splitlines() handles both \n and \r\n, replacing the manual
    # rsplit("\n") + "\r"-stripping of the original.
    for line in text.splitlines():
        parts = line.strip().split(":")
        # A valid row is host:port:user:password; the length guard also
        # skips blank trailing lines (the original raised IndexError on
        # short malformed rows).
        if len(parts) >= 4 and parts[0]:
            host, port, user, password = parts[0], parts[1], parts[2], parts[3]
            ip_addresses.append(f"http://{user}:{password}@{host}:{port}")
    return ip_addresses


def startScraper(filename: str, bs_and_fn: bool):
    """Start the scraper: spawn queue workers, then loop scrape passes forever.

    Spawns three daemon workers for the CSGOFloat queue and three for the
    listings queue, reads the weapon list from ``filename`` (one weapon per
    line), then repeatedly runs a "high" and a "low" pass over all weapons.

    Args:
        filename: path to the newline-separated weapon list.
        bs_and_fn: when True, run the high/low scrape passes; when False the
            function parks forever (kept from the original behaviour).

    Note: never returns.
    """
    webshare_ips = prepareQueue()

    # Three daemon workers draining the CSGOFloat rank-check queue.
    csgofloat_queue = queue.Queue()
    for i in range(3):
        t = threading.Thread(
            target=handleCsgofloatQueue,
            args=(csgofloat_queue, webshare_ips),
        )
        t.daemon = True
        t.name = "handleCsgofloatQueue" + str(i)
        t.start()
        all_threads.append(t)

    # Three daemon workers draining the bulk-listings queue.
    listings_queue = queue.Queue()
    for i in range(3):
        t_l = threading.Thread(
            target=handleListingsQueue,
            args=(listings_queue, webshare_ips, csgofloat_queue),
        )
        t_l.daemon = True
        t_l.name = "handleListingsQueue" + str(i)
        t_l.start()
        all_threads.append(t_l)

    # Context manager closes the file on every path (the original leaked the
    # handle if readlines() raised).
    with open(filename, "r") as f:
        lines = [x.strip() for x in f.readlines()]

    extended_scrape = False

    while True:
        if bs_and_fn:
            # One full pass per float direction over the whole weapon list.
            for floatval in ("high", "low"):
                runThreads(
                    lines,
                    floatval,
                    webshare_ips,
                    all_sessions,
                    extended_scrape,
                    listings_queue,
                    csgofloat_queue,
                )
            time.sleep(60)
        else:
            # The original spun this loop at 100% CPU when bs_and_fn was
            # False; keep the "park forever" semantics but sleep instead.
            time.sleep(60)


def runThreads(
    weapon_list: list,
    high_or_low: str,
    webshare_ips: list,
    all_sessions: list,
    extended_scrape: bool,
    listings_queue: queue.Queue,
    csgofloat_queue: queue.Queue,
):
    """Run one scrape pass: one NewMarketBot thread per weapon.

    Starts the bot threads with a 10-second stagger, spawns a ThreadMonitor
    over the global thread registry, and blocks until every bot thread of
    this pass has finished.

    Args:
        weapon_list: weapon names, one bot thread each.
        high_or_low: float direction for this pass ("high" or "low").
        webshare_ips: proxy URLs handed through to the bots.
        all_sessions: AWS-gateway sessions handed through to the bots.
        extended_scrape: flag forwarded to NewMarketBot.
        listings_queue: queue the bots publish bulk listings to.
        csgofloat_queue: queue the bots publish float-check items to.
    """
    # Prune finished threads in place so the global registry (and
    # ThreadMonitor's view) doesn't grow without bound — this function is
    # called in an endless loop from startScraper.
    all_threads[:] = [t for t in all_threads if t.is_alive()]

    threads = []
    for line in weapon_list:
        m = NewMarketBot(
            high_or_low,
            str(line.rstrip()),
            webshare_ips,
            all_sessions,
            extended_scrape,
            listings_queue,
            csgofloat_queue,
        )
        t = threading.Thread(target=m.startBot)
        t.name = str(line).rstrip()
        t.daemon = True
        threads.append(t)
        all_threads.append(t)

    # NOTE(review): a new monitor thread is spawned on every pass; if
    # ThreadMonitor.list_running_threads loops forever, monitors accumulate
    # across passes — consider creating the monitor once at startup.
    threadmonitor = ThreadMonitor(all_threads)
    threadmonitor_thread = threading.Thread(target=threadmonitor.list_running_threads)
    threadmonitor_thread.daemon = True
    threadmonitor_thread.start()

    # Stagger bot start-ups to avoid bursting all requests at once.
    for thread in threads:
        thread.start()
        time.sleep(10)

    # Block until the whole pass is done.
    for thread in threads:
        thread.join()


if __name__ == "__main__":
    # Runs the scraper for all weapons listed in the tag file; bs_and_fn=True
    # enables the high/low scrape loop inside startScraper (never returns).
    startScraper(filename="list/tag_weapon_list.txt", bs_and_fn=True)
