import threading
import time
import traceback
import pickle
import os
import cloudscraper  # Required for Udemy session handling
from base import LoginException, Scraper, Udemy, scraper_dict
from colors import bw, fg, fr
from requests.utils import cookiejar_from_dict, dict_from_cookiejar

# DUCE-CLI

COOKIE_FILE = "udemy_cookies.pkl"  # File to store cookies


def save_cookies(session):
    """Save session cookies as a dictionary"""
    with open(COOKIE_FILE, "wb") as f:
        pickle.dump(dict_from_cookiejar(session.cookies), f)  # Convert cookies to a dictionary


def load_cookies():
    """Load cookies from a file if it exists and return them as a dictionary"""
    if os.path.exists(COOKIE_FILE):
        with open(COOKIE_FILE, "rb") as f:
            try:
                cookies = pickle.load(f)
            except (pickle.UnpicklingError, EOFError):
                return None  # Corrupt cookie file; fall back to a fresh login
            if isinstance(cookies, dict):  # Ensure cookies were stored as a dictionary
                return cookies
    return None


def create_scraping_thread(site: str):
    """Creates a separate thread to scrape each site"""
    code_name = scraper_dict[site]
    try:
        t = threading.Thread(target=getattr(scraper, code_name), daemon=True)
        t.start()

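        # The scraper sets "<code_name>_length" to 0 while it is still running,
        # to -1 if it failed, and (presumably) to the number of courses found on success.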
        while getattr(scraper, f"{code_name}_length") == 0:
            time.sleep(0.1)  # Poll at 100 ms intervals instead of busy waiting

        if getattr(scraper, f"{code_name}_length") == -1:
            raise Exception(f"Error in: {site}")

        print(f"Scraping {site} completed successfully.")

    except Exception:
        error = traceback.format_exc()
        print(f"Error in {site}: {error}")


############## MAIN #############
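# Flow: load settings -> check for updates -> log in (saved cookies first,
# then email/password) -> scrape the coupon sites -> enroll in the courses found.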
udemy = Udemy("cli")
udemy.load_settings()
login_title, main_title = udemy.check_for_update()

if "Update" in login_title:
    print(login_title)

login_successful = False
session = cloudscraper.create_scraper()  # Use cloudscraper instead of requests.Session()

# Attempt to use saved cookies first
cookies = load_cookies()
if cookies:
    print("Trying to log in using saved cookies...")
    try:
        session.cookies = cookiejar_from_dict(cookies)  # Convert dict back to cookie jar
        udemy.client = session  # Attach session to Udemy object
        udemy.cookie_dict = cookies  # Set the cookie dictionary
        udemy.get_session_info()  # Check if session is valid
        print(f"Logged in as {udemy.display_name} using cookies ✅")
        login_successful = True
    except LoginException:
        print("Cookies expired or invalid. Switching to manual login.")
        os.remove(COOKIE_FILE)  # Delete invalid cookies

# If cookies are not valid, fall back to email/password login
while not login_successful:
    try:
        if udemy.settings.get("email") and udemy.settings.get("password"):
            email, password = udemy.settings["email"], udemy.settings["password"]
            print(f"Trying to log in using saved credentials: {email}")
        else:
            email = input("Email: ")
            password = input("Password: ")

        udemy.manual_login(email, password)
        udemy.get_session_info()  # Ensure login was successful

        # Save the credentials that worked
        udemy.settings["email"], udemy.settings["password"] = email, password
        udemy.save_settings()

        # Save cookies after successful login
        save_cookies(udemy.client)

        print(f"Logged in as {udemy.display_name} ✅")
        login_successful = True

    except LoginException as e:
        print(f"Login Failed: {e}")
        if "Browser" in login_title:
            print("Can't login using cookies")
            if os.path.exists(COOKIE_FILE):
                os.remove(COOKIE_FILE)  # Delete invalid cookies
        elif "Email" in login_title:
            # Clear saved credentials so the next attempt prompts for them
            udemy.settings["email"], udemy.settings["password"] = "", ""
            udemy.save_settings()

print(fg + f"Logged in as {udemy.display_name}")

# Check if the user has valid settings
if udemy.is_user_dumb():
    print(bw + fr + "Invalid settings! Exiting.")
    exit()

scraper = Scraper(udemy.sites)
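# The Scraper instance holds the per-site "<code_name>_length" attributes polled by
# create_scraping_thread.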

try:
    # Scrape courses
    print("Starting to scrape free Udemy courses...")
    udemy.scraped_data = scraper.get_scraped_courses(create_scraping_thread)
    time.sleep(0.5)  # Brief pause before printing the summary
    print("\nScraping completed. Enrolling in courses...\n")

    # Start enrolling
    udemy.start_enrolling()

    print(f"\nSuccessfully Enrolled: {udemy.successfully_enrolled_c}")
    print(f"Amount Saved: {round(udemy.amount_saved_c, 2)} {udemy.currency.upper()}")
    print(f"Already Enrolled: {udemy.already_enrolled_c}")
    print(f"Excluded Courses: {udemy.excluded_c}")
    print(f"Expired Courses: {udemy.expired_c}")

except Exception:
    print(f"Error:\n{traceback.format_exc()}\n")

input("Press Enter to exit...")