imseldrith committed on
Commit
84930b1
·
verified ·
1 Parent(s): 9168bd0

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +138 -0
  2. cli (1).py +140 -0
app.py ADDED
@@ -0,0 +1,138 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, render_template_string, jsonify
2
+ from apscheduler.schedulers.background import BackgroundScheduler
3
+ import subprocess
4
+ import threading
5
+ from datetime import datetime
6

app = Flask(__name__)
# In-memory log buffer served by the /logs endpoint. Writers trim it to
# MAX_LOG_ENTRIES entries (see run_cli_script), so it acts as a small ring
# buffer of the most recent subprocess output lines.
execution_logs = []
MAX_LOG_ENTRIES = 20
10
+
11
def run_cli_script():
    """Runs cli.py and streams logs in real-time to both UI and terminal.

    Each line of the child's output is appended to the shared
    ``execution_logs`` list (polled by the ``/logs`` endpoint) and echoed
    to this process's terminal. The log list is capped at
    ``MAX_LOG_ENTRIES`` entries.
    """
    timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")
    log_entry = {'time': timestamp, 'output': '', 'error': ''}

    def _record(output, error):
        # Append one log line and trim the shared buffer.
        execution_logs.append({'time': timestamp, 'output': output, 'error': error})
        if len(execution_logs) > MAX_LOG_ENTRIES:
            execution_logs.pop(0)

    try:
        process = subprocess.Popen(
            ["python", "cli.py"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            bufsize=1
        )

        # BUGFIX: the original read all of stdout before reading stderr.
        # If the child filled the stderr pipe buffer first, both processes
        # deadlocked. Drain stderr on its own thread while we stream stdout.
        def _drain_stderr():
            for line in process.stderr:
                log_entry['error'] += line
                _record('', line)
                print(line, end="")  # Print errors to terminal

        stderr_thread = threading.Thread(target=_drain_stderr, daemon=True)
        stderr_thread.start()

        # Stream stdout to UI and print to terminal.
        for line in process.stdout:
            log_entry['output'] += line
            _record(line, '')
            print(line, end="")  # Print logs to terminal

        stderr_thread.join()
        process.wait()  # reap the child; the original never waited (zombie risk)

    except Exception as e:
        log_entry['error'] = str(e)
        _record('', str(e))
        print(f"Error: {str(e)}")  # Print error to terminal
44
+
45
def start_initial_run():
    """Kick off one immediate run of cli.py on a background daemon thread."""
    threading.Thread(target=run_cli_script, daemon=True).start()

# Recurring job: re-run the script every 3 hours.
# BUGFIX: the original also passed next_run_time=datetime.now(), which made
# the scheduler fire immediately in addition to start_initial_run() below —
# the script executed twice, concurrently, at startup. The immediate run is
# now handled only by start_initial_run(); the job first fires after 3 hours.
scheduler = BackgroundScheduler(daemon=True)
scheduler.add_job(
    run_cli_script,
    'interval',
    hours=3,
    id='main_job',
)
scheduler.start()

start_initial_run()
59
+
60
@app.route('/')
def home():
    """Main UI displaying logs and next run time."""
    job = scheduler.get_job('main_job')
    # BUGFIX: job.next_run_time can be None (e.g. while the job is paused),
    # which previously raised AttributeError; guard both missing-job and
    # missing-run-time cases.
    if job and job.next_run_time:
        next_run = job.next_run_time.strftime('%Y-%m-%d %H:%M:%S UTC')
    else:
        next_run = 'N/A'

    return render_template_string('''
        <!DOCTYPE html>
        <html>
        <head>
            <title>Script Scheduler</title>
            <script>
                function fetchLogs() {
                    fetch('/logs')
                        .then(response => response.json())
                        .then(data => {
                            let logBox = document.getElementById("log-box");
                            logBox.innerHTML = "";
                            data.logs.forEach(log => {
                                let logEntry = "<div class='timestamp'>" + log.time + "</div>";
                                if (log.output) logEntry += "<div class='output'>" + log.output + "</div>";
                                if (log.error) logEntry += "<div class='error'>" + log.error + "</div>";
                                logEntry += "<hr>";
                                logBox.innerHTML += logEntry;
                            });
                            logBox.scrollTop = logBox.scrollHeight;
                        });
                }
                setInterval(fetchLogs, 2000);
                window.onload = fetchLogs;
            </script>
            <style>
                body { font-family: Arial, sans-serif; padding: 20px; }
                .log-box {
                    background: #000;
                    color: #0f0;
                    padding: 15px;
                    border-radius: 5px;
                    margin-top: 20px;
                    white-space: pre-wrap;
                    max-height: 400px;
                    overflow-y: auto;
                }
                .timestamp { color: #888; margin-bottom: 10px; }
                .error { color: #ff4444; }
            </style>
        </head>
        <body>
            <h1>Script Scheduler</h1>
            <p>Next run: {{ next_run }}</p>
            <h2>Latest Execution Logs</h2>
            <div id="log-box" class="log-box"></div>
            <p><a href="/force-run">Trigger Manual Run</a></p>
            <p><a href="/run-check">Check Scheduler Status</a></p>
        </body>
        </html>
    ''', next_run=next_run)
117
+
118
@app.route('/logs')
def logs():
    """Serve the collected execution logs as JSON for AJAX polling."""
    payload = {'logs': execution_logs}
    return jsonify(payload)
122
+
123
@app.route('/force-run')
def force_run():
    """Manually trigger the script execution."""
    worker = threading.Thread(target=run_cli_script, daemon=True)
    worker.start()
    return "Script executed manually", 200
128
+
129
@app.route('/run-check')
def run_check():
    """Check if the scheduler is still running."""
    if scheduler.running:
        return "Scheduler is running", 200
    # NOTE(review): assumes a stopped BackgroundScheduler can be restarted
    # with start() — confirm against the APScheduler version in use.
    print("Scheduler was stopped! Restarting...")
    scheduler.start()
    return "Scheduler is running", 200
136
+
137
if __name__ == '__main__':
    # Listen on all interfaces; port 7860 is presumably the deployment
    # platform's expected port (looks like a Hugging Face Space) — confirm.
    app.run(host='0.0.0.0', port=7860)
cli (1).py ADDED
@@ -0,0 +1,140 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import threading
2
+ import time
3
+ import traceback
4
+ import pickle
5
+ import os
6
+ import requests
7
+ import cloudscraper # Required for Udemy session handling
8
+ from base import VERSION, LoginException, Scraper, Udemy, scraper_dict
9
+ from colors import bw, by, fb, fg, fr
10
+ from requests.utils import cookiejar_from_dict, dict_from_cookiejar
11
+
12
# DUCE-CLI

# Path of the pickled cookie dictionary written by save_cookies() and
# read back by load_cookies() to skip the email/password login.
COOKIE_FILE = "udemy_cookies.pkl"  # File to store cookies
15
+
16
+
17
def save_cookies(session):
    """Persist the session's cookies to COOKIE_FILE as a plain dict."""
    cookie_dict = dict_from_cookiejar(session.cookies)  # jar -> plain dict
    with open(COOKIE_FILE, "wb") as fh:
        pickle.dump(cookie_dict, fh)
21
+
22
+
23
def load_cookies():
    """Load saved cookies from COOKIE_FILE.

    Returns:
        dict | None: the cookie dictionary, or None when the file is
        missing, unreadable/corrupt, or does not contain a dict.
    """
    if not os.path.exists(COOKIE_FILE):
        return None
    try:
        with open(COOKIE_FILE, "rb") as f:
            # NOTE: pickle.load is only safe because this file is written
            # by save_cookies() in this same program, never by a third party.
            cookies = pickle.load(f)
    except (pickle.UnpicklingError, EOFError, OSError):
        # BUGFIX: a truncated or corrupt cookie file previously crashed the
        # whole script at startup; treat it the same as "no saved cookies".
        return None
    if isinstance(cookies, dict):  # Ensure cookies are stored as a dictionary
        return cookies
    return None
31
+
32
+
33
def create_scraping_thread(site: str):
    """Creates a separate thread to scrape each site"""
    code_name = scraper_dict[site]
    try:
        # The scraper method runs on its own daemon thread; progress is
        # observed through the scraper's "<code_name>_length" attribute.
        worker = threading.Thread(target=getattr(scraper, code_name), daemon=True)
        worker.start()

        # Poll until the scraper reports something other than "not started".
        while getattr(scraper, f"{code_name}_length") == 0:
            time.sleep(0.1)  # Avoid busy waiting

        # -1 is the scraper's failure sentinel.
        if getattr(scraper, f"{code_name}_length") == -1:
            raise Exception(f"Error in: {site}")

        print(f"Scraping {site} completed successfully.")

    except Exception:
        error = traceback.format_exc()
        print(f"Error in {site}: {error}")
51
+
52
+
53
############## MAIN #############
# Flat script entry point: log in to Udemy (cookies first, then manual),
# scrape the configured coupon sites, and enroll in the free courses.
udemy = Udemy("cli")
udemy.load_settings()
login_title, main_title = udemy.check_for_update()

if "Update" in login_title:
    print(login_title)

login_successful = False
session = cloudscraper.create_scraper()  # Use cloudscraper instead of requests.Session()

# Attempt to use saved cookies first
cookies = load_cookies()
if cookies:
    print("Trying to log in using saved cookies...")
    try:
        session.cookies = cookiejar_from_dict(cookies)  # Convert dict back to cookie jar
        udemy.client = session  # Attach session to Udemy object
        udemy.cookie_dict = cookies  # Set the cookie dictionary
        udemy.get_session_info()  # Check if session is valid
        print(f"Logged in as {udemy.display_name} using cookies ✅")
        login_successful = True
    except LoginException:
        print("Cookies expired or invalid. Switching to manual login.")
        os.remove(COOKIE_FILE)  # Delete invalid cookies

# If cookies are not valid, use email/password login
if not login_successful:
    # Loop until a login succeeds; credentials come from saved settings
    # when present, otherwise from interactive input.
    while not login_successful:
        try:
            if udemy.settings.get("email") and udemy.settings.get("password"):
                email, password = udemy.settings["email"], udemy.settings["password"]
                print(f"Trying to log in using saved credentials: {email}")
            else:
                email = input("Email: ")
                password = input("Password: ")

            udemy.manual_login(email, password)
            udemy.get_session_info()  # Ensure login was successful

            # Save successful login credentials
            # NOTE(review): this appears to persist the password in plain
            # text via save_settings — confirm that is acceptable.
            udemy.settings["email"], udemy.settings["password"] = email, password
            udemy.save_settings()

            # Save cookies after successful login
            save_cookies(udemy.client)

            print(f"Logged in as {udemy.display_name} ✅")
            login_successful = True

        except LoginException as e:
            print(f"Login Failed: {e}")
            # login_title (from check_for_update) encodes which login
            # mechanisms are currently available.
            if "Browser" in login_title:
                print("Can't login using cookies")
                os.remove(COOKIE_FILE)  # Delete invalid cookies
            elif "Email" in login_title:
                # Clear bad saved credentials so the next iteration prompts.
                udemy.settings["email"], udemy.settings["password"] = "", ""
                udemy.save_settings()

print(fg + f"Logged in as {udemy.display_name}")

# Check if the user has valid settings
if udemy.is_user_dumb():
    print(bw + fr + "Invalid settings! Exiting.")
    exit()

scraper = Scraper(udemy.sites)

try:
    # Scrape courses
    print("Starting to scrape free Udemy courses...")
    udemy.scraped_data = scraper.get_scraped_courses(create_scraping_thread)
    time.sleep(0.5)
    print("\nScraping completed. Enrolling in courses...\n")

    # Start enrolling
    udemy.start_enrolling()

    # Summary counters are accumulated on the Udemy object by
    # start_enrolling().
    print(f"\nSuccessfully Enrolled: {udemy.successfully_enrolled_c}")
    print(f"Amount Saved: {round(udemy.amount_saved_c, 2)} {udemy.currency.upper()}")
    print(f"Already Enrolled: {udemy.already_enrolled_c}")
    print(f"Excluded Courses: {udemy.excluded_c}")
    print(f"Expired Courses: {udemy.expired_c}")

except Exception as e:
    # Best-effort: report the full traceback but still reach the exit prompt.
    print(f"Error:\n{traceback.format_exc()}\n")

input("Press Enter to exit...")