from flask import Flask, render_template_string, jsonify
from apscheduler.schedulers.background import BackgroundScheduler
import subprocess
import sys
import threading
import pytz
from datetime import datetime

app = Flask(__name__)

execution_logs = []
MAX_LOG_ENTRIES = 20  # Tracks whole executions, not individual output lines
log_lock = threading.Lock()      # Thread safety for execution_logs
process_lock = threading.Lock()  # Prevents concurrent script execution
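
# Two locks guard shared state: process_lock ensures only one cli.py run is in
# flight at a time (whether scheduled or triggered manually), while log_lock
# protects the execution_logs list, which is appended to by the worker thread
# and read by the /logs route.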
def run_cli_script():
    """Runs cli.py and captures its output/errors into a single log entry."""
    if not process_lock.acquire(blocking=False):
        # Another instance is already running
        return

    # Timestamp the run in IST
    utc_now = datetime.utcnow()
    ist = pytz.timezone("Asia/Kolkata")
    timestamp = utc_now.replace(tzinfo=pytz.utc).astimezone(ist).strftime("%Y-%m-%d %H:%M:%S IST")
    log_entry = {'time': timestamp, 'output': '', 'error': ''}

    try:
        with subprocess.Popen(
            ["python", "cli.py"],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            bufsize=1,
            text=True
        ) as process:
            # Thread function to capture one stream into the log entry
            def capture_stream(stream, kind):
                for line in stream:
                    if kind == 'output':
                        log_entry['output'] += line
                        print(line, end='')
                    else:
                        log_entry['error'] += line
                        print(line, end='', file=sys.stderr)

            # Start threads to capture stdout and stderr
            stdout_thread = threading.Thread(
                target=capture_stream,
                args=(process.stdout, 'output')
            )
            stderr_thread = threading.Thread(
                target=capture_stream,
                args=(process.stderr, 'error')
            )
            stdout_thread.start()
            stderr_thread.start()

            # Wait for the process to complete or time out (1 hour)
            try:
                process.wait(timeout=3600)
            except subprocess.TimeoutExpired:
                process.terminate()
                log_entry['error'] += "\nProcess timed out after 1 hour."

            # Wait for the capture threads to drain the pipes
            stdout_thread.join()
            stderr_thread.join()
    except Exception as e:
        log_entry['error'] += f"\nUnexpected error: {str(e)}"
    finally:
        # Record the run (even on timeout/error) and trim old entries
        with log_lock:
            execution_logs.append(log_entry)
            if len(execution_logs) > MAX_LOG_ENTRIES:
                execution_logs.pop(0)
        process_lock.release()
def start_initial_run():
    threading.Thread(target=run_cli_script, daemon=True).start()

scheduler = BackgroundScheduler(daemon=True)
scheduler.add_job(
    run_cli_script,
    'interval',
    hours=3,
    id='main_job',
    timezone=pytz.utc  # Scheduler uses UTC internally
)
scheduler.start()

start_initial_run()
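
# The scheduler above triggers run_cli_script() every 3 hours (in UTC); the
# routes below only read or report state: they render the dashboard, expose
# the captured logs as JSON, and let a run be triggered manually.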
@app.route('/')
def home():
    job = scheduler.get_job('main_job')
    next_run = job.next_run_time.astimezone(pytz.timezone("Asia/Kolkata")).strftime('%Y-%m-%d %H:%M:%S IST') if job else 'N/A'
    return render_template_string('''
        <!DOCTYPE html>
        <html>
        <head>
            <title>Script Scheduler</title>
            <script>
                function fetchLogs() {
                    fetch('/logs')
                        .then(response => response.json())
                        .then(data => {
                            let logBox = document.getElementById("log-box");
                            logBox.innerHTML = "";
                            data.logs.forEach(log => {
                                let logEntry = "<div class='timestamp'>" + log.time + "</div>";
                                if (log.output) logEntry += "<div class='output'>" + log.output + "</div>";
                                if (log.error) logEntry += "<div class='error'>" + log.error + "</div>";
                                logEntry += "<hr>";
                                logBox.innerHTML += logEntry;
                            });
                            logBox.scrollTop = logBox.scrollHeight;
                        });
                }
                setInterval(fetchLogs, 2000);
                window.onload = fetchLogs;
            </script>
            <style>
                body { font-family: Arial, sans-serif; padding: 20px; }
                .log-box {
                    background: #000;
                    color: #0f0;
                    padding: 15px;
                    border-radius: 5px;
                    margin-top: 20px;
                    white-space: pre-wrap;
                    max-height: 400px;
                    overflow-y: auto;
                }
                .timestamp { color: #888; margin-bottom: 10px; }
                .error { color: #ff4444; }
            </style>
        </head>
        <body>
            <h1>Script Scheduler</h1>
            <p>Next run: {{ next_run }}</p>
            <h2>Latest Execution Logs</h2>
            <div id="log-box" class="log-box"></div>
            <p><a href="/force-run">Trigger Manual Run</a></p>
            <p><a href="/run-check">Check Scheduler Status</a></p>
        </body>
        </html>
    ''', next_run=next_run)
@app.route('/logs')
def logs():
    with log_lock:
        return jsonify({'logs': execution_logs[::-1]})  # Newest logs first

@app.route('/force-run')
def force_run():
    # Thread.start() returns None, so it cannot signal whether a run is already
    # in progress; check the process lock instead before spawning a new run.
    if process_lock.locked():
        return "Script is already running", 429
    threading.Thread(target=run_cli_script, daemon=True).start()
    return "Script started manually", 200
@app.route('/run-check')
def run_check():
    if not scheduler.running:
        scheduler.start()
    return "Scheduler is running", 200

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=7860)
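
# Deployment note (an assumption, not part of the original script): app.run()
# starts Flask's development server, and port 7860 matches the Hugging Face
# Spaces default, where a file like this is typically saved as app.py and
# launched automatically. If it were served elsewhere behind a production WSGI
# server, a single worker keeps the scheduler and the in-memory locks/logs from
# being duplicated across processes, e.g.:
#
#   gunicorn -w 1 -b 0.0.0.0:7860 app:app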