import pandas as pd
import os
import gradio as gr
import plotly.express as px
import plotly.graph_objects as go  # used only for return-type hints on the chart helpers
from typing import Tuple, List, Union
import traceback
import io
import zipfile
import tempfile
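
# Expected input: an activity-log export (.xls/.xlsx) containing at least the columns
# user_id, lastname, course_id, course_pk1, data, event_type, internal_handle,
# session_id, timestamp and system_role (see required_columns in process_data below).
# The app builds a per-user summary workbook plus one CSV per user, zipped for download.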

# NTU Singapore colors
NTU_BLUE = "#003D7C"
NTU_RED = "#C11E38"
NTU_GOLD = "#E7B820"

def process_data(file: gr.File, progress=gr.Progress()) -> Tuple[str, str, pd.DataFrame, Union[str, None]]:
    try:
        # Check that a file was uploaded
        if file is None:
            raise ValueError("No file uploaded. Please upload an Excel file.")

        # Check the file extension
        if not file.name.lower().endswith(('.xls', '.xlsx')):
            raise ValueError("Invalid file format. Please upload an Excel file (.xls or .xlsx).")

        # Load the raw Excel file
        try:
            raw_data = pd.read_excel(file.name)
        except Exception as e:
            raise ValueError(f"Error reading Excel file: {str(e)}")

        base_path = tempfile.mkdtemp()
        final_file_path = os.path.join(base_path, 'final_output.xlsx')

        # Step 1: Extract user information
        user_info = raw_data[['user_id', 'lastname']].drop_duplicates().copy()
        user_info['Username'] = user_info['user_id']
        user_info['Name'] = user_info['lastname']
        user_info['Email'] = user_info['user_id'] + '@ntu.edu.sg'
        progress(0.2, desc="Extracting user information")

        # Step 2: Count distinct courses per user
        course_counts = raw_data.groupby('user_id')['course_id'].nunique().reset_index()
        course_counts.columns = ['Username', 'Courses']
        user_info = user_info.merge(course_counts, on='Username', how='left')
        progress(0.4, desc="Calculating course counts")

        # Step 3: Count total events (grand total) per user
        event_counts = raw_data.groupby('user_id').size().reset_index(name='Grand Total')
        event_counts.columns = ['Username', 'Grand Total']
        user_info = user_info.merge(event_counts, on='Username', how='left')
        progress(0.6, desc="Calculating grand totals")

        # Step 4: Generate per-user filenames and paths
        user_info['File'] = 'User_' + user_info['Username'] + '_data.csv'
        user_info['Path'] = user_info['File'].apply(lambda x: os.path.join(base_path, 'mailmerge', x))

        # Keep the reporting columns, drop blank usernames, and de-duplicate
        user_info = user_info[['Username', 'Name', 'Courses', 'Grand Total', 'Email', 'File', 'Path']]
        user_info = user_info[user_info['Username'].notna()]
        user_info.drop_duplicates(subset=['Username'], inplace=True)
        user_info.sort_values(by='Username', inplace=True)
        progress(0.8, desc="Generating output files")

        # Totals for the Courses and Grand Total columns
        total_courses = user_info['Courses'].sum()
        total_grand_total = user_info['Grand Total'].sum()

        # Generate one CSV per user for mail merge
        required_columns = ['course_id', 'course_pk1', 'data', 'event_type', 'internal_handle',
                            'lastname', 'session_id', 'timestamp', 'user_id', 'system_role']
        mailmerge_path = os.path.join(base_path, 'mailmerge')
        if not os.path.exists(mailmerge_path):
            os.makedirs(mailmerge_path)
        for user_id in user_info['Username'].unique():
            user_data = raw_data[raw_data['user_id'] == user_id][required_columns]
            user_file_path = os.path.join(mailmerge_path, f'User_{user_id}_data.csv')
            user_data.to_csv(user_file_path, index=False)

        # Save the summary dataframe to the output Excel file
        with pd.ExcelWriter(final_file_path, engine='xlsxwriter') as writer:
            user_info.to_excel(writer, index=False, sheet_name='Sheet1')
            worksheet = writer.sheets['Sheet1']
            # Last data row in Excel terms (1-based, plus the header row)
            last_row = len(user_info) + 1
            # Write the totals in columns B, C, and D of the first empty row after the user data
            worksheet.write(f'B{last_row + 1}', 'Total')
            worksheet.write(f'C{last_row + 1}', total_courses)
            worksheet.write(f'D{last_row + 1}', total_grand_total)

        # Create a zip file containing all user CSV files and the final Excel file
        zip_buffer = io.BytesIO()
        with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
            for root, _, files in os.walk(mailmerge_path):
                for f_name in files:
                    file_path = os.path.join(root, f_name)
                    zip_file.write(file_path, os.path.relpath(file_path, base_path))
            zip_file.write(final_file_path, os.path.basename(final_file_path))
        zip_buffer.seek(0)

        temp_zip_file = tempfile.NamedTemporaryFile(delete=False, suffix=".zip")
        with open(temp_zip_file.name, 'wb') as f:
            f.write(zip_buffer.getvalue())

        progress(1.0, desc="Processing complete")
        return "Processing complete. You can now download the results.", "Results are packaged in the zip file below.", user_info, temp_zip_file.name
    except Exception as e:
        error_msg = f"Error: {str(e)}\n\nTraceback:\n{traceback.format_exc()}"
        return error_msg, "Processing failed", pd.DataFrame(), None
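
# --- Optional local smoke test (illustrative sketch, not used by the app) ---
# Builds a tiny synthetic workbook with the columns process_data expects so the
# pipeline can be exercised outside Gradio. The file name and sample values are
# made up; writing .xlsx reuses the xlsxwriter engine the app already depends on.
def _write_sample_workbook(path: str = "sample_activity.xlsx") -> str:
    sample = pd.DataFrame({
        'user_id': ['u001', 'u001', 'u002'],
        'lastname': ['Tan', 'Tan', 'Lim'],
        'course_id': ['C101', 'C102', 'C101'],
        'course_pk1': [1, 2, 1],
        'data': ['', '', ''],
        'event_type': ['LOGIN', 'COURSE_ACCESS', 'LOGIN'],
        'internal_handle': ['', '', ''],
        'session_id': [10, 11, 12],
        'timestamp': ['2024-01-01', '2024-01-02', '2024-01-03'],
        'system_role': ['STUDENT', 'STUDENT', 'STUDENT'],
    })
    sample.to_excel(path, index=False, engine='xlsxwriter')
    return path

# Example usage (commented out; the fake upload object only mimics the .name
# attribute that Gradio's File component provides):
#   class _FakeUpload:
#       name = _write_sample_workbook()
#   msg, info, summary_df, zip_path = process_data(_FakeUpload())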

def create_summary_stats(df: pd.DataFrame) -> dict:
    try:
        return {
            "Total Users": len(df),
            "Total Courses": df['Courses'].sum(),
            "Total Activity": df['Grand Total'].sum(),
            "Avg Courses per User": df['Courses'].mean(),
            "Avg Activity per User": df['Grand Total'].mean()
        }
    except Exception as e:
        return {"Error": f"Failed to create summary stats: {str(e)}"}

def create_bar_chart(df: pd.DataFrame, x: str, y: str, title: str) -> Union[go.Figure, None]:
    try:
        if df.empty:
            return None
        fig = px.bar(df, x=x, y=y, title=title)
        fig.update_layout(
            plot_bgcolor='white',
            paper_bgcolor='white',
            font_color=NTU_BLUE
        )
        fig.update_traces(marker_color=NTU_BLUE)
        return fig
    except Exception as e:
        print(f"Error creating bar chart: {str(e)}")
        return None

def create_scatter_plot(df: pd.DataFrame) -> Union[go.Figure, None]:
    try:
        if df.empty:
            return None
        fig = px.scatter(df, x='Courses', y='Grand Total', title='Courses vs. Activity Level',
                         hover_data=['Username', 'Name'])
        fig.update_layout(
            plot_bgcolor='white',
            paper_bgcolor='white',
            font_color=NTU_BLUE
        )
        fig.update_traces(marker_color=NTU_RED)
        return fig
    except Exception as e:
        print(f"Error creating scatter plot: {str(e)}")
        return None
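
# Quick manual check of the chart helpers outside the UI (hypothetical values;
# uncomment to render the figures in a local Python session):
#   demo_df = pd.DataFrame({'Username': ['u001', 'u002'], 'Name': ['Tan', 'Lim'],
#                           'Courses': [3, 1], 'Grand Total': [42, 7]})
#   create_bar_chart(demo_df, 'Username', 'Grand Total', 'User Activity Levels').show()
#   create_scatter_plot(demo_df).show()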

def update_insights(df: pd.DataFrame, zip_path: Union[str, None]) -> List[Union[gr.components.Component, None]]:
    try:
        if df.empty:
            return [gr.Markdown("No data available. Please upload and process a file first.")] + [None] * 6

        stats = create_summary_stats(df)

        # Format numeric stats with two decimals; fall back to str() for non-numeric
        # values (e.g. the error message). Blank lines keep each stat on its own line
        # when rendered as Markdown.
        def _fmt(v):
            try:
                return f"{float(v):.2f}"
            except (TypeError, ValueError):
                return str(v)

        stats_md = gr.Markdown("\n\n".join(f"**{k}**: {_fmt(v)}" for k, v in stats.items()))

        users_activity_chart = create_bar_chart(df, 'Username', 'Grand Total', 'User Activity Levels')
        users_courses_chart = create_bar_chart(df, 'Username', 'Courses', 'Courses per User')
        scatter_plot = create_scatter_plot(df)
        user_table = gr.DataFrame(value=df)

        if zip_path:
            download_button = gr.File(value=zip_path, visible=True, label="Download Results")
            download_text = gr.Markdown("Click the 'Download Results' button above to download the ZIP file containing all processed data.")
        else:
            download_button = gr.File(visible=False, label="Download Results")
            download_text = gr.Markdown("")

        return [stats_md, users_activity_chart, users_courses_chart, scatter_plot, user_table, download_button, download_text]
    except Exception as e:
        error_msg = f"Error updating insights: {str(e)}\n\nTraceback:\n{traceback.format_exc()}"
        return [gr.Markdown(error_msg)] + [None] * 6

def process_and_update(file):
    try:
        result_msg, csv_loc, df, zip_path = process_data(file)
        # insights = [stats, activity chart, courses chart, scatter, table, download file, download text]
        insights = update_insights(df, zip_path)
        return [result_msg, csv_loc] + insights
    except Exception as e:
        error_msg = f"Error in process_and_update: {str(e)}\n\nTraceback:\n{traceback.format_exc()}"
        return [error_msg, "Processing failed", gr.Markdown(error_msg)] + [None] * 6

def clear_outputs():
    # 2 text outputs, then summary stats, 3 plots, table and download file (cleared), and the download text
    return [""] * 2 + [None] * 6 + [""]

# Create a custom theme
custom_theme = gr.themes.Base().set(
    body_background_fill="#E6F3FF",
    body_text_color="#003D7C",
    button_primary_background_fill="#C11E38",
    button_primary_background_fill_hover="#A5192F",
    button_primary_text_color="white",
    block_title_text_color="#003D7C",
    block_label_background_fill="#E6F3FF",
    input_background_fill="white",
    input_border_color="#003D7C",
    input_border_color_focus="#C11E38",
)

with gr.Blocks(theme=custom_theme, css="custom.css") as iface:
    gr.Markdown("# Gradebook Data Processor", elem_classes=["app-title"])

    with gr.Tabs():
        with gr.TabItem("1. File Upload and Processing"):
            gr.Markdown("## Step 1: Upload your Excel file and process the data")
            file_input = gr.File(label="Upload Excel File")
            process_btn = gr.Button("Process Data", variant="primary")
            output_msg = gr.Textbox(label="Processing Result")
            csv_location = gr.Textbox(label="Output Information")
            download_button = gr.File(visible=False, label="Download Results")
            download_text = gr.Markdown("")
            gr.Markdown("After processing, switch to the 'Data Insights' tab to view results and download files.")

        with gr.TabItem("2. Data Insights Dashboard"):
            gr.Markdown("## Step 2: Review insights and download results")
            summary_stats = gr.Markdown("Upload and process a file to see summary statistics.")
            with gr.Row():
                users_activity_chart = gr.Plot()
                users_courses_chart = gr.Plot()
            scatter_plot = gr.Plot()
            user_table = gr.DataFrame()
            clear_btn = gr.Button("Clear All Data", variant="secondary")
            gr.Markdown("Click 'Clear All Data' to reset the application and start over.")

    # Wire the buttons; the output order must match the lists returned by the handlers above.
    process_btn.click(
        process_and_update,
        inputs=[file_input],
        outputs=[output_msg, csv_location, summary_stats, users_activity_chart,
                 users_courses_chart, scatter_plot, user_table, download_button, download_text]
    )
    clear_btn.click(
        clear_outputs,
        inputs=[],
        outputs=[output_msg, csv_location, summary_stats, users_activity_chart,
                 users_courses_chart, scatter_plot, user_table, download_button, download_text]
    )

if __name__ == "__main__":
    iface.launch()