# app.py — Hugging Face Space: download models (and tokenizers) and save them locally.
# Provenance (from the HF file viewer): author awacke1, commit f451bf7 (verified), 1.63 kB.
import os
import gradio as gr
from transformers import AutoModel, AutoTokenizer
def process_models(model_name, save_dir, additional_models):
    """Download the primary model plus any additional models and save each one.

    Each model id gets its own subfolder under *save_dir* (slashes in the id
    are flattened to underscores so ``org/name`` does not create nested dirs).

    Args:
        model_name: Hugging Face model id of the primary model.
        save_dir: Directory under which per-model subfolders are created.
        additional_models: Optional iterable of extra model ids; may be
            None or empty (falsy values are skipped entirely).

    Returns:
        A newline-joined Markdown log describing each success or failure.
        Failures never raise — every exception is caught and logged.
    """
    log_lines = []

    def _fetch_and_save(name):
        """Load one model + tokenizer, save both, and append a log line."""
        log_lines.append(f"πŸš€ Loading model: **{name}**")
        try:
            model = AutoModel.from_pretrained(name)
            tokenizer = AutoTokenizer.from_pretrained(name)
            model_save_path = os.path.join(save_dir, name.replace("/", "_"))
            os.makedirs(model_save_path, exist_ok=True)
            model.save_pretrained(model_save_path)
            # BUG FIX: the tokenizer was loaded but never persisted, leaving
            # the saved directory unusable without re-downloading it.
            tokenizer.save_pretrained(model_save_path)
            log_lines.append(f"βœ… Saved **{name}** to `{model_save_path}`")
        except Exception as e:
            # Best-effort by design: one bad model id must not abort the rest.
            log_lines.append(f"❌ Error with **{name}**: {e}")

    _fetch_and_save(model_name)
    # `or []` tolerates additional_models being None (the original guarded
    # with `if additional_models:` before looping).
    for extra in additional_models or []:
        _fetch_and_save(extra)
    return "\n".join(log_lines)
# Mermaid glossary: a one-line flow summary of our UI actions.
# Left-to-right graph: Model Input -> Load Model -> Save Model, with
# Additional Models also feeding into Load Model.
# NOTE(review): not referenced in this chunk — presumably rendered by a
# Markdown/Mermaid component defined elsewhere in the file; confirm usage.
mermaid_glossary: str = """graph LR
A[πŸš€ Model Input] --> B[Load Model]
B --> C[πŸ’Ύ Save Model]
D[🧩 Additional Models] --> B
"""