# Hugging Face Space: Gradio UI for downloading and saving Transformers models.
import os

import gradio as gr
from transformers import AutoModel, AutoTokenizer
def process_models(model_name, save_dir, additional_models):
    """Download each requested model and persist it under *save_dir*.

    Args:
        model_name: Hub id of the primary model (e.g. ``"bert-base-uncased"``).
        save_dir: Directory under which one sub-folder per model is created.
        additional_models: Optional iterable of extra hub ids to process.

    Returns:
        A newline-joined markdown log describing each load/save attempt.
        Failures are recorded in the log, never raised, so one bad model id
        does not abort the rest of the batch.
    """
    # NOTE(review): the status emoji in the original were mojibake ("π",
    # "β"); replaced with plausible originals — confirm against the live UI.
    log_lines = []

    def _fetch_and_save(name):
        # One complete load/save cycle for a single hub id.
        log_lines.append(f"📥 Loading model: **{name}**")
        try:
            model = AutoModel.from_pretrained(name)
            tokenizer = AutoTokenizer.from_pretrained(name)
            # "/" is illegal inside a single path component, so flatten
            # "org/name" hub ids into "org_name" directory names.
            model_save_path = os.path.join(save_dir, name.replace("/", "_"))
            os.makedirs(model_save_path, exist_ok=True)
            model.save_pretrained(model_save_path)
            # Bug fix: the tokenizer was loaded but never persisted, leaving
            # the saved directory unusable on its own.
            tokenizer.save_pretrained(model_save_path)
            log_lines.append(f"✅ Saved **{name}** to `{model_save_path}`")
        except Exception as e:  # UI boundary: surface the error in the log
            log_lines.append(f"❌ Error with **{name}**: {e}")

    _fetch_and_save(model_name)
    # `additional_models` may be None or empty when the user supplies none.
    for extra in additional_models or []:
        _fetch_and_save(extra)
    return "\n".join(log_lines)
# Mermaid glossary: a one-line flow summary of our UI actions.
# NOTE(review): node emoji were mojibake in the scraped source ("πΎ",
# "𧩠"); replaced with plausible originals — confirm against the live UI.
mermaid_glossary = """graph LR
    A[📥 Model Input] --> B[Load Model]
    B --> C[💾 Save Model]
    D[🧩 Additional Models] --> B
"""