from transformers import AutoTokenizer, AutoModelForCausalLM

# Model name
model_name = "abinayam/gpt-2-tamil"

# Load from Hugging Face
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Save to local folder called 'model'
model.save_pretrained("model")
tokenizer.save_pretrained("model")

print("✅ Model and tokenizer saved successfully in './model'")