# "Spaces: Sleeping / Sleeping" — Hugging Face Spaces page status text captured
# when this file was scraped; it is not part of the program.
#from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
#model = AutoModelForCausalLM.from_pretrained("bigscience/bloom-1b7")
#tokenizer = AutoTokenizer.from_pretrained("bigscience/bloom-1b7")
#pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=2048, repetition_penalty=1.2, temperature=0.4)
from transformers import pipeline

# Load the pretrained model (Bloom or any other compatible model)
def load_pipeline():
    """Build and return a Hugging Face text-generation pipeline.

    Returns:
        A ``transformers`` text-generation pipeline backed by the
        ``bigscience/bloom-560m`` checkpoint (downloads the model on
        first call if it is not cached locally).
    """
    # Stray "|" scrape artifacts removed: the original lines ended in "|",
    # which is a syntax error in Python.
    return pipeline("text-generation", model="bigscience/bloom-560m")


# Instantiate once at import time so the (expensive) model load happens
# a single time and the pipeline is shared by the rest of the module.
pipe = load_pipeline()