# Earlier variant, kept for reference: load the larger bloom-1b7 model and
# tokenizer explicitly, then build the pipeline with custom generation settings.
# from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# model = AutoModelForCausalLM.from_pretrained("bigscience/bloom-1b7")
# tokenizer = AutoTokenizer.from_pretrained("bigscience/bloom-1b7")

# pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=2048, repetition_penalty=1.2, temperature=0.4)

from transformers import pipeline

# Load the pretrained model (Bloom or any other compatible model)
def load_pipeline():
    return pipeline("text-generation", model="bigscience/bloom-560m")

pipe = load_pipeline()
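
# Minimal usage sketch (an assumption, not part of the original file): generate
# text with the loaded pipeline. The prompt and max_new_tokens value below are
# illustrative only; text-generation pipelines return a list of dicts with a
# "generated_text" key.
if __name__ == "__main__":
    result = pipe("Once upon a time", max_new_tokens=50)
    print(result[0]["generated_text"])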