JPLTedCas committed
Commit 3b63b31 · verified · 1 Parent(s): df3d965

Create load_model.py

Files changed (1)
  1. load_model.py +14 -0
load_model.py ADDED
@@ -0,0 +1,14 @@
+ #from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
+
+ #model = AutoModelForCausalLM.from_pretrained("bigscience/bloom-1b7")
+ #tokenizer = AutoTokenizer.from_pretrained("bigscience/bloom-1b7")
+
+ #pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=2048, repetition_penalty=1.2, temperature=0.4)
+
+ from transformers import pipeline
+
+ # Load the pretrained model (Bloom or any other compatible model)
+ def load_pipeline():
+     return pipeline("text-generation", model="bigscience/bloom-560m")
+
+ pipe = load_pipeline()
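
A minimal usage sketch for the pipeline this file exposes, assuming load_model.py is importable from the working directory; the prompt text and max_new_tokens value below are illustrative assumptions, not part of the commit:

# Hypothetical usage of the module-level pipeline defined in load_model.py.
from load_model import pipe

# Generate a continuation for an example prompt (illustrative parameters).
outputs = pipe("Hello, how are you?", max_new_tokens=50)
print(outputs[0]["generated_text"])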