khrek committed on
Commit
218128a
·
1 Parent(s): 9abbb59

Update models.py

Browse files
Files changed (1) hide show
  1. models.py +2 -2
models.py CHANGED
@@ -20,7 +20,7 @@ class Models():
20
 
21
  ner_model = AutoModelForCausalLM.from_pretrained(checkpoint, device_map="auto", torch_dtype=torch.float16, offload_folder="offload", offload_state_dict = True)
22
  tokenizer = AutoTokenizer.from_pretrained("Universal-NER/UniNER-7B-all", use_fast=False, padding="max_length")
23
- pipeline = pipeline(
24
  "text-generation", #task
25
  model=ner_model,
26
  max_length=1000,
@@ -31,7 +31,7 @@ class Models():
31
  num_return_sequences=1
32
  )
33
 
34
- self.llm = HuggingFacePipeline(pipeline = pipeline, model_kwargs = {'temperature':0})
35
  self.prompt = PromptTemplate(template=self.template, input_variables=["input_text","entity_type"])
36
  self.llm_chain = LLMChain(prompt=self.prompt, llm=self.llm)
37
 
 
20
 
21
  ner_model = AutoModelForCausalLM.from_pretrained(checkpoint, device_map="auto", torch_dtype=torch.float16, offload_folder="offload", offload_state_dict = True)
22
  tokenizer = AutoTokenizer.from_pretrained("Universal-NER/UniNER-7B-all", use_fast=False, padding="max_length")
23
+ hf_pipeline = pipeline(
24
  "text-generation", #task
25
  model=ner_model,
26
  max_length=1000,
 
31
  num_return_sequences=1
32
  )
33
 
34
+ self.llm = HuggingFacePipeline(hf_pipeline, model_kwargs = {'temperature':0})
35
  self.prompt = PromptTemplate(template=self.template, input_variables=["input_text","entity_type"])
36
  self.llm_chain = LLMChain(prompt=self.prompt, llm=self.llm)
37