IC4T committed on
Commit 25d9afd · 1 Parent(s): 328524f
Files changed (1): app.py (+4 -2)
app.py CHANGED
@@ -19,7 +19,8 @@ from instruct_pipeline import InstructionTextGenerationPipeline
 
 from training.generate import load_model_tokenizer_for_generate
 from ctransformers import AutoModelForCausalLM
-
+from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
+import ctransformers
 # from training.generate import InstructionTextGenerationPipeline, load_model_tokenizer_for_generate
 # from googletrans import Translator
 # translator = Translator()
@@ -65,8 +66,9 @@ match model_type:
 # generate_text = InstructionTextGenerationPipeline(model=model, tokenizer=tokenizer)
 # llm = HuggingFacePipeline(pipeline=generate_text)
 
-llm = AutoModelForCausalLM.from_pretrained(model_path, model_type='dolly-v2')
+# llm = AutoModelForCausalLM.from_pretrained(model_path, model_type='dolly-v2')
 
+llm = CTransformers(model_path, callbacks=[StreamingStdOutCallbackHandler()])
 
 # llm = HuggingFacePipeline(
 # pipeline=InstructionTextGenerationPipeline(
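
The commit swaps the raw ctransformers AutoModelForCausalLM loader for LangChain's CTransformers wrapper and attaches StreamingStdOutCallbackHandler so generated tokens are streamed to stdout. Below is a minimal sketch, not the committed code, of how that wrapper is typically constructed: the model file, model_type value, and prompt are placeholder assumptions, and LangChain's wrapper normally takes the model path via the model= keyword (the CTransformers name itself also has to be imported, e.g. from langchain.llms, which the shown hunks do not include).

# Minimal sketch, assuming a local GGML checkpoint; values below are placeholders.
from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain.llms import CTransformers

llm = CTransformers(
    model="models/dolly-v2-12b.ggmlv3.q4_0.bin",  # hypothetical path, not the app's model_path
    model_type="dolly-v2",                        # architecture hint passed through to ctransformers
    callbacks=[StreamingStdOutCallbackHandler()], # print tokens to stdout as they are generated
)

print(llm("Summarize what this app does in one sentence."))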