IC4T committed · Commit 243abbf
1 Parent(s): ffe2d8c
commit
app.py CHANGED
@@ -60,9 +60,13 @@ retriever = db.as_retriever(search_kwargs={"k": target_source_chunks})
 
 match model_type:
     case "dolly-v2-3b":
-        model, tokenizer = load_model_tokenizer_for_generate(model_path)
-        generate_text = InstructionTextGenerationPipeline(model=model, tokenizer=tokenizer)
-        llm = HuggingFacePipeline(pipeline=generate_text)
+        # model, tokenizer = load_model_tokenizer_for_generate(model_path)
+        # generate_text = InstructionTextGenerationPipeline(model=model, tokenizer=tokenizer)
+        # llm = HuggingFacePipeline(pipeline=generate_text)
+
+        llm = AutoModelForCausalLM.from_pretrained(model_path, model_type='dolly-v2')
+
+
         # llm = HuggingFacePipeline(
         #     pipeline=InstructionTextGenerationPipeline(
         #         # Return the full text, because this is what the HuggingFacePipeline expects.
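
For reference, a minimal sketch of how the newly added loading call could be exercised on its own. This assumes the AutoModelForCausalLM used above comes from the ctransformers package (its import is outside this hunk) and that model_path points at a GGML-format dolly-v2 checkpoint; the concrete path below is hypothetical.

from ctransformers import AutoModelForCausalLM  # assumption: ctransformers, which accepts the model_type kwarg used above

model_path = "models/dolly-v2-3b.ggmlv3.bin"  # hypothetical local checkpoint path
llm = AutoModelForCausalLM.from_pretrained(model_path, model_type="dolly-v2")

# ctransformers models are directly callable with a prompt string
print(llm("Question: What does the retriever return?\nAnswer:"))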