Spaces:
Running
Running
Chandranshu Jain
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -13,9 +13,10 @@ from langchain.prompts import PromptTemplate
|
|
13 |
from langchain_community.document_loaders import PyPDFLoader
|
14 |
from langchain_chroma import Chroma
|
15 |
from langchain_community.vectorstores import Chroma
|
16 |
-
|
17 |
-
import
|
18 |
-
|
|
|
19 |
|
20 |
#st.set_page_config(page_title="Document Genie", layout="wide")
|
21 |
|
@@ -89,11 +90,7 @@ def get_conversational_chain():
|
|
89 |
model_id = "google/gemma-1.1-2b-it"
|
90 |
dtype = torch.bfloat16
|
91 |
|
92 |
-
|
93 |
-
llm= AutoModelForCausalLM.from_pretrained(
|
94 |
-
model_id,
|
95 |
-
torch_dtype=dtype,
|
96 |
-
)
|
97 |
|
98 |
pt = ChatPromptTemplate.from_template(template)
|
99 |
# Retrieve and generate using the relevant snippets of the blog.
|
|
|
13 |
from langchain_community.document_loaders import PyPDFLoader
|
14 |
from langchain_chroma import Chroma
|
15 |
from langchain_community.vectorstores import Chroma
|
16 |
+
# Use a pipeline as a high-level helper
|
17 |
+
from transformers import pipeline
|
18 |
+
|
19 |
+
|
20 |
|
21 |
#st.set_page_config(page_title="Document Genie", layout="wide")
|
22 |
|
|
|
90 |
model_id = "google/gemma-1.1-2b-it"
|
91 |
dtype = torch.bfloat16
|
92 |
|
93 |
+
llm = pipeline("text-generation", model="google/gemma-1.1-2b-it")
|
|
|
|
|
|
|
|
|
94 |
|
95 |
pt = ChatPromptTemplate.from_template(template)
|
96 |
# Retrieve and generate using the relevant snippets of the blog.
|