Update app.py
app.py
CHANGED
@@ -32,7 +32,7 @@ embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-
 from langchain_chroma import Chroma
 
 vectorstore = Chroma(
-    collection_name="
+    collection_name="car_dataset_store",
     embedding_function=embed_model,
     persist_directory="./",
 )
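For reference, a minimal sketch of how the renamed collection might be populated and queried. The langchain_huggingface import, the full embedding model name (the hunk header truncates it at "mixedbread-ai/mxbai-embed-large-"), and the sample car rows are assumptions; only the Chroma(...) call itself is visible in this diff.

from langchain_huggingface import HuggingFaceEmbeddings
from langchain_chroma import Chroma

# Assumed embedding model; the hunk header only shows "mixedbread-ai/mxbai-embed-large-".
embed_model = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")

vectorstore = Chroma(
    collection_name="car_dataset_store",   # new name introduced by this commit
    embedding_function=embed_model,
    persist_directory="./",
)

# Hypothetical rows; the real app presumably indexes its car dataset instead.
vectorstore.add_texts([
    "Make: Toyota, Model: Corolla, Fuel type: petrol, Year: 2019",
    "Make: Tesla, Model: Model 3, Fuel type: electric, Year: 2022",
])

retriever = vectorstore.as_retriever()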
@@ -44,7 +44,7 @@ retriever = vectorstore.as_retriever()
 
 from langchain_core.prompts import PromptTemplate
 
-template = ("""You are a
+template = ("""You are a car expert.
 Use the provided context to answer the question.
 If you don't know the answer, say so. Explain your answer in detail.
 Do not discuss the context in your response; just provide the answer directly.
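A sketch of how the edited prompt text is commonly wrapped in a PromptTemplate. The closing part of the template string and the {context}/{question} variable names are assumptions, since only the opening lines appear in this hunk.

from langchain_core.prompts import PromptTemplate

template = """You are a car expert.
Use the provided context to answer the question.
If you don't know the answer, say so. Explain your answer in detail.
Do not discuss the context in your response; just provide the answer directly.

Context: {context}

Question: {question}

Answer:"""

# from_template infers the input variables ({context}, {question}) from the string.
rag_prompt = PromptTemplate.from_template(template)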
@@ -75,12 +75,12 @@ def rag_memory_stream(text):
         partial_text += new_text
         yield partial_text
 
-examples = ['I
+examples = ['I need a car', 'what is the make and fuel type of a car?']
 
 
 
 
-title = "Real-time AI App with Groq API and LangChain to Answer
+title = "Real-time AI App with Groq API and LangChain to Answer car questions"
 demo = gr.Interface(
     title=title,
     fn=rag_memory_stream,
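Finally, a sketch of the Gradio wiring around rag_memory_stream with the new examples and title, assuming a plain text-in/text-out interface and a standard demo.launch() at the end of app.py; only title= and fn= are visible in this hunk.

import gradio as gr

def rag_memory_stream(text):
    # Stand-in for the streaming generator defined earlier in app.py.
    partial_text = ""
    for new_text in ("streamed ", "answer ", "chunks"):
        partial_text += new_text
        yield partial_text

examples = ['I need a car', 'what is the make and fuel type of a car?']
title = "Real-time AI App with Groq API and LangChain to Answer car questions"

demo = gr.Interface(
    title=title,
    fn=rag_memory_stream,
    inputs="text",    # assumed; not shown in the diff
    outputs="text",   # assumed; not shown in the diff
    examples=examples,
)

if __name__ == "__main__":
    demo.launch()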