Update app.py
Browse files
app.py
CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
|
|
2 |
from langchain_mistralai.chat_models import ChatMistralAI
|
3 |
from langchain.prompts import ChatPromptTemplate
|
4 |
from langchain_deepseek import ChatDeepSeek
|
|
|
5 |
import os
|
6 |
from pathlib import Path
|
7 |
import json
|
@@ -214,15 +215,24 @@ class OptimizedRAGLoader:
|
|
214 |
# streaming=True,
|
215 |
# )
|
216 |
|
217 |
-
deepseek_api_key = os.getenv("DEEPSEEK_KEY")
|
|
|
|
|
|
|
|
|
|
|
|
|
218 |
|
219 |
-
|
220 |
-
|
|
|
|
|
221 |
temperature=0,
|
222 |
-
|
223 |
streaming=True,
|
224 |
)
|
225 |
|
|
|
226 |
rag_loader = OptimizedRAGLoader()
|
227 |
retriever = rag_loader.get_retriever(k=5) # Reduced k for faster retrieval
|
228 |
|
|
|
2 |
from langchain_mistralai.chat_models import ChatMistralAI
|
3 |
from langchain.prompts import ChatPromptTemplate
|
4 |
from langchain_deepseek import ChatDeepSeek
|
5 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
6 |
import os
|
7 |
from pathlib import Path
|
8 |
import json
|
|
|
215 |
# streaming=True,
|
216 |
# )
|
217 |
|
218 |
+
# deepseek_api_key = os.getenv("DEEPSEEK_KEY")
|
219 |
+
# llm = ChatDeepSeek(
|
220 |
+
# model="deepseek-chat",
|
221 |
+
# temperature=0,
|
222 |
+
# api_key=deepseek_api_key,
|
223 |
+
# streaming=True,
|
224 |
+
# )
|
225 |
|
226 |
+
|
227 |
+
gemini_api_key = os.getenv("GEMINI_KEY")
|
228 |
+
llm = ChatGoogleGenerativeAI(
|
229 |
+
model="gemini-2.0-flash-lite",
|
230 |
temperature=0,
|
231 |
+
google_api_key=gemini_api_key,
|
232 |
streaming=True,
|
233 |
)
|
234 |
|
235 |
+
|
236 |
rag_loader = OptimizedRAGLoader()
|
237 |
retriever = rag_loader.get_retriever(k=5) # Reduced k for faster retrieval
|
238 |
|