Update app.py
app.py CHANGED
@@ -5,23 +5,23 @@ import google.generativeai as genai
 from langchain_community.vectorstores import FAISS
 from langchain_google_genai import ChatGoogleGenerativeAI
 
-genai.configure(api_key="")
+genai.configure(api_key="AIzaSyD2o8vjePJb6z8vT_PVe82lVWMD3_cBL0g")
 
 
-def predict(message :str
+def predict(message :str ) -> str:
     model = genai.GenerativeModel("gemini-pro")
     his = []
-    for i,j in history:
-        his.extend([
-            {"role": "user", "parts": i},
-            {"role": "model", "parts": j},
-        ])
+    # for i,j in history:
+    #     his.extend([
+    #         {"role": "user", "parts": i},
+    #         {"role": "model", "parts": j},
+    #     ])
     chat = model.start_chat(
         history=his
     )
     response = chat.send_message(message)
     return response.text
-iface = gr.Interface(fn = predict,inputs = ["text"
+iface = gr.Interface(fn = predict,inputs = ["text"],outputs = "text")
 iface.launch()
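
For context, a minimal runnable sketch of what the updated app.py plausibly looks like as a whole. The hunk only covers lines 5-27, so the top-of-file imports are assumptions (import gradio as gr in particular); import google.generativeai as genai is visible as line 4 in the hunk header. The langchain_community and langchain_google_genai imports shown in the diff are left out here because nothing in the visible code uses them, and the hard-coded API key is swapped for an environment-variable lookup (GOOGLE_API_KEY is a placeholder name), so the literal key from the diff is not repeated.

import os

import gradio as gr                   # assumed: the diff does not show lines 1-3 of the file
import google.generativeai as genai   # shown as line 4 in the hunk header

# Placeholder: read the Gemini key from an environment variable / Space secret
# instead of hard-coding it the way the diff does.
genai.configure(api_key=os.environ["GOOGLE_API_KEY"])


def predict(message: str) -> str:
    # One-off chat per request: the history-building loop is commented out in
    # the updated file, so every chat starts with an empty history.
    model = genai.GenerativeModel("gemini-pro")
    his = []
    chat = model.start_chat(history=his)
    response = chat.send_message(message)
    return response.text


iface = gr.Interface(fn=predict, inputs=["text"], outputs="text")
iface.launch()

Because the updated predict takes only message, each request starts a fresh chat with no prior turns; re-enabling the commented-out loop would also require restoring a history parameter in the signature, which the removed "for i,j in history:" line suggests the earlier version had.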