Spaces: Runtime error

Commit e817d77 · Kuba Fietkiewicz committed · 1 Parent(s): 5149f78
add cryptography

Files changed:
- app.py +0 -11
- requirements.txt +2 -1
app.py CHANGED
@@ -45,20 +45,9 @@ qa_chain = ConversationalRetrievalChain.from_llm(
 )
 
 def predict(message, history):
-    # Convert the history into LangChain format
-    history_langchain_format = []
-    for human, ai in history:
-        history_langchain_format.append(HumanMessage(content=human))
-        history_langchain_format.append(AIMessage(content=ai))
-
-    # Add the current user message
-    history_langchain_format.append(HumanMessage(content=message))
-
     # Get a response from the Conversational Retrieval Chain
     response = qa_chain.run(question=message)
 
-    print(response)
-
     # Extract and return the content of the response
     return response  # or modify as needed based on the response structure
 
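After this change, predict() no longer rebuilds the chat history into HumanMessage/AIMessage objects and no longer prints the raw response; it simply forwards the question to the chain. Below is a minimal sketch, not the Space's actual code, of how such a predict() is typically wired up: the PDF path, model, memory, and Gradio ChatInterface are assumptions, since only qa_chain and predict() appear in the diff above.

import gradio as gr
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.document_loaders import PyPDFLoader
from langchain.embeddings import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
from langchain.vectorstores import Chroma

# Index a PDF with pypdf + chromadb (both already in requirements.txt).
# "docs/example.pdf" is a hypothetical path, not taken from the Space.
pages = PyPDFLoader("docs/example.pdf").load()
vectorstore = Chroma.from_documents(pages, OpenAIEmbeddings())

# With memory attached, the chain tracks chat_history itself, so
# qa_chain.run(question=...) does not need the history that predict() receives.
qa_chain = ConversationalRetrievalChain.from_llm(
    ChatOpenAI(temperature=0),
    retriever=vectorstore.as_retriever(),
    memory=ConversationBufferMemory(memory_key="chat_history", return_messages=True),
)

def predict(message, history):
    # Get a response from the Conversational Retrieval Chain
    return qa_chain.run(question=message)

gr.ChatInterface(predict).launch()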
requirements.txt CHANGED
@@ -3,4 +3,5 @@ openai
 langchain
 chromadb
 pypdf
-tiktoken
+tiktoken
+cryptography
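The only dependency change is the new cryptography entry (tiktoken is rewritten, most likely because the old file lacked a trailing newline). A plausible reason, stated here as an assumption rather than anything recorded in the commit, is that pypdf needs the cryptography package to open AES-encrypted PDFs; a small sketch with a hypothetical file name:

# Assumed illustration only: pypdf relies on `cryptography` to decrypt
# AES-encrypted PDFs. "docs/encrypted.pdf" is a hypothetical file name.
from pypdf import PdfReader

reader = PdfReader("docs/encrypted.pdf")
if reader.is_encrypted:
    # Raises pypdf.errors.DependencyError if `cryptography` (or PyCryptodome)
    # is not installed and the PDF uses AES encryption.
    reader.decrypt("")
print(len(reader.pages))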