Pijush2023 committed
Commit 129c611 · verified · 1 Parent(s): 31f469c

Update app.py

Files changed (1)
  1. app.py +4 -35
app.py CHANGED
@@ -906,7 +906,7 @@ pc = Pinecone(api_key=os.environ['PINECONE_API_KEY'])
 
 index_name = "radardata07242024"
 vectorstore = PineconeVectorStore(index_name=index_name, embedding=embeddings)
-retriever = vectorstore.as_retriever(search_kwargs={'k': 5})
+retriever = vectorstore.as_retriever(search_kwargs={'k': 5, 'filter': {}})
 
 chat_model = ChatOpenAI(api_key=os.environ['OPENAI_API_KEY'], temperature=0, model='gpt-4o')
 
@@ -1034,37 +1034,6 @@ def retriever_neo4j(question: str):
     logging.debug(f"Structured data: {structured_data}")
     return structured_data
 
-_template = """Given the following conversation and a follow-up question, rephrase the follow-up question to be a standalone question,
-in its original language.
-Chat History:
-{chat_history}
-Follow Up Input: {question}
-Standalone question:"""
-
-CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(_template)
-
-def _format_chat_history(chat_history: list[tuple[str, str]]) -> list:
-    buffer = []
-    for human, ai in chat_history:
-        buffer.append(HumanMessage(content=human))
-        buffer.append(AIMessage(content=ai))
-    return buffer
-
-_search_query = RunnableBranch(
-    (
-        RunnableLambda(lambda x: bool(x.get("chat_history"))).with_config(
-            run_name="HasChatHistoryCheck"
-        ),
-        RunnablePassthrough.assign(
-            chat_history=lambda x: _format_chat_history(x["chat_history"])
-        )
-        | CONDENSE_QUESTION_PROMPT
-        | ChatOpenAI(temperature=0, api_key=os.environ['OPENAI_API_KEY'])
-        | StrOutputParser(),
-    ),
-    RunnableLambda(lambda x : x["question"]),
-)
-
 template = """Answer the question based only on the following context:
 {context}
 Question: {question}
@@ -1076,7 +1045,7 @@ qa_prompt = ChatPromptTemplate.from_template(template)
 chain_neo4j = (
     RunnableParallel(
         {
-            "context": _search_query | retriever_neo4j,
+            "context": RunnablePassthrough() | retriever_neo4j,
             "question": RunnablePassthrough(),
         }
     )
@@ -1104,8 +1073,7 @@ def generate_answer(message, choice, retrieval_mode):
     elif retrieval_mode == "Knowledge-Graph":
         response = chain_neo4j.invoke({"question": message})
         logging.debug(f"Knowledge-Graph response: {response}")
-        formatted_response = prompt_template.format(context=response, question=message)
-        return formatted_response, extract_addresses(formatted_response)
+        return response, extract_addresses(response)
     else:
         return "Invalid retrieval mode selected.", []
 
@@ -1686,3 +1654,4 @@ with gr.Blocks(theme='Pijush2023/scikit-learn-pijush') as demo:
 
 demo.queue()
 demo.launch(share=True)
+
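
For context on the retriever change at line 909, here is a minimal sketch of the new search_kwargs in use. The index name, k value, and empty filter come from the diff; the empty dict looks intended as a no-op placeholder for later metadata filtering, and the commented-out filter value is hypothetical. The embeddings object is defined elsewhere in app.py, so an OpenAI embedding model is assumed here, along with the PINECONE_API_KEY and OPENAI_API_KEY environment variables app.py already requires.

import os

from langchain_openai import OpenAIEmbeddings
from langchain_pinecone import PineconeVectorStore

# Embedding model assumed; app.py defines `embeddings` elsewhere.
embeddings = OpenAIEmbeddings(api_key=os.environ['OPENAI_API_KEY'])
# PineconeVectorStore reads PINECONE_API_KEY from the environment.
vectorstore = PineconeVectorStore(index_name="radardata07242024", embedding=embeddings)

retriever = vectorstore.as_retriever(
    search_kwargs={
        'k': 5,        # top-5 nearest neighbours, unchanged from before
        'filter': {},  # empty metadata filter added by this commit
        # 'filter': {'source': 'radar'},  # hypothetical: restrict matches by metadata
    }
)

for doc in retriever.invoke("sample query"):
    print(doc.page_content)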
 
 
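The remaining hunks drop the condense-question branch (_search_query, CONDENSE_QUESTION_PROMPT, _format_chat_history) so the user question reaches retriever_neo4j unrewritten, and generate_answer now returns the chain output directly instead of re-formatting it through prompt_template. Below is a minimal, self-contained sketch of the resulting chain shape, with retriever_neo4j stubbed out so the snippet runs without Neo4j or OpenAI credentials; the stub and the get_question helper are illustrative, not the exact app.py wiring.

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnableLambda, RunnableParallel

def retriever_neo4j(question: str) -> str:
    # Stand-in for the real Neo4j-backed retriever defined in app.py.
    return f"[structured graph data for: {question}]"

qa_prompt = ChatPromptTemplate.from_template(
    "Answer the question based only on the following context:\n"
    "{context}\n"
    "Question: {question}"
)

# Pull the question string out of the input dict; app.py uses
# RunnablePassthrough() here, which forwards the whole dict instead.
get_question = RunnableLambda(lambda x: x["question"])

chain_neo4j = (
    RunnableParallel(
        {
            # No standalone-question rewrite: the raw question goes
            # straight to the retriever.
            "context": get_question | retriever_neo4j,
            "question": get_question,
        }
    )
    | qa_prompt
    # The full chain in app.py continues: | chat_model | StrOutputParser()
)

print(chain_neo4j.invoke({"question": "What events are on this week?"}))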