Joshua Sundance Bailey committed on
Commit
fe587f8
·
unverified ·
2 Parent(s): c7f2fc6 f9ebf1f

Merge pull request #114 from connorsutton/main

Browse files
langchain-streamlit-demo/app.py CHANGED
@@ -23,6 +23,7 @@ from streamlit_feedback import streamlit_feedback
23
  from defaults import default_values
24
  from llm_resources import (
25
  get_agent,
 
26
  get_llm,
27
  get_runnable,
28
  get_texts_and_multiretriever,
@@ -494,17 +495,29 @@ if st.session_state.llm:
494
  chat_prompt,
495
  prompt,
496
  )
 
497
  doc_chain_tool = Tool.from_function(
498
  func=lambda s: st.session_state.doc_chain.invoke(
499
  s,
500
  # config=get_config(callbacks),
501
  ),
502
  name="user-document-chat",
503
- description="this assistant returns a response based on the user's custom context. "
 
 
 
 
 
 
 
 
 
 
 
504
  "if the user's meaning is unclear, perhaps the answer is here. "
505
  "generally speaking, try this tool before conducting web research.",
506
  )
507
- TOOLS = [doc_chain_tool, research_assistant_tool] + default_tools
508
 
509
  st.session_state.chain = get_agent(
510
  TOOLS,
 
23
  from defaults import default_values
24
  from llm_resources import (
25
  get_agent,
26
+ get_doc_agent,
27
  get_llm,
28
  get_runnable,
29
  get_texts_and_multiretriever,
 
495
  chat_prompt,
496
  prompt,
497
  )
498
+
499
# Tool wrapping the raw document chain: answers from the user's uploaded context.
doc_chain_tool = Tool.from_function(
    func=lambda s: st.session_state.doc_chain.invoke(
        s,
        # config=get_config(callbacks),
    ),
    name="user-document-chat",
    description="this assistant returns a response based on the user's custom context. ",
)
# Agent that can decompose a complex question into sub-questions and answer
# each one against the document chain before composing a final answer.
doc_chain_agent = get_doc_agent(
    [doc_chain_tool],
)
doc_question_tool = Tool.from_function(
    func=lambda s: doc_chain_agent.invoke(
        s,
    ),
    name="document-question-tool",
    # NOTE: these adjacent string literals are concatenated implicitly; each
    # piece except the last must end with a space. The original omitted the
    # space after "questions.", yielding "questions.if the user's ..." in the
    # prompt the LLM sees.
    description="this assistant answers a question based on the user's custom context. "
    "this assistant responds to fully formed questions. "
    "if the user's meaning is unclear, perhaps the answer is here. "
    "generally speaking, try this tool before conducting web research.",
)
# Expose the agent-backed question tool (not the raw chain tool) to the top-level agent.
TOOLS = [doc_question_tool, research_assistant_tool] + default_tools
521
 
522
  st.session_state.chain = get_agent(
523
  TOOLS,
langchain-streamlit-demo/llm_resources.py CHANGED
@@ -2,7 +2,7 @@ import uuid
2
  from tempfile import NamedTemporaryFile
3
  from typing import Tuple, List, Optional, Dict
4
 
5
- from langchain.agents import AgentExecutor
6
  from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
7
  from langchain.callbacks.base import BaseCallbackHandler
8
  from langchain.chains import LLMChain
@@ -16,7 +16,7 @@ from langchain.chat_models import (
16
  from langchain.document_loaders import PyPDFLoader
17
  from langchain.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
18
  from langchain.llms.base import BaseLLM
19
- from langchain.prompts import MessagesPlaceholder
20
  from langchain.retrievers import EnsembleRetriever
21
  from langchain.retrievers.multi_query import MultiQueryRetriever
22
  from langchain.retrievers.multi_vector import MultiVectorRetriever
@@ -82,6 +82,48 @@ def get_agent(
82
  )
83
 
84
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
85
  def get_runnable(
86
  use_document_chat: bool,
87
  document_chat_chain_type: str,
 
2
  from tempfile import NamedTemporaryFile
3
  from typing import Tuple, List, Optional, Dict
4
 
5
+ from langchain.agents import AgentExecutor, AgentType, initialize_agent
6
  from langchain.agents.openai_functions_agent.base import OpenAIFunctionsAgent
7
  from langchain.callbacks.base import BaseCallbackHandler
8
  from langchain.chains import LLMChain
 
16
  from langchain.document_loaders import PyPDFLoader
17
  from langchain.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
18
  from langchain.llms.base import BaseLLM
19
+ from langchain.prompts import MessagesPlaceholder, ChatPromptTemplate
20
  from langchain.retrievers import EnsembleRetriever
21
  from langchain.retrievers.multi_query import MultiQueryRetriever
22
  from langchain.retrievers.multi_vector import MultiVectorRetriever
 
82
  )
83
 
84
 
85
def get_doc_agent(
    tools: List[BaseTool],
    llm: Optional[BaseLLM] = None,
    agent_type: AgentType = AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
):
    """Build a runnable agent that answers document questions using *tools*.

    Args:
        tools: Tools the agent may call (typically a single document-chat tool).
        llm: Model to drive the agent; defaults to a deterministic GPT-4 Turbo.
            NOTE(review): annotated ``BaseLLM`` but the default is a chat model
            (``ChatOpenAI``) — the annotation is looser than actual usage.
        agent_type: LangChain agent flavor; defaults to structured-chat ReAct.

    Returns:
        A runnable mapping a plain string question to the agent's string answer.
    """
    # Local import: default system prefix for the structured-chat agent prompt.
    from langchain.agents.structured_chat.prompt import PREFIX

    if llm is None:
        llm = ChatOpenAI(
            model_name="gpt-4-1106-preview",
            temperature=0.0,
            streaming=True,
        )
    # NOTE(review): the original built a ChatPromptTemplate and passed it as
    # ``prompt=`` to initialize_agent(); initialize_agent has no such parameter
    # and forwards unknown kwargs to AgentExecutor, where pydantic silently
    # drops them — the instructions never reached the agent. The structured-chat
    # agent instead takes its system text via agent_kwargs["prefix"], so the
    # custom instructions are prepended to the stock PREFIX here.
    prefix = (
        "You assist a chatbot with answering questions about a document. "
        "If necessary, break up incoming questions into multiple parts, "
        "and use the tools provided to answer smaller questions before "
        "answering the larger question.\n\n" + PREFIX
    )
    agent_executor = initialize_agent(
        tools,
        llm,
        agent=agent_type,
        verbose=True,
        handle_parsing_errors=True,
        agent_kwargs={"prefix": prefix},
    )
    # Accept a bare string as input and unwrap the agent's {"output": ...} dict.
    return (
        {"input": RunnablePassthrough()}
        | agent_executor
        | (lambda output: output["output"])
    )
125
+
126
+
127
  def get_runnable(
128
  use_document_chat: bool,
129
  document_chat_chain_type: str,
requirements.txt CHANGED
@@ -1,11 +1,11 @@
1
- anthropic==0.7.8
2
  beautifulsoup4==4.12.2
3
  black==23.12.0
4
  certifi>=2023.7.22 # not directly required, pinned by Snyk to avoid a vulnerability
5
- duckduckgo-search==4.0.0
6
  faiss-cpu==1.7.4
7
- langchain==0.0.350
8
- langsmith==0.0.71
9
  mypy==1.7.1
10
  numexpr==2.8.8
11
  numpy>=1.22.2 # not directly required, pinned by Snyk to avoid a vulnerability
 
1
+ anthropic==0.8.0
2
  beautifulsoup4==4.12.2
3
  black==23.12.0
4
  certifi>=2023.7.22 # not directly required, pinned by Snyk to avoid a vulnerability
5
+ duckduckgo-search==4.1.0
6
  faiss-cpu==1.7.4
7
+ langchain==0.0.351
8
+ langsmith==0.0.72
9
  mypy==1.7.1
10
  numexpr==2.8.8
11
  numpy>=1.22.2 # not directly required, pinned by Snyk to avoid a vulnerability