Connor Sutton committed on
Commit
f88e711
·
1 Parent(s): 3b54d51

added sub-agent for document retrieval

Browse files
langchain-streamlit-demo/app.py CHANGED
@@ -23,6 +23,7 @@ from streamlit_feedback import streamlit_feedback
23
  from defaults import default_values
24
  from llm_resources import (
25
  get_agent,
 
26
  get_llm,
27
  get_runnable,
28
  get_texts_and_multiretriever,
@@ -494,17 +495,29 @@ if st.session_state.llm:
494
  chat_prompt,
495
  prompt,
496
  )
 
497
  doc_chain_tool = Tool.from_function(
498
  func=lambda s: st.session_state.doc_chain.invoke(
499
  s,
500
  # config=get_config(callbacks),
501
  ),
502
  name="user-document-chat",
 
 
 
 
 
 
 
 
 
 
503
  description="this assistant returns a response based on the user's custom context. "
504
  "if the user's meaning is unclear, perhaps the answer is here. "
505
- "generally speaking, try this tool before conducting web research.",
 
506
  )
507
- TOOLS = [doc_chain_tool, research_assistant_tool] + default_tools
508
 
509
  st.session_state.chain = get_agent(
510
  TOOLS,
 
23
  from defaults import default_values
24
  from llm_resources import (
25
  get_agent,
26
+ get_doc_agent,
27
  get_llm,
28
  get_runnable,
29
  get_texts_and_multiretriever,
 
495
  chat_prompt,
496
  prompt,
497
  )
498
+
499
  doc_chain_tool = Tool.from_function(
500
  func=lambda s: st.session_state.doc_chain.invoke(
501
  s,
502
  # config=get_config(callbacks),
503
  ),
504
  name="user-document-chat",
505
+ description="this assistant returns a response based on the user's custom context. ",
506
+ )
507
+ doc_chain_agent = get_doc_agent(
508
+ [doc_chain_tool],
509
+ )
510
+ doc_agent_tool = Tool.from_function(
511
+ func=lambda s: doc_chain_agent.invoke(
512
+ s,
513
+ ),
514
+ name="document-agent",
515
  description="this assistant returns a response based on the user's custom context. "
516
  "if the user's meaning is unclear, perhaps the answer is here. "
517
+ "generally speaking, try this tool before conducting web research."
518
+ "it is best to send this tool a question, as it will attempt to break complex questions down into several, simpler questions.",
519
  )
520
+ TOOLS = [doc_agent_tool, research_assistant_tool] + default_tools
521
 
522
  st.session_state.chain = get_agent(
523
  TOOLS,
langchain-streamlit-demo/llm_resources.py CHANGED
@@ -2,7 +2,7 @@ import uuid
2
  from tempfile import NamedTemporaryFile
3
  from typing import Tuple, List, Optional, Dict
4
 
5
- from langchain.agents import AgentExecutor
6
  from langchain.agents.openai_functions_agent.agent_token_buffer_memory import (
7
  AgentTokenBufferMemory,
8
  )
@@ -19,7 +19,7 @@ from langchain.chat_models import (
19
  from langchain.document_loaders import PyPDFLoader
20
  from langchain.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
21
  from langchain.llms.base import BaseLLM
22
- from langchain.prompts import MessagesPlaceholder
23
  from langchain.retrievers import EnsembleRetriever
24
  from langchain.retrievers.multi_query import MultiQueryRetriever
25
  from langchain.retrievers.multi_vector import MultiVectorRetriever
@@ -78,6 +78,48 @@ def get_agent(
78
  )
79
 
80
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81
  def get_runnable(
82
  use_document_chat: bool,
83
  document_chat_chain_type: str,
 
2
  from tempfile import NamedTemporaryFile
3
  from typing import Tuple, List, Optional, Dict
4
 
5
+ from langchain.agents import AgentExecutor, AgentType, initialize_agent
6
  from langchain.agents.openai_functions_agent.agent_token_buffer_memory import (
7
  AgentTokenBufferMemory,
8
  )
 
19
  from langchain.document_loaders import PyPDFLoader
20
  from langchain.embeddings import AzureOpenAIEmbeddings, OpenAIEmbeddings
21
  from langchain.llms.base import BaseLLM
22
+ from langchain.prompts import MessagesPlaceholder, ChatPromptTemplate
23
  from langchain.retrievers import EnsembleRetriever
24
  from langchain.retrievers.multi_query import MultiQueryRetriever
25
  from langchain.retrievers.multi_vector import MultiVectorRetriever
 
78
  )
79
 
80
 
81
def get_doc_agent(
    tools: list[BaseTool],
    llm: Optional[BaseLLM] = None,
    agent_type: AgentType = AgentType.STRUCTURED_CHAT_ZERO_SHOT_REACT_DESCRIPTION,
):
    """Build a runnable sub-agent for answering questions about user documents.

    The sub-agent is instructed to decompose complex questions into simpler
    ones and use the supplied *tools* (document-retrieval chains) to answer
    the parts before composing a final answer.

    Args:
        tools: Tools the sub-agent may invoke (e.g. the user-document chain).
        llm: Model driving the agent. When ``None``, a streaming
            ``gpt-4-1106-preview`` chat model at temperature 0 is created.
        agent_type: LangChain agent flavor; defaults to the structured-chat
            zero-shot ReAct agent.

    Returns:
        A runnable accepting a plain string and yielding the agent's final
        ``"output"`` string.
    """
    # Fall back to a deterministic, streaming GPT-4 Turbo preview model.
    model = (
        llm
        if llm is not None
        else ChatOpenAI(
            model_name="gpt-4-1106-preview",
            temperature=0.0,
            streaming=True,
        )
    )

    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                """
            You assist a chatbot with answering questions about a document.
            If necessary, break up incoming questions into multiple parts,
            and use the tools provided to answer smaller questions before
            answering the larger question.
            """,
            ),
            ("user", "{input}"),
            MessagesPlaceholder(variable_name="agent_scratchpad"),
        ],
    )

    # NOTE(review): `prompt=` is forwarded through initialize_agent's **kwargs;
    # structured-chat agents normally build their own prompt via agent_kwargs,
    # so confirm this custom prompt is actually consumed and not ignored.
    executor = initialize_agent(
        tools,
        model,
        agent=agent_type,
        verbose=True,
        memory=None,
        handle_parsing_errors=True,
        prompt=prompt,
    )

    def _final_answer(result):
        # The AgentExecutor returns a dict; callers only want the answer text.
        return result["output"]

    # LCEL pipeline: wrap the raw string into {"input": ...}, run the agent,
    # then project out the "output" field.
    return {"input": RunnablePassthrough()} | executor | _final_answer
121
+
122
+
123
  def get_runnable(
124
  use_document_chat: bool,
125
  document_chat_chain_type: str,