Joshua Sundance Bailey committed
Commit: f4b7ac3 · Parent(s): 238a2a1

redefine tools

Files changed (1): langchain-streamlit-demo/app.py (+42 -37)

langchain-streamlit-demo/app.py CHANGED
@@ -15,7 +15,6 @@ from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
 from langchain.schema.document import Document
 from langchain.schema.retriever import BaseRetriever
 from langchain.tools import DuckDuckGoSearchRun, WikipediaQueryRun
-from langchain.tools import Tool
 from langchain.utilities import WikipediaAPIWrapper
 from langsmith.client import Client
 from streamlit_feedback import streamlit_feedback
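Note: the standalone "from langchain.tools import Tool" import is dropped because the rest of this commit builds tools with LangChain's @tool decorator instead of constructing Tool objects by hand. As a rough, repo-independent illustration (none of the names below come from app.py), the two styles are roughly equivalent; the decorator takes the tool name from its argument and the description from the function's docstring:

from langchain.agents.tools import tool
from langchain.tools import Tool


def _search(query: str) -> str:
    """Look something up (placeholder body for illustration)."""
    return f"results for {query}"


# old style: construct the Tool object explicitly
search_tool_old = Tool(
    name="search",
    func=_search,
    description="Look something up.",
)


# new style used in this commit: decorate the function;
# the name comes from the decorator argument, the description from the docstring
@tool("search")
def search_tool(query: str) -> str:
    """Look something up."""
    return f"results for {query}"


print(search_tool.name, "-", search_tool.description)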
@@ -461,27 +460,30 @@ if st.session_state.llm:
     )
     st_callback = StreamlitCallbackHandler(st.container())
     callbacks.append(st_callback)
-    … (10 removed lines, content not shown in this view)
+
+    from langchain.agents.tools import tool
+    from langchain.callbacks.manager import Callbacks
+
+    @tool("web-research-assistant")
+    def research_assistant_tool(question: str, callbacks: Callbacks = None):
+        """this assistant returns a comprehensive report based on web research.
+        it's slow and relatively expensive, so use it sparingly.
+        for quick facts, use duckduckgo instead.
+        """
+        return research_assistant_chain.invoke(
+            dict(question=question),
+            config=get_config(callbacks),
+        )

     python_coder_agent = get_python_agent(st.session_state.llm)

-    … (7 removed lines, content not shown in this view)
-    )
+    @tool("python-coder-assistant")
+    def python_coder_tool(input_str: str, callbacks: Callbacks = None):
+        """this assistant writes Python code. give it clear instructions and requirements."""
+        return python_coder_agent.invoke(
+            dict(input=input_str),
+            config=get_config(callbacks),
+        )

     TOOLS = [research_assistant_tool, python_coder_tool] + default_tools
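Each redefined tool accepts an optional callbacks argument (typed as langchain.callbacks.manager.Callbacks) and forwards it via config=get_config(callbacks), so the wrapped chain or agent reports its intermediate steps to the same handlers, including the StreamlitCallbackHandler appended just above. get_config is defined elsewhere in app.py and is not part of this diff; a minimal stand-in that forwards callbacks the same way (an assumption, not the repo's actual implementation) could be:

from langchain.callbacks.manager import Callbacks


def get_config(callbacks: Callbacks = None) -> dict:
    """Hypothetical stand-in: wrap callbacks in a RunnableConfig-style dict
    that .invoke(..., config=...) accepts."""
    return {"callbacks": callbacks} if callbacks else {}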
@@ -496,29 +498,32 @@ if st.session_state.llm:
         prompt,
     )

-    … (3 removed lines, content not shown in this view)
+    @tool("user-document-chat")
+    def doc_chain_tool(input_str: str, callbacks: Callbacks = None):
+        """this assistant returns a response based on the user's custom context."""
+        return st.session_state.doc_chain.invoke(
+            input_str,
             config=get_config(callbacks),
-        )
-    … (1 removed line, content not shown in this view)
-        description="this assistant returns a response based on the user's custom context. ",
-    )
+        )
+
     doc_chain_agent = get_doc_agent(
         [doc_chain_tool],
     )
-    … (3 removed lines, content not shown in this view)
+
+    @tool("document-question-tool")
+    def doc_question_tool(input_str: str, callbacks: Callbacks = None):
+        """
+        this assistant answers a question based on the user's custom context.
+        this assistant responds to fully formed questions.
+        Do not send anything besides a question. It already has context.
+        if the user's meaning is unclear, perhaps the answer is here.
+        generally speaking, try this tool before conducting web research.
+        """
+        return doc_chain_agent.invoke(
+            input_str,
             config=get_config(callbacks),
-        )
-    … (1 removed line, content not shown in this view)
-        description="this assistant answers a question based on the user's custom context. "
-        "this assistant responds to fully formed questions."
-        "Do not send anything besides a question. It already has context."
-        "if the user's meaning is unclear, perhaps the answer is here. "
-        "generally speaking, try this tool before conducting web research.",
-    )
+        )
+
     TOOLS = [doc_question_tool, research_assistant_tool] + default_tools

     st.session_state.chain = get_agent(
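Both hunks end by collecting the redefined tools into a TOOLS list that is handed to the repo's get_agent helper, whose definition is not part of this diff. For context only, a stock LangChain agent of the same era can consume such a tool list directly; the sketch below is a generic example (it assumes an OpenAI API key in the environment and uses no code from app.py):

from langchain.agents import AgentType, initialize_agent
from langchain.agents.tools import tool
from langchain.chat_models import ChatOpenAI


@tool("echo")
def echo_tool(text: str) -> str:
    """Repeat the input text back to the caller."""
    return text


llm = ChatOpenAI(temperature=0)
agent = initialize_agent(
    tools=[echo_tool],
    llm=llm,
    agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
    verbose=True,
)
print(agent.run("Use the echo tool on the word hello."))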