Commit
·
3e595a5
1
Parent(s):
f29a3a5
remove: unnecessary things update the code structure
Browse files- .gitignore +3 -2
- agent.ipynb +0 -192
- example_agent_chatgpt.ipynb +0 -233
- modules/__pycache__/job.cpython-312.pyc +0 -0
- modules/__pycache__/resume.cpython-312.pyc +0 -0
- modules/data_class.py +102 -0
- modules/instrctions.py +91 -0
- modules/job.py +0 -45
- modules/llm_in_use.py +25 -0
- modules/nodes.py +90 -0
- modules/resume.py +0 -95
- modules/tools.py +147 -0
- paintrek-chat-v1.ipynb +51 -67
- paintrek-chat-v2.ipynb +232 -0
- testtext_tmp +3 -0
- tools.py +0 -31
.gitignore
CHANGED
@@ -1,2 +1,3 @@
|
|
1 |
-
|
2 |
-
|
|
|
|
1 |
+
.env
|
2 |
+
__pycache__
|
3 |
+
modules/__pycache__
|
agent.ipynb
DELETED
@@ -1,192 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"cells": [
|
3 |
-
{
|
4 |
-
"cell_type": "code",
|
5 |
-
"execution_count": 1,
|
6 |
-
"metadata": {},
|
7 |
-
"outputs": [
|
8 |
-
{
|
9 |
-
"name": "stderr",
|
10 |
-
"output_type": "stream",
|
11 |
-
"text": [
|
12 |
-
"/home/frank-elite/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
|
13 |
-
" from .autonotebook import tqdm as notebook_tqdm\n"
|
14 |
-
]
|
15 |
-
}
|
16 |
-
],
|
17 |
-
"source": [
|
18 |
-
"import os\n",
|
19 |
-
"\n",
|
20 |
-
"from typing import Annotated, Literal\n",
|
21 |
-
"from typing_extensions import TypedDict\n",
|
22 |
-
"from langgraph.prebuilt import ToolNode\n",
|
23 |
-
"from langchain_core.messages import HumanMessage\n",
|
24 |
-
"from langgraph.graph import StateGraph, MessagesState, START, END\n",
|
25 |
-
"from langgraph.checkpoint.memory import MemorySaver\n",
|
26 |
-
"from langchain_google_genai import ChatGoogleGenerativeAI\n",
|
27 |
-
"from tools import get_job, get_resume"
|
28 |
-
]
|
29 |
-
},
|
30 |
-
{
|
31 |
-
"cell_type": "code",
|
32 |
-
"execution_count": 2,
|
33 |
-
"metadata": {},
|
34 |
-
"outputs": [],
|
35 |
-
"source": [
|
36 |
-
"GOOGLE_API_KEY=\"AIzaSyA8eIxHBqeBWEP1g3t8bpvLxNaH5Lquemo\"\n",
|
37 |
-
"os.environ[\"GOOGLE_API_KEY\"] = GOOGLE_API_KEY\n"
|
38 |
-
]
|
39 |
-
},
|
40 |
-
{
|
41 |
-
"cell_type": "code",
|
42 |
-
"execution_count": 3,
|
43 |
-
"metadata": {},
|
44 |
-
"outputs": [],
|
45 |
-
"source": [
|
46 |
-
"tools = [get_job, get_resume]\n",
|
47 |
-
"llm = ChatGoogleGenerativeAI(model=\"gemini-1.5-flash-latest\").bind_tools(tools)"
|
48 |
-
]
|
49 |
-
},
|
50 |
-
{
|
51 |
-
"cell_type": "code",
|
52 |
-
"execution_count": 4,
|
53 |
-
"metadata": {},
|
54 |
-
"outputs": [],
|
55 |
-
"source": [
|
56 |
-
"def expert(state: MessagesState):\n",
|
57 |
-
" system_message = \"\"\"\n",
|
58 |
-
" You are a resume expert. You are tasked with improving the user resume based on a job description.\n",
|
59 |
-
" You can access the resume and job data using the provided tools.\n",
|
60 |
-
"\n",
|
61 |
-
" You must NEVER provide information that the user does not have.\n",
|
62 |
-
" These include, skills or experiences that are not in the resume. Do not make things up.\n",
|
63 |
-
" \"\"\"\n",
|
64 |
-
" messages = state[\"messages\"]\n",
|
65 |
-
" response = llm.invoke([system_message] + messages)\n",
|
66 |
-
" return {\"messages\": [response]}\n",
|
67 |
-
"\n",
|
68 |
-
"tool_node = ToolNode(tools)"
|
69 |
-
]
|
70 |
-
},
|
71 |
-
{
|
72 |
-
"cell_type": "code",
|
73 |
-
"execution_count": 5,
|
74 |
-
"metadata": {},
|
75 |
-
"outputs": [],
|
76 |
-
"source": [
|
77 |
-
"def should_continue(state: MessagesState) -> Literal[\"tools\", END]:\n",
|
78 |
-
" messages = state['messages']\n",
|
79 |
-
" last_message = messages[-1]\n",
|
80 |
-
" if last_message.tool_calls:\n",
|
81 |
-
" return \"tools\"\n",
|
82 |
-
" return END"
|
83 |
-
]
|
84 |
-
},
|
85 |
-
{
|
86 |
-
"cell_type": "code",
|
87 |
-
"execution_count": 6,
|
88 |
-
"metadata": {},
|
89 |
-
"outputs": [
|
90 |
-
{
|
91 |
-
"data": {
|
92 |
-
"text/plain": [
|
93 |
-
"<langgraph.graph.state.StateGraph at 0x70171ba751c0>"
|
94 |
-
]
|
95 |
-
},
|
96 |
-
"execution_count": 6,
|
97 |
-
"metadata": {},
|
98 |
-
"output_type": "execute_result"
|
99 |
-
}
|
100 |
-
],
|
101 |
-
"source": [
|
102 |
-
"graph = StateGraph(MessagesState)\n",
|
103 |
-
"\n",
|
104 |
-
"graph.add_node(\"expert\", expert)\n",
|
105 |
-
"graph.add_node(\"tools\", tool_node)\n",
|
106 |
-
"\n",
|
107 |
-
"graph.add_edge(START, \"expert\")\n",
|
108 |
-
"graph.add_conditional_edges(\"expert\", should_continue)\n",
|
109 |
-
"graph.add_edge(\"tools\", \"expert\")"
|
110 |
-
]
|
111 |
-
},
|
112 |
-
{
|
113 |
-
"cell_type": "code",
|
114 |
-
"execution_count": 7,
|
115 |
-
"metadata": {},
|
116 |
-
"outputs": [],
|
117 |
-
"source": [
|
118 |
-
"checkpointer = MemorySaver()\n",
|
119 |
-
"\n",
|
120 |
-
"app = graph.compile(checkpointer=checkpointer)"
|
121 |
-
]
|
122 |
-
},
|
123 |
-
{
|
124 |
-
"cell_type": "code",
|
125 |
-
"execution_count": 8,
|
126 |
-
"metadata": {},
|
127 |
-
"outputs": [
|
128 |
-
{
|
129 |
-
"name": "stdout",
|
130 |
-
"output_type": "stream",
|
131 |
-
"text": [
|
132 |
-
"I can access and process information from a resume and a job description using the `get_resume()` and `get_job()` functions. Based on the content of both, I can identify areas where the resume could be improved to better match the job description. However, I will only use information explicitly present in the provided resume and job description. I cannot add skills or experiences that are not already listed in the resume.\n",
|
133 |
-
"Based on the job title \"Software Engineer\" and the skills listed in the resume (\"Software Architecture\", \"System Optimization\", \"Team Mentorship\", \"Project Management\", \"API Development\", \"Continuous Integration/Continuous Deployment\", \"Bilingual\"), I can offer the following suggestions for improving the resume:\n",
|
134 |
-
"\n",
|
135 |
-
"* **Highlight relevant skills:** The resume should emphasize skills directly relevant to the \"Software Engineer\" role. For example, the \"API Development\" and \"Continuous Integration/Continuous Deployment\" skills should be prominently featured, perhaps with examples of projects where these skills were used.\n",
|
136 |
-
"\n",
|
137 |
-
"* **Quantify achievements:** Whenever possible, quantify accomplishments. Instead of simply listing \"Project Management,\" describe specific projects, the size of the team managed, and the positive outcomes achieved. Similarly, quantify successes in system optimization or software architecture.\n",
|
138 |
-
"\n",
|
139 |
-
"* **Tailor to the job description:** If the job description provides more detail (which it currently doesn't), further adjustments can be made to align the resume even more closely. For instance, if the job description emphasizes a specific programming language or framework, ensure that expertise in that area is highlighted.\n",
|
140 |
-
"\n",
|
141 |
-
"* **Consider adding a summary:** A brief summary at the beginning of the resume could highlight the most relevant skills and experience, immediately grabbing the reader's attention.\n",
|
142 |
-
"\n",
|
143 |
-
"I cannot make specific changes to the resume's content without knowing more about the specific projects and experiences. The suggestions above focus on improving the presentation and emphasizing the existing information to better match the job description.\n",
|
144 |
-
"Exiting...\n"
|
145 |
-
]
|
146 |
-
}
|
147 |
-
],
|
148 |
-
"source": [
|
149 |
-
"while True:\n",
|
150 |
-
" user_input = input(\">> \")\n",
|
151 |
-
" if user_input.lower() in [\"quit\", \"exit\"]:\n",
|
152 |
-
" print(\"Exiting...\")\n",
|
153 |
-
" break\n",
|
154 |
-
"\n",
|
155 |
-
" response = app.invoke(\n",
|
156 |
-
" {\"messages\": [HumanMessage(content=user_input)]},\n",
|
157 |
-
" config={\"configurable\": {\"thread_id\": 1}}\n",
|
158 |
-
" )\n",
|
159 |
-
"\n",
|
160 |
-
" print(response[\"messages\"][-1].content)"
|
161 |
-
]
|
162 |
-
},
|
163 |
-
{
|
164 |
-
"cell_type": "code",
|
165 |
-
"execution_count": null,
|
166 |
-
"metadata": {},
|
167 |
-
"outputs": [],
|
168 |
-
"source": []
|
169 |
-
}
|
170 |
-
],
|
171 |
-
"metadata": {
|
172 |
-
"kernelspec": {
|
173 |
-
"display_name": "paintrekbot",
|
174 |
-
"language": "python",
|
175 |
-
"name": "python3"
|
176 |
-
},
|
177 |
-
"language_info": {
|
178 |
-
"codemirror_mode": {
|
179 |
-
"name": "ipython",
|
180 |
-
"version": 3
|
181 |
-
},
|
182 |
-
"file_extension": ".py",
|
183 |
-
"mimetype": "text/x-python",
|
184 |
-
"name": "python",
|
185 |
-
"nbconvert_exporter": "python",
|
186 |
-
"pygments_lexer": "ipython3",
|
187 |
-
"version": "3.12.8"
|
188 |
-
}
|
189 |
-
},
|
190 |
-
"nbformat": 4,
|
191 |
-
"nbformat_minor": 2
|
192 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
example_agent_chatgpt.ipynb
DELETED
@@ -1,233 +0,0 @@
|
|
1 |
-
{
|
2 |
-
"cells": [
|
3 |
-
{
|
4 |
-
"cell_type": "code",
|
5 |
-
"execution_count": 21,
|
6 |
-
"metadata": {},
|
7 |
-
"outputs": [],
|
8 |
-
"source": [
|
9 |
-
"import os\n",
|
10 |
-
"from typing import Dict, List, Optional, TypedDict, Union\n",
|
11 |
-
"from langgraph.graph import Graph\n",
|
12 |
-
"from langchain_core.messages import HumanMessage, AIMessage\n",
|
13 |
-
"from pydantic import BaseModel, Field\n",
|
14 |
-
"import json\n",
|
15 |
-
"from typing import Annotated\n",
|
16 |
-
"from langchain_google_genai import ChatGoogleGenerativeAI\n"
|
17 |
-
]
|
18 |
-
},
|
19 |
-
{
|
20 |
-
"cell_type": "code",
|
21 |
-
"execution_count": 16,
|
22 |
-
"metadata": {},
|
23 |
-
"outputs": [],
|
24 |
-
"source": [
|
25 |
-
"GOOGLE_API_KEY=\"AIzaSyA8eIxHBqeBWEP1g3t8bpvLxNaH5Lquemo\"\n",
|
26 |
-
"os.environ[\"GOOGLE_API_KEY\"] = GOOGLE_API_KEY"
|
27 |
-
]
|
28 |
-
},
|
29 |
-
{
|
30 |
-
"cell_type": "code",
|
31 |
-
"execution_count": 17,
|
32 |
-
"metadata": {},
|
33 |
-
"outputs": [],
|
34 |
-
"source": [
|
35 |
-
"llm = ChatGoogleGenerativeAI(model=\"gemini-1.5-flash-latest\")"
|
36 |
-
]
|
37 |
-
},
|
38 |
-
{
|
39 |
-
"cell_type": "code",
|
40 |
-
"execution_count": 22,
|
41 |
-
"metadata": {},
|
42 |
-
"outputs": [],
|
43 |
-
"source": [
|
44 |
-
"# Define our data model\n",
|
45 |
-
"class Data(BaseModel):\n",
|
46 |
-
" name: str = Field(description=\"name\")\n",
|
47 |
-
" age: int = Field(description=\"age\")\n",
|
48 |
-
" hobby: Optional[List[str]] = Field(description=\"A list of hobbies.\")\n",
|
49 |
-
"\n",
|
50 |
-
"# Define state for the graph\n",
|
51 |
-
"class GraphState(TypedDict):\n",
|
52 |
-
" messages: List[Union[HumanMessage, AIMessage]]\n",
|
53 |
-
" collected_data: Dict\n",
|
54 |
-
" current_field: Optional[str]"
|
55 |
-
]
|
56 |
-
},
|
57 |
-
{
|
58 |
-
"cell_type": "code",
|
59 |
-
"execution_count": 26,
|
60 |
-
"metadata": {},
|
61 |
-
"outputs": [],
|
62 |
-
"source": [
|
63 |
-
"# Function to analyze messages and extract information\n",
|
64 |
-
"def extract_info(state: GraphState) -> Dict:\n",
|
65 |
-
" # Get the last user message\n",
|
66 |
-
" last_message = state[\"messages\"][-2].content if len(state[\"messages\"]) > 1 else \"\"\n",
|
67 |
-
" \n",
|
68 |
-
" # Prepare prompt for information extraction\n",
|
69 |
-
" extraction_prompt = f\"\"\"\n",
|
70 |
-
" Extract the following information from the user's message: '{last_message}'\n",
|
71 |
-
" If the information is present, format it as JSON matching these fields:\n",
|
72 |
-
" - name: person's name\n",
|
73 |
-
" - age: person's age (as integer)\n",
|
74 |
-
" - hobby: list of hobbies\n",
|
75 |
-
" \n",
|
76 |
-
" Only include fields that are clearly mentioned. Return 'null' for missing fields.\n",
|
77 |
-
" \"\"\"\n",
|
78 |
-
" \n",
|
79 |
-
" # Extract information using LLM\n",
|
80 |
-
" response = llm.invoke([HumanMessage(content=extraction_prompt)])\n",
|
81 |
-
" try:\n",
|
82 |
-
" extracted_info = json.loads(response.content)\n",
|
83 |
-
" # Update collected data with any new information\n",
|
84 |
-
" for key, value in extracted_info.items():\n",
|
85 |
-
" if value is not None:\n",
|
86 |
-
" state[\"collected_data\"][key] = value\n",
|
87 |
-
" except:\n",
|
88 |
-
" pass\n",
|
89 |
-
" \n",
|
90 |
-
" return {\"messages\": state[\"messages\"], \"collected_data\": state[\"collected_data\"], \"current_field\": state[\"current_field\"]}\n",
|
91 |
-
"\n",
|
92 |
-
"# Function to determine next field\n",
|
93 |
-
"def determine_next_field(state: Dict) -> Dict:\n",
|
94 |
-
" required_fields = {\"name\", \"age\"}\n",
|
95 |
-
" collected_fields = set(state[\"collected_data\"].keys())\n",
|
96 |
-
" missing_fields = required_fields - collected_fields\n",
|
97 |
-
" \n",
|
98 |
-
" if missing_fields:\n",
|
99 |
-
" state[\"current_field\"] = next(iter(missing_fields))\n",
|
100 |
-
" else:\n",
|
101 |
-
" state[\"current_field\"] = \"hobby\" if \"hobby\" not in state[\"collected_data\"] else None\n",
|
102 |
-
" \n",
|
103 |
-
" return state\n",
|
104 |
-
"\n",
|
105 |
-
"# Function to generate response\n",
|
106 |
-
"def generate_response(state: Dict) -> Dict:\n",
|
107 |
-
" if state[\"current_field\"] is None:\n",
|
108 |
-
" # All information collected\n",
|
109 |
-
" data = Data(**state[\"collected_data\"])\n",
|
110 |
-
" response = f\"Thank you! I've collected all the information:\\n{data.model_dump_json(indent=2)}\"\n",
|
111 |
-
" else:\n",
|
112 |
-
" # Ask for specific field\n",
|
113 |
-
" field_descriptions = {\n",
|
114 |
-
" \"name\": \"your name\",\n",
|
115 |
-
" \"age\": \"your age\",\n",
|
116 |
-
" \"hobby\": \"any hobbies you have\"\n",
|
117 |
-
" }\n",
|
118 |
-
" response = f\"Could you please tell me {field_descriptions[state['current_field']]}?\"\n",
|
119 |
-
" \n",
|
120 |
-
" state[\"messages\"].append(AIMessage(content=response))\n",
|
121 |
-
" return state\n",
|
122 |
-
"\n",
|
123 |
-
"# Create the graph\n",
|
124 |
-
"def create_chat_graph() -> Graph:\n",
|
125 |
-
" workflow = Graph()\n",
|
126 |
-
" \n",
|
127 |
-
" # Define the graph edges\n",
|
128 |
-
" workflow.add_node(\"extract_info\", extract_info)\n",
|
129 |
-
" workflow.add_node(\"determine_next_field\", determine_next_field)\n",
|
130 |
-
" workflow.add_node(\"generate_response\", generate_response)\n",
|
131 |
-
" \n",
|
132 |
-
" workflow.add_edge(\"extract_info\", \"determine_next_field\")\n",
|
133 |
-
" workflow.add_edge(\"determine_next_field\", \"generate_response\")\n",
|
134 |
-
" \n",
|
135 |
-
" # Set the entry point\n",
|
136 |
-
" workflow.set_entry_point(\"extract_info\")\n",
|
137 |
-
" workflow.set_finish_point(\"generate_response\")\n",
|
138 |
-
" \n",
|
139 |
-
" return workflow"
|
140 |
-
]
|
141 |
-
},
|
142 |
-
{
|
143 |
-
"cell_type": "code",
|
144 |
-
"execution_count": 27,
|
145 |
-
"metadata": {},
|
146 |
-
"outputs": [
|
147 |
-
{
|
148 |
-
"name": "stdout",
|
149 |
-
"output_type": "stream",
|
150 |
-
"text": [
|
151 |
-
"Assistant: Hi! I'd like to collect some information from you.\n"
|
152 |
-
]
|
153 |
-
},
|
154 |
-
{
|
155 |
-
"ename": "KeyError",
|
156 |
-
"evalue": "'generate_response'",
|
157 |
-
"output_type": "error",
|
158 |
-
"traceback": [
|
159 |
-
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
160 |
-
"\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)",
|
161 |
-
"Cell \u001b[0;32mIn[27], line 34\u001b[0m\n\u001b[1;32m 31\u001b[0m \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[1;32m 33\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;18m__name__\u001b[39m \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m__main__\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[0;32m---> 34\u001b[0m run_chat()\n",
|
162 |
-
"Cell \u001b[0;32mIn[27], line 27\u001b[0m, in \u001b[0;36mrun_chat\u001b[0;34m()\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[38;5;66;03m# Run the graph and update state\u001b[39;00m\n\u001b[1;32m 26\u001b[0m new_state \u001b[38;5;241m=\u001b[39m app\u001b[38;5;241m.\u001b[39minvoke(state)\n\u001b[0;32m---> 27\u001b[0m state \u001b[38;5;241m=\u001b[39m new_state[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mgenerate_response\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;66;03m# Get the final state from the last node\u001b[39;00m\n\u001b[1;32m 29\u001b[0m \u001b[38;5;66;03m# Check if all information is collected\u001b[39;00m\n\u001b[1;32m 30\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m state[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcurrent_field\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(state[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcollected_data\u001b[39m\u001b[38;5;124m\"\u001b[39m]) \u001b[38;5;241m>\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m2\u001b[39m: \u001b[38;5;66;03m# At least name and age collected\u001b[39;00m\n",
|
163 |
-
"\u001b[0;31mKeyError\u001b[0m: 'generate_response'"
|
164 |
-
]
|
165 |
-
}
|
166 |
-
],
|
167 |
-
"source": [
|
168 |
-
"# Example usage\n",
|
169 |
-
"def run_chat():\n",
|
170 |
-
" graph = create_chat_graph()\n",
|
171 |
-
" app = graph.compile()\n",
|
172 |
-
" \n",
|
173 |
-
" # Initialize state\n",
|
174 |
-
" state = {\n",
|
175 |
-
" \"messages\": [AIMessage(content=\"Hi! I'd like to collect some information from you.\")],\n",
|
176 |
-
" \"collected_data\": {},\n",
|
177 |
-
" \"current_field\": None\n",
|
178 |
-
" }\n",
|
179 |
-
" \n",
|
180 |
-
" while True:\n",
|
181 |
-
" # Print last message\n",
|
182 |
-
" print(\"Assistant:\", state[\"messages\"][-1].content)\n",
|
183 |
-
" \n",
|
184 |
-
" # Get user input\n",
|
185 |
-
" user_input = input(\"User: \")\n",
|
186 |
-
" if user_input.lower() in ['quit', 'exit']:\n",
|
187 |
-
" break\n",
|
188 |
-
" \n",
|
189 |
-
" # Update state with user message\n",
|
190 |
-
" state[\"messages\"].append(HumanMessage(content=user_input))\n",
|
191 |
-
" \n",
|
192 |
-
" # Run the graph and update state\n",
|
193 |
-
" new_state = app.invoke(state)\n",
|
194 |
-
" state = new_state[\"generate_response\"] # Get the final state from the last node\n",
|
195 |
-
" \n",
|
196 |
-
" # Check if all information is collected\n",
|
197 |
-
" if state[\"current_field\"] is None and len(state[\"collected_data\"]) >= 2: # At least name and age collected\n",
|
198 |
-
" break\n",
|
199 |
-
"\n",
|
200 |
-
"if __name__ == \"__main__\":\n",
|
201 |
-
" run_chat()"
|
202 |
-
]
|
203 |
-
},
|
204 |
-
{
|
205 |
-
"cell_type": "code",
|
206 |
-
"execution_count": null,
|
207 |
-
"metadata": {},
|
208 |
-
"outputs": [],
|
209 |
-
"source": []
|
210 |
-
}
|
211 |
-
],
|
212 |
-
"metadata": {
|
213 |
-
"kernelspec": {
|
214 |
-
"display_name": "paintrekbot",
|
215 |
-
"language": "python",
|
216 |
-
"name": "python3"
|
217 |
-
},
|
218 |
-
"language_info": {
|
219 |
-
"codemirror_mode": {
|
220 |
-
"name": "ipython",
|
221 |
-
"version": 3
|
222 |
-
},
|
223 |
-
"file_extension": ".py",
|
224 |
-
"mimetype": "text/x-python",
|
225 |
-
"name": "python",
|
226 |
-
"nbconvert_exporter": "python",
|
227 |
-
"pygments_lexer": "ipython3",
|
228 |
-
"version": "3.12.8"
|
229 |
-
}
|
230 |
-
},
|
231 |
-
"nbformat": 4,
|
232 |
-
"nbformat_minor": 2
|
233 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
modules/__pycache__/job.cpython-312.pyc
DELETED
Binary file (2.31 kB)
|
|
modules/__pycache__/resume.cpython-312.pyc
DELETED
Binary file (4.58 kB)
|
|
modules/data_class.py
ADDED
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing_extensions import TypedDict
|
2 |
+
from datetime import date
|
3 |
+
from typing import Annotated, Dict, Any
|
4 |
+
|
5 |
+
|
6 |
+
class PainLevels(TypedDict):
|
7 |
+
left_head: int
|
8 |
+
right_head: int
|
9 |
+
left_arm: int
|
10 |
+
left_hand: int
|
11 |
+
right_arm: int
|
12 |
+
right_hand: int
|
13 |
+
left_body_trunk: int
|
14 |
+
right_body_trunk: int
|
15 |
+
left_leg: int
|
16 |
+
left_foot: int
|
17 |
+
right_leg: int
|
18 |
+
right_foot: int
|
19 |
+
|
20 |
+
class Surgery(TypedDict):
|
21 |
+
surgery_name: str
|
22 |
+
time: date
|
23 |
+
|
24 |
+
class PatientID(TypedDict):
|
25 |
+
name: str
|
26 |
+
DOB: date
|
27 |
+
gender: str
|
28 |
+
contact: str
|
29 |
+
emergency_contact: str
|
30 |
+
|
31 |
+
class MainSymptom(TypedDict):
|
32 |
+
main_symptom: str
|
33 |
+
length: str
|
34 |
+
|
35 |
+
class Pain(TypedDict):
|
36 |
+
painlevel: PainLevels
|
37 |
+
pain_description: str
|
38 |
+
start_time: date
|
39 |
+
radiation: bool
|
40 |
+
triggers: str
|
41 |
+
symptom: str
|
42 |
+
|
43 |
+
class MedicalHistory(TypedDict):
|
44 |
+
medical_condition: str
|
45 |
+
first_time: date
|
46 |
+
surgery_history: list[Surgery]
|
47 |
+
medication: str
|
48 |
+
allergy: str
|
49 |
+
|
50 |
+
class FamilyHistory(TypedDict):
|
51 |
+
family_history: str
|
52 |
+
|
53 |
+
class SocialHistory(TypedDict):
|
54 |
+
occupation: str
|
55 |
+
smoke: bool
|
56 |
+
alcohol: bool
|
57 |
+
drug: bool
|
58 |
+
support_system: str
|
59 |
+
living_condition: str
|
60 |
+
|
61 |
+
class ReviewSystem(TypedDict):
|
62 |
+
weight_change: str
|
63 |
+
fever: bool
|
64 |
+
chill: bool
|
65 |
+
night_sweats: bool
|
66 |
+
sleep: str
|
67 |
+
gastrointestinal: str
|
68 |
+
urinary: str
|
69 |
+
|
70 |
+
class PainManagement(TypedDict):
|
71 |
+
pain_medication: str
|
72 |
+
specialist: bool
|
73 |
+
other_therapy: str
|
74 |
+
effectiveness: bool
|
75 |
+
|
76 |
+
class Functional(TypedDict):
|
77 |
+
life_quality: str
|
78 |
+
limit_activity: str
|
79 |
+
mood: str
|
80 |
+
|
81 |
+
class Plan(TypedDict):
|
82 |
+
goal: str
|
83 |
+
expectation: str
|
84 |
+
alternative_treatment_illness: str
|
85 |
+
|
86 |
+
class PatientData(TypedDict):
|
87 |
+
ID: PatientID
|
88 |
+
main: MainSymptom
|
89 |
+
"""pain: Pain
|
90 |
+
medical_hist: MedicalHistory
|
91 |
+
family_hist: FamilyHistory
|
92 |
+
social_hist: SocialHistory
|
93 |
+
review_system: ReviewSystem
|
94 |
+
pain_manage: PainManagement
|
95 |
+
functional: Functional
|
96 |
+
plan: Plan"""
|
97 |
+
|
98 |
+
class DataState(TypedDict):
|
99 |
+
"""State representing the patient's data status and conversation."""
|
100 |
+
messages: Annotated[list, add_messages]
|
101 |
+
data: Dict[str, PatientData]
|
102 |
+
finished: bool
|
modules/instrctions.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# The system instruction defines how the chatbot is expected to behave and includes
|
2 |
+
# rules for when to call different functions, as well as rules for the conversation, such
|
3 |
+
# as tone and what is permitted for discussion.
|
4 |
+
MEDICAL_INTAKE_SYSINT = (
|
5 |
+
"system",
|
6 |
+
"""You are MedAssist, an intelligent medical intake system designed to gather comprehensive patient information. You guide patients through a structured data collection process while maintaining a supportive and professional demeanor. Do NOT make things up!
|
7 |
+
|
8 |
+
Primary Data Collection Areas:
|
9 |
+
1. Patient Identification
|
10 |
+
- Basic information (name, DOB, gender, contact)
|
11 |
+
- Emergency contact information
|
12 |
+
|
13 |
+
2. Main Symptom Assessment
|
14 |
+
- Primary complaint
|
15 |
+
- Duration of symptoms
|
16 |
+
|
17 |
+
3. Pain Assessment
|
18 |
+
- Pain location using body mapping (head, arms, hands, trunk, legs, feet)
|
19 |
+
- Pain intensity (0-10 scale for each location)
|
20 |
+
- Pain characteristics and patterns
|
21 |
+
- Onset time
|
22 |
+
- Radiation patterns
|
23 |
+
- Triggering factors
|
24 |
+
- Associated symptoms
|
25 |
+
|
26 |
+
4. Medical History
|
27 |
+
- Existing medical conditions
|
28 |
+
- First occurrence date
|
29 |
+
- Surgical history with dates
|
30 |
+
- Current medications
|
31 |
+
- Allergies
|
32 |
+
|
33 |
+
5. Background Information
|
34 |
+
- Family medical history
|
35 |
+
- Social history (occupation, lifestyle factors)
|
36 |
+
- Living conditions and support system
|
37 |
+
|
38 |
+
6. System Review
|
39 |
+
- Recent health changes
|
40 |
+
- Sleep patterns
|
41 |
+
- Gastrointestinal and urinary function
|
42 |
+
- Constitutional symptoms (fever, chills, night sweats)
|
43 |
+
|
44 |
+
7. Pain Management History
|
45 |
+
- Current pain medications
|
46 |
+
- Specialist consultations
|
47 |
+
- Alternative therapies
|
48 |
+
- Treatment effectiveness
|
49 |
+
|
50 |
+
8. Functional Assessment
|
51 |
+
- Impact on quality of life
|
52 |
+
- Activity limitations
|
53 |
+
- Mood and emotional state
|
54 |
+
|
55 |
+
9. Treatment Planning
|
56 |
+
- Treatment goals
|
57 |
+
- Patient expectations
|
58 |
+
- Alternative treatment considerations
|
59 |
+
|
60 |
+
Data Management Commands:
|
61 |
+
- Use get_data to review current information
|
62 |
+
- Use add_to_data to append new information
|
63 |
+
- Use clear_data to reset the current session
|
64 |
+
- Use confirm_data to verify information with the patient
|
65 |
+
- Use insert_data to finalize the record
|
66 |
+
|
67 |
+
Guidelines:
|
68 |
+
1. Always introduce yourself and explain the intake process
|
69 |
+
2. Collect information systematically but adapt to the patient's natural flow of conversation
|
70 |
+
3. If patient starts with a specific concern, begin there but ensure all sections are eventually completed
|
71 |
+
4. Use conversational prompts to gather missing information
|
72 |
+
5. Validate pain levels on a 0-10 scale for each body location
|
73 |
+
6. Regularly summarize collected information for patient verification
|
74 |
+
7. Show empathy while maintaining professional boundaries
|
75 |
+
8. Focus on medical data collection while acknowledging patient concerns
|
76 |
+
9. Always confirm complete data set before finalizing
|
77 |
+
10. Thank the patient and provide clear closure when finished
|
78 |
+
|
79 |
+
Remember:
|
80 |
+
- Maintain medical privacy and confidentiality
|
81 |
+
- Stay within scope of data collection
|
82 |
+
- Be patient and clear in communication
|
83 |
+
- Double-check all information before final submission
|
84 |
+
- Adapt language to patient's comprehension level
|
85 |
+
- Document 'unknown' or 'not applicable' when appropriate
|
86 |
+
|
87 |
+
Always confirm_data with the patient before calling save_data, and address any corrections needed. Once save_data is complete, provide a summary and conclude the session."""
|
88 |
+
)
|
89 |
+
|
90 |
+
# This is the message with which the system opens the conversation.
|
91 |
+
WELCOME_MSG = "Welcome to the Paintrek world. I am a health assistant, an interactive clinical recording system. I will ask you questions about your pain and related symptoms and record your responses. I will then store this information securely. At any time, you can type `q` to quit."
|
modules/job.py
DELETED
@@ -1,45 +0,0 @@
|
|
1 |
-
from typing import List, Optional
|
2 |
-
from pydantic import BaseModel, Field
|
3 |
-
|
4 |
-
class Job(BaseModel):
|
5 |
-
title: str = Field(description="Job title or position.")
|
6 |
-
company: str = Field(description="The company name.")
|
7 |
-
location: Optional[str] = Field(description="Location of the job.")
|
8 |
-
salary: Optional[str] = Field(description="Salary range for the job.")
|
9 |
-
description: str = Field(description="Detailed job description.")
|
10 |
-
responsibilities: List[str] = Field(description="List of job responsibilities.")
|
11 |
-
benefits: Optional[List[str]] = Field(description="List of job benefits.")
|
12 |
-
employment_type: Optional[str] = Field(description="Type of employment (e.g., full-time, part-time).")
|
13 |
-
posted_date: Optional[str] = Field(description="Date when the job was posted.")
|
14 |
-
|
15 |
-
@classmethod
|
16 |
-
def mock(cls):
|
17 |
-
return cls(
|
18 |
-
title='Software Engineer',
|
19 |
-
company='Tech Corp',
|
20 |
-
location='San Francisco, CA',
|
21 |
-
salary='$100,000 - $120,000',
|
22 |
-
description='We are looking for a skilled Software Engineer to join our team.',
|
23 |
-
requirements=[
|
24 |
-
'Bachelor\'s degree in Computer Science or related field',
|
25 |
-
'3+ years of experience in software development',
|
26 |
-
'Proficiency in Python and JavaScript',
|
27 |
-
'Experience with Django and React',
|
28 |
-
'Strong problem-solving skills'
|
29 |
-
],
|
30 |
-
responsibilities=[
|
31 |
-
'Develop and maintain web applications',
|
32 |
-
'Collaborate with cross-functional teams',
|
33 |
-
'Write clean, scalable, and efficient code',
|
34 |
-
'Participate in code reviews',
|
35 |
-
'Troubleshoot and debug applications'
|
36 |
-
],
|
37 |
-
benefits=[
|
38 |
-
'Health insurance',
|
39 |
-
'401(k) matching',
|
40 |
-
'Paid time off',
|
41 |
-
'Flexible working hours'
|
42 |
-
],
|
43 |
-
employment_type='Full-time',
|
44 |
-
posted_date='2024-10-01'
|
45 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
modules/llm_in_use.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from dotenv import load_dotenv
|
3 |
+
from langchain_google_genai import ChatGoogleGenerativeAI
|
4 |
+
from langchain_openai import ChatOpenAI
|
5 |
+
from langchain_ollama import ChatOllama
|
6 |
+
|
7 |
+
def get_llm():
|
8 |
+
# Load environment variables from .env file
|
9 |
+
load_dotenv()
|
10 |
+
# Get the API key
|
11 |
+
# google_api_key = os.getenv("GOOGLE_API_KEY")
|
12 |
+
|
13 |
+
# llm = ChatOpenAI(temperature=0)
|
14 |
+
|
15 |
+
llm = ChatOllama(model="llama3.2:latest", temperature=0)
|
16 |
+
|
17 |
+
"""llm = ChatOpenAI(
|
18 |
+
api_key="ollama",
|
19 |
+
model="llama3.2:latest",
|
20 |
+
base_url="http://141.211.127.171/",
|
21 |
+
)"""
|
22 |
+
|
23 |
+
# llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash-latest")
|
24 |
+
|
25 |
+
return llm
|
modules/nodes.py
ADDED
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Standard library
from datetime import date
from typing import Literal

# Third-party
from langchain_core.messages.ai import AIMessage
from langgraph.graph import StateGraph, START, END

# Local — package-qualified like the sibling imports above, so this module
# also resolves when the app is run from the repository root (the bare
# `from llm_in_use import ...` / `from tools import ...` forms only worked
# when executed from inside the modules/ directory).
from modules.data_class import DataState
from modules.instrctions import MEDICAL_INTAKE_SYSINT, WELCOME_MSG
from modules.llm_in_use import get_llm
from modules.tools import patient_id, symptom, confirm_data, get_data, clear_data, save_data, data_node
|
9 |
+
|
10 |
+
|
11 |
+
# Shared chat model for every node in this graph.
llm = get_llm()

# Intake tools exposed to the LLM. Their bodies are empty stubs: the tool
# schema guides the model, while the actual state mutation is performed by
# the dedicated data node.
intake_tools = [patient_id, symptom, confirm_data, get_data, clear_data, save_data]

# The LLM needs to know about all of the tools, so specify everything here.
llm_with_tools = llm.bind_tools(intake_tools)
|
17 |
+
|
18 |
+
|
19 |
+
def human_node(state: DataState) -> DataState:
    """Display the last model message to the user, and receive the user's input.

    Side effects: prints to stdout and blocks on console input.

    Returns:
        The state with the user's turn appended to "messages"; "finished"
        is set to True when the user asks to quit.
    """
    last_msg = state["messages"][-1]
    print("Model:", last_msg.content)

    user_input = input("User: ")

    # Flag the conversation as over if the user asks to quit. Input is
    # normalised so "Quit", " EXIT ", etc. also end the session (previously
    # only exact lowercase tokens matched).
    if user_input.strip().lower() in {"q", "quit", "exit", "goodbye"}:
        state["finished"] = True

    # Append the user's turn; everything else in the state passes through.
    return state | {"messages": [("user", user_input)]}
|
32 |
+
|
33 |
+
|
34 |
+
def maybe_exit_human_node(state: DataState) -> Literal["chatbot_healthassistant", "__end__"]:
    """Conditional edge after the human turn.

    Ends the graph when the conversation has been flagged as finished;
    otherwise hands control back to the health-assistant chatbot node.
    """
    return END if state.get("finished", False) else "chatbot_healthassistant"
|
40 |
+
|
41 |
+
|
42 |
+
def chatbot_with_tools(state: DataState) -> DataState:
    """The chatbot with tools. A simple wrapper around the model's own chat interface."""
    # Placeholder intake record used when the state does not yet carry one.
    blank_id = {
        "name": "",
        "DOB": date(1900, 1, 1),  # sentinel date meaning "not provided yet"
        "gender": "",
        "contact": "",
        "emergency_contact": "",
    }
    blank_symptom = {
        "main_symptom": "",
        "length": "",
    }
    defaults = {"data": {"ID": blank_id, "symptom": blank_symptom}, "finished": False}

    # First turn gets a canned welcome; later turns go through the
    # tool-bound LLM with the intake system instruction prepended.
    if state["messages"]:
        reply = llm_with_tools.invoke([MEDICAL_INTAKE_SYSINT] + state["messages"])
    else:
        reply = AIMessage(content=WELCOME_MSG)

    # Defaults fill any missing keys, existing state wins, and only the
    # "messages" field is overridden with the new model output.
    return defaults | state | {"messages": [reply]}
|
65 |
+
|
66 |
+
|
67 |
+
def maybe_route_to_tools(state: DataState) -> str:
    """Route between chat and tool nodes if a tool call is made.

    Returns END when the intake is finished, "documenting" when the latest
    message carries tool calls, and "patient" otherwise.
    """
    if not (msgs := state.get("messages", [])):
        raise ValueError(f"No messages found when parsing state: {state}")

    # Only the most recent message decides the route.
    msg = msgs[-1]

    if state.get("finished", False):
        # When an order is placed, exit the app. The system instruction indicates
        # that the chatbot should say thanks and goodbye at this point, so we can exit
        # cleanly.
        return END

    elif hasattr(msg, "tool_calls") and len(msg.tool_calls) > 0:
        # Route to `tools` node for any automated tool calls first.
        # NOTE(review): `any(...)` is true whenever at least one call has a
        # non-empty name. If every tool-call name were empty, this function
        # would fall through and implicitly return None — confirm that case
        # cannot occur with the bound tools.
        if any(
            tool["name"] for tool in msg.tool_calls
        ):
            # return "datacreation"
            # else:
            return "documenting"

    else:
        # No tool calls and not finished: hand the turn back to the human.
        return "patient"
|
modules/resume.py
DELETED
@@ -1,95 +0,0 @@
|
|
1 |
-
from typing import List, Optional
|
2 |
-
from pydantic import BaseModel, Field, validator
|
3 |
-
|
4 |
-
class WorkExperience(BaseModel):
    """One position held by the candidate, as structured resume data.

    Field descriptions feed the LLM's structured-extraction schema.
    """

    job_title: str = Field(description="Job title or position.")
    company: str = Field(description="The company name.")
    experience: int = Field(description="Years of experience in the job.")
    responsibilities: List[str] = Field(description="List of responsibilities in the job.")
|
9 |
-
|
10 |
-
class Education(BaseModel):
    """One educational qualification, as structured resume data."""

    degree: str = Field(description="Degree obtained.")
    school: str = Field(description="The university name.")
    major: str = Field(description="Major subject.")
    year: Optional[int] = Field(description="Year of graduation.")

    # NOTE(review): pydantic-v1 style validator (`pre=True, always=True`);
    # in pydantic v2 this API is deprecated in favour of @field_validator.
    @validator('year', pre=True, always=True)
    def set_year(cls, v):
        # Coerce a missing graduation year to 0 so `year` is always an int.
        if v is None:
            return 0
        return v
|
21 |
-
|
22 |
-
class Resume(BaseModel):
    """Structured resume data.

    Field descriptions feed the LLM's structured-extraction schema.
    """

    name: str = Field(description="Name of the person")
    professional_summary: str = Field(description="Professional summary of the person.")
    work_experience: List[WorkExperience] = Field(description="List of work experiences held by the person.")
    education: List[Education] = Field(description="List of educational qualifications of the person.")
    skills: List[str] = Field(description="List of skills relevant to the jobs.")

    @classmethod
    def mock(cls):
        """Return a canned example resume for testing/demo without an LLM call."""
        return cls(
            name='Jeff',
            professional_summary='Innovative software engineer with 8+ years of experience in the tech industry. Senior Developer at Company X, Freelance Software Architect, and Junior Developer at Company Y. Proficient in developing scalable applications, optimizing system performance, and leading cross-functional teams. Fluent in English and Spanish.',
            work_experience=[
                WorkExperience(
                    job_title='Senior Developer',
                    company='Company X',
                    experience=5,
                    responsibilities=[
                        'Led the development of scalable web applications',
                        'Optimized system performance and reduced server costs',
                        'Mentored junior developers and conducted code reviews',
                        'Collaborated with product managers to define project requirements',
                        'Implemented CI/CD pipelines to streamline deployments',
                        'Developed RESTful APIs for mobile and web applications',
                        'Ensured application security and compliance with industry standards'
                    ]
                ),
                WorkExperience(
                    job_title='Freelance Software Architect',
                    company='Independent Consultant',
                    experience=2,
                    responsibilities=[
                        'Designed software architecture for various clients',
                        'Provided technical consultancy and project management',
                        'Developed custom software solutions to meet client needs',
                        'Conducted system analysis and performance tuning',
                        'Integrated third-party services and APIs',
                        'Created technical documentation and user manuals'
                    ]
                ),
                WorkExperience(
                    job_title='Junior Developer',
                    company='Company Y',
                    experience=1,
                    responsibilities=[
                        'Assisted in the development of web applications',
                        'Performed bug fixes and code maintenance',
                        'Collaborated with senior developers on project tasks',
                        'Participated in daily stand-ups and sprint planning',
                        'Wrote unit tests to ensure code quality',
                        'Contributed to open-source projects'
                    ]
                )
            ],
            education=[
                Education(
                    degree='B.Sc. Computer Science',
                    school='X University',
                    major='Computer Science',
                    year=1999
                )
            ],
            skills=[
                'Software Architecture',
                'System Optimization',
                'Team Mentorship',
                'Project Management',
                'API Development',
                'Continuous Integration/Continuous Deployment',
                'Bilingual'
            ]
        )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
modules/tools.py
ADDED
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from langchain_core.tools import tool
|
2 |
+
from modules.data_class import DataState
|
3 |
+
from langgraph.prebuilt import InjectedState
|
4 |
+
from langchain_core.messages.tool import ToolMessage
|
5 |
+
|
6 |
+
# These functions have no body; LangGraph does not allow @tools to update
|
7 |
+
# the conversation state, so you will implement a separate node to handle
|
8 |
+
# state updates. Using @tools is still very convenient for defining the tool
|
9 |
+
# schema, so empty functions have been defined that will be bound to the LLM
|
10 |
+
# but their implementation is deferred to data_node below.
|
11 |
+
|
12 |
+
@tool
def patient_id(name: str, DOB: str, gender: str, contact: str, emergency_contact: str) -> str:
    """Collecting basic patient identification information including:
    - Basic information (name, DOB, gender, contact details)
    - Emergency contact information

    Returns:
        The updated data with the patient ID information added.
    """
    # Intentionally empty: this @tool only defines the schema shown to the
    # LLM; the actual state update is performed in data_node.
|
21 |
+
|
22 |
+
@tool
def symptom(main_symptom: str, length: str) -> str:
    """Collecting patient's main symptom assessment including:
    - Primary symptoms
    - Duration of the symptoms

    Returns:
        The updated data with the patient's symptom information added.
    """
    # Intentionally empty: schema-only tool; the state update happens in
    # data_node.
|
31 |
+
|
32 |
+
|
33 |
+
@tool
def confirm_data() -> str:
    """Asks the patient if the data intake is correct.

    Returns:
        The user's free-text response.
    """
    # Intentionally empty: schema-only tool; the echo-and-confirm prompt is
    # implemented in data_node.
|
40 |
+
|
41 |
+
|
42 |
+
@tool
def get_data() -> str:
    """Returns the user's data collected so far. One item per line."""
    # Intentionally empty: schema-only tool; formatting happens in data_node.
|
45 |
+
|
46 |
+
|
47 |
+
@tool
def clear_data():
    """Removes all items from the patient's intake data."""
    # Intentionally empty: schema-only tool; the clearing happens in
    # data_node. (Docstring previously said "user's order" — copy-paste from
    # the café example this file was adapted from.)
|
50 |
+
|
51 |
+
|
52 |
+
@tool
def save_data() -> int:
    """Send the data into the database.

    Returns:
        The status of data saving, finished.
    """
    # Intentionally empty: schema-only tool; persistence is handled in
    # data_node.
|
59 |
+
|
60 |
+
|
61 |
+
def data_node(state: DataState) -> DataState:
    """Tool-execution node: applies the intake tool calls to the patient data.

    Reads the tool calls from the most recent message, mutates the "data"
    mapping accordingly, and answers every call with a ToolMessage so the
    LLM sees each result.

    Returns:
        State update with the outbound tool messages, the (possibly
        modified) data mapping, and whether the intake is finished.

    Raises:
        NotImplementedError: for a tool call this node does not know.
    """
    tool_msg = state.get("messages", [])[-1]
    # "data" is a nested mapping: {"ID": {...}, "symptom": {...}}.
    data = state.get("data", {})
    outbound_msgs = []
    data_saved = False

    def _format_data(d) -> str:
        """Render the nested intake data, one 'section.field: value' per line.

        Replaces the old `"\\n".join(data)`, which only emitted the
        top-level dict keys and lost every value.
        """
        lines = []
        for section, fields in d.items():
            if isinstance(fields, dict):
                for field, value in fields.items():
                    lines.append(f"{section}.{field}: {value}")
            else:
                lines.append(f"{section}: {fields}")
        return "\n".join(lines)

    for tool_call in tool_msg.tool_calls:
        name = tool_call["name"]

        if name == "patient_id":
            # Copy the identification fields into the "ID" section.
            args = tool_call["args"]
            data["ID"]["name"] = args["name"]
            data["ID"]["DOB"] = args["DOB"]
            data["ID"]["gender"] = args["gender"]
            data["ID"]["contact"] = args["contact"]
            data["ID"]["emergency_contact"] = args["emergency_contact"]
            response = _format_data(data)

        elif name == "symptom":
            args = tool_call["args"]
            data["symptom"]["main_symptom"] = args["main_symptom"]
            # Key is "length" to match the defaults built in the chatbot node
            # (was written to a stray "symptom_length" key before).
            data["symptom"]["length"] = args["length"]
            response = _format_data(data)

        elif name == "confirm_data":
            # Show the user exactly the data that was captured, so what they
            # confirm matches what gets saved — avoiding hallucination or
            # reality skew. In a real deployment this is where a review
            # screen would be connected.
            print("Your input data:")
            if not data:
                print("  (no items)")
            # Iterate over a rendered copy; the old `for data in data:` loop
            # shadowed and destroyed the state mapping.
            for line in _format_data(data).splitlines():
                print(f"  {line}")

            response = input("Is this correct? ")

        elif name == "get_data":
            response = _format_data(data) if data else "(no data)"

        elif name == "clear_data":
            data.clear()
            # ToolMessage content must be a string, not None.
            response = "(data cleared)"

        elif name == "save_data":
            # TODO(you!): persist to the real database.
            print("Saving the data!")
            data_saved = True
            # Previously this branch never set `response`, so the ToolMessage
            # below used a stale value (or raised NameError on the first call).
            response = "Data saved."

        else:
            raise NotImplementedError(f'Unknown tool call: {name}')

        # Record each tool result as a ToolMessage tied to its call id.
        outbound_msgs.append(
            ToolMessage(
                content=response,
                name=name,
                tool_call_id=tool_call["id"],
            )
        )

    return {"messages": outbound_msgs, "data": data, "finished": data_saved}
|
paintrek-chat-v1.ipynb
CHANGED
@@ -2,7 +2,7 @@
|
|
2 |
"cells": [
|
3 |
{
|
4 |
"cell_type": "code",
|
5 |
-
"execution_count":
|
6 |
"metadata": {
|
7 |
"execution": {
|
8 |
"iopub.execute_input": "2025-01-29T20:09:11.440091Z",
|
@@ -24,8 +24,8 @@
|
|
24 |
"\n",
|
25 |
"from langgraph.graph.message import add_messages\n",
|
26 |
"from langgraph.graph import StateGraph, START, END\n",
|
27 |
-
"
|
28 |
-
"
|
29 |
"from langchain_ollama import ChatOllama\n",
|
30 |
"\n",
|
31 |
"from IPython.display import Image, display\n",
|
@@ -38,18 +38,7 @@
|
|
38 |
},
|
39 |
{
|
40 |
"cell_type": "code",
|
41 |
-
"execution_count":
|
42 |
-
"metadata": {},
|
43 |
-
"outputs": [],
|
44 |
-
"source": [
|
45 |
-
"\n",
|
46 |
-
"def setup_ollama_api():\n",
|
47 |
-
" os.environ[\"OLLAMA_HOST\"] = \"http://141.211.127.171\""
|
48 |
-
]
|
49 |
-
},
|
50 |
-
{
|
51 |
-
"cell_type": "code",
|
52 |
-
"execution_count": 22,
|
53 |
"metadata": {
|
54 |
"execution": {
|
55 |
"iopub.execute_input": "2025-01-29T20:09:11.753044Z",
|
@@ -66,22 +55,31 @@
|
|
66 |
"# setup_openai_api()\n",
|
67 |
"# llm = ChatOpenAI(temperature=0)\n",
|
68 |
"\n",
|
69 |
-
"setup_ollama_api()\n",
|
70 |
-
"llm = ChatOllama(model=\"llama3.2:latest\", temperature=0)\n"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
71 |
]
|
72 |
},
|
73 |
{
|
74 |
"cell_type": "code",
|
75 |
-
"execution_count":
|
76 |
"metadata": {},
|
77 |
"outputs": [
|
78 |
{
|
79 |
"data": {
|
80 |
"text/plain": [
|
81 |
-
"AIMessage(content='How can I
|
82 |
]
|
83 |
},
|
84 |
-
"execution_count":
|
85 |
"metadata": {},
|
86 |
"output_type": "execute_result"
|
87 |
}
|
@@ -92,7 +90,7 @@
|
|
92 |
},
|
93 |
{
|
94 |
"cell_type": "code",
|
95 |
-
"execution_count":
|
96 |
"metadata": {
|
97 |
"execution": {
|
98 |
"iopub.execute_input": "2025-01-29T20:09:11.763927Z",
|
@@ -207,7 +205,7 @@
|
|
207 |
},
|
208 |
{
|
209 |
"cell_type": "code",
|
210 |
-
"execution_count":
|
211 |
"metadata": {
|
212 |
"execution": {
|
213 |
"iopub.execute_input": "2025-01-29T20:09:11.781576Z",
|
@@ -318,7 +316,7 @@
|
|
318 |
},
|
319 |
{
|
320 |
"cell_type": "code",
|
321 |
-
"execution_count":
|
322 |
"metadata": {
|
323 |
"execution": {
|
324 |
"iopub.execute_input": "2025-01-29T20:09:11.828125Z",
|
@@ -358,7 +356,7 @@
|
|
358 |
},
|
359 |
{
|
360 |
"cell_type": "code",
|
361 |
-
"execution_count":
|
362 |
"metadata": {},
|
363 |
"outputs": [],
|
364 |
"source": [
|
@@ -390,7 +388,7 @@
|
|
390 |
},
|
391 |
{
|
392 |
"cell_type": "code",
|
393 |
-
"execution_count":
|
394 |
"metadata": {
|
395 |
"execution": {
|
396 |
"iopub.execute_input": "2025-01-29T20:09:11.858218Z",
|
@@ -556,7 +554,23 @@
|
|
556 |
"\n",
|
557 |
"def chatbot_with_tools(state: DataState) -> DataState:\n",
|
558 |
" \"\"\"The chatbot with tools. A simple wrapper around the model's own chat interface.\"\"\"\n",
|
559 |
-
" defaults = {\"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
560 |
"\n",
|
561 |
" if state[\"messages\"]:\n",
|
562 |
" new_output = llm_with_tools.invoke([MEDICAL_INTAKE_SYSINT] + state[\"messages\"])\n",
|
@@ -596,7 +610,7 @@
|
|
596 |
},
|
597 |
{
|
598 |
"cell_type": "code",
|
599 |
-
"execution_count":
|
600 |
"metadata": {
|
601 |
"execution": {
|
602 |
"iopub.execute_input": "2025-01-29T20:09:11.906458Z",
|
@@ -616,7 +630,7 @@
|
|
616 |
"<IPython.core.display.Image object>"
|
617 |
]
|
618 |
},
|
619 |
-
"execution_count":
|
620 |
"metadata": {},
|
621 |
"output_type": "execute_result"
|
622 |
}
|
@@ -669,7 +683,7 @@
|
|
669 |
},
|
670 |
{
|
671 |
"cell_type": "code",
|
672 |
-
"execution_count":
|
673 |
"metadata": {
|
674 |
"execution": {
|
675 |
"iopub.execute_input": "2025-01-29T20:09:38.185616Z",
|
@@ -690,53 +704,23 @@
|
|
690 |
]
|
691 |
},
|
692 |
{
|
693 |
-
"ename": "
|
694 |
-
"evalue": "
|
695 |
"output_type": "error",
|
696 |
"traceback": [
|
697 |
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
698 |
-
"\u001b[0;
|
699 |
-
"
|
700 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpx/_transports/default.py:250\u001b[0m, in \u001b[0;36mHTTPTransport.handle_request\u001b[0;34m(self, request)\u001b[0m\n\u001b[1;32m 249\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m map_httpcore_exceptions():\n\u001b[0;32m--> 250\u001b[0m resp \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pool\u001b[38;5;241m.\u001b[39mhandle_request(req)\n\u001b[1;32m 252\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(resp\u001b[38;5;241m.\u001b[39mstream, typing\u001b[38;5;241m.\u001b[39mIterable)\n",
|
701 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py:256\u001b[0m, in \u001b[0;36mConnectionPool.handle_request\u001b[0;34m(self, request)\u001b[0m\n\u001b[1;32m 255\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_close_connections(closing)\n\u001b[0;32m--> 256\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exc \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 258\u001b[0m \u001b[38;5;66;03m# Return the response. Note that in this case we still have to manage\u001b[39;00m\n\u001b[1;32m 259\u001b[0m \u001b[38;5;66;03m# the point at which the response is closed.\u001b[39;00m\n",
|
702 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py:236\u001b[0m, in \u001b[0;36mConnectionPool.handle_request\u001b[0;34m(self, request)\u001b[0m\n\u001b[1;32m 234\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 235\u001b[0m \u001b[38;5;66;03m# Send the request on the assigned connection.\u001b[39;00m\n\u001b[0;32m--> 236\u001b[0m response \u001b[38;5;241m=\u001b[39m connection\u001b[38;5;241m.\u001b[39mhandle_request(\n\u001b[1;32m 237\u001b[0m pool_request\u001b[38;5;241m.\u001b[39mrequest\n\u001b[1;32m 238\u001b[0m )\n\u001b[1;32m 239\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m ConnectionNotAvailable:\n\u001b[1;32m 240\u001b[0m \u001b[38;5;66;03m# In some cases a connection may initially be available to\u001b[39;00m\n\u001b[1;32m 241\u001b[0m \u001b[38;5;66;03m# handle a request, but then become unavailable.\u001b[39;00m\n\u001b[1;32m 242\u001b[0m \u001b[38;5;66;03m#\u001b[39;00m\n\u001b[1;32m 243\u001b[0m \u001b[38;5;66;03m# In this case we clear the connection and try again.\u001b[39;00m\n",
|
703 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpcore/_sync/connection.py:101\u001b[0m, in \u001b[0;36mHTTPConnection.handle_request\u001b[0;34m(self, request)\u001b[0m\n\u001b[1;32m 100\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_connect_failed \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m--> 101\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exc\n\u001b[1;32m 103\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_connection\u001b[38;5;241m.\u001b[39mhandle_request(request)\n",
|
704 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpcore/_sync/connection.py:78\u001b[0m, in \u001b[0;36mHTTPConnection.handle_request\u001b[0;34m(self, request)\u001b[0m\n\u001b[1;32m 77\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_connection \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m---> 78\u001b[0m stream \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_connect(request)\n\u001b[1;32m 80\u001b[0m ssl_object \u001b[38;5;241m=\u001b[39m stream\u001b[38;5;241m.\u001b[39mget_extra_info(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mssl_object\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
|
705 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpcore/_sync/connection.py:124\u001b[0m, in \u001b[0;36mHTTPConnection._connect\u001b[0;34m(self, request)\u001b[0m\n\u001b[1;32m 123\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m Trace(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mconnect_tcp\u001b[39m\u001b[38;5;124m\"\u001b[39m, logger, request, kwargs) \u001b[38;5;28;01mas\u001b[39;00m trace:\n\u001b[0;32m--> 124\u001b[0m stream \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_network_backend\u001b[38;5;241m.\u001b[39mconnect_tcp(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 125\u001b[0m trace\u001b[38;5;241m.\u001b[39mreturn_value \u001b[38;5;241m=\u001b[39m stream\n",
|
706 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpcore/_backends/sync.py:207\u001b[0m, in \u001b[0;36mSyncBackend.connect_tcp\u001b[0;34m(self, host, port, timeout, local_address, socket_options)\u001b[0m\n\u001b[1;32m 202\u001b[0m exc_map: ExceptionMapping \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m 203\u001b[0m socket\u001b[38;5;241m.\u001b[39mtimeout: ConnectTimeout,\n\u001b[1;32m 204\u001b[0m \u001b[38;5;167;01mOSError\u001b[39;00m: ConnectError,\n\u001b[1;32m 205\u001b[0m }\n\u001b[0;32m--> 207\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m map_exceptions(exc_map):\n\u001b[1;32m 208\u001b[0m sock \u001b[38;5;241m=\u001b[39m socket\u001b[38;5;241m.\u001b[39mcreate_connection(\n\u001b[1;32m 209\u001b[0m address,\n\u001b[1;32m 210\u001b[0m timeout,\n\u001b[1;32m 211\u001b[0m source_address\u001b[38;5;241m=\u001b[39msource_address,\n\u001b[1;32m 212\u001b[0m )\n",
|
707 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/contextlib.py:158\u001b[0m, in \u001b[0;36m_GeneratorContextManager.__exit__\u001b[0;34m(self, typ, value, traceback)\u001b[0m\n\u001b[1;32m 157\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 158\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgen\u001b[38;5;241m.\u001b[39mthrow(value)\n\u001b[1;32m 159\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mStopIteration\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[1;32m 160\u001b[0m \u001b[38;5;66;03m# Suppress StopIteration *unless* it's the same exception that\u001b[39;00m\n\u001b[1;32m 161\u001b[0m \u001b[38;5;66;03m# was passed to throw(). This prevents a StopIteration\u001b[39;00m\n\u001b[1;32m 162\u001b[0m \u001b[38;5;66;03m# raised inside the \"with\" statement from being suppressed.\u001b[39;00m\n",
|
708 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpcore/_exceptions.py:14\u001b[0m, in \u001b[0;36mmap_exceptions\u001b[0;34m(map)\u001b[0m\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(exc, from_exc):\n\u001b[0;32m---> 14\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m to_exc(exc) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mexc\u001b[39;00m\n\u001b[1;32m 15\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m\n",
|
709 |
-
"\u001b[0;31mConnectError\u001b[0m: [Errno 111] Connection refused",
|
710 |
-
"\nThe above exception was the direct cause of the following exception:\n",
|
711 |
-
"\u001b[0;31mConnectError\u001b[0m Traceback (most recent call last)",
|
712 |
-
"Cell \u001b[0;32mIn[25], line 6\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# The default recursion limit for traversing nodes is 25 - setting it higher\u001b[39;00m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;66;03m# means you can try a more complex order with multiple steps and round-trips.\u001b[39;00m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;66;03m# config = {\"recursion_limit\": 500}\u001b[39;00m\n\u001b[1;32m 4\u001b[0m \n\u001b[1;32m 5\u001b[0m \u001b[38;5;66;03m# Uncomment this line to execute the graph:\u001b[39;00m\n\u001b[0;32m----> 6\u001b[0m state \u001b[38;5;241m=\u001b[39m graph_with_order_tools\u001b[38;5;241m.\u001b[39minvoke({\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmessages\u001b[39m\u001b[38;5;124m\"\u001b[39m: []})\n",
|
713 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/pregel/__init__.py:1961\u001b[0m, in \u001b[0;36mPregel.invoke\u001b[0;34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug, **kwargs)\u001b[0m\n\u001b[1;32m 1959\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1960\u001b[0m chunks \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m-> 1961\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m chunk \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstream(\n\u001b[1;32m 1962\u001b[0m \u001b[38;5;28minput\u001b[39m,\n\u001b[1;32m 1963\u001b[0m config,\n\u001b[1;32m 1964\u001b[0m stream_mode\u001b[38;5;241m=\u001b[39mstream_mode,\n\u001b[1;32m 1965\u001b[0m output_keys\u001b[38;5;241m=\u001b[39moutput_keys,\n\u001b[1;32m 1966\u001b[0m interrupt_before\u001b[38;5;241m=\u001b[39minterrupt_before,\n\u001b[1;32m 1967\u001b[0m interrupt_after\u001b[38;5;241m=\u001b[39minterrupt_after,\n\u001b[1;32m 1968\u001b[0m debug\u001b[38;5;241m=\u001b[39mdebug,\n\u001b[1;32m 1969\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 1970\u001b[0m ):\n\u001b[1;32m 1971\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stream_mode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mvalues\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 1972\u001b[0m latest \u001b[38;5;241m=\u001b[39m chunk\n",
|
714 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/pregel/__init__.py:1670\u001b[0m, in \u001b[0;36mPregel.stream\u001b[0;34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug, subgraphs)\u001b[0m\n\u001b[1;32m 1664\u001b[0m \u001b[38;5;66;03m# Similarly to Bulk Synchronous Parallel / Pregel model\u001b[39;00m\n\u001b[1;32m 1665\u001b[0m \u001b[38;5;66;03m# computation proceeds in steps, while there are channel updates.\u001b[39;00m\n\u001b[1;32m 1666\u001b[0m \u001b[38;5;66;03m# Channel updates from step N are only visible in step N+1\u001b[39;00m\n\u001b[1;32m 1667\u001b[0m \u001b[38;5;66;03m# channels are guaranteed to be immutable for the duration of the step,\u001b[39;00m\n\u001b[1;32m 1668\u001b[0m \u001b[38;5;66;03m# with channel updates applied only at the transition between steps.\u001b[39;00m\n\u001b[1;32m 1669\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m loop\u001b[38;5;241m.\u001b[39mtick(input_keys\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39minput_channels):\n\u001b[0;32m-> 1670\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m _ \u001b[38;5;129;01min\u001b[39;00m runner\u001b[38;5;241m.\u001b[39mtick(\n\u001b[1;32m 1671\u001b[0m loop\u001b[38;5;241m.\u001b[39mtasks\u001b[38;5;241m.\u001b[39mvalues(),\n\u001b[1;32m 1672\u001b[0m timeout\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstep_timeout,\n\u001b[1;32m 1673\u001b[0m retry_policy\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mretry_policy,\n\u001b[1;32m 1674\u001b[0m get_waiter\u001b[38;5;241m=\u001b[39mget_waiter,\n\u001b[1;32m 1675\u001b[0m ):\n\u001b[1;32m 1676\u001b[0m \u001b[38;5;66;03m# emit output\u001b[39;00m\n\u001b[1;32m 1677\u001b[0m \u001b[38;5;28;01myield from\u001b[39;00m output()\n\u001b[1;32m 1678\u001b[0m \u001b[38;5;66;03m# emit output\u001b[39;00m\n",
|
715 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/pregel/runner.py:230\u001b[0m, in \u001b[0;36mPregelRunner.tick\u001b[0;34m(self, tasks, reraise, timeout, retry_policy, get_waiter)\u001b[0m\n\u001b[1;32m 228\u001b[0m t \u001b[38;5;241m=\u001b[39m tasks[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 229\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 230\u001b[0m run_with_retry(\n\u001b[1;32m 231\u001b[0m t,\n\u001b[1;32m 232\u001b[0m retry_policy,\n\u001b[1;32m 233\u001b[0m configurable\u001b[38;5;241m=\u001b[39m{\n\u001b[1;32m 234\u001b[0m CONFIG_KEY_SEND: partial(writer, t),\n\u001b[1;32m 235\u001b[0m CONFIG_KEY_CALL: partial(call, t),\n\u001b[1;32m 236\u001b[0m },\n\u001b[1;32m 237\u001b[0m )\n\u001b[1;32m 238\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcommit(t, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[1;32m 239\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n",
|
716 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/pregel/retry.py:40\u001b[0m, in \u001b[0;36mrun_with_retry\u001b[0;34m(task, retry_policy, configurable)\u001b[0m\n\u001b[1;32m 38\u001b[0m task\u001b[38;5;241m.\u001b[39mwrites\u001b[38;5;241m.\u001b[39mclear()\n\u001b[1;32m 39\u001b[0m \u001b[38;5;66;03m# run the task\u001b[39;00m\n\u001b[0;32m---> 40\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m task\u001b[38;5;241m.\u001b[39mproc\u001b[38;5;241m.\u001b[39minvoke(task\u001b[38;5;241m.\u001b[39minput, config)\n\u001b[1;32m 41\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m ParentCommand \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[1;32m 42\u001b[0m ns: \u001b[38;5;28mstr\u001b[39m \u001b[38;5;241m=\u001b[39m config[CONF][CONFIG_KEY_CHECKPOINT_NS]\n",
|
717 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/utils/runnable.py:462\u001b[0m, in \u001b[0;36mRunnableSeq.invoke\u001b[0;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[1;32m 458\u001b[0m config \u001b[38;5;241m=\u001b[39m patch_config(\n\u001b[1;32m 459\u001b[0m config, callbacks\u001b[38;5;241m=\u001b[39mrun_manager\u001b[38;5;241m.\u001b[39mget_child(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mseq:step:\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mi\u001b[38;5;250m \u001b[39m\u001b[38;5;241m+\u001b[39m\u001b[38;5;250m \u001b[39m\u001b[38;5;241m1\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 460\u001b[0m )\n\u001b[1;32m 461\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m i \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[0;32m--> 462\u001b[0m \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m step\u001b[38;5;241m.\u001b[39minvoke(\u001b[38;5;28minput\u001b[39m, config, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 463\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 464\u001b[0m \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m step\u001b[38;5;241m.\u001b[39minvoke(\u001b[38;5;28minput\u001b[39m, config)\n",
|
718 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/utils/runnable.py:226\u001b[0m, in \u001b[0;36mRunnableCallable.invoke\u001b[0;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[1;32m 224\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 225\u001b[0m context\u001b[38;5;241m.\u001b[39mrun(_set_config_context, config)\n\u001b[0;32m--> 226\u001b[0m ret \u001b[38;5;241m=\u001b[39m context\u001b[38;5;241m.\u001b[39mrun(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfunc, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 227\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(ret, Runnable) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrecurse:\n\u001b[1;32m 228\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ret\u001b[38;5;241m.\u001b[39minvoke(\u001b[38;5;28minput\u001b[39m, config)\n",
|
719 |
-
"Cell \u001b[0;32mIn[
|
720 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/
|
721 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/
|
722 |
-
"
|
723 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py:690\u001b[0m, in \u001b[0;36mBaseChatModel.generate\u001b[0;34m(self, messages, stop, callbacks, tags, metadata, run_name, run_id, **kwargs)\u001b[0m\n\u001b[1;32m 687\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i, m \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28menumerate\u001b[39m(messages):\n\u001b[1;32m 688\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 689\u001b[0m results\u001b[38;5;241m.\u001b[39mappend(\n\u001b[0;32m--> 690\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_generate_with_cache(\n\u001b[1;32m 691\u001b[0m m,\n\u001b[1;32m 692\u001b[0m stop\u001b[38;5;241m=\u001b[39mstop,\n\u001b[1;32m 693\u001b[0m run_manager\u001b[38;5;241m=\u001b[39mrun_managers[i] \u001b[38;5;28;01mif\u001b[39;00m run_managers \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m 694\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 695\u001b[0m )\n\u001b[1;32m 696\u001b[0m )\n\u001b[1;32m 697\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m 698\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m run_managers:\n",
|
724 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py:925\u001b[0m, in \u001b[0;36mBaseChatModel._generate_with_cache\u001b[0;34m(self, messages, stop, run_manager, **kwargs)\u001b[0m\n\u001b[1;32m 923\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 924\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m inspect\u001b[38;5;241m.\u001b[39msignature(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_generate)\u001b[38;5;241m.\u001b[39mparameters\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrun_manager\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[0;32m--> 925\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_generate(\n\u001b[1;32m 926\u001b[0m messages, stop\u001b[38;5;241m=\u001b[39mstop, run_manager\u001b[38;5;241m=\u001b[39mrun_manager, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m 927\u001b[0m )\n\u001b[1;32m 928\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 929\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_generate(messages, stop\u001b[38;5;241m=\u001b[39mstop, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n",
|
725 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langchain_ollama/chat_models.py:701\u001b[0m, in \u001b[0;36mChatOllama._generate\u001b[0;34m(self, messages, stop, run_manager, **kwargs)\u001b[0m\n\u001b[1;32m 694\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_generate\u001b[39m(\n\u001b[1;32m 695\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 696\u001b[0m messages: List[BaseMessage],\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 699\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[1;32m 700\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m ChatResult:\n\u001b[0;32m--> 701\u001b[0m final_chunk \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_chat_stream_with_aggregation(\n\u001b[1;32m 702\u001b[0m messages, stop, run_manager, verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mverbose, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m 703\u001b[0m )\n\u001b[1;32m 704\u001b[0m generation_info \u001b[38;5;241m=\u001b[39m final_chunk\u001b[38;5;241m.\u001b[39mgeneration_info\n\u001b[1;32m 705\u001b[0m chat_generation \u001b[38;5;241m=\u001b[39m ChatGeneration(\n\u001b[1;32m 706\u001b[0m message\u001b[38;5;241m=\u001b[39mAIMessage(\n\u001b[1;32m 707\u001b[0m content\u001b[38;5;241m=\u001b[39mfinal_chunk\u001b[38;5;241m.\u001b[39mtext,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 711\u001b[0m generation_info\u001b[38;5;241m=\u001b[39mgeneration_info,\n\u001b[1;32m 712\u001b[0m )\n",
|
726 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langchain_ollama/chat_models.py:602\u001b[0m, in \u001b[0;36mChatOllama._chat_stream_with_aggregation\u001b[0;34m(self, messages, stop, run_manager, verbose, **kwargs)\u001b[0m\n\u001b[1;32m 593\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_chat_stream_with_aggregation\u001b[39m(\n\u001b[1;32m 594\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m 595\u001b[0m messages: List[BaseMessage],\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 599\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[1;32m 600\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m ChatGenerationChunk:\n\u001b[1;32m 601\u001b[0m final_chunk \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m--> 602\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m stream_resp \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_create_chat_stream(messages, stop, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs):\n\u001b[1;32m 603\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(stream_resp, \u001b[38;5;28mstr\u001b[39m):\n\u001b[1;32m 604\u001b[0m chunk \u001b[38;5;241m=\u001b[39m ChatGenerationChunk(\n\u001b[1;32m 605\u001b[0m message\u001b[38;5;241m=\u001b[39mAIMessageChunk(\n\u001b[1;32m 606\u001b[0m content\u001b[38;5;241m=\u001b[39m(\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 619\u001b[0m ),\n\u001b[1;32m 620\u001b[0m )\n",
|
727 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langchain_ollama/chat_models.py:589\u001b[0m, in \u001b[0;36mChatOllama._create_chat_stream\u001b[0;34m(self, messages, stop, **kwargs)\u001b[0m\n\u001b[1;32m 586\u001b[0m chat_params \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_chat_params(messages, stop, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 588\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m chat_params[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mstream\u001b[39m\u001b[38;5;124m\"\u001b[39m]:\n\u001b[0;32m--> 589\u001b[0m \u001b[38;5;28;01myield from\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_client\u001b[38;5;241m.\u001b[39mchat(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mchat_params)\n\u001b[1;32m 590\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 591\u001b[0m \u001b[38;5;28;01myield\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_client\u001b[38;5;241m.\u001b[39mchat(\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mchat_params)\n",
|
728 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/ollama/_client.py:163\u001b[0m, in \u001b[0;36mClient._request.<locals>.inner\u001b[0;34m()\u001b[0m\n\u001b[1;32m 162\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21minner\u001b[39m():\n\u001b[0;32m--> 163\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_client\u001b[38;5;241m.\u001b[39mstream(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs) \u001b[38;5;28;01mas\u001b[39;00m r:\n\u001b[1;32m 164\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 165\u001b[0m r\u001b[38;5;241m.\u001b[39mraise_for_status()\n",
|
729 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/contextlib.py:137\u001b[0m, in \u001b[0;36m_GeneratorContextManager.__enter__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 135\u001b[0m \u001b[38;5;28;01mdel\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39margs, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mkwds, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfunc\n\u001b[1;32m 136\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 137\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mnext\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgen)\n\u001b[1;32m 138\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mStopIteration\u001b[39;00m:\n\u001b[1;32m 139\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mgenerator didn\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mt yield\u001b[39m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n",
|
730 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpx/_client.py:868\u001b[0m, in \u001b[0;36mClient.stream\u001b[0;34m(self, method, url, content, data, files, json, params, headers, cookies, auth, follow_redirects, timeout, extensions)\u001b[0m\n\u001b[1;32m 845\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 846\u001b[0m \u001b[38;5;124;03mAlternative to `httpx.request()` that streams the response body\u001b[39;00m\n\u001b[1;32m 847\u001b[0m \u001b[38;5;124;03minstead of loading it into memory at once.\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 853\u001b[0m \u001b[38;5;124;03m[0]: /quickstart#streaming-responses\u001b[39;00m\n\u001b[1;32m 854\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 855\u001b[0m request \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbuild_request(\n\u001b[1;32m 856\u001b[0m method\u001b[38;5;241m=\u001b[39mmethod,\n\u001b[1;32m 857\u001b[0m url\u001b[38;5;241m=\u001b[39murl,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 866\u001b[0m extensions\u001b[38;5;241m=\u001b[39mextensions,\n\u001b[1;32m 867\u001b[0m )\n\u001b[0;32m--> 868\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msend(\n\u001b[1;32m 869\u001b[0m request\u001b[38;5;241m=\u001b[39mrequest,\n\u001b[1;32m 870\u001b[0m auth\u001b[38;5;241m=\u001b[39mauth,\n\u001b[1;32m 871\u001b[0m follow_redirects\u001b[38;5;241m=\u001b[39mfollow_redirects,\n\u001b[1;32m 872\u001b[0m stream\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m,\n\u001b[1;32m 873\u001b[0m )\n\u001b[1;32m 874\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 875\u001b[0m \u001b[38;5;28;01myield\u001b[39;00m response\n",
|
731 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpx/_client.py:914\u001b[0m, in \u001b[0;36mClient.send\u001b[0;34m(self, request, stream, auth, follow_redirects)\u001b[0m\n\u001b[1;32m 910\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_set_timeout(request)\n\u001b[1;32m 912\u001b[0m auth \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_build_request_auth(request, auth)\n\u001b[0;32m--> 914\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_send_handling_auth(\n\u001b[1;32m 915\u001b[0m request,\n\u001b[1;32m 916\u001b[0m auth\u001b[38;5;241m=\u001b[39mauth,\n\u001b[1;32m 917\u001b[0m follow_redirects\u001b[38;5;241m=\u001b[39mfollow_redirects,\n\u001b[1;32m 918\u001b[0m history\u001b[38;5;241m=\u001b[39m[],\n\u001b[1;32m 919\u001b[0m )\n\u001b[1;32m 920\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 921\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m stream:\n",
|
732 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpx/_client.py:942\u001b[0m, in \u001b[0;36mClient._send_handling_auth\u001b[0;34m(self, request, auth, follow_redirects, history)\u001b[0m\n\u001b[1;32m 939\u001b[0m request \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mnext\u001b[39m(auth_flow)\n\u001b[1;32m 941\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[0;32m--> 942\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_send_handling_redirects(\n\u001b[1;32m 943\u001b[0m request,\n\u001b[1;32m 944\u001b[0m follow_redirects\u001b[38;5;241m=\u001b[39mfollow_redirects,\n\u001b[1;32m 945\u001b[0m history\u001b[38;5;241m=\u001b[39mhistory,\n\u001b[1;32m 946\u001b[0m )\n\u001b[1;32m 947\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 948\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n",
|
733 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpx/_client.py:979\u001b[0m, in \u001b[0;36mClient._send_handling_redirects\u001b[0;34m(self, request, follow_redirects, history)\u001b[0m\n\u001b[1;32m 976\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_event_hooks[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrequest\u001b[39m\u001b[38;5;124m\"\u001b[39m]:\n\u001b[1;32m 977\u001b[0m hook(request)\n\u001b[0;32m--> 979\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_send_single_request(request)\n\u001b[1;32m 980\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m 981\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m hook \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_event_hooks[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mresponse\u001b[39m\u001b[38;5;124m\"\u001b[39m]:\n",
|
734 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpx/_client.py:1014\u001b[0m, in \u001b[0;36mClient._send_single_request\u001b[0;34m(self, request)\u001b[0m\n\u001b[1;32m 1009\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[1;32m 1010\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mAttempted to send an async request with a sync Client instance.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 1011\u001b[0m )\n\u001b[1;32m 1013\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m request_context(request\u001b[38;5;241m=\u001b[39mrequest):\n\u001b[0;32m-> 1014\u001b[0m response \u001b[38;5;241m=\u001b[39m transport\u001b[38;5;241m.\u001b[39mhandle_request(request)\n\u001b[1;32m 1016\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(response\u001b[38;5;241m.\u001b[39mstream, SyncByteStream)\n\u001b[1;32m 1018\u001b[0m response\u001b[38;5;241m.\u001b[39mrequest \u001b[38;5;241m=\u001b[39m request\n",
|
735 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpx/_transports/default.py:249\u001b[0m, in \u001b[0;36mHTTPTransport.handle_request\u001b[0;34m(self, request)\u001b[0m\n\u001b[1;32m 235\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mhttpcore\u001b[39;00m\n\u001b[1;32m 237\u001b[0m req \u001b[38;5;241m=\u001b[39m httpcore\u001b[38;5;241m.\u001b[39mRequest(\n\u001b[1;32m 238\u001b[0m method\u001b[38;5;241m=\u001b[39mrequest\u001b[38;5;241m.\u001b[39mmethod,\n\u001b[1;32m 239\u001b[0m url\u001b[38;5;241m=\u001b[39mhttpcore\u001b[38;5;241m.\u001b[39mURL(\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 247\u001b[0m extensions\u001b[38;5;241m=\u001b[39mrequest\u001b[38;5;241m.\u001b[39mextensions,\n\u001b[1;32m 248\u001b[0m )\n\u001b[0;32m--> 249\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m map_httpcore_exceptions():\n\u001b[1;32m 250\u001b[0m resp \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_pool\u001b[38;5;241m.\u001b[39mhandle_request(req)\n\u001b[1;32m 252\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(resp\u001b[38;5;241m.\u001b[39mstream, typing\u001b[38;5;241m.\u001b[39mIterable)\n",
|
736 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/contextlib.py:158\u001b[0m, in \u001b[0;36m_GeneratorContextManager.__exit__\u001b[0;34m(self, typ, value, traceback)\u001b[0m\n\u001b[1;32m 156\u001b[0m value \u001b[38;5;241m=\u001b[39m typ()\n\u001b[1;32m 157\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 158\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mgen\u001b[38;5;241m.\u001b[39mthrow(value)\n\u001b[1;32m 159\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mStopIteration\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[1;32m 160\u001b[0m \u001b[38;5;66;03m# Suppress StopIteration *unless* it's the same exception that\u001b[39;00m\n\u001b[1;32m 161\u001b[0m \u001b[38;5;66;03m# was passed to throw(). This prevents a StopIteration\u001b[39;00m\n\u001b[1;32m 162\u001b[0m \u001b[38;5;66;03m# raised inside the \"with\" statement from being suppressed.\u001b[39;00m\n\u001b[1;32m 163\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m exc \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m value\n",
|
737 |
-
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/httpx/_transports/default.py:118\u001b[0m, in \u001b[0;36mmap_httpcore_exceptions\u001b[0;34m()\u001b[0m\n\u001b[1;32m 115\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m\n\u001b[1;32m 117\u001b[0m message \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mstr\u001b[39m(exc)\n\u001b[0;32m--> 118\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m mapped_exc(message) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mexc\u001b[39;00m\n",
|
738 |
-
"\u001b[0;31mConnectError\u001b[0m: [Errno 111] Connection refused",
|
739 |
-
"\u001b[0mDuring task with name 'chatbot_healthassistant' and id '0532b9be-af05-b133-af5f-0e24ecfcbbb0'"
|
740 |
]
|
741 |
}
|
742 |
],
|
|
|
2 |
"cells": [
|
3 |
{
|
4 |
"cell_type": "code",
|
5 |
+
"execution_count": 7,
|
6 |
"metadata": {
|
7 |
"execution": {
|
8 |
"iopub.execute_input": "2025-01-29T20:09:11.440091Z",
|
|
|
24 |
"\n",
|
25 |
"from langgraph.graph.message import add_messages\n",
|
26 |
"from langgraph.graph import StateGraph, START, END\n",
|
27 |
+
"from langchain_google_genai import ChatGoogleGenerativeAI\n",
|
28 |
+
"from langchain_openai import ChatOpenAI\n",
|
29 |
"from langchain_ollama import ChatOllama\n",
|
30 |
"\n",
|
31 |
"from IPython.display import Image, display\n",
|
|
|
38 |
},
|
39 |
{
|
40 |
"cell_type": "code",
|
41 |
+
"execution_count": 9,
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
42 |
"metadata": {
|
43 |
"execution": {
|
44 |
"iopub.execute_input": "2025-01-29T20:09:11.753044Z",
|
|
|
55 |
"# setup_openai_api()\n",
|
56 |
"# llm = ChatOpenAI(temperature=0)\n",
|
57 |
"\n",
|
58 |
+
"# setup_ollama_api()\n",
|
59 |
+
"# llm = ChatOllama(model=\"llama3.2:latest\", temperature=0)\n",
|
60 |
+
"\n",
|
61 |
+
"\"\"\"llm = ChatOpenAI(\n",
|
62 |
+
" api_key=\"ollama\",\n",
|
63 |
+
" model=\"llama3.2:latest\",\n",
|
64 |
+
" base_url=\"http://141.211.127.171/\",\n",
|
65 |
+
")\"\"\"\n",
|
66 |
+
"\n",
|
67 |
+
"setup_google_api()\n",
|
68 |
+
"llm = ChatGoogleGenerativeAI(model=\"gemini-1.5-flash-latest\")\n"
|
69 |
]
|
70 |
},
|
71 |
{
|
72 |
"cell_type": "code",
|
73 |
+
"execution_count": 10,
|
74 |
"metadata": {},
|
75 |
"outputs": [
|
76 |
{
|
77 |
"data": {
|
78 |
"text/plain": [
|
79 |
+
"AIMessage(content='Hello there! How can I help you today?', additional_kwargs={}, response_metadata={'prompt_feedback': {'block_reason': 0, 'safety_ratings': []}, 'finish_reason': 'STOP', 'safety_ratings': []}, id='run-eb83e576-ad78-40ef-86ef-4133db5ca191-0', usage_metadata={'input_tokens': 1, 'output_tokens': 11, 'total_tokens': 12, 'input_token_details': {'cache_read': 0}})"
|
80 |
]
|
81 |
},
|
82 |
+
"execution_count": 10,
|
83 |
"metadata": {},
|
84 |
"output_type": "execute_result"
|
85 |
}
|
|
|
90 |
},
|
91 |
{
|
92 |
"cell_type": "code",
|
93 |
+
"execution_count": 12,
|
94 |
"metadata": {
|
95 |
"execution": {
|
96 |
"iopub.execute_input": "2025-01-29T20:09:11.763927Z",
|
|
|
205 |
},
|
206 |
{
|
207 |
"cell_type": "code",
|
208 |
+
"execution_count": 13,
|
209 |
"metadata": {
|
210 |
"execution": {
|
211 |
"iopub.execute_input": "2025-01-29T20:09:11.781576Z",
|
|
|
316 |
},
|
317 |
{
|
318 |
"cell_type": "code",
|
319 |
+
"execution_count": 14,
|
320 |
"metadata": {
|
321 |
"execution": {
|
322 |
"iopub.execute_input": "2025-01-29T20:09:11.828125Z",
|
|
|
356 |
},
|
357 |
{
|
358 |
"cell_type": "code",
|
359 |
+
"execution_count": 15,
|
360 |
"metadata": {},
|
361 |
"outputs": [],
|
362 |
"source": [
|
|
|
388 |
},
|
389 |
{
|
390 |
"cell_type": "code",
|
391 |
+
"execution_count": 16,
|
392 |
"metadata": {
|
393 |
"execution": {
|
394 |
"iopub.execute_input": "2025-01-29T20:09:11.858218Z",
|
|
|
554 |
"\n",
|
555 |
"def chatbot_with_tools(state: DataState) -> DataState:\n",
|
556 |
" \"\"\"The chatbot with tools. A simple wrapper around the model's own chat interface.\"\"\"\n",
|
557 |
+
" defaults = {\"data\": {\n",
|
558 |
+
" \"patient_1\": {\n",
|
559 |
+
" \"data_1\": { # Placeholder patient ID, can be replaced dynamically\n",
|
560 |
+
" \"ID\": {\n",
|
561 |
+
" \"name\": \"\",\n",
|
562 |
+
" \"DOB\": date(1900, 1, 1), # Default placeholder date\n",
|
563 |
+
" \"gender\": \"\",\n",
|
564 |
+
" \"contact\": \"\",\n",
|
565 |
+
" \"emergency_contact\": \"\"\n",
|
566 |
+
" },\n",
|
567 |
+
" \"main\": {\n",
|
568 |
+
" \"main_symptom\": \"\",\n",
|
569 |
+
" \"length\": \"\"\n",
|
570 |
+
" }\n",
|
571 |
+
" }\n",
|
572 |
+
" }\n",
|
573 |
+
" }, \"finished\": False}\n",
|
574 |
"\n",
|
575 |
" if state[\"messages\"]:\n",
|
576 |
" new_output = llm_with_tools.invoke([MEDICAL_INTAKE_SYSINT] + state[\"messages\"])\n",
|
|
|
610 |
},
|
611 |
{
|
612 |
"cell_type": "code",
|
613 |
+
"execution_count": 17,
|
614 |
"metadata": {
|
615 |
"execution": {
|
616 |
"iopub.execute_input": "2025-01-29T20:09:11.906458Z",
|
|
|
630 |
"<IPython.core.display.Image object>"
|
631 |
]
|
632 |
},
|
633 |
+
"execution_count": 17,
|
634 |
"metadata": {},
|
635 |
"output_type": "execute_result"
|
636 |
}
|
|
|
683 |
},
|
684 |
{
|
685 |
"cell_type": "code",
|
686 |
+
"execution_count": 20,
|
687 |
"metadata": {
|
688 |
"execution": {
|
689 |
"iopub.execute_input": "2025-01-29T20:09:38.185616Z",
|
|
|
704 |
]
|
705 |
},
|
706 |
{
|
707 |
+
"ename": "KeyboardInterrupt",
|
708 |
+
"evalue": "Interrupted by user",
|
709 |
"output_type": "error",
|
710 |
"traceback": [
|
711 |
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
712 |
+
"\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
|
713 |
+
"Cell \u001b[0;32mIn[20], line 6\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# The default recursion limit for traversing nodes is 25 - setting it higher\u001b[39;00m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;66;03m# means you can try a more complex order with multiple steps and round-trips.\u001b[39;00m\n\u001b[1;32m 3\u001b[0m \u001b[38;5;66;03m# config = {\"recursion_limit\": 500}\u001b[39;00m\n\u001b[1;32m 4\u001b[0m \n\u001b[1;32m 5\u001b[0m \u001b[38;5;66;03m# Uncomment this line to execute the graph:\u001b[39;00m\n\u001b[0;32m----> 6\u001b[0m state \u001b[38;5;241m=\u001b[39m graph_with_order_tools\u001b[38;5;241m.\u001b[39minvoke({\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmessages\u001b[39m\u001b[38;5;124m\"\u001b[39m: []})\n",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
714 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/pregel/__init__.py:1961\u001b[0m, in \u001b[0;36mPregel.invoke\u001b[0;34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug, **kwargs)\u001b[0m\n\u001b[1;32m 1959\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 1960\u001b[0m chunks \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m-> 1961\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m chunk \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstream(\n\u001b[1;32m 1962\u001b[0m \u001b[38;5;28minput\u001b[39m,\n\u001b[1;32m 1963\u001b[0m config,\n\u001b[1;32m 1964\u001b[0m stream_mode\u001b[38;5;241m=\u001b[39mstream_mode,\n\u001b[1;32m 1965\u001b[0m output_keys\u001b[38;5;241m=\u001b[39moutput_keys,\n\u001b[1;32m 1966\u001b[0m interrupt_before\u001b[38;5;241m=\u001b[39minterrupt_before,\n\u001b[1;32m 1967\u001b[0m interrupt_after\u001b[38;5;241m=\u001b[39minterrupt_after,\n\u001b[1;32m 1968\u001b[0m debug\u001b[38;5;241m=\u001b[39mdebug,\n\u001b[1;32m 1969\u001b[0m \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs,\n\u001b[1;32m 1970\u001b[0m ):\n\u001b[1;32m 1971\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stream_mode \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mvalues\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 1972\u001b[0m latest \u001b[38;5;241m=\u001b[39m chunk\n",
|
715 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/pregel/__init__.py:1670\u001b[0m, in \u001b[0;36mPregel.stream\u001b[0;34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, debug, subgraphs)\u001b[0m\n\u001b[1;32m 1664\u001b[0m \u001b[38;5;66;03m# Similarly to Bulk Synchronous Parallel / Pregel model\u001b[39;00m\n\u001b[1;32m 1665\u001b[0m \u001b[38;5;66;03m# computation proceeds in steps, while there are channel updates.\u001b[39;00m\n\u001b[1;32m 1666\u001b[0m \u001b[38;5;66;03m# Channel updates from step N are only visible in step N+1\u001b[39;00m\n\u001b[1;32m 1667\u001b[0m \u001b[38;5;66;03m# channels are guaranteed to be immutable for the duration of the step,\u001b[39;00m\n\u001b[1;32m 1668\u001b[0m \u001b[38;5;66;03m# with channel updates applied only at the transition between steps.\u001b[39;00m\n\u001b[1;32m 1669\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m loop\u001b[38;5;241m.\u001b[39mtick(input_keys\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39minput_channels):\n\u001b[0;32m-> 1670\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m _ \u001b[38;5;129;01min\u001b[39;00m runner\u001b[38;5;241m.\u001b[39mtick(\n\u001b[1;32m 1671\u001b[0m loop\u001b[38;5;241m.\u001b[39mtasks\u001b[38;5;241m.\u001b[39mvalues(),\n\u001b[1;32m 1672\u001b[0m timeout\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstep_timeout,\n\u001b[1;32m 1673\u001b[0m retry_policy\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mretry_policy,\n\u001b[1;32m 1674\u001b[0m get_waiter\u001b[38;5;241m=\u001b[39mget_waiter,\n\u001b[1;32m 1675\u001b[0m ):\n\u001b[1;32m 1676\u001b[0m \u001b[38;5;66;03m# emit output\u001b[39;00m\n\u001b[1;32m 1677\u001b[0m \u001b[38;5;28;01myield from\u001b[39;00m output()\n\u001b[1;32m 1678\u001b[0m \u001b[38;5;66;03m# emit output\u001b[39;00m\n",
|
716 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/pregel/runner.py:230\u001b[0m, in \u001b[0;36mPregelRunner.tick\u001b[0;34m(self, tasks, reraise, timeout, retry_policy, get_waiter)\u001b[0m\n\u001b[1;32m 228\u001b[0m t \u001b[38;5;241m=\u001b[39m tasks[\u001b[38;5;241m0\u001b[39m]\n\u001b[1;32m 229\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 230\u001b[0m run_with_retry(\n\u001b[1;32m 231\u001b[0m t,\n\u001b[1;32m 232\u001b[0m retry_policy,\n\u001b[1;32m 233\u001b[0m configurable\u001b[38;5;241m=\u001b[39m{\n\u001b[1;32m 234\u001b[0m CONFIG_KEY_SEND: partial(writer, t),\n\u001b[1;32m 235\u001b[0m CONFIG_KEY_CALL: partial(call, t),\n\u001b[1;32m 236\u001b[0m },\n\u001b[1;32m 237\u001b[0m )\n\u001b[1;32m 238\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcommit(t, \u001b[38;5;28;01mNone\u001b[39;00m)\n\u001b[1;32m 239\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exc:\n",
|
717 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/pregel/retry.py:40\u001b[0m, in \u001b[0;36mrun_with_retry\u001b[0;34m(task, retry_policy, configurable)\u001b[0m\n\u001b[1;32m 38\u001b[0m task\u001b[38;5;241m.\u001b[39mwrites\u001b[38;5;241m.\u001b[39mclear()\n\u001b[1;32m 39\u001b[0m \u001b[38;5;66;03m# run the task\u001b[39;00m\n\u001b[0;32m---> 40\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m task\u001b[38;5;241m.\u001b[39mproc\u001b[38;5;241m.\u001b[39minvoke(task\u001b[38;5;241m.\u001b[39minput, config)\n\u001b[1;32m 41\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m ParentCommand \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[1;32m 42\u001b[0m ns: \u001b[38;5;28mstr\u001b[39m \u001b[38;5;241m=\u001b[39m config[CONF][CONFIG_KEY_CHECKPOINT_NS]\n",
|
718 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/utils/runnable.py:462\u001b[0m, in \u001b[0;36mRunnableSeq.invoke\u001b[0;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[1;32m 458\u001b[0m config \u001b[38;5;241m=\u001b[39m patch_config(\n\u001b[1;32m 459\u001b[0m config, callbacks\u001b[38;5;241m=\u001b[39mrun_manager\u001b[38;5;241m.\u001b[39mget_child(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mseq:step:\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mi\u001b[38;5;250m \u001b[39m\u001b[38;5;241m+\u001b[39m\u001b[38;5;250m \u001b[39m\u001b[38;5;241m1\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 460\u001b[0m )\n\u001b[1;32m 461\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m i \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[0;32m--> 462\u001b[0m \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m step\u001b[38;5;241m.\u001b[39minvoke(\u001b[38;5;28minput\u001b[39m, config, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 463\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 464\u001b[0m \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m step\u001b[38;5;241m.\u001b[39minvoke(\u001b[38;5;28minput\u001b[39m, config)\n",
|
719 |
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/langgraph/utils/runnable.py:226\u001b[0m, in \u001b[0;36mRunnableCallable.invoke\u001b[0;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[1;32m 224\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 225\u001b[0m context\u001b[38;5;241m.\u001b[39mrun(_set_config_context, config)\n\u001b[0;32m--> 226\u001b[0m ret \u001b[38;5;241m=\u001b[39m context\u001b[38;5;241m.\u001b[39mrun(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfunc, \u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m 227\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(ret, Runnable) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrecurse:\n\u001b[1;32m 228\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m ret\u001b[38;5;241m.\u001b[39minvoke(\u001b[38;5;28minput\u001b[39m, config)\n",
|
720 |
+
"Cell \u001b[0;32mIn[14], line 6\u001b[0m, in \u001b[0;36mhuman_node\u001b[0;34m(state)\u001b[0m\n\u001b[1;32m 3\u001b[0m last_msg \u001b[38;5;241m=\u001b[39m state[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmessages\u001b[39m\u001b[38;5;124m\"\u001b[39m][\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]\n\u001b[1;32m 4\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mModel:\u001b[39m\u001b[38;5;124m\"\u001b[39m, last_msg\u001b[38;5;241m.\u001b[39mcontent)\n\u001b[0;32m----> 6\u001b[0m user_input \u001b[38;5;241m=\u001b[39m \u001b[38;5;28minput\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mUser: \u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 8\u001b[0m \u001b[38;5;66;03m# If it looks like the user is trying to quit, flag the conversation\u001b[39;00m\n\u001b[1;32m 9\u001b[0m \u001b[38;5;66;03m# as over.\u001b[39;00m\n\u001b[1;32m 10\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m user_input \u001b[38;5;129;01min\u001b[39;00m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mq\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mquit\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mexit\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mgoodbye\u001b[39m\u001b[38;5;124m\"\u001b[39m}:\n",
|
721 |
+
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/ipykernel/kernelbase.py:1282\u001b[0m, in \u001b[0;36mKernel.raw_input\u001b[0;34m(self, prompt)\u001b[0m\n\u001b[1;32m 1280\u001b[0m msg \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mraw_input was called, but this frontend does not support input requests.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 1281\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m StdinNotImplementedError(msg)\n\u001b[0;32m-> 1282\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_input_request(\n\u001b[1;32m 1283\u001b[0m \u001b[38;5;28mstr\u001b[39m(prompt),\n\u001b[1;32m 1284\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_parent_ident[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mshell\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[1;32m 1285\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mget_parent(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mshell\u001b[39m\u001b[38;5;124m\"\u001b[39m),\n\u001b[1;32m 1286\u001b[0m password\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m,\n\u001b[1;32m 1287\u001b[0m )\n",
|
722 |
+
"File \u001b[0;32m~/miniconda3/envs/paintrekbot/lib/python3.12/site-packages/ipykernel/kernelbase.py:1325\u001b[0m, in \u001b[0;36mKernel._input_request\u001b[0;34m(self, prompt, ident, parent, password)\u001b[0m\n\u001b[1;32m 1322\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mKeyboardInterrupt\u001b[39;00m:\n\u001b[1;32m 1323\u001b[0m \u001b[38;5;66;03m# re-raise KeyboardInterrupt, to truncate traceback\u001b[39;00m\n\u001b[1;32m 1324\u001b[0m msg \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mInterrupted by user\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m-> 1325\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mKeyboardInterrupt\u001b[39;00m(msg) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m 1326\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n\u001b[1;32m 1327\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlog\u001b[38;5;241m.\u001b[39mwarning(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mInvalid Message:\u001b[39m\u001b[38;5;124m\"\u001b[39m, exc_info\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n",
|
723 |
+
"\u001b[0;31mKeyboardInterrupt\u001b[0m: Interrupted by user"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
724 |
]
|
725 |
}
|
726 |
],
|
paintrek-chat-v2.ipynb
ADDED
@@ -0,0 +1,232 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 12,
|
6 |
+
"metadata": {
|
7 |
+
"execution": {
|
8 |
+
"iopub.execute_input": "2025-01-29T20:09:11.440091Z",
|
9 |
+
"iopub.status.busy": "2025-01-29T20:09:11.439766Z",
|
10 |
+
"iopub.status.idle": "2025-01-29T20:09:11.751153Z",
|
11 |
+
"shell.execute_reply": "2025-01-29T20:09:11.750263Z",
|
12 |
+
"shell.execute_reply.started": "2025-01-29T20:09:11.440060Z"
|
13 |
+
},
|
14 |
+
"id": "xaiioUQni_ga",
|
15 |
+
"trusted": true
|
16 |
+
},
|
17 |
+
"outputs": [
|
18 |
+
{
|
19 |
+
"ename": "NameError",
|
20 |
+
"evalue": "name 'add_messages' is not defined",
|
21 |
+
"output_type": "error",
|
22 |
+
"traceback": [
|
23 |
+
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
|
24 |
+
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
|
25 |
+
"Cell \u001b[0;32mIn[12], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mmodules\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdata_class\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m DataState\n\u001b[1;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtools\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m data_node\n\u001b[1;32m 3\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mnodes\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m chatbot_with_tools, human_node, maybe_exit_human_node, maybe_route_to_tools\n",
|
26 |
+
"File \u001b[0;32m/media/frank-elite/Application/PythonProj/LangGraphExampleResume/modules/data_class.py:98\u001b[0m\n\u001b[1;32m 88\u001b[0m main: MainSymptom\n\u001b[1;32m 89\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"pain: Pain\u001b[39;00m\n\u001b[1;32m 90\u001b[0m \u001b[38;5;124;03m medical_hist: MedicalHistory\u001b[39;00m\n\u001b[1;32m 91\u001b[0m \u001b[38;5;124;03m family_hist: FamilyHistory\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 95\u001b[0m \u001b[38;5;124;03m functional: Functional\u001b[39;00m\n\u001b[1;32m 96\u001b[0m \u001b[38;5;124;03m plan: Plan\"\"\"\u001b[39;00m\n\u001b[0;32m---> 98\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mDataState\u001b[39;00m(TypedDict):\n\u001b[1;32m 99\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"State representing the patient's data status and conversation.\"\"\"\u001b[39;00m\n\u001b[1;32m 100\u001b[0m messages: Annotated[\u001b[38;5;28mlist\u001b[39m, add_messages]\n",
|
27 |
+
"File \u001b[0;32m/media/frank-elite/Application/PythonProj/LangGraphExampleResume/modules/data_class.py:100\u001b[0m, in \u001b[0;36mDataState\u001b[0;34m()\u001b[0m\n\u001b[1;32m 98\u001b[0m \u001b[38;5;28;01mclass\u001b[39;00m \u001b[38;5;21;01mDataState\u001b[39;00m(TypedDict):\n\u001b[1;32m 99\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"State representing the patient's data status and conversation.\"\"\"\u001b[39;00m\n\u001b[0;32m--> 100\u001b[0m messages: Annotated[\u001b[38;5;28mlist\u001b[39m, add_messages]\n\u001b[1;32m 101\u001b[0m data: Dict[\u001b[38;5;28mstr\u001b[39m, PatientData]\n\u001b[1;32m 102\u001b[0m finished: \u001b[38;5;28mbool\u001b[39m\n",
|
28 |
+
"\u001b[0;31mNameError\u001b[0m: name 'add_messages' is not defined"
|
29 |
+
]
|
30 |
+
}
|
31 |
+
],
|
32 |
+
"source": [
|
33 |
+
"\n",
|
34 |
+
"\n",
|
35 |
+
"from modules.data_class import DataState\n",
|
36 |
+
"from tools import data_node\n",
|
37 |
+
"from nodes import chatbot_with_tools, human_node, maybe_exit_human_node, maybe_route_to_tools\n",
|
38 |
+
"\n",
|
39 |
+
"from langgraph.graph.message import add_messages\n",
|
40 |
+
"from langgraph.graph import StateGraph, START, END\n",
|
41 |
+
"\n",
|
42 |
+
"from IPython.display import Image, display\n",
|
43 |
+
"from pprint import pprint\n",
|
44 |
+
"from typing import Literal\n",
|
45 |
+
"\n",
|
46 |
+
"from langgraph.prebuilt import ToolNode\n",
|
47 |
+
"\n",
|
48 |
+
"from collections.abc import Iterable"
|
49 |
+
]
|
50 |
+
},
|
51 |
+
{
|
52 |
+
"cell_type": "code",
|
53 |
+
"execution_count": null,
|
54 |
+
"metadata": {
|
55 |
+
"execution": {
|
56 |
+
"iopub.execute_input": "2025-01-29T20:09:11.906458Z",
|
57 |
+
"iopub.status.busy": "2025-01-29T20:09:11.905241Z",
|
58 |
+
"iopub.status.idle": "2025-01-29T20:09:11.994921Z",
|
59 |
+
"shell.execute_reply": "2025-01-29T20:09:11.993761Z",
|
60 |
+
"shell.execute_reply.started": "2025-01-29T20:09:11.906419Z"
|
61 |
+
},
|
62 |
+
"id": "9rqkQzlZxrzp",
|
63 |
+
"trusted": true
|
64 |
+
},
|
65 |
+
"outputs": [
|
66 |
+
{
|
67 |
+
"data": {
|
68 |
+
"image/png": "iVBORw0KGgoAAAANSUhEUgAAATIAAAFNCAIAAAAM0HSGAAAAAXNSR0IArs4c6QAAIABJREFUeJzt3XdAE2cfB/Ang0DIYoQ9RWSIiFtUVNx7oyjiqKNYd11t1TraOuvAva0DHKAoxYUoDlAZigMUZIvsECAhITt5/zhfqhgQJMkdyfP5S5PLPb+78M3t58EpFAoAQRCW4NEuAIKg+mAsIQhzYCwhCHNgLCEIc2AsIQhzYCwhCHOIaBcANaamUsJhS2prZLVcmVSikMtbwdUsAhEQiXhDOsGQRjS21DOkwr+xZsPB65YYxC4R5bzh56XySYY4oMAZ0giGdAKZQpTLWsGXRdTD8bjSWq6stkYqEsj1SHgnT4qzF5Vuqod2aa0GjCW28KqlT6MqFAAYMfXaeFLMbQ3QrqilSvIEuan8qjIx1YjYewyTZACPm74NxhJDku9Wpj3l9B7DdO1KQ7sW1Ut9wnkaVeE90tSrnxHatWAdjCVWRB4pcu5M9fBmoF2IeqU8qGIViobNsES7EEyDexSYcOr3vM4DjbU+kwCALgOM27SnXD9chHYhmAa3lug79Xve+IXWplb6aBeiOTmvec9jqvxX2aFdCEbBWKIs8khR54HG9q6GaBeiaelJ3KJsweAAC7QLwSIYSzQ9j6kkUwkevbR/31WpF/erSAZ4zz46uviNgMeWqOFVS1OfcHQ2kwCAroOMH11lKVrDPRIaBmOJmqdRFb3HMNGuAmW9R5s+vcFGuwrMgbFEB7tEpABAK69PNkuXgcbsEpGQL0W7EGyBsURHbiqfocGb0dLS0kQiEVofbxyFTsxNq1XTzFspGEt05KbxnTpQNNNWVFTU7NmzBQIBKh//JidPam4qT00zb6VgLFHAq5aQSDhzew3d7/rdGzrkLL36tpOINh0ovCppq3g4RmNgLFHAYUvVdFnqw4cPCxYs8PHxGTly5NatW+VyeVRU1Pbt2wEAgwcP7tatW1RUFACgrKxs48aNgwcP9vb29vf3v3PnDvLx6urqbt26nT9/fv369T4+PvPnz1f6cZWTShScCok65txKwWfhUFDLlRrS1bLm//zzz/z8/JUrV/L5/OfPn+Px+D59+gQGBoaEhAQHB1OpVHt7ewCAVCp9+/atn5+fkZFRbGzs+vXr7ezsPDw8kJmcOnVq8uTJR48eJRAIFhYWX39c5QzphFquzNhcHfNulWAsUcDnyCgMgjrmXFxc7ObmNmHCBABAYGAgAMDExMTW1hYA0KFDByOjT49u2NjYhIeH43A4AMC4ceMGDx788OHDulh6enouWrSobp5ff1zlKHQinwtPxv4H7sSiQ09fLWt+5MiRCQkJO3furKysbHzKzMzMFStWDB8+fMKECTKZjM3+7+Jhjx491FFbI0gGeHi32edgLFFAphK4bLUcSi1atGjFihV3794dO3ZsWFhYQ5MlJyfPmjVLLBZv3Lhx586dDAZDLpf/Vx6ZrI7aGsGpkBjS4I7bf+C6QAFyKKWOOeNwuICAgHHjxm3dunXnzp0uLi6dOnVC3vp8c3Ty5ElbW9vg4GAikdjEHKp1a1ZbIzOkqWWvvpWCW0sU0IyJ+mS1rHnkYgaFQlmwYAEAICMjoy51LBarbrLq6moXFxckk2KxuLa29vOtZT1ff1zlKAwCzRj29PMfuLVEgZEZiV0irioXG5uTVDvnX375hUqlent7x8fHAwDc3d0BAF5eXgQCYdeuXWPHjhWJRJMmTUIudURGRjIYjNDQUC6Xm5OT09D28OuPq7bmomyBXApgHz+fI2zatAntGnQRt1Jay5VatVHxUVxhYWF8fPydO3cEAsGSJUt8fX0BAHQ63cLCIiYmJi4ujsvljh492svLKzc399KlS8+fPx8yZIi/v390dLSbm5upqem5c+d8fHzat29fN8+vP67aml8/qrZwNLB0bPWdiakQfN4SHcW5gvRE7qBp8CFgcPNUic84JoMJd2L/A3di0W
HtRE66U/kxs9bORXm/BFwud+zYsUrfsrW1LSws/Pr1/v37b968WdWV1jdv3rzs7OyvX3d3d09PT//6dQ8Pj0OHDjU0t4xkLskADzNZD9xaoqb8o/BBGMt/pfL+bORyeWlpqdK3cDjl3xqZTDY2NlZ1mfWxWCyJRMnVnYaqIpFITGaDj5We3pjnv8KOwoCbhy/AWKLp8TWWvYuho4eGHiXBmvQkbk2VtMcwE7QLwRx4+gtN/SaYPYpgqenWAowryRO8fcaFmVQKxhJl09bYX9xZgHYVmiYWyv89Vuy3zBbtQjAK7sSiTyKW/7Mpb/qvDhT1PFaCNRXFon+PFs/a6Egg4NCuBaNgLDFBwJNd3FkwbKaFjbOWdxib84aXdKdy2hq1PCCmNWAsMeTB5XJulaTPGCbTRgt7WC/OETy9wbaw1+87wQztWrAOxhJbCjJqn0RV2LsZWtgZtOlAIRBb/W6eWCjPTeOV5gvZJeLeo01VfmOTVoKxxKKcN7zMlJq8NL5rV5qePp5CJ1IYBANDQqvo74aAx9XWSPlcKZ8r43EkhZkCpw5Ul25UBzcdvQ70HWAsMa0go7aqXMznSvkcmVyukIpV+WXJ5fLXr1937txZhfMEAOgb4pEOByh0gqkVSeuPltUBxlJ3CQSCIUOGIM+aQJgCr1tCEObAWEIQ5sBY6rS63u4gTIGx1Glv375FuwRICRhLnaaBB8Gg7wBjqdOqqqrQLgFSAsZSp9nY2KBdAqQEjKVOKyoqQrsESAkYS92Fw+E6duyIdhWQEjCWukuhULx58wbtKiAlYCwhCHNgLHWamRl89BGLYCx1mlpHFoG+G4ylTjM3hyMwYxGMpU4rLy9HuwRICRhLCMIcGEud5uLignYJkBIwljotMzMT7RIgJWAsIQhzYCx1mqenJ9olQErAWOq01NRUtEuAlICxhCDMgbHUXTgczsvLC+0qICVgLHWXQqF4/fo12lVASsBYQhDmwFjqNNghJTbBWOo02CElNsFYQhDmwFjqNNhPLDbBWOo02E8sNsFY6jRXV1e0S4CUgLHUae/fv0e7BEgJGEsIwhwYS51mbW2NdgmQEjCWOq24uBjtEiAlYCx1GhzsAJtgLHUaHOwAm2AsdRp8sAubYCx1GnywC5tgLHWag4MD2iVASuAUCgXaNUAaNX/+/OLiYiKRKJfLKyoqmEwmHo8Xi8W3b99GuzToE7i11DnTp0/ncrlFRUUlJSUSiaSkpKSoqIhIJKJdF/QfGEud4+vrW68zdYVCAXumxBQYS100Y8YMQ0PDuv9aWVlNnToV1YqgL8BY6iJfX19nZ+e60wpeXl7wvgJMgbHUUbNnzzYyMgIAMJlMf39/tMuBvgBjqaP69evn5OSE9LIFN5VYo83n32prpOwSsUQMrwApN35okKjq8uiBP+Sm8dGuBaNI+nhTaxKZQtBwu9p53ZLPlT4ILy/NFzm4UwQ1MrTLgVorkgH+43u+bTvDoTMsCEScxtrVwljyudJrh4r6TrQ0sdRHuxZIG5Tm1yZHV0xaYqNP1tBmUwuPLc//9WHkPDuYSUhVLB0N+02yDNtbqLEWtS2Wz2Mquw421SNp23JB6GIwSY7tqW+fcTTTnLb9+ZbkCSnGemhXAWkhQzqxrECkmba0LZYyKaDBWEJqQGeSRAK5ZtrStljWcqUKDa06SLcoZEDI19BZfW2LJQRpARhLCMIcGEsIwhwYSwjCHBhLCMIcGEsIwhwYSwjCHBhLCMIcGEsIwhwYSwjCHBhLCMIcGEvlrly9MGBQt9ra2mZ9SiaTpaa++vyVMeN8jxwNbm7rPB4vMyujKVOu37AyaEFgc+ffkH37d0z0G/r5K+/S00Si/x6bUG1zn5NKpYEzJzS+rpq+WhpXb6EwCMZSlf7e/eee4K0tn8+8H6fevh2piopa5E501KLFs4VCgQbawuFwNBrdwMCgkWlUslo0uVDfTZu72NI8sYp+g8VisUrm00Ka3KQQCIQjh842Po
1KVgvGt5MIGEsgFArPh5x88OAuq6LcwsJq6JBR0wN+QN6Ki4u9cOkMi1Xm2aHTqpW/m5mZAwBSU1+dDzmZmvYKAODm6rFgwXJXF3cAwPadmx48jAEADBjUDQBwIfRfK0trAEBubtaSZXOzsjLMzCymTA4cM3oiMnOpVPrPmaPRd29wONUODm1mzwry6eMLAJgaMLqqqvJ6ZPj1yHALC8tLF258cxHOnD0edeOqTCbz7T944U8rSCQS8nrkv1fCwkMqKsotLa0HDRzuP2WGvr6+WCw+d/5EbGx0OavM1JQ5dMio2bOCCIT6vdTciY4K3rcdADB+4mAAwC9rNg4fNqaR5m7f+ff69bDcvGwy2bBH916LF60yMjIGAHz8+GFv8Lb0jDQaje7d02f5sl/xePyFi2euR4bV1HCdnV1nzwqytrYNmD4WABA4fc7cOQsBAPUm6Nqlx9erpZEFWb9hpZ2tA5FIvHHzmlQi8fb2Wbb0VyqV2shCYYqux1Imk61dtzw17dXECVOd27rkf8j9WPih7m/03PkTU6bMEImE586f2LZ9w57dRwEApaXFIrFoRuA8PB4fGRn+629LL4ZGGRgYBAbMYZWXlZQU/fbrHwAAUxMmMpPsnEz/KTMGDRx+N+bmnr1bhULBZL/pAIBdu/+6d/924PQ5jo5t792//fuGVfv2nujYsfOmjTvX/LK4k1fXyX7T9f4fsEZkZmXoGxgEzV+alf3+ytULJibMmTPmIeEJvxIyccJUBwenjx/zL4edKywqWPvrHwQC4cWLxF69+1lb2WZnvw8JPU2j0adMrn/E2LNHnymTA8PCQ7ZtCaZQqLa29o039+5dqr2945AhI6uqKiOuXeLX8rdtCUZ27AsK8hctXFlby3/56jkej3+RknTi5MFBg4b37N47KfmpoLbW2Mjkzz92bf7jV6SJrycAAHy9WhpfkLDwkIEDhm7dElzwIW/Xnr9MTc0WBC1raKGwRtdj+ejx/Zevnq9e9fvIEeO+fnf3rqOWllbIlu3EyYMcTjWDYTR48IghQ0YiE7i6tl+xckFq2qvu3bxtbe0ZDKPKKranZ6fPZzJ0yKip/jMBAGNGT1yybO6Zs8dGj5rIYpVF370xc8a82bOCAAD9+w0KnDnhzNlje3YfdXNtTyQSTU2Z9ebTEGtr2727jxEIhKFDRxUU5D18FDNzxryKClbohdPr123p328QMpmpqdne4G2LF62i0+iHD53F4T51r1hcUvg4LvbrWBobm1hb2wIA3N07MBhGjTcHAFjx89q6eRKJxJDQ0yKRSF9fv7S02KWd2+hREwAASCulpcUAgAnjpnh4dKxbkz59fOs+rnSCr1cLgUBoZEFsbe3X/vYnDodzd/N4HB+b/PzZgqBlDS0U1uh6LJOSn+rr6w8bOlrpu3Q6A/mHUxtnAEA5q4zBMMLhcHHxD8LCQz58yEMG2KmqZDelLQKBMG6M3/adm96/f/ex8AMAwMdnAPIWDofr3s075t6t71gEKoVat3l3dGz7Lj0VAPDiRaJUKt2ydf2WreuRt5CeRytY5XQavaqq8tz5E8nPE2pquAAAGpXWwuYAABKJJOLapZh7t8rLS/X1DeRyeXV1lYWF5ZDBIy9cPLP/wM4ZgfOMjU0AAN49fWg0+tZtvy9ZvNrb2+frJr45QZ1GFsRA36AusRYWVmlprWnca12PZVUlm2lq9vWRVT04PB7Z4wUAnDt/8p8zRydNnPbjvCXsyorNf/wqb3I/JaZMMwAAn8/j83kAAGMjk7q36HRGbW0tn8+nUCjfvTgEAkEqlQIA2JUVAICtW4LNzSw+n8Da2raykv3jgulksuGcH36ytrY9ffow8hvRkuYUCsXadcvfZ76bNfPH9u07xsXFXrp8Dlkt8+YuMjY2CQk9ffvOvz/OXzph/BRTU+bB/acPHdnz27rlHTp4bVi/DTlor/PNCRBNXxA9op5c3pp68db1WFKptMqqJm3rECKR6MLFf0aNHL
940UoAQHl5Wb0JGu8Ou7q6CgBgYmIqEAoAAFwuh8k0Q96qrGQTicS6KwQt7FabRqMj/7C3d6z31r9RV6uqKg8dOGNhYQkAMDe3bDyWTank9euUFylJ69b+NXjQcABAUWFB3Vs4HM5vUsCI4eP2Bm/df2Cnc1sXT89O9vaOO7btT3mZvGHjqh07N+36+3C9GTY0wefFNHdBmrtQKNL165adO3cXCAT3Y6PrXkF+/hsiFApEIpGLizvyXw63GgAgl3/aWhoYkCsr2XX//dqjR/doNHrbti7u7h1wOFxCYjzyulgsTkiM9/DoiGy3yQZkNruihcuFw+GuXb9c94pA8OlKHZdbbWRkjPwpI4tQ9zeqp0cSCGrr1gDZgAwAqKhgfbM5ZD24tHP7/L/IekAuSFAolNmzFyBnjOoudXTp3N3bu6/SOwSUTlBvtTSyII1o+kKhSNe3lkMGj7weGbZ9x8aMjLfObV1y87JfpCQePxra0PQMhpGTk3PEtUsmJqZ8Hu/sueN4PD43Nxt516tjl9t3/t2zd6tnh040Gr13734AgOi7N0xMTA0MyIlJT549i1u6ZA2JRLKxth02dPSZs8dkMpm1te3Nm9cqK9lrf/sTmY+nZ+f7sXcuXDxDo9E92nd0cnJu7nLZ2thNnDD1asTFtet/9unjy2ZXXI8M27Z1n0s7t06dul27Hnb6nyMeHl5xcbGJiU/kcjlyNquds6tQKNz0xy8/LfjZxtrWo4MXgUA4eHjXiGFjRWLR2DGTGmquvbsniUQ6cfLgqFETcnOzLlz8BwCQl5ttY2276Y9fqBRqt67eyG+Qq4t7esbbzX/8Mn7cFDLZMCnpqZtr+3pza2iCequlkQVpZM00faFQRNi0aRPaNahSajzH3p1KpjZ1rAgikdi//xAOp/rho5gnTx9yuNW+/Ye0b+/5PvNdcvKz6QE/6OnpAQCKij7eu3979KgJTKaZV8cuiYlPrkeGfSz8MH/+Ejs7h6ioq5P9phMIBCcn55oazv3YO6/fpDAYRl279LgacXHa1Fl3Y27eu39boVD8OH9J3d9B9269+Hze7TuRsbHRFEPKqpXru3fvhbzl4dExO/t9zL1bWVkZbm4eDvZtGqo/9sHdWj6/7lroi5SkjIy3gdPnAAC6d+9laEh59iwu9kF0YVFBn979e/fqRyaTHRzaKBTy65HhcY/vW9vYrVr5e2rqS4GgtlOnbm3atBUKBcnJz9xdPeztHek0upmZxcOHMc+exdXUcIcNG91QcxQKxdHR6U501J3oKKlUum7tXxUV5Wlpr4YNG11cXJiQGH8/9o5AKPhx/hIfH18uh5OTk/ngwd2UlCQvry4/L19LoVABAOdDTnXw8OrSuXtDE9RbLf36DmxoQerV+fx5QlZ2RsC02QCArxeqiX8qvCop66PAvQe9idO3hLYNDXRhe4HPREtji29f7oOgZinJFbx9WjlhkY0G2tL1nVjs4/F406Yr/0UP+nEZcj0Q0jIwllhnaGh4/NgFpW/RaQyNlwNpAowl1uHxeOTeWkh36PoFEgjCIBhLCMIcGEsIwhwYSwjCHBhLCMIcGEsIwhwYSwjCHBhLCMIcGEsIwhwYSwjCHG2LpZElSQG06pkYCCMUADCYepppS9tiSSLh2MWtoH9eqNVhFQoMaU19jreFtC2WbToYVpXCWEKqx2GJHdwNNdOWtsWybUcaHq9Iud+MXrMg6JueRpWb2ZCs2pA105y29U6AeHiFJZMCpq2BmY0BnoBDuxyotZJK5KxCYXEO36qNQZcBxhprVztjCQDIfsXLecMTixRNPNQUCARyubwlfbQqJRKJ5HI5mayhX9lmUSgUQqGwodoUCoVYLNbX19dkSXw+v6G3VP7VNIWJpT6ZinfpSnVw02jrWhvLpggPD//333+PHDliYGCQkJDg49NYB97NUlhYyGAw5HL5jRs3pk+frqrZqtbJkyfPnj27YsWKCROU9zyyevXqESNGDBw4UGMlnT
t37tSpU1+H09LS8saNbw+RpDV0LpZZWVnXrl0bPXp0+/btL1y40KlTp/bt63eI2EJnz56NiIi4fPly46M1oovNZi9cuDAnJ8fFxSU0NLRuXIDP8Xi8iooKR8f6HUCr1fLly+Pj4z9/hUAgJCYmarIG1GnbKR+lpFJpdHR0UlISAODx48cODg7Ozs4AgICAABVm8vXr19HR0QCAjh07RkZGYjmTAICIiIj8/HwAQH5+/pUrV5ROQ6VSNZxJAMAff/zh4OBQrwwN14A6bY5lcXHxo0ePkF2jR48eWVtbAwDmzp3r7+9PasIAdU0nEokyMjL27dvn5uYGAOjcubMKZ64ObDY7JiYGGVJFLBZHREQg//7av//+e+nSJU3WRqfTly1bZmb2aQwIfX39bt26bd++XZM1oE+hdfLz8xUKRU5OzujRoyMiItTaVnp6emBgYEVFBYfDUWtDqnX8+PEePXp0/T9vb+/Lly8rnbK2tnbx4sUaL1ARHBzcs2fPrl27Iv8tLCxUKBQRERFJSUmaL0bztGdrKZPJhELhqFGjTp8+DQCwtraOiopq6GRGy2VnZwMAUlNTf/vtN1NTUzpdE51tqwSbzY6Ojv588yiRSBrajyWTyQcOHNBgdZ8sW7bM1dWVSPzUM6ONjQ0AwMfH59q1axUVFY2csNUO2nDKZ//+/devX799+zYej2ez2ZaWlmptLj8/f+XKlUFBQUOHDlVrQ2qyb9++c+fOITtKdWd6SCTSs2fPlE5fXV2dn5/fqVOTxsDVALFYXFhYuHfv3vXr11tYWDThE60Q2pvr75SXl7dly5asrCyFQnH79u3q6moNNJqYmKhQKOLj4/Py8jTQnLrxeLy+ffs2ZcoZM2akpaWpv6JmePLkyalTp5ClQLsW1WtlO7FpaWkPHz4EAMTHx7u6urZp0wYAMHz4cAZD7f2LT5s27dWrVwCAPn36aP78pDrI5XIPD4+mTLlx48bS0lL1V9QMvXv3njNnDgDg1q1bf//9txbs9H0B7d+FJikrK1MoFAkJCTNnznz58qUmm37x4kV6ejpyDkmT7WpASUnJyJEj0a5CBS5evJicnCyVStEuRGWwvrUUCoXz5s3bsWMHcj3w7NmzmjzIiYyMPHLkiK2tLQDAyclJY+1qBp/PRy4aNcWzZ88uXFA+FArqpk6d2q1bN4VCMWrUqNTUVLTLUQGMxjI1NXXr1q3I8f2iRYt2796NnBXUTOtyufz69esAAC8vrxMnTmjr5ezy8vKm3/Laq1evkJCQsrL6g9JjB5FIPHXqVEJCAnKaCu1yWgRzseTxeACAgwcPenl5IReXNXx1vra2tmfPnsjpXO04hmxIWVlZs3YBLl68iPFDOEtLy/nz5yMHnPv370e7nO+HoVhevny5W7duEokEAHDs2LFRo0ZpuIDU1FQWiyWXy5OTk729vTXcuua9e/eu3m1ujWMwGHg8XiqVqrMo1QgICGAwGK9evWro7iWMQz+Wz549e/z4MQDA1tb2+fPnxsaae6rtczdv3ty9e7exsbG27rJ+TSQSubq6Nusjubm5y5YtU1tFqjRr1qwOHTpIpdJNmzahXUuzoRzLO3fuhIaGuri4IBceUKnh3bt3AAArK6szZ87U3Vai9QQCwf379zt06NCsT3l7ew8YMCArK0ttdakSkUjU19fv2rXr5s2b0a6ledC5y+fy5cvJycm7du2qqamh0WiaL6DOgQMHiETiTz/9hGINqIiPj793715r3JJ8B6lUSiQSr169OmnSJLRraRJNby2rq6sFAsGHDx82bNgAAEAxk8jJOmdnZx3MJHJSpFevXt/32bi4uJiYGFVXpEbITlDdCaFWQGNXSN+8eTN48GAWi6WxFhtx+/bt8+fPo10FakQikbe3d0vmMG3atIyMDNVVpCHIkyjZ2dloF/INmthaIgdvRUVFly9fZjKZGmixcSKRKC4uLjAwEO1CUHP37t0ffvihJXO4cOEC8ih564I8iZKdnX
3ixAm0a2mMemMpk8kWLlyYnJyM3LlqYmKi1uaaoqKiQigUbtmyBe1C0LR3794pU6a0cCYfP35MS0tTUUUaNWzYMJlMxuFw0C6kQQT1HfSzWCypVGpra6v5K5ANWbNmDZVKdXd3R7sQNCH7LIMGDWrhfIyMjHbt2oXH41vjbYnIFfKamhpDQw31yNw86tgz/vjx48CBAysrK9Ux8+/25s0b5JZ3Hefv7y8UClU1t9TUVIlEoqq5adjLly/nzJmDdhVKqOUCSXR0dM+ePY2MjFQ+5+9WUlJCIpFMTU3RLgRlmzdv7ty589ixY1U1Q5lMVlJSgtzN3xrxeLzCwkKkEybsUOWxZWFh4axZs5B9d0xl8tWrV+vXr4eZTExMLCsrU2Emkd4iWSzWvHnzVDhPTaJSqXQ6HXmSFkNUuOVdt24d1nZcEefPn+fz+WhXgb7evXvLZDJ1zLmkpARr3Rc0y5YtW65cuYJ2Ff9RzU7s9evXx48fr4pfCUhdZs2atXr16ubebdd0bDZbJpOZm5uraf7qFhcX17lzZ4zcEa2Cndg1a9YgnXdg099//11QUIB2FSjbtWvXuHHj1JdJAICpqWlUVNThw4fV14Ra9e3bFyOZVM09sVlZWe3atVNRPSrG4XAmTJgQGxuLdiFoOnv2LIfDWbp0qQbaQrr8UXfng2ry119/9e/fv2/fvmgX0rKt5Y4dO8RiMWYziTh//jzaJaApIiKisLBQM5lEAmlpaRkaGqqZ5lRr9uzZUVFRaFcBWrS1nD179sGDB7Gz3Ye+dvr0aQ6H8/PPP2u43Zs3b0okEni64btpQ/fNjXj06FFRUVFAQADahaDg5s2bb9++XbNmDSqtJycnd+/eHZWmW6K0tLSsrAzpsAZF37MTe+bMmfT0dDUUo3pyuTwlJQXtKlBw5cqVxMREtDIJAEAy+csvv6BVwPchk8ma37n4WrPvib148SKBQNDkUKQtYW5u7urqqms3EoSEhOTl5SFPtKKrTZs2R49deE/DAAAgAElEQVQexcJJlCYyMDDg8/k2NjboPp2v5TuxOmjr1q1mZmbYed6Xx+NRqdScnJy2bduiXUur0byd2NDQUKRnulbk2LFjretR+pZYvny5q6srdjJZN2jssWPHnjx5gnYtTZKRkYH6gU8zYrlz504ikainp6fOelTP19f3n3/+QbsKTVi6dOmkSZOw2V3Nzp07MzMz0a6iSXJycpDOu1HU1J1YoVBYUlKC5bt5GiEUCkkkEh6Pfu+bapKTk+Pv7x8WFob9Rx9PnTo1d+5ctKtoTG5ubkpKip+fH4o1NDWWtbW1+vr6BAJB/SWpnkAgKCoqao2dXDRFbGzs0aNHw8LC0C6kSXJyctasWXP16lW0C8G0Jm1AkpOTV6xY0UoziZz1vnHjhlbe7rNnz55Xr161lkwCANq2bRsSEoKMiYh2LcqVlZU1NAKvxjQplo8ePVqxYoX6i1Gj5cuXi8ViZIATrTF37lwLC4tW99UgQzwVFxdv3LgR7VqUSEpKio6ORrcGHb1AMnHixKqqqgcPHqBdyHdKS0vbvXv3smXLsDN2+ne4ceNGp06dzM3NSSQS2rX858WLFyKRqHfv3ijW8O1YJiUlmZuba8fYVbGxscXFxWFhYcXFxcipy5kzZ6JdVLOFhobevXv39OnTrfewoo5UKk1JSeHxeHU3qPTu3btdu3Znz55FuzQ0fXsnduHChc0a1wnLBg4ceOTIESSTCoUC6cC2dVm9enVZWdnZs2e1IJNIf+c9evRITU0tLy8HAAwdOlQsFhcUFDx8+BCtkhISElAfHvMbsczLy9u3bx8Oh9NUPerVvXt3kUiE/BvpYxvtipohJydnwIABo0aNanUHk9+0bNkyMpk8cuTIyspKAACXyz137hxaxaxcudLAwACt1hHfGKCqTZs2rfRaZT0zZ85MT0//fI8dj8dXV1ezWCwzMzNUS2uSy5cvp6SkREZG0ul0tGtRCxqNVlpailxbxu
FwxcXFDx8+9PX11XAZXC73999/Rz2W39habtiwQTvOCZ07d27x4sW2trafD0vO4/Fyc3NRratJVq9e/eHDhx07dmhrJpFDys/v92CxWJcuXdJ8GXQ6ffjw4Zpvt57GYpmcnFxeXq41e7CzZs0KDw9fvny5o6MjhUJRKBQ1NTX5+flo19WY7OzsgQMHjhgxAsVHtDRgxIgRSI/ScrkceQWHw+Xm5sbFxWm4kvv37ycmJmq40a81dia2oKCAQCAgo6lohlQiF/DkGmgoJibmxo0bJSUl3t7emD1Ui4mJiYyM3LZtW0seMlIoAN2kFYyl+88//2RlZX38+JHFYuGkZIFQqFAo3Nzcdu3apckyfv3118DAQDX1Rdb07wIr1y3Tk7hv4jiVpWIyVXMnGCUSCZbvvBeLRKTPdrm/j6m1flFWrXMnau8xpoY0rOfz0VVWZkoNwwLHKZfKZTK5QoHce6AxUomEqLY/CRMr/aKsWmcvSs+RpnSTxlppLJZBQUGHDh3SwLjlSXcrK4olnfqb0BqtFfo+ErG8qkwUe6HEf5Vd438NKJKI5ac35PedaG5mRzYw1IZrP0pJJfLqcnHs5ZKJi2yMzRu8iaLBY8vc3NzKykoNZDLxTiWHJe07wQJmUk30SHhzO/LUX5wu7foo4MvQLke5c39+GLfIzs6VqsWZBAAQ9fBMG4MpK9pcO1TErWzw0eUGt5ZcLre2tlbdHX5WlYufRrH7+VmptRUIUfpBUPCOO2iqBdqF1Jd0t5KoR2jXhYF2IZpTUSTMSuEMDVT+XTS4taTT6RrohLeiSKRQaMmZXuwzMiPlpvLRrkKJwkwB1Vi39pWMLfSzX9U09G6Dsdy7d++jR4/UVtUnPI7MzA7lS7e6w8CQYG5H5nOkaBdSH4GAMzJv6cmt1oVAxNm7UqpZYqXvNhjLN2/eGBsbq7MwAACQiOQSoSauiEAIdrEQgxei2SUioHt/BZVl4oa+iwbP6Gzbtq31jr4EQa1ag7FspaO7QJAWUL4TW1NTM336dI0XA0EQaDCWbDZbKBRqvBgIgkCDsbSysjpy5IjGi4EgCDQYS319fXi+B4LQojyWT5482bNnj8aLgSAINHZsWVPT4C0IEASplfILJAMHDuzXr5/Gi4EgCDQYSzj0OgShSPlO7PXr13W8o04IQpHyWFZVVbWWY8u/tq6fORuLQ8c17l16Wl3XmEgvxoEzJxw5GoxqUdpPHau9tLSkpLRYFdX9R3ksJ02aNGPGDNW2BNW5Ex21aPFsoVBQ9woOh6PR6Kj3g6jd1LHai4oLAwLHvn+v4n7AlR9banHHh1jw+Q82gkAgHDnU7KMGDqcah8fTafDLahJVrfbPyaRSdfSGpTyWSBedU6dOVXl7KhH74O7Zc8fLykocHZzqujBE9kn+OXM0+u4NDqfawaHN7FlBPn0+9f9bVlZ68vSh5ORntbX8tm1dpkwOHOA75NTpw5fDzt+982nUtIz3735aOHP7tv09e/Rev2GlvZ2jUCS8e/eGQqHo0rnHpInTQkJPpb19bWJs+sPsBUOGjEQ+VVJafPjwnhcpiSSSvks7tzlzFrq5tgcArN+w0s7WgUgk3rh5TSqReHv7LFv6K5VKvRMdFbxvOwBg/MTBAIBf1mz08uoaMH0sACBw+py5cxZmZb9fsnTO9q37j588kJOTaWFhFTR/aZ8+/ZHmoqNvhF78p7y8tI1jWxweb2lhteH3bRr/BlB25eqFQ4f3TJw49dGjezxeTXt3z6CgZa4u7gCA8vKyU/8cTkx8wufz7OwcAqb9MHjQcGRT2fhqR4YnPnnq0P3YO2KxyM7WYcqUGQMHDEWai31wd7Lf9FOnDrErK9q1c1u1Yr29vWNJafGsH/wAAJv/+HUzAMOGjf51zSaVLKDynVgul8vhcFTSgMrdu3/nz7/Wmpowlyxe3b17r5zcrLq3du3+63LY+dGjJqxb+5elpfXvG1a9efMSAMBmVyxaMvv584Sp/jNX/r
zOqY1zRUX5Nxu6eOksAGDP7mP+U2bGP3m4+pdFffr47t1z3NnZdfvOTQUF+ciclyydw63hLF60KujHpRKJZNnyeXl5OcgcwsJDSkuLt24JXrxo1cNH90JCTwEAevboM2VyIABg25bg/cEne/boY2xk8ucfuz7vNkkkEm3+81e/SQHBe45bWlj9tXUdh1MNAIh/8nD7zk1eHbusX7tFj0RKT0/zmxSgntXcCkjE4j8371r725/VnKoVK4OQAzypTJqR8XbcWL+fgpbT6YwtW9enZ7xtymqXy+Xr1v/87Nnj6QE//Lx8rbOz659/rb11OxJ5Nz09LSzs/MqV6//YvItVXrZtx0YAgKkJc93avwAAP8xesD/4ZGDAHFUtmvKt5dSpUzHSUWU9IpHo4KFdHTt2/nvnIWRsnKKij9k5mQCAgoL86Ls3Zs6YN3tWEACgf79BgTMnnDl7bM/uo+fOn6iurjp98rK9vSMAYNiw0U1py8GhzdLFqwEALu3cbt2+7ubqMWH8FADAooUr4+IfvHr9wt7e8XzISWMjk91/H0G+3SGDRwbOHH/j1rUli1YBAGxt7df+9icOh3N383gcH5v8/NmCoGXGxibW1rYAAHf3DgyGEdKWTx/fek/ELlm8GvmpnjdvcdCCwNdvUvr1HRgZGe7o6LRyxToAgJubx2T/EQmJ8e3be6pnZWPdgqDlhoaG7gC4urQPnDn+2rXLC3/62drK5szpcGRljhgxbsKkwU+ePHR38/jman8cF/sm9eXF0Cgm0wwAMHjQcIGg9mrExZEjxiETbPlrr4mJKQBg4sSph4/s5XA5DDrDpZ0bAMDe3tHTU5UjGrayY8vUtFccTrXfpIC68arw///H6zcpAAAfnwHIf3E4XPdu3jH3bgEAEpOedOncHclk0+mT/uvGgkTSr+s+1NzcAjmuAwAkJj4pZ5WNHN23bkqJRMIqL0P+baBvUPetW1hYpaW9bnrrZANy3QcBABUVLABAOavM1tYeeZ3JNDMwMKip4TZrobSShYWlvb1jesan4aWzczLPnD2GnIaRyWSVleymzCQhIV4qlQYEjq17RSaTUSj/XcA3+PIbYVewGHR19QmmPJZhYWEKhcLf319NrX638vJSAIClpfXXb/H5PACAsZFJ3St0OqO2tpbP51dVVXbt0lNVNSBJQ/YmKqvYvXr1/XHeks8n+Py7rKNH1JPLv6czSD2iHgAA+ay1te379+/EYjGJRMrNzRYKhc7Ori1YFO1Bo9GRX6iUl8m//Lqkc6dua1ZvpBhSNmxaLVc0qT+Sqiq2qSlzz66jn79IUNYhK/KNyL7r22wi5bGsrq7G5k6sEcMYAFBdXfX1W0ymOQCAy+UgOyEAgMpKNpFINDAwoFJplVVKfjJb3qsNjUbncKqbux1GfMcanuY/a8WqBStWLejapUdMzC031/bDhjZph1zrVbDK7ewdAQDnz5+0trbduiUYOayo2+mo09Bqp9Ho1dVVFhZW+i3uyb7llJ/ymTx58uTJkzVezLe1beuCx+Pv3b/99Vvu7h1wOFxCYjzyX7FYnJAY7+HRkUAgdOncPSUl6fNrvlKpFADAYBhLJBIO99PJrdLmXxTu0qVHWtrr95npda8IBIJGPwHq/laQ/dJm6dDBa9LEaXK5vLi40N9/ZvDeExroXxv7Xr16UVRc6NG+IwCAw612buuCrBaxWFwrqK07V9/4au/SpYdMJvs36krdK035KvX1DZAdWtUtDWhwa6mBPu++j4WF5YjhY2/eui4WiXr06M1mVyQmxhsbmwIAbKxthw0dfebsMZlMZm1te/PmtcpK9trf/gQAzAic9/TZ48VLfpg4YaqJienz5wlksuGqleu7de2Jw+EOHtrlNykgPy/n2In9za1n1swfExLiV69ZNGVyoLGxSVLSU5lc9tcfuxv/lEcHLwKBcPDwrhHDxorEorFjmnqXUviV0Jcvk6dMmYHD4YhEYmFhQdu27Zpbs9bYG7y1a9
eexcWFVyMumpiYThjvDwDo1KlbdHTUrduRdBoj/GpoTQ03Py9HoVDgcLjGV/uQwSOjbkQcPbavpLTYpZ1bdnZm/JMHZ05fafxmA3NzC2srm7ArIQZkMpfLmew3XSU/lMpnceXKFYVCgc0N5pLFq0kk0r37d56/SOjQoVPbti51x/TLl/1KoVCvXb9cU8Nt49h26197u3TujpwoO7Dv9LHj+0JCT+kR9ezsHZGv0MGhza9rNp07f2JZ3LyOnp2D5i/dvrN5151srG0P7j995Fhw6IXTOByuXTs3ZM7f/NTKFetOnjp08NCudu3cmh5LV5f24VdCt2xdX/fKmNETV/y8tlk1aw2pVHr02D6xWOTl1fWnoOUUCgUAMGf2T5XsigMH/6bR6KNHTZziF7gneOvLV8+7dO7e+GrX09P7e8ehEycPxMZG37gRYWtrP3aM3zczhsPh1q/fuvPvzQcP7TI3txw5crxKzgMpH+zg5MmTEonkp59+ankDjUuKrhQLgZevSROmhQByehA5Cy0Wi4+d2H/9elj07adN/4UO3503dZW9IR1b43yc3pA3+kd7Mq2pVSG3E9yMemxoaKjm0tTo2oEP4xZYM5hKupNX/nVOmjQJm6d8dNzduzdPnj40wHeolZVNVRU7Li7W0dEJHl5qn1Z2bKnjHBydPDt0unf/NpfLMTVl9undP3D6XLSLglRPeSxv3rzJZrNnzpyp8Xqgxri6uP++fivaVaDPb1KAdt91qPwCiUAgKCoq0ngxEASBBreWw4YNg335QBBalMeSRqPRaDSNFwNBEGhwJzYlJWXrVngMA0HoUB5LAoGQnZ2t8WIgCAINxtLd3X3z5s0aLwaCINBgLEkkkp2dncaLgSAINDZIe0CANl8XgiAsazCWlZWVLJaKH1eBIKgpGozlsWPH4DUSCEJFg3c5Ozg4aKB5kgFODlraRQDUdEwbA4DD3EMITBsDHAFzVambiaU+AMqXusGtZWhoaEREhDqrAgAAmrEe68O3nwGHVELAk7IKhYY0zD1xIpfJK0vEaFehURKxvDCTz2CSlL7bYCzpdPqbN2/UWRgAAJjb6be4Px2oqarKxG29sDgWm72bYU2lBO0qNKqyVNSuc4MHiQ3GctiwYYsXL1ZbVZ/QjPVsnA0eXy1Vd0MQAOBeaHHf8Uy0q1Ciy0Dj9ITq8o86tN8Ue6G4z1jTht5V3juBhr19xsl6xfPqb2psQSIQG/ylgL4PnyOpZonvh5bM+dORTMHcHixCLlOc2/Kh0wATpo0Bw1T5rp0W4HOlHJbowaXSGevsKQwl/RIgGotlUFDQ6tWrnZ2d1Vbkf/Le8l89qi7NExKIcKdWlczt9avKJE4dKX3HM/F4rK/bhFvsrJc8qhGRVVh/GB81kclleDxBM+vFzFa/ulzcxpPSZwxTT7+xzU9jsdy2bVu7du38/PzUU6RyIkGTOtuFmkihUBgYYqvnnm+SiD4f8Em9Jk+efPjwYTMzMw20pZArDChN+i4ai6VcLpfL5bCrGEiLHT9+PCAggErF1pmwxmKpUChEIhEcCxWCNKzRHVwcLigoKC0tTYP1QJBG3blzp7a2Fu0q6vvGac8xY8akpKRoqhgI0rTg4GA+n492FfVh4gIJBKHl7t27/fv3x8JwQJ/7dizfvXtnb2+PtWNiCNJi3752X1BQsG3bNo0UA0EaJRQKw8PD0a5CiW/Hcvjw4ebm5iKRhi7vQpDGvH79+sGDB2hXoQQ8toR0V35+vlAodHNzQ7uQ+pp6AyrsnxLSPo6OjhjMZDNiaWdnFxwcrOZiIEhzampqMNu9Y1NvrJsxY0ZxcbFcLsfj4RMekDY4f/68ra0t2lUo14xjS4FAUFNTY25uruaSIEgTUlNT27dvj4zhizXN2PSRyeSjR49GRkaqsx4I0hBPT09sZrJ5sQQAbNiwgc1mw4slUKtWWFg4btw4tKtoDLxAAumcw4cPT5o0ycLCAu1CGvQ9sbx//35SUtJvv/
2mnpIgSNd959by0aNHRCKxT58+aigJgtQlOjq6urra398f7UK+oUU7se/evWvfvr1K64EgdXnx4kVxcfGYMWPQLuTbWhTLmJgYuVw+bNgwlZYEQar36tWrTp06oV1FU7Xo3oAhQ4Z8/PhRdcVAkFps3LhRIGhNndC29JadefPmAQCuXbumonogSJXEYrFMJuvevXuvXr3QrqUZVHMnnYODQ1BQkEpmBUGqcvPmzVu3bhEIhNGjR6NdS/OoJpZdunRZvXo1AKC0FA5bAGFCRkZGYmLi+PHj0S7ke6j4doLY2NjXr1///PPPKpwnBDXLxYsXR4wYIZVKmUwsDrjSFCrumnngwIHFxcVFRUVMJhNr3RZBuuDgwYNCodDIyAjtQlpELTffSSSSgoKC6OjohQsXqnzmEPS17Ozs169fT5o0qaKiovVuJOuo5eFJPT29tm3b6uvrX7hwQR3zh6A6CoWitLR03bp1nTt3BgBoQSbVfqs6j8ejUqmHDx+eOnWqiYmJ+hqCdBCLxTpw4MCaNWsUCgWN1uAQrq2RersaQHqX7dmz5/z589XaEKRTOBwO0iF6z549qVSqlmVS0w92PX78uLCwMCAgQGMtQlqmoqJiw4YNU6ZM8fX1RbsWNdJoxzx9+/YtKSmBtwRB3wHp0DU3N3fWrFnanUl0HoMWCoUGBgarV6/28fHB+EPiEOpkMhmBQBgwYICfn9+iRYvQLkdDUOjGDhkw87fffnv9+rVYLObxeJqvAcK+7OzsNWvWIM9C3Lx5U3cyiU4sESYmJhs2bCCRSFKp1MfHJyYmBq1KIEwRiUSZmZnIWFrDhg1zdHQEABgaGqJdl0ah3+mrkZER8twmAODp06cfPnxAuyIINfHx8QMGDEDGgV24cOGgQYPQrggd6McS6eoSeZba2Nj4559/fvz4MdoVQZojlUrPnTt38OBBAIClpeXTp09b0fPKaoLFnu9KS0stLS137NhhbW09ffp02I+7tkpJSenSpUtiYmJCQsL06dO14wYdlcDiX7ylpSUAYP78+Ww2Oz09HQDw5s0btIuCVInL5fbv3z8+Ph6522TZsmUwk5/D4tbya6tWrSosLLx06ZJUKiUSVfzUC6QxV69eDQsLu3z5MnL6HQ4x3pDWEUsAQFFRkY2NTUZGxuHDhxctWuTq6op2RVCTSKXSGzdutGvXzsPD4+zZs/3790dOrkKNaDWxrPPkyZOqqqrRo0dHR0dbWlp6eXmhXRGk3Pv3711dXU+cOFFaWrp06VIGg4F2Ra2HotVKSUn54Ycf4uLiFApFUVER2uVA/0lNTe3Ro8f169fRLqS1an1by3oEAgGZTN6wYUNWVlZISAhmx2DSelKpdPv27YWFhUePHi0pKTEzM4NnAb5bq49lnczMTCcnJyKR+OOPPw4dOtTPzw/tinRCWlpabGzs0qVLeTxeTEzMiBEjkJsroZbA4gWS7+Pi4oL8PC9durSsrAy5rBIeHt5Iv70TJ07UbI2tSURERCPvpqam5ufnAwBOnjyJjKlMpVInTJgAM6kS2rO1/BqHwzly5IiBgcHy5cvT09NtbGzodPrnE3Tp0qVdu3bnzp2DvYHVc+HChZCQELlcfufOnc9fR3rK2b9/f0pKyrZt26ysrNCrUauhfXCrIY8fP/b19X3y5IlCoRAIBAqFYty4cV27du3atevkyZM5HA7aBWLIxYsXBw8e3LVr1y5dutS9+Pbt27Fjx169elWhUFRXV6NaoPbT5q3l11gslpmZ2erVq0UiUVpaGpfLRV63t7c/c+ZMvW1pQ4S1stxUfnGeqLJELOBJDQyJVSxsDY+NJ+DweECmEMlUgpmdfpv2ZNt2TX0C4/LlyydPnqyqqkJ+sufOnVtVVbVu3brMzExDQ0NkfxVSN92KZZ0nT54sWbLk87tt7e3tz58/T6FQGvlU9mveq8fcikIhzcyQyiQTiASiPoFIIuIwdoSOA0AmlUtEMqlYJhPLOKU8AVfk1p3RfY
gR1aixs6Ph4eHHjx9HMokgk8kRERFmZmYaKRz6REdjOXbs2OLi4nov2tnZhYSEKE3mx8zaxxFsOY5gYsegGLe+sxoyqZxXISjLYjt5Unz9mEQ9JT8kV69ePXr06OeZRDaYL1680GClENDdWPbt21cgECAPeeLxeCKRyGAwiESivb39kSNHPp9SIQd3L1SUF4lN7RmGRq0vkPWwCziCqtreY5lO7esvy9ixY2UymVAorKmpkUqlyJoBADx//hylYnWXjsZyyJAhdDrd2NjYzMzM0dHRycmJyWQymUw7O7t6U149UKwg6jMdW3fn+fXkJRd1HcTo2OeLu+FqampKS0tZLFZ5eXlOTk5xcXFFRQWHwxGJRLdv30avWF2ko7Fson9PlCqIhgzLxg44W6mC16V9Rhm39dStzjhaC4ydrMCSqweLFHpkrcwkAMDey/LZ7eqsl7B/MyyCsVTu0dUKQDRgWGjzA4G2nhaPrlZUV4jRLgSqD8ZSiY+ZtUV5YlMHrTqeVMqus+Xtf8rRrgKqD8ZSicfX2Ea22p9JAIC+oR6OSHz7lIN2IdAXYCzry3lTowAEQ4au3CXLdDKJj2KjXQX0BRjL+l4/rjG2x+Jz9BXsj6t+7/nyzV3VzpZIIjAsKBnPuaqdLdQSMJZfENbKyj8KKa3/toFmIRuRM1P4aFcB/QfG8gu5qTy6uc5dyqOZGX58D2OJIbBbhy+UfxRTTNUVy6dJVx89ucDhlpsYW3fuONS3T6Cenn5R8fuDJ+fPnbH31t3DxaWZxkZWo4Yu7uDeD/kIj18VeWvv24zHekT9tm26qqkwPB5nZk8tyRNYtSGrqQmoWWAsv1BRJDYwVcuf5t3YE4+eXPDp5W9h1qa84sPDuJCKio/T/DYBACQSUcjldeNHrTQ2soqOPX4h/Pd1KyMpFCOJVHzszBI2+2O/PtNNjK2eJl5VR2EIqUTO58jUN3+oWWAsv1BbI6Vaq76TLg6Xdf/xmel+f3bsMBB5hUFjXo3aMW7kCuS/40et7OQ5BAAwcsjC4COzcvJfdvQY8CQhvKQ068dZB1ycewAAHO08d+73V3ltCIIekc+VqmnmUHPBWH6BRCboGag+llk5STKZNPTKhtArG/7/mgIAwKn5dCmfpPdpE21sZAUA4NawAABp6Y+sLJyRTAIA8Hg1duqnRyaKhXBriRUwll+o5UplYjmBqOIAcGsqAABzA/cYMcw/f93UxLa0LOfzV4gEPQCAXC4DAFRzSm2sNNR5vFggw8O+PDEDxvILhjSCVCwjGeqpdrZk8qfuSMzNmtHPP5VizONXNWFCFZBLpRSduYMC++AFki9QGESpSPX7cu2cuuFwuPjEsLpXROIGu8msY2Pl+rHoXTlLE+PwSsUyCh1uLbECxvILVm30BTWq7y+LaWrn4+3/LiPudMjKxBf/3nt4evveSYXFGY1/akDfmTgc/vDpBbGPzz5/eTPixt8qL6xObbXY3E63bqLAMrgT+4W2ntQ3cSXA2UTlcx47YrkRwzw+Ifx9dgKdxuzQ3pdBN2/8I0xT2/kz992I3h8de8KIYeHp7puZnajywgAA/CqhsQVJnwy3llgBeyeo78wfH6zaW+hTVHx4iWVlWZXOHsSug4zRLgT6BG4t6/PsQ8vJ4Js7NfhgV/T943EJl79+3dbKrbBE+X7pkvknLczbqKrCWzGHnyYpubWAbEATCGuUfmT5grNM0wa7eBXWCN27w/7RMQRuLZU4sibHpa89gaj8wLu2lisUKelrA4drcGUy6OYEgsp+Afm1HJFIyS2sCgXA4ZR/pJEC2B84pkyZrx/sCRZDYCyVeBNX/e6FyNKViXYhmpAWk7dwV1s8voFAQ2iAZ2KV6NjXSI8greUK0S5E7cqzKwZMMYOZxBoYS+UmLbH58LxULpWjXYgaVRZUm1niPbyx+My3joOxbNDM3x2KUs6slbwAAAEoSURBVEvRrkJdKvKr6XTZgMnwkBKLYCwbRKET/ZZap9
3NE/K0rctGdn6VHl48eNo3LpxCaIGnfL5BoVCc31JANaeb2DVpmD2ME9dKOKUcG3ti7zGmaNcCNQjGskniI9lvn3HMnU2MbWho1/KdpFI5K7tSUC3w9WM6eWpzt9RaAMayqQQ82eOIiqJsgT5Nn2pGoZoaqPz5L3UQCSQ1ZbX8Sr6BId69O7WjDzzB0wrAWDaPsFaW/47//gWfz5FWl4tJZALdnCziSdCu6ws4PJCK5GKhTCyQmTuQze3023Wi2LSF/fS0GjCW308qlvO5stoamVyGsXWIA3r6OAqdSKHDmytbJRhLCMIceIEEgjAHxhKCMAfGEoIwB8YSgjAHxhKCMAfGEoIw53/f3ohJ2E1W/QAAAABJRU5ErkJggg==",
|
69 |
+
"text/plain": [
|
70 |
+
"<IPython.core.display.Image object>"
|
71 |
+
]
|
72 |
+
},
|
73 |
+
"execution_count": 13,
|
74 |
+
"metadata": {},
|
75 |
+
"output_type": "execute_result"
|
76 |
+
}
|
77 |
+
],
|
78 |
+
"source": [
|
79 |
+
"graph_builder = StateGraph(DataState)\n",
|
80 |
+
"\n",
|
81 |
+
"# Nodes\n",
|
82 |
+
"graph_builder.add_node(\"chatbot_healthassistant\", chatbot_with_tools)\n",
|
83 |
+
"graph_builder.add_node(\"patient\", human_node)\n",
|
84 |
+
"graph_builder.add_node(\"documenting\", data_node)\n",
|
85 |
+
"\n",
|
86 |
+
"# Chatbot -> {ordering, tools, human, END}\n",
|
87 |
+
"graph_builder.add_conditional_edges(\"chatbot_healthassistant\", maybe_route_to_tools)\n",
|
88 |
+
"# Human -> {chatbot, END}\n",
|
89 |
+
"graph_builder.add_conditional_edges(\"patient\", maybe_exit_human_node)\n",
|
90 |
+
"# TestCase_Paintrek\n",
|
91 |
+
"# Tools (both kinds) always route back to chat afterwards.\n",
|
92 |
+
"graph_builder.add_edge(\"documenting\", \"chatbot_healthassistant\")\n",
|
93 |
+
"\n",
|
94 |
+
"graph_builder.add_edge(START, \"chatbot_healthassistant\")\n",
|
95 |
+
"graph_with_order_tools = graph_builder.compile()\n",
|
96 |
+
"\n",
|
97 |
+
"Image(graph_with_order_tools.get_graph().draw_mermaid_png())"
|
98 |
+
]
|
99 |
+
},
|
100 |
+
{
|
101 |
+
"cell_type": "code",
|
102 |
+
"execution_count": 14,
|
103 |
+
"metadata": {
|
104 |
+
"execution": {
|
105 |
+
"iopub.execute_input": "2025-01-29T20:09:38.185616Z",
|
106 |
+
"iopub.status.busy": "2025-01-29T20:09:38.185131Z",
|
107 |
+
"iopub.status.idle": "2025-01-29T20:10:08.474591Z",
|
108 |
+
"shell.execute_reply": "2025-01-29T20:10:08.472926Z",
|
109 |
+
"shell.execute_reply.started": "2025-01-29T20:09:38.185577Z"
|
110 |
+
},
|
111 |
+
"id": "NCRSgaBUfIHF",
|
112 |
+
"trusted": true
|
113 |
+
},
|
114 |
+
"outputs": [
|
115 |
+
{
|
116 |
+
"name": "stdout",
|
117 |
+
"output_type": "stream",
|
118 |
+
"text": [
|
119 |
+
"Model: Welcome to the Paintrek world. I am a health assistant, an interactive clinical recording system. I will ask you questions about your pain and related symptoms and record your responses. I will then store this information securely. At any time, you can type `q` to quit.\n"
|
120 |
+
]
|
121 |
+
},
|
122 |
+
{
|
123 |
+
"name": "stdout",
|
124 |
+
"output_type": "stream",
|
125 |
+
"text": [
|
126 |
+
"Model: I apologize for the format. Let's start again.\n",
|
127 |
+
"\n",
|
128 |
+
"Hello, my name is MedAssist, and I'll be guiding you through a structured data collection process to gather comprehensive patient information. Please feel free to share any concerns or symptoms you're experiencing.\n",
|
129 |
+
"\n",
|
130 |
+
"To begin, can you please tell me your full name?\n",
|
131 |
+
"Model: I think there's been a mistake! Let's start again from the beginning.\n",
|
132 |
+
"\n",
|
133 |
+
"Hello, my name is MedAssist, and I'll be guiding you through a structured data collection process to gather comprehensive patient information. Please feel free to share any concerns or symptoms you're experiencing.\n",
|
134 |
+
"\n",
|
135 |
+
"To confirm, your full name is indeed John Doe, as per our initial introduction. Is that correct?\n",
|
136 |
+
"Model: Thank you for correcting me, Frank. I've updated your patient information.\n",
|
137 |
+
"\n",
|
138 |
+
"Now, let's move on to the next step. Can you please tell me what brings you here today? What seems to be the main reason for your visit? Is it related to any specific symptoms or concerns you're experiencing?\n",
|
139 |
+
"Model: Thank you for sharing that with me, Frank. A headache can be quite uncomfortable.\n",
|
140 |
+
"\n",
|
141 |
+
"Can you please tell me more about your headache? Where exactly is the pain located on a scale of 0 to 10, with 0 being no pain at all and 10 being the worst possible pain?\n",
|
142 |
+
"\n",
|
143 |
+
"Also, how long have you been experiencing this headache? Was it sudden or did it come on gradually?\n",
|
144 |
+
"Model: I apologize if I lost you for a moment. Can you please tell me more about your headache? Where exactly is the pain located on a scale of 0 to 10, with 0 being no pain at all and 10 being the worst possible pain?\n",
|
145 |
+
"\n",
|
146 |
+
"Also, how long have you been experiencing this headache? Was it sudden or did it come on gradually?\n"
|
147 |
+
]
|
148 |
+
}
|
149 |
+
],
|
150 |
+
"source": [
|
151 |
+
"# The default recursion limit for traversing nodes is 25 - setting it higher\n",
|
152 |
+
"# means you can try a more complex order with multiple steps and round-trips.\n",
|
153 |
+
"config = {\"recursion_limit\": 500}\n",
|
154 |
+
"\n",
|
155 |
+
"# Uncomment this line to execute the graph:\n",
|
156 |
+
"state = graph_with_order_tools.invoke({\"messages\": []}, config)\n",
|
157 |
+
"\n",
|
158 |
+
"# pprint(state)"
|
159 |
+
]
|
160 |
+
},
|
161 |
+
{
|
162 |
+
"cell_type": "code",
|
163 |
+
"execution_count": 15,
|
164 |
+
"metadata": {
|
165 |
+
"trusted": true
|
166 |
+
},
|
167 |
+
"outputs": [
|
168 |
+
{
|
169 |
+
"data": {
|
170 |
+
"text/plain": [
|
171 |
+
"{'ID': {'name': 'Frank',\n",
|
172 |
+
" 'DOB': '1990-01-01',\n",
|
173 |
+
" 'gender': 'male',\n",
|
174 |
+
" 'contact': '123 Main St',\n",
|
175 |
+
" 'emergency_contact': 'Jane Smith'},\n",
|
176 |
+
" 'symptom': {'main_symptom': 'headache',\n",
|
177 |
+
" 'length': '',\n",
|
178 |
+
" 'symptom_length': '2 weeks'}}"
|
179 |
+
]
|
180 |
+
},
|
181 |
+
"execution_count": 15,
|
182 |
+
"metadata": {},
|
183 |
+
"output_type": "execute_result"
|
184 |
+
}
|
185 |
+
],
|
186 |
+
"source": [
|
187 |
+
"state[\"data\"]\n"
|
188 |
+
]
|
189 |
+
},
|
190 |
+
{
|
191 |
+
"cell_type": "code",
|
192 |
+
"execution_count": null,
|
193 |
+
"metadata": {},
|
194 |
+
"outputs": [],
|
195 |
+
"source": []
|
196 |
+
}
|
197 |
+
],
|
198 |
+
"metadata": {
|
199 |
+
"colab": {
|
200 |
+
"name": "day-3-building-an-agent-with-langgraph.ipynb",
|
201 |
+
"toc_visible": true
|
202 |
+
},
|
203 |
+
"kaggle": {
|
204 |
+
"accelerator": "none",
|
205 |
+
"dataSources": [],
|
206 |
+
"dockerImageVersionId": 30786,
|
207 |
+
"isGpuEnabled": false,
|
208 |
+
"isInternetEnabled": true,
|
209 |
+
"language": "python",
|
210 |
+
"sourceType": "notebook"
|
211 |
+
},
|
212 |
+
"kernelspec": {
|
213 |
+
"display_name": "paintrekbot",
|
214 |
+
"language": "python",
|
215 |
+
"name": "python3"
|
216 |
+
},
|
217 |
+
"language_info": {
|
218 |
+
"codemirror_mode": {
|
219 |
+
"name": "ipython",
|
220 |
+
"version": 3
|
221 |
+
},
|
222 |
+
"file_extension": ".py",
|
223 |
+
"mimetype": "text/x-python",
|
224 |
+
"name": "python",
|
225 |
+
"nbconvert_exporter": "python",
|
226 |
+
"pygments_lexer": "ipython3",
|
227 |
+
"version": "3.12.8"
|
228 |
+
}
|
229 |
+
},
|
230 |
+
"nbformat": 4,
|
231 |
+
"nbformat_minor": 4
|
232 |
+
}
|
testtext_tmp
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
I am Frank, DOB 1986-01-01, male, number 12345, emergency contact is Zoe, 67890.
|
2 |
+
|
3 |
+
curl -X POST http://141.211.127.171/api/generate -H "Content-Type: application/json" -d '{"model":"llama3.2:latest","prompt": "Hello"}'
|
tools.py
DELETED
@@ -1,31 +0,0 @@
|
|
1 |
-
from typing import Optional, Literal
|
2 |
-
from langchain_core.tools import tool
|
3 |
-
|
4 |
-
from modules.job import Job
|
5 |
-
from modules.resume import Resume
|
6 |
-
|
7 |
-
def process_job() -> Job:
|
8 |
-
"""Process job data."""
|
9 |
-
job = Job.mock()
|
10 |
-
return job
|
11 |
-
|
12 |
-
def process_resume() -> Resume:
|
13 |
-
"""Process resume data."""
|
14 |
-
resume = Resume.mock()
|
15 |
-
return resume
|
16 |
-
|
17 |
-
@tool
|
18 |
-
def get_job(field: Optional[Literal['title', 'company', 'location', 'salary', 'description', 'responsibilities', 'benefits', 'employment_type', 'posted_date']] = None) -> str:
|
19 |
-
"""Get job data."""
|
20 |
-
job = process_job()
|
21 |
-
if field:
|
22 |
-
return getattr(job, field)
|
23 |
-
return job.dict()
|
24 |
-
|
25 |
-
@tool
|
26 |
-
def get_resume(field: Optional[Literal['name', 'professional_summary', 'work_experience', 'education', 'skills']] = None) -> str:
|
27 |
-
"""Get resume data."""
|
28 |
-
resume = process_resume()
|
29 |
-
if field:
|
30 |
-
return getattr(resume, field)
|
31 |
-
return resume.dict()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|