{
"data": {
"edges": [
{
"animated": false,
"className": "",
"data": {
"sourceHandle": {
"dataType": "ChatInput",
"id": "ChatInput-sTYr3",
"name": "message",
"output_types": [
"Message"
]
},
"targetHandle": {
"fieldName": "input_value",
"id": "Agent-yGhD2",
"inputTypes": [
"Message"
],
"type": "str"
}
},
"id": "reactflow__edge-ChatInput-sTYr3{œdataTypeœ:œChatInputœ,œidœ:œChatInput-sTYr3œ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Agent-yGhD2{œfieldNameœ:œinput_valueœ,œidœ:œAgent-yGhD2œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"selected": false,
"source": "ChatInput-sTYr3",
"sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-sTYr3œ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}",
"target": "Agent-yGhD2",
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œAgent-yGhD2œ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
},
{
"animated": false,
"className": "",
"data": {
"sourceHandle": {
"dataType": "Agent",
"id": "Agent-yGhD2",
"name": "response",
"output_types": [
"Message"
]
},
"targetHandle": {
"fieldName": "input_value",
"id": "ChatOutput-Tkzec",
"inputTypes": [
"Message"
],
"type": "str"
}
},
"id": "reactflow__edge-Agent-yGhD2{œdataTypeœ:œAgentœ,œidœ:œAgent-yGhD2œ,œnameœ:œresponseœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-Tkzec{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-Tkzecœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
"selected": false,
"source": "Agent-yGhD2",
"sourceHandle": "{œdataTypeœ: œAgentœ, œidœ: œAgent-yGhD2œ, œnameœ: œresponseœ, œoutput_typesœ: [œMessageœ]}",
"target": "ChatOutput-Tkzec",
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-Tkzecœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}"
},
{
"animated": false,
"className": "",
"data": {
"sourceHandle": {
"dataType": "CalculatorTool",
"id": "CalculatorTool-IfWT1",
"name": "api_build_tool",
"output_types": [
"Tool"
]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-yGhD2",
"inputTypes": [
"Tool",
"BaseTool",
"StructuredTool"
],
"type": "other"
}
},
"id": "reactflow__edge-CalculatorTool-IfWT1{œdataTypeœ:œCalculatorToolœ,œidœ:œCalculatorTool-IfWT1œ,œnameœ:œapi_build_toolœ,œoutput_typesœ:[œToolœ]}-Agent-yGhD2{œfieldNameœ:œtoolsœ,œidœ:œAgent-yGhD2œ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}",
"selected": false,
"source": "CalculatorTool-IfWT1",
"sourceHandle": "{œdataTypeœ: œCalculatorToolœ, œidœ: œCalculatorTool-IfWT1œ, œnameœ: œapi_build_toolœ, œoutput_typesœ: [œToolœ]}",
"target": "Agent-yGhD2",
"targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-yGhD2œ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}"
},
{
"animated": false,
"className": "",
"data": {
"sourceHandle": {
"dataType": "URL",
"id": "URL-FOPyh",
"name": "component_as_tool",
"output_types": [
"Tool"
]
},
"targetHandle": {
"fieldName": "tools",
"id": "Agent-yGhD2",
"inputTypes": [
"Tool",
"BaseTool",
"StructuredTool"
],
"type": "other"
}
},
"id": "reactflow__edge-URL-FOPyh{œdataTypeœ:œURLœ,œidœ:œURL-FOPyhœ,œnameœ:œcomponent_as_toolœ,œoutput_typesœ:[œToolœ]}-Agent-yGhD2{œfieldNameœ:œtoolsœ,œidœ:œAgent-yGhD2œ,œinputTypesœ:[œToolœ,œBaseToolœ,œStructuredToolœ],œtypeœ:œotherœ}",
"selected": false,
"source": "URL-FOPyh",
"sourceHandle": "{œdataTypeœ: œURLœ, œidœ: œURL-FOPyhœ, œnameœ: œcomponent_as_toolœ, œoutput_typesœ: [œToolœ]}",
"target": "Agent-yGhD2",
"targetHandle": "{œfieldNameœ: œtoolsœ, œidœ: œAgent-yGhD2œ, œinputTypesœ: [œToolœ, œBaseToolœ, œStructuredToolœ], œtypeœ: œotherœ}"
}
],
"nodes": [
{
"data": {
"description": "Define the agent's instructions, then enter a task to complete using tools.",
"display_name": "Agent",
"id": "Agent-yGhD2",
"node": {
"base_classes": [
"Message"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
"description": "Define the agent's instructions, then enter a task to complete using tools.",
"display_name": "Agent",
"documentation": "",
"edited": false,
"field_order": [
"agent_llm",
"max_tokens",
"model_kwargs",
"json_mode",
"output_schema",
"model_name",
"openai_api_base",
"api_key",
"temperature",
"seed",
"output_parser",
"system_prompt",
"tools",
"input_value",
"handle_parsing_errors",
"verbose",
"max_iterations",
"agent_description",
"memory",
"sender",
"sender_name",
"n_messages",
"session_id",
"order",
"template",
"add_current_date_tool"
],
"frozen": false,
"icon": "bot",
"legacy": false,
"lf_version": "1.1.0.dev4",
"metadata": {},
"output_types": [],
"outputs": [
{
"cache": true,
"display_name": "Response",
"method": "message_response",
"name": "response",
"selected": "Message",
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"add_current_date_tool": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "Current Date",
"dynamic": false,
"info": "If true, will add a tool to the agent that returns the current date.",
"list": false,
"name": "add_current_date_tool",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": true
},
"agent_description": {
"_input_type": "MultilineInput",
"advanced": true,
"display_name": "Agent Description",
"dynamic": false,
"info": "The description of the agent. This is only used when in Tool Mode. Defaults to 'A helpful assistant with access to the following tools:' and tools are added dynamically.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"multiline": true,
"name": "agent_description",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "A helpful assistant with access to the following tools:"
},
"agent_llm": {
"_input_type": "DropdownInput",
"advanced": false,
"combobox": false,
"display_name": "Model Provider",
"dynamic": false,
"info": "The provider of the language model that the agent will use to generate responses.",
"input_types": [],
"name": "agent_llm",
"options": [
"Amazon Bedrock",
"Anthropic",
"Azure OpenAI",
"Groq",
"NVIDIA",
"OpenAI",
"Custom"
],
"placeholder": "",
"real_time_refresh": true,
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "OpenAI"
},
"api_key": {
"_input_type": "SecretStrInput",
"advanced": false,
"display_name": "OpenAI API Key",
"dynamic": false,
"info": "The OpenAI API Key to use for the OpenAI model.",
"input_types": [
"Message"
],
"load_from_db": false,
"name": "api_key",
"password": true,
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"type": "str",
"value": ""
},
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "from langchain_core.tools import StructuredTool\n\nfrom langflow.base.agents.agent import LCToolsAgentComponent\nfrom langflow.base.models.model_input_constants import (\n    ALL_PROVIDER_FIELDS,\n    MODEL_PROVIDERS_DICT,\n)\nfrom langflow.base.models.model_utils import get_model_name\nfrom langflow.components.helpers import CurrentDateComponent\nfrom langflow.components.helpers.memory import MemoryComponent\nfrom langflow.components.langchain_utilities.tool_calling import (\n    ToolCallingAgentComponent,\n)\nfrom langflow.io import BoolInput, DropdownInput, MultilineInput, Output\nfrom langflow.schema.dotdict import dotdict\nfrom langflow.schema.message import Message\n\n\ndef set_advanced_true(component_input):\n    component_input.advanced = True\n    return component_input\n\n\nclass AgentComponent(ToolCallingAgentComponent):\n    display_name: str = \"Agent\"\n    description: str = \"Define the agent's instructions, then enter a task to complete using tools.\"\n    icon = \"bot\"\n    beta = False\n    name = \"Agent\"\n\n    memory_inputs = [set_advanced_true(component_input) for component_input in MemoryComponent().inputs]\n\n    inputs = [\n        DropdownInput(\n            name=\"agent_llm\",\n            display_name=\"Model Provider\",\n            info=\"The provider of the language model that the agent will use to generate responses.\",\n            options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n            value=\"OpenAI\",\n            real_time_refresh=True,\n            input_types=[],\n        ),\n        *MODEL_PROVIDERS_DICT[\"OpenAI\"][\"inputs\"],\n        MultilineInput(\n            name=\"system_prompt\",\n            display_name=\"Agent Instructions\",\n            info=\"System Prompt: Initial instructions and context provided to guide the agent's behavior.\",\n            value=\"You are a helpful assistant that can use tools to answer questions and perform tasks.\",\n            advanced=False,\n        ),\n        *LCToolsAgentComponent._base_inputs,\n        *memory_inputs,\n        BoolInput(\n            name=\"add_current_date_tool\",\n            display_name=\"Current Date\",\n            advanced=True,\n            info=\"If true, will add a tool to the agent that returns the current date.\",\n            value=True,\n        ),\n    ]\n    outputs = [Output(name=\"response\", display_name=\"Response\", method=\"message_response\")]\n\n    async def message_response(self) -> Message:\n        llm_model, display_name = self.get_llm()\n        self.model_name = get_model_name(llm_model, display_name=display_name)\n        if llm_model is None:\n            msg = \"No language model selected\"\n            raise ValueError(msg)\n        self.chat_history = await self.get_memory_data()\n\n        if self.add_current_date_tool:\n            if not isinstance(self.tools, list):  # type: ignore[has-type]\n                self.tools = []\n            # Convert CurrentDateComponent to a StructuredTool\n            current_date_tool = CurrentDateComponent().to_toolkit()[0]\n            if isinstance(current_date_tool, StructuredTool):\n                self.tools.append(current_date_tool)\n            else:\n                msg = \"CurrentDateComponent must be converted to a StructuredTool\"\n                raise ValueError(msg)\n\n        if not self.tools:\n            msg = \"Tools are required to run the agent.\"\n            raise ValueError(msg)\n        self.set(\n            llm=llm_model,\n            tools=self.tools,\n            chat_history=self.chat_history,\n            input_value=self.input_value,\n            system_prompt=self.system_prompt,\n        )\n        agent = self.create_agent_runnable()\n        return await self.run_agent(agent)\n\n    async def get_memory_data(self):\n        memory_kwargs = {\n            component_input.name: getattr(self, f\"{component_input.name}\") for component_input in self.memory_inputs\n        }\n\n        return await MemoryComponent().set(**memory_kwargs).retrieve_messages()\n\n    def get_llm(self):\n        if isinstance(self.agent_llm, str):\n            try:\n                provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n                if provider_info:\n                    component_class = provider_info.get(\"component_class\")\n                    display_name = component_class.display_name\n                    inputs = provider_info.get(\"inputs\")\n                    prefix = provider_info.get(\"prefix\", \"\")\n                    return (\n                        self._build_llm_model(component_class, inputs, prefix),\n                        display_name,\n                    )\n            except Exception as e:\n                msg = f\"Error building {self.agent_llm} language model\"\n                raise ValueError(msg) from e\n        return self.agent_llm, None\n\n    def _build_llm_model(self, component, inputs, prefix=\"\"):\n        model_kwargs = {input_.name: getattr(self, f\"{prefix}{input_.name}\") for input_ in inputs}\n        return component.set(**model_kwargs).build_model()\n\n    def delete_fields(self, build_config: dotdict, fields: dict | list[str]) -> None:\n        \"\"\"Delete specified fields from build_config.\"\"\"\n        for field in fields:\n            build_config.pop(field, None)\n\n    def update_input_types(self, build_config: dotdict) -> dotdict:\n        \"\"\"Update input types for all fields in build_config.\"\"\"\n        for key, value in build_config.items():\n            if isinstance(value, dict):\n                if value.get(\"input_types\") is None:\n                    build_config[key][\"input_types\"] = []\n            elif hasattr(value, \"input_types\") and value.input_types is None:\n                value.input_types = []\n        return build_config\n\n    def update_build_config(self, build_config: dotdict, field_value: str, field_name: str | None = None) -> dotdict:\n        # Iterate over all providers in the MODEL_PROVIDERS_DICT\n        # Existing logic for updating build_config\n        if field_name == \"agent_llm\":\n            provider_info = MODEL_PROVIDERS_DICT.get(field_value)\n            if provider_info:\n                component_class = provider_info.get(\"component_class\")\n                if component_class and hasattr(component_class, \"update_build_config\"):\n                    # Call the component class's update_build_config method\n                    build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n            provider_configs: dict[str, tuple[dict, list[dict]]] = {\n                provider: (\n                    MODEL_PROVIDERS_DICT[provider][\"fields\"],\n                    [\n                        MODEL_PROVIDERS_DICT[other_provider][\"fields\"]\n                        for other_provider in MODEL_PROVIDERS_DICT\n                        if other_provider != provider\n                    ],\n                )\n                for provider in MODEL_PROVIDERS_DICT\n            }\n            if field_value in provider_configs:\n                fields_to_add, fields_to_delete = provider_configs[field_value]\n\n                # Delete fields from other providers\n                for fields in fields_to_delete:\n                    self.delete_fields(build_config, fields)\n\n                # Add provider-specific fields\n                if field_value == \"OpenAI\" and not any(field in build_config for field in fields_to_add):\n                    build_config.update(fields_to_add)\n                else:\n                    build_config.update(fields_to_add)\n                # Reset input types for agent_llm\n                build_config[\"agent_llm\"][\"input_types\"] = []\n            elif field_value == \"Custom\":\n                # Delete all provider fields\n                self.delete_fields(build_config, ALL_PROVIDER_FIELDS)\n                # Update with custom component\n                custom_component = DropdownInput(\n                    name=\"agent_llm\",\n                    display_name=\"Language Model\",\n                    options=[*sorted(MODEL_PROVIDERS_DICT.keys()), \"Custom\"],\n                    value=\"Custom\",\n                    real_time_refresh=True,\n                    input_types=[\"LanguageModel\"],\n                )\n                build_config.update({\"agent_llm\": custom_component.to_dict()})\n            # Update input types for all fields\n            build_config = self.update_input_types(build_config)\n\n            # Validate required keys\n            default_keys = [\n                \"code\",\n                \"_type\",\n                \"agent_llm\",\n                \"tools\",\n                \"input_value\",\n                \"add_current_date_tool\",\n                \"system_prompt\",\n                \"agent_description\",\n                \"max_iterations\",\n                \"handle_parsing_errors\",\n                \"verbose\",\n            ]\n            missing_keys = [key for key in default_keys if key not in build_config]\n            if missing_keys:\n                msg = f\"Missing required keys in build_config: {missing_keys}\"\n                raise ValueError(msg)\n        if isinstance(self.agent_llm, str) and self.agent_llm in MODEL_PROVIDERS_DICT:\n            provider_info = MODEL_PROVIDERS_DICT.get(self.agent_llm)\n            if provider_info:\n                component_class = provider_info.get(\"component_class\")\n                prefix = provider_info.get(\"prefix\")\n                if component_class and hasattr(component_class, \"update_build_config\"):\n                    # Call each component class's update_build_config method\n                    # remove the prefix from the field_name\n                    if isinstance(field_name, str) and isinstance(prefix, str):\n                        field_name = field_name.replace(prefix, \"\")\n                    build_config = component_class.update_build_config(build_config, field_value, field_name)\n\n        return build_config\n"
},
"handle_parsing_errors": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "Handle Parse Errors",
"dynamic": false,
"info": "Should the Agent fix errors when reading user input for better processing?",
"list": false,
"name": "handle_parsing_errors",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": true
},
"input_value": {
"_input_type": "MessageTextInput",
"advanced": false,
"display_name": "Input",
"dynamic": false,
"info": "The input provided by the user for the agent to process.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "input_value",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": true,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"json_mode": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "JSON Mode",
"dynamic": false,
"info": "If True, it will output JSON regardless of passing a schema.",
"list": false,
"name": "json_mode",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": false
},
"max_iterations": {
"_input_type": "IntInput",
"advanced": true,
"display_name": "Max Iterations",
"dynamic": false,
"info": "The maximum number of attempts the agent can make to complete its task before it stops.",
"list": false,
"name": "max_iterations",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "int",
"value": 15
},
"max_tokens": {
"_input_type": "IntInput",
"advanced": true,
"display_name": "Max Tokens",
"dynamic": false,
"info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
"list": false,
"name": "max_tokens",
"placeholder": "",
"range_spec": {
"max": 128000,
"min": 0,
"step": 0.1,
"step_type": "float"
},
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "int",
"value": ""
},
"memory": {
"_input_type": "HandleInput",
"advanced": true,
"display_name": "External Memory",
"dynamic": false,
"info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
"input_types": [
"BaseChatMessageHistory"
],
"list": false,
"name": "memory",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "other",
"value": ""
},
"model_kwargs": {
"_input_type": "DictInput",
"advanced": true,
"display_name": "Model Kwargs",
"dynamic": false,
"info": "Additional keyword arguments to pass to the model.",
"list": false,
"name": "model_kwargs",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"type": "dict",
"value": {}
},
"model_name": {
"_input_type": "DropdownInput",
"advanced": false,
"combobox": false,
"display_name": "Model Name",
"dynamic": false,
"info": "",
"name": "model_name",
"options": [
"gpt-4o-mini",
"gpt-4o",
"gpt-4-turbo",
"gpt-4-turbo-preview",
"gpt-4",
"gpt-3.5-turbo",
"gpt-3.5-turbo-0125"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "gpt-4o-mini"
},
"n_messages": {
"_input_type": "IntInput",
"advanced": true,
"display_name": "Number of Messages",
"dynamic": false,
"info": "Number of messages to retrieve.",
"list": false,
"name": "n_messages",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "int",
"value": 100
},
"openai_api_base": {
"_input_type": "StrInput",
"advanced": true,
"display_name": "OpenAI API Base",
"dynamic": false,
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.",
"list": false,
"load_from_db": false,
"name": "openai_api_base",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"order": {
"_input_type": "DropdownInput",
"advanced": true,
"combobox": false,
"display_name": "Order",
"dynamic": false,
"info": "Order of the messages.",
"name": "order",
"options": [
"Ascending",
"Descending"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "Ascending"
},
"output_parser": {
"_input_type": "HandleInput",
"advanced": true,
"display_name": "Output Parser",
"dynamic": false,
"info": "The parser to use to parse the output of the model",
"input_types": [
"OutputParser"
],
"list": false,
"name": "output_parser",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "other",
"value": ""
},
"output_schema": {
"_input_type": "DictInput",
"advanced": true,
"display_name": "Schema",
"dynamic": false,
"info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]",
"list": true,
"name": "output_schema",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"type": "dict",
"value": {}
},
"seed": {
"_input_type": "IntInput",
"advanced": true,
"display_name": "Seed",
"dynamic": false,
"info": "The seed controls the reproducibility of the job.",
"list": false,
"name": "seed",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "int",
"value": 1
},
"sender": {
"_input_type": "DropdownInput",
"advanced": true,
"combobox": false,
"display_name": "Sender Type",
"dynamic": false,
"info": "Filter by sender type.",
"name": "sender",
"options": [
"Machine",
"User",
"Machine and User"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "Machine and User"
},
"sender_name": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Sender Name",
"dynamic": false,
"info": "Filter by sender name.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "sender_name",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"session_id": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "session_id",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"system_prompt": {
"_input_type": "MultilineInput",
"advanced": false,
"display_name": "Agent Instructions",
"dynamic": false,
"info": "System Prompt: Initial instructions and context provided to guide the agent's behavior.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"multiline": true,
"name": "system_prompt",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "You are a helpful assistant that can use tools to answer questions and perform tasks.\nUse markdown to format your answer, properly embedding images and urls."
},
"temperature": {
"_input_type": "FloatInput",
"advanced": true,
"display_name": "Temperature",
"dynamic": false,
"info": "",
"list": false,
"name": "temperature",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "float",
"value": 0.1
},
"template": {
"_input_type": "MultilineInput",
"advanced": true,
"display_name": "Template",
"dynamic": false,
"info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"multiline": true,
"name": "template",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "{sender_name}: {text}"
},
"tools": {
"_input_type": "HandleInput",
"advanced": false,
"display_name": "Tools",
"dynamic": false,
"info": "These are the tools that the agent can use to help with tasks.",
"input_types": [
"Tool",
"BaseTool",
"StructuredTool"
],
"list": true,
"name": "tools",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "other",
"value": ""
},
"verbose": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "Verbose",
"dynamic": false,
"info": "",
"list": false,
"name": "verbose",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": true
}
},
"tool_mode": false
},
"type": "Agent"
},
"dragging": false,
"height": 648,
"id": "Agent-yGhD2",
"position": {
"x": 2306.5155821255557,
"y": 332.5289520982579
},
"positionAbsolute": {
"x": 2306.5155821255557,
"y": 332.5289520982579
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"id": "ChatInput-sTYr3",
"node": {
"base_classes": [
"Message"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
"description": "Get chat inputs from the Playground.",
"display_name": "Chat Input",
"documentation": "",
"edited": false,
"field_order": [
"input_value",
"should_store_message",
"sender",
"sender_name",
"session_id",
"files",
"background_color",
"chat_icon",
"text_color"
],
"frozen": false,
"icon": "MessagesSquare",
"legacy": false,
"lf_version": "1.1.0.dev4",
"metadata": {},
"output_types": [],
"outputs": [
{
"cache": true,
"display_name": "Message",
"method": "message_response",
"name": "message",
"selected": "Message",
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"background_color": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "background_color",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"chat_icon": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "chat_icon",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": background_color, \"text_color\": text_color, \"icon\": icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"files": {
"_input_type": "FileInput",
"advanced": true,
"display_name": "Files",
"dynamic": false,
"fileTypes": [
"txt",
"md",
"mdx",
"csv",
"json",
"yaml",
"yml",
"xml",
"html",
"htm",
"pdf",
"docx",
"py",
"sh",
"sql",
"js",
"ts",
"tsx",
"jpg",
"jpeg",
"png",
"bmp",
"image"
],
"file_path": "",
"info": "Files to be sent with the message.",
"list": true,
"name": "files",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "file",
"value": ""
},
"input_value": {
"_input_type": "MultilineInput",
"advanced": false,
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as input.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"multiline": true,
"name": "input_value",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"sender": {
"_input_type": "DropdownInput",
"advanced": true,
"combobox": false,
"display_name": "Sender Type",
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "User"
},
"sender_name": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "sender_name",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "User"
},
"session_id": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "session_id",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"should_store_message": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "Store Messages",
"dynamic": false,
"info": "Store the message in the history.",
"list": false,
"name": "should_store_message",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": true
},
"text_color": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "text_color",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
}
},
"tool_mode": false
},
"type": "ChatInput"
},
"dragging": false,
"height": 234,
"id": "ChatInput-sTYr3",
"position": {
"x": 1939.9269765193949,
"y": 965.3667152408464
},
"positionAbsolute": {
"x": 1939.9269765193949,
"y": 965.3667152408464
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"description": "Display a chat message in the Playground.",
"display_name": "Chat Output",
"id": "ChatOutput-Tkzec",
"node": {
"base_classes": [
"Message"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
"description": "Display a chat message in the Playground.",
"display_name": "Chat Output",
"documentation": "",
"edited": false,
"field_order": [
"input_value",
"should_store_message",
"sender",
"sender_name",
"session_id",
"data_template",
"background_color",
"chat_icon",
"text_color"
],
"frozen": false,
"icon": "MessagesSquare",
"legacy": false,
"lf_version": "1.1.0.dev4",
"metadata": {},
"output_types": [],
"outputs": [
{
"cache": true,
"display_name": "Message",
"method": "message_response",
"name": "message",
"selected": "Message",
"types": [
"Message"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"background_color": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Background Color",
"dynamic": false,
"info": "The background color of the icon.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "background_color",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"chat_icon": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Icon",
"dynamic": false,
"info": "The icon of the message.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "chat_icon",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n"
},
"data_template": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Data Template",
"dynamic": false,
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "data_template",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "{text}"
},
"input_value": {
"_input_type": "MessageInput",
"advanced": false,
"display_name": "Text",
"dynamic": false,
"info": "Message to be passed as output.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "input_value",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"sender": {
"_input_type": "DropdownInput",
"advanced": true,
"combobox": false,
"display_name": "Sender Type",
"dynamic": false,
"info": "Type of sender.",
"name": "sender",
"options": [
"Machine",
"User"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "Machine"
},
"sender_name": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Sender Name",
"dynamic": false,
"info": "Name of the sender.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "sender_name",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": "AI"
},
"session_id": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Session ID",
"dynamic": false,
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "session_id",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
},
"should_store_message": {
"_input_type": "BoolInput",
"advanced": true,
"display_name": "Store Messages",
"dynamic": false,
"info": "Store the message in the history.",
"list": false,
"name": "should_store_message",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"trace_as_metadata": true,
"type": "bool",
"value": true
},
"text_color": {
"_input_type": "MessageTextInput",
"advanced": true,
"display_name": "Text Color",
"dynamic": false,
"info": "The text color of the name",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "text_color",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
}
},
"tool_mode": false
},
"type": "ChatOutput"
},
"dragging": false,
"height": 234,
"id": "ChatOutput-Tkzec",
"position": {
"x": 2683.9938458383212,
"y": 556.5828467235146
},
"positionAbsolute": {
"x": 2683.9938458383212,
"y": 556.5828467235146
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"id": "URL-FOPyh",
"node": {
"base_classes": [
"Data",
"Message"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
"description": "Fetch content from one or more URLs.",
"display_name": "URL",
"documentation": "",
"edited": false,
"field_order": [
"urls",
"format"
],
"frozen": false,
"icon": "layout-template",
"legacy": false,
"lf_version": "1.1.0.dev4",
"metadata": {},
"output_types": [],
"outputs": [
{
"cache": true,
"display_name": "Toolset",
"hidden": null,
"method": "to_toolkit",
"name": "component_as_tool",
"required_inputs": null,
"selected": "Tool",
"types": [
"Tool"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "import re\n\nfrom langchain_community.document_loaders import AsyncHtmlLoader, WebBaseLoader\n\nfrom langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\n\n\nclass URLComponent(Component):\n display_name = \"URL\"\n description = \"Fetch content from one or more URLs.\"\n icon = \"layout-template\"\n name = \"URL\"\n\n inputs = [\n MessageTextInput(\n name=\"urls\",\n display_name=\"URLs\",\n info=\"Enter one or more URLs, by clicking the '+' button.\",\n is_list=True,\n tool_mode=True,\n ),\n DropdownInput(\n name=\"format\",\n display_name=\"Output Format\",\n info=\"Output Format. Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.\",\n options=[\"Text\", \"Raw HTML\"],\n value=\"Text\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"fetch_content\"),\n Output(display_name=\"Text\", name=\"text\", method=\"fetch_content_text\"),\n ]\n\n def ensure_url(self, string: str) -> str:\n \"\"\"Ensures the given string is a URL by adding 'http://' if it doesn't start with 'http://' or 'https://'.\n\n Raises an error if the string is not a valid URL.\n\n Parameters:\n string (str): The string to be checked and possibly modified.\n\n Returns:\n str: The modified string that is ensured to be a URL.\n\n Raises:\n ValueError: If the string is not a valid URL.\n \"\"\"\n if not string.startswith((\"http://\", \"https://\")):\n string = \"http://\" + string\n\n # Basic URL validation regex\n url_regex = re.compile(\n r\"^(https?:\\/\\/)?\" # optional protocol\n r\"(www\\.)?\" # optional www\n r\"([a-zA-Z0-9.-]+)\" # domain\n r\"(\\.[a-zA-Z]{2,})?\" # top-level domain\n r\"(:\\d+)?\" # optional port\n r\"(\\/[^\\s]*)?$\", # optional path\n re.IGNORECASE,\n )\n\n if not url_regex.match(string):\n msg = f\"Invalid URL: {string}\"\n raise ValueError(msg)\n\n return string\n\n def fetch_content(self) -> list[Data]:\n urls = [self.ensure_url(url.strip()) for url in self.urls if url.strip()]\n if self.format == \"Raw HTML\":\n loader = AsyncHtmlLoader(web_path=urls, encoding=\"utf-8\")\n else:\n loader = WebBaseLoader(web_paths=urls, encoding=\"utf-8\")\n docs = loader.load()\n data = [Data(text=doc.page_content, **doc.metadata) for doc in docs]\n self.status = data\n return data\n\n def fetch_content_text(self) -> Message:\n data = self.fetch_content()\n\n result_string = data_to_text(\"{text}\", data)\n self.status = result_string\n return Message(text=result_string)\n"
},
"format": {
"_input_type": "DropdownInput",
"advanced": false,
"combobox": false,
"display_name": "Output Format",
"dynamic": false,
"info": "Output Format. Use 'Text' to extract the text from the HTML or 'Raw HTML' for the raw HTML content.",
"name": "format",
"options": [
"Text",
"Raw HTML"
],
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": false,
"trace_as_metadata": true,
"type": "str",
"value": "Text"
},
"urls": {
"_input_type": "MessageTextInput",
"advanced": false,
"display_name": "URLs",
"dynamic": false,
"info": "Enter one or more URLs, by clicking the '+' button.",
"input_types": [
"Message"
],
"list": true,
"load_from_db": false,
"name": "urls",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": true,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
}
},
"tool_mode": false
},
"type": "URL"
},
"dragging": false,
"height": 320,
"id": "URL-FOPyh",
"position": {
"x": 1942.4405475324484,
"y": 274.6204436838327
},
"positionAbsolute": {
"x": 1942.4405475324484,
"y": 274.6204436838327
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"id": "CalculatorTool-IfWT1",
"node": {
"base_classes": [
"Data",
"Tool"
],
"beta": false,
"conditional_paths": [],
"custom_fields": {},
"description": "Perform basic arithmetic operations on a given expression.",
"display_name": "Calculator",
"documentation": "",
"edited": false,
"field_order": [
"expression"
],
"frozen": false,
"icon": "calculator",
"legacy": false,
"lf_version": "1.1.0.dev4",
"metadata": {},
"output_types": [],
"outputs": [
{
"cache": true,
"display_name": "Data",
"method": "run_model",
"name": "api_run_model",
"required_inputs": [],
"selected": "Data",
"types": [
"Data"
],
"value": "__UNDEFINED__"
},
{
"cache": true,
"display_name": "Tool",
"method": "build_tool",
"name": "api_build_tool",
"required_inputs": [],
"selected": "Tool",
"types": [
"Tool"
],
"value": "__UNDEFINED__"
}
],
"pinned": false,
"template": {
"_type": "Component",
"code": {
"advanced": true,
"dynamic": true,
"fileTypes": [],
"file_path": "",
"info": "",
"list": false,
"load_from_db": false,
"multiline": true,
"name": "code",
"password": false,
"placeholder": "",
"required": true,
"show": true,
"title_case": false,
"type": "code",
"value": "import ast\nimport operator\n\nfrom langchain.tools import StructuredTool\nfrom langchain_core.tools import ToolException\nfrom loguru import logger\nfrom pydantic import BaseModel, Field\n\nfrom langflow.base.langchain_utilities.model import LCToolComponent\nfrom langflow.field_typing import Tool\nfrom langflow.inputs import MessageTextInput\nfrom langflow.schema import Data\n\n\nclass CalculatorToolComponent(LCToolComponent):\n display_name = \"Calculator\"\n description = \"Perform basic arithmetic operations on a given expression.\"\n icon = \"calculator\"\n name = \"CalculatorTool\"\n\n inputs = [\n MessageTextInput(\n name=\"expression\",\n display_name=\"Expression\",\n info=\"The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').\",\n ),\n ]\n\n class CalculatorToolSchema(BaseModel):\n expression: str = Field(..., description=\"The arithmetic expression to evaluate.\")\n\n def run_model(self) -> list[Data]:\n return self._evaluate_expression(self.expression)\n\n def build_tool(self) -> Tool:\n return StructuredTool.from_function(\n name=\"calculator\",\n description=\"Evaluate basic arithmetic expressions. Input should be a string containing the expression.\",\n func=self._eval_expr_with_error,\n args_schema=self.CalculatorToolSchema,\n )\n\n def _eval_expr(self, node):\n # Define the allowed operators\n operators = {\n ast.Add: operator.add,\n ast.Sub: operator.sub,\n ast.Mult: operator.mul,\n ast.Div: operator.truediv,\n ast.Pow: operator.pow,\n }\n if isinstance(node, ast.Num):\n return node.n\n if isinstance(node, ast.BinOp):\n return operators[type(node.op)](self._eval_expr(node.left), self._eval_expr(node.right))\n if isinstance(node, ast.UnaryOp):\n return operators[type(node.op)](self._eval_expr(node.operand))\n if isinstance(node, ast.Call):\n msg = (\n \"Function calls like sqrt(), sin(), cos() etc. are not supported. \"\n \"Only basic arithmetic operations (+, -, *, /, **) are allowed.\"\n )\n raise TypeError(msg)\n msg = f\"Unsupported operation or expression type: {type(node).__name__}\"\n raise TypeError(msg)\n\n def _eval_expr_with_error(self, expression: str) -> list[Data]:\n try:\n return self._evaluate_expression(expression)\n except Exception as e:\n raise ToolException(str(e)) from e\n\n def _evaluate_expression(self, expression: str) -> list[Data]:\n try:\n # Parse the expression and evaluate it\n tree = ast.parse(expression, mode=\"eval\")\n result = self._eval_expr(tree.body)\n\n # Format the result to a reasonable number of decimal places\n formatted_result = f\"{result:.6f}\".rstrip(\"0\").rstrip(\".\")\n\n self.status = formatted_result\n return [Data(data={\"result\": formatted_result})]\n\n except (SyntaxError, TypeError, KeyError) as e:\n error_message = f\"Invalid expression: {e}\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n except ZeroDivisionError:\n error_message = \"Error: Division by zero\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n except Exception as e: # noqa: BLE001\n logger.opt(exception=True).debug(\"Error evaluating expression\")\n error_message = f\"Error: {e}\"\n self.status = error_message\n return [Data(data={\"error\": error_message, \"input\": expression})]\n"
},
"expression": {
"_input_type": "MessageTextInput",
"advanced": false,
"display_name": "Expression",
"dynamic": false,
"info": "The arithmetic expression to evaluate (e.g., '4*4*(33/22)+12-20').",
"input_types": [
"Message"
],
"list": false,
"load_from_db": false,
"name": "expression",
"placeholder": "",
"required": false,
"show": true,
"title_case": false,
"tool_mode": true,
"trace_as_input": true,
"trace_as_metadata": true,
"type": "str",
"value": ""
}
},
"tool_mode": false
},
"type": "CalculatorTool"
},
"dragging": false,
"height": 302,
"id": "CalculatorTool-IfWT1",
"position": {
"x": 1938.2271796804105,
"y": 629.2915039696703
},
"positionAbsolute": {
"x": 1938.2271796804105,
"y": 629.2915039696703
},
"selected": false,
"type": "genericNode",
"width": 320
},
{
"data": {
"id": "note-QuJAF",
"node": {
"description": "# 📖 README\nRun an Agent with URL and Calculator tools available for its use. \nThe Agent decides which tool to use to solve a problem.\n## Quick start\n\n1. Add your OpenAI API key to the Agent.\n2. Open the Playground and chat with the Agent. Request some information about a recipe, and then ask to add two numbers together. In the responses, the Agent will use different tools to solve different problems.\n\n## Next steps\nConnect more tools to the Agent to create your perfect assistant.\n\nFor more, see the [Langflow docs](https://docs.langflow.org/agents-tool-calling-agent-component).",
"display_name": "",
"documentation": "",
"template": {
"backgroundColor": "neutral"
}
},
"type": "note"
},
"dragging": false,
"height": 571,
"id": "note-QuJAF",
"position": {
"x": 1314.267618793912,
"y": 323.12512509078374
},
"positionAbsolute": {
"x": 1314.267618793912,
"y": 323.12512509078374
},
"resizing": false,
"selected": false,
"style": {
"height": 571,
"width": 600
},
"type": "noteNode",
"width": 600
},
{
"data": {
"id": "note-uxxuk",
"node": {
"description": "### 💡 Add your OpenAI API key here👇",
"display_name": "",
"documentation": "",
"template": {
"backgroundColor": "transparent"
}
},
"type": "note"
},
"dragging": false,
"height": 324,
"id": "note-uxxuk",
"position": {
"x": 2302.9552933034743,
"y": 288.4851054994825
},
"positionAbsolute": {
"x": 2302.9552933034743,
"y": 288.4851054994825
},
"resizing": false,
"selected": false,
"style": {
"height": 324,
"width": 330
},
"type": "noteNode",
"width": 330
}
],
"viewport": {
"x": -813.150138361608,
"y": -64.1501688971494,
"zoom": 0.6612152367588026
}
},
"description": "starterProjects.simpleAgent.description",
"endpoint_name": null,
"id": "263070d6-3560-4f4d-a6c4-003f4a586e6e",
"is_component": false,
"last_tested_version": "1.1.0",
"name": "starterProjects.simpleAgent.name",
"tags": [
"assistants",
"agents"
]
}