{
"data": { | |
"edges": [ | |
{ | |
"animated": false, | |
"className": "", | |
"data": { | |
"sourceHandle": { | |
"dataType": "ChatInput", | |
"id": "ChatInput-1lWBj", | |
"name": "message", | |
"output_types": [ | |
"Message" | |
] | |
}, | |
"targetHandle": { | |
"fieldName": "input_value", | |
"id": "OpenAIModel-HIx8w", | |
"inputTypes": [ | |
"Message" | |
], | |
"type": "str" | |
} | |
}, | |
"id": "reactflow__edge-ChatInput-1lWBj{œdataTypeœ:œChatInputœ,œidœ:œChatInput-1lWBjœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-HIx8w{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-HIx8wœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"source": "ChatInput-1lWBj", | |
"sourceHandle": "{œdataTypeœ: œChatInputœ, œidœ: œChatInput-1lWBjœ, œnameœ: œmessageœ, œoutput_typesœ: [œMessageœ]}", | |
"target": "OpenAIModel-HIx8w", | |
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œOpenAIModel-HIx8wœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" | |
}, | |
{ | |
"animated": false, | |
"className": "", | |
"data": { | |
"sourceHandle": { | |
"dataType": "OpenAIModel", | |
"id": "OpenAIModel-HIx8w", | |
"name": "text_output", | |
"output_types": [ | |
"Message" | |
] | |
}, | |
"targetHandle": { | |
"fieldName": "input_value", | |
"id": "ChatOutput-hKFON", | |
"inputTypes": [ | |
"Message" | |
], | |
"type": "str" | |
} | |
}, | |
"id": "reactflow__edge-OpenAIModel-HIx8w{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-HIx8wœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-hKFON{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-hKFONœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"source": "OpenAIModel-HIx8w", | |
"sourceHandle": "{œdataTypeœ: œOpenAIModelœ, œidœ: œOpenAIModel-HIx8wœ, œnameœ: œtext_outputœ, œoutput_typesœ: [œMessageœ]}", | |
"target": "ChatOutput-hKFON", | |
"targetHandle": "{œfieldNameœ: œinput_valueœ, œidœ: œChatOutput-hKFONœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" | |
}, | |
{ | |
"className": "", | |
"data": { | |
"sourceHandle": { | |
"dataType": "File", | |
"id": "File-dlDLp", | |
"name": "data", | |
"output_types": [ | |
"Data" | |
] | |
}, | |
"targetHandle": { | |
"fieldName": "data", | |
"id": "ParseData-mIiSz", | |
"inputTypes": [ | |
"Data" | |
], | |
"type": "other" | |
} | |
}, | |
"id": "reactflow__edge-File-dlDLp{œdataTypeœ:œFileœ,œidœ:œFile-dlDLpœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-ParseData-mIiSz{œfieldNameœ:œdataœ,œidœ:œParseData-mIiSzœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
"source": "File-dlDLp", | |
"sourceHandle": "{œdataTypeœ: œFileœ, œidœ: œFile-dlDLpœ, œnameœ: œdataœ, œoutput_typesœ: [œDataœ]}", | |
"target": "ParseData-mIiSz", | |
"targetHandle": "{œfieldNameœ: œdataœ, œidœ: œParseData-mIiSzœ, œinputTypesœ: [œDataœ], œtypeœ: œotherœ}" | |
}, | |
{ | |
"data": { | |
"sourceHandle": { | |
"dataType": "ParseData", | |
"id": "ParseData-mIiSz", | |
"name": "text", | |
"output_types": [ | |
"Message" | |
] | |
}, | |
"targetHandle": { | |
"fieldName": "Document", | |
"id": "Prompt-L5CiD", | |
"inputTypes": [ | |
"Message", | |
"Text" | |
], | |
"type": "str" | |
} | |
}, | |
"id": "reactflow__edge-ParseData-mIiSz{œdataTypeœ:œParseDataœ,œidœ:œParseData-mIiSzœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-L5CiD{œfieldNameœ:œDocumentœ,œidœ:œPrompt-L5CiDœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
"source": "ParseData-mIiSz", | |
"sourceHandle": "{œdataTypeœ: œParseDataœ, œidœ: œParseData-mIiSzœ, œnameœ: œtextœ, œoutput_typesœ: [œMessageœ]}", | |
"target": "Prompt-L5CiD", | |
"targetHandle": "{œfieldNameœ: œDocumentœ, œidœ: œPrompt-L5CiDœ, œinputTypesœ: [œMessageœ, œTextœ], œtypeœ: œstrœ}" | |
}, | |
{ | |
"data": { | |
"sourceHandle": { | |
"dataType": "Prompt", | |
"id": "Prompt-L5CiD", | |
"name": "prompt", | |
"output_types": [ | |
"Message" | |
] | |
}, | |
"targetHandle": { | |
"fieldName": "system_message", | |
"id": "OpenAIModel-HIx8w", | |
"inputTypes": [ | |
"Message" | |
], | |
"type": "str" | |
} | |
}, | |
"id": "reactflow__edge-Prompt-L5CiD{œdataTypeœ:œPromptœ,œidœ:œPrompt-L5CiDœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-HIx8w{œfieldNameœ:œsystem_messageœ,œidœ:œOpenAIModel-HIx8wœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"source": "Prompt-L5CiD", | |
"sourceHandle": "{œdataTypeœ: œPromptœ, œidœ: œPrompt-L5CiDœ, œnameœ: œpromptœ, œoutput_typesœ: [œMessageœ]}", | |
"target": "OpenAIModel-HIx8w", | |
"targetHandle": "{œfieldNameœ: œsystem_messageœ, œidœ: œOpenAIModel-HIx8wœ, œinputTypesœ: [œMessageœ], œtypeœ: œstrœ}" | |
} | |
], | |
"nodes": [ | |
{ | |
"data": { | |
"description": "Get chat inputs from the Playground.", | |
"display_name": "Chat Input", | |
"id": "ChatInput-1lWBj", | |
"node": { | |
"base_classes": [ | |
"Message" | |
], | |
"beta": false, | |
"conditional_paths": [], | |
"custom_fields": {}, | |
"description": "Get chat inputs from the Playground.", | |
"display_name": "Chat Input", | |
"documentation": "", | |
"edited": false, | |
"field_order": [ | |
"input_value", | |
"store_message", | |
"sender", | |
"sender_name", | |
"session_id", | |
"files" | |
], | |
"frozen": false, | |
"icon": "MessagesSquare", | |
"legacy": false, | |
"lf_version": "1.0.19.post2", | |
"metadata": {}, | |
"output_types": [], | |
"outputs": [ | |
{ | |
"cache": true, | |
"display_name": "Message", | |
"method": "message_response", | |
"name": "message", | |
"selected": "Message", | |
"types": [ | |
"Message" | |
], | |
"value": "__UNDEFINED__" | |
} | |
], | |
"pinned": false, | |
"template": { | |
"_type": "Component", | |
"background_color": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Background Color", | |
"dynamic": false, | |
"info": "The background color of the icon.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "background_color", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"chat_icon": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Icon", | |
"dynamic": false, | |
"info": "The icon of the message.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "chat_icon", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"code": { | |
"advanced": true, | |
"dynamic": true, | |
"fileTypes": [], | |
"file_path": "", | |
"info": "", | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "code", | |
"password": false, | |
"placeholder": "", | |
"required": true, | |
"show": true, | |
"title_case": false, | |
"type": "code", | |
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_USER, MESSAGE_SENDER_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. 
If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n async def message_response(self) -> Message:\n background_color = self.background_color\n text_color = self.text_color\n icon = self.chat_icon\n\n message = await Message.create(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n properties={\"background_color\": background_color, \"text_color\": text_color, \"icon\": icon},\n )\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" | |
}, | |
"files": { | |
"advanced": true, | |
"display_name": "Files", | |
"dynamic": false, | |
"fileTypes": [ | |
"txt", | |
"md", | |
"mdx", | |
"csv", | |
"json", | |
"yaml", | |
"yml", | |
"xml", | |
"html", | |
"htm", | |
"pdf", | |
"docx", | |
"py", | |
"sh", | |
"sql", | |
"js", | |
"ts", | |
"tsx", | |
"jpg", | |
"jpeg", | |
"png", | |
"bmp", | |
"image" | |
], | |
"file_path": "", | |
"info": "Files to be sent with the message.", | |
"list": true, | |
"name": "files", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "file", | |
"value": "" | |
}, | |
"input_value": { | |
"advanced": false, | |
"display_name": "Text", | |
"dynamic": false, | |
"info": "Message to be passed as input.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "input_value", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "What is this document is about?" | |
}, | |
"sender": { | |
"advanced": true, | |
"display_name": "Sender Type", | |
"dynamic": false, | |
"info": "Type of sender.", | |
"name": "sender", | |
"options": [ | |
"Machine", | |
"User" | |
], | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "User" | |
}, | |
"sender_name": { | |
"advanced": true, | |
"display_name": "Sender Name", | |
"dynamic": false, | |
"info": "Name of the sender.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "sender_name", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "User" | |
}, | |
"session_id": { | |
"advanced": true, | |
"display_name": "Session ID", | |
"dynamic": false, | |
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "session_id", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"should_store_message": { | |
"_input_type": "BoolInput", | |
"advanced": true, | |
"display_name": "Store Messages", | |
"dynamic": false, | |
"info": "Store the message in the history.", | |
"list": false, | |
"name": "should_store_message", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": true | |
}, | |
"text_color": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Text Color", | |
"dynamic": false, | |
"info": "The text color of the name", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "text_color", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
} | |
} | |
}, | |
"type": "ChatInput" | |
}, | |
"dragging": false, | |
"height": 234, | |
"id": "ChatInput-1lWBj", | |
"position": { | |
"x": 516.7529480335185, | |
"y": 237.04967879541528 | |
}, | |
"positionAbsolute": { | |
"x": 516.7529480335185, | |
"y": 237.04967879541528 | |
}, | |
"selected": false, | |
"type": "genericNode", | |
"width": 320 | |
}, | |
{ | |
"data": { | |
"description": "Display a chat message in the Playground.", | |
"display_name": "Chat Output", | |
"id": "ChatOutput-hKFON", | |
"node": { | |
"base_classes": [ | |
"Message" | |
], | |
"beta": false, | |
"conditional_paths": [], | |
"custom_fields": {}, | |
"description": "Display a chat message in the Playground.", | |
"display_name": "Chat Output", | |
"documentation": "", | |
"edited": false, | |
"field_order": [ | |
"input_value", | |
"should_store_message", | |
"sender", | |
"sender_name", | |
"session_id", | |
"data_template", | |
"background_color", | |
"chat_icon", | |
"text_color" | |
], | |
"frozen": false, | |
"icon": "MessagesSquare", | |
"legacy": false, | |
"lf_version": "1.0.19.post2", | |
"metadata": {}, | |
"output_types": [], | |
"outputs": [ | |
{ | |
"cache": true, | |
"display_name": "Message", | |
"method": "message_response", | |
"name": "message", | |
"selected": "Message", | |
"types": [ | |
"Message" | |
], | |
"value": "__UNDEFINED__" | |
} | |
], | |
"pinned": false, | |
"template": { | |
"_type": "Component", | |
"background_color": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Background Color", | |
"dynamic": false, | |
"info": "The background color of the icon.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "background_color", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"chat_icon": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Icon", | |
"dynamic": false, | |
"info": "The icon of the message.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "chat_icon", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"code": { | |
"advanced": true, | |
"dynamic": true, | |
"fileTypes": [], | |
"file_path": "", | |
"info": "", | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "code", | |
"password": false, | |
"placeholder": "", | |
"required": true, | |
"show": true, | |
"title_case": false, | |
"type": "code", | |
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageInput, MessageTextInput, Output\nfrom langflow.schema.message import Message\nfrom langflow.schema.properties import Source\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"MessagesSquare\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. 
If left empty, it will be dynamically set to the Data's text key.\",\n ),\n MessageTextInput(\n name=\"background_color\",\n display_name=\"Background Color\",\n info=\"The background color of the icon.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"chat_icon\",\n display_name=\"Icon\",\n info=\"The icon of the message.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"text_color\",\n display_name=\"Text Color\",\n info=\"The text color of the name\",\n advanced=True,\n ),\n ]\n outputs = [\n Output(\n display_name=\"Message\",\n name=\"message\",\n method=\"message_response\",\n ),\n ]\n\n def _build_source(self, id_: str | None, display_name: str | None, source: str | None) -> Source:\n source_dict = {}\n if id_:\n source_dict[\"id\"] = id_\n if display_name:\n source_dict[\"display_name\"] = display_name\n if source:\n source_dict[\"source\"] = source\n return Source(**source_dict)\n\n async def message_response(self) -> Message:\n source, icon, display_name, source_id = self.get_properties_from_source_component()\n background_color = self.background_color\n text_color = self.text_color\n if self.chat_icon:\n icon = self.chat_icon\n message = self.input_value if isinstance(self.input_value, Message) else Message(text=self.input_value)\n message.sender = self.sender\n message.sender_name = self.sender_name\n message.session_id = self.session_id\n message.flow_id = self.graph.flow_id if hasattr(self, \"graph\") else None\n message.properties.source = self._build_source(source_id, display_name, source)\n message.properties.icon = icon\n message.properties.background_color = background_color\n message.properties.text_color = text_color\n if self.session_id and isinstance(message, Message) and self.should_store_message:\n stored_message = await self.send_message(\n message,\n )\n self.message.value = stored_message\n message = stored_message\n\n self.status = message\n return message\n" | |
}, | |
"data_template": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Data Template", | |
"dynamic": false, | |
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "data_template", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "{text}" | |
}, | |
"input_value": { | |
"_input_type": "MessageInput", | |
"advanced": false, | |
"display_name": "Text", | |
"dynamic": false, | |
"info": "Message to be passed as output.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "input_value", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"sender": { | |
"_input_type": "DropdownInput", | |
"advanced": true, | |
"combobox": false, | |
"display_name": "Sender Type", | |
"dynamic": false, | |
"info": "Type of sender.", | |
"name": "sender", | |
"options": [ | |
"Machine", | |
"User" | |
], | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "Machine" | |
}, | |
"sender_name": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Sender Name", | |
"dynamic": false, | |
"info": "Name of the sender.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "sender_name", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "AI" | |
}, | |
"session_id": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Session ID", | |
"dynamic": false, | |
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "session_id", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"should_store_message": { | |
"_input_type": "BoolInput", | |
"advanced": true, | |
"display_name": "Store Messages", | |
"dynamic": false, | |
"info": "Store the message in the history.", | |
"list": false, | |
"name": "should_store_message", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": true | |
}, | |
"text_color": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Text Color", | |
"dynamic": false, | |
"info": "The text color of the name", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "text_color", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
} | |
}, | |
"tool_mode": false | |
}, | |
"type": "ChatOutput" | |
}, | |
"dragging": false, | |
"height": 234, | |
"id": "ChatOutput-hKFON", | |
"position": { | |
"x": 1631.3766926569258, | |
"y": 136.66509468115308 | |
}, | |
"positionAbsolute": { | |
"x": 1631.3766926569258, | |
"y": 136.66509468115308 | |
}, | |
"selected": false, | |
"type": "genericNode", | |
"width": 320 | |
}, | |
{ | |
"data": { | |
"description": "Convert Data into plain text following a specified template.", | |
"display_name": "Parse Data", | |
"id": "ParseData-mIiSz", | |
"node": { | |
"base_classes": [ | |
"Message" | |
], | |
"beta": false, | |
"conditional_paths": [], | |
"custom_fields": {}, | |
"description": "Convert Data into plain text following a specified template.", | |
"display_name": "Parse Data", | |
"documentation": "", | |
"edited": false, | |
"field_order": [ | |
"data", | |
"template", | |
"sep" | |
], | |
"frozen": false, | |
"icon": "braces", | |
"legacy": false, | |
"lf_version": "1.0.19.post2", | |
"metadata": {}, | |
"output_types": [], | |
"outputs": [ | |
{ | |
"cache": true, | |
"display_name": "Text", | |
"method": "parse_data", | |
"name": "text", | |
"selected": "Message", | |
"types": [ | |
"Message" | |
], | |
"value": "__UNDEFINED__" | |
}, | |
{ | |
"cache": true, | |
"display_name": "Data List", | |
"method": "parse_data_as_list", | |
"name": "data_list", | |
"selected": "Data", | |
"types": [ | |
"Data" | |
], | |
"value": "__UNDEFINED__" | |
} | |
], | |
"pinned": false, | |
"template": { | |
"_type": "Component", | |
"code": { | |
"advanced": true, | |
"dynamic": true, | |
"fileTypes": [], | |
"file_path": "", | |
"info": "", | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "code", | |
"password": false, | |
"placeholder": "", | |
"required": true, | |
"show": true, | |
"title_case": false, | |
"type": "code", | |
"value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text, data_to_text_list\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\", is_list=True),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. \"\n \"It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(\n display_name=\"Text\",\n name=\"text\",\n info=\"Data as a single Message, with each input Data separated by Separator\",\n method=\"parse_data\",\n ),\n Output(\n display_name=\"Data List\",\n name=\"data_list\",\n info=\"Data as a list of new Data, each having `text` formatted by Template\",\n method=\"parse_data_as_list\",\n ),\n ]\n\n def _clean_args(self) -> tuple[list[Data], str, str]:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n sep = self.sep\n return data, template, sep\n\n def parse_data(self) -> Message:\n data, template, sep = self._clean_args()\n result_string = data_to_text(template, data, sep)\n self.status = result_string\n return Message(text=result_string)\n\n def parse_data_as_list(self) -> list[Data]:\n data, template, _ = self._clean_args()\n text_list, data_list = data_to_text_list(template, data)\n for item, text in zip(data_list, text_list, strict=True):\n item.set_text(text)\n self.status = data_list\n return data_list\n" | |
}, | |
"data": { | |
"advanced": false, | |
"display_name": "Data", | |
"dynamic": false, | |
"info": "The data to convert to text.", | |
"input_types": [ | |
"Data" | |
], | |
"list": true, | |
"name": "data", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "other", | |
"value": "" | |
}, | |
"sep": { | |
"advanced": true, | |
"display_name": "Separator", | |
"dynamic": false, | |
"info": "", | |
"list": false, | |
"load_from_db": false, | |
"name": "sep", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "\n" | |
}, | |
"template": { | |
"advanced": false, | |
"display_name": "Template", | |
"dynamic": false, | |
"info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "template", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "{text}" | |
} | |
} | |
}, | |
"type": "ParseData" | |
}, | |
"dragging": false, | |
"height": 302, | |
"id": "ParseData-mIiSz", | |
"position": { | |
"x": 514.8054600415829, | |
"y": -117.1921617826383 | |
}, | |
"positionAbsolute": { | |
"x": 514.8054600415829, | |
"y": -117.1921617826383 | |
}, | |
"selected": false, | |
"type": "genericNode", | |
"width": 320 | |
}, | |
{ | |
"data": { | |
"id": "note-Tz3ZY", | |
"node": { | |
"description": "## Get Your OpenAI API Key\n\n**Steps**:\n\n1. **Visit** [OpenAI's API Key Page](https://platform.openai.com/api-keys).\n\n2. **Log In/Sign Up**:\n - Log in or create a new OpenAI account.\n\n3. **Generate API Key**:\n - Click \"Create New Secret Key\" to obtain your key.\n\n4. **Store Your Key Securely**:\n - Note it down as it will only display once.\n\n5. **Enter API Key**:\n - Input your key in the OpenAI API Key field within the component setup.\n\nKeep your key safe and manage it responsibly!", | |
"display_name": "", | |
"documentation": "", | |
"template": { | |
"backgroundColor": "rose" | |
} | |
}, | |
"type": "note" | |
}, | |
"dragging": true, | |
"height": 325, | |
"id": "note-Tz3ZY", | |
"position": { | |
"x": 1253.2038187140245, | |
"y": -421.5721019678553 | |
}, | |
"positionAbsolute": { | |
"x": 1253.2038187140245, | |
"y": -421.5721019678553 | |
}, | |
"selected": false, | |
"type": "noteNode", | |
"width": 325 | |
}, | |
{ | |
"data": { | |
"id": "OpenAIModel-HIx8w", | |
"node": { | |
"base_classes": [ | |
"LanguageModel", | |
"Message" | |
], | |
"beta": false, | |
"conditional_paths": [], | |
"custom_fields": {}, | |
"description": "Generates text using OpenAI LLMs.", | |
"display_name": "OpenAI", | |
"documentation": "", | |
"edited": false, | |
"field_order": [ | |
"input_value", | |
"system_message", | |
"stream", | |
"max_tokens", | |
"model_kwargs", | |
"json_mode", | |
"output_schema", | |
"model_name", | |
"openai_api_base", | |
"api_key", | |
"temperature", | |
"seed", | |
"output_parser" | |
], | |
"frozen": false, | |
"icon": "OpenAI", | |
"legacy": false, | |
"lf_version": "1.0.19.post2", | |
"metadata": {}, | |
"output_types": [], | |
"outputs": [ | |
{ | |
"cache": true, | |
"display_name": "Text", | |
"method": "text_response", | |
"name": "text_output", | |
"required_inputs": [], | |
"selected": "Message", | |
"types": [ | |
"Message" | |
], | |
"value": "__UNDEFINED__" | |
}, | |
{ | |
"cache": true, | |
"display_name": "Language Model", | |
"method": "build_model", | |
"name": "model_output", | |
"required_inputs": [], | |
"selected": "LanguageModel", | |
"types": [ | |
"LanguageModel" | |
], | |
"value": "__UNDEFINED__" | |
} | |
], | |
"pinned": false, | |
"template": { | |
"_type": "Component", | |
"api_key": { | |
"_input_type": "SecretStrInput", | |
"advanced": false, | |
"display_name": "OpenAI API Key", | |
"dynamic": false, | |
"info": "The OpenAI API Key to use for the OpenAI model.", | |
"input_types": [ | |
"Message" | |
], | |
"load_from_db": true, | |
"name": "api_key", | |
"password": true, | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"type": "str", | |
"value": "OPENAI_API_KEY" | |
}, | |
"code": { | |
"advanced": true, | |
"dynamic": true, | |
"fileTypes": [], | |
"file_path": "", | |
"info": "", | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "code", | |
"password": false, | |
"placeholder": "", | |
"required": true, | |
"show": true, | |
"title_case": false, | |
"type": "code", | |
"value": "import operator\nfrom functools import reduce\n\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langflow.inputs import BoolInput, DictInput, DropdownInput, IntInput, SecretStrInput, SliderInput, StrInput\nfrom langflow.inputs.inputs import HandleInput\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = [\n *LCModelComponent._base_inputs,\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. \"\n \"You must pass the word JSON in the prompt. \"\n \"If left blank, JSON mode will be disabled. [DEPRECATED]\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. \"\n \"Defaults to https://api.openai.com/v1. 
\"\n \"You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n SliderInput(\n name=\"temperature\", display_name=\"Temperature\", value=0.1, range_spec=RangeSpec(min=0, max=2, step=0.01)\n ),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n HandleInput(\n name=\"output_parser\",\n display_name=\"Output Parser\",\n info=\"The parser to use to parse the output of the model\",\n advanced=True,\n input_types=[\"OutputParser\"],\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n api_key = SecretStr(openai_api_key).get_secret_value() if openai_api_key else None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})\n\n return output\n\n def _get_exception_message(self, e: Exception):\n \"\"\"Get a message from an OpenAI exception.\n\n Args:\n e (Exception): The 
exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n try:\n from openai import BadRequestError\n except ImportError:\n return None\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")\n if message:\n return message\n return None\n" | |
}, | |
"input_value": { | |
"_input_type": "MessageInput", | |
"advanced": false, | |
"display_name": "Input", | |
"dynamic": false, | |
"info": "", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "input_value", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"json_mode": { | |
"_input_type": "BoolInput", | |
"advanced": true, | |
"display_name": "JSON Mode", | |
"dynamic": false, | |
"info": "If True, it will output JSON regardless of passing a schema.", | |
"list": false, | |
"name": "json_mode", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": false | |
}, | |
"max_tokens": { | |
"_input_type": "IntInput", | |
"advanced": true, | |
"display_name": "Max Tokens", | |
"dynamic": false, | |
"info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", | |
"list": false, | |
"name": "max_tokens", | |
"placeholder": "", | |
"range_spec": { | |
"max": 128000, | |
"min": 0, | |
"step": 0.1, | |
"step_type": "float" | |
}, | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "int", | |
"value": "" | |
}, | |
"model_kwargs": { | |
"_input_type": "DictInput", | |
"advanced": true, | |
"display_name": "Model Kwargs", | |
"dynamic": false, | |
"info": "Additional keyword arguments to pass to the model.", | |
"list": false, | |
"name": "model_kwargs", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"type": "dict", | |
"value": {} | |
}, | |
"model_name": { | |
"_input_type": "DropdownInput", | |
"advanced": false, | |
"combobox": false, | |
"display_name": "Model Name", | |
"dynamic": false, | |
"info": "", | |
"name": "model_name", | |
"options": [ | |
"gpt-4o-mini", | |
"gpt-4o", | |
"gpt-4-turbo", | |
"gpt-4-turbo-preview", | |
"gpt-4", | |
"gpt-3.5-turbo", | |
"gpt-3.5-turbo-0125" | |
], | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "gpt-4o-mini" | |
}, | |
"openai_api_base": { | |
"_input_type": "StrInput", | |
"advanced": true, | |
"display_name": "OpenAI API Base", | |
"dynamic": false, | |
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", | |
"list": false, | |
"load_from_db": false, | |
"name": "openai_api_base", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"output_parser": { | |
"_input_type": "HandleInput", | |
"advanced": true, | |
"display_name": "Output Parser", | |
"dynamic": false, | |
"info": "The parser to use to parse the output of the model", | |
"input_types": [ | |
"OutputParser" | |
], | |
"list": false, | |
"name": "output_parser", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "other", | |
"value": "" | |
}, | |
"output_schema": { | |
"_input_type": "DictInput", | |
"advanced": true, | |
"display_name": "Schema", | |
"dynamic": false, | |
"info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled. [DEPRECATED]", | |
"list": true, | |
"name": "output_schema", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"type": "dict", | |
"value": {} | |
}, | |
"seed": { | |
"_input_type": "IntInput", | |
"advanced": true, | |
"display_name": "Seed", | |
"dynamic": false, | |
"info": "The seed controls the reproducibility of the job.", | |
"list": false, | |
"name": "seed", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "int", | |
"value": 1 | |
}, | |
"stream": { | |
"_input_type": "BoolInput", | |
"advanced": false, | |
"display_name": "Stream", | |
"dynamic": false, | |
"info": "Stream the response from the model. Streaming works only in Chat.", | |
"list": false, | |
"name": "stream", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": false | |
}, | |
"system_message": { | |
"_input_type": "MessageTextInput", | |
"advanced": false, | |
"display_name": "System Message", | |
"dynamic": false, | |
"info": "System message to pass to the model.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "system_message", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": false, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
}, | |
"temperature": { | |
"_input_type": "FloatInput", | |
"advanced": false, | |
"display_name": "Temperature", | |
"dynamic": false, | |
"info": "", | |
"list": false, | |
"name": "temperature", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "float", | |
"value": 0.1 | |
} | |
}, | |
"tool_mode": false | |
}, | |
"type": "OpenAIModel" | |
}, | |
"dragging": false, | |
"height": 630, | |
"id": "OpenAIModel-HIx8w", | |
"position": { | |
"x": 1259.2100978002586, | |
"y": -88.15692253090975 | |
}, | |
"positionAbsolute": { | |
"x": 1259.2100978002586, | |
"y": -88.15692253090975 | |
}, | |
"selected": false, | |
"type": "genericNode", | |
"width": 320 | |
}, | |
{ | |
"data": { | |
"id": "note-XGIUa", | |
"node": { | |
"description": "# Document Q&A\n\n**Purpose:**\nThis flow leverages a language model to answer questions based on content from a loaded document. It's ideal for obtaining quick insights from PDFs or other text files by asking direct questions.\n\n**Components**:\n1. **File Component**: Loads and processes your document in supported formats.\n2. **Parse Data**: Converts the loaded document into text using a specified template for consistent processing.\n3. **Prompt Component**: Forms a structured query by combining the parsed document content with user questions.\n4. **OpenAI Model**: Engages OpenAI's language model to generate responses to queries based on the document context.\n5. **Chat Input/Output**: Facilitates user queries and displays AI-generated answers seamlessly.\n\n**Steps to Use**:\n1. **Upload Document**: Use the \"File\" component to upload a document or text file you want to query.\n2. **Enter Question**: Through the \"Chat Input\" field, type your question related to the document content.\n3. **Run the Flow**: Activate the flow to process the input and generate an answer using the OpenAI model.\n4. **View Response**: Read the generated answer in the \"Chat Output\" field for immediate insights.\n\n**Benefits**:\n- Simplifies the process of extracting information from documents.\n- Provides a user-friendly interface for interactive document exploration using AI.\n", | |
"display_name": "", | |
"documentation": "", | |
"template": {} | |
}, | |
"type": "note" | |
}, | |
"dragging": false, | |
"height": 452, | |
"id": "note-XGIUa", | |
"position": { | |
"x": -338.7070086205371, | |
"y": -177.11912020709357 | |
}, | |
"positionAbsolute": { | |
"x": -338.7070086205371, | |
"y": -177.11912020709357 | |
}, | |
"resizing": false, | |
"selected": false, | |
"style": { | |
"height": 452, | |
"width": 469 | |
}, | |
"type": "noteNode", | |
"width": 469 | |
}, | |
{ | |
"data": { | |
"id": "File-dlDLp", | |
"node": { | |
"base_classes": [ | |
"Data" | |
], | |
"beta": false, | |
"conditional_paths": [], | |
"custom_fields": {}, | |
"description": "Load a file to be used in your project.", | |
"display_name": "File", | |
"documentation": "", | |
"edited": false, | |
"field_order": [ | |
"path", | |
"silent_errors", | |
"use_multithreading", | |
"concurrency_multithreading" | |
], | |
"frozen": false, | |
"icon": "file-text", | |
"legacy": false, | |
"metadata": {}, | |
"output_types": [], | |
"outputs": [ | |
{ | |
"cache": true, | |
"display_name": "Data", | |
"method": "load_files", | |
"name": "data", | |
"required_inputs": [], | |
"selected": "Data", | |
"types": [ | |
"Data" | |
], | |
"value": "__UNDEFINED__" | |
} | |
], | |
"pinned": false, | |
"template": { | |
"_type": "Component", | |
"code": { | |
"advanced": true, | |
"dynamic": true, | |
"fileTypes": [], | |
"file_path": "", | |
"info": "", | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "code", | |
"password": false, | |
"placeholder": "", | |
"required": true, | |
"show": true, | |
"title_case": false, | |
"type": "code", | |
"value": "from langflow.base.data import BaseFileComponent\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parallel_load_data, parse_text_file_to_data\nfrom langflow.io import BoolInput, IntInput\nfrom langflow.schema import Data\n\n\nclass FileComponent(BaseFileComponent):\n \"\"\"Handles loading and processing of individual or zipped text files.\n\n This component supports processing multiple valid files within a zip archive,\n resolving paths, validating file types, and optionally using multithreading for processing.\n \"\"\"\n\n display_name = \"File\"\n description = \"Load a file to be used in your project.\"\n icon = \"file-text\"\n name = \"File\"\n\n VALID_EXTENSIONS = TEXT_FILE_TYPES\n\n inputs = [\n *BaseFileComponent._base_inputs,\n BoolInput(\n name=\"use_multithreading\",\n display_name=\"[Deprecated] Use Multithreading\",\n advanced=True,\n value=True,\n info=\"Set 'Processing Concurrency' greater than 1 to enable multithreading.\",\n ),\n IntInput(\n name=\"concurrency_multithreading\",\n display_name=\"Processing Concurrency\",\n advanced=False,\n info=\"When multiple files are being processed, the number of files to process concurrently.\",\n value=1,\n ),\n ]\n\n outputs = [\n *BaseFileComponent._base_outputs,\n ]\n\n def process_files(self, file_list: list[BaseFileComponent.BaseFile]) -> list[BaseFileComponent.BaseFile]:\n \"\"\"Processes files either sequentially or in parallel, depending on concurrency settings.\n\n Args:\n file_list (list[BaseFileComponent.BaseFile]): List of files to process.\n\n Returns:\n list[BaseFileComponent.BaseFile]: Updated list of files with merged data.\n \"\"\"\n\n def process_file(file_path: str, *, silent_errors: bool = False) -> Data | None:\n \"\"\"Processes a single file and returns its Data object.\"\"\"\n try:\n return parse_text_file_to_data(file_path, silent_errors=silent_errors)\n except FileNotFoundError as e:\n msg = f\"File not found: {file_path}. 
Error: {e}\"\n self.log(msg)\n if not silent_errors:\n raise\n return None\n except Exception as e:\n msg = f\"Unexpected error processing {file_path}: {e}\"\n self.log(msg)\n if not silent_errors:\n raise\n return None\n\n if not file_list:\n self.log(\"No files to process.\")\n return file_list\n\n concurrency = 1 if not self.use_multithreading else max(1, self.concurrency_multithreading)\n file_count = len(file_list)\n\n parallel_processing_threshold = 2\n if concurrency < parallel_processing_threshold or file_count < parallel_processing_threshold:\n if file_count > 1:\n self.log(f\"Processing {file_count} files sequentially.\")\n processed_data = [process_file(str(file.path), silent_errors=self.silent_errors) for file in file_list]\n else:\n self.log(f\"Starting parallel processing of {file_count} files with concurrency: {concurrency}.\")\n file_paths = [str(file.path) for file in file_list]\n processed_data = parallel_load_data(\n file_paths,\n silent_errors=self.silent_errors,\n load_function=process_file,\n max_concurrency=concurrency,\n )\n\n # Use rollup_basefile_data to merge processed data with BaseFile objects\n return self.rollup_data(file_list, processed_data)\n" | |
}, | |
"concurrency_multithreading": { | |
"_input_type": "IntInput", | |
"advanced": false, | |
"display_name": "Processing Concurrency", | |
"dynamic": false, | |
"info": "When multiple files are being processed, the number of files to process concurrently.", | |
"list": false, | |
"name": "concurrency_multithreading", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "int", | |
"value": 4 | |
}, | |
"delete_server_file_after_processing": { | |
"_input_type": "BoolInput", | |
"advanced": true, | |
"display_name": "Delete Server File After Processing", | |
"dynamic": false, | |
"info": "If true, the Server File Path will be deleted after processing.", | |
"list": false, | |
"name": "delete_server_file_after_processing", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": true | |
}, | |
"file_path": { | |
"_input_type": "HandleInput", | |
"advanced": false, | |
"display_name": "Server File Path", | |
"dynamic": false, | |
"info": "Data object with a 'file_path' property pointing to server file or a Message object with a path to the file. Supercedes 'Path' but supports same file types.", | |
"input_types": [ | |
"Data", | |
"Message" | |
], | |
"list": true, | |
"name": "file_path", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "other", | |
"value": "" | |
}, | |
"ignore_unspecified_files": { | |
"_input_type": "BoolInput", | |
"advanced": true, | |
"display_name": "Ignore Unspecified Files", | |
"dynamic": false, | |
"info": "If true, Data with no 'file_path' property will be ignored.", | |
"list": false, | |
"name": "ignore_unspecified_files", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": false | |
}, | |
"ignore_unsupported_extensions": { | |
"_input_type": "BoolInput", | |
"advanced": true, | |
"display_name": "Ignore Unsupported Extensions", | |
"dynamic": false, | |
"info": "If true, files with unsupported extensions will not be processed.", | |
"list": false, | |
"name": "ignore_unsupported_extensions", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": true | |
}, | |
"path": { | |
"_input_type": "FileInput", | |
"advanced": false, | |
"display_name": "Path", | |
"dynamic": false, | |
"fileTypes": [ | |
"txt", | |
"md", | |
"mdx", | |
"csv", | |
"json", | |
"yaml", | |
"yml", | |
"xml", | |
"html", | |
"htm", | |
"pdf", | |
"docx", | |
"py", | |
"sh", | |
"sql", | |
"js", | |
"ts", | |
"tsx", | |
"zip", | |
"tar", | |
"tgz", | |
"bz2", | |
"gz" | |
], | |
"file_path": "", | |
"info": "Supported file extensions: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx; optionally bundled in file extensions: zip, tar, tgz, bz2, gz", | |
"list": false, | |
"name": "path", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "file", | |
"value": "" | |
}, | |
"silent_errors": { | |
"_input_type": "BoolInput", | |
"advanced": true, | |
"display_name": "Silent Errors", | |
"dynamic": false, | |
"info": "If true, errors will not raise an exception.", | |
"list": false, | |
"name": "silent_errors", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": false | |
}, | |
"use_multithreading": { | |
"_input_type": "BoolInput", | |
"advanced": true, | |
"display_name": "[Deprecated] Use Multithreading", | |
"dynamic": false, | |
"info": "Set 'Processing Concurrency' greater than 1 to enable multithreading.", | |
"list": false, | |
"name": "use_multithreading", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_metadata": true, | |
"type": "bool", | |
"value": false | |
} | |
}, | |
"tool_mode": false | |
}, | |
"type": "File" | |
}, | |
"dragging": false, | |
"height": 232, | |
"id": "File-dlDLp", | |
"position": { | |
"x": 155.39382083637838, | |
"y": -82.32805525710685 | |
}, | |
"positionAbsolute": { | |
"x": 155.39382083637838, | |
"y": -82.32805525710685 | |
}, | |
"selected": false, | |
"type": "genericNode", | |
"width": 320 | |
}, | |
{ | |
"data": { | |
"description": "Create a prompt template with dynamic variables.", | |
"display_name": "Prompt", | |
"id": "Prompt-L5CiD", | |
"node": { | |
"base_classes": [ | |
"Message" | |
], | |
"beta": false, | |
"conditional_paths": [], | |
"custom_fields": { | |
"template": [ | |
"Document" | |
] | |
}, | |
"description": "Create a prompt template with dynamic variables.", | |
"display_name": "Prompt", | |
"documentation": "", | |
"edited": false, | |
"error": null, | |
"field_order": [ | |
"template" | |
], | |
"frozen": false, | |
"full_path": null, | |
"icon": "prompts", | |
"is_composition": null, | |
"is_input": null, | |
"is_output": null, | |
"legacy": false, | |
"lf_version": "1.0.19.post2", | |
"metadata": {}, | |
"name": "", | |
"output_types": [], | |
"outputs": [ | |
{ | |
"cache": true, | |
"display_name": "Prompt Message", | |
"method": "build_prompt", | |
"name": "prompt", | |
"selected": "Message", | |
"types": [ | |
"Message" | |
], | |
"value": "__UNDEFINED__" | |
} | |
], | |
"pinned": false, | |
"template": { | |
"Document": { | |
"advanced": false, | |
"display_name": "Document", | |
"dynamic": false, | |
"field_type": "str", | |
"fileTypes": [], | |
"file_path": "", | |
"info": "", | |
"input_types": [ | |
"Message", | |
"Text" | |
], | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "Document", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"type": "str", | |
"value": "" | |
}, | |
"_type": "Component", | |
"code": { | |
"advanced": true, | |
"dynamic": true, | |
"fileTypes": [], | |
"file_path": "", | |
"info": "", | |
"list": false, | |
"load_from_db": false, | |
"multiline": true, | |
"name": "code", | |
"password": false, | |
"placeholder": "", | |
"required": true, | |
"show": true, | |
"title_case": false, | |
"type": "code", | |
"value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import MessageTextInput, Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n MessageTextInput(\n name=\"tool_placeholder\",\n display_name=\"Tool Placeholder\",\n tool_mode=True,\n advanced=True,\n info=\"A placeholder input for tool mode.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(self) -> Message:\n prompt = Message.from_template(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"This function is called after the code validation is done.\"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n 
frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n" | |
}, | |
"template": { | |
"advanced": false, | |
"display_name": "Template", | |
"dynamic": false, | |
"info": "", | |
"list": false, | |
"load_from_db": false, | |
"name": "template", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"trace_as_input": true, | |
"type": "prompt", | |
"value": "Answer user's questions based on the document below:\n\n---\n\n{Document}\n\n---\n\nQuestion:" | |
}, | |
"tool_placeholder": { | |
"_input_type": "MessageTextInput", | |
"advanced": true, | |
"display_name": "Tool Placeholder", | |
"dynamic": false, | |
"info": "A placeholder input for tool mode.", | |
"input_types": [ | |
"Message" | |
], | |
"list": false, | |
"load_from_db": false, | |
"name": "tool_placeholder", | |
"placeholder": "", | |
"required": false, | |
"show": true, | |
"title_case": false, | |
"tool_mode": true, | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"type": "str", | |
"value": "" | |
} | |
}, | |
"tool_mode": false | |
}, | |
"type": "Prompt" | |
}, | |
"dragging": false, | |
"height": 347, | |
"id": "Prompt-L5CiD", | |
"position": { | |
"x": 895.1947781377585, | |
"y": -59.89409263992732 | |
}, | |
"positionAbsolute": { | |
"x": 895.1947781377585, | |
"y": -59.89409263992732 | |
}, | |
"selected": false, | |
"type": "genericNode", | |
"width": 320 | |
} | |
], | |
"viewport": { | |
"x": 262.21464656923195, | |
"y": 450.3754323717522, | |
"zoom": 0.5739369419687381 | |
} | |
}, | |
"description": "starterProjects.documentQA.description", | |
"endpoint_name": null, | |
"gradient": "3", | |
"icon": "FileQuestion", | |
"id": "febba2f9-69b3-484b-8aef-65626810ec8a", | |
"is_component": false, | |
"last_tested_version": "1.0.19.post2", | |
"name": "starterProjects.documentQA.name", | |
"tags": [ | |
"rag", | |
"q-a", | |
"openai" | |
] | |
} |