{
  "id": "cb35184a-3446-4074-9ec7-8a935e980114",
  "data": {
    "edges": [
      {
        "className": "",
        "data": {
          "sourceHandle": {
            "dataType": "ChatInput",
            "id": "ChatInput-KovKB",
            "name": "message",
            "output_types": ["Message"]
          },
          "targetHandle": {
            "fieldName": "user_message",
            "id": "Prompt-Xz9bN",
            "inputTypes": ["Message", "Text"],
            "type": "str"
          }
        },
        "id": "reactflow__edge-ChatInput-KovKB{œdataTypeœ:œChatInputœ,œidœ:œChatInput-KovKBœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-Xz9bN{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-Xz9bNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}",
        "source": "ChatInput-KovKB",
        "sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-KovKBœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}",
        "target": "Prompt-Xz9bN",
        "targetHandle": "{œfieldNameœ:œuser_messageœ,œidœ:œPrompt-Xz9bNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"
      },
      {
        "className": "",
        "data": {
          "sourceHandle": {
            "dataType": "Prompt",
            "id": "Prompt-Xz9bN",
            "name": "prompt",
            "output_types": ["Message"]
          },
          "targetHandle": {
            "fieldName": "input_value",
            "id": "OpenAIModel-pqHDB",
            "inputTypes": ["Message"],
            "type": "str"
          }
        },
        "id": "reactflow__edge-Prompt-Xz9bN{œdataTypeœ:œPromptœ,œidœ:œPrompt-Xz9bNœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-pqHDB{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-pqHDBœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
        "source": "Prompt-Xz9bN",
        "sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-Xz9bNœ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}",
        "target": "OpenAIModel-pqHDB",
        "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-pqHDBœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"
      },
      {
        "className": "",
        "data": {
          "sourceHandle": {
            "dataType": "OpenAIModel",
            "id": "OpenAIModel-pqHDB",
            "name": "text_output",
            "output_types": ["Message"]
          },
          "targetHandle": {
            "fieldName": "input_value",
            "id": "ChatOutput-NasE4",
            "inputTypes": ["Message"],
            "type": "str"
          }
        },
        "id": "reactflow__edge-OpenAIModel-pqHDB{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-pqHDBœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-NasE4{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-NasE4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}",
        "source": "OpenAIModel-pqHDB",
        "sourceHandle": "{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-pqHDBœ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}",
        "target": "ChatOutput-NasE4",
        "targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-NasE4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}"
      },
      {
        "className": "",
        "data": {
          "sourceHandle": {
            "dataType": "Memory",
            "id": "Memory-x4ENQ",
            "name": "messages_text",
            "output_types": ["Message"]
          },
          "targetHandle": {
            "fieldName": "context",
            "id": "Prompt-Xz9bN",
            "inputTypes": ["Message", "Text"],
            "type": "str"
          }
        },
        "id": "reactflow__edge-Memory-x4ENQ{œdataTypeœ:œMemoryœ,œidœ:œMemory-x4ENQœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}-Prompt-Xz9bN{œfieldNameœ:œcontextœ,œidœ:œPrompt-Xz9bNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}",
        "source": "Memory-x4ENQ",
        "sourceHandle": "{œdataTypeœ:œMemoryœ,œidœ:œMemory-x4ENQœ,œnameœ:œmessages_textœ,œoutput_typesœ:[œMessageœ]}",
        "target": "Prompt-Xz9bN",
        "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-Xz9bNœ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}"
      }
    ],
    "nodes": [
      {
        "data": {
          "description": "Create a prompt template with dynamic variables.",
          "display_name": "Prompt",
          "id": "Prompt-Xz9bN",
          "node": {
            "base_classes": ["Message"],
            "beta": false,
            "conditional_paths": [],
            "custom_fields": { "template": ["context", "user_message"] },
            "description": "Create a prompt template with dynamic variables.",
            "display_name": "Prompt",
            "documentation": "",
            "edited": false,
            "field_order": ["template"],
            "frozen": false,
            "icon": "prompts",
            "output_types": [],
            "outputs": [
              {
                "cache": true,
                "display_name": "Prompt Message",
                "method": "build_prompt",
                "name": "prompt",
                "selected": "Message",
                "types": ["Message"],
                "value": "__UNDEFINED__"
              }
            ],
            "pinned": false,
            "template": {
              "_type": "Component",
              "code": {
                "advanced": true,
                "dynamic": true,
                "fileTypes": [],
                "file_path": "",
                "info": "",
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "code",
                "password": false,
                "placeholder": "",
                "required": true,
                "show": true,
                "title_case": false,
                "type": "code",
                "value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n"
              },
              "context": {
                "advanced": false,
                "display_name": "context",
                "dynamic": false,
                "field_type": "str",
                "fileTypes": [],
                "file_path": "",
                "info": "",
                "input_types": ["Message", "Text"],
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "context",
                "password": false,
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "type": "str",
                "value": ""
              },
              "template": {
                "advanced": false,
                "display_name": "Template",
                "dynamic": false,
                "info": "",
                "list": false,
                "load_from_db": false,
                "name": "template",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "type": "prompt",
                "value": "{context}\n\nUser: {user_message}\nAI: "
              },
              "user_message": {
                "advanced": false,
                "display_name": "user_message",
                "dynamic": false,
                "field_type": "str",
                "fileTypes": [],
                "file_path": "",
                "info": "",
                "input_types": ["Message", "Text"],
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "user_message",
                "password": false,
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "type": "str",
                "value": ""
              }
            }
          },
          "type": "Prompt"
        },
        "dragging": false,
        "height": 517,
        "id": "Prompt-Xz9bN",
        "position": { "x": 1880.8227904110583, "y": 625.8049209882275 },
        "positionAbsolute": { "x": 1880.8227904110583, "y": 625.8049209882275 },
        "selected": false,
        "type": "genericNode",
        "width": 384
      },
      {
        "data": {
          "description": "Get chat inputs from the Playground.",
          "display_name": "Chat Input",
          "id": "ChatInput-KovKB",
          "node": {
            "base_classes": ["Message"],
            "beta": false,
            "conditional_paths": [],
            "custom_fields": {},
            "description": "Get chat inputs from the Playground.",
            "display_name": "Chat Input",
            "documentation": "",
            "edited": false,
            "field_order": [
              "input_value",
              "store_message",
              "sender",
              "sender_name",
              "session_id",
              "files"
            ],
            "frozen": false,
            "icon": "ChatInput",
            "output_types": [],
            "outputs": [
              {
                "cache": true,
                "display_name": "Message",
                "method": "message_response",
                "name": "message",
                "selected": "Message",
                "types": ["Message"],
                "value": "__UNDEFINED__"
              }
            ],
            "pinned": false,
            "template": {
              "_type": "Component",
              "code": {
                "advanced": true,
                "dynamic": true,
                "fileTypes": [],
                "file_path": "",
                "info": "",
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "code",
                "password": false,
                "placeholder": "",
                "required": true,
                "show": true,
                "title_case": false,
                "type": "code",
                "value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n"
              },
              "files": {
                "advanced": true,
                "display_name": "Files",
                "dynamic": false,
                "fileTypes": [
                  "txt",
                  "md",
                  "mdx",
                  "csv",
                  "json",
                  "yaml",
                  "yml",
                  "xml",
                  "html",
                  "htm",
                  "pdf",
                  "docx",
                  "py",
                  "sh",
                  "sql",
                  "js",
                  "ts",
                  "tsx",
                  "jpg",
                  "jpeg",
                  "png",
                  "bmp",
                  "image"
                ],
                "file_path": "",
                "info": "Files to be sent with the message.",
                "list": true,
                "name": "files",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "file",
                "value": ""
              },
              "input_value": {
                "advanced": false,
                "display_name": "Text",
                "dynamic": false,
                "info": "Message to be passed as input.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "input_value",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "sender": {
                "advanced": true,
                "display_name": "Sender Type",
                "dynamic": false,
                "info": "Type of sender.",
                "name": "sender",
                "options": ["Machine", "User"],
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "str",
                "value": "User"
              },
              "sender_name": {
                "advanced": true,
                "display_name": "Sender Name",
                "dynamic": false,
                "info": "Name of the sender.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "sender_name",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": "User"
              },
              "session_id": {
                "advanced": true,
                "display_name": "Session ID",
                "dynamic": false,
                "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "session_id",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "should_store_message": {
                "_input_type": "BoolInput",
                "advanced": true,
                "display_name": "Store Messages",
                "dynamic": false,
                "info": "Store the message in the history.",
                "list": false,
                "name": "should_store_message",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "bool",
                "value": true
              }
            }
          },
          "type": "ChatInput"
        },
        "dragging": false,
        "height": 309,
        "id": "ChatInput-KovKB",
        "position": { "x": 1275.9262193671882, "y": 836.1228056896347 },
        "positionAbsolute": { "x": 1275.9262193671882, "y": 836.1228056896347 },
        "selected": false,
        "type": "genericNode",
        "width": 384
      },
      {
        "data": {
          "description": "Generates text using OpenAI LLMs.",
          "display_name": "OpenAI",
          "id": "OpenAIModel-pqHDB",
          "node": {
            "base_classes": ["LanguageModel", "Message"],
            "beta": false,
            "conditional_paths": [],
            "custom_fields": {},
            "description": "Generates text using OpenAI LLMs.",
            "display_name": "OpenAI",
            "documentation": "",
            "edited": false,
            "field_order": [
              "input_value",
              "max_tokens",
              "model_kwargs",
              "json_mode",
              "output_schema",
              "model_name",
              "openai_api_base",
              "openai_api_key",
              "temperature",
              "stream",
              "system_message",
              "seed"
            ],
            "frozen": false,
            "icon": "OpenAI",
            "output_types": [],
            "outputs": [
              {
                "cache": true,
                "display_name": "Text",
                "method": "text_response",
                "name": "text_output",
                "selected": "Message",
                "types": ["Message"],
                "value": "__UNDEFINED__"
              },
              {
                "cache": true,
                "display_name": "Language Model",
                "method": "build_model",
                "name": "model_output",
                "selected": "LanguageModel",
                "types": ["LanguageModel"],
                "value": "__UNDEFINED__"
              }
            ],
            "pinned": false,
            "template": {
              "_type": "Component",
              "api_key": {
                "_input_type": "SecretStrInput",
                "advanced": false,
                "display_name": "OpenAI API Key",
                "dynamic": false,
                "info": "The OpenAI API Key to use for the OpenAI model.",
                "input_types": ["Message"],
                "load_from_db": true,
                "name": "api_key",
                "password": true,
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "type": "str",
                "value": ""
              },
              "code": {
                "advanced": true,
                "dynamic": true,
                "fileTypes": [],
                "file_path": "",
                "info": "",
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "code",
                "password": false,
                "placeholder": "",
                "required": true,
                "show": true,
                "title_case": false,
                "type": "code",
                "value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel:  # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature or 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\")  # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"})  # type: ignore\n\n return output  # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\")  # type: ignore\n if message:\n return message\n return\n"
              },
              "input_value": {
                "advanced": false,
                "display_name": "Input",
                "dynamic": false,
                "info": "",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "input_value",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "json_mode": {
                "advanced": true,
                "display_name": "JSON Mode",
                "dynamic": false,
                "info": "If True, it will output JSON regardless of passing a schema.",
                "list": false,
                "name": "json_mode",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "bool",
                "value": false
              },
              "max_tokens": {
                "advanced": true,
                "display_name": "Max Tokens",
                "dynamic": false,
                "info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
                "list": false,
                "name": "max_tokens",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "int",
                "value": ""
              },
              "model_kwargs": {
                "advanced": true,
                "display_name": "Model Kwargs",
                "dynamic": false,
                "info": "",
                "list": false,
                "name": "model_kwargs",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "type": "dict",
                "value": {}
              },
              "model_name": {
                "advanced": false,
                "display_name": "Model Name",
                "dynamic": false,
                "info": "",
                "name": "model_name",
                "options": [
                  "gpt-4o-mini",
                  "gpt-4o",
                  "gpt-4-turbo",
                  "gpt-4-turbo-preview",
                  "gpt-4",
                  "gpt-3.5-turbo",
                  "gpt-3.5-turbo-0125"
                ],
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "str",
                "value": "gpt-4o"
              },
              "openai_api_base": {
                "advanced": true,
                "display_name": "OpenAI API Base",
                "dynamic": false,
                "info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.",
                "list": false,
                "load_from_db": false,
                "name": "openai_api_base",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "output_schema": {
                "advanced": true,
                "display_name": "Schema",
                "dynamic": false,
                "info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.",
                "list": true,
                "name": "output_schema",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "type": "dict",
                "value": {}
              },
              "seed": {
                "advanced": true,
                "display_name": "Seed",
                "dynamic": false,
                "info": "The seed controls the reproducibility of the job.",
                "list": false,
                "name": "seed",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "int",
                "value": 1
              },
              "stream": {
                "advanced": true,
                "display_name": "Stream",
                "dynamic": false,
                "info": "Stream the response from the model. Streaming works only in Chat.",
                "list": false,
                "name": "stream",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "bool",
                "value": false
              },
              "system_message": {
                "advanced": true,
                "display_name": "System Message",
                "dynamic": false,
                "info": "System message to pass to the model.",
                "list": false,
                "load_from_db": false,
                "name": "system_message",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "temperature": {
                "advanced": false,
                "display_name": "Temperature",
                "dynamic": false,
                "info": "",
                "list": false,
                "name": "temperature",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "float",
                "value": 0.1
              }
            }
          },
          "type": "OpenAIModel"
        },
        "dragging": false,
        "height": 623,
        "id": "OpenAIModel-pqHDB",
        "position": { "x": 2468.968379487559, "y": 560.0689522326683 },
        "positionAbsolute": { "x": 2468.968379487559, "y": 560.0689522326683 },
        "selected": false,
        "type": "genericNode",
        "width": 384
      },
      {
        "data": {
          "description": "Display a chat message in the Playground.",
          "display_name": "Chat Output",
          "id": "ChatOutput-NasE4",
          "node": {
            "base_classes": ["Message"],
            "beta": false,
            "conditional_paths": [],
            "custom_fields": {},
            "description": "Display a chat message in the Playground.",
            "display_name": "Chat Output",
            "documentation": "",
            "edited": false,
            "field_order": [
              "input_value",
              "store_message",
              "sender",
              "sender_name",
              "session_id",
              "data_template"
            ],
            "frozen": false,
            "icon": "ChatOutput",
            "output_types": [],
            "outputs": [
              {
                "cache": true,
                "display_name": "Message",
                "method": "message_response",
                "name": "message",
                "selected": "Message",
                "types": ["Message"],
                "value": "__UNDEFINED__"
              }
            ],
            "pinned": false,
            "template": {
              "_type": "Component",
              "code": {
                "advanced": true,
                "dynamic": true,
                "fileTypes": [],
                "file_path": "",
                "info": "",
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "code",
                "password": false,
                "placeholder": "",
                "required": true,
                "show": true,
                "title_case": false,
                "type": "code",
                "value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n"
              },
              "data_template": {
                "advanced": true,
                "display_name": "Data Template",
                "dynamic": false,
                "info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "data_template",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": "{text}"
              },
              "input_value": {
                "advanced": false,
                "display_name": "Text",
                "dynamic": false,
                "info": "Message to be passed as output.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "input_value",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "sender": {
                "advanced": true,
                "display_name": "Sender Type",
                "dynamic": false,
                "info": "Type of sender.",
                "name": "sender",
                "options": ["Machine", "User"],
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "str",
                "value": "Machine"
              },
              "sender_name": {
                "advanced": true,
                "display_name": "Sender Name",
                "dynamic": false,
                "info": "Name of the sender.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "sender_name",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": "AI"
              },
              "session_id": {
                "advanced": true,
                "display_name": "Session ID",
                "dynamic": false,
                "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "session_id",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "should_store_message": {
                "_input_type": "BoolInput",
                "advanced": true,
                "display_name": "Store Messages",
                "dynamic": false,
                "info": "Store the message in the history.",
                "list": false,
                "name": "should_store_message",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "bool",
                "value": true
              }
            }
          },
          "type": "ChatOutput"
        },
        "height": 385,
        "id": "ChatOutput-NasE4",
        "position": { "x": 3083.1710516244116, "y": 701.521688846004 },
        "selected": false,
        "type": "genericNode",
        "width": 384
      },
      {
        "data": {
          "description": "Retrieves stored chat messages from Langflow tables or an external memory.",
          "display_name": "Chat Memory",
          "id": "Memory-x4ENQ",
          "node": {
            "base_classes": ["BaseChatMemory", "Data", "Message"],
            "beta": false,
            "conditional_paths": [],
            "custom_fields": {},
            "description": "Retrieves stored chat messages from Langflow tables or an external memory.",
            "display_name": "Chat Memory",
            "documentation": "",
            "edited": false,
            "field_order": [
              "memory",
              "sender",
              "sender_name",
              "n_messages",
              "session_id",
              "order",
              "template"
            ],
            "frozen": false,
            "icon": "message-square-more",
            "output_types": [],
            "outputs": [
              {
                "cache": true,
                "display_name": "Messages (Data)",
                "method": "retrieve_messages",
                "name": "messages",
                "selected": "Data",
                "types": ["Data"],
                "value": "__UNDEFINED__"
              },
              {
                "cache": true,
                "display_name": "Messages (Text)",
                "method": "retrieve_messages_as_text",
                "name": "messages_text",
                "selected": "Message",
                "types": ["Message"],
                "value": "__UNDEFINED__"
              },
              {
                "cache": true,
                "display_name": "Memory",
                "method": "build_lc_memory",
                "name": "lc_memory",
                "selected": "BaseChatMemory",
                "types": ["BaseChatMemory"],
                "value": "__UNDEFINED__"
              }
            ],
            "pinned": false,
            "template": {
              "_type": "Component",
              "code": {
                "advanced": true,
                "dynamic": true,
                "fileTypes": [],
                "file_path": "",
                "info": "",
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "code",
                "password": false,
                "placeholder": "",
                "required": true,
                "show": true,
                "title_case": false,
                "type": "code",
                "value": "from langchain.memory import ConversationBufferMemory\n\nfrom langflow.custom import Component\nfrom langflow.field_typing import BaseChatMemory\nfrom langflow.helpers.data import data_to_text\nfrom langflow.inputs import HandleInput\nfrom langflow.io import DropdownInput, IntInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import LCBuiltinChatMemory, get_messages\nfrom langflow.schema import Data\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER\n\n\nclass MemoryComponent(Component):\n display_name = \"Chat Memory\"\n description = \"Retrieves stored chat messages from Langflow tables or an external memory.\"\n icon = \"message-square-more\"\n name = \"Memory\"\n\n inputs = [\n HandleInput(\n name=\"memory\",\n display_name=\"External Memory\",\n input_types=[\"BaseChatMessageHistory\"],\n info=\"Retrieve messages from an external memory. If empty, it will use the Langflow tables.\",\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, \"Machine and User\"],\n value=\"Machine and User\",\n info=\"Filter by sender type.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Filter by sender name.\",\n advanced=True,\n ),\n IntInput(\n name=\"n_messages\",\n display_name=\"Number of Messages\",\n value=100,\n info=\"Number of messages to retrieve.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"order\",\n display_name=\"Order\",\n options=[\"Ascending\", \"Descending\"],\n value=\"Ascending\",\n info=\"Order of the messages.\",\n advanced=True,\n ),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.\",\n value=\"{sender_name}: {text}\",\n advanced=True,\n ),\n ]\n\n outputs = [\n Output(display_name=\"Messages (Data)\", name=\"messages\", method=\"retrieve_messages\"),\n Output(display_name=\"Messages (Text)\", name=\"messages_text\", method=\"retrieve_messages_as_text\"),\n Output(display_name=\"Memory\", name=\"lc_memory\", method=\"build_lc_memory\"),\n ]\n\n def retrieve_messages(self) -> Data:\n sender = self.sender\n sender_name = self.sender_name\n session_id = self.session_id\n n_messages = self.n_messages\n order = \"DESC\" if self.order == \"Descending\" else \"ASC\"\n\n if sender == \"Machine and User\":\n sender = None\n\n if self.memory:\n # override session_id\n self.memory.session_id = session_id\n\n stored = self.memory.messages\n # langchain memories are supposed to return messages in ascending order\n if order == \"DESC\":\n stored = stored[::-1]\n if n_messages:\n stored = stored[:n_messages]\n stored = [Message.from_lc_message(m) for m in stored]\n if sender:\n expected_type = MESSAGE_SENDER_AI if sender == MESSAGE_SENDER_AI else MESSAGE_SENDER_USER\n stored = [m for m in stored if m.type == expected_type]\n else:\n stored = get_messages(\n sender=sender,\n sender_name=sender_name,\n session_id=session_id,\n limit=n_messages,\n order=order,\n )\n self.status = stored\n return stored\n\n def retrieve_messages_as_text(self) -> Message:\n stored_text = data_to_text(self.template, self.retrieve_messages())\n self.status = stored_text\n return Message(text=stored_text)\n\n def build_lc_memory(self) -> BaseChatMemory:\n if self.memory:\n chat_memory = self.memory\n else:\n chat_memory = LCBuiltinChatMemory(flow_id=self.flow_id, session_id=self.session_id)\n return ConversationBufferMemory(chat_memory=chat_memory)\n"
              },
              "memory": {
                "advanced": false,
                "display_name": "External Memory",
                "dynamic": false,
                "info": "Retrieve messages from an external memory. If empty, it will use the Langflow tables.",
                "input_types": ["BaseChatMessageHistory"],
                "list": false,
                "name": "memory",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "other",
                "value": ""
              },
              "n_messages": {
                "advanced": true,
                "display_name": "Number of Messages",
                "dynamic": false,
                "info": "Number of messages to retrieve.",
                "list": false,
                "name": "n_messages",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "int",
                "value": 100
              },
              "order": {
                "advanced": true,
                "display_name": "Order",
                "dynamic": false,
                "info": "Order of the messages.",
                "name": "order",
                "options": ["Ascending", "Descending"],
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "str",
                "value": "Ascending"
              },
              "sender": {
                "advanced": true,
                "display_name": "Sender Type",
                "dynamic": false,
                "info": "Filter by sender type.",
                "name": "sender",
                "options": ["Machine", "User", "Machine and User"],
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_metadata": true,
                "type": "str",
                "value": "Machine and User"
              },
              "sender_name": {
                "advanced": true,
                "display_name": "Sender Name",
                "dynamic": false,
                "info": "Filter by sender name.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "sender_name",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "session_id": {
                "advanced": true,
                "display_name": "Session ID",
                "dynamic": false,
                "info": "The session ID of the chat. If empty, the current session ID parameter will be used.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "name": "session_id",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": ""
              },
              "template": {
                "advanced": true,
                "display_name": "Template",
                "dynamic": false,
                "info": "The template to use for formatting the data. It can contain the keys {text}, {sender} or any other key in the message data.",
                "input_types": ["Message"],
                "list": false,
                "load_from_db": false,
                "multiline": true,
                "name": "template",
                "placeholder": "",
                "required": false,
                "show": true,
                "title_case": false,
                "trace_as_input": true,
                "trace_as_metadata": true,
                "type": "str",
                "value": "{sender_name}: {text}"
              }
            }
          },
          "type": "Memory"
        },
        "dragging": false,
        "height": 387,
        "id": "Memory-x4ENQ",
        "position": { "x": 1301.98330242754, "y": 422.33865605652574 },
        "positionAbsolute": { "x": 1301.98330242754, "y": 422.33865605652574 },
        "selected": false,
        "type": "genericNode",
        "width": 384
      }
    ],
    "viewport": {
      "x": -377.45799796990354,
      "y": 18.161555190942522,
      "zoom": 0.45494095964690673
    }
  },
  "description": "This project can be used as a starting point for building a Chat experience with user specific memory. You can set a different Session ID to start a new message history.",
  "name": "Memory Chatbot (1)",
  "last_tested_version": "1.0.15",
  "endpoint_name": null,
  "is_component": false
} | |