Created September 5, 2024 12:47
OpenAI & Upstash Vector RAG.json
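The exported flow below wires together Chat Input, File, Split Text, two Upstash vector-store nodes, Parse Data, Prompt, OpenAI and Chat Output into what appears to be a standard ingest-then-retrieve RAG pipeline. As a rough companion to the JSON, here is a minimal standalone sketch of that pipeline using the same LangChain calls the embedded Upstash component makes. It is an approximation, not part of the flow: it assumes langchain-community, langchain-openai, langchain-text-splitters and the upstash-vector client are installed, that the Upstash Vector index was created with a built-in embedding model (the flow leaves the Embedding input unset, so embedding=True), and that credentials live in environment variables named after the flow's UPSTASH_VECTOR_URL, UPSTASH_VECTOR_TOKEN and OPENAI_API_KEY variables. The prompt wording is illustrative only; the real template lives in the Prompt node.

import os

from langchain_community.vectorstores import UpstashVectorStore
from langchain_core.documents import Document
from langchain_openai import ChatOpenAI
from langchain_text_splitters import CharacterTextSplitter

# Vector store configured the way the flow's Upstash nodes are: no Embedding
# input connected, so Upstash's built-in embedding model is used (embedding=True).
store = UpstashVectorStore(
    embedding=True,
    index_url=os.environ["UPSTASH_VECTOR_URL"],      # assumed env var, mirrors the flow variable
    index_token=os.environ["UPSTASH_VECTOR_TOKEN"],  # assumed env var, mirrors the flow variable
)

# Ingestion path: File -> Split Text (chunk_size=1000, chunk_overlap=200, separator="\n") -> Upstash.
raw_text = open("little_prince.txt", encoding="utf-8").read()
splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200, separator="\n")
chunks = splitter.split_documents([Document(page_content=raw_text)])
store.add_documents(chunks)

# Retrieval path: Chat Input -> Upstash search -> Parse Data -> Prompt -> OpenAI -> Chat Output.
question = "who is his friend?"
hits = store.similarity_search(query=question, k=4)
context = "\n".join(doc.page_content for doc in hits)

llm = ChatOpenAI(model="gpt-4o-mini", temperature=0.1, api_key=os.environ["OPENAI_API_KEY"])
prompt = f"Answer the question using the context below.\n\nContext:\n{context}\n\nQuestion: {question}"  # illustrative prompt
print(llm.invoke(prompt).content)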
{ | |
"icon_bg_color": null, | |
"updated_at": "2024-09-05T12:40:30+00:00", | |
"webhook": false, | |
"id": "0e95ed7c-5e49-436f-a10b-c648010a6531", | |
"folder_id": "cafb0f8f-75f4-439d-8f7a-0391b5e23932", | |
"name": "OpenAI & Upstash Vector RAG", | |
"description": "Driving Innovation in Business Communication.", | |
"icon": null, | |
"is_component": false, | |
"endpoint_name": null, | |
"data": { | |
"nodes": [ | |
{ | |
"id": "ChatInput-gANZa", | |
"type": "genericNode", | |
"position": { | |
"x": -376.2036735306894, | |
"y": 401.83923396270535 | |
}, | |
"data": { | |
"type": "ChatInput", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"files": { | |
"trace_as_metadata": true, | |
"file_path": "", | |
"fileTypes": [ | |
"txt", | |
"md", | |
"mdx", | |
"csv", | |
"json", | |
"yaml", | |
"yml", | |
"xml", | |
"html", | |
"htm", | |
"pdf", | |
"docx", | |
"py", | |
"sh", | |
"sql", | |
"js", | |
"ts", | |
"tsx", | |
"jpg", | |
"jpeg", | |
"png", | |
"bmp", | |
"image" | |
], | |
"list": true, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "files", | |
"value": "", | |
"display_name": "Files", | |
"advanced": true, | |
"dynamic": false, | |
"info": "Files to be sent with the message.", | |
"title_case": false, | |
"type": "file", | |
"_input_type": "FileInput" | |
}, | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "from langflow.base.data.utils import IMG_FILE_TYPES, TEXT_FILE_TYPES\nfrom langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, FileInput, MessageTextInput, MultilineInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_NAME_USER\n\n\nclass ChatInput(ChatComponent):\n display_name = \"Chat Input\"\n description = \"Get chat inputs from the Playground.\"\n icon = \"ChatInput\"\n name = \"ChatInput\"\n\n inputs = [\n MultilineInput(\n name=\"input_value\",\n display_name=\"Text\",\n value=\"\",\n info=\"Message to be passed as input.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_USER,\n info=\"Type of sender.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_USER,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n FileInput(\n name=\"files\",\n display_name=\"Files\",\n file_types=TEXT_FILE_TYPES + IMG_FILE_TYPES,\n info=\"Files to be sent with the message.\",\n advanced=True,\n is_list=True,\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n files=self.files,\n )\n\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"input_value": { | |
"trace_as_input": true, | |
"multiline": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "input_value", | |
"value": "who is his friend?", | |
"display_name": "Text", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "Message to be passed as input.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MultilineInput" | |
}, | |
"sender": { | |
"trace_as_metadata": true, | |
"options": [ | |
"Machine", | |
"User" | |
], | |
"combobox": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "sender", | |
"value": "User", | |
"display_name": "Sender Type", | |
"advanced": true, | |
"dynamic": false, | |
"info": "Type of sender.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "DropdownInput" | |
}, | |
"sender_name": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "sender_name", | |
"value": "User", | |
"display_name": "Sender Name", | |
"advanced": true, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "Name of the sender.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageTextInput" | |
}, | |
"session_id": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "session_id", | |
"value": "", | |
"display_name": "Session ID", | |
"advanced": true, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageTextInput" | |
}, | |
"should_store_message": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "should_store_message", | |
"value": true, | |
"display_name": "Store Messages", | |
"advanced": true, | |
"dynamic": false, | |
"info": "Store the message in the history.", | |
"title_case": false, | |
"type": "bool", | |
"_input_type": "BoolInput" | |
} | |
}, | |
"description": "Get chat inputs from the Playground.", | |
"icon": "ChatInput", | |
"base_classes": [ | |
"Message" | |
], | |
"display_name": "Chat Input", | |
"documentation": "", | |
"custom_fields": {}, | |
"output_types": [], | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Message" | |
], | |
"selected": "Message", | |
"name": "message", | |
"display_name": "Message", | |
"method": "message_response", | |
"value": "__UNDEFINED__", | |
"cache": true | |
} | |
], | |
"field_order": [ | |
"input_value", | |
"should_store_message", | |
"sender", | |
"sender_name", | |
"session_id", | |
"files" | |
], | |
"beta": false, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "ChatInput-gANZa" | |
}, | |
"selected": false, | |
"width": 384, | |
"height": 298, | |
"positionAbsolute": { | |
"x": -376.2036735306894, | |
"y": 401.83923396270535 | |
}, | |
"dragging": false | |
}, | |
{ | |
"id": "OpenAIModel-Z5rm4", | |
"type": "genericNode", | |
"position": { | |
"x": 1606.2498562665157, | |
"y": 380.8237660377299 | |
}, | |
"data": { | |
"type": "OpenAIModel", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"api_key": { | |
"load_from_db": true, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "api_key", | |
"value": null, | |
"display_name": "OpenAI API Key", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "The OpenAI API Key to use for the OpenAI model.", | |
"title_case": false, | |
"password": true, | |
"type": "str", | |
"_input_type": "SecretStrInput" | |
}, | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "import operator\nfrom functools import reduce\n\nfrom langflow.field_typing.range_spec import RangeSpec\nfrom langchain_openai import ChatOpenAI\nfrom pydantic.v1 import SecretStr\n\nfrom langflow.base.models.model import LCModelComponent\nfrom langflow.base.models.openai_constants import OPENAI_MODEL_NAMES\nfrom langflow.field_typing import LanguageModel\nfrom langflow.inputs import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n SecretStrInput,\n StrInput,\n)\n\n\nclass OpenAIModelComponent(LCModelComponent):\n display_name = \"OpenAI\"\n description = \"Generates text using OpenAI LLMs.\"\n icon = \"OpenAI\"\n name = \"OpenAIModel\"\n\n inputs = LCModelComponent._base_inputs + [\n IntInput(\n name=\"max_tokens\",\n display_name=\"Max Tokens\",\n advanced=True,\n info=\"The maximum number of tokens to generate. Set to 0 for unlimited tokens.\",\n range_spec=RangeSpec(min=0, max=128000),\n ),\n DictInput(name=\"model_kwargs\", display_name=\"Model Kwargs\", advanced=True),\n BoolInput(\n name=\"json_mode\",\n display_name=\"JSON Mode\",\n advanced=True,\n info=\"If True, it will output JSON regardless of passing a schema.\",\n ),\n DictInput(\n name=\"output_schema\",\n is_list=True,\n display_name=\"Schema\",\n advanced=True,\n info=\"The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.\",\n ),\n DropdownInput(\n name=\"model_name\",\n display_name=\"Model Name\",\n advanced=False,\n options=OPENAI_MODEL_NAMES,\n value=OPENAI_MODEL_NAMES[0],\n ),\n StrInput(\n name=\"openai_api_base\",\n display_name=\"OpenAI API Base\",\n advanced=True,\n info=\"The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"The OpenAI API Key to use for the OpenAI model.\",\n advanced=False,\n value=\"OPENAI_API_KEY\",\n ),\n FloatInput(name=\"temperature\", display_name=\"Temperature\", value=0.1),\n IntInput(\n name=\"seed\",\n display_name=\"Seed\",\n info=\"The seed controls the reproducibility of the job.\",\n advanced=True,\n value=1,\n ),\n ]\n\n def build_model(self) -> LanguageModel: # type: ignore[type-var]\n # self.output_schema is a list of dictionaries\n # let's convert it to a dictionary\n output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})\n openai_api_key = self.api_key\n temperature = self.temperature\n model_name: str = self.model_name\n max_tokens = self.max_tokens\n model_kwargs = self.model_kwargs or {}\n openai_api_base = self.openai_api_base or \"https://api.openai.com/v1\"\n json_mode = bool(output_schema_dict) or self.json_mode\n seed = self.seed\n\n if openai_api_key:\n api_key = SecretStr(openai_api_key)\n else:\n api_key = None\n output = ChatOpenAI(\n max_tokens=max_tokens or None,\n model_kwargs=model_kwargs,\n model=model_name,\n base_url=openai_api_base,\n api_key=api_key,\n temperature=temperature if temperature is not None else 0.1,\n seed=seed,\n )\n if json_mode:\n if output_schema_dict:\n output = output.with_structured_output(schema=output_schema_dict, method=\"json_mode\") # type: ignore\n else:\n output = output.bind(response_format={\"type\": \"json_object\"}) # type: ignore\n\n return output # type: ignore\n\n def _get_exception_message(self, e: Exception):\n \"\"\"\n Get a message from an OpenAI exception.\n\n Args:\n exception (Exception): The exception 
to get the message from.\n\n Returns:\n str: The message from the exception.\n \"\"\"\n\n try:\n from openai import BadRequestError\n except ImportError:\n return\n if isinstance(e, BadRequestError):\n message = e.body.get(\"message\") # type: ignore\n if message:\n return message\n return\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"input_value": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "input_value", | |
"value": "", | |
"display_name": "Input", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageInput" | |
}, | |
"json_mode": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "json_mode", | |
"value": false, | |
"display_name": "JSON Mode", | |
"advanced": true, | |
"dynamic": false, | |
"info": "If True, it will output JSON regardless of passing a schema.", | |
"title_case": false, | |
"type": "bool", | |
"_input_type": "BoolInput" | |
}, | |
"max_tokens": { | |
"trace_as_metadata": true, | |
"range_spec": { | |
"step_type": "float", | |
"min": 0, | |
"max": 128000, | |
"step": 0.1 | |
}, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "max_tokens", | |
"value": "", | |
"display_name": "Max Tokens", | |
"advanced": true, | |
"dynamic": false, | |
"info": "The maximum number of tokens to generate. Set to 0 for unlimited tokens.", | |
"title_case": false, | |
"type": "int", | |
"_input_type": "IntInput" | |
}, | |
"model_kwargs": { | |
"trace_as_input": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "model_kwargs", | |
"value": {}, | |
"display_name": "Model Kwargs", | |
"advanced": true, | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "dict", | |
"_input_type": "DictInput" | |
}, | |
"model_name": { | |
"trace_as_metadata": true, | |
"options": [ | |
"gpt-4o-mini", | |
"gpt-4o", | |
"gpt-4-turbo", | |
"gpt-4-turbo-preview", | |
"gpt-4", | |
"gpt-3.5-turbo", | |
"gpt-3.5-turbo-0125" | |
], | |
"combobox": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "model_name", | |
"value": "gpt-4o-mini", | |
"display_name": "Model Name", | |
"advanced": false, | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "DropdownInput" | |
}, | |
"openai_api_base": { | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "openai_api_base", | |
"value": "", | |
"display_name": "OpenAI API Base", | |
"advanced": true, | |
"dynamic": false, | |
"info": "The base URL of the OpenAI API. Defaults to https://api.openai.com/v1. You can change this to use other APIs like JinaChat, LocalAI and Prem.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "StrInput" | |
}, | |
"output_schema": { | |
"trace_as_input": true, | |
"list": true, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "output_schema", | |
"value": {}, | |
"display_name": "Schema", | |
"advanced": true, | |
"dynamic": false, | |
"info": "The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.", | |
"title_case": false, | |
"type": "dict", | |
"_input_type": "DictInput" | |
}, | |
"seed": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "seed", | |
"value": 1, | |
"display_name": "Seed", | |
"advanced": true, | |
"dynamic": false, | |
"info": "The seed controls the reproducibility of the job.", | |
"title_case": false, | |
"type": "int", | |
"_input_type": "IntInput" | |
}, | |
"stream": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "stream", | |
"value": false, | |
"display_name": "Stream", | |
"advanced": true, | |
"dynamic": false, | |
"info": "Stream the response from the model. Streaming works only in Chat.", | |
"title_case": false, | |
"type": "bool", | |
"_input_type": "BoolInput" | |
}, | |
"system_message": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "system_message", | |
"value": "", | |
"display_name": "System Message", | |
"advanced": true, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "System message to pass to the model.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageTextInput" | |
}, | |
"temperature": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "temperature", | |
"value": 0.1, | |
"display_name": "Temperature", | |
"advanced": false, | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "float", | |
"_input_type": "FloatInput" | |
} | |
}, | |
"description": "Generates text using OpenAI LLMs.", | |
"icon": "OpenAI", | |
"base_classes": [ | |
"LanguageModel", | |
"Message" | |
], | |
"display_name": "OpenAI", | |
"documentation": "", | |
"custom_fields": {}, | |
"output_types": [], | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Message" | |
], | |
"selected": "Message", | |
"name": "text_output", | |
"display_name": "Text", | |
"method": "text_response", | |
"value": "__UNDEFINED__", | |
"cache": true | |
}, | |
{ | |
"types": [ | |
"LanguageModel" | |
], | |
"selected": "LanguageModel", | |
"name": "model_output", | |
"display_name": "Language Model", | |
"method": "build_model", | |
"value": "__UNDEFINED__", | |
"cache": true | |
} | |
], | |
"field_order": [ | |
"input_value", | |
"system_message", | |
"stream", | |
"max_tokens", | |
"model_kwargs", | |
"json_mode", | |
"output_schema", | |
"model_name", | |
"openai_api_base", | |
"api_key", | |
"temperature", | |
"seed" | |
], | |
"beta": false, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "OpenAIModel-Z5rm4" | |
}, | |
"selected": false, | |
"width": 384, | |
"height": 601, | |
"positionAbsolute": { | |
"x": 1606.2498562665157, | |
"y": 380.8237660377299 | |
}, | |
"dragging": false | |
}, | |
{ | |
"id": "ChatOutput-MxAb9", | |
"type": "genericNode", | |
"position": { | |
"x": 2081.0409327491257, | |
"y": 393.19993878756844 | |
}, | |
"data": { | |
"type": "ChatOutput", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "from langflow.base.io.chat import ChatComponent\nfrom langflow.inputs import BoolInput\nfrom langflow.io import DropdownInput, MessageTextInput, Output\nfrom langflow.memory import store_message\nfrom langflow.schema.message import Message\nfrom langflow.utils.constants import MESSAGE_SENDER_NAME_AI, MESSAGE_SENDER_USER, MESSAGE_SENDER_AI\n\n\nclass ChatOutput(ChatComponent):\n display_name = \"Chat Output\"\n description = \"Display a chat message in the Playground.\"\n icon = \"ChatOutput\"\n name = \"ChatOutput\"\n\n inputs = [\n MessageTextInput(\n name=\"input_value\",\n display_name=\"Text\",\n info=\"Message to be passed as output.\",\n ),\n BoolInput(\n name=\"should_store_message\",\n display_name=\"Store Messages\",\n info=\"Store the message in the history.\",\n value=True,\n advanced=True,\n ),\n DropdownInput(\n name=\"sender\",\n display_name=\"Sender Type\",\n options=[MESSAGE_SENDER_AI, MESSAGE_SENDER_USER],\n value=MESSAGE_SENDER_AI,\n advanced=True,\n info=\"Type of sender.\",\n ),\n MessageTextInput(\n name=\"sender_name\",\n display_name=\"Sender Name\",\n info=\"Name of the sender.\",\n value=MESSAGE_SENDER_NAME_AI,\n advanced=True,\n ),\n MessageTextInput(\n name=\"session_id\",\n display_name=\"Session ID\",\n info=\"The session ID of the chat. If empty, the current session ID parameter will be used.\",\n advanced=True,\n ),\n MessageTextInput(\n name=\"data_template\",\n display_name=\"Data Template\",\n value=\"{text}\",\n advanced=True,\n info=\"Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.\",\n ),\n ]\n outputs = [\n Output(display_name=\"Message\", name=\"message\", method=\"message_response\"),\n ]\n\n def message_response(self) -> Message:\n message = Message(\n text=self.input_value,\n sender=self.sender,\n sender_name=self.sender_name,\n session_id=self.session_id,\n )\n if (\n self.session_id\n and isinstance(message, Message)\n and isinstance(message.text, str)\n and self.should_store_message\n ):\n store_message(\n message,\n flow_id=self.graph.flow_id,\n )\n self.message.value = message\n\n self.status = message\n return message\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"data_template": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "data_template", | |
"value": "{text}", | |
"display_name": "Data Template", | |
"advanced": true, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "Template to convert Data to Text. If left empty, it will be dynamically set to the Data's text key.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageTextInput" | |
}, | |
"input_value": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "input_value", | |
"value": "", | |
"display_name": "Text", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "Message to be passed as output.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageTextInput" | |
}, | |
"sender": { | |
"trace_as_metadata": true, | |
"options": [ | |
"Machine", | |
"User" | |
], | |
"combobox": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "sender", | |
"value": "Machine", | |
"display_name": "Sender Type", | |
"advanced": true, | |
"dynamic": false, | |
"info": "Type of sender.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "DropdownInput" | |
}, | |
"sender_name": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "sender_name", | |
"value": "AI", | |
"display_name": "Sender Name", | |
"advanced": true, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "Name of the sender.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageTextInput" | |
}, | |
"session_id": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "session_id", | |
"value": "", | |
"display_name": "Session ID", | |
"advanced": true, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "The session ID of the chat. If empty, the current session ID parameter will be used.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageTextInput" | |
}, | |
"should_store_message": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "should_store_message", | |
"value": true, | |
"display_name": "Store Messages", | |
"advanced": true, | |
"dynamic": false, | |
"info": "Store the message in the history.", | |
"title_case": false, | |
"type": "bool", | |
"_input_type": "BoolInput" | |
} | |
}, | |
"description": "Display a chat message in the Playground.", | |
"icon": "ChatOutput", | |
"base_classes": [ | |
"Message" | |
], | |
"display_name": "Chat Output", | |
"documentation": "", | |
"custom_fields": {}, | |
"output_types": [], | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Message" | |
], | |
"selected": "Message", | |
"name": "message", | |
"display_name": "Message", | |
"method": "message_response", | |
"value": "__UNDEFINED__", | |
"cache": true, | |
"hidden": true | |
} | |
], | |
"field_order": [ | |
"input_value", | |
"should_store_message", | |
"sender", | |
"sender_name", | |
"session_id", | |
"data_template" | |
], | |
"beta": false, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "ChatOutput-MxAb9" | |
}, | |
"selected": false, | |
"width": 384, | |
"height": 253, | |
"positionAbsolute": { | |
"x": 2081.0409327491257, | |
"y": 393.19993878756844 | |
}, | |
"dragging": false | |
}, | |
{ | |
"id": "File-TGpoK", | |
"type": "genericNode", | |
"position": { | |
"x": 174.83158388605352, | |
"y": 1344.77628949614 | |
}, | |
"data": { | |
"type": "File", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"path": { | |
"trace_as_metadata": true, | |
"file_path": "0e95ed7c-5e49-436f-a10b-c648010a6531/little_prince.txt", | |
"fileTypes": [ | |
"txt", | |
"md", | |
"mdx", | |
"csv", | |
"json", | |
"yaml", | |
"yml", | |
"xml", | |
"html", | |
"htm", | |
"pdf", | |
"docx", | |
"py", | |
"sh", | |
"sql", | |
"js", | |
"ts", | |
"tsx" | |
], | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "path", | |
"value": "little_prince.txt", | |
"display_name": "Path", | |
"advanced": false, | |
"dynamic": false, | |
"info": "Supported file types: txt, md, mdx, csv, json, yaml, yml, xml, html, htm, pdf, docx, py, sh, sql, js, ts, tsx", | |
"title_case": false, | |
"type": "file", | |
"_input_type": "FileInput" | |
}, | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "from pathlib import Path\n\nfrom langflow.base.data.utils import TEXT_FILE_TYPES, parse_text_file_to_data\nfrom langflow.custom import Component\nfrom langflow.io import BoolInput, FileInput, Output\nfrom langflow.schema import Data\n\n\nclass FileComponent(Component):\n display_name = \"File\"\n description = \"A generic file loader.\"\n icon = \"file-text\"\n name = \"File\"\n\n inputs = [\n FileInput(\n name=\"path\",\n display_name=\"Path\",\n file_types=TEXT_FILE_TYPES,\n info=f\"Supported file types: {', '.join(TEXT_FILE_TYPES)}\",\n ),\n BoolInput(\n name=\"silent_errors\",\n display_name=\"Silent Errors\",\n advanced=True,\n info=\"If true, errors will not raise an exception.\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Data\", name=\"data\", method=\"load_file\"),\n ]\n\n def load_file(self) -> Data:\n if not self.path:\n raise ValueError(\"Please, upload a file to use this component.\")\n resolved_path = self.resolve_path(self.path)\n silent_errors = self.silent_errors\n\n extension = Path(resolved_path).suffix[1:].lower()\n\n if extension == \"doc\":\n raise ValueError(\"doc files are not supported. Please save as .docx\")\n if extension not in TEXT_FILE_TYPES:\n raise ValueError(f\"Unsupported file type: {extension}\")\n\n data = parse_text_file_to_data(resolved_path, silent_errors)\n self.status = data if data else \"No data\"\n return data or Data()\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"silent_errors": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "silent_errors", | |
"value": false, | |
"display_name": "Silent Errors", | |
"advanced": true, | |
"dynamic": false, | |
"info": "If true, errors will not raise an exception.", | |
"title_case": false, | |
"type": "bool", | |
"_input_type": "BoolInput" | |
} | |
}, | |
"description": "A generic file loader.", | |
"icon": "file-text", | |
"base_classes": [ | |
"Data" | |
], | |
"display_name": "File", | |
"documentation": "", | |
"custom_fields": {}, | |
"output_types": [], | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Data" | |
], | |
"selected": "Data", | |
"name": "data", | |
"display_name": "Data", | |
"method": "load_file", | |
"value": "__UNDEFINED__", | |
"cache": true | |
} | |
], | |
"field_order": [ | |
"path", | |
"silent_errors" | |
], | |
"beta": false, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "File-TGpoK" | |
}, | |
"selected": false, | |
"width": 384, | |
"height": 298, | |
"positionAbsolute": { | |
"x": 174.83158388605352, | |
"y": 1344.77628949614 | |
}, | |
"dragging": false | |
}, | |
{ | |
"id": "SplitText-H7iXy", | |
"type": "genericNode", | |
"position": { | |
"x": 699.72292665829, | |
"y": 1340.902785403657 | |
}, | |
"data": { | |
"type": "SplitText", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"data_inputs": { | |
"trace_as_metadata": true, | |
"list": true, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "data_inputs", | |
"value": "", | |
"display_name": "Data Inputs", | |
"advanced": false, | |
"input_types": [ | |
"Data" | |
], | |
"dynamic": false, | |
"info": "The data to split.", | |
"title_case": false, | |
"type": "other", | |
"_input_type": "HandleInput" | |
}, | |
"chunk_overlap": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "chunk_overlap", | |
"value": 200, | |
"display_name": "Chunk Overlap", | |
"advanced": false, | |
"dynamic": false, | |
"info": "Number of characters to overlap between chunks.", | |
"title_case": false, | |
"type": "int", | |
"_input_type": "IntInput" | |
}, | |
"chunk_size": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "chunk_size", | |
"value": 1000, | |
"display_name": "Chunk Size", | |
"advanced": false, | |
"dynamic": false, | |
"info": "The maximum number of characters in each chunk.", | |
"title_case": false, | |
"type": "int", | |
"_input_type": "IntInput" | |
}, | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "from typing import List\n\nfrom langchain_text_splitters import CharacterTextSplitter\n\nfrom langflow.custom import Component\nfrom langflow.io import HandleInput, IntInput, MessageTextInput, Output\nfrom langflow.schema import Data\nfrom langflow.utils.util import unescape_string\n\n\nclass SplitTextComponent(Component):\n display_name: str = \"Split Text\"\n description: str = \"Split text into chunks based on specified criteria.\"\n icon = \"scissors-line-dashed\"\n name = \"SplitText\"\n\n inputs = [\n HandleInput(\n name=\"data_inputs\",\n display_name=\"Data Inputs\",\n info=\"The data to split.\",\n input_types=[\"Data\"],\n is_list=True,\n ),\n IntInput(\n name=\"chunk_overlap\",\n display_name=\"Chunk Overlap\",\n info=\"Number of characters to overlap between chunks.\",\n value=200,\n ),\n IntInput(\n name=\"chunk_size\",\n display_name=\"Chunk Size\",\n info=\"The maximum number of characters in each chunk.\",\n value=1000,\n ),\n MessageTextInput(\n name=\"separator\",\n display_name=\"Separator\",\n info=\"The character to split on. Defaults to newline.\",\n value=\"\\n\",\n ),\n ]\n\n outputs = [\n Output(display_name=\"Chunks\", name=\"chunks\", method=\"split_text\"),\n ]\n\n def _docs_to_data(self, docs):\n data = []\n for doc in docs:\n data.append(Data(text=doc.page_content, data=doc.metadata))\n return data\n\n def split_text(self) -> List[Data]:\n separator = unescape_string(self.separator)\n\n documents = []\n for _input in self.data_inputs:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n\n splitter = CharacterTextSplitter(\n chunk_overlap=self.chunk_overlap,\n chunk_size=self.chunk_size,\n separator=separator,\n )\n docs = splitter.split_documents(documents)\n data = self._docs_to_data(docs)\n self.status = data\n return data\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"separator": { | |
"trace_as_input": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "separator", | |
"value": "\n", | |
"display_name": "Separator", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "The character to split on. Defaults to newline.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MessageTextInput" | |
} | |
}, | |
"description": "Split text into chunks based on specified criteria.", | |
"icon": "scissors-line-dashed", | |
"base_classes": [ | |
"Data" | |
], | |
"display_name": "Split Text", | |
"documentation": "", | |
"custom_fields": {}, | |
"output_types": [], | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Data" | |
], | |
"selected": "Data", | |
"name": "chunks", | |
"display_name": "Chunks", | |
"method": "split_text", | |
"value": "__UNDEFINED__", | |
"cache": true | |
} | |
], | |
"field_order": [ | |
"data_inputs", | |
"chunk_overlap", | |
"chunk_size", | |
"separator" | |
], | |
"beta": false, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "SplitText-H7iXy" | |
}, | |
"selected": false, | |
"width": 384, | |
"height": 546, | |
"positionAbsolute": { | |
"x": 699.72292665829, | |
"y": 1340.902785403657 | |
}, | |
"dragging": false | |
}, | |
{ | |
"id": "Upstash-dFmNR", | |
"type": "genericNode", | |
"position": { | |
"x": 1229.7499140204827, | |
"y": 1340.9027854036572 | |
}, | |
"data": { | |
"type": "Upstash", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"embedding": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "embedding", | |
"value": "", | |
"display_name": "Embedding", | |
"advanced": false, | |
"input_types": [ | |
"Embeddings" | |
], | |
"dynamic": false, | |
"info": "To use Upstash's embeddings, don't provide an embedding.", | |
"title_case": false, | |
"type": "other", | |
"_input_type": "HandleInput" | |
}, | |
"ingest_data": { | |
"trace_as_metadata": true, | |
"list": true, | |
"trace_as_input": true, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "ingest_data", | |
"value": "", | |
"display_name": "Ingest Data", | |
"advanced": false, | |
"input_types": [ | |
"Data" | |
], | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "other", | |
"_input_type": "DataInput" | |
}, | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "from typing import List\n\nfrom langchain_community.vectorstores import UpstashVectorStore\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.helpers.data import docs_to_data\nfrom langflow.io import (\n HandleInput,\n IntInput,\n StrInput,\n SecretStrInput,\n DataInput,\n MultilineInput,\n)\nfrom langflow.schema import Data\n\n\nclass UpstashVectorStoreComponent(LCVectorStoreComponent):\n display_name = \"Upstash\"\n description = \"Upstash Vector Store with search capabilities\"\n documentation = \"https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/\"\n name = \"Upstash\"\n icon = \"Upstash\"\n\n inputs = [\n StrInput(\n name=\"index_url\",\n display_name=\"Index URL\",\n info=\"The URL of the Upstash index.\",\n required=True,\n ),\n SecretStrInput(\n name=\"index_token\",\n display_name=\"Index Token\",\n info=\"The token for the Upstash index.\",\n required=True,\n ),\n StrInput(\n name=\"text_key\",\n display_name=\"Text Key\",\n info=\"The key in the record to use as text.\",\n value=\"text\",\n advanced=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Leave empty for default namespace.\",\n ),\n MultilineInput(name=\"search_query\", display_name=\"Search Query\"),\n MultilineInput(\n name=\"metadata_filter\",\n display_name=\"Metadata Filter\",\n info=\"Filters documents by metadata. Look at the documentation for more information.\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding\",\n input_types=[\"Embeddings\"],\n info=\"To use Upstash's embeddings, don't provide an embedding.\",\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n value=4,\n advanced=True,\n ),\n ]\n\n @check_cached_vector_store\n def build_vector_store(self) -> UpstashVectorStore:\n use_upstash_embedding = self.embedding is None\n\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n\n if documents:\n if use_upstash_embedding:\n upstash_vs = UpstashVectorStore(\n embedding=use_upstash_embedding,\n text_key=self.text_key,\n index_url=self.index_url,\n index_token=self.index_token,\n namespace=self.namespace,\n )\n upstash_vs.add_documents(documents)\n else:\n upstash_vs = UpstashVectorStore.from_documents(\n documents=documents,\n embedding=self.embedding,\n text_key=self.text_key,\n index_url=self.index_url,\n index_token=self.index_token,\n namespace=self.namespace,\n )\n else:\n upstash_vs = UpstashVectorStore(\n embedding=self.embedding or use_upstash_embedding,\n text_key=self.text_key,\n index_url=self.index_url,\n index_token=self.index_token,\n namespace=self.namespace,\n )\n\n return upstash_vs\n\n def search_documents(self) -> List[Data]:\n vector_store = self.build_vector_store()\n\n if self.search_query and isinstance(self.search_query, str) and self.search_query.strip():\n docs = vector_store.similarity_search(\n query=self.search_query,\n k=self.number_of_results,\n filter=self.metadata_filter,\n )\n\n data = docs_to_data(docs)\n self.status = data\n return data\n else:\n return []\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"index_token": { | |
"load_from_db": true, | |
"required": true, | |
"placeholder": "", | |
"show": true, | |
"name": "index_token", | |
"value": "UPSTASH_VECTOR_TOKEN", | |
"display_name": "Index Token", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "The token for the Upstash index.", | |
"title_case": false, | |
"password": true, | |
"type": "str", | |
"_input_type": "SecretStrInput" | |
}, | |
"index_url": { | |
"trace_as_metadata": true, | |
"load_from_db": true, | |
"list": false, | |
"required": true, | |
"placeholder": "", | |
"show": true, | |
"name": "index_url", | |
"value": "UPSTASH_VECTOR_URL", | |
"display_name": "Index URL", | |
"advanced": false, | |
"dynamic": false, | |
"info": "The URL of the Upstash index.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "StrInput" | |
}, | |
"metadata_filter": { | |
"trace_as_input": true, | |
"multiline": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "metadata_filter", | |
"value": "", | |
"display_name": "Metadata Filter", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "Filters documents by metadata. Look at the documentation for more information.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MultilineInput" | |
}, | |
"namespace": { | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "namespace", | |
"value": "", | |
"display_name": "Namespace", | |
"advanced": false, | |
"dynamic": false, | |
"info": "Leave empty for default namespace.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "StrInput" | |
}, | |
"number_of_results": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "number_of_results", | |
"value": 4, | |
"display_name": "Number of Results", | |
"advanced": true, | |
"dynamic": false, | |
"info": "Number of results to return.", | |
"title_case": false, | |
"type": "int", | |
"_input_type": "IntInput" | |
}, | |
"search_query": { | |
"trace_as_input": true, | |
"multiline": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "search_query", | |
"value": "", | |
"display_name": "Search Query", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MultilineInput" | |
}, | |
"text_key": { | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "text_key", | |
"value": "text", | |
"display_name": "Text Key", | |
"advanced": true, | |
"dynamic": false, | |
"info": "The key in the record to use as text.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "StrInput" | |
} | |
}, | |
"description": "Upstash Vector Store with search capabilities", | |
"icon": "Upstash", | |
"base_classes": [ | |
"Data", | |
"Retriever", | |
"VectorStore" | |
], | |
"display_name": "Upstash", | |
"documentation": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/", | |
"custom_fields": {}, | |
"output_types": [], | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Retriever" | |
], | |
"selected": "Retriever", | |
"name": "base_retriever", | |
"display_name": "Retriever", | |
"method": "build_base_retriever", | |
"value": "__UNDEFINED__", | |
"cache": true | |
}, | |
{ | |
"types": [ | |
"Data" | |
], | |
"selected": "Data", | |
"name": "search_results", | |
"display_name": "Search Results", | |
"method": "search_documents", | |
"value": "__UNDEFINED__", | |
"cache": true | |
}, | |
{ | |
"types": [ | |
"VectorStore" | |
], | |
"selected": "VectorStore", | |
"name": "vector_store", | |
"display_name": "Vector Store", | |
"method": "cast_vector_store", | |
"value": "__UNDEFINED__", | |
"cache": true | |
} | |
], | |
"field_order": [ | |
"index_url", | |
"index_token", | |
"text_key", | |
"namespace", | |
"search_query", | |
"metadata_filter", | |
"ingest_data", | |
"embedding", | |
"number_of_results" | |
], | |
"beta": false, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "Upstash-dFmNR" | |
}, | |
"selected": false, | |
"width": 384, | |
"height": 828, | |
"positionAbsolute": { | |
"x": 1229.7499140204827, | |
"y": 1340.9027854036572 | |
}, | |
"dragging": false | |
}, | |
{ | |
"id": "Upstash-FIMbg", | |
"type": "genericNode", | |
"position": { | |
"x": 192.88830896698607, | |
"y": 404.21485713968906 | |
}, | |
"data": { | |
"type": "Upstash", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"embedding": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "embedding", | |
"value": "", | |
"display_name": "Embedding", | |
"advanced": false, | |
"input_types": [ | |
"Embeddings" | |
], | |
"dynamic": false, | |
"info": "To use Upstash's embeddings, don't provide an embedding.", | |
"title_case": false, | |
"type": "other", | |
"_input_type": "HandleInput" | |
}, | |
"ingest_data": { | |
"trace_as_metadata": true, | |
"list": true, | |
"trace_as_input": true, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "ingest_data", | |
"value": "", | |
"display_name": "Ingest Data", | |
"advanced": false, | |
"input_types": [ | |
"Data" | |
], | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "other", | |
"_input_type": "DataInput" | |
}, | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "from typing import List\n\nfrom langchain_community.vectorstores import UpstashVectorStore\n\nfrom langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store\nfrom langflow.helpers.data import docs_to_data\nfrom langflow.io import (\n HandleInput,\n IntInput,\n StrInput,\n SecretStrInput,\n DataInput,\n MultilineInput,\n)\nfrom langflow.schema import Data\n\n\nclass UpstashVectorStoreComponent(LCVectorStoreComponent):\n display_name = \"Upstash\"\n description = \"Upstash Vector Store with search capabilities\"\n documentation = \"https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/\"\n name = \"Upstash\"\n icon = \"Upstash\"\n\n inputs = [\n StrInput(\n name=\"index_url\",\n display_name=\"Index URL\",\n info=\"The URL of the Upstash index.\",\n required=True,\n ),\n SecretStrInput(\n name=\"index_token\",\n display_name=\"Index Token\",\n info=\"The token for the Upstash index.\",\n required=True,\n ),\n StrInput(\n name=\"text_key\",\n display_name=\"Text Key\",\n info=\"The key in the record to use as text.\",\n value=\"text\",\n advanced=True,\n ),\n StrInput(\n name=\"namespace\",\n display_name=\"Namespace\",\n info=\"Leave empty for default namespace.\",\n ),\n MultilineInput(name=\"search_query\", display_name=\"Search Query\"),\n MultilineInput(\n name=\"metadata_filter\",\n display_name=\"Metadata Filter\",\n info=\"Filters documents by metadata. Look at the documentation for more information.\",\n ),\n DataInput(\n name=\"ingest_data\",\n display_name=\"Ingest Data\",\n is_list=True,\n ),\n HandleInput(\n name=\"embedding\",\n display_name=\"Embedding\",\n input_types=[\"Embeddings\"],\n info=\"To use Upstash's embeddings, don't provide an embedding.\",\n ),\n IntInput(\n name=\"number_of_results\",\n display_name=\"Number of Results\",\n info=\"Number of results to return.\",\n value=4,\n advanced=True,\n ),\n ]\n\n @check_cached_vector_store\n def build_vector_store(self) -> UpstashVectorStore:\n use_upstash_embedding = self.embedding is None\n\n documents = []\n for _input in self.ingest_data or []:\n if isinstance(_input, Data):\n documents.append(_input.to_lc_document())\n else:\n documents.append(_input)\n\n if documents:\n if use_upstash_embedding:\n upstash_vs = UpstashVectorStore(\n embedding=use_upstash_embedding,\n text_key=self.text_key,\n index_url=self.index_url,\n index_token=self.index_token,\n namespace=self.namespace,\n )\n upstash_vs.add_documents(documents)\n else:\n upstash_vs = UpstashVectorStore.from_documents(\n documents=documents,\n embedding=self.embedding,\n text_key=self.text_key,\n index_url=self.index_url,\n index_token=self.index_token,\n namespace=self.namespace,\n )\n else:\n upstash_vs = UpstashVectorStore(\n embedding=self.embedding or use_upstash_embedding,\n text_key=self.text_key,\n index_url=self.index_url,\n index_token=self.index_token,\n namespace=self.namespace,\n )\n\n return upstash_vs\n\n def search_documents(self) -> List[Data]:\n vector_store = self.build_vector_store()\n\n if self.search_query and isinstance(self.search_query, str) and self.search_query.strip():\n docs = vector_store.similarity_search(\n query=self.search_query,\n k=self.number_of_results,\n filter=self.metadata_filter,\n )\n\n data = docs_to_data(docs)\n self.status = data\n return data\n else:\n return []\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"index_token": { | |
"load_from_db": true, | |
"required": true, | |
"placeholder": "", | |
"show": true, | |
"name": "index_token", | |
"value": "UPSTASH_VECTOR_TOKEN", | |
"display_name": "Index Token", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "The token for the Upstash index.", | |
"title_case": false, | |
"password": true, | |
"type": "str", | |
"_input_type": "SecretStrInput" | |
}, | |
"index_url": { | |
"trace_as_metadata": true, | |
"load_from_db": true, | |
"list": false, | |
"required": true, | |
"placeholder": "", | |
"show": true, | |
"name": "index_url", | |
"value": "UPSTASH_VECTOR_URL", | |
"display_name": "Index URL", | |
"advanced": false, | |
"dynamic": false, | |
"info": "The URL of the Upstash index.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "StrInput" | |
}, | |
"metadata_filter": { | |
"trace_as_input": true, | |
"multiline": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "metadata_filter", | |
"value": "", | |
"display_name": "Metadata Filter", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "Filters documents by metadata. Look at the documentation for more information.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MultilineInput" | |
}, | |
"namespace": { | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "namespace", | |
"value": "", | |
"display_name": "Namespace", | |
"advanced": false, | |
"dynamic": false, | |
"info": "Leave empty for default namespace.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "StrInput" | |
}, | |
"number_of_results": { | |
"trace_as_metadata": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "number_of_results", | |
"value": 4, | |
"display_name": "Number of Results", | |
"advanced": true, | |
"dynamic": false, | |
"info": "Number of results to return.", | |
"title_case": false, | |
"type": "int", | |
"_input_type": "IntInput" | |
}, | |
"search_query": { | |
"trace_as_input": true, | |
"multiline": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "search_query", | |
"value": "", | |
"display_name": "Search Query", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MultilineInput" | |
}, | |
"text_key": { | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "text_key", | |
"value": "text", | |
"display_name": "Text Key", | |
"advanced": true, | |
"dynamic": false, | |
"info": "The key in the record to use as text.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "StrInput" | |
} | |
}, | |
"description": "Upstash Vector Store with search capabilities", | |
"icon": "Upstash", | |
"base_classes": [ | |
"Data", | |
"Retriever", | |
"VectorStore" | |
], | |
"display_name": "Upstash", | |
"documentation": "https://python.langchain.com/v0.2/docs/integrations/vectorstores/upstash/", | |
"custom_fields": {}, | |
"output_types": [], | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Retriever" | |
], | |
"selected": "Retriever", | |
"name": "base_retriever", | |
"display_name": "Retriever", | |
"method": "build_base_retriever", | |
"value": "__UNDEFINED__", | |
"cache": true | |
}, | |
{ | |
"types": [ | |
"Data" | |
], | |
"selected": "Data", | |
"name": "search_results", | |
"display_name": "Search Results", | |
"method": "search_documents", | |
"value": "__UNDEFINED__", | |
"cache": true | |
}, | |
{ | |
"types": [ | |
"VectorStore" | |
], | |
"selected": "VectorStore", | |
"name": "vector_store", | |
"display_name": "Vector Store", | |
"method": "cast_vector_store", | |
"value": "__UNDEFINED__", | |
"cache": true | |
} | |
], | |
"field_order": [ | |
"index_url", | |
"index_token", | |
"text_key", | |
"namespace", | |
"search_query", | |
"metadata_filter", | |
"ingest_data", | |
"embedding", | |
"number_of_results" | |
], | |
"beta": false, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "Upstash-FIMbg" | |
}, | |
"selected": true, | |
"width": 384, | |
"height": 828, | |
"positionAbsolute": { | |
"x": 192.88830896698607, | |
"y": 404.21485713968906 | |
}, | |
"dragging": false | |
}, | |
{ | |
"id": "ParseData-p3WYI", | |
"type": "genericNode", | |
"position": { | |
"x": 657.0366507725095, | |
"y": 859.5390859831298 | |
}, | |
"data": { | |
"type": "ParseData", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"data": { | |
"trace_as_metadata": true, | |
"list": false, | |
"trace_as_input": true, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "data", | |
"value": "", | |
"display_name": "Data", | |
"advanced": false, | |
"input_types": [ | |
"Data" | |
], | |
"dynamic": false, | |
"info": "The data to convert to text.", | |
"title_case": false, | |
"type": "other", | |
"_input_type": "DataInput" | |
}, | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "from langflow.custom import Component\nfrom langflow.helpers.data import data_to_text\nfrom langflow.io import DataInput, MultilineInput, Output, StrInput\nfrom langflow.schema.message import Message\n\n\nclass ParseDataComponent(Component):\n display_name = \"Parse Data\"\n description = \"Convert Data into plain text following a specified template.\"\n icon = \"braces\"\n name = \"ParseData\"\n\n inputs = [\n DataInput(name=\"data\", display_name=\"Data\", info=\"The data to convert to text.\"),\n MultilineInput(\n name=\"template\",\n display_name=\"Template\",\n info=\"The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.\",\n value=\"{text}\",\n ),\n StrInput(name=\"sep\", display_name=\"Separator\", advanced=True, value=\"\\n\"),\n ]\n\n outputs = [\n Output(display_name=\"Text\", name=\"text\", method=\"parse_data\"),\n ]\n\n def parse_data(self) -> Message:\n data = self.data if isinstance(self.data, list) else [self.data]\n template = self.template\n\n result_string = data_to_text(template, data, sep=self.sep)\n self.status = result_string\n return Message(text=result_string)\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"sep": { | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "sep", | |
"value": "\n", | |
"display_name": "Separator", | |
"advanced": true, | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "StrInput" | |
}, | |
"template": { | |
"trace_as_input": true, | |
"multiline": true, | |
"trace_as_metadata": true, | |
"load_from_db": false, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "template", | |
"value": "{text}", | |
"display_name": "Template", | |
"advanced": false, | |
"input_types": [ | |
"Message" | |
], | |
"dynamic": false, | |
"info": "The template to use for formatting the data. It can contain the keys {text}, {data} or any other key in the Data.", | |
"title_case": false, | |
"type": "str", | |
"_input_type": "MultilineInput" | |
} | |
}, | |
"description": "Convert Data into plain text following a specified template.", | |
"icon": "braces", | |
"base_classes": [ | |
"Message" | |
], | |
"display_name": "Parse Data", | |
"documentation": "", | |
"custom_fields": {}, | |
"output_types": [], | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Message" | |
], | |
"selected": "Message", | |
"name": "text", | |
"display_name": "Text", | |
"method": "parse_data", | |
"value": "__UNDEFINED__", | |
"cache": true | |
} | |
], | |
"field_order": [ | |
"data", | |
"template", | |
"sep" | |
], | |
"beta": false, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "ParseData-p3WYI" | |
}, | |
"selected": false, | |
"width": 384, | |
"height": 374, | |
"positionAbsolute": { | |
"x": 657.0366507725095, | |
"y": 859.5390859831298 | |
}, | |
"dragging": false | |
}, | |
{ | |
"id": "Prompt-EUmv1", | |
"type": "genericNode", | |
"position": { | |
"x": 1142.6912335461534, | |
"y": 392.4151818157433 | |
}, | |
"data": { | |
"type": "Prompt", | |
"node": { | |
"template": { | |
"_type": "Component", | |
"code": { | |
"type": "code", | |
"required": true, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "from langflow.base.prompts.api_utils import process_prompt_template\nfrom langflow.custom import Component\nfrom langflow.inputs.inputs import DefaultPromptField\nfrom langflow.io import Output, PromptInput\nfrom langflow.schema.message import Message\nfrom langflow.template.utils import update_template_values\n\n\nclass PromptComponent(Component):\n display_name: str = \"Prompt\"\n description: str = \"Create a prompt template with dynamic variables.\"\n icon = \"prompts\"\n trace_type = \"prompt\"\n name = \"Prompt\"\n\n inputs = [\n PromptInput(name=\"template\", display_name=\"Template\"),\n ]\n\n outputs = [\n Output(display_name=\"Prompt Message\", name=\"prompt\", method=\"build_prompt\"),\n ]\n\n async def build_prompt(\n self,\n ) -> Message:\n prompt = await Message.from_template_and_variables(**self._attributes)\n self.status = prompt.text\n return prompt\n\n def _update_template(self, frontend_node: dict):\n prompt_template = frontend_node[\"template\"][\"template\"][\"value\"]\n custom_fields = frontend_node[\"custom_fields\"]\n frontend_node_template = frontend_node[\"template\"]\n _ = process_prompt_template(\n template=prompt_template,\n name=\"template\",\n custom_fields=custom_fields,\n frontend_node_template=frontend_node_template,\n )\n return frontend_node\n\n def post_code_processing(self, new_frontend_node: dict, current_frontend_node: dict):\n \"\"\"\n This function is called after the code validation is done.\n \"\"\"\n frontend_node = super().post_code_processing(new_frontend_node, current_frontend_node)\n template = frontend_node[\"template\"][\"template\"][\"value\"]\n # Kept it duplicated for backwards compatibility\n _ = process_prompt_template(\n template=template,\n name=\"template\",\n custom_fields=frontend_node[\"custom_fields\"],\n frontend_node_template=frontend_node[\"template\"],\n )\n # Now that template is updated, we need to grab any values that were set in the current_frontend_node\n # and update the frontend_node with those values\n update_template_values(new_template=frontend_node, previous_template=current_frontend_node[\"template\"])\n return frontend_node\n\n def _get_fallback_input(self, **kwargs):\n return DefaultPromptField(**kwargs)\n", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "code", | |
"advanced": true, | |
"dynamic": true, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false | |
}, | |
"template": { | |
"trace_as_input": true, | |
"list": false, | |
"required": false, | |
"placeholder": "", | |
"show": true, | |
"name": "template", | |
"value": "{context}\n\n---\n\nGiven the context above, answer the question as best as possible.\n\nQuestion: {question}\n\nAnswer: ", | |
"display_name": "Template", | |
"advanced": false, | |
"dynamic": false, | |
"info": "", | |
"title_case": false, | |
"type": "prompt", | |
"_input_type": "PromptInput" | |
}, | |
"context": { | |
"field_type": "str", | |
"required": false, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "context", | |
"display_name": "context", | |
"advanced": false, | |
"input_types": [ | |
"Message", | |
"Text" | |
], | |
"dynamic": false, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false, | |
"type": "str" | |
}, | |
"question": { | |
"field_type": "str", | |
"required": false, | |
"placeholder": "", | |
"list": false, | |
"show": true, | |
"multiline": true, | |
"value": "", | |
"fileTypes": [], | |
"file_path": "", | |
"password": false, | |
"name": "question", | |
"display_name": "question", | |
"advanced": false, | |
"input_types": [ | |
"Message", | |
"Text" | |
], | |
"dynamic": false, | |
"info": "", | |
"load_from_db": false, | |
"title_case": false, | |
"type": "str" | |
} | |
}, | |
"description": "Create a prompt template with dynamic variables.", | |
"icon": "prompts", | |
"is_input": null, | |
"is_output": null, | |
"is_composition": null, | |
"base_classes": [ | |
"Message" | |
], | |
"name": "", | |
"display_name": "Prompt", | |
"documentation": "", | |
"custom_fields": { | |
"template": [ | |
"context", | |
"question" | |
] | |
}, | |
"output_types": [], | |
"full_path": null, | |
"pinned": false, | |
"conditional_paths": [], | |
"frozen": false, | |
"outputs": [ | |
{ | |
"types": [ | |
"Message" | |
], | |
"selected": "Message", | |
"name": "prompt", | |
"hidden": null, | |
"display_name": "Prompt Message", | |
"method": "build_prompt", | |
"value": "__UNDEFINED__", | |
"cache": true | |
} | |
], | |
"field_order": [ | |
"template" | |
], | |
"beta": false, | |
"error": null, | |
"edited": false, | |
"lf_version": "1.0.16.post4" | |
}, | |
"id": "Prompt-EUmv1" | |
}, | |
"selected": true, | |
"width": 384, | |
"height": 498, | |
"positionAbsolute": { | |
"x": 1142.6912335461534, | |
"y": 392.4151818157433 | |
}, | |
"dragging": false | |
} | |
], | |
"edges": [ | |
{ | |
"source": "OpenAIModel-Z5rm4", | |
"sourceHandle": "{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Z5rm4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}", | |
"target": "ChatOutput-MxAb9", | |
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-MxAb9œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"data": { | |
"targetHandle": { | |
"fieldName": "input_value", | |
"id": "ChatOutput-MxAb9", | |
"inputTypes": [ | |
"Message" | |
], | |
"type": "str" | |
}, | |
"sourceHandle": { | |
"dataType": "OpenAIModel", | |
"id": "OpenAIModel-Z5rm4", | |
"name": "text_output", | |
"output_types": [ | |
"Message" | |
] | |
} | |
}, | |
"id": "reactflow__edge-OpenAIModel-Z5rm4{œdataTypeœ:œOpenAIModelœ,œidœ:œOpenAIModel-Z5rm4œ,œnameœ:œtext_outputœ,œoutput_typesœ:[œMessageœ]}-ChatOutput-MxAb9{œfieldNameœ:œinput_valueœ,œidœ:œChatOutput-MxAb9œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"className": "" | |
}, | |
{ | |
"source": "File-TGpoK", | |
"sourceHandle": "{œdataTypeœ:œFileœ,œidœ:œFile-TGpoKœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}", | |
"target": "SplitText-H7iXy", | |
"targetHandle": "{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-H7iXyœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
"data": { | |
"targetHandle": { | |
"fieldName": "data_inputs", | |
"id": "SplitText-H7iXy", | |
"inputTypes": [ | |
"Data" | |
], | |
"type": "other" | |
}, | |
"sourceHandle": { | |
"dataType": "File", | |
"id": "File-TGpoK", | |
"name": "data", | |
"output_types": [ | |
"Data" | |
] | |
} | |
}, | |
"id": "reactflow__edge-File-TGpoK{œdataTypeœ:œFileœ,œidœ:œFile-TGpoKœ,œnameœ:œdataœ,œoutput_typesœ:[œDataœ]}-SplitText-H7iXy{œfieldNameœ:œdata_inputsœ,œidœ:œSplitText-H7iXyœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
"className": "" | |
}, | |
{ | |
"source": "SplitText-H7iXy", | |
"sourceHandle": "{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-H7iXyœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}", | |
"target": "Upstash-dFmNR", | |
"targetHandle": "{œfieldNameœ:œingest_dataœ,œidœ:œUpstash-dFmNRœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
"data": { | |
"targetHandle": { | |
"fieldName": "ingest_data", | |
"id": "Upstash-dFmNR", | |
"inputTypes": [ | |
"Data" | |
], | |
"type": "other" | |
}, | |
"sourceHandle": { | |
"dataType": "SplitText", | |
"id": "SplitText-H7iXy", | |
"name": "chunks", | |
"output_types": [ | |
"Data" | |
] | |
} | |
}, | |
"id": "reactflow__edge-SplitText-H7iXy{œdataTypeœ:œSplitTextœ,œidœ:œSplitText-H7iXyœ,œnameœ:œchunksœ,œoutput_typesœ:[œDataœ]}-Upstash-dFmNR{œfieldNameœ:œingest_dataœ,œidœ:œUpstash-dFmNRœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
"className": "" | |
}, | |
{ | |
"source": "ChatInput-gANZa", | |
"sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gANZaœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", | |
"target": "Upstash-FIMbg", | |
"targetHandle": "{œfieldNameœ:œsearch_queryœ,œidœ:œUpstash-FIMbgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"data": { | |
"targetHandle": { | |
"fieldName": "search_query", | |
"id": "Upstash-FIMbg", | |
"inputTypes": [ | |
"Message" | |
], | |
"type": "str" | |
}, | |
"sourceHandle": { | |
"dataType": "ChatInput", | |
"id": "ChatInput-gANZa", | |
"name": "message", | |
"output_types": [ | |
"Message" | |
] | |
} | |
}, | |
"id": "reactflow__edge-ChatInput-gANZa{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gANZaœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Upstash-FIMbg{œfieldNameœ:œsearch_queryœ,œidœ:œUpstash-FIMbgœ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"className": "" | |
}, | |
{ | |
"source": "Upstash-FIMbg", | |
"sourceHandle": "{œdataTypeœ:œUpstashœ,œidœ:œUpstash-FIMbgœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}", | |
"target": "ParseData-p3WYI", | |
"targetHandle": "{œfieldNameœ:œdataœ,œidœ:œParseData-p3WYIœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
"data": { | |
"targetHandle": { | |
"fieldName": "data", | |
"id": "ParseData-p3WYI", | |
"inputTypes": [ | |
"Data" | |
], | |
"type": "other" | |
}, | |
"sourceHandle": { | |
"dataType": "Upstash", | |
"id": "Upstash-FIMbg", | |
"name": "search_results", | |
"output_types": [ | |
"Data" | |
] | |
} | |
}, | |
"id": "reactflow__edge-Upstash-FIMbg{œdataTypeœ:œUpstashœ,œidœ:œUpstash-FIMbgœ,œnameœ:œsearch_resultsœ,œoutput_typesœ:[œDataœ]}-ParseData-p3WYI{œfieldNameœ:œdataœ,œidœ:œParseData-p3WYIœ,œinputTypesœ:[œDataœ],œtypeœ:œotherœ}", | |
"className": "" | |
}, | |
{ | |
"source": "ParseData-p3WYI", | |
"sourceHandle": "{œdataTypeœ:œParseDataœ,œidœ:œParseData-p3WYIœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}", | |
"target": "Prompt-EUmv1", | |
"targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPrompt-EUmv1œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
"data": { | |
"targetHandle": { | |
"fieldName": "context", | |
"id": "Prompt-EUmv1", | |
"inputTypes": [ | |
"Message", | |
"Text" | |
], | |
"type": "str" | |
}, | |
"sourceHandle": { | |
"dataType": "ParseData", | |
"id": "ParseData-p3WYI", | |
"name": "text", | |
"output_types": [ | |
"Message" | |
] | |
} | |
}, | |
"id": "reactflow__edge-ParseData-p3WYI{œdataTypeœ:œParseDataœ,œidœ:œParseData-p3WYIœ,œnameœ:œtextœ,œoutput_typesœ:[œMessageœ]}-Prompt-EUmv1{œfieldNameœ:œcontextœ,œidœ:œPrompt-EUmv1œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
"className": "" | |
}, | |
{ | |
"source": "ChatInput-gANZa", | |
"sourceHandle": "{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gANZaœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}", | |
"target": "Prompt-EUmv1", | |
"targetHandle": "{œfieldNameœ:œquestionœ,œidœ:œPrompt-EUmv1œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
"data": { | |
"targetHandle": { | |
"fieldName": "question", | |
"id": "Prompt-EUmv1", | |
"inputTypes": [ | |
"Message", | |
"Text" | |
], | |
"type": "str" | |
}, | |
"sourceHandle": { | |
"dataType": "ChatInput", | |
"id": "ChatInput-gANZa", | |
"name": "message", | |
"output_types": [ | |
"Message" | |
] | |
} | |
}, | |
"id": "reactflow__edge-ChatInput-gANZa{œdataTypeœ:œChatInputœ,œidœ:œChatInput-gANZaœ,œnameœ:œmessageœ,œoutput_typesœ:[œMessageœ]}-Prompt-EUmv1{œfieldNameœ:œquestionœ,œidœ:œPrompt-EUmv1œ,œinputTypesœ:[œMessageœ,œTextœ],œtypeœ:œstrœ}", | |
"className": "" | |
}, | |
{ | |
"source": "Prompt-EUmv1", | |
"sourceHandle": "{œdataTypeœ:œPromptœ,œidœ:œPrompt-EUmv1œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}", | |
"target": "OpenAIModel-Z5rm4", | |
"targetHandle": "{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-Z5rm4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"data": { | |
"targetHandle": { | |
"fieldName": "input_value", | |
"id": "OpenAIModel-Z5rm4", | |
"inputTypes": [ | |
"Message" | |
], | |
"type": "str" | |
}, | |
"sourceHandle": { | |
"dataType": "Prompt", | |
"id": "Prompt-EUmv1", | |
"name": "prompt", | |
"output_types": [ | |
"Message" | |
] | |
} | |
}, | |
"id": "reactflow__edge-Prompt-EUmv1{œdataTypeœ:œPromptœ,œidœ:œPrompt-EUmv1œ,œnameœ:œpromptœ,œoutput_typesœ:[œMessageœ]}-OpenAIModel-Z5rm4{œfieldNameœ:œinput_valueœ,œidœ:œOpenAIModel-Z5rm4œ,œinputTypesœ:[œMessageœ],œtypeœ:œstrœ}", | |
"className": "" | |
} | |
], | |
"viewport": { | |
"x": 193.60938912677057, | |
"y": -82.59632291637865, | |
"zoom": 0.5356071423381973 | |
} | |
}, | |
"user_id": "55205b62-a9b3-4726-9621-6cdcbee3826b" | |
} |
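For reference, the edges above wire a standard RAG loop: Chat Input feeds both the Upstash Vector component's search_query and the Prompt's question, the search results pass through Parse Data into the Prompt's context, and the filled prompt goes to the OpenAI model before reaching Chat Output, with a separate File → Split Text → Upstash path handling ingestion. The following is a minimal standalone sketch of the query-time path only, not part of the exported flow; it assumes the openai and upstash-vector Python packages, placeholder Upstash credentials, an OPENAI_API_KEY in the environment, and that chunk text is stored under the component's default "text" metadata key — the model names are illustrative assumptions.

from openai import OpenAI
from upstash_vector import Index

openai_client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment
index = Index(
    url="<UPSTASH_VECTOR_REST_URL>",      # placeholder, corresponds to the index_url field
    token="<UPSTASH_VECTOR_REST_TOKEN>",  # placeholder, corresponds to the index_token field
)

question = "What does the ingested document say about X?"

# Embed the question; the flow's embedding component is configurable, text-embedding-3-small is an assumption.
embedding = openai_client.embeddings.create(
    model="text-embedding-3-small",
    input=question,
).data[0].embedding

# Retrieve the most similar chunks from Upstash Vector (mirrors the Upstash node's search_query input).
hits = index.query(vector=embedding, top_k=4, include_metadata=True)

# Join chunk texts, assuming they were stored under the "text" metadata key (the component's text_key default).
context = "\n".join(hit.metadata.get("text", "") for hit in hits if hit.metadata)

# Fill the same template used by the Prompt node and call the chat model.
prompt = (
    f"{context}\n\n---\n\n"
    "Given the context above, answer the question as best as possible.\n\n"
    f"Question: {question}\n\nAnswer: "
)
answer = openai_client.chat.completions.create(
    model="gpt-4o-mini",  # assumption; the OpenAIModel node's model is configured elsewhere in the flow
    messages=[{"role": "user", "content": prompt}],
)
print(answer.choices[0].message.content)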