Flow with a Custom Component in Langflow to use a fine-tuned GPT model with a chat interface
{"name":"using_fine_tuned_gpt_langflow","description":"Flow using a Custom Component in Langflow to load a fine-tuned GPT model in a chat conversation.","data":{"nodes":[{"width":384,"height":372,"id":"PromptTemplate-5GnSD","type":"genericNode","position":{"x":1002.2909888285608,"y":-28.810498723906477},"data":{"type":"PromptTemplate","node":{"template":{"output_parser":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"output_parser","advanced":false,"dynamic":false,"info":"","type":"BaseOutputParser","list":false},"input_variables":{"required":true,"placeholder":"","show":false,"multiline":false,"password":false,"name":"input_variables","advanced":false,"dynamic":false,"info":"","type":"str","list":true,"value":["var"]},"partial_variables":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"partial_variables","advanced":false,"dynamic":false,"info":"","type":"dict","list":false},"template":{"required":true,"placeholder":"","show":true,"multiline":true,"password":false,"name":"template","advanced":false,"dynamic":false,"info":"","type":"prompt","list":false,"value":"{var}"},"template_format":{"required":false,"placeholder":"","show":false,"multiline":false,"value":"f-string","password":false,"name":"template_format","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"validate_template":{"required":false,"placeholder":"","show":false,"multiline":false,"value":true,"password":false,"name":"validate_template","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"_type":"PromptTemplate","var":{"required":false,"placeholder":"","show":true,"multiline":true,"value":"","password":false,"name":"var","display_name":"var","advanced":false,"input_types":["Document","BaseOutputParser"],"dynamic":false,"info":"","type":"str","list":false}},"description":"A prompt template for a language 
model.","base_classes":["PromptTemplate","BasePromptTemplate","StringPromptTemplate"],"name":"","display_name":"PromptTemplate","documentation":"https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/","custom_fields":{"":["var"],"template":["var"]},"output_types":[],"field_formatters":{"formatters":{"openai_api_key":{}},"base_formatters":{"kwargs":{},"optional":{},"list":{},"dict":{},"union":{},"multiline":{},"show":{},"password":{},"default":{},"headers":{},"dict_code_file":{},"model_fields":{"MODEL_DICT":{"OpenAI":["text-davinci-003","text-davinci-002","text-curie-001","text-babbage-001","text-ada-001"],"ChatOpenAI":["gpt-3.5-turbo-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k-0613","gpt-3.5-turbo-16k","gpt-4-0613","gpt-4-32k-0613","gpt-4","gpt-4-32k"],"Anthropic":["claude-v1","claude-v1-100k","claude-instant-v1","claude-instant-v1-100k","claude-v1.3","claude-v1.3-100k","claude-v1.2","claude-v1.0","claude-instant-v1.1","claude-instant-v1.1-100k","claude-instant-v1.0"],"ChatAnthropic":["claude-v1","claude-v1-100k","claude-instant-v1","claude-instant-v1-100k","claude-v1.3","claude-v1.3-100k","claude-v1.2","claude-v1.0","claude-instant-v1.1","claude-instant-v1.1-100k","claude-instant-v1.0"]}}}},"beta":false,"error":null},"id":"PromptTemplate-5GnSD"},"selected":true,"positionAbsolute":{"x":1002.2909888285608,"y":-28.810498723906477},"dragging":false},{"width":384,"height":338,"id":"LLMChain-8C9XJ","type":"genericNode","position":{"x":1024.2195688898594,"y":410.82691722912193},"data":{"type":"LLMChain","node":{"template":{"callbacks":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"callbacks","advanced":false,"dynamic":false,"info":"","type":"langchain.callbacks.base.BaseCallbackHandler","list":true},"llm":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"llm","advanced":false,"dynamic":false,"info":"","type":"BaseLanguageModel","list":false},"memory":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"memory","advanced":false,"dynamic":false,"info":"","type":"BaseMemory","list":false},"output_parser":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"output_parser","advanced":false,"dynamic":false,"info":"","type":"BaseLLMOutputParser","list":false},"prompt":{"required":true,"placeholder":"","show":true,"multiline":false,"password":false,"name":"prompt","advanced":false,"dynamic":false,"info":"","type":"BasePromptTemplate","list":false},"llm_kwargs":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"llm_kwargs","advanced":false,"dynamic":false,"info":"","type":"dict","list":false},"metadata":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"metadata","advanced":false,"dynamic":false,"info":"","type":"dict","list":false},"output_key":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"text","password":false,"name":"output_key","advanced":true,"dynamic":false,"info":"","type":"str","list":false},"return_final_only":{"required":false,"placeholder":"","show":false,"multiline":false,"value":true,"password":false,"name":"return_final_only","advanced":false,"dynamic":false,"info":"","type":"bool","list":false},"tags":{"required":false,"placeholder":"","show":false,"multiline":false,"password":false,"name":"tags","advanced":false,"dynamic":false,"info":"","type":"str","list":true},"verbose":{"required":false,"placeholder":"","s
how":false,"multiline":false,"value":false,"password":false,"name":"verbose","advanced":true,"dynamic":false,"info":"","type":"bool","list":false},"_type":"LLMChain"},"description":"Chain to run queries against LLMs.","base_classes":["LLMChain","Chain","function"],"display_name":"LLMChain","custom_fields":{},"output_types":[],"documentation":"https://python.langchain.com/docs/modules/chains/foundational/llm_chain","beta":false,"error":null},"id":"LLMChain-8C9XJ"},"selected":false,"positionAbsolute":{"x":1024.2195688898594,"y":410.82691722912193},"dragging":false},{"width":384,"height":705,"id":"CustomComponent-5XYPW","type":"genericNode","position":{"x":569.2530158574887,"y":-24.122112513296386},"data":{"type":"CustomComponent","node":{"template":{"code":{"dynamic":true,"required":true,"placeholder":"","show":true,"multiline":true,"value":"from langflow import CustomComponent\r\n\r\nfrom langchain.llms.base import BaseLLM\r\nfrom langchain.chains import LLMChain\r\nfrom langchain import PromptTemplate\r\nfrom langchain.schema import Document\r\nfrom typing import Optional\r\nimport openai\r\nimport pickle\r\n\r\nfrom langchain.chat_models import ChatOpenAI\r\n\r\nimport requests\r\n\r\nclass FineTunedModel(CustomComponent):\r\n display_name: str = \"Use Fine-Tuned GPT Model\"\r\n description: str = \"OpenAI model. Can be a standard or a fine tuned one.\"\r\n\r\n def build_config(self):\r\n return { \r\n \"max_tokens\":{\r\n \"display_name\": \"Max Tokens\",\r\n \"required\": False,\r\n },\r\n \"job_id_path\": {\r\n \"required\": True,\r\n \"password\": False,\r\n \"display_name\": \"Job ID path\",\r\n \"value\": \"job_id.txt\",\r\n },\r\n \"openai_api_base\": {\r\n \"required\": False,\r\n \"password\": False,\r\n \"display_name\": \"OpenAI API Base\",\r\n },\r\n \"openai_api_key\": {\r\n \"required\": True,\r\n \"password\": True,\r\n \"display_name\": \"OpenAI API Key\",\r\n },\r\n \"temperature\": {\r\n \"required\": False,\r\n \"value\": 0.7,\r\n \"password\": False,\r\n \"name\": \"Temperature\",\r\n },\r\n }\r\n \r\n\r\n def build(\r\n self, \r\n temperature,\r\n max_tokens = None, \r\n job_id_path = None, \r\n openai_api_base = None, \r\n openai_api_key = None,\r\n ) -> BaseLLM:\r\n \r\n try:\r\n with open(job_id_path,\"rb\") as file:\r\n job_id = pickle.load(file)\r\n model_name = openai.FineTuningJob.retrieve(\r\n job_id,\r\n api_key=openai_api_key,\r\n ).fine_tuned_model\r\n except Exception as e:\r\n raise ValueError(\"For fine-tuned models please insert the Job ID path\") from e\r\n self.repr_value = e\r\n \r\n chat = ChatOpenAI(\r\n model=model_name,\r\n temperature=temperature,\r\n openai_api_key=openai_api_key,\r\n max_tokens=max_tokens,\r\n openai_api_base=openai_api_base,\r\n )\r\n return chat\r\n","password":false,"name":"code","advanced":false,"type":"code","list":false},"_type":"CustomComponent","job_id_path":{"required":true,"placeholder":"","show":true,"multiline":false,"value":"job_id.txt","password":false,"name":"job_id_path","display_name":"Job ID path","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"max_tokens":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"max_tokens","display_name":"Max Tokens","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"openai_api_base":{"required":false,"placeholder":"","show":true,"multiline":false,"password":false,"name":"openai_api_base","display_name":"OpenAI API 
Base","advanced":false,"dynamic":false,"info":"","type":"str","list":false},"openai_api_key":{"required":true,"placeholder":"","show":true,"multiline":false,"password":true,"name":"openai_api_key","display_name":"OpenAI API Key","advanced":false,"dynamic":false,"info":"","type":"str","list":false,"value":""},"temperature":{"required":false,"placeholder":"","show":true,"multiline":false,"value":0.7,"password":false,"name":"temperature","display_name":"temperature","advanced":false,"dynamic":false,"info":"","type":"str","list":false}},"description":"OpenAI model. Can be a standard or a fine tuned one.","base_classes":["BaseLanguageModel","BaseLLM"],"display_name":"Use Fine-Tuned GPT Model","custom_fields":{"job_id_path":null,"max_tokens":null,"openai_api_base":null,"openai_api_key":null,"temperature":null},"output_types":[],"documentation":"","beta":true,"error":null},"id":"CustomComponent-5XYPW"},"selected":false,"positionAbsolute":{"x":569.2530158574887,"y":-24.122112513296386},"dragging":false}],"edges":[{"source":"PromptTemplate-5GnSD","sourceHandle":"PromptTemplate|PromptTemplate-5GnSD|PromptTemplate|BasePromptTemplate|StringPromptTemplate","target":"LLMChain-8C9XJ","targetHandle":"BasePromptTemplate|prompt|LLMChain-8C9XJ","style":{"stroke":"#555"},"className":"","animated":false,"id":"reactflow__edge-PromptTemplate-5GnSDPromptTemplate|PromptTemplate-5GnSD|PromptTemplate|BasePromptTemplate|StringPromptTemplate-LLMChain-8C9XJBasePromptTemplate|prompt|LLMChain-8C9XJ"},{"source":"CustomComponent-5XYPW","sourceHandle":"CustomComponent|CustomComponent-5XYPW|BaseLanguageModel|BaseLLM","target":"LLMChain-8C9XJ","targetHandle":"BaseLanguageModel|llm|LLMChain-8C9XJ","style":{"stroke":"#555"},"className":"","animated":false,"id":"reactflow__edge-CustomComponent-5XYPWCustomComponent|CustomComponent-5XYPW|BaseLanguageModel|BaseLLM-LLMChain-8C9XJBaseLanguageModel|llm|LLMChain-8C9XJ"}],"viewport":{"x":-203.3065974595986,"y":72.07039269509912,"zoom":0.630407321211217}},"id":"a3e094e2-cf7e-49e4-8667-6fa2d362a17a","user_id":"aa32ab4f-da83-4937-966e-36f98082564c"}