Merge pull request #171 from FlowiseAI/feature/MultiPromptChain

Feature/MultiPromptChain
pull/179/head
Henry Heng 2023-05-25 12:08:58 +01:00 committed by GitHub
commit 2802ee0142
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 609 additions and 1 deletions

View File

@ -56,7 +56,8 @@ class ConversationChain_Chains implements INode {
const obj: any = {
llm: model,
memory
memory,
verbose: process.env.DEBUG === 'true' ? true : false
}
const chatPrompt = ChatPromptTemplate.fromPromptMessages([

View File

@ -0,0 +1,68 @@
import { BaseLanguageModel } from 'langchain/base_language'
import { INode, INodeData, INodeParams, PromptRetriever } from '../../../src/Interface'
import { getBaseClasses } from '../../../src/utils'
import { MultiPromptChain } from 'langchain/chains'
/**
 * Flowise node wrapping LangChain's MultiPromptChain.
 * Collects name/description/system-message triples from connected
 * PromptRetriever nodes and lets the chain route each input to the
 * most appropriate prompt.
 */
class MultiPromptChain_Chains implements INode {
    label: string
    name: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    description: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'Multi Prompt Chain'
        this.name = 'multiPromptChain'
        this.type = 'MultiPromptChain'
        this.icon = 'chain.svg'
        this.category = 'Chains'
        this.description = 'Chain automatically picks an appropriate prompt from multiple prompt templates'
        this.baseClasses = [this.type, ...getBaseClasses(MultiPromptChain)]
        this.inputs = [
            {
                label: 'Language Model',
                name: 'model',
                type: 'BaseLanguageModel'
            },
            {
                label: 'Prompt Retriever',
                name: 'promptRetriever',
                type: 'PromptRetriever',
                list: true
            }
        ]
    }

    /**
     * Builds the MultiPromptChain instance from the connected model and
     * prompt retrievers.
     * @param nodeData resolved inputs for this node
     * @returns the constructed MultiPromptChain
     */
    async init(nodeData: INodeData): Promise<any> {
        const model = nodeData.inputs?.model as BaseLanguageModel
        // Guard against a missing/unconnected list input so iteration below
        // cannot throw on undefined.
        const promptRetriever = (nodeData.inputs?.promptRetriever as PromptRetriever[]) ?? []

        const promptNames: string[] = []
        const promptDescriptions: string[] = []
        const promptTemplates: string[] = []
        for (const prompt of promptRetriever) {
            promptNames.push(prompt.name)
            promptDescriptions.push(prompt.description)
            promptTemplates.push(prompt.systemMessage)
        }

        // verbose mirrors the DEBUG env flag directly (redundant ternary removed).
        // NOTE(review): the `as any` cast silences a signature mismatch with
        // MultiPromptChain.fromPrompts's options parameter — confirm against the
        // installed langchain version before removing.
        const chain = MultiPromptChain.fromPrompts(model, promptNames, promptDescriptions, promptTemplates, undefined, {
            verbose: process.env.DEBUG === 'true'
        } as any)
        return chain
    }

    /**
     * Executes the chain on a user input.
     * @param nodeData holds the chain instance created by init()
     * @param input the user's question
     * @returns the routed prompt's answer text
     */
    async run(nodeData: INodeData, input: string): Promise<string> {
        const chain = nodeData.instance as MultiPromptChain
        const res = await chain.call({ input })
        return res?.text
    }
}

module.exports = { nodeClass: MultiPromptChain_Chains }

View File

@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-dna" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M14.828 14.828a4 4 0 1 0 -5.656 -5.656a4 4 0 0 0 5.656 5.656z"></path>
<path d="M9.172 20.485a4 4 0 1 0 -5.657 -5.657"></path>
<path d="M14.828 3.515a4 4 0 0 0 5.657 5.657"></path>
</svg>

After

Width:  |  Height:  |  Size: 489 B

View File

@ -0,0 +1,62 @@
import { INode, INodeData, INodeParams, PromptRetriever, PromptRetrieverInput } from '../../../src/Interface'
/**
 * Flowise node that packages a named, described system prompt into a
 * PromptRetriever object, for later selection by a MultiPromptChain.
 */
class PromptRetriever_Retrievers implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]

    constructor() {
        // Node identity and palette metadata.
        this.name = 'promptRetriever'
        this.label = 'Prompt Retriever'
        this.type = 'PromptRetriever'
        this.icon = 'promptretriever.svg'
        this.category = 'Retrievers'
        this.description = 'Store prompt template with name & description to be later queried by MultiPromptChain'
        this.baseClasses = [this.type]
        // Three free-text fields: a short routing name, a routing
        // description, and the system message itself.
        this.inputs = [
            {
                label: 'Prompt Name',
                name: 'name',
                type: 'string',
                placeholder: 'physics-qa'
            },
            {
                label: 'Prompt Description',
                name: 'description',
                type: 'string',
                rows: 3,
                description: 'Description of what the prompt does and when it should be used',
                placeholder: 'Good for answering questions about physics'
            },
            {
                label: 'Prompt System Message',
                name: 'systemMessage',
                type: 'string',
                rows: 4,
                placeholder: `You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.`
            }
        ]
    }

    /**
     * Bundles the three configured fields into a PromptRetriever instance.
     * @param nodeData resolved inputs for this node
     * @returns a new PromptRetriever
     */
    async init(nodeData: INodeData): Promise<any> {
        const fields: PromptRetrieverInput = {
            name: nodeData.inputs?.name as string,
            description: nodeData.inputs?.description as string,
            systemMessage: nodeData.inputs?.systemMessage as string
        }
        return new PromptRetriever(fields)
    }
}

module.exports = { nodeClass: PromptRetriever_Retrievers }

View File

@ -0,0 +1,8 @@
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-message-down" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
<path d="M8 9h8"></path>
<path d="M8 13h6"></path>
<path d="M11.998 18.601l-3.998 2.399v-3h-2a3 3 0 0 1 -3 -3v-8a3 3 0 0 1 3 -3h12a3 3 0 0 1 3 3v5.5"></path>
<path d="M19 16v6"></path>
<path d="M22 19l-3 3l-3 -3"></path>
</svg>

After

Width:  |  Height:  |  Size: 535 B

View File

@ -103,3 +103,24 @@ export class PromptTemplate extends LangchainPromptTemplate {
super(input)
}
}
/** Configuration fields accepted by PromptRetriever. */
export interface PromptRetrieverInput {
    name: string
    description: string
    systemMessage: string
}

// Suffix appended to every stored system message so the final prompt
// always ends with the incoming question placeholder.
const fixedTemplate = `Here is a question:
{input}
`

/**
 * Lightweight holder pairing a prompt's routing name and description with
 * its system message (plus the fixed question suffix).
 */
export class PromptRetriever {
    name: string
    description: string
    systemMessage: string

    constructor(fields: PromptRetrieverInput) {
        const { name, description, systemMessage } = fields
        this.name = name
        this.description = description
        // Store the caller's message followed by the fixed question template.
        this.systemMessage = `${systemMessage}\n${fixedTemplate}`
    }
}

View File

@ -0,0 +1,442 @@
{
"description": "A chain that automatically picks an appropriate prompt from multiple prompts",
"nodes": [
{
"width": 300,
"height": 632,
"id": "promptRetriever_0",
"position": {
"x": 197.46642699727397,
"y": 25.945621297410923
},
"type": "customNode",
"data": {
"id": "promptRetriever_0",
"label": "Prompt Retriever",
"name": "promptRetriever",
"type": "PromptRetriever",
"baseClasses": ["PromptRetriever"],
"category": "Retrievers",
"description": "Store prompt template with name & description to be later queried by MultiPromptChain",
"inputParams": [
{
"label": "Prompt Name",
"name": "name",
"type": "string",
"placeholder": "physics-qa",
"id": "promptRetriever_0-input-name-string"
},
{
"label": "Prompt Description",
"name": "description",
"type": "string",
"rows": 3,
"description": "Description of what the prompt does and when it should be used",
"placeholder": "Good for answering questions about physics",
"id": "promptRetriever_0-input-description-string"
},
{
"label": "Prompt System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"placeholder": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.",
"id": "promptRetriever_0-input-systemMessage-string"
}
],
"inputAnchors": [],
"inputs": {
"name": "physics",
"description": "Good for answering questions about physics",
"systemMessage": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know."
},
"outputAnchors": [
{
"id": "promptRetriever_0-output-promptRetriever-PromptRetriever",
"name": "promptRetriever",
"label": "PromptRetriever",
"type": "PromptRetriever"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 197.46642699727397,
"y": 25.945621297410923
},
"dragging": false
},
{
"width": 300,
"height": 280,
"id": "multiPromptChain_0",
"position": {
"x": 1619.1305522575494,
"y": 210.28103293821243
},
"type": "customNode",
"data": {
"id": "multiPromptChain_0",
"label": "Multi Prompt Chain",
"name": "multiPromptChain",
"type": "MultiPromptChain",
"baseClasses": ["MultiPromptChain", "MultiRouteChain", "BaseChain", "BaseLangChain"],
"category": "Chains",
"description": "Chain automatically picks an appropriate prompt from multiple prompt templates",
"inputParams": [],
"inputAnchors": [
{
"label": "Language Model",
"name": "model",
"type": "BaseLanguageModel",
"id": "multiPromptChain_0-input-model-BaseLanguageModel"
},
{
"label": "Prompt Retriever",
"name": "promptRetriever",
"type": "PromptRetriever",
"list": true,
"id": "multiPromptChain_0-input-promptRetriever-PromptRetriever"
}
],
"inputs": {
"model": "{{chatOpenAI_0.data.instance}}",
"promptRetriever": [
"{{promptRetriever_0.data.instance}}",
"{{promptRetriever_2.data.instance}}",
"{{promptRetriever_1.data.instance}}"
]
},
"outputAnchors": [
{
"id": "multiPromptChain_0-output-multiPromptChain-MultiPromptChain|MultiRouteChain|BaseChain|BaseLangChain",
"name": "multiPromptChain",
"label": "MultiPromptChain",
"type": "MultiPromptChain | MultiRouteChain | BaseChain | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"positionAbsolute": {
"x": 1619.1305522575494,
"y": 210.28103293821243
},
"selected": false,
"dragging": false
},
{
"width": 300,
"height": 632,
"id": "promptRetriever_1",
"position": {
"x": 539.1322780233141,
"y": -250.72967142925938
},
"type": "customNode",
"data": {
"id": "promptRetriever_1",
"label": "Prompt Retriever",
"name": "promptRetriever",
"type": "PromptRetriever",
"baseClasses": ["PromptRetriever"],
"category": "Retrievers",
"description": "Store prompt template with name & description to be later queried by MultiPromptChain",
"inputParams": [
{
"label": "Prompt Name",
"name": "name",
"type": "string",
"placeholder": "physics-qa",
"id": "promptRetriever_1-input-name-string"
},
{
"label": "Prompt Description",
"name": "description",
"type": "string",
"rows": 3,
"description": "Description of what the prompt does and when it should be used",
"placeholder": "Good for answering questions about physics",
"id": "promptRetriever_1-input-description-string"
},
{
"label": "Prompt System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"placeholder": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.",
"id": "promptRetriever_1-input-systemMessage-string"
}
],
"inputAnchors": [],
"inputs": {
"name": "math",
"description": "Good for answering math questions",
"systemMessage": "You are a very good mathematician. You are great at answering math questions. You are so good because you are able to break down hard problems into their component parts, answer the component parts, and then put them together to answer the broader question."
},
"outputAnchors": [
{
"id": "promptRetriever_1-output-promptRetriever-PromptRetriever",
"name": "promptRetriever",
"label": "PromptRetriever",
"type": "PromptRetriever"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 539.1322780233141,
"y": -250.72967142925938
},
"dragging": false
},
{
"width": 300,
"height": 632,
"id": "promptRetriever_2",
"position": {
"x": 872.6184534864304,
"y": -366.9443140594265
},
"type": "customNode",
"data": {
"id": "promptRetriever_2",
"label": "Prompt Retriever",
"name": "promptRetriever",
"type": "PromptRetriever",
"baseClasses": ["PromptRetriever"],
"category": "Retrievers",
"description": "Store prompt template with name & description to be later queried by MultiPromptChain",
"inputParams": [
{
"label": "Prompt Name",
"name": "name",
"type": "string",
"placeholder": "physics-qa",
"id": "promptRetriever_2-input-name-string"
},
{
"label": "Prompt Description",
"name": "description",
"type": "string",
"rows": 3,
"description": "Description of what the prompt does and when it should be used",
"placeholder": "Good for answering questions about physics",
"id": "promptRetriever_2-input-description-string"
},
{
"label": "Prompt System Message",
"name": "systemMessage",
"type": "string",
"rows": 4,
"placeholder": "You are a very smart physics professor. You are great at answering questions about physics in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know.",
"id": "promptRetriever_2-input-systemMessage-string"
}
],
"inputAnchors": [],
"inputs": {
"name": "history",
"description": "Good for answering questions about history",
"systemMessage": "You are a very smart history professor. You are great at answering questions about history in a concise and easy to understand manner. When you don't know the answer to a question you admit that you don't know."
},
"outputAnchors": [
{
"id": "promptRetriever_2-output-promptRetriever-PromptRetriever",
"name": "promptRetriever",
"label": "PromptRetriever",
"type": "PromptRetriever"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 872.6184534864304,
"y": -366.9443140594265
},
"dragging": false
},
{
"width": 300,
"height": 524,
"id": "chatOpenAI_0",
"position": {
"x": 1230.07368145571,
"y": -296.44522826934826
},
"type": "customNode",
"data": {
"id": "chatOpenAI_0",
"label": "ChatOpenAI",
"name": "chatOpenAI",
"type": "ChatOpenAI",
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "BaseLangChain"],
"category": "Chat Models",
"description": "Wrapper around OpenAI large language models that use the Chat endpoint",
"inputParams": [
{
"label": "OpenAI Api Key",
"name": "openAIApiKey",
"type": "password",
"id": "chatOpenAI_0-input-openAIApiKey-password"
},
{
"label": "Model Name",
"name": "modelName",
"type": "options",
"options": [
{
"label": "gpt-4",
"name": "gpt-4"
},
{
"label": "gpt-4-0314",
"name": "gpt-4-0314"
},
{
"label": "gpt-4-32k-0314",
"name": "gpt-4-32k-0314"
},
{
"label": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo"
},
{
"label": "gpt-3.5-turbo-0301",
"name": "gpt-3.5-turbo-0301"
}
],
"default": "gpt-3.5-turbo",
"optional": true,
"id": "chatOpenAI_0-input-modelName-options"
},
{
"label": "Temperature",
"name": "temperature",
"type": "number",
"default": 0.9,
"optional": true,
"id": "chatOpenAI_0-input-temperature-number"
},
{
"label": "Max Tokens",
"name": "maxTokens",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-maxTokens-number"
},
{
"label": "Top Probability",
"name": "topP",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-topP-number"
},
{
"label": "Frequency Penalty",
"name": "frequencyPenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-frequencyPenalty-number"
},
{
"label": "Presence Penalty",
"name": "presencePenalty",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-presencePenalty-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "chatOpenAI_0-input-timeout-number"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "gpt-3.5-turbo",
"temperature": 0.9,
"maxTokens": "",
"topP": "",
"frequencyPenalty": "",
"presencePenalty": "",
"timeout": ""
},
"outputAnchors": [
{
"id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
"name": "chatOpenAI",
"label": "ChatOpenAI",
"type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | BaseLangChain"
}
],
"outputs": {},
"selected": false
},
"selected": false,
"positionAbsolute": {
"x": 1230.07368145571,
"y": -296.44522826934826
},
"dragging": false
}
],
"edges": [
{
"source": "promptRetriever_0",
"sourceHandle": "promptRetriever_0-output-promptRetriever-PromptRetriever",
"target": "multiPromptChain_0",
"targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever",
"type": "buttonedge",
"id": "promptRetriever_0-promptRetriever_0-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever",
"data": {
"label": ""
}
},
{
"source": "promptRetriever_2",
"sourceHandle": "promptRetriever_2-output-promptRetriever-PromptRetriever",
"target": "multiPromptChain_0",
"targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever",
"type": "buttonedge",
"id": "promptRetriever_2-promptRetriever_2-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever",
"data": {
"label": ""
}
},
{
"source": "promptRetriever_1",
"sourceHandle": "promptRetriever_1-output-promptRetriever-PromptRetriever",
"target": "multiPromptChain_0",
"targetHandle": "multiPromptChain_0-input-promptRetriever-PromptRetriever",
"type": "buttonedge",
"id": "promptRetriever_1-promptRetriever_1-output-promptRetriever-PromptRetriever-multiPromptChain_0-multiPromptChain_0-input-promptRetriever-PromptRetriever",
"data": {
"label": ""
}
},
{
"source": "chatOpenAI_0",
"sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain",
"target": "multiPromptChain_0",
"targetHandle": "multiPromptChain_0-input-model-BaseLanguageModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|BaseLangChain-multiPromptChain_0-multiPromptChain_0-input-model-BaseLanguageModel",
"data": {
"label": ""
}
}
]
}