update cohere package to resolve google genai pipeThrough bug

pull/2798/head
Henry 2024-07-17 19:26:01 +01:00
parent 811109cd76
commit 36eac28d23
7 changed files with 589 additions and 228 deletions

View File

@ -1,7 +1,7 @@
import { BaseCache } from '@langchain/core/caches'
import { Cohere, CohereInput } from '@langchain/cohere'
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { Cohere, CohereInput } from './core'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'
class Cohere_LLMs implements INode {

View File

@ -1,78 +0,0 @@
import { LLM, BaseLLMParams } from '@langchain/core/language_models/llms'
/** Configuration options for the legacy Cohere completion LLM wrapper. */
export interface CohereInput extends BaseLLMParams {
    /** Sampling temperature to use (higher values produce more random output) */
    temperature?: number
    /**
     * Maximum number of tokens to generate in the completion.
     */
    maxTokens?: number
    /** Model to use; when omitted the Cohere API's default model is used */
    model?: string
    /** Cohere API key — required; the constructor throws when it is missing */
    apiKey?: string
}
/**
 * Minimal LLM wrapper around the Cohere `generate` completion endpoint,
 * loading the `cohere-ai` SDK lazily via dynamic import.
 */
export class Cohere extends LLM implements CohereInput {
    temperature = 0
    maxTokens = 250
    model: string
    apiKey: string

    /**
     * @param fields optional configuration; `apiKey` may instead be supplied
     *               through the COHERE_API_KEY environment variable
     * @throws Error when no API key is provided by either mechanism
     */
    constructor(fields?: CohereInput) {
        super(fields ?? {})

        // Fall back to the environment variable, as the error message below promises.
        const apiKey = fields?.apiKey ?? process.env.COHERE_API_KEY
        if (!apiKey) {
            throw new Error('Please set the COHERE_API_KEY environment variable or pass it to the constructor as the apiKey field.')
        }
        this.apiKey = apiKey
        this.maxTokens = fields?.maxTokens ?? this.maxTokens
        this.temperature = fields?.temperature ?? this.temperature
        // NOTE(review): `model` has no default, so it remains undefined when not
        // supplied and the Cohere API default model is used — confirm no caller
        // reads `this.model` expecting a string before a model is configured.
        this.model = fields?.model ?? this.model
    }

    _llmType() {
        return 'cohere'
    }

    /** @ignore */
    async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
        const { cohere } = await Cohere.imports()
        cohere.init(this.apiKey)

        // Call the Cohere `generate` endpoint with the configured model and
        // sampling parameters; `caller` applies retry/abort handling.
        const generateResponse = await this.caller.callWithOptions({ signal: options.signal }, cohere.generate.bind(cohere), {
            prompt,
            model: this.model,
            max_tokens: this.maxTokens,
            temperature: this.temperature,
            end_sequences: options.stop
        })
        try {
            return generateResponse.body.generations[0].text
        } catch {
            // Response shape did not match the expected generations payload.
            throw new Error('Could not parse response.')
        }
    }

    /** @ignore */
    static async imports(): Promise<{
        cohere: typeof import('cohere-ai')
    }> {
        try {
            const { default: cohere } = await import('cohere-ai')
            return { cohere }
        } catch (e) {
            throw new Error('Please install cohere-ai as a dependency with, e.g. `pnpm install cohere-ai`')
        }
    }
}

View File

@ -3,7 +3,7 @@ import { DataSource } from 'typeorm'
import { RunnableSequence, RunnablePassthrough, RunnableConfig } from '@langchain/core/runnables'
import { ChatPromptTemplate, MessagesPlaceholder, HumanMessagePromptTemplate, BaseMessagePromptTemplateLike } from '@langchain/core/prompts'
import { BaseChatModel } from '@langchain/core/language_models/chat_models'
import { AIMessage, HumanMessage } from '@langchain/core/messages'
import { HumanMessage } from '@langchain/core/messages'
import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
import { type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
import { StringOutputParser } from '@langchain/core/output_parsers'
@ -21,7 +21,6 @@ import {
import { ToolCallingAgentOutputParser, AgentExecutor } from '../../../src/agents'
import { getInputVariables, getVars, handleEscapeCharacters, prepareSandboxVars } from '../../../src/utils'
import { customGet, getVM, processImageMessage, transformObjectPropertyToFunction } from '../commonUtils'
import { ChatMistralAI } from '@langchain/mistralai'
const examplePrompt = 'You are a research assistant who can search for up-to-date info using search engine.'
const customOutputFuncDesc = `This is only applicable when you have a custom State at the START node. After agent execution, you might want to update the State values`
@ -357,7 +356,6 @@ class Agent_SeqAgents implements INode {
return await agentNode(
{
state,
llm,
agent: await createAgent(
agentName,
state,
@ -510,7 +508,6 @@ async function createAgent(
async function agentNode(
{
state,
llm,
agent,
name,
abortControllerSignal,
@ -519,7 +516,6 @@ async function agentNode(
options
}: {
state: ISeqAgentsState
llm: BaseChatModel
agent: AgentExecutor | RunnableSequence
name: string
abortControllerSignal: AbortController
@ -557,22 +553,16 @@ async function agentNode(
const returnedOutput = await getReturnOutput(nodeData, input, options, formattedOutput, state)
return {
...returnedOutput,
messages: convertCustomMessagesToBaseMessages([outputContent], name, additional_kwargs, llm)
messages: convertCustomMessagesToBaseMessages([outputContent], name, additional_kwargs)
}
} else {
return {
messages: [
llm instanceof ChatMistralAI
? new HumanMessage({
content: outputContent,
name,
additional_kwargs: Object.keys(additional_kwargs).length ? additional_kwargs : undefined
})
: new AIMessage({
content: outputContent,
name,
additional_kwargs: Object.keys(additional_kwargs).length ? additional_kwargs : undefined
})
new HumanMessage({
content: outputContent,
name,
additional_kwargs: Object.keys(additional_kwargs).length ? additional_kwargs : undefined
})
]
}
}
@ -634,19 +624,13 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom
return {}
}
const convertCustomMessagesToBaseMessages = (messages: string[], name: string, additional_kwargs: ICommonObject, llm: BaseChatModel) => {
const convertCustomMessagesToBaseMessages = (messages: string[], name: string, additional_kwargs: ICommonObject) => {
return messages.map((message) => {
return llm instanceof ChatMistralAI
? new HumanMessage({
content: message,
name,
additional_kwargs: Object.keys(additional_kwargs).length ? additional_kwargs : undefined
})
: new AIMessage({
content: message,
name: name,
additional_kwargs: Object.keys(additional_kwargs).length ? additional_kwargs : undefined
})
return new HumanMessage({
content: message,
name,
additional_kwargs: Object.keys(additional_kwargs).length ? additional_kwargs : undefined
})
})
}

View File

@ -66,7 +66,7 @@
"axios": "1.6.2",
"cheerio": "^1.0.0-rc.12",
"chromadb": "^1.5.11",
"cohere-ai": "^6.2.0",
"cohere-ai": "^7.7.5",
"crypto-js": "^4.1.1",
"css-what": "^6.1.0",
"d3-dsv": "2",

View File

@ -546,8 +546,8 @@
"conditionAgentName": "Router Agent",
"sequentialNode": ["{{seqAgent_0.data.instance}}"],
"model": "",
"systemMessagePrompt": "You are an expert customer support routing system.\nYour job is to detect whether a customer support representative is routing a user to the billing or technical support team, or just responding conversationally.",
"humanMessagePrompt": "The previous conversation is an interaction between a customer support representative and a user.\nExtract whether the representative is routing the user to the technical support team, or just responding conversationally.\n\nIf representative want to route the user to the billing team, respond only with the word \"BILLING\".\n\nIf representative want to route the user to the technical support team, respond only with the word \"TECHNICAL\".\n\nOtherwise, respond only with the word \"CONVERSATION\".\n\nRemember, only respond with one of the above words.",
"systemMessagePrompt": "You are an expert customer support routing system.\nYour job is to detect whether a frontline support is routing a user to the billing or technical support team, or just responding conversationally.",
"humanMessagePrompt": "The previous conversation is an interaction between a frontline support and a user. Based on the latest message, extract whether the support is routing the user to the technical support team, or just responding conversationally.\n\nIf representative want to route the user to the billing team, respond only with the word \"BILLING\".\n\nIf representative want to route the user to the technical support team, respond only with the word \"TECHNICAL\".\n\nOtherwise, respond only with the word \"CONVERSATION\".\n\nRemember, only respond with one of the above words.",
"promptValues": "",
"conditionAgentStructuredOutput": "[{\"key\":\"route\",\"type\":\"Enum\",\"enumValues\":\"BILLING, TECHNICAL, CONVERSATION\",\"description\":\"the route to take next\",\"actions\":\"\",\"id\":1}]",
"condition": "",
@ -780,7 +780,7 @@
"agentName": "Billing Team",
"systemMessagePrompt": "You are an expert billing support specialist for Flowise, a company that sells computers.\nHelp the user to the best of your ability, but be concise in your responses.\nYou have the ability to authorize refunds, which you can do collecting the required information.",
"humanMessagePrompt": "",
"tools": "",
"tools": ["{{customTool_0.data.instance}}"],
"sequentialNode": ["{{seqConditionAgent_0.data.instance}}"],
"model": "",
"promptValues": "",
@ -990,7 +990,7 @@
"agentName": "Technical Team",
"systemMessagePrompt": "You are an expert at diagnosing technical computer issues. You work for a company called Flowise that sells computers.\n\nUse the \"search_manual\" tool to look for relavant information to answer user question to the best of your ability, be concise in your responses.",
"humanMessagePrompt": "",
"tools": "",
"tools": ["{{retrieverTool_0.data.instance}}"],
"sequentialNode": ["{{seqConditionAgent_0.data.instance}}"],
"model": "",
"promptValues": "",
@ -1290,10 +1290,11 @@
"data": {
"id": "stickyNote_0",
"label": "Sticky Note",
"version": 1,
"version": 2,
"name": "stickyNote",
"type": "StickyNote",
"baseClasses": ["StickyNote"],
"tags": ["Utilities"],
"category": "Utilities",
"description": "Add a sticky note",
"inputParams": [
@ -1331,6 +1332,438 @@
"y": -186.42930966821612
},
"dragging": false
},
{
"id": "customTool_0",
"position": {
"x": 1283.1262680528823,
"y": -524.6893630236756
},
"type": "customNode",
"data": {
"id": "customTool_0",
"label": "Custom Tool",
"version": 1,
"name": "customTool",
"type": "CustomTool",
"baseClasses": ["CustomTool", "Tool", "StructuredTool", "Runnable"],
"category": "Tools",
"description": "Use custom tool you've created in Flowise within chatflow",
"inputParams": [
{
"label": "Select Tool",
"name": "selectedTool",
"type": "asyncOptions",
"loadMethod": "listTools",
"id": "customTool_0-input-selectedTool-asyncOptions"
}
],
"inputAnchors": [],
"inputs": {
"selectedTool": ""
},
"outputAnchors": [
{
"id": "customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
"name": "customTool",
"label": "CustomTool",
"description": "Use custom tool you've created in Flowise within chatflow",
"type": "CustomTool | Tool | StructuredTool | Runnable"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 285,
"selected": false,
"positionAbsolute": {
"x": 1283.1262680528823,
"y": -524.6893630236756
},
"dragging": false
},
{
"id": "stickyNote_1",
"position": {
"x": 966.9014980551112,
"y": -502.9862305655977
},
"type": "stickyNote",
"data": {
"id": "stickyNote_1",
"label": "Sticky Note",
"version": 2,
"name": "stickyNote",
"type": "StickyNote",
"baseClasses": ["StickyNote"],
"tags": ["Utilities"],
"category": "Utilities",
"description": "Add a sticky note",
"inputParams": [
{
"label": "",
"name": "note",
"type": "string",
"rows": 1,
"placeholder": "Type something here",
"optional": true,
"id": "stickyNote_1-input-note-string"
}
],
"inputAnchors": [],
"inputs": {
"note": "A custom tool that checks the user order receipt number and email. If record found, proceed with refund"
},
"outputAnchors": [
{
"id": "stickyNote_1-output-stickyNote-StickyNote",
"name": "stickyNote",
"label": "StickyNote",
"description": "Add a sticky note",
"type": "StickyNote"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 82,
"selected": false,
"positionAbsolute": {
"x": 966.9014980551112,
"y": -502.9862305655977
},
"dragging": false
},
{
"id": "retrieverTool_0",
"position": {
"x": 1281.4243491233265,
"y": 769.9943552071177
},
"type": "customNode",
"data": {
"id": "retrieverTool_0",
"label": "Retriever Tool",
"version": 2,
"name": "retrieverTool",
"type": "RetrieverTool",
"baseClasses": ["RetrieverTool", "DynamicTool", "Tool", "StructuredTool", "Runnable"],
"category": "Tools",
"description": "Use a retriever as allowed tool for agent",
"inputParams": [
{
"label": "Retriever Name",
"name": "name",
"type": "string",
"placeholder": "search_state_of_union",
"id": "retrieverTool_0-input-name-string"
},
{
"label": "Retriever Description",
"name": "description",
"type": "string",
"description": "When should agent uses to retrieve documents",
"rows": 3,
"placeholder": "Searches and returns documents regarding the state-of-the-union.",
"id": "retrieverTool_0-input-description-string"
},
{
"label": "Return Source Documents",
"name": "returnSourceDocuments",
"type": "boolean",
"optional": true,
"id": "retrieverTool_0-input-returnSourceDocuments-boolean"
}
],
"inputAnchors": [
{
"label": "Retriever",
"name": "retriever",
"type": "BaseRetriever",
"id": "retrieverTool_0-input-retriever-BaseRetriever"
}
],
"inputs": {
"name": "search_technical",
"description": "Searches and return dcouments regarding technical issues",
"retriever": "{{faiss_0.data.instance}}",
"returnSourceDocuments": true
},
"outputAnchors": [
{
"id": "retrieverTool_0-output-retrieverTool-RetrieverTool|DynamicTool|Tool|StructuredTool|Runnable",
"name": "retrieverTool",
"label": "RetrieverTool",
"description": "Use a retriever as allowed tool for agent",
"type": "RetrieverTool | DynamicTool | Tool | StructuredTool | Runnable"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 602,
"selected": false,
"positionAbsolute": {
"x": 1281.4243491233265,
"y": 769.9943552071177
},
"dragging": false
},
{
"id": "openAIEmbeddings_0",
"position": {
"x": 583.6375880054426,
"y": 909.5517074306946
},
"type": "customNode",
"data": {
"id": "openAIEmbeddings_0",
"label": "OpenAI Embeddings",
"version": 4,
"name": "openAIEmbeddings",
"type": "OpenAIEmbeddings",
"baseClasses": ["OpenAIEmbeddings", "Embeddings"],
"category": "Embeddings",
"description": "OpenAI API to generate embeddings for a given text",
"inputParams": [
{
"label": "Connect Credential",
"name": "credential",
"type": "credential",
"credentialNames": ["openAIApi"],
"id": "openAIEmbeddings_0-input-credential-credential"
},
{
"label": "Model Name",
"name": "modelName",
"type": "asyncOptions",
"loadMethod": "listModels",
"default": "text-embedding-ada-002",
"id": "openAIEmbeddings_0-input-modelName-asyncOptions"
},
{
"label": "Strip New Lines",
"name": "stripNewLines",
"type": "boolean",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-stripNewLines-boolean"
},
{
"label": "Batch Size",
"name": "batchSize",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-batchSize-number"
},
{
"label": "Timeout",
"name": "timeout",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-timeout-number"
},
{
"label": "BasePath",
"name": "basepath",
"type": "string",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-basepath-string"
},
{
"label": "Dimensions",
"name": "dimensions",
"type": "number",
"optional": true,
"additionalParams": true,
"id": "openAIEmbeddings_0-input-dimensions-number"
}
],
"inputAnchors": [],
"inputs": {
"modelName": "text-embedding-ada-002",
"stripNewLines": "",
"batchSize": "",
"timeout": "",
"basepath": "",
"dimensions": ""
},
"outputAnchors": [
{
"id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"name": "openAIEmbeddings",
"label": "OpenAIEmbeddings",
"description": "OpenAI API to generate embeddings for a given text",
"type": "OpenAIEmbeddings | Embeddings"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 423,
"selected": false,
"positionAbsolute": {
"x": 583.6375880054426,
"y": 909.5517074306946
},
"dragging": false
},
{
"id": "faiss_0",
"position": {
"x": 932.5309685643846,
"y": 887.426761346469
},
"type": "customNode",
"data": {
"id": "faiss_0",
"label": "Faiss",
"version": 1,
"name": "faiss",
"type": "Faiss",
"baseClasses": ["Faiss", "VectorStoreRetriever", "BaseRetriever"],
"category": "Vector Stores",
"description": "Upsert embedded data and perform similarity search upon query using Faiss library from Meta",
"inputParams": [
{
"label": "Base Path to load",
"name": "basePath",
"description": "Path to load faiss.index file",
"placeholder": "C:\\Users\\User\\Desktop",
"type": "string",
"id": "faiss_0-input-basePath-string"
},
{
"label": "Top K",
"name": "topK",
"description": "Number of top results to fetch. Default to 4",
"placeholder": "4",
"type": "number",
"additionalParams": true,
"optional": true,
"id": "faiss_0-input-topK-number"
}
],
"inputAnchors": [
{
"label": "Document",
"name": "document",
"type": "Document",
"list": true,
"optional": true,
"id": "faiss_0-input-document-Document"
},
{
"label": "Embeddings",
"name": "embeddings",
"type": "Embeddings",
"id": "faiss_0-input-embeddings-Embeddings"
}
],
"inputs": {
"document": "",
"embeddings": "{{openAIEmbeddings_0.data.instance}}",
"basePath": "",
"topK": ""
},
"outputAnchors": [
{
"name": "output",
"label": "Output",
"type": "options",
"description": "",
"options": [
{
"id": "faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever",
"name": "retriever",
"label": "Faiss Retriever",
"description": "",
"type": "Faiss | VectorStoreRetriever | BaseRetriever"
},
{
"id": "faiss_0-output-vectorStore-Faiss|SaveableVectorStore|VectorStore",
"name": "vectorStore",
"label": "Faiss Vector Store",
"description": "",
"type": "Faiss | SaveableVectorStore | VectorStore"
}
],
"default": "retriever"
}
],
"outputs": {
"output": "retriever"
},
"selected": false
},
"width": 300,
"height": 458,
"selected": false,
"positionAbsolute": {
"x": 932.5309685643846,
"y": 887.426761346469
},
"dragging": false
},
{
"id": "stickyNote_2",
"position": {
"x": 2011.4161039844623,
"y": 835.680987230599
},
"type": "stickyNote",
"data": {
"id": "stickyNote_2",
"label": "Sticky Note",
"version": 2,
"name": "stickyNote",
"type": "StickyNote",
"baseClasses": ["StickyNote"],
"tags": ["Utilities"],
"category": "Utilities",
"description": "Add a sticky note",
"inputParams": [
{
"label": "",
"name": "note",
"type": "string",
"rows": 1,
"placeholder": "Type something here",
"optional": true,
"id": "stickyNote_2-input-note-string"
}
],
"inputAnchors": [],
"inputs": {
"note": "This agent is a RAG that is able to search for answers given user question"
},
"outputAnchors": [
{
"id": "stickyNote_2-output-stickyNote-StickyNote",
"name": "stickyNote",
"label": "StickyNote",
"description": "Add a sticky note",
"type": "StickyNote"
}
],
"outputs": {},
"selected": false
},
"width": 300,
"height": 62,
"selected": false,
"positionAbsolute": {
"x": 2011.4161039844623,
"y": 835.680987230599
},
"dragging": false
}
],
"edges": [
@ -1397,6 +1830,38 @@
"targetHandle": "seqStart_0-input-model-BaseChatModel",
"type": "buttonedge",
"id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-seqStart_0-seqStart_0-input-model-BaseChatModel"
},
{
"source": "customTool_0",
"sourceHandle": "customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable",
"target": "seqAgent_1",
"targetHandle": "seqAgent_1-input-tools-Tool",
"type": "buttonedge",
"id": "customTool_0-customTool_0-output-customTool-CustomTool|Tool|StructuredTool|Runnable-seqAgent_1-seqAgent_1-input-tools-Tool"
},
{
"source": "retrieverTool_0",
"sourceHandle": "retrieverTool_0-output-retrieverTool-RetrieverTool|DynamicTool|Tool|StructuredTool|Runnable",
"target": "seqAgent_2",
"targetHandle": "seqAgent_2-input-tools-Tool",
"type": "buttonedge",
"id": "retrieverTool_0-retrieverTool_0-output-retrieverTool-RetrieverTool|DynamicTool|Tool|StructuredTool|Runnable-seqAgent_2-seqAgent_2-input-tools-Tool"
},
{
"source": "faiss_0",
"sourceHandle": "faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever",
"target": "retrieverTool_0",
"targetHandle": "retrieverTool_0-input-retriever-BaseRetriever",
"type": "buttonedge",
"id": "faiss_0-faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever-retrieverTool_0-retrieverTool_0-input-retriever-BaseRetriever"
},
{
"source": "openAIEmbeddings_0",
"sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings",
"target": "faiss_0",
"targetHandle": "faiss_0-input-embeddings-Embeddings",
"type": "buttonedge",
"id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-faiss_0-faiss_0-input-embeddings-Embeddings"
}
]
}

View File

@ -1494,7 +1494,11 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
<>
{elem.type === 'approve-button' && elem.label === 'Yes' ? (
<Button
sx={{ width: 'max-content', borderRadius: '20px', background: 'white' }}
sx={{
width: 'max-content',
borderRadius: '20px',
background: customization.isDarkMode ? 'transparent' : 'white'
}}
variant='outlined'
color='success'
key={index}
@ -1505,7 +1509,11 @@ export const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, preview
</Button>
) : elem.type === 'reject-button' && elem.label === 'No' ? (
<Button
sx={{ width: 'max-content', borderRadius: '20px', background: 'white' }}
sx={{
width: 'max-content',
borderRadius: '20px',
background: customization.isDarkMode ? 'transparent' : 'white'
}}
variant='outlined'
color='error'
key={index}

File diff suppressed because one or more lines are too long