diff --git a/packages/components/nodes/chatmodels/AWSBedrock/FlowiseAWSChatBedrock.ts b/packages/components/nodes/chatmodels/AWSBedrock/FlowiseAWSChatBedrock.ts index 5fcd32eb..5e90b996 100644 --- a/packages/components/nodes/chatmodels/AWSBedrock/FlowiseAWSChatBedrock.ts +++ b/packages/components/nodes/chatmodels/AWSBedrock/FlowiseAWSChatBedrock.ts @@ -5,15 +5,15 @@ import { IVisionChatModal, IMultiModalOption } from '../../../src' export class BedrockChat extends LCBedrockChat implements IVisionChatModal { configuredModel: string - configuredMaxToken: number + configuredMaxToken?: number multiModalOption: IMultiModalOption id: string constructor(id: string, fields: BaseBedrockInput & BaseChatModelParams) { super(fields) this.id = id - this.configuredModel = fields?.model || 'anthropic.claude-3-haiku-20240307-v1:0' - this.configuredMaxToken = fields?.maxTokens ?? 256 + this.configuredModel = fields?.model || '' + this.configuredMaxToken = fields?.maxTokens } revertToOriginalModel(): void { @@ -28,7 +28,7 @@ export class BedrockChat extends LCBedrockChat implements IVisionChatModal { setVisionModel(): void { if (!this.model.startsWith('claude-3')) { super.model = 'anthropic.claude-3-haiku-20240307-v1:0' - super.maxTokens = 1024 + super.maxTokens = this.configuredMaxToken ? 
this.configuredMaxToken : 1024 } } } diff --git a/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts b/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts index 7eb00a5a..4c19634e 100644 --- a/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts +++ b/packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts @@ -125,7 +125,7 @@ class ChatAnthropic_ChatModels implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> { const temperature = nodeData.inputs?.temperature as string const modelName = nodeData.inputs?.modelName as string - const maxTokensToSample = nodeData.inputs?.maxTokensToSample as string + const maxTokens = nodeData.inputs?.maxTokensToSample as string const topP = nodeData.inputs?.topP as string const topK = nodeData.inputs?.topK as string const streaming = nodeData.inputs?.streaming as boolean @@ -143,7 +143,7 @@ class ChatAnthropic_ChatModels implements INode { streaming: streaming ?? true } - if (maxTokensToSample) obj.maxTokensToSample = parseInt(maxTokensToSample, 10) + if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) if (topP) obj.topP = parseFloat(topP) if (topK) obj.topK = parseFloat(topK) if (cache) obj.cache = cache diff --git a/packages/components/nodes/chatmodels/ChatAnthropic/FlowiseChatAnthropic.ts b/packages/components/nodes/chatmodels/ChatAnthropic/FlowiseChatAnthropic.ts index bef8c10f..86f6dcba 100644 --- a/packages/components/nodes/chatmodels/ChatAnthropic/FlowiseChatAnthropic.ts +++ b/packages/components/nodes/chatmodels/ChatAnthropic/FlowiseChatAnthropic.ts @@ -11,8 +11,8 @@ export class ChatAnthropic extends LangchainChatAnthropic implements IVisionChat constructor(id: string, fields: Partial<AnthropicInput> & BaseLLMParams & { anthropicApiKey?: string }) { super(fields) this.id = id - this.configuredModel = fields?.modelName || 'claude-3-haiku-20240307' - this.configuredMaxToken = fields?.maxTokens ?? 
256 + this.configuredModel = fields?.modelName || '' + this.configuredMaxToken = fields?.maxTokens ?? 2048 } revertToOriginalModel(): void { @@ -27,7 +27,7 @@ export class ChatAnthropic extends LangchainChatAnthropic implements IVisionChat setVisionModel(): void { if (!this.modelName.startsWith('claude-3')) { super.modelName = 'claude-3-haiku-20240307' - super.maxTokens = 1024 + super.maxTokens = this.configuredMaxToken ? this.configuredMaxToken : 2048 } } } diff --git a/packages/components/nodes/chatmodels/ChatOpenAI/FlowiseChatOpenAI.ts b/packages/components/nodes/chatmodels/ChatOpenAI/FlowiseChatOpenAI.ts index 0227362c..7bb5f511 100644 --- a/packages/components/nodes/chatmodels/ChatOpenAI/FlowiseChatOpenAI.ts +++ b/packages/components/nodes/chatmodels/ChatOpenAI/FlowiseChatOpenAI.ts @@ -5,7 +5,7 @@ import { IMultiModalOption, IVisionChatModal } from '../../../src' export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal { configuredModel: string - configuredMaxToken: number + configuredMaxToken?: number multiModalOption: IMultiModalOption id: string @@ -19,8 +19,8 @@ export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal ) { super(fields, configuration) this.id = id - this.configuredModel = fields?.modelName ?? 'gpt-3.5-turbo' - this.configuredMaxToken = fields?.maxTokens ?? 256 + this.configuredModel = fields?.modelName ?? '' + this.configuredMaxToken = fields?.maxTokens } revertToOriginalModel(): void { @@ -34,6 +34,6 @@ export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal setVisionModel(): void { super.modelName = 'gpt-4-vision-preview' - super.maxTokens = 1024 + super.maxTokens = this.configuredMaxToken ? 
this.configuredMaxToken : 1024 } } diff --git a/packages/components/src/Interface.ts b/packages/components/src/Interface.ts index 798e91a2..725f64c6 100644 --- a/packages/components/src/Interface.ts +++ b/packages/components/src/Interface.ts @@ -274,9 +274,8 @@ export abstract class FlowiseSummaryMemory extends ConversationSummaryMemory imp export interface IVisionChatModal { id: string configuredModel: string - configuredMaxToken: number multiModalOption: IMultiModalOption - + configuredMaxToken?: number setVisionModel(): void revertToOriginalModel(): void setMultiModalOption(multiModalOption: IMultiModalOption): void