Bugfix/Avoid hardcoded max tokens (#2003)

remove hard-coded max tokens
pull/2038/head
Henry Heng 2024-03-25 12:10:31 +08:00 committed by GitHub
parent 4ca82ee733
commit 414b9f125c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 14 additions and 15 deletions

View File

@@ -5,15 +5,15 @@ import { IVisionChatModal, IMultiModalOption } from '../../../src'
export class BedrockChat extends LCBedrockChat implements IVisionChatModal {
configuredModel: string
configuredMaxToken: number
configuredMaxToken?: number
multiModalOption: IMultiModalOption
id: string
constructor(id: string, fields: BaseBedrockInput & BaseChatModelParams) {
super(fields)
this.id = id
this.configuredModel = fields?.model || 'anthropic.claude-3-haiku-20240307-v1:0'
this.configuredMaxToken = fields?.maxTokens ?? 256
this.configuredModel = fields?.model || ''
this.configuredMaxToken = fields?.maxTokens
}
revertToOriginalModel(): void {
@@ -28,7 +28,7 @@ export class BedrockChat extends LCBedrockChat implements IVisionChatModal {
setVisionModel(): void {
if (!this.model.startsWith('claude-3')) {
super.model = 'anthropic.claude-3-haiku-20240307-v1:0'
super.maxTokens = 1024
super.maxTokens = this.configuredMaxToken ? this.configuredMaxToken : 1024
}
}
}

View File

@@ -125,7 +125,7 @@ class ChatAnthropic_ChatModels implements INode {
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const maxTokensToSample = nodeData.inputs?.maxTokensToSample as string
const maxTokens = nodeData.inputs?.maxTokensToSample as string
const topP = nodeData.inputs?.topP as string
const topK = nodeData.inputs?.topK as string
const streaming = nodeData.inputs?.streaming as boolean
@@ -143,7 +143,7 @@ class ChatAnthropic_ChatModels implements INode {
streaming: streaming ?? true
}
if (maxTokensToSample) obj.maxTokensToSample = parseInt(maxTokensToSample, 10)
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (topK) obj.topK = parseFloat(topK)
if (cache) obj.cache = cache

View File

@@ -11,8 +11,8 @@ export class ChatAnthropic extends LangchainChatAnthropic implements IVisionChat
constructor(id: string, fields: Partial<AnthropicInput> & BaseLLMParams & { anthropicApiKey?: string }) {
super(fields)
this.id = id
this.configuredModel = fields?.modelName || 'claude-3-haiku-20240307'
this.configuredMaxToken = fields?.maxTokens ?? 256
this.configuredModel = fields?.modelName || ''
this.configuredMaxToken = fields?.maxTokens ?? 2048
}
revertToOriginalModel(): void {
@@ -27,7 +27,7 @@ export class ChatAnthropic extends LangchainChatAnthropic implements IVisionChat
setVisionModel(): void {
if (!this.modelName.startsWith('claude-3')) {
super.modelName = 'claude-3-haiku-20240307'
super.maxTokens = 1024
super.maxTokens = this.configuredMaxToken ? this.configuredMaxToken : 2048
}
}
}

View File

@@ -5,7 +5,7 @@ import { IMultiModalOption, IVisionChatModal } from '../../../src'
export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal {
configuredModel: string
configuredMaxToken: number
configuredMaxToken?: number
multiModalOption: IMultiModalOption
id: string
@@ -19,8 +19,8 @@ export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal
) {
super(fields, configuration)
this.id = id
this.configuredModel = fields?.modelName ?? 'gpt-3.5-turbo'
this.configuredMaxToken = fields?.maxTokens ?? 256
this.configuredModel = fields?.modelName ?? ''
this.configuredMaxToken = fields?.maxTokens
}
revertToOriginalModel(): void {
@@ -34,6 +34,6 @@ export class ChatOpenAI extends LangchainChatOpenAI implements IVisionChatModal
setVisionModel(): void {
super.modelName = 'gpt-4-vision-preview'
super.maxTokens = 1024
super.maxTokens = this.configuredMaxToken ? this.configuredMaxToken : 1024
}
}

View File

@@ -274,9 +274,8 @@ export abstract class FlowiseSummaryMemory extends ConversationSummaryMemory imp
export interface IVisionChatModal {
id: string
configuredModel: string
configuredMaxToken: number
multiModalOption: IMultiModalOption
configuredMaxToken?: number
setVisionModel(): void
revertToOriginalModel(): void
setMultiModalOption(multiModalOption: IMultiModalOption): void