Compare commits
23 Commits
flowise-ui
...
main
| Author | SHA1 | Date |
|---|---|---|
|
|
54ff43e8f1 | |
|
|
95b2cf7b7f | |
|
|
074bb738a3 | |
|
|
78e60e22d2 | |
|
|
9e88c45051 | |
|
|
363d1bfc44 | |
|
|
9ea439d135 | |
|
|
1015e1193f | |
|
|
7166317482 | |
|
|
3cbbd59242 | |
|
|
90558ca688 | |
|
|
b1e38783e4 | |
|
|
dfdeb02b3a | |
|
|
cacbfa8162 | |
|
|
656f6cad81 | |
|
|
efc6e02828 | |
|
|
512df4197c | |
|
|
4d174495dc | |
|
|
15a416a58f | |
|
|
e69fee1375 | |
|
|
cc24f94358 | |
|
|
b55f87cc40 | |
|
|
7067f90153 |
|
|
@ -44,9 +44,9 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0
|
|||
|
||||
1. Go to `docker` folder at the root of the project
|
||||
2. Copy `.env.example` file, paste it into the same location, and rename to `.env`
|
||||
3. `docker-compose up -d`
|
||||
3. `docker compose up -d`
|
||||
4. Open [http://localhost:3000](http://localhost:3000)
|
||||
5. You can bring the containers down by `docker-compose stop`
|
||||
5. You can bring the containers down by `docker compose stop`
|
||||
|
||||
### Docker Image
|
||||
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ BLOB_STORAGE_PATH=/root/.flowise/storage
|
|||
# DISABLE_CHATFLOW_REUSE=true
|
||||
|
||||
# DEBUG=true
|
||||
# LOG_LEVEL=debug (error | warn | info | verbose | debug)
|
||||
# LOG_LEVEL=info (error | warn | info | verbose | debug)
|
||||
# TOOL_FUNCTION_BUILTIN_DEP=crypto,fs
|
||||
# TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash
|
||||
|
||||
|
|
|
|||
|
|
@ -5,9 +5,9 @@ Starts Flowise from [DockerHub Image](https://hub.docker.com/r/flowiseai/flowise
|
|||
## Usage
|
||||
|
||||
1. Create `.env` file and specify the `PORT` (refer to `.env.example`)
|
||||
2. `docker-compose up -d`
|
||||
2. `docker compose up -d`
|
||||
3. Open [http://localhost:3000](http://localhost:3000)
|
||||
4. You can bring the containers down by `docker-compose stop`
|
||||
4. You can bring the containers down by `docker compose stop`
|
||||
|
||||
## 🔒 Authentication
|
||||
|
||||
|
|
@ -19,9 +19,9 @@ Starts Flowise from [DockerHub Image](https://hub.docker.com/r/flowiseai/flowise
|
|||
- FLOWISE_USERNAME=${FLOWISE_USERNAME}
|
||||
- FLOWISE_PASSWORD=${FLOWISE_PASSWORD}
|
||||
```
|
||||
3. `docker-compose up -d`
|
||||
3. `docker compose up -d`
|
||||
4. Open [http://localhost:3000](http://localhost:3000)
|
||||
5. You can bring the containers down by `docker-compose stop`
|
||||
5. You can bring the containers down by `docker compose stop`
|
||||
|
||||
## 🌱 Env Variables
|
||||
|
||||
|
|
|
|||
|
|
@ -44,9 +44,9 @@
|
|||
|
||||
1. プロジェクトのルートにある `docker` フォルダに移動する
|
||||
2. `.env.example` ファイルをコピーして同じ場所に貼り付け、名前を `.env` に変更する
|
||||
3. `docker-compose up -d`
|
||||
3. `docker compose up -d`
|
||||
4. [http://localhost:3000](http://localhost:3000) を開く
|
||||
5. コンテナを停止するには、`docker-compose stop` を使用します
|
||||
5. コンテナを停止するには、`docker compose stop` を使用します
|
||||
|
||||
### Docker Image
|
||||
|
||||
|
|
|
|||
|
|
@ -44,9 +44,9 @@
|
|||
|
||||
1. 프로젝트의 최상위(root) 디렉토리에 있는 `docker` 폴더로 이동하세요.
|
||||
2. `.env.example` 파일을 복사한 후, 같은 경로에 붙여넣기 한 다음, `.env`로 이름을 변경합니다.
|
||||
3. `docker-compose up -d` 실행
|
||||
3. `docker compose up -d` 실행
|
||||
4. [http://localhost:3000](http://localhost:3000) URL 열기
|
||||
5. `docker-compose stop` 명령어를 통해 컨테이너를 종료시킬 수 있습니다.
|
||||
5. `docker compose stop` 명령어를 통해 컨테이너를 종료시킬 수 있습니다.
|
||||
|
||||
### 도커 이미지 활용
|
||||
|
||||
|
|
|
|||
|
|
@ -44,9 +44,9 @@
|
|||
|
||||
1. 进入项目根目录下的 `docker` 文件夹
|
||||
2. 创建 `.env` 文件并指定 `PORT`(参考 `.env.example`)
|
||||
3. 运行 `docker-compose up -d`
|
||||
3. 运行 `docker compose up -d`
|
||||
4. 打开 [http://localhost:3000](http://localhost:3000)
|
||||
5. 可以通过 `docker-compose stop` 停止容器
|
||||
5. 可以通过 `docker compose stop` 停止容器
|
||||
|
||||
### Docker 镜像
|
||||
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "flowise",
|
||||
"version": "1.8.2",
|
||||
"version": "1.8.4",
|
||||
"private": true,
|
||||
"homepage": "https://flowiseai.com",
|
||||
"workspaces": [
|
||||
|
|
|
|||
|
|
@ -0,0 +1,28 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class BaiduApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Baidu API'
|
||||
this.name = 'baiduApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Baidu Api Key',
|
||||
name: 'baiduApiKey',
|
||||
type: 'password'
|
||||
},
|
||||
{
|
||||
label: 'Baidu Secret Key',
|
||||
name: 'baiduSecretKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: BaiduApi }
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class FireCrawlApiCredential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'FireCrawl API'
|
||||
this.name = 'fireCrawlApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'You can find the FireCrawl API token on your <a target="_blank" href="https://www.firecrawl.dev/">FireCrawl account</a> page.'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'FireCrawl API',
|
||||
name: 'firecrawlApiToken',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: FireCrawlApiCredential }
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class SpiderApiCredential implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Spider API'
|
||||
this.name = 'spiderApi'
|
||||
this.version = 1.0
|
||||
this.description = 'Get your API key from the <a target="_blank" href="https://spider.cloud">Spider</a> dashboard.'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Spider API Key',
|
||||
name: 'spiderApiKey',
|
||||
type: 'password'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: SpiderApiCredential }
|
||||
|
|
@ -1,187 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { BaseMessage } from '@langchain/core/messages'
|
||||
import { ChainValues } from '@langchain/core/utils/types'
|
||||
import { AgentStep } from '@langchain/core/agents'
|
||||
import { RunnableSequence } from '@langchain/core/runnables'
|
||||
import { ChatOpenAI, formatToOpenAIFunction } from '@langchain/openai'
|
||||
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
|
||||
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
|
||||
import { FlowiseMemory, ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
|
||||
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
|
||||
import { checkInputs, Moderation } from '../../moderation/Moderation'
|
||||
import { formatResponse } from '../../outputparsers/OutputParserHelpers'
|
||||
|
||||
const defaultMessage = `Do your best to answer the questions. Feel free to use any tools available to look up relevant information, only if necessary.`
|
||||
|
||||
class ConversationalRetrievalAgent_Agents implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
badge?: string
|
||||
sessionId?: string
|
||||
|
||||
constructor(fields?: { sessionId?: string }) {
|
||||
this.label = 'Conversational Retrieval Agent'
|
||||
this.name = 'conversationalRetrievalAgent'
|
||||
this.version = 4.0
|
||||
this.type = 'AgentExecutor'
|
||||
this.category = 'Agents'
|
||||
this.badge = 'DEPRECATING'
|
||||
this.icon = 'agent.svg'
|
||||
this.description = `An agent optimized for retrieval during conversation, answering questions based on past dialogue, all using OpenAI's Function Calling`
|
||||
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Allowed Tools',
|
||||
name: 'tools',
|
||||
type: 'Tool',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Memory',
|
||||
name: 'memory',
|
||||
type: 'BaseChatMemory'
|
||||
},
|
||||
{
|
||||
label: 'OpenAI/Azure Chat Model',
|
||||
name: 'model',
|
||||
type: 'BaseChatModel'
|
||||
},
|
||||
{
|
||||
label: 'System Message',
|
||||
name: 'systemMessage',
|
||||
type: 'string',
|
||||
default: defaultMessage,
|
||||
rows: 4,
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Input Moderation',
|
||||
description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',
|
||||
name: 'inputModeration',
|
||||
type: 'Moderation',
|
||||
optional: true,
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Max Iterations',
|
||||
name: 'maxIterations',
|
||||
type: 'number',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
}
|
||||
]
|
||||
this.sessionId = fields?.sessionId
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
|
||||
return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | object> {
|
||||
const memory = nodeData.inputs?.memory as FlowiseMemory
|
||||
const moderations = nodeData.inputs?.inputModeration as Moderation[]
|
||||
|
||||
if (moderations && moderations.length > 0) {
|
||||
try {
|
||||
// Use the output of the moderation chain as input for the BabyAGI agent
|
||||
input = await checkInputs(moderations, input)
|
||||
} catch (e) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 500))
|
||||
//streamResponse(options.socketIO && options.socketIOClientId, e.message, options.socketIO, options.socketIOClientId)
|
||||
return formatResponse(e.message)
|
||||
}
|
||||
}
|
||||
|
||||
const executor = prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
|
||||
|
||||
const loggerHandler = new ConsoleCallbackHandler(options.logger)
|
||||
const callbacks = await additionalCallbacks(nodeData, options)
|
||||
|
||||
let res: ChainValues = {}
|
||||
|
||||
if (options.socketIO && options.socketIOClientId) {
|
||||
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
|
||||
res = await executor.invoke({ input }, { callbacks: [loggerHandler, handler, ...callbacks] })
|
||||
} else {
|
||||
res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
|
||||
}
|
||||
|
||||
await memory.addChatMessages(
|
||||
[
|
||||
{
|
||||
text: input,
|
||||
type: 'userMessage'
|
||||
},
|
||||
{
|
||||
text: res?.output,
|
||||
type: 'apiMessage'
|
||||
}
|
||||
],
|
||||
this.sessionId
|
||||
)
|
||||
|
||||
return res?.output
|
||||
}
|
||||
}
|
||||
|
||||
const prepareAgent = (nodeData: INodeData, options: ICommonObject, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
|
||||
const model = nodeData.inputs?.model as ChatOpenAI
|
||||
const memory = nodeData.inputs?.memory as FlowiseMemory
|
||||
const systemMessage = nodeData.inputs?.systemMessage as string
|
||||
const maxIterations = nodeData.inputs?.maxIterations as string
|
||||
let tools = nodeData.inputs?.tools
|
||||
tools = flatten(tools)
|
||||
const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
|
||||
const inputKey = memory.inputKey ? memory.inputKey : 'input'
|
||||
const prependMessages = options?.prependMessages
|
||||
|
||||
const prompt = ChatPromptTemplate.fromMessages([
|
||||
['ai', systemMessage ? systemMessage : defaultMessage],
|
||||
new MessagesPlaceholder(memoryKey),
|
||||
['human', `{${inputKey}}`],
|
||||
new MessagesPlaceholder('agent_scratchpad')
|
||||
])
|
||||
|
||||
const modelWithFunctions = model.bind({
|
||||
functions: [...tools.map((tool: any) => formatToOpenAIFunction(tool))]
|
||||
})
|
||||
|
||||
const runnableAgent = RunnableSequence.from([
|
||||
{
|
||||
[inputKey]: (i: { input: string; steps: AgentStep[] }) => i.input,
|
||||
agent_scratchpad: (i: { input: string; steps: AgentStep[] }) => formatAgentSteps(i.steps),
|
||||
[memoryKey]: async (_: { input: string; steps: AgentStep[] }) => {
|
||||
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, prependMessages)) as BaseMessage[]
|
||||
return messages ?? []
|
||||
}
|
||||
},
|
||||
prompt,
|
||||
modelWithFunctions,
|
||||
new OpenAIFunctionsAgentOutputParser()
|
||||
])
|
||||
|
||||
const executor = AgentExecutor.fromAgentAndTools({
|
||||
agent: runnableAgent,
|
||||
tools,
|
||||
sessionId: flowObj?.sessionId,
|
||||
chatId: flowObj?.chatId,
|
||||
input: flowObj?.input,
|
||||
returnIntermediateSteps: true,
|
||||
verbose: process.env.DEBUG === 'true' ? true : false,
|
||||
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
|
||||
})
|
||||
|
||||
return executor
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: ConversationalRetrievalAgent_Agents }
|
||||
|
|
@ -1,7 +0,0 @@
|
|||
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M10 6C10 5.44772 10.4477 5 11 5H21C21.5523 5 22 5.44772 22 6V11C22 13.2091 20.2091 15 18 15H14C11.7909 15 10 13.2091 10 11V6Z" stroke="black" stroke-width="2" stroke-linejoin="round"/>
|
||||
<path d="M16 5V3" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<circle cx="14" cy="9" r="1.5" fill="black"/>
|
||||
<circle cx="18" cy="9" r="1.5" fill="black"/>
|
||||
<path d="M26 27C26 22.0294 21.5228 18 16 18C10.4772 18 6 22.0294 6 27" stroke="black" stroke-width="2" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 616 B |
|
|
@ -1 +0,0 @@
|
|||
<svg width="32" height="32" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M5 6H4v19.5h1m8-7.5v3h1m7-11.5V6h1m-5 7.5V10h1" stroke="#000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/><mask id="MistralAI__a" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="5" y="6" width="22" height="20"><path d="M5 6v19.5h5v-8h4V21h4v-3.5h4V25h5V6h-4.5v4H18v3.5h-4v-4h-4V6H5Z" fill="#FD7000"/></mask><g mask="url(#MistralAI__a)"><path fill="#FFCD00" d="M4 6h25v4H4z"/></g><mask id="MistralAI__b" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="5" y="6" width="22" height="20"><path d="M5 6v19.5h5v-8h4V21h4v-3.5h4V25h5V6h-4.5v4H18v3.5h-4v-4h-4V6H5Z" fill="#FD7000"/></mask><g mask="url(#MistralAI__b)"><path fill="#FFA200" d="M4 10h25v4H4z"/></g><mask id="MistralAI__c" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="5" y="6" width="22" height="20"><path d="M5 6v19.5h5v-8h4V21h4v-3.5h4V25h5V6h-4.5v4H18v3.5h-4v-4h-4V6H5Z" fill="#FD7000"/></mask><g mask="url(#MistralAI__c)"><path fill="#FF6E00" d="M4 14h25v4H4z"/></g><mask id="MistralAI__d" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="5" y="6" width="22" height="20"><path d="M5 6v19.5h5v-8h4V21h4v-3.5h4V25h5V6h-4.5v4H18v3.5h-4v-4h-4V6H5Z" fill="#FD7000"/></mask><g mask="url(#MistralAI__d)"><path fill="#FF4A09" d="M4 18h25v4H4z"/></g><mask id="MistralAI__e" style="mask-type:alpha" maskUnits="userSpaceOnUse" x="5" y="6" width="22" height="20"><path d="M5 6v19.5h5v-8h4V21h4v-3.5h4V25h5V6h-4.5v4H18v3.5h-4v-4h-4V6H5Z" fill="#FD7000"/></mask><g mask="url(#MistralAI__e)"><path fill="#FE060F" d="M4 22h25v4H4z"/></g><path d="M21 18v7h1" stroke="#000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/><path d="M5 6v19.5h5v-8h4V21h4v-3.5h4V25h5V6h-4.5v4H18v3.5h-4v-4h-4V6H5Z" stroke="#000" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/></svg>
|
||||
|
Before Width: | Height: | Size: 1.8 KiB |
|
|
@ -1,213 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { BaseMessage } from '@langchain/core/messages'
|
||||
import { ChainValues } from '@langchain/core/utils/types'
|
||||
import { AgentStep } from '@langchain/core/agents'
|
||||
import { RunnableSequence } from '@langchain/core/runnables'
|
||||
import { ChatOpenAI } from '@langchain/openai'
|
||||
import { convertToOpenAITool } from '@langchain/core/utils/function_calling'
|
||||
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
|
||||
import { OpenAIToolsAgentOutputParser } from 'langchain/agents/openai/output_parser'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
import { FlowiseMemory, ICommonObject, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
|
||||
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
|
||||
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
|
||||
import { Moderation, checkInputs, streamResponse } from '../../moderation/Moderation'
|
||||
import { formatResponse } from '../../outputparsers/OutputParserHelpers'
|
||||
|
||||
class MistralAIToolAgent_Agents implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
sessionId?: string
|
||||
badge?: string
|
||||
|
||||
constructor(fields?: { sessionId?: string }) {
|
||||
this.label = 'MistralAI Tool Agent'
|
||||
this.name = 'mistralAIToolAgent'
|
||||
this.version = 1.0
|
||||
this.type = 'AgentExecutor'
|
||||
this.category = 'Agents'
|
||||
this.icon = 'MistralAI.svg'
|
||||
this.badge = 'DEPRECATING'
|
||||
this.description = `Agent that uses MistralAI Function Calling to pick the tools and args to call`
|
||||
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Tools',
|
||||
name: 'tools',
|
||||
type: 'Tool',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Memory',
|
||||
name: 'memory',
|
||||
type: 'BaseChatMemory'
|
||||
},
|
||||
{
|
||||
label: 'MistralAI Chat Model',
|
||||
name: 'model',
|
||||
type: 'BaseChatModel'
|
||||
},
|
||||
{
|
||||
label: 'System Message',
|
||||
name: 'systemMessage',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Input Moderation',
|
||||
description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',
|
||||
name: 'inputModeration',
|
||||
type: 'Moderation',
|
||||
optional: true,
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Max Iterations',
|
||||
name: 'maxIterations',
|
||||
type: 'number',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
}
|
||||
]
|
||||
this.sessionId = fields?.sessionId
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
|
||||
return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
|
||||
const memory = nodeData.inputs?.memory as FlowiseMemory
|
||||
const moderations = nodeData.inputs?.inputModeration as Moderation[]
|
||||
|
||||
if (moderations && moderations.length > 0) {
|
||||
try {
|
||||
// Use the output of the moderation chain as input for the OpenAI Function Agent
|
||||
input = await checkInputs(moderations, input)
|
||||
} catch (e) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 500))
|
||||
streamResponse(options.socketIO && options.socketIOClientId, e.message, options.socketIO, options.socketIOClientId)
|
||||
return formatResponse(e.message)
|
||||
}
|
||||
}
|
||||
|
||||
const executor = prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
|
||||
|
||||
const loggerHandler = new ConsoleCallbackHandler(options.logger)
|
||||
const callbacks = await additionalCallbacks(nodeData, options)
|
||||
|
||||
let res: ChainValues = {}
|
||||
let sourceDocuments: ICommonObject[] = []
|
||||
let usedTools: IUsedTool[] = []
|
||||
|
||||
if (options.socketIO && options.socketIOClientId) {
|
||||
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
|
||||
res = await executor.invoke({ input }, { callbacks: [loggerHandler, handler, ...callbacks] })
|
||||
if (res.sourceDocuments) {
|
||||
options.socketIO.to(options.socketIOClientId).emit('sourceDocuments', flatten(res.sourceDocuments))
|
||||
sourceDocuments = res.sourceDocuments
|
||||
}
|
||||
if (res.usedTools) {
|
||||
options.socketIO.to(options.socketIOClientId).emit('usedTools', res.usedTools)
|
||||
usedTools = res.usedTools
|
||||
}
|
||||
} else {
|
||||
res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
|
||||
if (res.sourceDocuments) {
|
||||
sourceDocuments = res.sourceDocuments
|
||||
}
|
||||
if (res.usedTools) {
|
||||
usedTools = res.usedTools
|
||||
}
|
||||
}
|
||||
|
||||
await memory.addChatMessages(
|
||||
[
|
||||
{
|
||||
text: input,
|
||||
type: 'userMessage'
|
||||
},
|
||||
{
|
||||
text: res?.output,
|
||||
type: 'apiMessage'
|
||||
}
|
||||
],
|
||||
this.sessionId
|
||||
)
|
||||
|
||||
let finalRes = res?.output
|
||||
|
||||
if (sourceDocuments.length || usedTools.length) {
|
||||
finalRes = { text: res?.output }
|
||||
if (sourceDocuments.length) {
|
||||
finalRes.sourceDocuments = flatten(sourceDocuments)
|
||||
}
|
||||
if (usedTools.length) {
|
||||
finalRes.usedTools = usedTools
|
||||
}
|
||||
return finalRes
|
||||
}
|
||||
|
||||
return finalRes
|
||||
}
|
||||
}
|
||||
|
||||
const prepareAgent = (nodeData: INodeData, options: ICommonObject, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
|
||||
const model = nodeData.inputs?.model as ChatOpenAI
|
||||
const memory = nodeData.inputs?.memory as FlowiseMemory
|
||||
const maxIterations = nodeData.inputs?.maxIterations as string
|
||||
const systemMessage = nodeData.inputs?.systemMessage as string
|
||||
let tools = nodeData.inputs?.tools
|
||||
tools = flatten(tools)
|
||||
const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
|
||||
const inputKey = memory.inputKey ? memory.inputKey : 'input'
|
||||
const prependMessages = options?.prependMessages
|
||||
|
||||
const prompt = ChatPromptTemplate.fromMessages([
|
||||
['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],
|
||||
new MessagesPlaceholder(memoryKey),
|
||||
['human', `{${inputKey}}`],
|
||||
new MessagesPlaceholder('agent_scratchpad')
|
||||
])
|
||||
|
||||
const llmWithTools = model.bind({
|
||||
tools: tools.map(convertToOpenAITool)
|
||||
})
|
||||
|
||||
const runnableAgent = RunnableSequence.from([
|
||||
{
|
||||
[inputKey]: (i: { input: string; steps: AgentStep[] }) => i.input,
|
||||
agent_scratchpad: (i: { input: string; steps: AgentStep[] }) => formatAgentSteps(i.steps),
|
||||
[memoryKey]: async (_: { input: string; steps: AgentStep[] }) => {
|
||||
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, prependMessages)) as BaseMessage[]
|
||||
return messages ?? []
|
||||
}
|
||||
},
|
||||
prompt,
|
||||
llmWithTools,
|
||||
new OpenAIToolsAgentOutputParser()
|
||||
])
|
||||
|
||||
const executor = AgentExecutor.fromAgentAndTools({
|
||||
agent: runnableAgent,
|
||||
tools,
|
||||
sessionId: flowObj?.sessionId,
|
||||
chatId: flowObj?.chatId,
|
||||
input: flowObj?.input,
|
||||
verbose: process.env.DEBUG === 'true' ? true : false,
|
||||
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
|
||||
})
|
||||
|
||||
return executor
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: MistralAIToolAgent_Agents }
|
||||
|
|
@ -1,212 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { BaseMessage } from '@langchain/core/messages'
|
||||
import { ChainValues } from '@langchain/core/utils/types'
|
||||
import { AgentStep } from '@langchain/core/agents'
|
||||
import { RunnableSequence } from '@langchain/core/runnables'
|
||||
import { ChatOpenAI, formatToOpenAIFunction } from '@langchain/openai'
|
||||
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
|
||||
import { OpenAIFunctionsAgentOutputParser } from 'langchain/agents/openai/output_parser'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
import { FlowiseMemory, ICommonObject, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
|
||||
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
|
||||
import { AgentExecutor, formatAgentSteps } from '../../../src/agents'
|
||||
import { Moderation, checkInputs } from '../../moderation/Moderation'
|
||||
import { formatResponse } from '../../outputparsers/OutputParserHelpers'
|
||||
|
||||
class OpenAIFunctionAgent_Agents implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
badge?: string
|
||||
sessionId?: string
|
||||
|
||||
constructor(fields?: { sessionId?: string }) {
|
||||
this.label = 'OpenAI Function Agent'
|
||||
this.name = 'openAIFunctionAgent'
|
||||
this.version = 4.0
|
||||
this.type = 'AgentExecutor'
|
||||
this.category = 'Agents'
|
||||
this.icon = 'function.svg'
|
||||
this.description = `An agent that uses OpenAI Function Calling to pick the tool and args to call`
|
||||
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Allowed Tools',
|
||||
name: 'tools',
|
||||
type: 'Tool',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Memory',
|
||||
name: 'memory',
|
||||
type: 'BaseChatMemory'
|
||||
},
|
||||
{
|
||||
label: 'OpenAI/Azure Chat Model',
|
||||
name: 'model',
|
||||
type: 'BaseChatModel'
|
||||
},
|
||||
{
|
||||
label: 'System Message',
|
||||
name: 'systemMessage',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Input Moderation',
|
||||
description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',
|
||||
name: 'inputModeration',
|
||||
type: 'Moderation',
|
||||
optional: true,
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Max Iterations',
|
||||
name: 'maxIterations',
|
||||
type: 'number',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
}
|
||||
]
|
||||
this.sessionId = fields?.sessionId
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
|
||||
return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
|
||||
const memory = nodeData.inputs?.memory as FlowiseMemory
|
||||
const moderations = nodeData.inputs?.inputModeration as Moderation[]
|
||||
|
||||
if (moderations && moderations.length > 0) {
|
||||
try {
|
||||
// Use the output of the moderation chain as input for the OpenAI Function Agent
|
||||
input = await checkInputs(moderations, input)
|
||||
} catch (e) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 500))
|
||||
//streamResponse(options.socketIO && options.socketIOClientId, e.message, options.socketIO, options.socketIOClientId)
|
||||
return formatResponse(e.message)
|
||||
}
|
||||
}
|
||||
|
||||
const executor = prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
|
||||
|
||||
const loggerHandler = new ConsoleCallbackHandler(options.logger)
|
||||
const callbacks = await additionalCallbacks(nodeData, options)
|
||||
|
||||
let res: ChainValues = {}
|
||||
let sourceDocuments: ICommonObject[] = []
|
||||
let usedTools: IUsedTool[] = []
|
||||
|
||||
if (options.socketIO && options.socketIOClientId) {
|
||||
const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
|
||||
res = await executor.invoke({ input }, { callbacks: [loggerHandler, handler, ...callbacks] })
|
||||
if (res.sourceDocuments) {
|
||||
options.socketIO.to(options.socketIOClientId).emit('sourceDocuments', flatten(res.sourceDocuments))
|
||||
sourceDocuments = res.sourceDocuments
|
||||
}
|
||||
if (res.usedTools) {
|
||||
options.socketIO.to(options.socketIOClientId).emit('usedTools', res.usedTools)
|
||||
usedTools = res.usedTools
|
||||
}
|
||||
} else {
|
||||
res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
|
||||
if (res.sourceDocuments) {
|
||||
sourceDocuments = res.sourceDocuments
|
||||
}
|
||||
if (res.usedTools) {
|
||||
usedTools = res.usedTools
|
||||
}
|
||||
}
|
||||
|
||||
await memory.addChatMessages(
|
||||
[
|
||||
{
|
||||
text: input,
|
||||
type: 'userMessage'
|
||||
},
|
||||
{
|
||||
text: res?.output,
|
||||
type: 'apiMessage'
|
||||
}
|
||||
],
|
||||
this.sessionId
|
||||
)
|
||||
|
||||
let finalRes = res?.output
|
||||
|
||||
if (sourceDocuments.length || usedTools.length) {
|
||||
finalRes = { text: res?.output }
|
||||
if (sourceDocuments.length) {
|
||||
finalRes.sourceDocuments = flatten(sourceDocuments)
|
||||
}
|
||||
if (usedTools.length) {
|
||||
finalRes.usedTools = usedTools
|
||||
}
|
||||
return finalRes
|
||||
}
|
||||
|
||||
return finalRes
|
||||
}
|
||||
}
|
||||
|
||||
const prepareAgent = (nodeData: INodeData, options: ICommonObject, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
|
||||
const model = nodeData.inputs?.model as ChatOpenAI
|
||||
const maxIterations = nodeData.inputs?.maxIterations as string
|
||||
const memory = nodeData.inputs?.memory as FlowiseMemory
|
||||
const systemMessage = nodeData.inputs?.systemMessage as string
|
||||
let tools = nodeData.inputs?.tools
|
||||
tools = flatten(tools)
|
||||
const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
|
||||
const inputKey = memory.inputKey ? memory.inputKey : 'input'
|
||||
const prependMessages = options?.prependMessages
|
||||
|
||||
const prompt = ChatPromptTemplate.fromMessages([
|
||||
['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],
|
||||
new MessagesPlaceholder(memoryKey),
|
||||
['human', `{${inputKey}}`],
|
||||
new MessagesPlaceholder('agent_scratchpad')
|
||||
])
|
||||
|
||||
const modelWithFunctions = model.bind({
|
||||
functions: [...tools.map((tool: any) => formatToOpenAIFunction(tool))]
|
||||
})
|
||||
|
||||
const runnableAgent = RunnableSequence.from([
|
||||
{
|
||||
[inputKey]: (i: { input: string; steps: AgentStep[] }) => i.input,
|
||||
agent_scratchpad: (i: { input: string; steps: AgentStep[] }) => formatAgentSteps(i.steps),
|
||||
[memoryKey]: async (_: { input: string; steps: AgentStep[] }) => {
|
||||
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, prependMessages)) as BaseMessage[]
|
||||
return messages ?? []
|
||||
}
|
||||
},
|
||||
prompt,
|
||||
modelWithFunctions,
|
||||
new OpenAIFunctionsAgentOutputParser()
|
||||
])
|
||||
|
||||
const executor = AgentExecutor.fromAgentAndTools({
|
||||
agent: runnableAgent,
|
||||
tools,
|
||||
sessionId: flowObj?.sessionId,
|
||||
chatId: flowObj?.chatId,
|
||||
input: flowObj?.input,
|
||||
verbose: process.env.DEBUG === 'true' ? true : false,
|
||||
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
|
||||
})
|
||||
|
||||
return executor
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: OpenAIFunctionAgent_Agents }
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M16 12.6108L22 15.9608" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7.17701 19.5848C6.49568 20.4069 6.12505 21.4424 6.12993 22.5101C6.13481 23.5779 6.51489 24.6099 7.2037 25.4258C7.89252 26.2416 8.84622 26.7893 9.89802 26.9732C10.9498 27.157 12.0328 26.9653 12.9575 26.4314L15.4787 24.9657M18.6002 14.106V19.5848" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M8.19877 9.98459C6.39026 9.67775 4.57524 10.4982 3.60403 12.1806C3.00524 13.2178 2.84295 14.4504 3.15284 15.6073C3.46273 16.7642 4.21943 17.7507 5.25652 18.3498L10.3049 21.3269C10.6109 21.5074 10.9898 21.5119 11.3001 21.3388L18.6 17.2655" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M17.0172 6.06585C16.6456 5.06522 15.9342 4.227 15.0072 3.6977C14.0803 3.1684 12.9969 2.98168 11.9462 3.17018C10.8956 3.35869 9.94464 3.91042 9.25954 4.72895C8.57444 5.54747 8.19879 6.58074 8.19824 7.64814V13.6575C8.19824 14.0154 8.38951 14.346 8.69977 14.5244L15.9992 18.7215" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M24.8216 11.7476C25.5029 10.9255 25.8735 9.89004 25.8687 8.8223C25.8638 7.75457 25.4837 6.72253 24.7949 5.90667C24.1061 5.09082 23.1524 4.54308 22.1006 4.35924C21.0488 4.17541 19.9658 4.36718 19.0411 4.90101L13.8942 7.90613C13.5872 8.08539 13.3984 8.41418 13.3984 8.76971V17.2265" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M28.3944 19.0635C28.9932 18.0263 29.1555 16.7937 28.8456 15.6368C28.5357 14.4799 27.779 13.4934 26.7419 12.8943L21.6409 9.91752C21.3316 9.73703 20.9494 9.7357 20.6388 9.91405L13.3984 14.0723" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M18 28.9997H18.8071C19.6909 28.9997 20.4526 28.3921 20.6297 27.546L21.991 21.4537C22.1681 20.6076 22.9299 20 23.8136 20H24.6207M20.0929 22.7023H23.8136M24 25.0214H24.5014C24.8438 25.0214 25.1586 25.2052 25.3207 25.5L27.3429 28.5213C27.5051 28.8161 27.8198 29 28.1622 29H28.6997M24.049 29C24.6261 29 25.1609 28.7041 25.4578 28.2205L27.2424 25.8009C27.5393 25.3173 28.0741 25.0214 28.6512 25.0214" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 2.3 KiB |
|
|
@ -1,211 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { BaseMessage } from '@langchain/core/messages'
|
||||
import { ChainValues } from '@langchain/core/utils/types'
|
||||
import { RunnableSequence } from '@langchain/core/runnables'
|
||||
import { ChatOpenAI } from '@langchain/openai'
|
||||
import { ChatPromptTemplate, MessagesPlaceholder } from '@langchain/core/prompts'
|
||||
import { convertToOpenAITool } from '@langchain/core/utils/function_calling'
|
||||
import { formatToOpenAIToolMessages } from 'langchain/agents/format_scratchpad/openai_tools'
|
||||
import { OpenAIToolsAgentOutputParser, type ToolsAgentStep } from 'langchain/agents/openai/output_parser'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
import { FlowiseMemory, ICommonObject, INode, INodeData, INodeParams, IUsedTool } from '../../../src/Interface'
|
||||
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
|
||||
import { AgentExecutor } from '../../../src/agents'
|
||||
import { Moderation, checkInputs } from '../../moderation/Moderation'
|
||||
import { formatResponse } from '../../outputparsers/OutputParserHelpers'
|
||||
|
||||
/**
 * Flowise node: an agent that uses OpenAI tool calling to pick which tools
 * to run and with what arguments. Wraps a LangChain AgentExecutor built by
 * `prepareAgent` below. The `badge = 'DEPRECATING'` flags it for replacement
 * in the node palette.
 */
class OpenAIToolAgent_Agents implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    // Conversation/session identifier carried from the flow, if any
    sessionId?: string
    // UI badge text shown on the node card
    badge?: string

    constructor(fields?: { sessionId?: string }) {
        this.label = 'OpenAI Tool Agent'
        this.name = 'openAIToolAgent'
        this.version = 1.0
        this.type = 'AgentExecutor'
        this.category = 'Agents'
        this.icon = 'function.svg'
        this.description = `Agent that uses OpenAI Function Calling to pick the tools and args to call`
        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
        this.badge = 'DEPRECATING'
        this.inputs = [
            {
                label: 'Tools',
                name: 'tools',
                type: 'Tool',
                list: true
            },
            {
                label: 'Memory',
                name: 'memory',
                type: 'BaseChatMemory'
            },
            {
                label: 'OpenAI/Azure Chat Model',
                name: 'model',
                type: 'BaseChatModel'
            },
            {
                label: 'System Message',
                name: 'systemMessage',
                type: 'string',
                rows: 4,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Input Moderation',
                description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',
                name: 'inputModeration',
                type: 'Moderation',
                optional: true,
                list: true
            },
            {
                label: 'Max Iterations',
                name: 'maxIterations',
                type: 'number',
                optional: true,
                additionalParams: true
            }
        ]
        this.sessionId = fields?.sessionId
    }

    /**
     * Builds and returns the AgentExecutor without running it.
     * Called when the node is initialised as part of a larger chain.
     */
    async init(nodeData: INodeData, input: string, options: ICommonObject): Promise<any> {
        return prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })
    }

    /**
     * Executes the agent for one user message.
     *
     * Flow: optional input moderation -> build executor -> invoke (streaming
     * over socket.io when a client id is present, plain otherwise) -> persist
     * the user/assistant turn to memory -> return the output, bundling
     * sourceDocuments/usedTools alongside the text when present.
     *
     * Returns either the plain output string or an object
     * `{ text, sourceDocuments?, usedTools? }`.
     */
    async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | ICommonObject> {
        const memory = nodeData.inputs?.memory as FlowiseMemory
        const moderations = nodeData.inputs?.inputModeration as Moderation[]

        if (moderations && moderations.length > 0) {
            try {
                // Use the output of the moderation chain as input for the OpenAI Function Agent
                input = await checkInputs(moderations, input)
            } catch (e) {
                // Short delay before returning the moderation failure message
                // (presumably to let any in-flight socket streaming settle — TODO confirm)
                await new Promise((resolve) => setTimeout(resolve, 500))
                //streamResponse(options.socketIO && options.socketIOClientId, e.message, options.socketIO, options.socketIOClientId)
                return formatResponse(e.message)
            }
        }

        const executor = prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input })

        const loggerHandler = new ConsoleCallbackHandler(options.logger)
        const callbacks = await additionalCallbacks(nodeData, options)

        let res: ChainValues = {}
        let sourceDocuments: ICommonObject[] = []
        let usedTools: IUsedTool[] = []

        if (options.socketIO && options.socketIOClientId) {
            // Streaming path: tokens and metadata are emitted to the websocket client
            const handler = new CustomChainHandler(options.socketIO, options.socketIOClientId)
            res = await executor.invoke({ input }, { callbacks: [loggerHandler, handler, ...callbacks] })
            if (res.sourceDocuments) {
                options.socketIO.to(options.socketIOClientId).emit('sourceDocuments', flatten(res.sourceDocuments))
                sourceDocuments = res.sourceDocuments
            }
            if (res.usedTools) {
                options.socketIO.to(options.socketIOClientId).emit('usedTools', res.usedTools)
                usedTools = res.usedTools
            }
        } else {
            // Non-streaming path: same invoke, no socket emissions
            res = await executor.invoke({ input }, { callbacks: [loggerHandler, ...callbacks] })
            if (res.sourceDocuments) {
                sourceDocuments = res.sourceDocuments
            }
            if (res.usedTools) {
                usedTools = res.usedTools
            }
        }

        // Persist this turn (user message + agent reply) into the attached memory
        await memory.addChatMessages(
            [
                {
                    text: input,
                    type: 'userMessage'
                },
                {
                    text: res?.output,
                    type: 'apiMessage'
                }
            ],
            this.sessionId
        )

        let finalRes = res?.output

        // When extra metadata exists, wrap the plain output into an object payload
        if (sourceDocuments.length || usedTools.length) {
            finalRes = { text: res?.output }
            if (sourceDocuments.length) {
                finalRes.sourceDocuments = flatten(sourceDocuments)
            }
            if (usedTools.length) {
                finalRes.usedTools = usedTools
            }
            return finalRes
        }

        return finalRes
    }
}
|
||||
|
||||
const prepareAgent = (nodeData: INodeData, options: ICommonObject, flowObj: { sessionId?: string; chatId?: string; input?: string }) => {
|
||||
const model = nodeData.inputs?.model as ChatOpenAI
|
||||
const maxIterations = nodeData.inputs?.maxIterations as string
|
||||
const memory = nodeData.inputs?.memory as FlowiseMemory
|
||||
const systemMessage = nodeData.inputs?.systemMessage as string
|
||||
let tools = nodeData.inputs?.tools
|
||||
tools = flatten(tools)
|
||||
const memoryKey = memory.memoryKey ? memory.memoryKey : 'chat_history'
|
||||
const inputKey = memory.inputKey ? memory.inputKey : 'input'
|
||||
const prependMessages = options?.prependMessages
|
||||
|
||||
const prompt = ChatPromptTemplate.fromMessages([
|
||||
['system', systemMessage ? systemMessage : `You are a helpful AI assistant.`],
|
||||
new MessagesPlaceholder(memoryKey),
|
||||
['human', `{${inputKey}}`],
|
||||
new MessagesPlaceholder('agent_scratchpad')
|
||||
])
|
||||
|
||||
const modelWithTools = model.bind({ tools: tools.map(convertToOpenAITool) })
|
||||
|
||||
const runnableAgent = RunnableSequence.from([
|
||||
{
|
||||
[inputKey]: (i: { input: string; steps: ToolsAgentStep[] }) => i.input,
|
||||
agent_scratchpad: (i: { input: string; steps: ToolsAgentStep[] }) => formatToOpenAIToolMessages(i.steps),
|
||||
[memoryKey]: async (_: { input: string; steps: ToolsAgentStep[] }) => {
|
||||
const messages = (await memory.getChatMessages(flowObj?.sessionId, true, prependMessages)) as BaseMessage[]
|
||||
return messages ?? []
|
||||
}
|
||||
},
|
||||
prompt,
|
||||
modelWithTools,
|
||||
new OpenAIToolsAgentOutputParser()
|
||||
])
|
||||
|
||||
const executor = AgentExecutor.fromAgentAndTools({
|
||||
agent: runnableAgent,
|
||||
tools,
|
||||
sessionId: flowObj?.sessionId,
|
||||
chatId: flowObj?.chatId,
|
||||
input: flowObj?.input,
|
||||
verbose: process.env.DEBUG === 'true' ? true : false,
|
||||
maxIterations: maxIterations ? parseFloat(maxIterations) : undefined
|
||||
})
|
||||
|
||||
return executor
|
||||
}
|
||||
|
||||
// Flowise loads each node file with require() and reads `nodeClass` to register the node
module.exports = { nodeClass: OpenAIToolAgent_Agents }
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M16 12.6108L22 15.9608" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7.17701 19.5848C6.49568 20.4069 6.12505 21.4424 6.12993 22.5101C6.13481 23.5779 6.51489 24.6099 7.2037 25.4258C7.89252 26.2416 8.84622 26.7893 9.89802 26.9732C10.9498 27.157 12.0328 26.9653 12.9575 26.4314L15.4787 24.9657M18.6002 14.106V19.5848" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M8.19877 9.98459C6.39026 9.67775 4.57524 10.4982 3.60403 12.1806C3.00524 13.2178 2.84295 14.4504 3.15284 15.6073C3.46273 16.7642 4.21943 17.7507 5.25652 18.3498L10.3049 21.3269C10.6109 21.5074 10.9898 21.5119 11.3001 21.3388L18.6 17.2655" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M17.0172 6.06585C16.6456 5.06522 15.9342 4.227 15.0072 3.6977C14.0803 3.1684 12.9969 2.98168 11.9462 3.17018C10.8956 3.35869 9.94464 3.91042 9.25954 4.72895C8.57444 5.54747 8.19879 6.58074 8.19824 7.64814V13.6575C8.19824 14.0154 8.38951 14.346 8.69977 14.5244L15.9992 18.7215" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M24.8216 11.7476C25.5029 10.9255 25.8735 9.89004 25.8687 8.8223C25.8638 7.75457 25.4837 6.72253 24.7949 5.90667C24.1061 5.09082 23.1524 4.54308 22.1006 4.35924C21.0488 4.17541 19.9658 4.36718 19.0411 4.90101L13.8942 7.90613C13.5872 8.08539 13.3984 8.41418 13.3984 8.76971V17.2265" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M28.3944 19.0635C28.9932 18.0263 29.1555 16.7937 28.8456 15.6368C28.5357 14.4799 27.779 13.4934 26.7419 12.8943L21.6409 9.91752C21.3316 9.73703 20.9494 9.7357 20.6388 9.91405L13.3984 14.0723" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M18 28.9997H18.8071C19.6909 28.9997 20.4526 28.3921 20.6297 27.546L21.991 21.4537C22.1681 20.6076 22.9299 20 23.8136 20H24.6207M20.0929 22.7023H23.8136M24 25.0214H24.5014C24.8438 25.0214 25.1586 25.2052 25.3207 25.5L27.3429 28.5213C27.5051 28.8161 27.8198 29 28.1622 29H28.6997M24.049 29C24.6261 29 25.1609 28.7041 25.4578 28.2205L27.2424 25.8009C27.5393 25.3173 28.0741 25.0214 28.6512 25.0214" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 2.3 KiB |
|
|
@ -36,7 +36,6 @@ class ToolAgent_Agents implements INode {
|
|||
this.icon = 'toolAgent.png'
|
||||
this.description = `Agent that uses Function Calling to pick the tools and args to call`
|
||||
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
|
||||
this.badge = 'NEW'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Tools',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,80 @@
|
|||
import { BaseCache } from '@langchain/core/caches'
|
||||
import { ChatBaiduWenxin } from '@langchain/community/chat_models/baiduwenxin'
|
||||
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class ChatBaiduWenxin_ChatModels implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
description: string
|
||||
baseClasses: string[]
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'ChatBaiduWenxin'
|
||||
this.name = 'chatBaiduWenxin'
|
||||
this.version = 1.0
|
||||
this.type = 'ChatBaiduWenxin'
|
||||
this.icon = 'baiduwenxin.svg'
|
||||
this.category = 'Chat Models'
|
||||
this.description = 'Wrapper around BaiduWenxin Chat Endpoints'
|
||||
this.baseClasses = [this.type, ...getBaseClasses(ChatBaiduWenxin)]
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['baiduApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Cache',
|
||||
name: 'cache',
|
||||
type: 'BaseCache',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Model',
|
||||
name: 'modelName',
|
||||
type: 'string',
|
||||
placeholder: 'ERNIE-Bot-turbo'
|
||||
},
|
||||
{
|
||||
label: 'Temperature',
|
||||
name: 'temperature',
|
||||
type: 'number',
|
||||
step: 0.1,
|
||||
default: 0.9,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const cache = nodeData.inputs?.cache as BaseCache
|
||||
const temperature = nodeData.inputs?.temperature as string
|
||||
const modelName = nodeData.inputs?.modelName as string
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const baiduApiKey = getCredentialParam('baiduApiKey', credentialData, nodeData)
|
||||
const baiduSecretKey = getCredentialParam('baiduSecretKey', credentialData, nodeData)
|
||||
|
||||
const obj: Partial<ChatBaiduWenxin> = {
|
||||
streaming: true,
|
||||
baiduApiKey,
|
||||
baiduSecretKey,
|
||||
modelName,
|
||||
temperature: temperature ? parseFloat(temperature) : undefined
|
||||
}
|
||||
if (cache) obj.cache = cache
|
||||
|
||||
const model = new ChatBaiduWenxin(obj)
|
||||
return model
|
||||
}
|
||||
}
|
||||
|
||||
// Flowise loads each node file with require() and reads `nodeClass` to register the node
module.exports = { nodeClass: ChatBaiduWenxin_ChatModels }
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
<?xml version="1.0" encoding="utf-8"?><!-- Uploaded to: SVG Repo, www.svgrepo.com, Generator: SVG Repo Mixer Tools -->
|
||||
<svg xmlns="http://www.w3.org/2000/svg"
|
||||
aria-label="Baidu" role="img"
|
||||
viewBox="0 0 512 512"><rect
|
||||
width="512" height="512"
|
||||
rx="15%"
|
||||
fill="#ffffff"/><path d="m131 251c41-9 35-58 34-68-2-17-21-45-48-43-33 3-37 50-37 50-5 22 10 70 51 61m76-82c22 0 40-26 40-58s-18-58-40-58c-23 0-41 26-41 58s18 58 41 58m96 4c31 4 50-28 54-53 4-24-16-52-37-57s-48 29-50 52c-3 27 3 54 33 58m120 41c0-12-10-47-46-47s-41 33-41 57c0 22 2 53 47 52s40-51 40-62m-46 102s-46-36-74-75c-36-57-89-34-106-5-18 29-45 48-49 53-4 4-56 33-44 84 11 52 52 51 52 51s30 3 65-5 65 2 65 2 81 27 104-25c22-53-13-80-13-80" fill="#2319dc"/><path d="m214 266v34h-28s-29 3-39 35c-3 21 4 34 5 36 1 3 10 19 33 23h53v-128zm-1 107h-21s-15-1-19-18c-3-7 0-16 1-20 1-3 6-11 17-14h22zm38-70v68s1 17 24 23h61v-91h-26v68h-25s-8-1-10-7v-61z" fill="#ffffff"/></svg>
|
||||
|
After Width: | Height: | Size: 924 B |
|
|
@ -552,6 +552,13 @@ function zodToGeminiParameters(zodObj: any) {
|
|||
const jsonSchema: any = zodToJsonSchema(zodObj)
|
||||
// eslint-disable-next-line unused-imports/no-unused-vars
|
||||
const { $schema, additionalProperties, ...rest } = jsonSchema
|
||||
if (rest.properties) {
|
||||
Object.keys(rest.properties).forEach((key) => {
|
||||
if (rest.properties[key].enum?.length) {
|
||||
rest.properties[key] = { type: 'string', format: 'enum', enum: rest.properties[key].enum }
|
||||
}
|
||||
})
|
||||
}
|
||||
return rest
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@ class ChatHuggingFace_ChatModels implements INode {
|
|||
constructor() {
|
||||
this.label = 'ChatHuggingFace'
|
||||
this.name = 'chatHuggingFace'
|
||||
this.version = 2.0
|
||||
this.version = 3.0
|
||||
this.type = 'ChatHuggingFace'
|
||||
this.icon = 'HuggingFace.svg'
|
||||
this.category = 'Chat Models'
|
||||
|
|
@ -96,6 +96,16 @@ class ChatHuggingFace_ChatModels implements INode {
|
|||
description: 'Frequency Penalty parameter may not apply to certain model. Please check available model parameters',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Stop Sequence',
|
||||
name: 'stop',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
placeholder: 'AI assistant:',
|
||||
description: 'Sets the stop sequences to use. Use comma to seperate different sequences.',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -109,6 +119,7 @@ class ChatHuggingFace_ChatModels implements INode {
|
|||
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
|
||||
const endpoint = nodeData.inputs?.endpoint as string
|
||||
const cache = nodeData.inputs?.cache as BaseCache
|
||||
const stop = nodeData.inputs?.stop as string
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const huggingFaceApiKey = getCredentialParam('huggingFaceApiKey', credentialData, nodeData)
|
||||
|
|
@ -123,7 +134,11 @@ class ChatHuggingFace_ChatModels implements INode {
|
|||
if (topP) obj.topP = parseFloat(topP)
|
||||
if (hfTopK) obj.topK = parseFloat(hfTopK)
|
||||
if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
|
||||
if (endpoint) obj.endpoint = endpoint
|
||||
if (endpoint) obj.endpointUrl = endpoint
|
||||
if (stop) {
|
||||
const stopSequences = stop.split(',')
|
||||
obj.stopSequences = stopSequences
|
||||
}
|
||||
|
||||
const huggingFace = new HuggingFaceInference(obj)
|
||||
if (cache) huggingFace.cache = cache
|
||||
|
|
|
|||
|
|
@ -1,32 +1,19 @@
|
|||
import { LLM, BaseLLMParams } from '@langchain/core/language_models/llms'
|
||||
import { getEnvironmentVariable } from '../../../src/utils'
|
||||
import { GenerationChunk } from '@langchain/core/outputs'
|
||||
import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'
|
||||
|
||||
/**
 * Constructor options for the HuggingFaceInference LLM wrapper.
 */
export interface HFInput {
    /** Model to use */
    model: string

    /** Sampling temperature to use */
    temperature?: number

    /**
     * Maximum number of tokens to generate in the completion.
     */
    maxTokens?: number

    /** Sequences at which text generation should stop. */
    stopSequences?: string[]

    /** Total probability mass of tokens to consider at each step */
    topP?: number

    /** Integer to define the top tokens considered within the sample operation to create new text. */
    topK?: number

    /** Penalizes repeated tokens according to frequency */
    frequencyPenalty?: number

    /** API key to use. */
    apiKey?: string

    /** Private endpoint to use. */
    endpoint?: string

    /** URL of a private inference endpoint; when set, requests are routed through it instead of the public model API. */
    endpointUrl?: string

    /** Credentials option forwarded to the HfInference client — TODO confirm accepted values against @huggingface/inference docs. */
    includeCredentials?: string | boolean
}
|
||||
|
||||
export class HuggingFaceInference extends LLM implements HFInput {
|
||||
|
|
@ -40,6 +27,8 @@ export class HuggingFaceInference extends LLM implements HFInput {
|
|||
|
||||
temperature: number | undefined = undefined
|
||||
|
||||
stopSequences: string[] | undefined = undefined
|
||||
|
||||
maxTokens: number | undefined = undefined
|
||||
|
||||
topP: number | undefined = undefined
|
||||
|
|
@ -50,7 +39,9 @@ export class HuggingFaceInference extends LLM implements HFInput {
|
|||
|
||||
apiKey: string | undefined = undefined
|
||||
|
||||
endpoint: string | undefined = undefined
|
||||
endpointUrl: string | undefined = undefined
|
||||
|
||||
includeCredentials: string | boolean | undefined = undefined
|
||||
|
||||
constructor(fields?: Partial<HFInput> & BaseLLMParams) {
|
||||
super(fields ?? {})
|
||||
|
|
@ -58,11 +49,13 @@ export class HuggingFaceInference extends LLM implements HFInput {
|
|||
this.model = fields?.model ?? this.model
|
||||
this.temperature = fields?.temperature ?? this.temperature
|
||||
this.maxTokens = fields?.maxTokens ?? this.maxTokens
|
||||
this.stopSequences = fields?.stopSequences ?? this.stopSequences
|
||||
this.topP = fields?.topP ?? this.topP
|
||||
this.topK = fields?.topK ?? this.topK
|
||||
this.frequencyPenalty = fields?.frequencyPenalty ?? this.frequencyPenalty
|
||||
this.endpoint = fields?.endpoint ?? ''
|
||||
this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY')
|
||||
this.endpointUrl = fields?.endpointUrl
|
||||
this.includeCredentials = fields?.includeCredentials
|
||||
if (!this.apiKey) {
|
||||
throw new Error(
|
||||
'Please set an API key for HuggingFace Hub in the environment variable HUGGINGFACEHUB_API_KEY or in the apiKey field of the HuggingFaceInference constructor.'
|
||||
|
|
@ -74,31 +67,65 @@ export class HuggingFaceInference extends LLM implements HFInput {
|
|||
return 'hf'
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
|
||||
const { HfInference } = await HuggingFaceInference.imports()
|
||||
const hf = new HfInference(this.apiKey)
|
||||
const obj: any = {
|
||||
invocationParams(options?: this['ParsedCallOptions']) {
|
||||
return {
|
||||
model: this.model,
|
||||
parameters: {
|
||||
// make it behave similar to openai, returning only the generated text
|
||||
return_full_text: false,
|
||||
temperature: this.temperature,
|
||||
max_new_tokens: this.maxTokens,
|
||||
stop: options?.stop ?? this.stopSequences,
|
||||
top_p: this.topP,
|
||||
top_k: this.topK,
|
||||
repetition_penalty: this.frequencyPenalty
|
||||
},
|
||||
inputs: prompt
|
||||
}
|
||||
}
|
||||
if (this.endpoint) {
|
||||
hf.endpoint(this.endpoint)
|
||||
} else {
|
||||
obj.model = this.model
|
||||
}
|
||||
|
||||
async *_streamResponseChunks(
|
||||
prompt: string,
|
||||
options: this['ParsedCallOptions'],
|
||||
runManager?: CallbackManagerForLLMRun
|
||||
): AsyncGenerator<GenerationChunk> {
|
||||
const hfi = await this._prepareHFInference()
|
||||
const stream = await this.caller.call(async () =>
|
||||
hfi.textGenerationStream({
|
||||
...this.invocationParams(options),
|
||||
inputs: prompt
|
||||
})
|
||||
)
|
||||
for await (const chunk of stream) {
|
||||
const token = chunk.token.text
|
||||
yield new GenerationChunk({ text: token, generationInfo: chunk })
|
||||
await runManager?.handleLLMNewToken(token ?? '')
|
||||
|
||||
// stream is done
|
||||
if (chunk.generated_text)
|
||||
yield new GenerationChunk({
|
||||
text: '',
|
||||
generationInfo: { finished: true }
|
||||
})
|
||||
}
|
||||
const res = await this.caller.callWithOptions({ signal: options.signal }, hf.textGeneration.bind(hf), obj)
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
|
||||
const hfi = await this._prepareHFInference()
|
||||
const args = { ...this.invocationParams(options), inputs: prompt }
|
||||
const res = await this.caller.callWithOptions({ signal: options.signal }, hfi.textGeneration.bind(hfi), args)
|
||||
return res.generated_text
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
private async _prepareHFInference() {
|
||||
const { HfInference } = await HuggingFaceInference.imports()
|
||||
const hfi = new HfInference(this.apiKey, {
|
||||
includeCredentials: this.includeCredentials
|
||||
})
|
||||
return this.endpointUrl ? hfi.endpoint(this.endpointUrl) : hfi
|
||||
}
|
||||
|
||||
/** @ignore */
|
||||
static async imports(): Promise<{
|
||||
HfInference: typeof import('@huggingface/inference').HfInference
|
||||
|
|
|
|||
|
|
@ -45,7 +45,6 @@ class ChatOllamaFunction_ChatModels implements INode {
|
|||
this.category = 'Chat Models'
|
||||
this.description = 'Run open-source function-calling compatible LLM on Ollama'
|
||||
this.baseClasses = [this.type, ...getBaseClasses(OllamaFunctions)]
|
||||
this.badge = 'NEW'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Cache',
|
||||
|
|
|
|||
|
|
@ -131,7 +131,11 @@ class Cheerio_DocumentLoaders implements INode {
|
|||
|
||||
async function cheerioLoader(url: string): Promise<any> {
|
||||
try {
|
||||
let docs = []
|
||||
let docs: IDocument[] = []
|
||||
if (url.endsWith('.pdf')) {
|
||||
if (process.env.DEBUG === 'true') options.logger.info(`CheerioWebBaseLoader does not support PDF files: ${url}`)
|
||||
return docs
|
||||
}
|
||||
const loader = new CheerioWebBaseLoader(url, params)
|
||||
if (textSplitter) {
|
||||
docs = await loader.loadAndSplit(textSplitter)
|
||||
|
|
@ -141,6 +145,7 @@ class Cheerio_DocumentLoaders implements INode {
|
|||
return docs
|
||||
} catch (err) {
|
||||
if (process.env.DEBUG === 'true') options.logger.error(`error in CheerioWebBaseLoader: ${err.message}, on page: ${url}`)
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -23,7 +23,6 @@ class CustomDocumentLoader_DocumentLoaders implements INode {
|
|||
this.type = 'Document'
|
||||
this.icon = 'customDocLoader.svg'
|
||||
this.category = 'Document Loaders'
|
||||
this.badge = 'NEW'
|
||||
this.description = `Custom function for loading documents`
|
||||
this.baseClasses = [this.type]
|
||||
this.inputs = [
|
||||
|
|
|
|||
|
|
@ -22,7 +22,6 @@ class DocStore_DocumentLoaders implements INode {
|
|||
this.version = 1.0
|
||||
this.type = 'Document'
|
||||
this.icon = 'dstore.svg'
|
||||
this.badge = 'NEW'
|
||||
this.category = 'Document Loaders'
|
||||
this.description = `Load data from pre-configured document stores`
|
||||
this.baseClasses = [this.type]
|
||||
|
|
|
|||
|
|
@ -0,0 +1,378 @@
|
|||
import { TextSplitter } from 'langchain/text_splitter'
|
||||
import { Document, DocumentInterface } from '@langchain/core/documents'
|
||||
import { BaseDocumentLoader } from 'langchain/document_loaders/base'
|
||||
import { INode, INodeData, INodeParams, ICommonObject } from '../../../src/Interface'
|
||||
import { getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import axios, { AxiosResponse, AxiosRequestHeaders } from 'axios'
|
||||
import { z } from 'zod'
|
||||
import { zodToJsonSchema } from 'zod-to-json-schema'
|
||||
|
||||
// FirecrawlApp interfaces
|
||||
interface FirecrawlAppConfig {
|
||||
apiKey?: string | null
|
||||
apiUrl?: string | null
|
||||
}
|
||||
|
||||
interface FirecrawlDocumentMetadata {
|
||||
title?: string
|
||||
description?: string
|
||||
language?: string
|
||||
// ... (other metadata fields)
|
||||
[key: string]: any
|
||||
}
|
||||
|
||||
interface FirecrawlDocument {
|
||||
id?: string
|
||||
url?: string
|
||||
content: string
|
||||
markdown?: string
|
||||
html?: string
|
||||
llm_extraction?: Record<string, any>
|
||||
createdAt?: Date
|
||||
updatedAt?: Date
|
||||
type?: string
|
||||
metadata: FirecrawlDocumentMetadata
|
||||
childrenLinks?: string[]
|
||||
provider?: string
|
||||
warning?: string
|
||||
index?: number
|
||||
}
|
||||
|
||||
interface ScrapeResponse {
|
||||
success: boolean
|
||||
data?: FirecrawlDocument
|
||||
error?: string
|
||||
}
|
||||
|
||||
interface CrawlResponse {
|
||||
success: boolean
|
||||
jobId?: string
|
||||
data?: FirecrawlDocument[]
|
||||
error?: string
|
||||
}
|
||||
|
||||
interface Params {
|
||||
[key: string]: any
|
||||
extractorOptions?: {
|
||||
extractionSchema: z.ZodSchema | any
|
||||
mode?: 'llm-extraction'
|
||||
extractionPrompt?: string
|
||||
}
|
||||
}
|
||||
|
||||
// FirecrawlApp class (not exported)
|
||||
class FirecrawlApp {
|
||||
private apiKey: string
|
||||
private apiUrl: string
|
||||
|
||||
constructor({ apiKey = null, apiUrl = null }: FirecrawlAppConfig) {
|
||||
this.apiKey = apiKey || ''
|
||||
this.apiUrl = apiUrl || 'https://api.firecrawl.dev'
|
||||
if (!this.apiKey) {
|
||||
throw new Error('No API key provided')
|
||||
}
|
||||
}
|
||||
|
||||
    /**
     * Scrapes a single URL via POST {apiUrl}/v0/scrape.
     *
     * If `params.extractorOptions.extractionSchema` is a Zod schema it is
     * converted to JSON Schema before being sent; the extraction mode
     * defaults to 'llm-extraction'.
     *
     * @param url    page to scrape
     * @param params extra request body fields, merged over `{ url }`
     * @returns the API's ScrapeResponse on success
     * @throws Error when the API reports failure or a non-200 status maps to
     *         an error in `handleError`
     */
    async scrapeUrl(url: string, params: Params | null = null): Promise<ScrapeResponse> {
        const headers = this.prepareHeaders()
        let jsonData: Params = { url, ...params }
        if (params?.extractorOptions?.extractionSchema) {
            let schema = params.extractorOptions.extractionSchema
            // Zod schemas can't be serialized directly — convert to JSON Schema first
            if (schema instanceof z.ZodSchema) {
                schema = zodToJsonSchema(schema)
            }
            jsonData = {
                ...jsonData,
                extractorOptions: {
                    ...params.extractorOptions,
                    extractionSchema: schema,
                    mode: params.extractorOptions.mode || 'llm-extraction'
                }
            }
        }
        try {
            const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v0/scrape', jsonData, headers)
            if (response.status === 200) {
                const responseData = response.data
                if (responseData.success) {
                    return responseData
                } else {
                    throw new Error(`Failed to scrape URL. Error: ${responseData.error}`)
                }
            } else {
                // handleError throws for known statuses; others fall through to the sentinel below
                this.handleError(response, 'scrape URL')
            }
        } catch (error: any) {
            // Re-wrap so callers get a plain Error with just the message
            throw new Error(error.message)
        }
        // Reached only when a non-200 status wasn't escalated by handleError
        return { success: false, error: 'Internal server error.' }
    }
|
||||
|
||||
    /**
     * Starts a crawl job via POST {apiUrl}/v0/crawl.
     *
     * @param url            root URL to crawl
     * @param params         extra request body fields, merged over `{ url }`
     * @param waitUntilDone  when true (default) polls until the job finishes
     *                       and returns the crawl data; when false returns
     *                       `{ success: true, jobId }` immediately
     * @param pollInterval   seconds between status polls (see monitorJobStatus)
     * @param idempotencyKey optional key sent as 'x-idempotency-key'
     * @throws Error when the request fails or a non-200 status maps to an
     *         error in `handleError`
     */
    async crawlUrl(
        url: string,
        params: Params | null = null,
        waitUntilDone: boolean = true,
        pollInterval: number = 2,
        idempotencyKey?: string
    ): Promise<CrawlResponse | any> {
        const headers = this.prepareHeaders(idempotencyKey)
        let jsonData: Params = { url, ...params }
        try {
            const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v0/crawl', jsonData, headers)
            if (response.status === 200) {
                const jobId: string = response.data.jobId
                if (waitUntilDone) {
                    // Blocks until the job completes; returns the crawl documents
                    return this.monitorJobStatus(jobId, headers, pollInterval)
                } else {
                    return { success: true, jobId }
                }
            } else {
                // handleError throws for known statuses; others fall through to the sentinel below
                this.handleError(response, 'start crawl job')
            }
        } catch (error: any) {
            // Re-wrap so callers get a plain Error with just the message
            throw new Error(error.message)
        }
        // Reached only when a non-200 status wasn't escalated by handleError
        return { success: false, error: 'Internal server error.' }
    }
|
||||
|
||||
/**
 * Build the JSON request headers, including the bearer token and an
 * optional `x-idempotency-key` when one is supplied.
 */
private prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders {
    const headers: Record<string, string> = {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${this.apiKey}`
    }
    if (idempotencyKey) {
        headers['x-idempotency-key'] = idempotencyKey
    }
    return headers as AxiosRequestHeaders & { 'x-idempotency-key'?: string }
}
|
||||
|
||||
// Thin wrapper over axios.post so all outgoing POSTs share one code path.
private postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise<AxiosResponse> {
    return axios.post(url, data, { headers })
}
|
||||
|
||||
// Thin wrapper over axios.get, mirroring postRequest.
private getRequest(url: string, headers: AxiosRequestHeaders): Promise<AxiosResponse> {
    return axios.get(url, { headers })
}
|
||||
|
||||
/**
 * Poll GET /v0/crawl/status/{jobId} until the crawl job reaches a terminal
 * state, then return its data.
 *
 * NOTE(review): there is no timeout or maximum poll count — a job that stays
 * 'active'/'pending' forever keeps this loop running indefinitely. Confirm
 * whether an upper bound should be added.
 *
 * @param jobId         Job identifier returned by the crawl endpoint.
 * @param headers       Headers to reuse for the status requests.
 * @param checkInterval Requested seconds between polls; clamped to >= 2 below.
 * @returns The job's `data` payload once the status becomes 'completed'.
 * @throws Error when the job finishes without data or enters an unknown state.
 */
private async monitorJobStatus(jobId: string, headers: AxiosRequestHeaders, checkInterval: number): Promise<any> {
    let isJobCompleted = false
    while (!isJobCompleted) {
        const statusResponse: AxiosResponse = await this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers)
        if (statusResponse.status === 200) {
            const statusData = statusResponse.data
            switch (statusData.status) {
                case 'completed':
                    isJobCompleted = true
                    if ('data' in statusData) {
                        return statusData.data
                    } else {
                        throw new Error('Crawl job completed but no data was returned')
                    }
                // All in-flight states: wait, then poll again.
                case 'active':
                case 'paused':
                case 'pending':
                case 'queued':
                    // Enforce a minimum 2-second gap between polls regardless
                    // of the caller-requested interval.
                    await new Promise((resolve) => setTimeout(resolve, Math.max(checkInterval, 2) * 1000))
                    break
                default:
                    // Any unrecognized status is treated as a failure.
                    throw new Error(`Crawl job failed or was stopped. Status: ${statusData.status}`)
            }
        } else {
            // Always throws, so the loop cannot continue on a non-200 response.
            this.handleError(statusResponse, 'check crawl status')
        }
    }
}
|
||||
|
||||
private handleError(response: AxiosResponse, action: string): void {
|
||||
if ([402, 408, 409, 500].includes(response.status)) {
|
||||
const errorMessage: string = response.data.error || 'Unknown error occurred'
|
||||
throw new Error(`Failed to ${action}. Status code: ${response.status}. Error: ${errorMessage}`)
|
||||
} else {
|
||||
throw new Error(`Unexpected error occurred while trying to ${action}. Status code: ${response.status}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FireCrawl Loader
|
||||
/** Options accepted by the FireCrawlLoader constructor. */
interface FirecrawlLoaderParameters {
    /** Target URL to crawl or scrape. */
    url: string
    /** FireCrawl API key; required at runtime (the loader throws without it). */
    apiKey?: string
    /** 'crawl' fetches the URL plus accessible subpages; 'scrape' fetches a single page. Defaults to 'crawl'. */
    mode?: 'crawl' | 'scrape'
    /** Extra request parameters forwarded verbatim to the FireCrawl API. */
    params?: Record<string, unknown>
}
|
||||
|
||||
class FireCrawlLoader extends BaseDocumentLoader {
|
||||
private apiKey: string
|
||||
private url: string
|
||||
private mode: 'crawl' | 'scrape'
|
||||
private params?: Record<string, unknown>
|
||||
|
||||
constructor(loaderParams: FirecrawlLoaderParameters) {
|
||||
super()
|
||||
const { apiKey, url, mode = 'crawl', params } = loaderParams
|
||||
if (!apiKey) {
|
||||
throw new Error('Firecrawl API key not set. You can set it as FIRECRAWL_API_KEY in your .env file, or pass it to Firecrawl.')
|
||||
}
|
||||
|
||||
this.apiKey = apiKey
|
||||
this.url = url
|
||||
this.mode = mode
|
||||
this.params = params
|
||||
}
|
||||
|
||||
public async load(): Promise<DocumentInterface[]> {
|
||||
const app = new FirecrawlApp({ apiKey: this.apiKey })
|
||||
let firecrawlDocs: FirecrawlDocument[]
|
||||
|
||||
if (this.mode === 'scrape') {
|
||||
const response = await app.scrapeUrl(this.url, this.params)
|
||||
if (!response.success) {
|
||||
throw new Error(`Firecrawl: Failed to scrape URL. Error: ${response.error}`)
|
||||
}
|
||||
firecrawlDocs = [response.data as FirecrawlDocument]
|
||||
} else if (this.mode === 'crawl') {
|
||||
const response = await app.crawlUrl(this.url, this.params, true)
|
||||
firecrawlDocs = response as FirecrawlDocument[]
|
||||
} else {
|
||||
throw new Error(`Unrecognized mode '${this.mode}'. Expected one of 'crawl', 'scrape'.`)
|
||||
}
|
||||
|
||||
return firecrawlDocs.map(
|
||||
(doc) =>
|
||||
new Document({
|
||||
pageContent: doc.markdown || '',
|
||||
metadata: doc.metadata || {}
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Flowise Node Class
|
||||
/**
 * Flowise node wrapping FireCrawlLoader: loads web content via the FireCrawl
 * API, optionally splits it, and optionally merges user-supplied metadata
 * into each resulting document.
 */
class FireCrawl_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    version: number
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams

    constructor() {
        this.label = 'FireCrawl'
        this.name = 'fireCrawl'
        this.type = 'Document'
        this.icon = 'firecrawl.png'
        this.version = 1.0
        this.category = 'Document Loaders'
        this.description = 'Load data from URL using FireCrawl'
        this.baseClasses = [this.type]
        this.inputs = [
            {
                label: 'Text Splitter',
                name: 'textSplitter',
                type: 'TextSplitter',
                optional: true
            },
            {
                label: 'URLs',
                name: 'url',
                type: 'string',
                description: 'URL to be crawled/scraped',
                placeholder: 'https://docs.flowiseai.com'
            },
            {
                label: 'Crawler type',
                type: 'options',
                name: 'crawlerType',
                options: [
                    {
                        label: 'Crawl',
                        name: 'crawl',
                        description: 'Crawl a URL and all accessible subpages'
                    },
                    {
                        label: 'Scrape',
                        name: 'scrape',
                        description: 'Scrape a URL and get its content'
                    }
                ],
                default: 'crawl'
            }
            // ... (other input parameters)
        ]
        this.credential = {
            label: 'FireCrawl API',
            name: 'credential',
            type: 'credential',
            credentialNames: ['fireCrawlApi']
        }
    }

    /**
     * Build a FireCrawlLoader from the node's inputs and credential, load the
     * documents (split when a text splitter is wired in), and merge optional
     * metadata into every document.
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
        const metadata = nodeData.inputs?.metadata
        const url = nodeData.inputs?.url as string
        const crawlerType = nodeData.inputs?.crawlerType as string
        const maxCrawlPages = nodeData.inputs?.maxCrawlPages as string
        const generateImgAltText = nodeData.inputs?.generateImgAltText as boolean
        const returnOnlyUrls = nodeData.inputs?.returnOnlyUrls as boolean
        const onlyMainContent = nodeData.inputs?.onlyMainContent as boolean
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const firecrawlApiToken = getCredentialParam('firecrawlApiToken', credentialData, nodeData)

        // Comma-separated pattern strings are split into arrays for the API.
        const urlPatternsExcludes = nodeData.inputs?.urlPatternsExcludes
            ? (nodeData.inputs.urlPatternsExcludes.split(',') as string[])
            : undefined
        const urlPatternsIncludes = nodeData.inputs?.urlPatternsIncludes
            ? (nodeData.inputs.urlPatternsIncludes.split(',') as string[])
            : undefined

        const input: FirecrawlLoaderParameters = {
            url,
            mode: crawlerType as 'crawl' | 'scrape',
            apiKey: firecrawlApiToken,
            params: {
                crawlerOptions: {
                    includes: urlPatternsIncludes,
                    excludes: urlPatternsExcludes,
                    generateImgAltText,
                    returnOnlyUrls,
                    // maxCrawlPages arrives as a string input; absent means no limit.
                    limit: maxCrawlPages ? parseFloat(maxCrawlPages) : undefined
                },
                pageOptions: {
                    onlyMainContent
                }
            }
        }

        const loader = new FireCrawlLoader(input)

        let docs = []

        if (textSplitter) {
            docs = await loader.loadAndSplit(textSplitter)
        } else {
            docs = await loader.load()
        }

        if (metadata) {
            // Metadata may be a JSON string (from the UI) or an object.
            const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata)
            let finaldocs = []
            for (const doc of docs) {
                // User-supplied metadata wins over per-document metadata on key clash.
                const newdoc = {
                    ...doc,
                    metadata: {
                        ...doc.metadata,
                        ...parsedMetadata
                    }
                }
                finaldocs.push(newdoc)
            }
            return finaldocs
        }

        return docs
    }
}
|
||||
|
||||
// Flowise discovers nodes via a CommonJS export of the node class.
module.exports = { nodeClass: FireCrawl_DocumentLoaders }
|
||||
|
After Width: | Height: | Size: 17 KiB |
|
|
@ -0,0 +1,189 @@
|
|||
import { TextSplitter } from 'langchain/text_splitter'
|
||||
import { Document, DocumentInterface } from '@langchain/core/documents'
|
||||
import { BaseDocumentLoader } from 'langchain/document_loaders/base'
|
||||
import { INode, INodeData, INodeParams, ICommonObject } from '../../../src/Interface'
|
||||
import { getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import SpiderApp from './SpiderApp'
|
||||
|
||||
/** Options accepted by the SpiderLoader constructor. */
interface SpiderLoaderParameters {
    /** Target URL to crawl or scrape. */
    url: string
    /** Spider API key; required at runtime (the loader throws without it). */
    apiKey?: string
    /** 'crawl' walks the site; 'scrape' fetches a single page. Defaults to 'crawl'. */
    mode?: 'crawl' | 'scrape'
    /** Maximum number of pages to crawl (crawl mode only). */
    limit?: number
    /** Extra request parameters forwarded verbatim to the Spider API. */
    params?: Record<string, unknown>
}
|
||||
|
||||
class SpiderLoader extends BaseDocumentLoader {
|
||||
private apiKey: string
|
||||
private url: string
|
||||
private mode: 'crawl' | 'scrape'
|
||||
private limit?: number
|
||||
private params?: Record<string, unknown>
|
||||
|
||||
constructor(loaderParams: SpiderLoaderParameters) {
|
||||
super()
|
||||
const { apiKey, url, mode = 'crawl', limit, params } = loaderParams
|
||||
if (!apiKey) {
|
||||
throw new Error('Spider API key not set. You can set it as SPIDER_API_KEY in your .env file, or pass it to Spider.')
|
||||
}
|
||||
|
||||
this.apiKey = apiKey
|
||||
this.url = url
|
||||
this.mode = mode
|
||||
this.limit = Number(limit)
|
||||
this.params = params
|
||||
}
|
||||
|
||||
public async load(): Promise<DocumentInterface[]> {
|
||||
const app = new SpiderApp({ apiKey: this.apiKey })
|
||||
let spiderDocs: any[]
|
||||
|
||||
if (this.mode === 'scrape') {
|
||||
const response = await app.scrapeUrl(this.url, this.params)
|
||||
if (!response.success) {
|
||||
throw new Error(`Spider: Failed to scrape URL. Error: ${response.error}`)
|
||||
}
|
||||
spiderDocs = [response.data]
|
||||
} else if (this.mode === 'crawl') {
|
||||
if (this.params) {
|
||||
this.params.limit = this.limit
|
||||
}
|
||||
const response = await app.crawlUrl(this.url, this.params)
|
||||
if (!response.success) {
|
||||
throw new Error(`Spider: Failed to crawl URL. Error: ${response.error}`)
|
||||
}
|
||||
spiderDocs = response.data
|
||||
} else {
|
||||
throw new Error(`Unrecognized mode '${this.mode}'. Expected one of 'crawl', 'scrape'.`)
|
||||
}
|
||||
|
||||
return spiderDocs.map(
|
||||
(doc) =>
|
||||
new Document({
|
||||
pageContent: doc.content || '',
|
||||
metadata: { source: doc.url }
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Flowise node wrapping SpiderLoader: scrapes a single page or crawls a site
 * via the Spider API, optionally splitting the results with a text splitter.
 */
class Spider_DocumentLoaders implements INode {
    label: string
    name: string
    description: string
    type: string
    icon: string
    version: number
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams

    constructor() {
        this.label = 'Spider Document Loaders'
        this.name = 'spiderDocumentLoaders'
        this.version = 1.0
        this.type = 'Document'
        this.icon = 'spider.svg'
        this.category = 'Document Loaders'
        this.description = 'Scrape & Crawl the web with Spider'
        this.baseClasses = [this.type]
        this.inputs = [
            {
                label: 'Text Splitter',
                name: 'textSplitter',
                type: 'TextSplitter',
                optional: true
            },
            {
                label: 'Mode',
                name: 'mode',
                type: 'options',
                options: [
                    {
                        label: 'Scrape',
                        name: 'scrape',
                        description: 'Scrape a single page'
                    },
                    {
                        label: 'Crawl',
                        name: 'crawl',
                        description: 'Crawl a website and extract pages within the same domain'
                    }
                ],
                default: 'scrape'
            },
            {
                label: 'Web Page URL',
                name: 'url',
                type: 'string',
                placeholder: 'https://spider.cloud'
            },
            {
                label: 'Limit',
                name: 'limit',
                type: 'number',
                default: 25
            },
            {
                label: 'Additional Parameters',
                name: 'params',
                description:
                    'Find all the available parameters in the <a _target="blank" href="https://spider.cloud/docs/api">Spider API documentation</a>',
                additionalParams: true,
                placeholder: '{ "anti_bot": true }',
                type: 'json',
                optional: true
            }
        ]
        this.credential = {
            label: 'Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['spiderApi']
        }
    }

    /**
     * Build a SpiderLoader from the node's inputs and credential, force
     * markdown output, and load (optionally splitting) the documents.
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const textSplitter = nodeData.inputs?.textSplitter as TextSplitter
        const url = nodeData.inputs?.url as string
        const mode = nodeData.inputs?.mode as 'crawl' | 'scrape'
        const limit = nodeData.inputs?.limit as number
        let params = nodeData.inputs?.params || {}
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const spiderApiKey = getCredentialParam('spiderApiKey', credentialData, nodeData)

        // The UI supplies additional parameters as a JSON string.
        if (typeof params === 'string') {
            try {
                params = JSON.parse(params)
            } catch (e) {
                throw new Error('Invalid JSON string provided for params')
            }
        }

        // Ensure return_format is set to markdown
        params.return_format = 'markdown'

        const input: SpiderLoaderParameters = {
            url,
            mode: mode as 'crawl' | 'scrape',
            apiKey: spiderApiKey,
            limit: limit as number,
            params: params as Record<string, unknown>
        }

        const loader = new SpiderLoader(input)

        let docs = []

        if (textSplitter) {
            docs = await loader.loadAndSplit(textSplitter)
        } else {
            docs = await loader.load()
        }

        return docs
    }
}
|
||||
|
||||
// Flowise discovers nodes via a CommonJS export of the node class.
module.exports = { nodeClass: Spider_DocumentLoaders }
|
||||
|
|
@ -0,0 +1,116 @@
|
|||
import axios, { AxiosResponse, AxiosRequestHeaders } from 'axios'
|
||||
|
||||
/** Constructor options for SpiderApp. */
interface SpiderAppConfig {
    apiKey?: string | null
    /** Override for the API base URL; defaults to https://api.spider.cloud/v1. */
    apiUrl?: string | null
}

/** Per-page metadata returned by the Spider API; open-ended beyond the known keys. */
interface SpiderDocumentMetadata {
    title?: string
    description?: string
    language?: string
    [key: string]: any
}

/** One crawled/scraped page as returned by the Spider API. */
interface SpiderDocument {
    id?: string
    url?: string
    content: string
    markdown?: string
    html?: string
    createdAt?: Date
    updatedAt?: Date
    type?: string
    metadata: SpiderDocumentMetadata
}

/** Result envelope for SpiderApp.scrapeUrl. */
interface ScrapeResponse {
    success: boolean
    data?: SpiderDocument
    error?: string
}

/** Result envelope for SpiderApp.crawlUrl. */
interface CrawlResponse {
    success: boolean
    data?: SpiderDocument[]
    error?: string
}

/** Free-form request parameters forwarded to the Spider API. */
interface Params {
    [key: string]: any
}
|
||||
|
||||
/**
 * Minimal HTTP client for the Spider (spider.cloud) API.
 *
 * Both scrape and crawl go through the same `crawl` endpoint; scrape simply
 * requests a single page via `limit: 1`.
 */
class SpiderApp {
    private apiKey: string
    private apiUrl: string

    constructor({ apiKey = null, apiUrl = null }: SpiderAppConfig) {
        this.apiKey = apiKey || ''
        this.apiUrl = apiUrl || 'https://api.spider.cloud/v1'
        if (!this.apiKey) {
            throw new Error('No API key provided')
        }
    }

    /**
     * Scrape a single page: POST /crawl with `limit: 1`.
     * Returns the first element of the response array on success.
     */
    async scrapeUrl(url: string, params: Params | null = null): Promise<ScrapeResponse> {
        const headers = this.prepareHeaders()
        // Caller params are spread last, so they can override `limit: 1`.
        const jsonData: Params = { url, limit: 1, ...params }

        try {
            const response: AxiosResponse = await this.postRequest('crawl', jsonData, headers)
            if (response.status === 200) {
                const responseData = response.data
                // NOTE(review): success is judged by responseData[0].status but the
                // failure path reads responseData.error (on the array, not on the
                // element) — verify against the Spider API response shape. Also
                // assumes the response array is non-empty.
                if (responseData[0].status) {
                    return { success: true, data: responseData[0] }
                } else {
                    throw new Error(`Failed to scrape URL. Error: ${responseData.error}`)
                }
            } else {
                this.handleError(response, 'scrape URL')
            }
        } catch (error: any) {
            throw new Error(error.message)
        }
        // Unreachable in practice: handleError always throws.
        return { success: false, error: 'Internal server error.' }
    }

    /**
     * Crawl a site: POST /crawl with caller-supplied params (e.g. `limit`).
     * @param idempotencyKey Optional key forwarded as the `x-idempotency-key` header.
     */
    async crawlUrl(url: string, params: Params | null = null, idempotencyKey?: string): Promise<CrawlResponse | any> {
        const headers = this.prepareHeaders(idempotencyKey)
        const jsonData: Params = { url, ...params }

        try {
            const response: AxiosResponse = await this.postRequest('crawl', jsonData, headers)
            if (response.status === 200) {
                return { success: true, data: response.data }
            } else {
                this.handleError(response, 'start crawl job')
            }
        } catch (error: any) {
            throw new Error(error.message)
        }
        // Unreachable in practice: handleError always throws.
        return { success: false, error: 'Internal server error.' }
    }

    // Build JSON request headers with the bearer token and an optional idempotency key.
    private prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders {
        return {
            'Content-Type': 'application/json',
            Authorization: `Bearer ${this.apiKey}`,
            ...(idempotencyKey ? { 'x-idempotency-key': idempotencyKey } : {})
        } as AxiosRequestHeaders & { 'x-idempotency-key'?: string }
    }

    // POST to a path relative to the configured API base URL.
    private postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise<AxiosResponse> {
        return axios.post(`${this.apiUrl}/${url}`, data, { headers })
    }

    // Translate a non-200 response into a thrown Error; never returns normally.
    private handleError(response: AxiosResponse, action: string): void {
        if ([402, 408, 409, 500].includes(response.status)) {
            const errorMessage: string = response.data.error || 'Unknown error occurred'
            throw new Error(`Failed to ${action}. Status code: ${response.status}. Error: ${errorMessage}`)
        } else {
            throw new Error(`Unexpected error occurred while trying to ${action}. Status code: ${response.status}`)
        }
    }
}
|
||||
|
||||
// Consumed by SpiderLoader via `import SpiderApp from './SpiderApp'`.
export default SpiderApp
|
||||
|
|
@ -0,0 +1 @@
|
|||
<svg height="30" width="30" viewBox="0 0 36 34" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" class="fill-accent-foreground transition-all group-hover:scale-110"><title>Spider v1 Logo</title><path fill-rule="evenodd" clip-rule="evenodd" d="M9.13883 7.06589V0.164429L13.0938 0.164429V6.175L14.5178 7.4346C15.577 6.68656 16.7337 6.27495 17.945 6.27495C19.1731 6.27495 20.3451 6.69807 21.4163 7.46593L22.8757 6.175V0.164429L26.8307 0.164429V7.06589V7.95679L26.1634 8.54706L24.0775 10.3922C24.3436 10.8108 24.5958 11.2563 24.8327 11.7262L26.0467 11.4215L28.6971 8.08749L31.793 10.5487L28.7257 14.407L28.3089 14.9313L27.6592 15.0944L26.2418 15.4502C26.3124 15.7082 26.3793 15.9701 26.4422 16.2355L28.653 16.6566L29.092 16.7402L29.4524 17.0045L35.3849 21.355L33.0461 24.5444L27.474 20.4581L27.0719 20.3816C27.1214 21.0613 27.147 21.7543 27.147 22.4577C27.147 22.5398 27.1466 22.6214 27.1459 22.7024L29.5889 23.7911L30.3219 24.1177L30.62 24.8629L33.6873 32.5312L30.0152 34L27.246 27.0769L26.7298 26.8469C25.5612 32.2432 22.0701 33.8808 17.945 33.8808C13.8382 33.8808 10.3598 32.2577 9.17593 26.9185L8.82034 27.0769L6.05109 34L2.37897 32.5312L5.44629 24.8629L5.74435 24.1177L6.47743 23.7911L8.74487 22.7806C8.74366 22.6739 8.74305 22.5663 8.74305 22.4577C8.74305 21.7616 8.76804 21.0758 8.81654 20.4028L8.52606 20.4581L2.95395 24.5444L0.615112 21.355L6.54761 17.0045L6.908 16.7402L7.34701 16.6566L9.44264 16.2575C9.50917 15.9756 9.5801 15.6978 9.65528 15.4242L8.34123 15.0944L7.69155 14.9313L7.27471 14.407L4.20739 10.5487L7.30328 8.08749L9.95376 11.4215L11.0697 11.7016C11.3115 11.2239 11.5692 10.7716 11.8412 10.3473L9.80612 8.54706L9.13883 7.95679V7.06589Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
|
|
@ -273,10 +273,8 @@ class Supervisor_MultiAgents implements INode {
|
|||
* So we have to place the system + human prompt at last
|
||||
*/
|
||||
let prompt = ChatPromptTemplate.fromMessages([
|
||||
['human', systemPrompt],
|
||||
['ai', ''],
|
||||
['system', systemPrompt],
|
||||
new MessagesPlaceholder('messages'),
|
||||
['ai', ''],
|
||||
['human', userPrompt]
|
||||
])
|
||||
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ class MySQLRecordManager_RecordManager implements INode {
|
|||
this.category = 'Record Manager'
|
||||
this.description = 'Use MySQL to keep track of document writes into the vector databases'
|
||||
this.baseClasses = [this.type, 'RecordManager', ...getBaseClasses(MySQLRecordManager)]
|
||||
this.badge = 'NEW'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Host',
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ class PostgresRecordManager_RecordManager implements INode {
|
|||
this.category = 'Record Manager'
|
||||
this.description = 'Use Postgres to keep track of document writes into the vector databases'
|
||||
this.baseClasses = [this.type, 'RecordManager', ...getBaseClasses(PostgresRecordManager)]
|
||||
this.badge = 'NEW'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Host',
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ class SQLiteRecordManager_RecordManager implements INode {
|
|||
this.category = 'Record Manager'
|
||||
this.description = 'Use SQLite to keep track of document writes into the vector databases'
|
||||
this.baseClasses = [this.type, 'RecordManager', ...getBaseClasses(SQLiteRecordManager)]
|
||||
this.badge = 'NEW'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Database File Path',
|
||||
|
|
|
|||
|
|
@ -26,7 +26,6 @@ class CohereRerankRetriever_Retrievers implements INode {
|
|||
this.type = 'Cohere Rerank Retriever'
|
||||
this.icon = 'Cohere.svg'
|
||||
this.category = 'Retrievers'
|
||||
this.badge = 'NEW'
|
||||
this.description = 'Cohere Rerank indexes the documents from most to least semantically relevant to the query.'
|
||||
this.baseClasses = [this.type, 'BaseRetriever']
|
||||
this.credential = {
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ class EmbeddingsFilterRetriever_Retrievers implements INode {
|
|||
this.type = 'EmbeddingsFilterRetriever'
|
||||
this.icon = 'compressionRetriever.svg'
|
||||
this.category = 'Retrievers'
|
||||
this.badge = 'NEW'
|
||||
this.description = 'A document compressor that uses embeddings to drop documents unrelated to the query'
|
||||
this.baseClasses = [this.type, 'BaseRetriever']
|
||||
this.inputs = [
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ class LLMFilterCompressionRetriever_Retrievers implements INode {
|
|||
this.type = 'LLMFilterRetriever'
|
||||
this.icon = 'llmFilterRetriever.svg'
|
||||
this.category = 'Retrievers'
|
||||
this.badge = 'NEW'
|
||||
this.description =
|
||||
'Iterate over the initially returned documents and extract, from each, only the content that is relevant to the query'
|
||||
this.baseClasses = [this.type, 'BaseRetriever']
|
||||
|
|
|
|||
|
|
@ -24,7 +24,6 @@ class RRFRetriever_Retrievers implements INode {
|
|||
this.name = 'RRFRetriever'
|
||||
this.version = 1.0
|
||||
this.type = 'RRFRetriever'
|
||||
this.badge = 'NEW'
|
||||
this.icon = 'rrfRetriever.svg'
|
||||
this.category = 'Retrievers'
|
||||
this.description = 'Reciprocal Rank Fusion to re-rank search results by multiple query generation.'
|
||||
|
|
|
|||
|
|
@ -26,7 +26,6 @@ class VoyageAIRerankRetriever_Retrievers implements INode {
|
|||
this.type = 'VoyageAIRerankRetriever'
|
||||
this.icon = 'voyageai.png'
|
||||
this.category = 'Retrievers'
|
||||
this.badge = 'NEW'
|
||||
this.description = 'Voyage AI Rerank indexes the documents from most to least semantically relevant to the query.'
|
||||
this.baseClasses = [this.type, 'BaseRetriever']
|
||||
this.credential = {
|
||||
|
|
|
|||
|
|
@ -36,7 +36,6 @@ class E2B_Tools implements INode {
|
|||
this.type = 'E2B'
|
||||
this.icon = 'e2b.png'
|
||||
this.category = 'Tools'
|
||||
this.badge = 'NEW'
|
||||
this.description = 'Execute code in E2B Code Intepreter'
|
||||
this.baseClasses = [this.type, 'Tool', ...getBaseClasses(E2BTool)]
|
||||
this.credential = {
|
||||
|
|
|
|||
|
|
@ -36,7 +36,6 @@ class PythonInterpreter_Tools implements INode {
|
|||
this.type = 'PythonInterpreter'
|
||||
this.icon = 'python.svg'
|
||||
this.category = 'Tools'
|
||||
this.badge = 'NEW'
|
||||
this.description = 'Execute python code in Pyodide sandbox environment'
|
||||
this.baseClasses = [this.type, 'Tool', ...getBaseClasses(PythonInterpreterTool)]
|
||||
this.inputs = [
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ class CustomFunction_Utilities implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
tags: string[]
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
|
@ -18,12 +19,13 @@ class CustomFunction_Utilities implements INode {
|
|||
constructor() {
|
||||
this.label = 'Custom JS Function'
|
||||
this.name = 'customFunction'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'CustomFunction'
|
||||
this.icon = 'customfunction.svg'
|
||||
this.category = 'Utilities'
|
||||
this.description = `Execute custom javascript function`
|
||||
this.baseClasses = [this.type, 'Utilities']
|
||||
this.tags = ['Utilities']
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Input Variables',
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ class GetVariable_Utilities implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
tags: string[]
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
|
@ -15,12 +16,13 @@ class GetVariable_Utilities implements INode {
|
|||
constructor() {
|
||||
this.label = 'Get Variable'
|
||||
this.name = 'getVariable'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'GetVariable'
|
||||
this.icon = 'getvar.svg'
|
||||
this.category = 'Utilities'
|
||||
this.description = `Get variable that was saved using Set Variable node`
|
||||
this.baseClasses = [this.type, 'Utilities']
|
||||
this.tags = ['Utilities']
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Variable Name',
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ class IfElseFunction_Utilities implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
tags: string[]
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
|
@ -18,12 +19,13 @@ class IfElseFunction_Utilities implements INode {
|
|||
constructor() {
|
||||
this.label = 'IfElse Function'
|
||||
this.name = 'ifElseFunction'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'IfElseFunction'
|
||||
this.icon = 'ifelsefunction.svg'
|
||||
this.category = 'Utilities'
|
||||
this.description = `Split flows based on If Else javascript functions`
|
||||
this.baseClasses = [this.type, 'Utilities']
|
||||
this.tags = ['Utilities']
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Input Variables',
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ class SetVariable_Utilities implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
tags: string[]
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
|
@ -15,11 +16,12 @@ class SetVariable_Utilities implements INode {
|
|||
constructor() {
|
||||
this.label = 'Set Variable'
|
||||
this.name = 'setVariable'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'SetVariable'
|
||||
this.icon = 'setvar.svg'
|
||||
this.category = 'Utilities'
|
||||
this.description = `Set variable which can be retrieved at a later stage. Variable is only available during runtime.`
|
||||
this.tags = ['Utilities']
|
||||
this.baseClasses = [this.type, 'Utilities']
|
||||
this.inputs = [
|
||||
{
|
||||
|
|
|
|||
|
|
@ -8,16 +8,18 @@ class StickyNote implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
tags: string[]
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Sticky Note'
|
||||
this.name = 'stickyNote'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'StickyNote'
|
||||
this.icon = 'stickyNote.svg'
|
||||
this.category = 'Utilities'
|
||||
this.tags = ['Utilities']
|
||||
this.description = 'Add a sticky note'
|
||||
this.inputs = [
|
||||
{
|
||||
|
|
|
|||
|
|
@ -30,7 +30,6 @@ class Chroma_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity search upon query using Chroma, an open-source embedding database'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,129 +0,0 @@
|
|||
import { Chroma } from '@langchain/community/vectorstores/chroma'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { ChromaExtended } from './core'
|
||||
|
||||
/**
 * Flowise node that loads an existing Chroma collection (documents already
 * upserted) and exposes it either as a retriever or as the vector store
 * itself. Marked DEPRECATING in the UI.
 */
class Chroma_Existing_VectorStores implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    badge: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams
    outputs: INodeOutputsValue[]

    constructor() {
        this.label = 'Chroma Load Existing Index'
        this.name = 'chromaExistingIndex'
        this.version = 1.0
        this.type = 'Chroma'
        this.icon = 'chroma.svg'
        this.category = 'Vector Stores'
        this.description = 'Load existing index from Chroma (i.e: Document has been upserted)'
        this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
        this.badge = 'DEPRECATING'
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            description: 'Only needed if you have chroma on cloud services with X-Api-key',
            optional: true,
            credentialNames: ['chromaApi']
        }
        this.inputs = [
            {
                label: 'Embeddings',
                name: 'embeddings',
                type: 'Embeddings'
            },
            {
                label: 'Collection Name',
                name: 'collectionName',
                type: 'string'
            },
            {
                label: 'Chroma URL',
                name: 'chromaURL',
                type: 'string',
                optional: true
            },
            {
                label: 'Chroma Metadata Filter',
                name: 'chromaMetadataFilter',
                type: 'json',
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top K',
                name: 'topK',
                description: 'Number of top results to fetch. Default to 4',
                placeholder: '4',
                type: 'number',
                additionalParams: true,
                optional: true
            }
        ]
        this.outputs = [
            {
                label: 'Chroma Retriever',
                name: 'retriever',
                baseClasses: this.baseClasses
            },
            {
                label: 'Chroma Vector Store',
                name: 'vectorStore',
                baseClasses: [this.type, ...getBaseClasses(Chroma)]
            }
        ]
    }

    /**
     * Connect to the existing Chroma collection and return either a retriever
     * (with top-K `k`) or the vector store itself, depending on the selected
     * output.
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const collectionName = nodeData.inputs?.collectionName as string
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const chromaURL = nodeData.inputs?.chromaURL as string
        const output = nodeData.outputs?.output as string
        const topK = nodeData.inputs?.topK as string
        // topK arrives as a string input; default to 4 results.
        const k = topK ? parseFloat(topK) : 4

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const chromaApiKey = getCredentialParam('chromaApiKey', credentialData, nodeData)

        const chromaMetadataFilter = nodeData.inputs?.chromaMetadataFilter

        // Only include optional connection fields when they were provided.
        const obj: {
            collectionName: string
            url?: string
            chromaApiKey?: string
            filter?: object | undefined
        } = { collectionName }
        if (chromaURL) obj.url = chromaURL
        if (chromaApiKey) obj.chromaApiKey = chromaApiKey
        if (chromaMetadataFilter) {
            // Filter may be a JSON string (from the UI) or an object.
            const metadatafilter = typeof chromaMetadataFilter === 'object' ? chromaMetadataFilter : JSON.parse(chromaMetadataFilter)
            obj.filter = metadatafilter
        }

        const vectorStore = await ChromaExtended.fromExistingCollection(embeddings, obj)

        if (output === 'retriever') {
            const retriever = vectorStore.asRetriever(k)
            return retriever
        } else if (output === 'vectorStore') {
            // When returning the store directly, stash k/filter on it so
            // downstream consumers use the same settings.
            ;(vectorStore as any).k = k
            if (chromaMetadataFilter) {
                ;(vectorStore as any).filter = obj.filter
            }
            return vectorStore
        }
        return vectorStore
    }
}
|
||||
|
||||
module.exports = { nodeClass: Chroma_Existing_VectorStores }
|
||||
|
|
@ -1,129 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Chroma } from '@langchain/community/vectorstores/chroma'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { ChromaExtended } from './core'
|
||||
|
||||
class ChromaUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Chroma Upsert Document'
|
||||
this.name = 'chromaUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'Chroma'
|
||||
this.icon = 'chroma.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Chroma'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Only needed if you have chroma on cloud services with X-Api-key',
|
||||
optional: true,
|
||||
credentialNames: ['chromaApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Collection Name',
|
||||
name: 'collectionName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Chroma URL',
|
||||
name: 'chromaURL',
|
||||
type: 'string',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Chroma Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Chroma Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(Chroma)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const collectionName = nodeData.inputs?.collectionName as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const chromaURL = nodeData.inputs?.chromaURL as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const chromaApiKey = getCredentialParam('chromaApiKey', credentialData, nodeData)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const obj: {
|
||||
collectionName: string
|
||||
url?: string
|
||||
chromaApiKey?: string
|
||||
} = { collectionName }
|
||||
if (chromaURL) obj.url = chromaURL
|
||||
if (chromaApiKey) obj.chromaApiKey = chromaApiKey
|
||||
|
||||
const vectorStore = await ChromaExtended.fromDocuments(finalDocs, embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: ChromaUpsert_VectorStores }
|
||||
|
|
@ -1,208 +0,0 @@
|
|||
import {
|
||||
getBaseClasses,
|
||||
getCredentialData,
|
||||
getCredentialParam,
|
||||
ICommonObject,
|
||||
INodeData,
|
||||
INodeOutputsValue,
|
||||
INodeParams
|
||||
} from '../../../src'
|
||||
import { Client, ClientOptions } from '@elastic/elasticsearch'
|
||||
import { ElasticClientArgs, ElasticVectorSearch } from '@langchain/community/vectorstores/elasticsearch'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
|
||||
export abstract class ElasticSearchBase {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
protected constructor() {
|
||||
this.type = 'Elasticsearch'
|
||||
this.icon = 'elasticsearch.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.badge = 'DEPRECATING'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['elasticsearchApi', 'elasticSearchUserPassword']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Index Name',
|
||||
name: 'indexName',
|
||||
placeholder: '<INDEX_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Similarity',
|
||||
name: 'similarity',
|
||||
description: 'Similarity measure used in Elasticsearch.',
|
||||
type: 'options',
|
||||
default: 'l2_norm',
|
||||
options: [
|
||||
{
|
||||
label: 'l2_norm',
|
||||
name: 'l2_norm'
|
||||
},
|
||||
{
|
||||
label: 'dot_product',
|
||||
name: 'dot_product'
|
||||
},
|
||||
{
|
||||
label: 'cosine',
|
||||
name: 'cosine'
|
||||
}
|
||||
],
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Elasticsearch Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Elasticsearch Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(ElasticVectorSearch)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
abstract constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
elasticSearchClientArgs: ElasticClientArgs,
|
||||
docs: Document<Record<string, any>>[] | undefined
|
||||
): Promise<VectorStore>
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject, docs: Document<Record<string, any>>[] | undefined): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const endPoint = getCredentialParam('endpoint', credentialData, nodeData)
|
||||
const cloudId = getCredentialParam('cloudId', credentialData, nodeData)
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const similarityMeasure = nodeData.inputs?.similarityMeasure as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
const output = nodeData.outputs?.output as string
|
||||
|
||||
const elasticSearchClientArgs = this.prepareClientArgs(endPoint, cloudId, credentialData, nodeData, similarityMeasure, indexName)
|
||||
|
||||
const vectorStore = await this.constructVectorStore(embeddings, elasticSearchClientArgs, docs)
|
||||
|
||||
if (output === 'retriever') {
|
||||
return vectorStore.asRetriever(k)
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
|
||||
protected prepareConnectionOptions(
|
||||
endPoint: string | undefined,
|
||||
cloudId: string | undefined,
|
||||
credentialData: ICommonObject,
|
||||
nodeData: INodeData
|
||||
) {
|
||||
let elasticSearchClientOptions: ClientOptions = {}
|
||||
if (endPoint) {
|
||||
let apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
elasticSearchClientOptions = {
|
||||
node: endPoint,
|
||||
auth: {
|
||||
apiKey: apiKey
|
||||
}
|
||||
}
|
||||
} else if (cloudId) {
|
||||
let username = getCredentialParam('username', credentialData, nodeData)
|
||||
let password = getCredentialParam('password', credentialData, nodeData)
|
||||
if (cloudId.startsWith('http')) {
|
||||
elasticSearchClientOptions = {
|
||||
node: cloudId,
|
||||
auth: {
|
||||
username: username,
|
||||
password: password
|
||||
},
|
||||
tls: {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
}
|
||||
} else {
|
||||
elasticSearchClientOptions = {
|
||||
cloud: {
|
||||
id: cloudId
|
||||
},
|
||||
auth: {
|
||||
username: username,
|
||||
password: password
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return elasticSearchClientOptions
|
||||
}
|
||||
|
||||
protected prepareClientArgs(
|
||||
endPoint: string | undefined,
|
||||
cloudId: string | undefined,
|
||||
credentialData: ICommonObject,
|
||||
nodeData: INodeData,
|
||||
similarityMeasure: string,
|
||||
indexName: string
|
||||
) {
|
||||
let elasticSearchClientOptions = this.prepareConnectionOptions(endPoint, cloudId, credentialData, nodeData)
|
||||
let vectorSearchOptions = {}
|
||||
switch (similarityMeasure) {
|
||||
case 'dot_product':
|
||||
vectorSearchOptions = {
|
||||
similarity: 'dot_product'
|
||||
}
|
||||
break
|
||||
case 'cosine':
|
||||
vectorSearchOptions = {
|
||||
similarity: 'cosine'
|
||||
}
|
||||
break
|
||||
default:
|
||||
vectorSearchOptions = {
|
||||
similarity: 'l2_norm'
|
||||
}
|
||||
}
|
||||
const elasticSearchClientArgs: ElasticClientArgs = {
|
||||
client: new Client(elasticSearchClientOptions),
|
||||
indexName: indexName,
|
||||
vectorSearchOptions: vectorSearchOptions
|
||||
}
|
||||
return elasticSearchClientArgs
|
||||
}
|
||||
}
|
||||
|
|
@ -31,7 +31,6 @@ class Elasticsearch_VectorStores implements INode {
|
|||
this.icon = 'elasticsearch.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,30 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { ElasticClientArgs, ElasticVectorSearch } from '@langchain/community/vectorstores/elasticsearch'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { ElasticSearchBase } from './ElasticSearchBase'
|
||||
import { ICommonObject, INode, INodeData } from '../../../src/Interface'
|
||||
|
||||
class ElasicsearchExisting_VectorStores extends ElasticSearchBase implements INode {
|
||||
constructor() {
|
||||
super()
|
||||
this.label = 'Elasticsearch Load Existing Index'
|
||||
this.name = 'ElasticsearchIndex'
|
||||
this.version = 1.0
|
||||
this.description = 'Load existing index from Elasticsearch (i.e: Document has been upserted)'
|
||||
}
|
||||
|
||||
async constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
elasticSearchClientArgs: ElasticClientArgs,
|
||||
_: Document<Record<string, any>>[] | undefined
|
||||
): Promise<VectorStore> {
|
||||
return await ElasticVectorSearch.fromExistingIndex(embeddings, elasticSearchClientArgs)
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
return super.init(nodeData, _, options, undefined)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: ElasicsearchExisting_VectorStores }
|
||||
|
|
@ -1,56 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { ElasticClientArgs, ElasticVectorSearch } from '@langchain/community/vectorstores/elasticsearch'
|
||||
import { ICommonObject, INode, INodeData } from '../../../src/Interface'
|
||||
import { ElasticSearchBase } from './ElasticSearchBase'
|
||||
|
||||
class ElasicsearchUpsert_VectorStores extends ElasticSearchBase implements INode {
|
||||
constructor() {
|
||||
super()
|
||||
this.label = 'Elasticsearch Upsert Document'
|
||||
this.name = 'ElasticsearchUpsert'
|
||||
this.version = 1.0
|
||||
this.description = 'Upsert documents to Elasticsearch'
|
||||
this.inputs.unshift({
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
})
|
||||
}
|
||||
|
||||
async constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
elasticSearchClientArgs: ElasticClientArgs,
|
||||
docs: Document<Record<string, any>>[]
|
||||
): Promise<VectorStore> {
|
||||
const vectorStore = new ElasticVectorSearch(embeddings, elasticSearchClientArgs)
|
||||
await vectorStore.addDocuments(docs)
|
||||
return vectorStore
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
// The following code is a workaround for a bug (Langchain Issue #1589) in the underlying library.
|
||||
// Store does not support object in metadata and fail silently
|
||||
finalDocs.forEach((d) => {
|
||||
delete d.metadata.pdf
|
||||
delete d.metadata.loc
|
||||
})
|
||||
// end of workaround
|
||||
return super.init(nodeData, _, options, finalDocs)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: ElasicsearchUpsert_VectorStores }
|
||||
|
|
@ -27,7 +27,6 @@ class Faiss_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity search upon query using Faiss library from Meta'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
|
|
|
|||
|
|
@ -1,104 +0,0 @@
|
|||
import { FaissStore } from '@langchain/community/vectorstores/faiss'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
|
||||
class Faiss_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Faiss Load Existing Index'
|
||||
this.name = 'faissExistingIndex'
|
||||
this.version = 1.0
|
||||
this.type = 'Faiss'
|
||||
this.icon = 'faiss.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Faiss (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Base Path to load',
|
||||
name: 'basePath',
|
||||
description: 'Path to load faiss.index file',
|
||||
placeholder: `C:\\Users\\User\\Desktop`,
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Faiss Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Faiss Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(FaissStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const basePath = nodeData.inputs?.basePath as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const vectorStore = await FaissStore.load(basePath, embeddings)
|
||||
|
||||
// Avoid illegal invocation error
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number) => {
|
||||
const index = vectorStore.index
|
||||
|
||||
if (k > index.ntotal()) {
|
||||
const total = index.ntotal()
|
||||
console.warn(`k (${k}) is greater than the number of elements in the index (${total}), setting k to ${total}`)
|
||||
k = total
|
||||
}
|
||||
|
||||
const result = index.search(query, k)
|
||||
return result.labels.map((id, index) => {
|
||||
const uuid = vectorStore._mapping[id]
|
||||
return [vectorStore.docstore.search(uuid), result.distances[index]] as [Document, number]
|
||||
})
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Faiss_Existing_VectorStores }
|
||||
|
|
@ -1,121 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { FaissStore } from '@langchain/community/vectorstores/faiss'
|
||||
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
|
||||
class FaissUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Faiss Upsert Document'
|
||||
this.name = 'faissUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'Faiss'
|
||||
this.icon = 'faiss.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Faiss'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Base Path to store',
|
||||
name: 'basePath',
|
||||
description: 'Path to store faiss.index file',
|
||||
placeholder: `C:\\Users\\User\\Desktop`,
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Faiss Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Faiss Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(FaissStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const basePath = nodeData.inputs?.basePath as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const vectorStore = await FaissStore.fromDocuments(finalDocs, embeddings)
|
||||
await vectorStore.save(basePath)
|
||||
|
||||
// Avoid illegal invocation error
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number) => {
|
||||
const index = vectorStore.index
|
||||
|
||||
if (k > index.ntotal()) {
|
||||
const total = index.ntotal()
|
||||
console.warn(`k (${k}) is greater than the number of elements in the index (${total}), setting k to ${total}`)
|
||||
k = total
|
||||
}
|
||||
|
||||
const result = index.search(query, k)
|
||||
return result.labels.map((id, index) => {
|
||||
const uuid = vectorStore._mapping[id]
|
||||
return [vectorStore.docstore.search(uuid), result.distances[index]] as [Document, number]
|
||||
})
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: FaissUpsert_VectorStores }
|
||||
|
|
@ -33,7 +33,6 @@ class Milvus_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = `Upsert embedded data and perform similarity search upon query using Milvus, world's most advanced open-source vector database`
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,210 +0,0 @@
|
|||
import { DataType, ErrorCode } from '@zilliz/milvus2-sdk-node'
|
||||
import { MilvusLibArgs, Milvus } from '@langchain/community/vectorstores/milvus'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class Milvus_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Milvus Load Existing collection'
|
||||
this.name = 'milvusExistingCollection'
|
||||
this.version = 2.0
|
||||
this.type = 'Milvus'
|
||||
this.icon = 'milvus.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing collection from Milvus (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
optional: true,
|
||||
credentialNames: ['milvusAuth']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Milvus Server URL',
|
||||
name: 'milvusServerUrl',
|
||||
type: 'string',
|
||||
placeholder: 'http://localhost:19530'
|
||||
},
|
||||
{
|
||||
label: 'Milvus Collection Name',
|
||||
name: 'milvusCollection',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Milvus Filter',
|
||||
name: 'milvusFilter',
|
||||
type: 'string',
|
||||
optional: true,
|
||||
description:
|
||||
'Filter data with a simple string query. Refer Milvus <a target="_blank" href="https://milvus.io/blog/2022-08-08-How-to-use-string-data-to-empower-your-similarity-search-applications.md#Hybrid-search">docs</a> for more details.',
|
||||
placeholder: 'doc=="a"',
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Milvus Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Milvus Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(Milvus)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
// server setup
|
||||
const address = nodeData.inputs?.milvusServerUrl as string
|
||||
const collectionName = nodeData.inputs?.milvusCollection as string
|
||||
const milvusFilter = nodeData.inputs?.milvusFilter as string
|
||||
|
||||
// embeddings
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
|
||||
// output
|
||||
const output = nodeData.outputs?.output as string
|
||||
|
||||
// format data
|
||||
const k = topK ? parseInt(topK, 10) : 4
|
||||
|
||||
// credential
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const milvusUser = getCredentialParam('milvusUser', credentialData, nodeData)
|
||||
const milvusPassword = getCredentialParam('milvusPassword', credentialData, nodeData)
|
||||
|
||||
// init MilvusLibArgs
|
||||
const milVusArgs: MilvusLibArgs = {
|
||||
url: address,
|
||||
collectionName: collectionName
|
||||
}
|
||||
|
||||
if (milvusUser) milVusArgs.username = milvusUser
|
||||
if (milvusPassword) milVusArgs.password = milvusPassword
|
||||
|
||||
const vectorStore = await Milvus.fromExistingCollection(embeddings, milVusArgs)
|
||||
|
||||
// Avoid Illegal Invocation
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: string) => {
|
||||
const hasColResp = await vectorStore.client.hasCollection({
|
||||
collection_name: vectorStore.collectionName
|
||||
})
|
||||
if (hasColResp.status.error_code !== ErrorCode.SUCCESS) {
|
||||
throw new Error(`Error checking collection: ${hasColResp}`)
|
||||
}
|
||||
if (hasColResp.value === false) {
|
||||
throw new Error(`Collection not found: ${vectorStore.collectionName}, please create collection before search.`)
|
||||
}
|
||||
|
||||
const filterStr = milvusFilter ?? filter ?? ''
|
||||
|
||||
await vectorStore.grabCollectionFields()
|
||||
|
||||
const loadResp = await vectorStore.client.loadCollectionSync({
|
||||
collection_name: vectorStore.collectionName
|
||||
})
|
||||
|
||||
if (loadResp.error_code !== ErrorCode.SUCCESS) {
|
||||
throw new Error(`Error loading collection: ${loadResp}`)
|
||||
}
|
||||
|
||||
const outputFields = vectorStore.fields.filter((field) => field !== vectorStore.vectorField)
|
||||
|
||||
const searchResp = await vectorStore.client.search({
|
||||
collection_name: vectorStore.collectionName,
|
||||
search_params: {
|
||||
anns_field: vectorStore.vectorField,
|
||||
topk: k.toString(),
|
||||
metric_type: vectorStore.indexCreateParams.metric_type,
|
||||
params: vectorStore.indexSearchParams
|
||||
},
|
||||
output_fields: outputFields,
|
||||
vector_type: DataType.FloatVector,
|
||||
vectors: [query],
|
||||
filter: filterStr
|
||||
})
|
||||
if (searchResp.status.error_code !== ErrorCode.SUCCESS) {
|
||||
throw new Error(`Error searching data: ${JSON.stringify(searchResp)}`)
|
||||
}
|
||||
const results: [Document, number][] = []
|
||||
searchResp.results.forEach((result) => {
|
||||
const fields = {
|
||||
pageContent: '',
|
||||
metadata: {} as Record<string, any>
|
||||
}
|
||||
Object.keys(result).forEach((key) => {
|
||||
if (key === vectorStore.textField) {
|
||||
fields.pageContent = result[key]
|
||||
} else if (vectorStore.fields.includes(key) || key === vectorStore.primaryField) {
|
||||
if (typeof result[key] === 'string') {
|
||||
const { isJson, obj } = checkJsonString(result[key])
|
||||
fields.metadata[key] = isJson ? obj : result[key]
|
||||
} else {
|
||||
fields.metadata[key] = result[key]
|
||||
}
|
||||
}
|
||||
})
|
||||
results.push([new Document(fields), result.score])
|
||||
})
|
||||
return results
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
if (milvusFilter) {
|
||||
;(vectorStore as any).filter = milvusFilter
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
function checkJsonString(value: string): { isJson: boolean; obj: any } {
|
||||
try {
|
||||
const result = JSON.parse(value)
|
||||
return { isJson: true, obj: result }
|
||||
} catch (e) {
|
||||
return { isJson: false, obj: null }
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Milvus_Existing_VectorStores }
|
||||
|
|
@ -1,285 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { DataType, ErrorCode, MetricType, IndexType } from '@zilliz/milvus2-sdk-node'
|
||||
import { MilvusLibArgs, Milvus } from '@langchain/community/vectorstores/milvus'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
// A single row to insert into Milvus: arbitrary field names mapped to either
// a raw string value or an embedding vector.
interface InsertRow {
    [x: string]: string | number[]
}
|
||||
|
||||
class Milvus_Upsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Milvus Upsert Document'
|
||||
this.name = 'milvusUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'Milvus'
|
||||
this.icon = 'milvus.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Milvus'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
optional: true,
|
||||
credentialNames: ['milvusAuth']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Milvus Server URL',
|
||||
name: 'milvusServerUrl',
|
||||
type: 'string',
|
||||
placeholder: 'http://localhost:19530'
|
||||
},
|
||||
{
|
||||
label: 'Milvus Collection Name',
|
||||
name: 'milvusCollection',
|
||||
type: 'string'
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Milvus Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Milvus Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(Milvus)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
// server setup
|
||||
const address = nodeData.inputs?.milvusServerUrl as string
|
||||
const collectionName = nodeData.inputs?.milvusCollection as string
|
||||
|
||||
// embeddings
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
|
||||
// output
|
||||
const output = nodeData.outputs?.output as string
|
||||
|
||||
// format data
|
||||
const k = topK ? parseInt(topK, 10) : 4
|
||||
|
||||
// credential
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const milvusUser = getCredentialParam('milvusUser', credentialData, nodeData)
|
||||
const milvusPassword = getCredentialParam('milvusPassword', credentialData, nodeData)
|
||||
|
||||
// init MilvusLibArgs
|
||||
const milVusArgs: MilvusLibArgs = {
|
||||
url: address,
|
||||
collectionName: collectionName
|
||||
}
|
||||
|
||||
if (milvusUser) milVusArgs.username = milvusUser
|
||||
if (milvusPassword) milVusArgs.password = milvusPassword
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const vectorStore = await MilvusUpsert.fromDocuments(finalDocs, embeddings, milVusArgs)
|
||||
|
||||
// Avoid Illegal Invocation
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: string) => {
|
||||
const hasColResp = await vectorStore.client.hasCollection({
|
||||
collection_name: vectorStore.collectionName
|
||||
})
|
||||
if (hasColResp.status.error_code !== ErrorCode.SUCCESS) {
|
||||
throw new Error(`Error checking collection: ${hasColResp}`)
|
||||
}
|
||||
if (hasColResp.value === false) {
|
||||
throw new Error(`Collection not found: ${vectorStore.collectionName}, please create collection before search.`)
|
||||
}
|
||||
|
||||
const filterStr = filter ?? ''
|
||||
|
||||
await vectorStore.grabCollectionFields()
|
||||
|
||||
const loadResp = await vectorStore.client.loadCollectionSync({
|
||||
collection_name: vectorStore.collectionName
|
||||
})
|
||||
if (loadResp.error_code !== ErrorCode.SUCCESS) {
|
||||
throw new Error(`Error loading collection: ${loadResp}`)
|
||||
}
|
||||
|
||||
const outputFields = vectorStore.fields.filter((field) => field !== vectorStore.vectorField)
|
||||
|
||||
const searchResp = await vectorStore.client.search({
|
||||
collection_name: vectorStore.collectionName,
|
||||
search_params: {
|
||||
anns_field: vectorStore.vectorField,
|
||||
topk: k.toString(),
|
||||
metric_type: vectorStore.indexCreateParams.metric_type,
|
||||
params: vectorStore.indexSearchParams
|
||||
},
|
||||
output_fields: outputFields,
|
||||
vector_type: DataType.FloatVector,
|
||||
vectors: [query],
|
||||
filter: filterStr
|
||||
})
|
||||
if (searchResp.status.error_code !== ErrorCode.SUCCESS) {
|
||||
throw new Error(`Error searching data: ${JSON.stringify(searchResp)}`)
|
||||
}
|
||||
const results: [Document, number][] = []
|
||||
searchResp.results.forEach((result) => {
|
||||
const fields = {
|
||||
pageContent: '',
|
||||
metadata: {} as Record<string, any>
|
||||
}
|
||||
Object.keys(result).forEach((key) => {
|
||||
if (key === vectorStore.textField) {
|
||||
fields.pageContent = result[key]
|
||||
} else if (vectorStore.fields.includes(key) || key === vectorStore.primaryField) {
|
||||
if (typeof result[key] === 'string') {
|
||||
const { isJson, obj } = checkJsonString(result[key])
|
||||
fields.metadata[key] = isJson ? obj : result[key]
|
||||
} else {
|
||||
fields.metadata[key] = result[key]
|
||||
}
|
||||
}
|
||||
})
|
||||
results.push([new Document(fields), result.score])
|
||||
})
|
||||
return results
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
function checkJsonString(value: string): { isJson: boolean; obj: any } {
|
||||
try {
|
||||
const result = JSON.parse(value)
|
||||
return { isJson: true, obj: result }
|
||||
} catch (e) {
|
||||
return { isJson: false, obj: null }
|
||||
}
|
||||
}
|
||||
|
||||
class MilvusUpsert extends Milvus {
|
||||
async addVectors(vectors: number[][], documents: Document[]): Promise<void> {
|
||||
if (vectors.length === 0) {
|
||||
return
|
||||
}
|
||||
await this.ensureCollection(vectors, documents)
|
||||
|
||||
const insertDatas: InsertRow[] = []
|
||||
|
||||
for (let index = 0; index < vectors.length; index++) {
|
||||
const vec = vectors[index]
|
||||
const doc = documents[index]
|
||||
const data: InsertRow = {
|
||||
[this.textField]: doc.pageContent,
|
||||
[this.vectorField]: vec
|
||||
}
|
||||
this.fields.forEach((field) => {
|
||||
switch (field) {
|
||||
case this.primaryField:
|
||||
if (!this.autoId) {
|
||||
if (doc.metadata[this.primaryField] === undefined) {
|
||||
throw new Error(
|
||||
`The Collection's primaryField is configured with autoId=false, thus its value must be provided through metadata.`
|
||||
)
|
||||
}
|
||||
data[field] = doc.metadata[this.primaryField]
|
||||
}
|
||||
break
|
||||
case this.textField:
|
||||
data[field] = doc.pageContent
|
||||
break
|
||||
case this.vectorField:
|
||||
data[field] = vec
|
||||
break
|
||||
default: // metadata fields
|
||||
if (doc.metadata[field] === undefined) {
|
||||
throw new Error(`The field "${field}" is not provided in documents[${index}].metadata.`)
|
||||
} else if (typeof doc.metadata[field] === 'object') {
|
||||
data[field] = JSON.stringify(doc.metadata[field])
|
||||
} else {
|
||||
data[field] = doc.metadata[field]
|
||||
}
|
||||
break
|
||||
}
|
||||
})
|
||||
|
||||
insertDatas.push(data)
|
||||
}
|
||||
|
||||
const descIndexResp = await this.client.describeIndex({
|
||||
collection_name: this.collectionName
|
||||
})
|
||||
|
||||
if (descIndexResp.status.error_code === ErrorCode.IndexNotExist) {
|
||||
const resp = await this.client.createIndex({
|
||||
collection_name: this.collectionName,
|
||||
field_name: this.vectorField,
|
||||
index_name: `myindex_${Date.now().toString()}`,
|
||||
index_type: IndexType.AUTOINDEX,
|
||||
metric_type: MetricType.L2
|
||||
})
|
||||
if (resp.error_code !== ErrorCode.SUCCESS) {
|
||||
throw new Error(`Error creating index`)
|
||||
}
|
||||
}
|
||||
|
||||
const insertResp = await this.client.insert({
|
||||
collection_name: this.collectionName,
|
||||
fields_data: insertDatas
|
||||
})
|
||||
|
||||
if (insertResp.status.error_code !== ErrorCode.SUCCESS) {
|
||||
throw new Error(`Error inserting data: ${JSON.stringify(insertResp)}`)
|
||||
}
|
||||
|
||||
await this.client.flushSync({ collection_names: [this.collectionName] })
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Milvus_Upsert_VectorStores }
|
||||
|
|
@ -4,7 +4,7 @@ import { MongoDBAtlasVectorSearch } from '@langchain/mongodb'
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams, IndexingResult } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam, getVersion } from '../../../src/utils'
|
||||
import { addMMRInputParams, resolveVectorStoreOrRetriever } from '../VectorStoreUtils'
|
||||
|
||||
class MongoDBAtlas_VectorStores implements INode {
|
||||
|
|
@ -30,7 +30,6 @@ class MongoDBAtlas_VectorStores implements INode {
|
|||
this.icon = 'mongodb.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
@ -192,15 +191,17 @@ let mongoClientSingleton: MongoClient
|
|||
let mongoUrl: string
|
||||
|
||||
const getMongoClient = async (newMongoUrl: string) => {
|
||||
const driverInfo = { name: 'Flowise', version: (await getVersion()).version }
|
||||
|
||||
if (!mongoClientSingleton) {
|
||||
// if client does not exist
|
||||
mongoClientSingleton = new MongoClient(newMongoUrl)
|
||||
mongoClientSingleton = new MongoClient(newMongoUrl, { driverInfo })
|
||||
mongoUrl = newMongoUrl
|
||||
return mongoClientSingleton
|
||||
} else if (mongoClientSingleton && newMongoUrl !== mongoUrl) {
|
||||
// if client exists but url changed
|
||||
mongoClientSingleton.close()
|
||||
mongoClientSingleton = new MongoClient(newMongoUrl)
|
||||
mongoClientSingleton = new MongoClient(newMongoUrl, { driverInfo })
|
||||
mongoUrl = newMongoUrl
|
||||
return mongoClientSingleton
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,146 +0,0 @@
|
|||
import {
|
||||
getBaseClasses,
|
||||
getCredentialData,
|
||||
getCredentialParam,
|
||||
ICommonObject,
|
||||
INodeData,
|
||||
INodeOutputsValue,
|
||||
INodeParams
|
||||
} from '../../../src'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { MongoDBAtlasVectorSearch } from '@langchain/community/vectorstores/mongodb_atlas'
|
||||
import { Collection, MongoClient } from 'mongodb'
|
||||
|
||||
export abstract class MongoDBSearchBase {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
mongoClient: MongoClient
|
||||
|
||||
protected constructor() {
|
||||
this.type = 'MongoDB Atlas'
|
||||
this.icon = 'mongodb.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['mongoDBUrlApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'databaseName',
|
||||
placeholder: '<DB_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Collection Name',
|
||||
name: 'collectionName',
|
||||
placeholder: '<COLLECTION_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Index Name',
|
||||
name: 'indexName',
|
||||
placeholder: '<VECTOR_INDEX_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Content Field',
|
||||
name: 'textKey',
|
||||
description: 'Name of the field (column) that contains the actual content',
|
||||
type: 'string',
|
||||
default: 'text',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embedded Field',
|
||||
name: 'embeddingKey',
|
||||
description: 'Name of the field (column) that contains the Embedding',
|
||||
type: 'string',
|
||||
default: 'embedding',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'MongoDB Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'MongoDB Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(MongoDBAtlasVectorSearch)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
abstract constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
collection: Collection,
|
||||
indexName: string,
|
||||
textKey: string,
|
||||
embeddingKey: string,
|
||||
docs: Document<Record<string, any>>[] | undefined
|
||||
): Promise<VectorStore>
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject, docs: Document<Record<string, any>>[] | undefined): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const databaseName = nodeData.inputs?.databaseName as string
|
||||
const collectionName = nodeData.inputs?.collectionName as string
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
let textKey = nodeData.inputs?.textKey as string
|
||||
let embeddingKey = nodeData.inputs?.embeddingKey as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
const output = nodeData.outputs?.output as string
|
||||
|
||||
let mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData)
|
||||
|
||||
this.mongoClient = new MongoClient(mongoDBConnectUrl)
|
||||
const collection = this.mongoClient.db(databaseName).collection(collectionName)
|
||||
if (!textKey || textKey === '') textKey = 'text'
|
||||
if (!embeddingKey || embeddingKey === '') embeddingKey = 'embedding'
|
||||
const vectorStore = await this.constructVectorStore(embeddings, collection, indexName, textKey, embeddingKey, docs)
|
||||
|
||||
if (output === 'retriever') {
|
||||
return vectorStore.asRetriever(k)
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
|
@ -1,39 +0,0 @@
|
|||
import { Collection } from 'mongodb'
|
||||
import { MongoDBAtlasVectorSearch } from '@langchain/community/vectorstores/mongodb_atlas'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { MongoDBSearchBase } from './MongoDBSearchBase'
|
||||
import { ICommonObject, INode, INodeData } from '../../../src/Interface'
|
||||
|
||||
class MongoDBExisting_VectorStores extends MongoDBSearchBase implements INode {
|
||||
constructor() {
|
||||
super()
|
||||
this.label = 'MongoDB Atlas Load Existing Index'
|
||||
this.name = 'MongoDBIndex'
|
||||
this.version = 1.0
|
||||
this.description = 'Load existing data from MongoDB Atlas (i.e: Document has been upserted)'
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
return super.init(nodeData, _, options, undefined)
|
||||
}
|
||||
|
||||
async constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
collection: Collection,
|
||||
indexName: string,
|
||||
textKey: string,
|
||||
embeddingKey: string,
|
||||
_: Document<Record<string, any>>[] | undefined
|
||||
): Promise<VectorStore> {
|
||||
return new MongoDBAtlasVectorSearch(embeddings, {
|
||||
collection: collection,
|
||||
indexName: indexName,
|
||||
textKey: textKey,
|
||||
embeddingKey: embeddingKey
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: MongoDBExisting_VectorStores }
|
||||
|
|
@ -1,59 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Collection } from 'mongodb'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { MongoDBAtlasVectorSearch } from '@langchain/community/vectorstores/mongodb_atlas'
|
||||
import { ICommonObject, INode, INodeData } from '../../../src/Interface'
|
||||
import { MongoDBSearchBase } from './MongoDBSearchBase'
|
||||
|
||||
class MongoDBUpsert_VectorStores extends MongoDBSearchBase implements INode {
|
||||
constructor() {
|
||||
super()
|
||||
this.label = 'MongoDB Atlas Upsert Document'
|
||||
this.name = 'MongoDBUpsert'
|
||||
this.version = 1.0
|
||||
this.description = 'Upsert documents to MongoDB Atlas'
|
||||
this.inputs.unshift({
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
})
|
||||
}
|
||||
|
||||
async constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
collection: Collection,
|
||||
indexName: string,
|
||||
textKey: string,
|
||||
embeddingKey: string,
|
||||
docs: Document<Record<string, any>>[]
|
||||
): Promise<VectorStore> {
|
||||
const mongoDBAtlasVectorSearch = new MongoDBAtlasVectorSearch(embeddings, {
|
||||
collection: collection,
|
||||
indexName: indexName,
|
||||
textKey: textKey,
|
||||
embeddingKey: embeddingKey
|
||||
})
|
||||
await mongoDBAtlasVectorSearch.addDocuments(docs)
|
||||
return mongoDBAtlasVectorSearch
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
const document = new Document(flattenDocs[i])
|
||||
finalDocs.push(document)
|
||||
}
|
||||
}
|
||||
|
||||
return super.init(nodeData, _, options, finalDocs)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: MongoDBUpsert_VectorStores }
|
||||
|
|
@ -29,7 +29,6 @@ class OpenSearch_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = `Upsert embedded data and perform similarity search upon query using OpenSearch, an open-source, all-in-one vector database`
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,116 +0,0 @@
|
|||
import { OpenSearchVectorStore } from '@langchain/community/vectorstores/opensearch'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { Client } from '@opensearch-project/opensearch'
|
||||
import { flatten } from 'lodash'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class OpenSearchUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'OpenSearch Upsert Document'
|
||||
this.name = 'openSearchUpsertDocument'
|
||||
this.version = 1.0
|
||||
this.type = 'OpenSearch'
|
||||
this.icon = 'opensearch.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to OpenSearch'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'OpenSearch URL',
|
||||
name: 'opensearchURL',
|
||||
type: 'string',
|
||||
placeholder: 'http://127.0.0.1:9200'
|
||||
},
|
||||
{
|
||||
label: 'Index Name',
|
||||
name: 'indexName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'OpenSearch Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'OpenSearch Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(OpenSearchVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const opensearchURL = nodeData.inputs?.opensearchURL as string
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
nodes: [opensearchURL]
|
||||
})
|
||||
|
||||
const vectorStore = await OpenSearchVectorStore.fromDocuments(finalDocs, embeddings, {
|
||||
client,
|
||||
indexName: indexName
|
||||
})
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: OpenSearchUpsert_VectorStores }
|
||||
|
|
@ -1,99 +0,0 @@
|
|||
import { OpenSearchVectorStore } from '@langchain/community/vectorstores/opensearch'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Client } from '@opensearch-project/opensearch'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class OpenSearch_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'OpenSearch Load Existing Index'
|
||||
this.name = 'openSearchExistingIndex'
|
||||
this.version = 1.0
|
||||
this.type = 'OpenSearch'
|
||||
this.icon = 'opensearch.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from OpenSearch (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'OpenSearch URL',
|
||||
name: 'opensearchURL',
|
||||
type: 'string',
|
||||
placeholder: 'http://127.0.0.1:9200'
|
||||
},
|
||||
{
|
||||
label: 'Index Name',
|
||||
name: 'indexName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'OpenSearch Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'OpenSearch Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(OpenSearchVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const opensearchURL = nodeData.inputs?.opensearchURL as string
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const client = new Client({
|
||||
nodes: [opensearchURL]
|
||||
})
|
||||
|
||||
const vectorStore = new OpenSearchVectorStore(embeddings, {
|
||||
client,
|
||||
indexName
|
||||
})
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: OpenSearch_Existing_VectorStores }
|
||||
|
|
@ -48,7 +48,6 @@ class Pinecone_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = `Upsert embedded data and perform similarity or mmr search using Pinecone, a leading fully managed hosted vector database`
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,128 +0,0 @@
|
|||
import { Pinecone } from '@pinecone-database/pinecone'
|
||||
import { PineconeStoreParams, PineconeStore } from '@langchain/pinecone'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class Pinecone_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Pinecone Load Existing Index'
|
||||
this.name = 'pineconeExistingIndex'
|
||||
this.version = 1.0
|
||||
this.type = 'Pinecone'
|
||||
this.icon = 'pinecone.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Pinecone (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['pineconeApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Index',
|
||||
name: 'pineconeIndex',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Namespace',
|
||||
name: 'pineconeNamespace',
|
||||
type: 'string',
|
||||
placeholder: 'my-first-namespace',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Metadata Filter',
|
||||
name: 'pineconeMetadataFilter',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Pinecone Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(PineconeStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const index = nodeData.inputs?.pineconeIndex as string
|
||||
const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string
|
||||
const pineconeMetadataFilter = nodeData.inputs?.pineconeMetadataFilter
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData)
|
||||
|
||||
const client = new Pinecone({
|
||||
apiKey: pineconeApiKey
|
||||
})
|
||||
|
||||
const pineconeIndex = client.Index(index)
|
||||
|
||||
const obj: PineconeStoreParams = {
|
||||
pineconeIndex
|
||||
}
|
||||
|
||||
if (pineconeNamespace) obj.namespace = pineconeNamespace
|
||||
if (pineconeMetadataFilter) {
|
||||
const metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : JSON.parse(pineconeMetadataFilter)
|
||||
obj.filter = metadatafilter
|
||||
}
|
||||
|
||||
const vectorStore = await PineconeStore.fromExistingIndex(embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Pinecone_Existing_VectorStores }
|
||||
|
|
@ -1,133 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Pinecone } from '@pinecone-database/pinecone'
|
||||
import { PineconeStoreParams, PineconeStore } from '@langchain/pinecone'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class PineconeUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Pinecone Upsert Document'
|
||||
this.name = 'pineconeUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'Pinecone'
|
||||
this.icon = 'pinecone.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Pinecone'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['pineconeApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Index',
|
||||
name: 'pineconeIndex',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Namespace',
|
||||
name: 'pineconeNamespace',
|
||||
type: 'string',
|
||||
placeholder: 'my-first-namespace',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Pinecone Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(PineconeStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const index = nodeData.inputs?.pineconeIndex as string
|
||||
const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData)
|
||||
|
||||
const client = new Pinecone({
|
||||
apiKey: pineconeApiKey
|
||||
})
|
||||
|
||||
const pineconeIndex = client.Index(index)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const obj: PineconeStoreParams = {
|
||||
pineconeIndex
|
||||
}
|
||||
|
||||
if (pineconeNamespace) obj.namespace = pineconeNamespace
|
||||
|
||||
const vectorStore = await PineconeStore.fromDocuments(finalDocs, embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: PineconeUpsert_VectorStores }
|
||||
|
|
@ -31,7 +31,6 @@ class Postgres_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity search upon query using pgvector on Postgres'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,202 +0,0 @@
|
|||
import { DataSourceOptions } from 'typeorm'
|
||||
import { Pool } from 'pg'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { TypeORMVectorStore, TypeORMVectorStoreDocument } from '@langchain/community/vectorstores/typeorm'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class Postgres_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Postgres Load Existing Index'
|
||||
this.name = 'postgresExistingIndex'
|
||||
this.version = 2.0
|
||||
this.type = 'Postgres'
|
||||
this.icon = 'postgres.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Postgres using pgvector (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['PostgresApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Host',
|
||||
name: 'host',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'database',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'SSL Connection',
|
||||
name: 'sslConnection',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
optional: false
|
||||
},
|
||||
{
|
||||
label: 'Port',
|
||||
name: 'port',
|
||||
type: 'number',
|
||||
placeholder: '6432',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string',
|
||||
placeholder: 'documents',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Additional Configuration',
|
||||
name: 'additionalConfig',
|
||||
type: 'json',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Postgres Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Postgres Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(TypeORMVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const user = getCredentialParam('user', credentialData, nodeData)
|
||||
const password = getCredentialParam('password', credentialData, nodeData)
|
||||
const _tableName = nodeData.inputs?.tableName as string
|
||||
const tableName = _tableName ? _tableName : 'documents'
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const additionalConfig = nodeData.inputs?.additionalConfig as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
const sslConnection = nodeData.inputs?.sslConnection as boolean
|
||||
|
||||
let additionalConfiguration = {}
|
||||
if (additionalConfig) {
|
||||
try {
|
||||
additionalConfiguration = typeof additionalConfig === 'object' ? additionalConfig : JSON.parse(additionalConfig)
|
||||
} catch (exception) {
|
||||
throw new Error('Invalid JSON in the Additional Configuration: ' + exception)
|
||||
}
|
||||
}
|
||||
|
||||
const postgresConnectionOptions = {
|
||||
...additionalConfiguration,
|
||||
type: 'postgres',
|
||||
host: nodeData.inputs?.host as string,
|
||||
port: nodeData.inputs?.port as number,
|
||||
username: user,
|
||||
password: password,
|
||||
database: nodeData.inputs?.database as string,
|
||||
ssl: sslConnection
|
||||
}
|
||||
|
||||
const args = {
|
||||
postgresConnectionOptions: postgresConnectionOptions as DataSourceOptions,
|
||||
tableName: tableName
|
||||
}
|
||||
|
||||
const vectorStore = await TypeORMVectorStore.fromDataSource(embeddings, args)
|
||||
|
||||
// Rewrite the method to use pg pool connection instead of the default connection
|
||||
/* Otherwise a connection error is displayed when the chain tries to execute the function
|
||||
[chain/start] [1:chain:ConversationalRetrievalQAChain] Entering Chain run with input: { "question": "what the document is about", "chat_history": [] }
|
||||
[retriever/start] [1:chain:ConversationalRetrievalQAChain > 2:retriever:VectorStoreRetriever] Entering Retriever run with input: { "query": "what the document is about" }
|
||||
[ERROR]: uncaughtException: Illegal invocation TypeError: Illegal invocation at Socket.ref (node:net:1524:18) at Connection.ref (.../node_modules/pg/lib/connection.js:183:17) at Client.ref (.../node_modules/pg/lib/client.js:591:21) at BoundPool._pulseQueue (/node_modules/pg-pool/index.js:148:28) at .../node_modules/pg-pool/index.js:184:37 at process.processTicksAndRejections (node:internal/process/task_queues:77:11)
|
||||
*/
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => {
|
||||
const embeddingString = `[${query.join(',')}]`
|
||||
const _filter = filter ?? '{}'
|
||||
|
||||
const queryString = `
|
||||
SELECT *, embedding <=> $1 as "_distance"
|
||||
FROM ${tableName}
|
||||
WHERE metadata @> $2
|
||||
ORDER BY "_distance" ASC
|
||||
LIMIT $3;`
|
||||
|
||||
const poolOptions = {
|
||||
host: postgresConnectionOptions.host,
|
||||
port: postgresConnectionOptions.port,
|
||||
user: postgresConnectionOptions.username,
|
||||
password: postgresConnectionOptions.password,
|
||||
database: postgresConnectionOptions.database
|
||||
}
|
||||
const pool = new Pool(poolOptions)
|
||||
const conn = await pool.connect()
|
||||
|
||||
const documents = await conn.query(queryString, [embeddingString, _filter, k])
|
||||
|
||||
conn.release()
|
||||
|
||||
const results = [] as [TypeORMVectorStoreDocument, number][]
|
||||
for (const doc of documents.rows) {
|
||||
if (doc._distance != null && doc.pageContent != null) {
|
||||
const document = new Document(doc) as TypeORMVectorStoreDocument
|
||||
document.id = doc.id
|
||||
results.push([document, doc._distance])
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Postgres_Existing_VectorStores }
|
||||
|
|
@ -1,218 +0,0 @@
|
|||
import { DataSourceOptions } from 'typeorm'
|
||||
import { flatten } from 'lodash'
|
||||
import { Pool } from 'pg'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { TypeORMVectorStore, TypeORMVectorStoreDocument } from '@langchain/community/vectorstores/typeorm'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class PostgresUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Postgres Upsert Document'
|
||||
this.name = 'postgresUpsert'
|
||||
this.version = 2.0
|
||||
this.type = 'Postgres'
|
||||
this.icon = 'postgres.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Postgres using pgvector'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['PostgresApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Host',
|
||||
name: 'host',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'database',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'SSL Connection',
|
||||
name: 'sslConnection',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
optional: false
|
||||
},
|
||||
{
|
||||
label: 'Port',
|
||||
name: 'port',
|
||||
type: 'number',
|
||||
placeholder: '6432',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string',
|
||||
placeholder: 'documents',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Additional Configuration',
|
||||
name: 'additionalConfig',
|
||||
type: 'json',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Postgres Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Postgres Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(TypeORMVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const user = getCredentialParam('user', credentialData, nodeData)
|
||||
const password = getCredentialParam('password', credentialData, nodeData)
|
||||
const _tableName = nodeData.inputs?.tableName as string
|
||||
const tableName = _tableName ? _tableName : 'documents'
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const additionalConfig = nodeData.inputs?.additionalConfig as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
const sslConnection = nodeData.inputs?.sslConnection as boolean
|
||||
|
||||
let additionalConfiguration = {}
|
||||
if (additionalConfig) {
|
||||
try {
|
||||
additionalConfiguration = typeof additionalConfig === 'object' ? additionalConfig : JSON.parse(additionalConfig)
|
||||
} catch (exception) {
|
||||
throw new Error('Invalid JSON in the Additional Configuration: ' + exception)
|
||||
}
|
||||
}
|
||||
|
||||
const postgresConnectionOptions = {
|
||||
...additionalConfiguration,
|
||||
type: 'postgres',
|
||||
host: nodeData.inputs?.host as string,
|
||||
port: nodeData.inputs?.port as number,
|
||||
username: user,
|
||||
password: password,
|
||||
database: nodeData.inputs?.database as string,
|
||||
ssl: sslConnection
|
||||
}
|
||||
|
||||
const args = {
|
||||
postgresConnectionOptions: postgresConnectionOptions as DataSourceOptions,
|
||||
tableName: tableName
|
||||
}
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const vectorStore = await TypeORMVectorStore.fromDocuments(finalDocs, embeddings, args)
|
||||
|
||||
// Rewrite the method to use pg pool connection instead of the default connection
|
||||
/* Otherwise a connection error is displayed when the chain tries to execute the function
|
||||
[chain/start] [1:chain:ConversationalRetrievalQAChain] Entering Chain run with input: { "question": "what the document is about", "chat_history": [] }
|
||||
[retriever/start] [1:chain:ConversationalRetrievalQAChain > 2:retriever:VectorStoreRetriever] Entering Retriever run with input: { "query": "what the document is about" }
|
||||
[ERROR]: uncaughtException: Illegal invocation TypeError: Illegal invocation at Socket.ref (node:net:1524:18) at Connection.ref (.../node_modules/pg/lib/connection.js:183:17) at Client.ref (.../node_modules/pg/lib/client.js:591:21) at BoundPool._pulseQueue (/node_modules/pg-pool/index.js:148:28) at .../node_modules/pg-pool/index.js:184:37 at process.processTicksAndRejections (node:internal/process/task_queues:77:11)
|
||||
*/
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => {
|
||||
const embeddingString = `[${query.join(',')}]`
|
||||
const _filter = filter ?? '{}'
|
||||
|
||||
const queryString = `
|
||||
SELECT *, embedding <=> $1 as "_distance"
|
||||
FROM ${tableName}
|
||||
WHERE metadata @> $2
|
||||
ORDER BY "_distance" ASC
|
||||
LIMIT $3;`
|
||||
|
||||
const poolOptions = {
|
||||
host: postgresConnectionOptions.host,
|
||||
port: postgresConnectionOptions.port,
|
||||
user: postgresConnectionOptions.username,
|
||||
password: postgresConnectionOptions.password,
|
||||
database: postgresConnectionOptions.database
|
||||
}
|
||||
const pool = new Pool(poolOptions)
|
||||
const conn = await pool.connect()
|
||||
|
||||
const documents = await conn.query(queryString, [embeddingString, _filter, k])
|
||||
|
||||
conn.release()
|
||||
|
||||
const results = [] as [TypeORMVectorStoreDocument, number][]
|
||||
for (const doc of documents.rows) {
|
||||
if (doc._distance != null && doc.pageContent != null) {
|
||||
const document = new Document(doc) as TypeORMVectorStoreDocument
|
||||
document.id = doc.id
|
||||
results.push([document, doc._distance])
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: PostgresUpsert_VectorStores }
|
||||
|
|
@ -39,7 +39,6 @@ class Qdrant_VectorStores implements INode {
|
|||
this.description =
|
||||
'Upsert embedded data and perform similarity search upon query using Qdrant, a scalable open source vector database written in Rust'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,194 +0,0 @@
|
|||
import { QdrantClient } from '@qdrant/js-client-rest'
|
||||
import { QdrantVectorStore, QdrantLibArgs } from '@langchain/community/vectorstores/qdrant'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { VectorStoreRetrieverInput } from '@langchain/core/vectorstores'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
type RetrieverConfig = Partial<VectorStoreRetrieverInput<QdrantVectorStore>>
|
||||
|
||||
class Qdrant_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Qdrant Load Existing Index'
|
||||
this.name = 'qdrantExistingIndex'
|
||||
this.version = 2.0
|
||||
this.type = 'Qdrant'
|
||||
this.icon = 'qdrant.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Qdrant (i.e., documents have been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Only needed when using Qdrant cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['qdrantApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Server URL',
|
||||
name: 'qdrantServerUrl',
|
||||
type: 'string',
|
||||
placeholder: 'http://localhost:6333'
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Collection Name',
|
||||
name: 'qdrantCollection',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Vector Dimension',
|
||||
name: 'qdrantVectorDimension',
|
||||
type: 'number',
|
||||
default: 1536,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Similarity',
|
||||
name: 'qdrantSimilarity',
|
||||
description: 'Similarity measure used in Qdrant.',
|
||||
type: 'options',
|
||||
default: 'Cosine',
|
||||
options: [
|
||||
{
|
||||
label: 'Cosine',
|
||||
name: 'Cosine'
|
||||
},
|
||||
{
|
||||
label: 'Euclid',
|
||||
name: 'Euclid'
|
||||
},
|
||||
{
|
||||
label: 'Dot',
|
||||
name: 'Dot'
|
||||
}
|
||||
],
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Additional Collection Cofiguration',
|
||||
name: 'qdrantCollectionConfiguration',
|
||||
description:
|
||||
'Refer to <a target="_blank" href="https://qdrant.tech/documentation/concepts/collections">collection docs</a> for more reference',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Search Filter',
|
||||
name: 'qdrantFilter',
|
||||
description: 'Only return points which satisfy the conditions',
|
||||
type: 'json',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Qdrant Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(QdrantVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const qdrantServerUrl = nodeData.inputs?.qdrantServerUrl as string
|
||||
const collectionName = nodeData.inputs?.qdrantCollection as string
|
||||
let qdrantCollectionConfiguration = nodeData.inputs?.qdrantCollectionConfiguration
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const qdrantSimilarity = nodeData.inputs?.qdrantSimilarity
|
||||
const qdrantVectorDimension = nodeData.inputs?.qdrantVectorDimension
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
let queryFilter = nodeData.inputs?.qdrantFilter
|
||||
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData)
|
||||
|
||||
const client = new QdrantClient({
|
||||
url: qdrantServerUrl,
|
||||
apiKey: qdrantApiKey
|
||||
})
|
||||
|
||||
const dbConfig: QdrantLibArgs = {
|
||||
client,
|
||||
collectionName
|
||||
}
|
||||
|
||||
const retrieverConfig: RetrieverConfig = {
|
||||
k
|
||||
}
|
||||
|
||||
if (qdrantCollectionConfiguration) {
|
||||
qdrantCollectionConfiguration =
|
||||
typeof qdrantCollectionConfiguration === 'object'
|
||||
? qdrantCollectionConfiguration
|
||||
: JSON.parse(qdrantCollectionConfiguration)
|
||||
dbConfig.collectionConfig = {
|
||||
...qdrantCollectionConfiguration,
|
||||
vectors: {
|
||||
...qdrantCollectionConfiguration.vectors,
|
||||
size: qdrantVectorDimension ? parseInt(qdrantVectorDimension, 10) : 1536,
|
||||
distance: qdrantSimilarity ?? 'Cosine'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (queryFilter) {
|
||||
retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : JSON.parse(queryFilter)
|
||||
}
|
||||
|
||||
const vectorStore = await QdrantVectorStore.fromExistingCollection(embeddings, dbConfig)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(retrieverConfig)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
if (queryFilter) {
|
||||
;(vectorStore as any).filter = retrieverConfig.filter
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Qdrant_Existing_VectorStores }
|
||||
|
|
@ -1,213 +0,0 @@
|
|||
import { QdrantClient } from '@qdrant/js-client-rest'
|
||||
import { QdrantVectorStore, QdrantLibArgs } from '@langchain/community/vectorstores/qdrant'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { flatten } from 'lodash'
|
||||
import { VectorStoreRetrieverInput } from '@langchain/core/vectorstores'
|
||||
|
||||
type RetrieverConfig = Partial<VectorStoreRetrieverInput<QdrantVectorStore>>
|
||||
|
||||
class QdrantUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Qdrant Upsert Document'
|
||||
this.name = 'qdrantUpsert'
|
||||
this.version = 3.0
|
||||
this.type = 'Qdrant'
|
||||
this.icon = 'qdrant.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Qdrant'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Only needed when using Qdrant cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['qdrantApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Server URL',
|
||||
name: 'qdrantServerUrl',
|
||||
type: 'string',
|
||||
placeholder: 'http://localhost:6333'
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Collection Name',
|
||||
name: 'qdrantCollection',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Vector Dimension',
|
||||
name: 'qdrantVectorDimension',
|
||||
type: 'number',
|
||||
default: 1536,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Upsert Batch Size',
|
||||
name: 'batchSize',
|
||||
type: 'number',
|
||||
step: 1,
|
||||
description: 'Upsert in batches of size N',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Similarity',
|
||||
name: 'qdrantSimilarity',
|
||||
description: 'Similarity measure used in Qdrant.',
|
||||
type: 'options',
|
||||
default: 'Cosine',
|
||||
options: [
|
||||
{
|
||||
label: 'Cosine',
|
||||
name: 'Cosine'
|
||||
},
|
||||
{
|
||||
label: 'Euclid',
|
||||
name: 'Euclid'
|
||||
},
|
||||
{
|
||||
label: 'Dot',
|
||||
name: 'Dot'
|
||||
}
|
||||
],
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Search Filter',
|
||||
name: 'qdrantFilter',
|
||||
description: 'Only return points which satisfy the conditions',
|
||||
type: 'json',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Qdrant Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(QdrantVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const qdrantServerUrl = nodeData.inputs?.qdrantServerUrl as string
|
||||
const collectionName = nodeData.inputs?.qdrantCollection as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const qdrantSimilarity = nodeData.inputs?.qdrantSimilarity
|
||||
const qdrantVectorDimension = nodeData.inputs?.qdrantVectorDimension
|
||||
const _batchSize = nodeData.inputs?.batchSize
|
||||
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
let queryFilter = nodeData.inputs?.qdrantFilter
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData)
|
||||
|
||||
const client = new QdrantClient({
|
||||
url: qdrantServerUrl,
|
||||
apiKey: qdrantApiKey
|
||||
})
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const dbConfig: QdrantLibArgs = {
|
||||
client,
|
||||
url: qdrantServerUrl,
|
||||
collectionName,
|
||||
collectionConfig: {
|
||||
vectors: {
|
||||
size: qdrantVectorDimension ? parseInt(qdrantVectorDimension, 10) : 1536,
|
||||
distance: qdrantSimilarity ?? 'Cosine'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const retrieverConfig: RetrieverConfig = {
|
||||
k
|
||||
}
|
||||
|
||||
if (queryFilter) {
|
||||
retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : JSON.parse(queryFilter)
|
||||
}
|
||||
|
||||
let vectorStore: QdrantVectorStore | undefined = undefined
|
||||
if (_batchSize) {
|
||||
const batchSize = parseInt(_batchSize, 10)
|
||||
for (let i = 0; i < finalDocs.length; i += batchSize) {
|
||||
const batch = finalDocs.slice(i, i + batchSize)
|
||||
vectorStore = await QdrantVectorStore.fromDocuments(batch, embeddings, dbConfig)
|
||||
}
|
||||
} else {
|
||||
vectorStore = await QdrantVectorStore.fromDocuments(finalDocs, embeddings, dbConfig)
|
||||
}
|
||||
|
||||
if (vectorStore === undefined) {
|
||||
throw new Error('No documents to upsert')
|
||||
} else {
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(retrieverConfig)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: QdrantUpsert_VectorStores }
|
||||
|
|
@ -52,7 +52,6 @@ class Redis_VectorStores implements INode {
|
|||
this.icon = 'redis.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,237 +0,0 @@
|
|||
import { createClient, SearchOptions, RedisClientOptions } from 'redis'
|
||||
import { isEqual } from 'lodash'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { RedisVectorStore } from '@langchain/community/vectorstores/redis'
|
||||
import { escapeSpecialChars, unEscapeSpecialChars } from './utils'
|
||||
import {
|
||||
getBaseClasses,
|
||||
getCredentialData,
|
||||
getCredentialParam,
|
||||
ICommonObject,
|
||||
INodeData,
|
||||
INodeOutputsValue,
|
||||
INodeParams
|
||||
} from '../../../src'
|
||||
|
||||
let redisClientSingleton: ReturnType<typeof createClient>
|
||||
let redisClientOption: RedisClientOptions
|
||||
|
||||
const getRedisClient = async (option: RedisClientOptions) => {
|
||||
if (!redisClientSingleton) {
|
||||
// if client doesn't exists
|
||||
redisClientSingleton = createClient(option)
|
||||
await redisClientSingleton.connect()
|
||||
redisClientOption = option
|
||||
return redisClientSingleton
|
||||
} else if (redisClientSingleton && !isEqual(option, redisClientOption)) {
|
||||
// if client exists but option changed
|
||||
redisClientSingleton.quit()
|
||||
redisClientSingleton = createClient(option)
|
||||
await redisClientSingleton.connect()
|
||||
redisClientOption = option
|
||||
return redisClientSingleton
|
||||
}
|
||||
return redisClientSingleton
|
||||
}
|
||||
|
||||
/**
 * Shared base for the Redis vector-store nodes (upsert / load-existing).
 * Owns the common node metadata, input/output definitions, credential
 * resolution, and the similarity-search implementation; subclasses only
 * provide `constructVectorStore`.
 */
export abstract class RedisSearchBase {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    badge: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams
    outputs: INodeOutputsValue[]
    // Shared client obtained via getRedisClient() during init()
    redisClient: ReturnType<typeof createClient>

    protected constructor() {
        this.type = 'Redis'
        this.icon = 'redis.svg'
        this.category = 'Vector Stores'
        this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
        this.badge = 'DEPRECATING'
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['redisCacheUrlApi', 'redisCacheApi']
        }
        this.inputs = [
            {
                label: 'Embeddings',
                name: 'embeddings',
                type: 'Embeddings'
            },
            {
                label: 'Index Name',
                name: 'indexName',
                placeholder: '<VECTOR_INDEX_NAME>',
                type: 'string'
            },
            {
                label: 'Replace Index?',
                name: 'replaceIndex',
                description: 'Selecting this option will delete the existing index and recreate a new one',
                default: false,
                type: 'boolean'
            },
            {
                label: 'Content Field',
                name: 'contentKey',
                description: 'Name of the field (column) that contains the actual content',
                type: 'string',
                default: 'content',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Metadata Field',
                name: 'metadataKey',
                description: 'Name of the field (column) that contains the metadata of the document',
                type: 'string',
                default: 'metadata',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Vector Field',
                name: 'vectorKey',
                description: 'Name of the field (column) that contains the vector',
                type: 'string',
                default: 'content_vector',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Top K',
                name: 'topK',
                description: 'Number of top results to fetch. Default to 4',
                placeholder: '4',
                type: 'number',
                additionalParams: true,
                optional: true
            }
        ]
        this.outputs = [
            {
                label: 'Redis Retriever',
                name: 'retriever',
                baseClasses: this.baseClasses
            },
            {
                label: 'Redis Vector Store',
                name: 'vectorStore',
                baseClasses: [this.type, ...getBaseClasses(RedisVectorStore)]
            }
        ]
    }

    // Subclasses create the underlying store (and, for upsert, write docs).
    // `docs` is undefined for the load-existing variant.
    abstract constructVectorStore(
        embeddings: Embeddings,
        indexName: string,
        replaceIndex: boolean,
        docs: Document<Record<string, any>>[] | undefined
    ): Promise<VectorStore>

    /**
     * Connects to Redis (URL credential, or host/port/user/password parts),
     * builds the store via the subclass hook, and patches similarity search
     * to use FT.SEARCH with a KNN query directly.
     *
     * @param docs - documents to upsert, or undefined when loading an existing index
     * @returns retriever or vector store depending on the selected output
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject, docs: Document<Record<string, any>>[] | undefined): Promise<any> {
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const indexName = nodeData.inputs?.indexName as string
        let contentKey = nodeData.inputs?.contentKey as string
        let metadataKey = nodeData.inputs?.metadataKey as string
        let vectorKey = nodeData.inputs?.vectorKey as string
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const topK = nodeData.inputs?.topK as string
        const replaceIndex = nodeData.inputs?.replaceIndex as boolean
        const k = topK ? parseFloat(topK) : 4
        const output = nodeData.outputs?.output as string

        let redisUrl = getCredentialParam('redisUrl', credentialData, nodeData)
        if (!redisUrl || redisUrl === '') {
            // Fall back to assembling a URL from the discrete credential fields
            const username = getCredentialParam('redisCacheUser', credentialData, nodeData)
            const password = getCredentialParam('redisCachePwd', credentialData, nodeData)
            const portStr = getCredentialParam('redisCachePort', credentialData, nodeData)
            const host = getCredentialParam('redisCacheHost', credentialData, nodeData)

            redisUrl = 'redis://' + username + ':' + password + '@' + host + ':' + portStr
        }

        this.redisClient = await getRedisClient({ url: redisUrl })

        const vectorStore = await this.constructVectorStore(embeddings, indexName, replaceIndex, docs)
        // Apply defaults for empty field names (mirrors the input defaults)
        if (!contentKey || contentKey === '') contentKey = 'content'
        if (!metadataKey || metadataKey === '') metadataKey = 'metadata'
        if (!vectorKey || vectorKey === '') vectorKey = 'content_vector'

        // Builds the FT.SEARCH query string + options for a KNN vector search.
        // Closes over contentKey/metadataKey/vectorKey resolved above.
        const buildQuery = (query: number[], k: number, filter?: string[]): [string, SearchOptions] => {
            const vectorScoreField = 'vector_score'

            let hybridFields = '*'
            // if a filter is set, modify the hybrid query
            if (filter && filter.length) {
                // `filter` is a list of strings, then it's applied using the OR operator in the metadata key
                hybridFields = `@${metadataKey}:(${filter.map(escapeSpecialChars).join('|')})`
            }

            const baseQuery = `${hybridFields} => [KNN ${k} @${vectorKey} $vector AS ${vectorScoreField}]`
            const returnFields = [metadataKey, contentKey, vectorScoreField]

            const options: SearchOptions = {
                PARAMS: {
                    // RediSearch expects the query vector as raw float32 bytes
                    vector: Buffer.from(new Float32Array(query).buffer)
                },
                RETURN: returnFields,
                SORTBY: vectorScoreField,
                DIALECT: 2,
                LIMIT: {
                    from: 0,
                    size: k
                }
            }

            return [baseQuery, options]
        }

        // Replace langchain's implementation with a direct FT.SEARCH call so
        // the custom field names and metadata escaping are honored.
        vectorStore.similaritySearchVectorWithScore = async (
            query: number[],
            k: number,
            filter?: string[]
        ): Promise<[Document, number][]> => {
            const results = await this.redisClient.ft.search(indexName, ...buildQuery(query, k, filter))
            const result: [Document, number][] = []

            if (results.total) {
                for (const res of results.documents) {
                    if (res.value) {
                        const document = res.value
                        // Skip hits without a score field (shouldn't normally happen)
                        if (document.vector_score) {
                            const metadataString = unEscapeSpecialChars(document[metadataKey] as string)
                            result.push([
                                new Document({
                                    pageContent: document[contentKey] as string,
                                    metadata: JSON.parse(metadataString)
                                }),
                                Number(document.vector_score)
                            ])
                        }
                    }
                }
            }
            return result
        }

        if (output === 'retriever') {
            return vectorStore.asRetriever(k)
        } else if (output === 'vectorStore') {
            ;(vectorStore as any).k = k
            return vectorStore
        }
        return vectorStore
    }
}
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { RedisVectorStore, RedisVectorStoreConfig } from '@langchain/community/vectorstores/redis'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { ICommonObject, INode, INodeData } from '../../../src/Interface'
|
||||
import { RedisSearchBase } from './RedisSearchBase'
|
||||
|
||||
class RedisExisting_VectorStores extends RedisSearchBase implements INode {
|
||||
constructor() {
|
||||
super()
|
||||
this.label = 'Redis Load Existing Index'
|
||||
this.name = 'RedisIndex'
|
||||
this.version = 1.0
|
||||
this.description = 'Load existing index from Redis (i.e: Document has been upserted)'
|
||||
|
||||
// Remove replaceIndex from inputs as it is not applicable while fetching data from Redis
|
||||
let input = this.inputs.find((i) => i.name === 'replaceIndex')
|
||||
if (input) this.inputs.splice(this.inputs.indexOf(input), 1)
|
||||
}
|
||||
|
||||
async constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
indexName: string,
|
||||
// eslint-disable-next-line unused-imports/no-unused-vars
|
||||
replaceIndex: boolean,
|
||||
_: Document<Record<string, any>>[]
|
||||
): Promise<VectorStore> {
|
||||
const storeConfig: RedisVectorStoreConfig = {
|
||||
redisClient: this.redisClient,
|
||||
indexName: indexName
|
||||
}
|
||||
|
||||
return new RedisVectorStore(embeddings, storeConfig)
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
return super.init(nodeData, _, options, undefined)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: RedisExisting_VectorStores }
|
||||
|
|
@ -1,62 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { flatten } from 'lodash'
|
||||
import { VectorStore } from '@langchain/core/vectorstores'
|
||||
import { RedisVectorStore, RedisVectorStoreConfig } from '@langchain/community/vectorstores/redis'
|
||||
import { RedisSearchBase } from './RedisSearchBase'
|
||||
import { ICommonObject, INode, INodeData } from '../../../src/Interface'
|
||||
import { escapeAllStrings } from './utils'
|
||||
|
||||
class RedisUpsert_VectorStores extends RedisSearchBase implements INode {
|
||||
constructor() {
|
||||
super()
|
||||
this.label = 'Redis Upsert Document'
|
||||
this.name = 'RedisUpsert'
|
||||
this.version = 1.0
|
||||
this.description = 'Upsert documents to Redis'
|
||||
this.inputs.unshift({
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
})
|
||||
}
|
||||
|
||||
async constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
indexName: string,
|
||||
replaceIndex: boolean,
|
||||
docs: Document<Record<string, any>>[]
|
||||
): Promise<VectorStore> {
|
||||
const storeConfig: RedisVectorStoreConfig = {
|
||||
redisClient: this.redisClient,
|
||||
indexName: indexName
|
||||
}
|
||||
if (replaceIndex) {
|
||||
let response = await this.redisClient.ft.dropIndex(indexName)
|
||||
if (process.env.DEBUG === 'true') {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Redis Vector Store :: Dropping index [${indexName}], Received Response [${response}]`)
|
||||
}
|
||||
}
|
||||
return await RedisVectorStore.fromDocuments(docs, embeddings, storeConfig)
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
const document = new Document(flattenDocs[i])
|
||||
escapeAllStrings(document.metadata)
|
||||
finalDocs.push(document)
|
||||
}
|
||||
}
|
||||
|
||||
return super.init(nodeData, _, options, finalDocs)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: RedisUpsert_VectorStores }
|
||||
|
|
@ -29,7 +29,6 @@ class SingleStore_VectorStores implements INode {
|
|||
this.description =
|
||||
'Upsert embedded data and perform similarity search upon query using SingleStore, a fast and distributed cloud relational database'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,148 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { SingleStoreVectorStore, SingleStoreVectorStoreConfig } from '@langchain/community/vectorstores/singlestore'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class SingleStoreExisting_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'SingleStore Load Existing Table'
|
||||
this.name = 'singlestoreExisting'
|
||||
this.version = 1.0
|
||||
this.type = 'SingleStore'
|
||||
this.icon = 'singlestore.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing document from SingleStore'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Needed when using SingleStore cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['singleStoreApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Host',
|
||||
name: 'host',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'database',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string',
|
||||
placeholder: 'embeddings',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Content Column Name',
|
||||
name: 'contentColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'content',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Vector Column Name',
|
||||
name: 'vectorColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'vector',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Metadata Column Name',
|
||||
name: 'metadataColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'metadata',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'SingleStore Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'SingleStore Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(SingleStoreVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const user = getCredentialParam('user', credentialData, nodeData)
|
||||
const password = getCredentialParam('password', credentialData, nodeData)
|
||||
|
||||
const singleStoreConnectionConfig = {
|
||||
connectionOptions: {
|
||||
host: nodeData.inputs?.host as string,
|
||||
port: 3306,
|
||||
user,
|
||||
password,
|
||||
database: nodeData.inputs?.database as string
|
||||
},
|
||||
...(nodeData.inputs?.tableName ? { tableName: nodeData.inputs.tableName as string } : {}),
|
||||
...(nodeData.inputs?.contentColumnName ? { contentColumnName: nodeData.inputs.contentColumnName as string } : {}),
|
||||
...(nodeData.inputs?.vectorColumnName ? { vectorColumnName: nodeData.inputs.vectorColumnName as string } : {}),
|
||||
...(nodeData.inputs?.metadataColumnName ? { metadataColumnName: nodeData.inputs.metadataColumnName as string } : {})
|
||||
} as SingleStoreVectorStoreConfig
|
||||
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
let vectorStore: SingleStoreVectorStore
|
||||
|
||||
vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: SingleStoreExisting_VectorStores }
|
||||
|
|
@ -1,166 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { SingleStoreVectorStore, SingleStoreVectorStoreConfig } from '@langchain/community/vectorstores/singlestore'
|
||||
import { flatten } from 'lodash'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class SingleStoreUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'SingleStore Upsert Document'
|
||||
this.name = 'singlestoreUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'SingleStore'
|
||||
this.icon = 'singlestore.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to SingleStore'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Needed when using SingleStore cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['singleStoreApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Host',
|
||||
name: 'host',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'database',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string',
|
||||
placeholder: 'embeddings',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Content Column Name',
|
||||
name: 'contentColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'content',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Vector Column Name',
|
||||
name: 'vectorColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'vector',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Metadata Column Name',
|
||||
name: 'metadataColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'metadata',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'SingleStore Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'SingleStore Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(SingleStoreVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const user = getCredentialParam('user', credentialData, nodeData)
|
||||
const password = getCredentialParam('password', credentialData, nodeData)
|
||||
|
||||
const singleStoreConnectionConfig = {
|
||||
connectionOptions: {
|
||||
host: nodeData.inputs?.host as string,
|
||||
port: 3306,
|
||||
user,
|
||||
password,
|
||||
database: nodeData.inputs?.database as string
|
||||
},
|
||||
...(nodeData.inputs?.tableName ? { tableName: nodeData.inputs.tableName as string } : {}),
|
||||
...(nodeData.inputs?.contentColumnName ? { contentColumnName: nodeData.inputs.contentColumnName as string } : {}),
|
||||
...(nodeData.inputs?.vectorColumnName ? { vectorColumnName: nodeData.inputs.vectorColumnName as string } : {}),
|
||||
...(nodeData.inputs?.metadataColumnName ? { metadataColumnName: nodeData.inputs.metadataColumnName as string } : {})
|
||||
} as SingleStoreVectorStoreConfig
|
||||
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
let vectorStore: SingleStoreVectorStore
|
||||
|
||||
vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig)
|
||||
vectorStore.addDocuments.bind(vectorStore)(finalDocs)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: SingleStoreUpsert_VectorStores }
|
||||
|
|
@ -32,7 +32,6 @@ class Supabase_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity or mmr search upon query using Supabase via pgvector extension'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,131 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { SupabaseLibArgs, SupabaseVectorStore } from '@langchain/community/vectorstores/supabase'
|
||||
import { createClient } from '@supabase/supabase-js'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class Supabase_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Supabase Load Existing Index'
|
||||
this.name = 'supabaseExistingIndex'
|
||||
this.version = 1.0
|
||||
this.type = 'Supabase'
|
||||
this.icon = 'supabase.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Supabase (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['supabaseApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Supabase Project URL',
|
||||
name: 'supabaseProjUrl',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Query Name',
|
||||
name: 'queryName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Supabase Metadata Filter',
|
||||
name: 'supabaseMetadataFilter',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Supabase Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Supabase Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(SupabaseVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const supabaseProjUrl = nodeData.inputs?.supabaseProjUrl as string
|
||||
const tableName = nodeData.inputs?.tableName as string
|
||||
const queryName = nodeData.inputs?.queryName as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const supabaseMetadataFilter = nodeData.inputs?.supabaseMetadataFilter
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const supabaseApiKey = getCredentialParam('supabaseApiKey', credentialData, nodeData)
|
||||
|
||||
const client = createClient(supabaseProjUrl, supabaseApiKey)
|
||||
|
||||
const obj: SupabaseLibArgs = {
|
||||
client,
|
||||
tableName,
|
||||
queryName
|
||||
}
|
||||
|
||||
if (supabaseMetadataFilter) {
|
||||
const metadatafilter = typeof supabaseMetadataFilter === 'object' ? supabaseMetadataFilter : JSON.parse(supabaseMetadataFilter)
|
||||
obj.filter = metadatafilter
|
||||
}
|
||||
|
||||
const vectorStore = await SupabaseVectorStore.fromExistingIndex(embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
if (supabaseMetadataFilter) {
|
||||
;(vectorStore as any).filter = obj.filter
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Supabase_Existing_VectorStores }
|
||||
|
|
@ -1,157 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { SupabaseVectorStore } from '@langchain/community/vectorstores/supabase'
|
||||
import { createClient } from '@supabase/supabase-js'
|
||||
import { flatten } from 'lodash'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class SupabaseUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Supabase Upsert Document'
|
||||
this.name = 'supabaseUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'Supabase'
|
||||
this.icon = 'supabase.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Supabase'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['supabaseApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Supabase Project URL',
|
||||
name: 'supabaseProjUrl',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Query Name',
|
||||
name: 'queryName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Supabase Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Supabase Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(SupabaseVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const supabaseProjUrl = nodeData.inputs?.supabaseProjUrl as string
|
||||
const tableName = nodeData.inputs?.tableName as string
|
||||
const queryName = nodeData.inputs?.queryName as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const supabaseApiKey = getCredentialParam('supabaseApiKey', credentialData, nodeData)
|
||||
|
||||
const client = createClient(supabaseProjUrl, supabaseApiKey)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
|
||||
const vectorStore = await SupabaseUpsertVectorStore.fromDocuments(finalDocs, embeddings, {
|
||||
client,
|
||||
tableName: tableName,
|
||||
queryName: queryName
|
||||
})
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
class SupabaseUpsertVectorStore extends SupabaseVectorStore {
|
||||
async addVectors(vectors: number[][], documents: Document[]): Promise<string[]> {
|
||||
if (vectors.length === 0) {
|
||||
return []
|
||||
}
|
||||
const rows = vectors.map((embedding, idx) => ({
|
||||
content: documents[idx].pageContent,
|
||||
embedding,
|
||||
metadata: documents[idx].metadata
|
||||
}))
|
||||
|
||||
let returnedIds: string[] = []
|
||||
for (let i = 0; i < rows.length; i += this.upsertBatchSize) {
|
||||
const chunk = rows.slice(i, i + this.upsertBatchSize).map((row, index) => {
|
||||
return { id: index, ...row }
|
||||
})
|
||||
|
||||
const res = await this.client.from(this.tableName).upsert(chunk).select()
|
||||
if (res.error) {
|
||||
throw new Error(`Error inserting: ${res.error.message} ${res.status} ${res.statusText}`)
|
||||
}
|
||||
if (res.data) {
|
||||
returnedIds = returnedIds.concat(res.data.map((row) => row.id))
|
||||
}
|
||||
}
|
||||
return returnedIds
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: SupabaseUpsert_VectorStores }
|
||||
|
|
@ -36,7 +36,6 @@ class Upstash_VectorStores implements INode {
|
|||
this.description =
|
||||
'Upsert data as embedding or string and perform similarity search with Upstash, the leading serverless data platform'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -36,7 +36,6 @@ class Vectara_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity search upon query using Vectara, a LLM-powered search-as-a-service'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,139 +0,0 @@
|
|||
import { VectaraStore, VectaraLibArgs, VectaraFilter, VectaraContextConfig } from '@langchain/community/vectorstores/vectara'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class VectaraExisting_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Vectara Load Existing Index'
|
||||
this.name = 'vectaraExistingIndex'
|
||||
this.version = 1.0
|
||||
this.type = 'Vectara'
|
||||
this.icon = 'vectara.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Vectara (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['vectaraApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Metadata Filter',
|
||||
name: 'filter',
|
||||
description:
|
||||
'Filter to apply to Vectara metadata. Refer to the <a target="_blank" href="https://docs.flowiseai.com/vector-stores/vectara">documentation</a> on how to use Vectara filters with Flowise.',
|
||||
type: 'string',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Sentences Before',
|
||||
name: 'sentencesBefore',
|
||||
description: 'Number of sentences to fetch before the matched sentence. Defaults to 2.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Sentences After',
|
||||
name: 'sentencesAfter',
|
||||
description: 'Number of sentences to fetch after the matched sentence. Defaults to 2.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Lambda',
|
||||
name: 'lambda',
|
||||
description:
|
||||
'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Defaults to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Vectara Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Vectara Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(VectaraStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
const customerId = getCredentialParam('customerID', credentialData, nodeData)
|
||||
const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',')
|
||||
|
||||
const vectaraMetadataFilter = nodeData.inputs?.filter as string
|
||||
const sentencesBefore = nodeData.inputs?.sentencesBefore as number
|
||||
const sentencesAfter = nodeData.inputs?.sentencesAfter as number
|
||||
const lambda = nodeData.inputs?.lambda as number
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseInt(topK, 10) : 4
|
||||
|
||||
const vectaraArgs: VectaraLibArgs = {
|
||||
apiKey: apiKey,
|
||||
customerId: customerId,
|
||||
corpusId: corpusId,
|
||||
source: 'flowise'
|
||||
}
|
||||
|
||||
const vectaraFilter: VectaraFilter = {}
|
||||
if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter
|
||||
if (lambda) vectaraFilter.lambda = lambda
|
||||
|
||||
const vectaraContextConfig: VectaraContextConfig = {}
|
||||
if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore
|
||||
if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter
|
||||
vectaraFilter.contextConfig = vectaraContextConfig
|
||||
|
||||
const vectorStore = new VectaraStore(vectaraArgs)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k, vectaraFilter)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
if (vectaraMetadataFilter) {
|
||||
;(vectorStore as any).filter = vectaraFilter.filter
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: VectaraExisting_VectorStores }
|
||||
|
|
@ -1,196 +0,0 @@
|
|||
import { VectaraStore, VectaraLibArgs, VectaraFilter, VectaraContextConfig, VectaraFile } from '@langchain/community/vectorstores/vectara'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { getFileFromStorage } from '../../../src'
|
||||
|
||||
class VectaraUpload_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Vectara Upload File'
|
||||
this.name = 'vectaraUpload'
|
||||
this.version = 1.0
|
||||
this.type = 'Vectara'
|
||||
this.icon = 'vectara.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upload files to Vectara'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['vectaraApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'File',
|
||||
name: 'file',
|
||||
description:
|
||||
'File to upload to Vectara. Supported file types: https://docs.vectara.com/docs/api-reference/indexing-apis/file-upload/file-upload-filetypes',
|
||||
type: 'file'
|
||||
},
|
||||
{
|
||||
label: 'Metadata Filter',
|
||||
name: 'filter',
|
||||
description:
|
||||
'Filter to apply to Vectara metadata. Refer to the <a target="_blank" href="https://docs.flowiseai.com/vector-stores/vectara">documentation</a> on how to use Vectara filters with Flowise.',
|
||||
type: 'string',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Sentences Before',
|
||||
name: 'sentencesBefore',
|
||||
description: 'Number of sentences to fetch before the matched sentence. Defaults to 2.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Sentences After',
|
||||
name: 'sentencesAfter',
|
||||
description: 'Number of sentences to fetch after the matched sentence. Defaults to 2.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Lambda',
|
||||
name: 'lambda',
|
||||
description:
|
||||
'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Defaults to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Vectara Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Vectara Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(VectaraStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
const customerId = getCredentialParam('customerID', credentialData, nodeData)
|
||||
const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',')
|
||||
|
||||
const fileBase64 = nodeData.inputs?.file
|
||||
const vectaraMetadataFilter = nodeData.inputs?.filter as string
|
||||
const sentencesBefore = nodeData.inputs?.sentencesBefore as number
|
||||
const sentencesAfter = nodeData.inputs?.sentencesAfter as number
|
||||
const lambda = nodeData.inputs?.lambda as number
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseInt(topK, 10) : 4
|
||||
|
||||
const vectaraArgs: VectaraLibArgs = {
|
||||
apiKey: apiKey,
|
||||
customerId: customerId,
|
||||
corpusId: corpusId,
|
||||
source: 'flowise'
|
||||
}
|
||||
|
||||
const vectaraFilter: VectaraFilter = {}
|
||||
if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter
|
||||
if (lambda) vectaraFilter.lambda = lambda
|
||||
|
||||
const vectaraContextConfig: VectaraContextConfig = {}
|
||||
if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore
|
||||
if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter
|
||||
vectaraFilter.contextConfig = vectaraContextConfig
|
||||
|
||||
let files: string[] = []
|
||||
const vectaraFiles: VectaraFile[] = []
|
||||
|
||||
if (fileBase64.startsWith('FILE-STORAGE::')) {
|
||||
const fileName = fileBase64.replace('FILE-STORAGE::', '')
|
||||
if (fileName.startsWith('[') && fileName.endsWith(']')) {
|
||||
files = JSON.parse(fileName)
|
||||
} else {
|
||||
files = [fileName]
|
||||
}
|
||||
const chatflowid = options.chatflowid
|
||||
|
||||
for (const file of files) {
|
||||
const fileData = await getFileFromStorage(file, chatflowid)
|
||||
const blob = new Blob([fileData])
|
||||
vectaraFiles.push({ blob: blob, fileName: getFileName(file) })
|
||||
}
|
||||
} else {
|
||||
if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) {
|
||||
files = JSON.parse(fileBase64)
|
||||
} else {
|
||||
files = [fileBase64]
|
||||
}
|
||||
|
||||
for (const file of files) {
|
||||
const splitDataURI = file.split(',')
|
||||
splitDataURI.pop()
|
||||
const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
|
||||
const blob = new Blob([bf])
|
||||
vectaraFiles.push({ blob: blob, fileName: getFileName(file) })
|
||||
}
|
||||
}
|
||||
|
||||
const vectorStore = new VectaraStore(vectaraArgs)
|
||||
await vectorStore.addFiles(vectaraFiles)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k, vectaraFilter)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
const getFileName = (fileBase64: string) => {
|
||||
let fileNames = []
|
||||
if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) {
|
||||
const files = JSON.parse(fileBase64)
|
||||
for (const file of files) {
|
||||
const splitDataURI = file.split(',')
|
||||
const filename = splitDataURI[splitDataURI.length - 1].split(':')[1]
|
||||
fileNames.push(filename)
|
||||
}
|
||||
return fileNames.join(', ')
|
||||
} else {
|
||||
const splitDataURI = fileBase64.split(',')
|
||||
const filename = splitDataURI[splitDataURI.length - 1].split(':')[1]
|
||||
return filename
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: VectaraUpload_VectorStores }
|
||||
|
|
@ -1,155 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { VectaraStore, VectaraLibArgs, VectaraFilter, VectaraContextConfig } from '@langchain/community/vectorstores/vectara'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { flatten } from 'lodash'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class VectaraUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Vectara Upsert Document'
|
||||
this.name = 'vectaraUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'Vectara'
|
||||
this.icon = 'vectara.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Vectara'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['vectaraApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Metadata Filter',
|
||||
name: 'filter',
|
||||
description:
|
||||
'Filter to apply to Vectara metadata. Refer to the <a target="_blank" href="https://docs.flowiseai.com/vector-stores/vectara">documentation</a> on how to use Vectara filters with Flowise.',
|
||||
type: 'string',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Sentences Before',
|
||||
name: 'sentencesBefore',
|
||||
description: 'Number of sentences to fetch before the matched sentence. Defaults to 2.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Sentences After',
|
||||
name: 'sentencesAfter',
|
||||
description: 'Number of sentences to fetch after the matched sentence. Defaults to 2.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Lambda',
|
||||
name: 'lambda',
|
||||
description:
|
||||
'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Defaults to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Vectara Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Vectara Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(VectaraStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
const customerId = getCredentialParam('customerID', credentialData, nodeData)
|
||||
const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',')
|
||||
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = {} as Embeddings
|
||||
const vectaraMetadataFilter = nodeData.inputs?.filter as string
|
||||
const sentencesBefore = nodeData.inputs?.sentencesBefore as number
|
||||
const sentencesAfter = nodeData.inputs?.sentencesAfter as number
|
||||
const lambda = nodeData.inputs?.lambda as number
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseInt(topK, 10) : 4
|
||||
|
||||
const vectaraArgs: VectaraLibArgs = {
|
||||
apiKey: apiKey,
|
||||
customerId: customerId,
|
||||
corpusId: corpusId,
|
||||
source: 'flowise'
|
||||
}
|
||||
|
||||
const vectaraFilter: VectaraFilter = {}
|
||||
if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter
|
||||
if (lambda) vectaraFilter.lambda = lambda
|
||||
|
||||
const vectaraContextConfig: VectaraContextConfig = {}
|
||||
if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore
|
||||
if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter
|
||||
vectaraFilter.contextConfig = vectaraContextConfig
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const vectorStore = await VectaraStore.fromDocuments(finalDocs, embeddings, vectaraArgs)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k, vectaraFilter)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: VectaraUpsert_VectorStores }
|
||||
|
|
@ -32,7 +32,6 @@ class Weaviate_VectorStores implements INode {
|
|||
this.description =
|
||||
'Upsert embedded data and perform similarity or mmr search using Weaviate, a scalable open-source vector database'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,157 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import weaviate, { WeaviateClient, ApiKey } from 'weaviate-ts-client'
|
||||
import { WeaviateLibArgs, WeaviateStore } from '@langchain/community/vectorstores/weaviate'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class Weaviate_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Weaviate Load Existing Index'
|
||||
this.name = 'weaviateExistingIndex'
|
||||
this.version = 1.0
|
||||
this.type = 'Weaviate'
|
||||
this.icon = 'weaviate.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Weaviate (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Only needed when using Weaviate cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['weaviateApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Scheme',
|
||||
name: 'weaviateScheme',
|
||||
type: 'options',
|
||||
default: 'https',
|
||||
options: [
|
||||
{
|
||||
label: 'https',
|
||||
name: 'https'
|
||||
},
|
||||
{
|
||||
label: 'http',
|
||||
name: 'http'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Host',
|
||||
name: 'weaviateHost',
|
||||
type: 'string',
|
||||
placeholder: 'localhost:8080'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Index',
|
||||
name: 'weaviateIndex',
|
||||
type: 'string',
|
||||
placeholder: 'Test'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Text Key',
|
||||
name: 'weaviateTextKey',
|
||||
type: 'string',
|
||||
placeholder: 'text',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Metadata Keys',
|
||||
name: 'weaviateMetadataKeys',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
placeholder: `["foo"]`,
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Weaviate Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(WeaviateStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const weaviateScheme = nodeData.inputs?.weaviateScheme as string
|
||||
const weaviateHost = nodeData.inputs?.weaviateHost as string
|
||||
const weaviateIndex = nodeData.inputs?.weaviateIndex as string
|
||||
const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string
|
||||
const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const weaviateApiKey = getCredentialParam('weaviateApiKey', credentialData, nodeData)
|
||||
|
||||
const clientConfig: any = {
|
||||
scheme: weaviateScheme,
|
||||
host: weaviateHost
|
||||
}
|
||||
if (weaviateApiKey) clientConfig.apiKey = new ApiKey(weaviateApiKey)
|
||||
|
||||
const client: WeaviateClient = weaviate.client(clientConfig)
|
||||
|
||||
const obj: WeaviateLibArgs = {
|
||||
client,
|
||||
indexName: weaviateIndex
|
||||
}
|
||||
|
||||
if (weaviateTextKey) obj.textKey = weaviateTextKey
|
||||
if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, ''))
|
||||
|
||||
const vectorStore = await WeaviateStore.fromExistingIndex(embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Weaviate_Existing_VectorStores }
|
||||
|
|
@ -1,174 +0,0 @@
|
|||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { WeaviateLibArgs, WeaviateStore } from '@langchain/community/vectorstores/weaviate'
|
||||
import weaviate, { WeaviateClient, ApiKey } from 'weaviate-ts-client'
|
||||
import { flatten } from 'lodash'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class WeaviateUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Weaviate Upsert Document'
|
||||
this.name = 'weaviateUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'Weaviate'
|
||||
this.icon = 'weaviate.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Weaviate'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Only needed when using Weaviate cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['weaviateApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Scheme',
|
||||
name: 'weaviateScheme',
|
||||
type: 'options',
|
||||
default: 'https',
|
||||
options: [
|
||||
{
|
||||
label: 'https',
|
||||
name: 'https'
|
||||
},
|
||||
{
|
||||
label: 'http',
|
||||
name: 'http'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Host',
|
||||
name: 'weaviateHost',
|
||||
type: 'string',
|
||||
placeholder: 'localhost:8080'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Index',
|
||||
name: 'weaviateIndex',
|
||||
type: 'string',
|
||||
placeholder: 'Test'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Text Key',
|
||||
name: 'weaviateTextKey',
|
||||
type: 'string',
|
||||
placeholder: 'text',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Metadata Keys',
|
||||
name: 'weaviateMetadataKeys',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
placeholder: `["foo"]`,
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Weaviate Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(WeaviateStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const weaviateScheme = nodeData.inputs?.weaviateScheme as string
|
||||
const weaviateHost = nodeData.inputs?.weaviateHost as string
|
||||
const weaviateIndex = nodeData.inputs?.weaviateIndex as string
|
||||
const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string
|
||||
const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const weaviateApiKey = getCredentialParam('weaviateApiKey', credentialData, nodeData)
|
||||
|
||||
const clientConfig: any = {
|
||||
scheme: weaviateScheme,
|
||||
host: weaviateHost
|
||||
}
|
||||
if (weaviateApiKey) clientConfig.apiKey = new ApiKey(weaviateApiKey)
|
||||
|
||||
const client: WeaviateClient = weaviate.client(clientConfig)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const obj: WeaviateLibArgs = {
|
||||
client,
|
||||
indexName: weaviateIndex
|
||||
}
|
||||
|
||||
if (weaviateTextKey) obj.textKey = weaviateTextKey
|
||||
if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, ''))
|
||||
|
||||
const vectorStore = await WeaviateStore.fromDocuments(finalDocs, embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: WeaviateUpsert_VectorStores }
|
||||
|
|
@ -31,7 +31,6 @@ class Zep_VectorStores implements INode {
|
|||
this.description =
|
||||
'Upsert embedded data and perform similarity or mmr search upon query using Zep, a fast and scalable building block for LLM apps'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,240 +0,0 @@
|
|||
import { IDocument, ZepClient } from '@getzep/zep-js'
|
||||
import { ZepVectorStore, IZepConfig } from '@langchain/community/vectorstores/zep'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class Zep_Existing_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Zep Load Existing Index - Open Source'
|
||||
this.name = 'zepExistingIndex'
|
||||
this.version = 1.0
|
||||
this.type = 'Zep'
|
||||
this.icon = 'zep.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Zep (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
optional: true,
|
||||
description: 'Configure JWT authentication on your Zep instance (Optional)',
|
||||
credentialNames: ['zepMemoryApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Base URL',
|
||||
name: 'baseURL',
|
||||
type: 'string',
|
||||
default: 'http://127.0.0.1:8000'
|
||||
},
|
||||
{
|
||||
label: 'Zep Collection',
|
||||
name: 'zepCollection',
|
||||
type: 'string',
|
||||
placeholder: 'my-first-collection'
|
||||
},
|
||||
{
|
||||
label: 'Zep Metadata Filter',
|
||||
name: 'zepMetadataFilter',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Embedding Dimension',
|
||||
name: 'dimension',
|
||||
type: 'number',
|
||||
default: 1536,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Zep Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Zep Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(ZepVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const baseURL = nodeData.inputs?.baseURL as string
|
||||
const zepCollection = nodeData.inputs?.zepCollection as string
|
||||
const zepMetadataFilter = nodeData.inputs?.zepMetadataFilter
|
||||
const dimension = nodeData.inputs?.dimension as number
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
|
||||
const zepConfig: IZepConfig & Partial<ZepFilter> = {
|
||||
apiUrl: baseURL,
|
||||
collectionName: zepCollection,
|
||||
embeddingDimensions: dimension,
|
||||
isAutoEmbedded: false
|
||||
}
|
||||
if (apiKey) zepConfig.apiKey = apiKey
|
||||
if (zepMetadataFilter) {
|
||||
const metadatafilter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : JSON.parse(zepMetadataFilter)
|
||||
zepConfig.filter = metadatafilter
|
||||
}
|
||||
|
||||
const vectorStore = await ZepExistingVS.fromExistingIndex(embeddings, zepConfig)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
if (zepMetadataFilter) {
|
||||
;(vectorStore as any).filter = zepConfig.filter
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
/** Optional flat metadata filter forwarded to Zep collection searches. */
interface ZepFilter {
    filter: Record<string, any>
}
|
||||
|
||||
function zepDocsToDocumentsAndScore(results: IDocument[]): [Document, number][] {
|
||||
return results.map((d) => [
|
||||
new Document({
|
||||
pageContent: d.content,
|
||||
metadata: d.metadata
|
||||
}),
|
||||
d.score ? d.score : 0
|
||||
])
|
||||
}
|
||||
|
||||
function assignMetadata(value: string | Record<string, unknown> | object | undefined): Record<string, unknown> | undefined {
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
return value as Record<string, unknown>
|
||||
}
|
||||
if (value !== undefined) {
|
||||
console.warn('Metadata filters must be an object, Record, or undefined.')
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
/**
 * ZepVectorStore subclass that attaches to an existing Zep collection and
 * translates a flat `{ key: value }` metadata filter into Zep's JSONPath
 * "where" clause at search time.
 */
class ZepExistingVS extends ZepVectorStore {
    // Flat metadata filter captured from the node config (optional).
    filter?: Record<string, any>
    // Original config kept so the collection can be (re)initialized lazily.
    args?: IZepConfig & Partial<ZepFilter>

    constructor(embeddings: Embeddings, args: IZepConfig & Partial<ZepFilter>) {
        super(embeddings, args)
        this.filter = args.filter
        this.args = args
    }

    // NOTE(review): the historical "initalize" spelling is kept — the only
    // call site (similaritySearchVectorWithScore below) uses the same name.
    async initalizeCollection(args: IZepConfig & Partial<ZepFilter>) {
        this.client = await ZepClient.init(args.apiUrl, args.apiKey)
        try {
            this.collection = await this.client.document.getCollection(args.collectionName)
        } catch (err) {
            if (err instanceof Error) {
                // A missing collection is recoverable: create it on demand.
                if (err.name === 'NotFoundError') {
                    await this.createNewCollection(args)
                } else {
                    throw err
                }
            }
            // Non-Error throwables are swallowed here — presumably
            // unreachable with the Zep client; TODO confirm.
        }
    }

    // Creates the collection; embeddingDimensions is mandatory because
    // documents are embedded client-side (isAutoEmbedded: false).
    async createNewCollection(args: IZepConfig & Partial<ZepFilter>) {
        if (!args.embeddingDimensions) {
            throw new Error(
                `Collection ${args.collectionName} not found. You can create a new Collection by providing embeddingDimensions.`
            )
        }

        this.collection = await this.client.document.addCollection({
            name: args.collectionName,
            description: args.description,
            metadata: args.metadata,
            embeddingDimensions: args.embeddingDimensions,
            isAutoEmbedded: false
        })
    }

    /**
     * Vector similarity search with optional metadata filtering.
     * At most one of `filter` (argument) or `this.filter` may be set.
     */
    async similaritySearchVectorWithScore(
        query: number[],
        k: number,
        filter?: Record<string, unknown> | undefined
    ): Promise<[Document, number][]> {
        if (filter && this.filter) {
            throw new Error('cannot provide both `filter` and `this.filter`')
        }
        const _filters = filter ?? this.filter
        // Turn each flat key/value pair into a JSONPath predicate; string
        // values are quoted, all other values are interpolated as-is.
        const ANDFilters = []
        for (const filterKey in _filters) {
            let filterVal = _filters[filterKey]
            if (typeof filterVal === 'string') filterVal = `"${filterVal}"`
            ANDFilters.push({ jsonpath: `$[*] ? (@.${filterKey} == ${filterVal})` })
        }
        // NOTE(review): with no filter configured this still sends `and: []` —
        // presumably Zep treats the empty clause as "match all"; confirm.
        const newfilter = {
            where: { and: ANDFilters }
        }
        // Collection binding happens lazily on every search.
        await this.initalizeCollection(this.args!).catch((err) => {
            console.error('Error initializing collection:', err)
            throw err
        })
        const results = await this.collection.search(
            {
                embedding: new Float32Array(query),
                metadata: assignMetadata(newfilter)
            },
            k
        )
        return zepDocsToDocumentsAndScore(results)
    }

    // Attaches lazily: no network work here; the collection is initialized
    // on the first search.
    static async fromExistingIndex(embeddings: Embeddings, dbConfig: IZepConfig & Partial<ZepFilter>): Promise<ZepVectorStore> {
        const instance = new this(embeddings, dbConfig)
        return instance
    }
}
|
||||
|
||||
module.exports = { nodeClass: Zep_Existing_VectorStores }
|
||||
|
|
@ -1,137 +0,0 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { ZepVectorStore, IZepConfig } from '@langchain/community/vectorstores/zep'
|
||||
import { Embeddings } from '@langchain/core/embeddings'
|
||||
import { Document } from '@langchain/core/documents'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
|
||||
class Zep_Upsert_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Zep Upsert Document - Open Source'
|
||||
this.name = 'zepUpsert'
|
||||
this.version = 1.0
|
||||
this.type = 'Zep'
|
||||
this.icon = 'zep.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Zep'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
optional: true,
|
||||
description: 'Configure JWT authentication on your Zep instance (Optional)',
|
||||
credentialNames: ['zepMemoryApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Base URL',
|
||||
name: 'baseURL',
|
||||
type: 'string',
|
||||
default: 'http://127.0.0.1:8000'
|
||||
},
|
||||
{
|
||||
label: 'Zep Collection',
|
||||
name: 'zepCollection',
|
||||
type: 'string',
|
||||
placeholder: 'my-first-collection'
|
||||
},
|
||||
{
|
||||
label: 'Embedding Dimension',
|
||||
name: 'dimension',
|
||||
type: 'number',
|
||||
default: 1536,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Zep Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Zep Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(ZepVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const baseURL = nodeData.inputs?.baseURL as string
|
||||
const zepCollection = nodeData.inputs?.zepCollection as string
|
||||
const dimension = (nodeData.inputs?.dimension as number) ?? 1536
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
const output = nodeData.outputs?.output as string
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const zepConfig: IZepConfig = {
|
||||
apiUrl: baseURL,
|
||||
collectionName: zepCollection,
|
||||
embeddingDimensions: dimension,
|
||||
isAutoEmbedded: false
|
||||
}
|
||||
if (apiKey) zepConfig.apiKey = apiKey
|
||||
|
||||
const vectorStore = await ZepVectorStore.fromDocuments(finalDocs, embeddings, zepConfig)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Zep_Upsert_VectorStores }
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { IDocument, ZepClient } from '@getzep/zep-cloud'
|
||||
import { ZepClient } from '@getzep/zep-cloud'
|
||||
import { IZepConfig, ZepVectorStore } from '@getzep/zep-cloud/langchain'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
|
|
@ -32,7 +32,6 @@ class Zep_CloudVectorStores implements INode {
|
|||
this.description =
|
||||
'Upsert embedded data and perform similarity or mmr search upon query using Zep, a fast and scalable building block for LLM apps'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
@ -101,7 +100,9 @@ class Zep_CloudVectorStores implements INode {
|
|||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
const client = await ZepClient.init(apiKey)
|
||||
const client = new ZepClient({
|
||||
apiKey: apiKey
|
||||
})
|
||||
const zepConfig = {
|
||||
apiKey: apiKey,
|
||||
collectionName: zepCollection,
|
||||
|
|
@ -129,7 +130,9 @@ class Zep_CloudVectorStores implements INode {
|
|||
if (zepMetadataFilter) {
|
||||
zepConfig.filter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : JSON.parse(zepMetadataFilter)
|
||||
}
|
||||
zepConfig.client = await ZepClient.init(zepConfig.apiKey)
|
||||
zepConfig.client = new ZepClient({
|
||||
apiKey: apiKey
|
||||
})
|
||||
const vectorStore = await ZepExistingVS.init(zepConfig)
|
||||
return resolveVectorStoreOrRetriever(nodeData, vectorStore, zepConfig.filter)
|
||||
}
|
||||
|
|
@ -139,26 +142,6 @@ interface ZepFilter {
|
|||
filter: Record<string, any>
|
||||
}
|
||||
|
||||
// Maps Zep search hits to LangChain (Document, score) tuples; falsy scores
// (undefined/0) are normalized to 0.
function zepDocsToDocumentsAndScore(results: IDocument[]): [Document, number][] {
    return results.map((d) => [
        new Document({
            pageContent: d.content,
            metadata: d.metadata
        }),
        d.score ? d.score : 0
    ])
}
|
||||
|
||||
// Narrows a caller-supplied metadata filter to a plain object; any other
// defined value is rejected with a console warning and treated as "no filter".
function assignMetadata(value: string | Record<string, unknown> | object | undefined): Record<string, unknown> | undefined {
    if (typeof value === 'object' && value !== null) {
        return value as Record<string, unknown>
    }
    if (value !== undefined) {
        console.warn('Metadata filters must be an object, Record, or undefined.')
    }
    return undefined
}
|
||||
|
||||
class ZepExistingVS extends ZepVectorStore {
|
||||
filter?: Record<string, any>
|
||||
args?: IZepConfig & Partial<ZepFilter>
|
||||
|
|
@ -169,61 +152,6 @@ class ZepExistingVS extends ZepVectorStore {
|
|||
this.args = args
|
||||
}
|
||||
|
||||
    /**
     * Connects the Zep client and binds `this.collection`, creating the
     * collection on demand when it does not exist yet.
     */
    async initializeCollection(args: IZepConfig & Partial<ZepFilter>) {
        // NOTE(review): argument order here is (apiKey, apiUrl) — differs from
        // the open-source node's (apiUrl, apiKey); presumably matches this
        // SDK version's signature, confirm against @getzep/zep-cloud.
        this.client = await ZepClient.init(args.apiKey, args.apiUrl)
        try {
            this.collection = await this.client.document.getCollection(args.collectionName)
        } catch (err) {
            if (err instanceof Error) {
                // A missing collection is recoverable: create it on demand.
                if (err.name === 'NotFoundError') {
                    await this.createNewCollection(args)
                } else {
                    throw err
                }
            }
        }
    }
|
||||
|
||||
    // Creates the collection with name/description/metadata only — unlike the
    // open-source node, no embeddingDimensions/isAutoEmbedded are passed here.
    async createNewCollection(args: IZepConfig & Partial<ZepFilter>) {
        this.collection = await this.client.document.addCollection({
            name: args.collectionName,
            description: args.description,
            metadata: args.metadata
        })
    }
|
||||
|
||||
    /**
     * Vector similarity search with optional metadata filtering.
     * At most one of `filter` (argument) or `this.filter` may be set; the flat
     * filter is translated into Zep's JSONPath "where" clause.
     */
    async similaritySearchVectorWithScore(
        query: number[],
        k: number,
        filter?: Record<string, unknown> | undefined
    ): Promise<[Document, number][]> {
        if (filter && this.filter) {
            throw new Error('cannot provide both `filter` and `this.filter`')
        }
        const _filters = filter ?? this.filter
        // Turn each flat key/value pair into a JSONPath predicate; string
        // values are quoted, all other values are interpolated as-is.
        const ANDFilters = []
        for (const filterKey in _filters) {
            let filterVal = _filters[filterKey]
            if (typeof filterVal === 'string') filterVal = `"${filterVal}"`
            ANDFilters.push({ jsonpath: `$[*] ? (@.${filterKey} == ${filterVal})` })
        }
        // NOTE(review): with no filter configured this still sends `and: []` —
        // presumably Zep treats the empty clause as "match all"; confirm.
        const newfilter = {
            where: { and: ANDFilters }
        }
        // Collection binding happens lazily on every search.
        await this.initializeCollection(this.args!).catch((err) => {
            console.error('Error initializing collection:', err)
            throw err
        })
        const results = await this.collection.search(
            {
                embedding: new Float32Array(query),
                metadata: assignMetadata(newfilter)
            },
            k
        )
        return zepDocsToDocumentsAndScore(results)
    }
|
||||
|
||||
    // Attaches lazily: no network work here; the collection is initialized on
    // the first search.
    static async fromExistingIndex(embeddings: Embeddings, dbConfig: IZepConfig & Partial<ZepFilter>): Promise<ZepVectorStore> {
        return new this(embeddings, dbConfig)
    }
|
||||
|
|
|
|||