Merge branch 'main' into feature/OpenAI-Assistant
18
LICENSE.md
|
|
@ -1,7 +1,23 @@
|
|||
Apache License
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
Flowise is governed by the Apache License 2.0, with additional terms and conditions outlined below:
|
||||
|
||||
Flowise can be used for commercial purposes for "backend-as-a-service" for your applications or as a development platform for enterprises. However, under specific conditions, you must reach out to the project's administrators to secure a commercial license:
|
||||
|
||||
a. Multi-tenant SaaS service: Unless you have explicit written authorization from Flowise, you may not utilize the Flowise source code to operate a multi-tenant SaaS service that closely resembles the Flowise cloud-based services.
|
||||
b. Logo and copyright information: While using Flowise in commercial application, you are prohibited from removing or altering the LOGO or copyright information displayed in the Flowise console and UI.
|
||||
|
||||
For inquiries regarding licensing matters, please contact hello@flowiseai.com via email.
|
||||
|
||||
Contributors are required to consent to the following terms related to their contributed code:
|
||||
|
||||
a. The project maintainers have the authority to modify the open-source agreement to be more stringent or lenient.
|
||||
b. Contributed code can be used for commercial purposes, including Flowise's cloud-based services.
|
||||
|
||||
All other rights and restrictions are in accordance with the Apache License 2.0.
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "flowise",
|
||||
"version": "1.4.0-rc.1",
|
||||
"version": "1.4.3",
|
||||
"private": true,
|
||||
"homepage": "https://flowiseai.com",
|
||||
"workspaces": [
|
||||
|
|
|
|||
|
|
@ -11,8 +11,8 @@ class ElasticSearchUserPassword implements INodeCredential {
|
|||
this.label = 'ElasticSearch User Password'
|
||||
this.name = 'elasticSearchUserPassword'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://www.elastic.co/guide/en/kibana/current/tutorial-secure-access-to-kibana.html">official guide</a> on how to get User Password from ElasticSearch'
|
||||
this.description = `Use Cloud ID field to enter your Elastic Cloud ID or the URL of the Elastic server instance.
|
||||
Refer to <a target="_blank" href="https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html">official guide</a> on how to get User Password from ElasticSearch.`
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Cloud ID',
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ class LangfuseApi implements INodeCredential {
|
|||
this.name = 'langfuseApi'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Refer to <a target="_blank" href="https://langfuse.com/docs/get-started/">official guide</a> on how to get API key on Langfuse'
|
||||
'Refer to <a target="_blank" href="https://langfuse.com/docs/flowise">integration guide</a> on how to get API keys on Langfuse'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Secret Key',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,25 @@
|
|||
import { INodeParams, INodeCredential } from '../src/Interface'
|
||||
|
||||
class MongoDBUrlApi implements INodeCredential {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'MongoDB ATLAS'
|
||||
this.name = 'mongoDBUrlApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'ATLAS Connection URL',
|
||||
name: 'mongoDBConnectUrl',
|
||||
type: 'string',
|
||||
placeholder: 'mongodb+srv://<user>:<pwd>@cluster0.example.mongodb.net/?retryWrites=true&w=majority'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { credClass: MongoDBUrlApi }
|
||||
|
|
@ -8,7 +8,7 @@ class RedisCacheApi implements INodeCredential {
|
|||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Redis Cache API'
|
||||
this.label = 'Redis API'
|
||||
this.name = 'redisCacheApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ class RedisCacheUrlApi implements INodeCredential {
|
|||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Redis Cache URL'
|
||||
this.label = 'Redis URL'
|
||||
this.name = 'redisCacheUrlApi'
|
||||
this.version = 1.0
|
||||
this.inputs = [
|
||||
|
|
@ -16,7 +16,7 @@ class RedisCacheUrlApi implements INodeCredential {
|
|||
label: 'Redis URL',
|
||||
name: 'redisUrl',
|
||||
type: 'string',
|
||||
default: '127.0.0.1'
|
||||
default: 'redis://localhost:6379'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ import { initializeAgentExecutorWithOptions, AgentExecutor, InitializeAgentExecu
|
|||
import { Tool } from 'langchain/tools'
|
||||
import { BaseChatMemory } from 'langchain/memory'
|
||||
import { getBaseClasses, mapChatHistory } from '../../../src/utils'
|
||||
import { BaseLanguageModel } from 'langchain/base_language'
|
||||
import { BaseChatModel } from 'langchain/chat_models/base'
|
||||
import { flatten } from 'lodash'
|
||||
import { additionalCallbacks } from '../../../src/handler'
|
||||
|
||||
|
|
@ -29,7 +29,7 @@ class ConversationalAgent_Agents implements INode {
|
|||
constructor() {
|
||||
this.label = 'Conversational Agent'
|
||||
this.name = 'conversationalAgent'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'AgentExecutor'
|
||||
this.category = 'Agents'
|
||||
this.icon = 'agent.svg'
|
||||
|
|
@ -45,7 +45,7 @@ class ConversationalAgent_Agents implements INode {
|
|||
{
|
||||
label: 'Language Model',
|
||||
name: 'model',
|
||||
type: 'BaseLanguageModel'
|
||||
type: 'BaseChatModel'
|
||||
},
|
||||
{
|
||||
label: 'Memory',
|
||||
|
|
@ -65,7 +65,7 @@ class ConversationalAgent_Agents implements INode {
|
|||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const model = nodeData.inputs?.model as BaseLanguageModel
|
||||
const model = nodeData.inputs?.model as BaseChatModel
|
||||
let tools = nodeData.inputs?.tools as Tool[]
|
||||
tools = flatten(tools)
|
||||
const memory = nodeData.inputs?.memory as BaseChatMemory
|
||||
|
|
@ -92,8 +92,6 @@ class ConversationalAgent_Agents implements INode {
|
|||
const executor = nodeData.instance as AgentExecutor
|
||||
const memory = nodeData.inputs?.memory as BaseChatMemory
|
||||
|
||||
const callbacks = await additionalCallbacks(nodeData, options)
|
||||
|
||||
if (options && options.chatHistory) {
|
||||
const chatHistoryClassName = memory.chatHistory.constructor.name
|
||||
// Only replace when its In-Memory
|
||||
|
|
@ -103,6 +101,10 @@ class ConversationalAgent_Agents implements INode {
|
|||
}
|
||||
}
|
||||
|
||||
;(executor.memory as any).returnMessages = true // Return true for BaseChatModel
|
||||
|
||||
const callbacks = await additionalCallbacks(nodeData, options)
|
||||
|
||||
const result = await executor.call({ input }, [...callbacks])
|
||||
return result?.output
|
||||
}
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ class ConversationalRetrievalAgent_Agents implements INode {
|
|||
constructor() {
|
||||
this.label = 'Conversational Retrieval Agent'
|
||||
this.name = 'conversationalRetrievalAgent'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'AgentExecutor'
|
||||
this.category = 'Agents'
|
||||
this.icon = 'agent.svg'
|
||||
|
|
@ -40,9 +40,9 @@ class ConversationalRetrievalAgent_Agents implements INode {
|
|||
type: 'BaseChatMemory'
|
||||
},
|
||||
{
|
||||
label: 'OpenAI Chat Model',
|
||||
label: 'OpenAI/Azure Chat Model',
|
||||
name: 'model',
|
||||
type: 'ChatOpenAI'
|
||||
type: 'ChatOpenAI | AzureChatOpenAI'
|
||||
},
|
||||
{
|
||||
label: 'System Message',
|
||||
|
|
@ -82,6 +82,8 @@ class ConversationalRetrievalAgent_Agents implements INode {
|
|||
if (executor.memory) {
|
||||
;(executor.memory as any).memoryKey = 'chat_history'
|
||||
;(executor.memory as any).outputKey = 'output'
|
||||
;(executor.memory as any).returnMessages = true
|
||||
|
||||
const chatHistoryClassName = (executor.memory as any).chatHistory.constructor.name
|
||||
// Only replace when its In-Memory
|
||||
if (chatHistoryClassName && chatHistoryClassName === 'ChatMessageHistory') {
|
||||
|
|
|
|||
|
|
@ -18,13 +18,13 @@ class MRKLAgentChat_Agents implements INode {
|
|||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'MRKL Agent for Chat Models'
|
||||
this.label = 'ReAct Agent for Chat Models'
|
||||
this.name = 'mrklAgentChat'
|
||||
this.version = 1.0
|
||||
this.type = 'AgentExecutor'
|
||||
this.category = 'Agents'
|
||||
this.icon = 'agent.svg'
|
||||
this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with Chat Models'
|
||||
this.description = 'Agent that uses the ReAct logic to decide what action to take, optimized to be used with Chat Models'
|
||||
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
|
||||
this.inputs = [
|
||||
{
|
||||
|
|
|
|||
|
|
@ -18,13 +18,13 @@ class MRKLAgentLLM_Agents implements INode {
|
|||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'MRKL Agent for LLMs'
|
||||
this.label = 'ReAct Agent for LLMs'
|
||||
this.name = 'mrklAgentLLM'
|
||||
this.version = 1.0
|
||||
this.type = 'AgentExecutor'
|
||||
this.category = 'Agents'
|
||||
this.icon = 'agent.svg'
|
||||
this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs'
|
||||
this.description = 'Agent that uses the ReAct logic to decide what action to take, optimized to be used with LLMs'
|
||||
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
|
||||
this.inputs = [
|
||||
{
|
||||
|
|
|
|||
|
|
@ -20,11 +20,11 @@ class OpenAIFunctionAgent_Agents implements INode {
|
|||
constructor() {
|
||||
this.label = 'OpenAI Function Agent'
|
||||
this.name = 'openAIFunctionAgent'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'AgentExecutor'
|
||||
this.category = 'Agents'
|
||||
this.icon = 'openai.png'
|
||||
this.description = `An agent that uses OpenAI's Function Calling functionality to pick the tool and args to call`
|
||||
this.description = `An agent that uses Function Calling to pick the tool and args to call`
|
||||
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
|
||||
this.inputs = [
|
||||
{
|
||||
|
|
@ -39,11 +39,9 @@ class OpenAIFunctionAgent_Agents implements INode {
|
|||
type: 'BaseChatMemory'
|
||||
},
|
||||
{
|
||||
label: 'OpenAI Chat Model',
|
||||
label: 'OpenAI/Azure Chat Model',
|
||||
name: 'model',
|
||||
description:
|
||||
'Only works with gpt-3.5-turbo-0613 and gpt-4-0613. Refer <a target="_blank" href="https://platform.openai.com/docs/guides/gpt/function-calling">docs</a> for more info',
|
||||
type: 'BaseChatModel'
|
||||
type: 'ChatOpenAI | AzureChatOpenAI'
|
||||
},
|
||||
{
|
||||
label: 'System Message',
|
||||
|
|
@ -89,6 +87,8 @@ class OpenAIFunctionAgent_Agents implements INode {
|
|||
}
|
||||
}
|
||||
|
||||
;(executor.memory as any).returnMessages = true // Return true for BaseChatModel
|
||||
|
||||
const loggerHandler = new ConsoleCallbackHandler(options.logger)
|
||||
const callbacks = await additionalCallbacks(nodeData, options)
|
||||
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ class GETApiChain_Chains implements INode {
|
|||
name: 'apiDocs',
|
||||
type: 'string',
|
||||
description:
|
||||
'Description of how API works. Please refer to more <a target="_blank" href="https://github.com/hwchase17/langchain/blob/master/langchain/chains/api/open_meteo_docs.py">examples</a>',
|
||||
'Description of how API works. Please refer to more <a target="_blank" href="https://github.com/langchain-ai/langchain/blob/master/libs/langchain/langchain/chains/api/open_meteo_docs.py">examples</a>',
|
||||
rows: 4
|
||||
},
|
||||
{
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@ class POSTApiChain_Chains implements INode {
|
|||
name: 'apiDocs',
|
||||
type: 'string',
|
||||
description:
|
||||
'Description of how API works. Please refer to more <a target="_blank" href="https://github.com/hwchase17/langchain/blob/master/langchain/chains/api/open_meteo_docs.py">examples</a>',
|
||||
'Description of how API works. Please refer to more <a target="_blank" href="https://github.com/langchain-ai/langchain/blob/master/libs/langchain/langchain/chains/api/open_meteo_docs.py">examples</a>',
|
||||
rows: 4
|
||||
},
|
||||
{
|
||||
|
|
|
|||
|
|
@ -106,16 +106,18 @@ class ConversationChain_Chains implements INode {
|
|||
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
|
||||
const chain = nodeData.instance as ConversationChain
|
||||
const memory = nodeData.inputs?.memory as BufferMemory
|
||||
memory.returnMessages = true // Return true for BaseChatModel
|
||||
|
||||
if (options && options.chatHistory) {
|
||||
const chatHistoryClassName = memory.chatHistory.constructor.name
|
||||
// Only replace when its In-Memory
|
||||
if (chatHistoryClassName && chatHistoryClassName === 'ChatMessageHistory') {
|
||||
memory.chatHistory = mapChatHistory(options)
|
||||
chain.memory = memory
|
||||
}
|
||||
}
|
||||
|
||||
chain.memory = memory
|
||||
|
||||
const loggerHandler = new ConsoleCallbackHandler(options.logger)
|
||||
const callbacks = await additionalCallbacks(nodeData, options)
|
||||
|
||||
|
|
|
|||
|
|
@ -1,12 +1,13 @@
|
|||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
|
||||
import { LLMChain } from 'langchain/chains'
|
||||
import { BaseLanguageModel } from 'langchain/base_language'
|
||||
import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language'
|
||||
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
|
||||
import { BaseOutputParser } from 'langchain/schema/output_parser'
|
||||
import { formatResponse, injectOutputParser } from '../../outputparsers/OutputParserHelpers'
|
||||
import { BaseLLMOutputParser } from 'langchain/schema/output_parser'
|
||||
import { OutputFixingParser } from 'langchain/output_parsers'
|
||||
import { checkInputs, Moderation, streamResponse } from '../../moderation/Moderation'
|
||||
|
||||
class LLMChain_Chains implements INode {
|
||||
label: string
|
||||
|
|
@ -47,6 +48,14 @@ class LLMChain_Chains implements INode {
|
|||
type: 'BaseLLMOutputParser',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Input Moderation',
|
||||
description: 'Detect text that could generate harmful output and prevent it from being sent to the language model',
|
||||
name: 'inputModeration',
|
||||
type: 'Moderation',
|
||||
optional: true,
|
||||
list: true
|
||||
},
|
||||
{
|
||||
label: 'Chain Name',
|
||||
name: 'chainName',
|
||||
|
|
@ -132,7 +141,7 @@ class LLMChain_Chains implements INode {
|
|||
|
||||
const runPrediction = async (
|
||||
inputVariables: string[],
|
||||
chain: LLMChain<string | object>,
|
||||
chain: LLMChain<string | object | BaseLanguageModel<any, BaseLanguageModelCallOptions>>,
|
||||
input: string,
|
||||
promptValuesRaw: ICommonObject | undefined,
|
||||
options: ICommonObject,
|
||||
|
|
@ -144,7 +153,7 @@ const runPrediction = async (
|
|||
const isStreaming = options.socketIO && options.socketIOClientId
|
||||
const socketIO = isStreaming ? options.socketIO : undefined
|
||||
const socketIOClientId = isStreaming ? options.socketIOClientId : ''
|
||||
|
||||
const moderations = nodeData.inputs?.inputModeration as Moderation[]
|
||||
/**
|
||||
* Apply string transformation to reverse converted special chars:
|
||||
* FROM: { "value": "hello i am benFLOWISE_NEWLINEFLOWISE_NEWLINEFLOWISE_TABhow are you?" }
|
||||
|
|
@ -152,6 +161,17 @@ const runPrediction = async (
|
|||
*/
|
||||
const promptValues = handleEscapeCharacters(promptValuesRaw, true)
|
||||
|
||||
if (moderations && moderations.length > 0) {
|
||||
try {
|
||||
// Use the output of the moderation chain as input for the LLM chain
|
||||
input = await checkInputs(moderations, input)
|
||||
} catch (e) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 500))
|
||||
streamResponse(isStreaming, e.message, socketIO, socketIOClientId)
|
||||
return formatResponse(e.message)
|
||||
}
|
||||
}
|
||||
|
||||
if (promptValues && inputVariables.length > 0) {
|
||||
let seen: string[] = []
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,339 @@
|
|||
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
import { VectorDBQAChain } from 'langchain/chains'
|
||||
import { Document } from 'langchain/document'
|
||||
import { VectaraStore } from 'langchain/vectorstores/vectara'
|
||||
import fetch from 'node-fetch'
|
||||
|
||||
// functionality based on https://github.com/vectara/vectara-answer
|
||||
const reorderCitations = (unorderedSummary: string) => {
|
||||
const allCitations = unorderedSummary.match(/\[\d+\]/g) || []
|
||||
|
||||
const uniqueCitations = [...new Set(allCitations)]
|
||||
const citationToReplacement: { [key: string]: string } = {}
|
||||
uniqueCitations.forEach((citation, index) => {
|
||||
citationToReplacement[citation] = `[${index + 1}]`
|
||||
})
|
||||
|
||||
return unorderedSummary.replace(/\[\d+\]/g, (match) => citationToReplacement[match])
|
||||
}
|
||||
const applyCitationOrder = (searchResults: any[], unorderedSummary: string) => {
|
||||
const orderedSearchResults: any[] = []
|
||||
const allCitations = unorderedSummary.match(/\[\d+\]/g) || []
|
||||
|
||||
const addedIndices = new Set<number>()
|
||||
for (let i = 0; i < allCitations.length; i++) {
|
||||
const citation = allCitations[i]
|
||||
const index = Number(citation.slice(1, citation.length - 1)) - 1
|
||||
|
||||
if (addedIndices.has(index)) continue
|
||||
orderedSearchResults.push(searchResults[index])
|
||||
addedIndices.add(index)
|
||||
}
|
||||
|
||||
return orderedSearchResults
|
||||
}
|
||||
|
||||
class VectaraChain_Chains implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
baseClasses: string[]
|
||||
description: string
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Vectara QA Chain'
|
||||
this.name = 'vectaraQAChain'
|
||||
this.version = 1.0
|
||||
this.type = 'VectaraQAChain'
|
||||
this.icon = 'vectara.png'
|
||||
this.category = 'Chains'
|
||||
this.description = 'QA chain for Vectara'
|
||||
this.baseClasses = [this.type, ...getBaseClasses(VectorDBQAChain)]
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Vectara Store',
|
||||
name: 'vectaraStore',
|
||||
type: 'VectorStore'
|
||||
},
|
||||
{
|
||||
label: 'Summarizer Prompt Name',
|
||||
name: 'summarizerPromptName',
|
||||
description:
|
||||
'Summarize the results fetched from Vectara. Read <a target="_blank" href="https://docs.vectara.com/docs/learn/grounded-generation/select-a-summarizer">more</a>',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'vectara-summary-ext-v1.2.0 (gpt-3.5-turbo)',
|
||||
name: 'vectara-summary-ext-v1.2.0'
|
||||
},
|
||||
{
|
||||
label: 'vectara-experimental-summary-ext-2023-10-23-small (gpt-3.5-turbo)',
|
||||
name: 'vectara-experimental-summary-ext-2023-10-23-small',
|
||||
description: 'In beta, available to both Growth and Scale Vectara users'
|
||||
},
|
||||
{
|
||||
label: 'vectara-summary-ext-v1.3.0 (gpt-4.0)',
|
||||
name: 'vectara-summary-ext-v1.3.0',
|
||||
description: 'Only available to paying Scale Vectara users'
|
||||
},
|
||||
{
|
||||
label: 'vectara-experimental-summary-ext-2023-10-23-med (gpt-4.0)',
|
||||
name: 'vectara-experimental-summary-ext-2023-10-23-med',
|
||||
description: 'In beta, only available to paying Scale Vectara users'
|
||||
}
|
||||
],
|
||||
default: 'vectara-summary-ext-v1.2.0'
|
||||
},
|
||||
{
|
||||
label: 'Response Language',
|
||||
name: 'responseLang',
|
||||
description:
|
||||
'Return the response in specific language. If not selected, Vectara will automatically detects the language. Read <a target="_blank" href="https://docs.vectara.com/docs/learn/grounded-generation/grounded-generation-response-languages">more</a>',
|
||||
type: 'options',
|
||||
options: [
|
||||
{
|
||||
label: 'English',
|
||||
name: 'eng'
|
||||
},
|
||||
{
|
||||
label: 'German',
|
||||
name: 'deu'
|
||||
},
|
||||
{
|
||||
label: 'French',
|
||||
name: 'fra'
|
||||
},
|
||||
{
|
||||
label: 'Chinese',
|
||||
name: 'zho'
|
||||
},
|
||||
{
|
||||
label: 'Korean',
|
||||
name: 'kor'
|
||||
},
|
||||
{
|
||||
label: 'Arabic',
|
||||
name: 'ara'
|
||||
},
|
||||
{
|
||||
label: 'Russian',
|
||||
name: 'rus'
|
||||
},
|
||||
{
|
||||
label: 'Thai',
|
||||
name: 'tha'
|
||||
},
|
||||
{
|
||||
label: 'Dutch',
|
||||
name: 'nld'
|
||||
},
|
||||
{
|
||||
label: 'Italian',
|
||||
name: 'ita'
|
||||
},
|
||||
{
|
||||
label: 'Portuguese',
|
||||
name: 'por'
|
||||
},
|
||||
{
|
||||
label: 'Spanish',
|
||||
name: 'spa'
|
||||
},
|
||||
{
|
||||
label: 'Japanese',
|
||||
name: 'jpn'
|
||||
},
|
||||
{
|
||||
label: 'Polish',
|
||||
name: 'pol'
|
||||
},
|
||||
{
|
||||
label: 'Turkish',
|
||||
name: 'tur'
|
||||
},
|
||||
{
|
||||
label: 'Vietnamese',
|
||||
name: 'vie'
|
||||
},
|
||||
{
|
||||
label: 'Indonesian',
|
||||
name: 'ind'
|
||||
},
|
||||
{
|
||||
label: 'Czech',
|
||||
name: 'ces'
|
||||
},
|
||||
{
|
||||
label: 'Ukrainian',
|
||||
name: 'ukr'
|
||||
},
|
||||
{
|
||||
label: 'Greek',
|
||||
name: 'ell'
|
||||
},
|
||||
{
|
||||
label: 'Hebrew',
|
||||
name: 'heb'
|
||||
},
|
||||
{
|
||||
label: 'Farsi/Persian',
|
||||
name: 'fas'
|
||||
},
|
||||
{
|
||||
label: 'Hindi',
|
||||
name: 'hin'
|
||||
},
|
||||
{
|
||||
label: 'Urdu',
|
||||
name: 'urd'
|
||||
},
|
||||
{
|
||||
label: 'Swedish',
|
||||
name: 'swe'
|
||||
},
|
||||
{
|
||||
label: 'Bengali',
|
||||
name: 'ben'
|
||||
},
|
||||
{
|
||||
label: 'Malay',
|
||||
name: 'msa'
|
||||
},
|
||||
{
|
||||
label: 'Romanian',
|
||||
name: 'ron'
|
||||
}
|
||||
],
|
||||
optional: true,
|
||||
default: 'eng'
|
||||
},
|
||||
{
|
||||
label: 'Max Summarized Results',
|
||||
name: 'maxSummarizedResults',
|
||||
description: 'Maximum results used to build the summarized response',
|
||||
type: 'number',
|
||||
default: 7
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(): Promise<any> {
|
||||
return null
|
||||
}
|
||||
|
||||
async run(nodeData: INodeData, input: string): Promise<object> {
|
||||
const vectorStore = nodeData.inputs?.vectaraStore as VectaraStore
|
||||
const responseLang = (nodeData.inputs?.responseLang as string) ?? 'auto'
|
||||
const summarizerPromptName = nodeData.inputs?.summarizerPromptName as string
|
||||
const maxSummarizedResultsStr = nodeData.inputs?.maxSummarizedResults as string
|
||||
const maxSummarizedResults = maxSummarizedResultsStr ? parseInt(maxSummarizedResultsStr, 10) : 7
|
||||
|
||||
const topK = (vectorStore as any)?.k ?? 10
|
||||
|
||||
const headers = await vectorStore.getJsonHeader()
|
||||
const vectaraFilter = (vectorStore as any).vectaraFilter ?? {}
|
||||
const corpusId: number[] = (vectorStore as any).corpusId ?? []
|
||||
const customerId = (vectorStore as any).customerId ?? ''
|
||||
|
||||
const corpusKeys = corpusId.map((corpusId) => ({
|
||||
customerId,
|
||||
corpusId,
|
||||
metadataFilter: vectaraFilter?.filter ?? '',
|
||||
lexicalInterpolationConfig: { lambda: vectaraFilter?.lambda ?? 0.025 }
|
||||
}))
|
||||
|
||||
const data = {
|
||||
query: [
|
||||
{
|
||||
query: input,
|
||||
start: 0,
|
||||
numResults: topK,
|
||||
contextConfig: {
|
||||
sentencesAfter: vectaraFilter?.contextConfig?.sentencesAfter ?? 2,
|
||||
sentencesBefore: vectaraFilter?.contextConfig?.sentencesBefore ?? 2
|
||||
},
|
||||
corpusKey: corpusKeys,
|
||||
summary: [
|
||||
{
|
||||
summarizerPromptName,
|
||||
responseLang,
|
||||
maxSummarizedResults
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`https://api.vectara.io/v1/query`, {
|
||||
method: 'POST',
|
||||
headers: headers?.headers,
|
||||
body: JSON.stringify(data)
|
||||
})
|
||||
|
||||
if (response.status !== 200) {
|
||||
throw new Error(`Vectara API returned status code ${response.status}`)
|
||||
}
|
||||
|
||||
const result = await response.json()
|
||||
const responses = result.responseSet[0].response
|
||||
const documents = result.responseSet[0].document
|
||||
let rawSummarizedText = ''
|
||||
|
||||
for (let i = 0; i < responses.length; i += 1) {
|
||||
const responseMetadata = responses[i].metadata
|
||||
const documentMetadata = documents[responses[i].documentIndex].metadata
|
||||
const combinedMetadata: Record<string, unknown> = {}
|
||||
|
||||
responseMetadata.forEach((item: { name: string; value: unknown }) => {
|
||||
combinedMetadata[item.name] = item.value
|
||||
})
|
||||
|
||||
documentMetadata.forEach((item: { name: string; value: unknown }) => {
|
||||
combinedMetadata[item.name] = item.value
|
||||
})
|
||||
|
||||
responses[i].metadata = combinedMetadata
|
||||
}
|
||||
|
||||
const summaryStatus = result.responseSet[0].summary[0].status
|
||||
if (summaryStatus.length > 0 && summaryStatus[0].code === 'BAD_REQUEST') {
|
||||
throw new Error(
|
||||
`BAD REQUEST: Too much text for the summarizer to summarize. Please try reducing the number of search results to summarize, or the context of each result by adjusting the 'summary_num_sentences', and 'summary_num_results' parameters respectively.`
|
||||
)
|
||||
}
|
||||
|
||||
if (
|
||||
summaryStatus.length > 0 &&
|
||||
summaryStatus[0].code === 'NOT_FOUND' &&
|
||||
summaryStatus[0].statusDetail === 'Failed to retrieve summarizer.'
|
||||
) {
|
||||
throw new Error(`BAD REQUEST: summarizer ${summarizerPromptName} is invalid for this account.`)
|
||||
}
|
||||
|
||||
rawSummarizedText = result.responseSet[0].summary[0]?.text
|
||||
|
||||
let summarizedText = reorderCitations(rawSummarizedText)
|
||||
let summaryResponses = applyCitationOrder(responses, rawSummarizedText)
|
||||
|
||||
const sourceDocuments: Document[] = summaryResponses.map(
|
||||
(response: { text: string; metadata: Record<string, unknown>; score: number }) =>
|
||||
new Document({
|
||||
pageContent: response.text,
|
||||
metadata: response.metadata
|
||||
})
|
||||
)
|
||||
|
||||
return { text: summarizedText, sourceDocuments: sourceDocuments }
|
||||
} catch (error) {
|
||||
throw new Error(error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: VectaraChain_Chains }
|
||||
|
After Width: | Height: | Size: 66 KiB |
|
|
@ -27,7 +27,7 @@ class AWSChatBedrock_ChatModels implements INode {
|
|||
constructor() {
|
||||
this.label = 'AWS Bedrock'
|
||||
this.name = 'awsChatBedrock'
|
||||
this.version = 2.0
|
||||
this.version = 3.0
|
||||
this.type = 'AWSChatBedrock'
|
||||
this.icon = 'awsBedrock.png'
|
||||
this.category = 'Chat Models'
|
||||
|
|
@ -97,7 +97,8 @@ class AWSChatBedrock_ChatModels implements INode {
|
|||
options: [
|
||||
{ label: 'anthropic.claude-instant-v1', name: 'anthropic.claude-instant-v1' },
|
||||
{ label: 'anthropic.claude-v1', name: 'anthropic.claude-v1' },
|
||||
{ label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' }
|
||||
{ label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' },
|
||||
{ label: 'meta.llama2-13b-chat-v1', name: 'meta.llama2-13b-chat-v1' }
|
||||
],
|
||||
default: 'anthropic.claude-v2'
|
||||
},
|
||||
|
|
@ -128,12 +129,14 @@ class AWSChatBedrock_ChatModels implements INode {
|
|||
const iTemperature = nodeData.inputs?.temperature as string
|
||||
const iMax_tokens_to_sample = nodeData.inputs?.max_tokens_to_sample as string
|
||||
const cache = nodeData.inputs?.cache as BaseCache
|
||||
const streaming = nodeData.inputs?.streaming as boolean
|
||||
|
||||
const obj: BaseBedrockInput & BaseLLMParams = {
|
||||
region: iRegion,
|
||||
model: iModel,
|
||||
maxTokens: parseInt(iMax_tokens_to_sample, 10),
|
||||
temperature: parseFloat(iTemperature)
|
||||
temperature: parseFloat(iTemperature),
|
||||
streaming: streaming ?? true
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ class ChatAnthropic_ChatModels implements INode {
|
|||
constructor() {
|
||||
this.label = 'ChatAnthropic'
|
||||
this.name = 'chatAnthropic'
|
||||
this.version = 2.0
|
||||
this.version = 3.0
|
||||
this.type = 'ChatAnthropic'
|
||||
this.icon = 'chatAnthropic.png'
|
||||
this.category = 'Chat Models'
|
||||
|
|
@ -48,6 +48,11 @@ class ChatAnthropic_ChatModels implements INode {
|
|||
name: 'claude-2',
|
||||
description: 'Claude 2 latest major version, automatically get updates to the model as they are released'
|
||||
},
|
||||
{
|
||||
label: 'claude-2.1',
|
||||
name: 'claude-2.1',
|
||||
description: 'Claude 2 latest full version'
|
||||
},
|
||||
{
|
||||
label: 'claude-instant-1',
|
||||
name: 'claude-instant-1',
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@ class AWSBedrockEmbedding_Embeddings implements INode {
|
|||
constructor() {
|
||||
this.label = 'AWS Bedrock Embeddings'
|
||||
this.name = 'AWSBedrockEmbeddings'
|
||||
this.version = 1.0
|
||||
this.version = 2.0
|
||||
this.type = 'AWSBedrockEmbeddings'
|
||||
this.icon = 'awsBedrock.png'
|
||||
this.category = 'Embeddings'
|
||||
|
|
@ -81,7 +81,9 @@ class AWSBedrockEmbedding_Embeddings implements INode {
|
|||
type: 'options',
|
||||
options: [
|
||||
{ label: 'amazon.titan-embed-text-v1', name: 'amazon.titan-embed-text-v1' },
|
||||
{ label: 'amazon.titan-embed-g1-text-02', name: 'amazon.titan-embed-g1-text-02' }
|
||||
{ label: 'amazon.titan-embed-g1-text-02', name: 'amazon.titan-embed-g1-text-02' },
|
||||
{ label: 'cohere.embed-english-v3', name: 'cohere.embed-english-v3' },
|
||||
{ label: 'cohere.embed-multilingual-v3', name: 'cohere.embed-multilingual-v3' }
|
||||
],
|
||||
default: 'amazon.titan-embed-text-v1'
|
||||
}
|
||||
|
|
|
|||
|
|
@ -27,7 +27,7 @@ class AWSBedrock_LLMs implements INode {
|
|||
constructor() {
|
||||
this.label = 'AWS Bedrock'
|
||||
this.name = 'awsBedrock'
|
||||
this.version = 1.2
|
||||
this.version = 2.0
|
||||
this.type = 'AWSBedrock'
|
||||
this.icon = 'awsBedrock.png'
|
||||
this.category = 'LLMs'
|
||||
|
|
@ -98,6 +98,7 @@ class AWSBedrock_LLMs implements INode {
|
|||
{ label: 'amazon.titan-tg1-large', name: 'amazon.titan-tg1-large' },
|
||||
{ label: 'amazon.titan-e1t-medium', name: 'amazon.titan-e1t-medium' },
|
||||
{ label: 'cohere.command-text-v14', name: 'cohere.command-text-v14' },
|
||||
{ label: 'cohere.command-light-text-v14', name: 'cohere.command-light-text-v14' },
|
||||
{ label: 'ai21.j2-grande-instruct', name: 'ai21.j2-grande-instruct' },
|
||||
{ label: 'ai21.j2-jumbo-instruct', name: 'ai21.j2-jumbo-instruct' },
|
||||
{ label: 'ai21.j2-mid', name: 'ai21.j2-mid' },
|
||||
|
|
|
|||
|
|
@ -109,9 +109,8 @@ const initalizeDynamoDB = async (nodeData: INodeData, options: ICommonObject): P
|
|||
})
|
||||
|
||||
const memory = new BufferMemoryExtended({
|
||||
memoryKey,
|
||||
memoryKey: memoryKey ?? 'chat_history',
|
||||
chatHistory: dynamoDb,
|
||||
returnMessages: true,
|
||||
isSessionIdUsingChatMessageId
|
||||
})
|
||||
return memory
|
||||
|
|
|
|||
|
|
@ -0,0 +1,145 @@
|
|||
import { getBaseClasses, getCredentialData, getCredentialParam, ICommonObject, INode, INodeData, INodeParams } from '../../../src'
|
||||
import { MongoDBChatMessageHistory } from 'langchain/stores/message/mongodb'
|
||||
import { BufferMemory, BufferMemoryInput } from 'langchain/memory'
|
||||
import { BaseMessage, mapStoredMessageToChatMessage } from 'langchain/schema'
|
||||
import { MongoClient } from 'mongodb'
|
||||
|
||||
class MongoDB_Memory implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
baseClasses: string[]
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'MongoDB Atlas Chat Memory'
|
||||
this.name = 'MongoDBAtlasChatMemory'
|
||||
this.version = 1.0
|
||||
this.type = 'MongoDBAtlasChatMemory'
|
||||
this.icon = 'mongodb.png'
|
||||
this.category = 'Memory'
|
||||
this.description = 'Stores the conversation in MongoDB Atlas'
|
||||
this.baseClasses = [this.type, ...getBaseClasses(BufferMemory)]
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['mongoDBUrlApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'databaseName',
|
||||
placeholder: '<DB_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Collection Name',
|
||||
name: 'collectionName',
|
||||
placeholder: '<COLLECTION_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Session Id',
|
||||
name: 'sessionId',
|
||||
type: 'string',
|
||||
description: 'If not specified, the first CHAT_MESSAGE_ID will be used as sessionId',
|
||||
default: '',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Memory Key',
|
||||
name: 'memoryKey',
|
||||
type: 'string',
|
||||
default: 'chat_history',
|
||||
additionalParams: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
return initializeMongoDB(nodeData, options)
|
||||
}
|
||||
|
||||
async clearSessionMemory(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const mongodbMemory = await initializeMongoDB(nodeData, options)
|
||||
const sessionId = nodeData.inputs?.sessionId as string
|
||||
const chatId = options?.chatId as string
|
||||
options.logger.info(`Clearing MongoDB memory session ${sessionId ? sessionId : chatId}`)
|
||||
await mongodbMemory.clear()
|
||||
options.logger.info(`Successfully cleared MongoDB memory session ${sessionId ? sessionId : chatId}`)
|
||||
}
|
||||
}
|
||||
|
||||
const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): Promise<BufferMemory> => {
|
||||
const databaseName = nodeData.inputs?.databaseName as string
|
||||
const collectionName = nodeData.inputs?.collectionName as string
|
||||
const sessionId = nodeData.inputs?.sessionId as string
|
||||
const memoryKey = nodeData.inputs?.memoryKey as string
|
||||
const chatId = options?.chatId as string
|
||||
|
||||
let isSessionIdUsingChatMessageId = false
|
||||
if (!sessionId && chatId) isSessionIdUsingChatMessageId = true
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
let mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData)
|
||||
|
||||
const client = new MongoClient(mongoDBConnectUrl)
|
||||
await client.connect()
|
||||
const collection = client.db(databaseName).collection(collectionName)
|
||||
|
||||
const mongoDBChatMessageHistory = new MongoDBChatMessageHistory({
|
||||
collection,
|
||||
sessionId: sessionId ? sessionId : chatId
|
||||
})
|
||||
|
||||
mongoDBChatMessageHistory.getMessages = async (): Promise<BaseMessage[]> => {
|
||||
const document = await collection.findOne({
|
||||
sessionId: (mongoDBChatMessageHistory as any).sessionId
|
||||
})
|
||||
const messages = document?.messages || []
|
||||
return messages.map(mapStoredMessageToChatMessage)
|
||||
}
|
||||
|
||||
mongoDBChatMessageHistory.addMessage = async (message: BaseMessage): Promise<void> => {
|
||||
const messages = [message].map((msg) => msg.toDict())
|
||||
await collection.updateOne(
|
||||
{ sessionId: (mongoDBChatMessageHistory as any).sessionId },
|
||||
{
|
||||
$push: { messages: { $each: messages } }
|
||||
},
|
||||
{ upsert: true }
|
||||
)
|
||||
}
|
||||
|
||||
mongoDBChatMessageHistory.clear = async (): Promise<void> => {
|
||||
await collection.deleteOne({ sessionId: (mongoDBChatMessageHistory as any).sessionId })
|
||||
}
|
||||
|
||||
return new BufferMemoryExtended({
|
||||
memoryKey: memoryKey ?? 'chat_history',
|
||||
chatHistory: mongoDBChatMessageHistory,
|
||||
isSessionIdUsingChatMessageId
|
||||
})
|
||||
}
|
||||
|
||||
interface BufferMemoryExtendedInput {
|
||||
isSessionIdUsingChatMessageId: boolean
|
||||
}
|
||||
|
||||
class BufferMemoryExtended extends BufferMemory {
|
||||
isSessionIdUsingChatMessageId? = false
|
||||
|
||||
constructor(fields: BufferMemoryInput & Partial<BufferMemoryExtendedInput>) {
|
||||
super(fields)
|
||||
this.isSessionIdUsingChatMessageId = fields.isSessionIdUsingChatMessageId
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: MongoDB_Memory }
|
||||
|
After Width: | Height: | Size: 3.7 KiB |
|
|
@ -137,7 +137,7 @@ const initalizeRedis = async (nodeData: INodeData, options: ICommonObject): Prom
|
|||
}
|
||||
|
||||
const memory = new BufferMemoryExtended({
|
||||
memoryKey,
|
||||
memoryKey: memoryKey ?? 'chat_history',
|
||||
chatHistory: redisChatMessageHistory,
|
||||
isSessionIdUsingChatMessageId
|
||||
})
|
||||
|
|
|
|||
|
|
@ -95,6 +95,7 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject
|
|||
})
|
||||
|
||||
const memory = new BufferMemoryExtended({
|
||||
memoryKey: 'chat_history',
|
||||
chatHistory: redisChatMessageHistory,
|
||||
isSessionIdUsingChatMessageId
|
||||
})
|
||||
|
|
|
|||
|
|
@ -0,0 +1,27 @@
|
|||
import { Server } from 'socket.io'
|
||||
|
||||
export abstract class Moderation {
|
||||
abstract checkForViolations(input: string): Promise<string>
|
||||
}
|
||||
|
||||
export const checkInputs = async (inputModerations: Moderation[], input: string): Promise<string> => {
|
||||
for (const moderation of inputModerations) {
|
||||
input = await moderation.checkForViolations(input)
|
||||
}
|
||||
return input
|
||||
}
|
||||
|
||||
// is this the correct location for this function?
|
||||
// should we have a utils files that all node components can use?
|
||||
export const streamResponse = (isStreaming: any, response: string, socketIO: Server, socketIOClientId: string) => {
|
||||
if (isStreaming) {
|
||||
const result = response.split(/(\s+)/)
|
||||
result.forEach((token: string, index: number) => {
|
||||
if (index === 0) {
|
||||
socketIO.to(socketIOClientId).emit('start', token)
|
||||
}
|
||||
socketIO.to(socketIOClientId).emit('token', token)
|
||||
})
|
||||
socketIO.to(socketIOClientId).emit('end')
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src'
|
||||
import { Moderation } from '../Moderation'
|
||||
import { OpenAIModerationRunner } from './OpenAIModerationRunner'
|
||||
|
||||
class OpenAIModeration implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
baseClasses: string[]
|
||||
credential: INodeParams
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'OpenAI Moderation'
|
||||
this.name = 'inputModerationOpenAI'
|
||||
this.version = 1.0
|
||||
this.type = 'Moderation'
|
||||
this.icon = 'openai.png'
|
||||
this.category = 'Moderation'
|
||||
this.description = 'Check whether content complies with OpenAI usage policies.'
|
||||
this.baseClasses = [this.type, ...getBaseClasses(Moderation)]
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['openAIApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Error Message',
|
||||
name: 'moderationErrorMessage',
|
||||
type: 'string',
|
||||
rows: 2,
|
||||
default: "Cannot Process! Input violates OpenAI's content moderation policies.",
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData)
|
||||
|
||||
const runner = new OpenAIModerationRunner(openAIApiKey)
|
||||
const moderationErrorMessage = nodeData.inputs?.moderationErrorMessage as string
|
||||
if (moderationErrorMessage) runner.setErrorMessage(moderationErrorMessage)
|
||||
return runner
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: OpenAIModeration }
|
||||
|
|
@ -0,0 +1,34 @@
|
|||
import { Moderation } from '../Moderation'
|
||||
import { OpenAIModerationChain } from 'langchain/chains'
|
||||
|
||||
export class OpenAIModerationRunner implements Moderation {
|
||||
private openAIApiKey = ''
|
||||
private moderationErrorMessage: string = "Text was found that violates OpenAI's content policy."
|
||||
|
||||
constructor(openAIApiKey: string) {
|
||||
this.openAIApiKey = openAIApiKey
|
||||
}
|
||||
|
||||
async checkForViolations(input: string): Promise<string> {
|
||||
if (!this.openAIApiKey) {
|
||||
throw Error('OpenAI API key not found')
|
||||
}
|
||||
// Create a new instance of the OpenAIModerationChain
|
||||
const moderation = new OpenAIModerationChain({
|
||||
openAIApiKey: this.openAIApiKey,
|
||||
throwError: false // If set to true, the call will throw an error when the moderation chain detects violating content. If set to false, violating content will return "Text was found that violates OpenAI's content policy.".
|
||||
})
|
||||
// Send the user's input to the moderation chain and wait for the result
|
||||
const { output: moderationOutput, results } = await moderation.call({
|
||||
input: input
|
||||
})
|
||||
if (results[0].flagged) {
|
||||
throw Error(this.moderationErrorMessage)
|
||||
}
|
||||
return moderationOutput
|
||||
}
|
||||
|
||||
setErrorMessage(message: string) {
|
||||
this.moderationErrorMessage = message
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 3.9 KiB |
|
|
@ -0,0 +1,55 @@
|
|||
import { INode, INodeData, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses } from '../../../src'
|
||||
import { Moderation } from '../Moderation'
|
||||
import { SimplePromptModerationRunner } from './SimplePromptModerationRunner'
|
||||
|
||||
class SimplePromptModeration implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Simple Prompt Moderation'
|
||||
this.name = 'inputModerationSimple'
|
||||
this.version = 1.0
|
||||
this.type = 'Moderation'
|
||||
this.icon = 'simple_moderation.png'
|
||||
this.category = 'Moderation'
|
||||
this.description = 'Check whether input consists of any text from Deny list, and prevent being sent to LLM'
|
||||
this.baseClasses = [this.type, ...getBaseClasses(Moderation)]
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Deny List',
|
||||
name: 'denyList',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
placeholder: `ignore previous instructions\ndo not follow the directions\nyou must ignore all previous instructions`,
|
||||
description: 'An array of string literals (enter one per line) that should not appear in the prompt text.',
|
||||
optional: false
|
||||
},
|
||||
{
|
||||
label: 'Error Message',
|
||||
name: 'moderationErrorMessage',
|
||||
type: 'string',
|
||||
rows: 2,
|
||||
default: 'Cannot Process! Input violates content moderation policies.',
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const denyList = nodeData.inputs?.denyList as string
|
||||
const moderationErrorMessage = nodeData.inputs?.moderationErrorMessage as string
|
||||
|
||||
return new SimplePromptModerationRunner(denyList, moderationErrorMessage)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: SimplePromptModeration }
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
import { Moderation } from '../Moderation'
|
||||
|
||||
export class SimplePromptModerationRunner implements Moderation {
|
||||
private readonly denyList: string = ''
|
||||
private readonly moderationErrorMessage: string = ''
|
||||
|
||||
constructor(denyList: string, moderationErrorMessage: string) {
|
||||
this.denyList = denyList
|
||||
if (denyList.indexOf('\n') === -1) {
|
||||
this.denyList += '\n'
|
||||
}
|
||||
this.moderationErrorMessage = moderationErrorMessage
|
||||
}
|
||||
|
||||
async checkForViolations(input: string): Promise<string> {
|
||||
this.denyList.split('\n').forEach((denyListItem) => {
|
||||
if (denyListItem && denyListItem !== '' && input.toLowerCase().includes(denyListItem.toLowerCase())) {
|
||||
throw Error(this.moderationErrorMessage)
|
||||
}
|
||||
})
|
||||
return Promise.resolve(input)
|
||||
}
|
||||
}
|
||||
|
After Width: | Height: | Size: 44 KiB |
|
|
@ -1,6 +1,6 @@
|
|||
import { BaseOutputParser } from 'langchain/schema/output_parser'
|
||||
import { LLMChain } from 'langchain/chains'
|
||||
import { BaseLanguageModel } from 'langchain/base_language'
|
||||
import { BaseLanguageModel, BaseLanguageModelCallOptions } from 'langchain/base_language'
|
||||
import { ICommonObject } from '../../src'
|
||||
import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts'
|
||||
|
||||
|
|
@ -15,7 +15,7 @@ export const formatResponse = (response: string | object): string | object => {
|
|||
|
||||
export const injectOutputParser = (
|
||||
outputParser: BaseOutputParser<unknown>,
|
||||
chain: LLMChain<string, BaseLanguageModel>,
|
||||
chain: LLMChain<string | object | BaseLanguageModel<any, BaseLanguageModelCallOptions>>,
|
||||
promptValues: ICommonObject | undefined = undefined
|
||||
) => {
|
||||
if (outputParser && chain.prompt) {
|
||||
|
|
|
|||
|
|
@ -104,7 +104,7 @@ class HydeRetriever_Retrievers implements INode {
|
|||
const promptKey = nodeData.inputs?.promptKey as PromptKey
|
||||
const customPrompt = nodeData.inputs?.customPrompt as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseInt(topK, 10) : 4
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const obj: HydeRetrieverOptions<any> = {
|
||||
llm,
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ class ZapierNLA_Tools implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -23,6 +24,7 @@ class ZapierNLA_Tools implements INode {
|
|||
this.icon = 'zapier.svg'
|
||||
this.category = 'Tools'
|
||||
this.description = "Access to apps and actions on Zapier's platform through a natural language API interface"
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = []
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,170 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Chroma } from 'langchain/vectorstores/chroma'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { ChromaExtended } from './core'
|
||||
|
||||
class Chroma_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Chroma'
|
||||
this.name = 'chroma'
|
||||
this.version = 1.0
|
||||
this.type = 'Chroma'
|
||||
this.icon = 'chroma.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity search upon query using Chroma, an open-source embedding database'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Only needed if you have chroma on cloud services with X-Api-key',
|
||||
optional: true,
|
||||
credentialNames: ['chromaApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Collection Name',
|
||||
name: 'collectionName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Chroma URL',
|
||||
name: 'chromaURL',
|
||||
type: 'string',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Chroma Metadata Filter',
|
||||
name: 'chromaMetadataFilter',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Chroma Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Chroma Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(Chroma)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const collectionName = nodeData.inputs?.collectionName as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const chromaURL = nodeData.inputs?.chromaURL as string
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const chromaApiKey = getCredentialParam('chromaApiKey', credentialData, nodeData)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const obj: {
|
||||
collectionName: string
|
||||
url?: string
|
||||
chromaApiKey?: string
|
||||
} = { collectionName }
|
||||
if (chromaURL) obj.url = chromaURL
|
||||
if (chromaApiKey) obj.chromaApiKey = chromaApiKey
|
||||
|
||||
try {
|
||||
await ChromaExtended.fromDocuments(finalDocs, embeddings, obj)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const collectionName = nodeData.inputs?.collectionName as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const chromaURL = nodeData.inputs?.chromaURL as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const chromaApiKey = getCredentialParam('chromaApiKey', credentialData, nodeData)
|
||||
|
||||
const chromaMetadataFilter = nodeData.inputs?.chromaMetadataFilter
|
||||
|
||||
const obj: {
|
||||
collectionName: string
|
||||
url?: string
|
||||
chromaApiKey?: string
|
||||
filter?: object | undefined
|
||||
} = { collectionName }
|
||||
if (chromaURL) obj.url = chromaURL
|
||||
if (chromaApiKey) obj.chromaApiKey = chromaApiKey
|
||||
if (chromaMetadataFilter) {
|
||||
const metadatafilter = typeof chromaMetadataFilter === 'object' ? chromaMetadataFilter : JSON.parse(chromaMetadataFilter)
|
||||
obj.filter = metadatafilter
|
||||
}
|
||||
|
||||
const vectorStore = await ChromaExtended.fromExistingCollection(embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Chroma_VectorStores }
|
||||
|
|
@ -12,6 +12,7 @@ class Chroma_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -26,6 +27,7 @@ class Chroma_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Chroma (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ class ChromaUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -28,6 +29,7 @@ class ChromaUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Chroma'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ export abstract class ElasticSearchBase {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -30,6 +31,7 @@ export abstract class ElasticSearchBase {
|
|||
this.type = 'Elasticsearch'
|
||||
this.icon = 'elasticsearch.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.badge = 'DEPRECATING'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
|
|
@ -144,13 +146,26 @@ export abstract class ElasticSearchBase {
|
|||
} else if (cloudId) {
|
||||
let username = getCredentialParam('username', credentialData, nodeData)
|
||||
let password = getCredentialParam('password', credentialData, nodeData)
|
||||
elasticSearchClientOptions = {
|
||||
cloud: {
|
||||
id: cloudId
|
||||
},
|
||||
auth: {
|
||||
username: username,
|
||||
password: password
|
||||
if (cloudId.startsWith('http')) {
|
||||
elasticSearchClientOptions = {
|
||||
node: cloudId,
|
||||
auth: {
|
||||
username: username,
|
||||
password: password
|
||||
},
|
||||
tls: {
|
||||
rejectUnauthorized: false
|
||||
}
|
||||
}
|
||||
} else {
|
||||
elasticSearchClientOptions = {
|
||||
cloud: {
|
||||
id: cloudId
|
||||
},
|
||||
auth: {
|
||||
username: username,
|
||||
password: password
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,233 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Client, ClientOptions } from '@elastic/elasticsearch'
|
||||
import { Document } from 'langchain/document'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { ElasticClientArgs, ElasticVectorSearch } from 'langchain/vectorstores/elasticsearch'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class Elasticsearch_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Elasticsearch'
|
||||
this.name = 'elasticsearch'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Upsert embedded data and perform similarity search upon query using Elasticsearch, a distributed search and analytics engine'
|
||||
this.type = 'Elasticsearch'
|
||||
this.icon = 'elasticsearch.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['elasticsearchApi', 'elasticSearchUserPassword']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Index Name',
|
||||
name: 'indexName',
|
||||
placeholder: '<INDEX_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Similarity',
|
||||
name: 'similarity',
|
||||
description: 'Similarity measure used in Elasticsearch.',
|
||||
type: 'options',
|
||||
default: 'l2_norm',
|
||||
options: [
|
||||
{
|
||||
label: 'l2_norm',
|
||||
name: 'l2_norm'
|
||||
},
|
||||
{
|
||||
label: 'dot_product',
|
||||
name: 'dot_product'
|
||||
},
|
||||
{
|
||||
label: 'cosine',
|
||||
name: 'cosine'
|
||||
}
|
||||
],
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Elasticsearch Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Elasticsearch Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(ElasticVectorSearch)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const endPoint = getCredentialParam('endpoint', credentialData, nodeData)
|
||||
const cloudId = getCredentialParam('cloudId', credentialData, nodeData)
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const similarityMeasure = nodeData.inputs?.similarityMeasure as string
|
||||
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
// The following code is a workaround for a bug (Langchain Issue #1589) in the underlying library.
|
||||
// Store does not support object in metadata and fail silently
|
||||
finalDocs.forEach((d) => {
|
||||
delete d.metadata.pdf
|
||||
delete d.metadata.loc
|
||||
})
|
||||
// end of workaround
|
||||
|
||||
const elasticSearchClientArgs = prepareClientArgs(endPoint, cloudId, credentialData, nodeData, similarityMeasure, indexName)
|
||||
const vectorStore = new ElasticVectorSearch(embeddings, elasticSearchClientArgs)
|
||||
|
||||
try {
|
||||
await vectorStore.addDocuments(finalDocs)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const endPoint = getCredentialParam('endpoint', credentialData, nodeData)
|
||||
const cloudId = getCredentialParam('cloudId', credentialData, nodeData)
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const similarityMeasure = nodeData.inputs?.similarityMeasure as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
const output = nodeData.outputs?.output as string
|
||||
|
||||
const elasticSearchClientArgs = prepareClientArgs(endPoint, cloudId, credentialData, nodeData, similarityMeasure, indexName)
|
||||
const vectorStore = await ElasticVectorSearch.fromExistingIndex(embeddings, elasticSearchClientArgs)
|
||||
|
||||
if (output === 'retriever') {
|
||||
return vectorStore.asRetriever(k)
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
const prepareConnectionOptions = (
|
||||
endPoint: string | undefined,
|
||||
cloudId: string | undefined,
|
||||
credentialData: ICommonObject,
|
||||
nodeData: INodeData
|
||||
) => {
|
||||
let elasticSearchClientOptions: ClientOptions = {}
|
||||
if (endPoint) {
|
||||
let apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
elasticSearchClientOptions = {
|
||||
node: endPoint,
|
||||
auth: {
|
||||
apiKey: apiKey
|
||||
}
|
||||
}
|
||||
} else if (cloudId) {
|
||||
let username = getCredentialParam('username', credentialData, nodeData)
|
||||
let password = getCredentialParam('password', credentialData, nodeData)
|
||||
elasticSearchClientOptions = {
|
||||
cloud: {
|
||||
id: cloudId
|
||||
},
|
||||
auth: {
|
||||
username: username,
|
||||
password: password
|
||||
}
|
||||
}
|
||||
}
|
||||
return elasticSearchClientOptions
|
||||
}
|
||||
|
||||
const prepareClientArgs = (
|
||||
endPoint: string | undefined,
|
||||
cloudId: string | undefined,
|
||||
credentialData: ICommonObject,
|
||||
nodeData: INodeData,
|
||||
similarityMeasure: string,
|
||||
indexName: string
|
||||
) => {
|
||||
let elasticSearchClientOptions = prepareConnectionOptions(endPoint, cloudId, credentialData, nodeData)
|
||||
let vectorSearchOptions = {}
|
||||
switch (similarityMeasure) {
|
||||
case 'dot_product':
|
||||
vectorSearchOptions = {
|
||||
similarity: 'dot_product'
|
||||
}
|
||||
break
|
||||
case 'cosine':
|
||||
vectorSearchOptions = {
|
||||
similarity: 'cosine'
|
||||
}
|
||||
break
|
||||
default:
|
||||
vectorSearchOptions = {
|
||||
similarity: 'l2_norm'
|
||||
}
|
||||
}
|
||||
const elasticSearchClientArgs: ElasticClientArgs = {
|
||||
client: new Client(elasticSearchClientOptions),
|
||||
indexName: indexName,
|
||||
vectorSearchOptions: vectorSearchOptions
|
||||
}
|
||||
return elasticSearchClientArgs
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Elasticsearch_VectorStores }
|
||||
|
|
@ -50,7 +50,7 @@ class ElasicsearchUpsert_VectorStores extends ElasticSearchBase implements INode
|
|||
delete d.metadata.loc
|
||||
})
|
||||
// end of workaround
|
||||
return super.init(nodeData, _, options, flattenDocs)
|
||||
return super.init(nodeData, _, options, finalDocs)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,145 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Document } from 'langchain/document'
|
||||
import { FaissStore } from 'langchain/vectorstores/faiss'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
|
||||
/**
 * Flowise node exposing Faiss (Meta's local vector index) as a vector store.
 * Upsert embeds the input documents, builds a Faiss index and saves it under
 * `basePath`; init() reloads that saved index and returns either a retriever
 * or the raw vector store, depending on the selected output.
 */
class Faiss_VectorStores implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    badge: string
    baseClasses: string[]
    inputs: INodeParams[]
    outputs: INodeOutputsValue[]

    constructor() {
        this.label = 'Faiss'
        this.name = 'faiss'
        this.version = 1.0
        this.type = 'Faiss'
        this.icon = 'faiss.svg'
        this.category = 'Vector Stores'
        this.description = 'Upsert embedded data and perform similarity search upon query using Faiss library from Meta'
        this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
        this.badge = 'NEW'
        this.inputs = [
            {
                label: 'Document',
                name: 'document',
                type: 'Document',
                list: true,
                optional: true
            },
            {
                label: 'Embeddings',
                name: 'embeddings',
                type: 'Embeddings'
            },
            {
                label: 'Base Path to load',
                name: 'basePath',
                description: 'Path to load faiss.index file',
                placeholder: `C:\\Users\\User\\Desktop`,
                type: 'string'
            },
            {
                label: 'Top K',
                name: 'topK',
                description: 'Number of top results to fetch. Default to 4',
                placeholder: '4',
                type: 'number',
                additionalParams: true,
                optional: true
            }
        ]
        this.outputs = [
            {
                label: 'Faiss Retriever',
                name: 'retriever',
                baseClasses: this.baseClasses
            },
            {
                label: 'Faiss Vector Store',
                name: 'vectorStore',
                baseClasses: [this.type, ...getBaseClasses(FaissStore)]
            }
        ]
    }

    //@ts-ignore
    vectorStoreMethods = {
        // Embeds the flattened input documents, builds a Faiss index and persists it to basePath.
        async upsert(nodeData: INodeData): Promise<void> {
            const docs = nodeData.inputs?.document as Document[]
            const embeddings = nodeData.inputs?.embeddings as Embeddings
            const basePath = nodeData.inputs?.basePath as string

            // Inputs may be nested lists of documents; flatten and keep only entries with content.
            const flattenDocs = docs && docs.length ? flatten(docs) : []
            const finalDocs = []
            for (let i = 0; i < flattenDocs.length; i += 1) {
                if (flattenDocs[i] && flattenDocs[i].pageContent) {
                    finalDocs.push(new Document(flattenDocs[i]))
                }
            }

            try {
                const vectorStore = await FaissStore.fromDocuments(finalDocs, embeddings)
                await vectorStore.save(basePath)

                // Avoid illegal invocation error
                vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number) => {
                    return await similaritySearchVectorWithScore(query, k, vectorStore)
                }
            } catch (e) {
                throw new Error(e)
            }
        }
    }

    /**
     * Loads the previously saved Faiss index from basePath.
     * Returns a retriever when output === 'retriever', otherwise the vector
     * store itself (with `k` attached when output === 'vectorStore').
     */
    async init(nodeData: INodeData): Promise<any> {
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const basePath = nodeData.inputs?.basePath as string
        const output = nodeData.outputs?.output as string
        const topK = nodeData.inputs?.topK as string
        // topK arrives as a string from the UI; default to 4 neighbours.
        const k = topK ? parseFloat(topK) : 4

        const vectorStore = await FaissStore.load(basePath, embeddings)

        // Avoid illegal invocation error
        vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number) => {
            return await similaritySearchVectorWithScore(query, k, vectorStore)
        }

        if (output === 'retriever') {
            const retriever = vectorStore.asRetriever(k)
            return retriever
        } else if (output === 'vectorStore') {
            ;(vectorStore as any).k = k
            return vectorStore
        }
        return vectorStore
    }
}
|
||||
|
||||
const similaritySearchVectorWithScore = async (query: number[], k: number, vectorStore: FaissStore) => {
|
||||
const index = vectorStore.index
|
||||
|
||||
if (k > index.ntotal()) {
|
||||
const total = index.ntotal()
|
||||
console.warn(`k (${k}) is greater than the number of elements in the index (${total}), setting k to ${total}`)
|
||||
k = total
|
||||
}
|
||||
|
||||
const result = index.search(query, k)
|
||||
return result.labels.map((id, index) => {
|
||||
const uuid = vectorStore._mapping[id]
|
||||
return [vectorStore.docstore.search(uuid), result.distances[index]] as [Document, number]
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Faiss_VectorStores }
|
||||
|
|
@ -12,6 +12,7 @@ class Faiss_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
|
@ -25,6 +26,7 @@ class Faiss_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Faiss (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
|
|
@ -13,6 +13,7 @@ class FaissUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
|
@ -26,6 +27,7 @@ class FaissUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Faiss'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
|
Before Width: | Height: | Size: 648 B After Width: | Height: | Size: 648 B |
|
|
@ -1,10 +0,0 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" class="icon icon-tabler icon-tabler-chart-dots-3" width="24" height="24" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" fill="none" stroke-linecap="round" stroke-linejoin="round">
|
||||
<path stroke="none" d="M0 0h24v24H0z" fill="none"></path>
|
||||
<path d="M5 7m-2 0a2 2 0 1 0 4 0a2 2 0 1 0 -4 0"></path>
|
||||
<path d="M16 15m-2 0a2 2 0 1 0 4 0a2 2 0 1 0 -4 0"></path>
|
||||
<path d="M18 6m-3 0a3 3 0 1 0 6 0a3 3 0 1 0 -6 0"></path>
|
||||
<path d="M6 18m-3 0a3 3 0 1 0 6 0a3 3 0 1 0 -6 0"></path>
|
||||
<path d="M9 17l5 -1.5"></path>
|
||||
<path d="M6.5 8.5l7.81 5.37"></path>
|
||||
<path d="M7 7l8 -1"></path>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 648 B |
|
|
@ -1,9 +1,9 @@
|
|||
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { flatten } from 'lodash'
|
||||
import { MemoryVectorStore } from 'langchain/vectorstores/memory'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
import { flatten } from 'lodash'
|
||||
|
||||
class InMemoryVectorStore_VectorStores implements INode {
|
||||
label: string
|
||||
|
|
@ -31,7 +31,8 @@ class InMemoryVectorStore_VectorStores implements INode {
|
|||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
|
|
@ -61,6 +62,28 @@ class InMemoryVectorStore_VectorStores implements INode {
|
|||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData): Promise<void> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await MemoryVectorStore.fromDocuments(finalDocs, embeddings)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
|
|
|
|||
|
|
@ -0,0 +1,348 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { DataType, ErrorCode, MetricType, IndexType } from '@zilliz/milvus2-sdk-node'
|
||||
import { Document } from 'langchain/document'
|
||||
import { MilvusLibArgs, Milvus } from 'langchain/vectorstores/milvus'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
interface InsertRow {
|
||||
[x: string]: string | number[]
|
||||
}
|
||||
|
||||
/**
 * Flowise node exposing Milvus as a vector store.
 * Upsert embeds the input documents into a Milvus collection via the
 * MilvusUpsert subclass; init() connects to an existing collection and returns
 * either a retriever or the raw vector store, depending on the selected output.
 */
class Milvus_VectorStores implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    badge: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams
    outputs: INodeOutputsValue[]

    constructor() {
        this.label = 'Milvus'
        this.name = 'milvus'
        this.version = 1.0
        this.type = 'Milvus'
        this.icon = 'milvus.svg'
        this.category = 'Vector Stores'
        this.description = `Upsert embedded data and perform similarity search upon query using Milvus, world's most advanced open-source vector database`
        this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
        this.badge = 'NEW'
        // Username/password credential is optional (Milvus may run unauthenticated).
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            optional: true,
            credentialNames: ['milvusAuth']
        }
        this.inputs = [
            {
                label: 'Document',
                name: 'document',
                type: 'Document',
                list: true,
                optional: true
            },
            {
                label: 'Embeddings',
                name: 'embeddings',
                type: 'Embeddings'
            },
            {
                label: 'Milvus Server URL',
                name: 'milvusServerUrl',
                type: 'string',
                placeholder: 'http://localhost:19530'
            },
            {
                label: 'Milvus Collection Name',
                name: 'milvusCollection',
                type: 'string'
            },
            {
                label: 'Milvus Filter',
                name: 'milvusFilter',
                type: 'string',
                optional: true,
                description:
                    'Filter data with a simple string query. Refer Milvus <a target="_blank" href="https://milvus.io/blog/2022-08-08-How-to-use-string-data-to-empower-your-similarity-search-applications.md#Hybrid-search">docs</a> for more details.',
                placeholder: 'doc=="a"',
                additionalParams: true
            },
            {
                label: 'Top K',
                name: 'topK',
                description: 'Number of top results to fetch. Default to 4',
                placeholder: '4',
                type: 'number',
                additionalParams: true,
                optional: true
            }
        ]
        this.outputs = [
            {
                label: 'Milvus Retriever',
                name: 'retriever',
                baseClasses: this.baseClasses
            },
            {
                label: 'Milvus Vector Store',
                name: 'vectorStore',
                baseClasses: [this.type, ...getBaseClasses(Milvus)]
            }
        ]
    }

    //@ts-ignore
    vectorStoreMethods = {
        // Embeds the flattened input documents and inserts them into the configured collection.
        async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
            // server setup
            const address = nodeData.inputs?.milvusServerUrl as string
            const collectionName = nodeData.inputs?.milvusCollection as string

            // embeddings
            const docs = nodeData.inputs?.document as Document[]
            const embeddings = nodeData.inputs?.embeddings as Embeddings

            // credential
            const credentialData = await getCredentialData(nodeData.credential ?? '', options)
            const milvusUser = getCredentialParam('milvusUser', credentialData, nodeData)
            const milvusPassword = getCredentialParam('milvusPassword', credentialData, nodeData)

            // init MilvusLibArgs
            const milVusArgs: MilvusLibArgs = {
                url: address,
                collectionName: collectionName
            }

            // Only attach auth when a credential was actually configured.
            if (milvusUser) milVusArgs.username = milvusUser
            if (milvusPassword) milVusArgs.password = milvusPassword

            // Inputs may be nested lists of documents; flatten and keep only entries with content.
            const flattenDocs = docs && docs.length ? flatten(docs) : []
            const finalDocs = []
            for (let i = 0; i < flattenDocs.length; i += 1) {
                if (flattenDocs[i] && flattenDocs[i].pageContent) {
                    finalDocs.push(new Document(flattenDocs[i]))
                }
            }

            try {
                // MilvusUpsert overrides addVectors to build explicit insert rows.
                const vectorStore = await MilvusUpsert.fromDocuments(finalDocs, embeddings, milVusArgs)

                // Avoid Illegal Invocation
                vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: string) => {
                    return await similaritySearchVectorWithScore(query, k, vectorStore, undefined, filter)
                }
            } catch (e) {
                throw new Error(e)
            }
        }
    }

    /**
     * Connects to an existing Milvus collection.
     * Returns a retriever when output === 'retriever', otherwise the vector
     * store itself (with `k` attached when output === 'vectorStore').
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        // server setup
        const address = nodeData.inputs?.milvusServerUrl as string
        const collectionName = nodeData.inputs?.milvusCollection as string
        const milvusFilter = nodeData.inputs?.milvusFilter as string

        // embeddings
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const topK = nodeData.inputs?.topK as string

        // output
        const output = nodeData.outputs?.output as string

        // format data: topK arrives as a string from the UI; default to 4.
        const k = topK ? parseFloat(topK) : 4

        // credential
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const milvusUser = getCredentialParam('milvusUser', credentialData, nodeData)
        const milvusPassword = getCredentialParam('milvusPassword', credentialData, nodeData)

        // init MilvusLibArgs
        const milVusArgs: MilvusLibArgs = {
            url: address,
            collectionName: collectionName
        }

        if (milvusUser) milVusArgs.username = milvusUser
        if (milvusPassword) milVusArgs.password = milvusPassword

        const vectorStore = await Milvus.fromExistingCollection(embeddings, milVusArgs)

        // Avoid Illegal Invocation; the node-level milvusFilter takes precedence
        // over any per-call filter (see similaritySearchVectorWithScore).
        vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: string) => {
            return await similaritySearchVectorWithScore(query, k, vectorStore, milvusFilter, filter)
        }

        if (output === 'retriever') {
            const retriever = vectorStore.asRetriever(k)
            return retriever
        } else if (output === 'vectorStore') {
            ;(vectorStore as any).k = k
            return vectorStore
        }
        return vectorStore
    }
}
|
||||
|
||||
const checkJsonString = (value: string): { isJson: boolean; obj: any } => {
|
||||
try {
|
||||
const result = JSON.parse(value)
|
||||
return { isJson: true, obj: result }
|
||||
} catch (e) {
|
||||
return { isJson: false, obj: null }
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Replacement for Milvus.similaritySearchVectorWithScore, defined as a free
 * function and bound onto the store instance to avoid the "illegal invocation"
 * error. Verifies the collection exists, loads it, runs a vector search and
 * maps each raw result row back into a langchain Document plus score.
 *
 * @param query        embedded query vector
 * @param k            number of results to request
 * @param vectorStore  connected Milvus store (client, collection, field info)
 * @param milvusFilter node-level filter string; takes precedence over `filter`
 * @param filter       per-call filter string
 */
const similaritySearchVectorWithScore = async (query: number[], k: number, vectorStore: Milvus, milvusFilter?: string, filter?: string) => {
    const hasColResp = await vectorStore.client.hasCollection({
        collection_name: vectorStore.collectionName
    })
    if (hasColResp.status.error_code !== ErrorCode.SUCCESS) {
        throw new Error(`Error checking collection: ${hasColResp}`)
    }
    if (hasColResp.value === false) {
        throw new Error(`Collection not found: ${vectorStore.collectionName}, please create collection before search.`)
    }

    // Node-level filter wins over the per-call filter; empty string means no filter.
    const filterStr = milvusFilter ?? filter ?? ''

    // Refresh field metadata so output_fields below reflects the real schema.
    await vectorStore.grabCollectionFields()

    // Milvus requires the collection to be loaded into memory before searching.
    const loadResp = await vectorStore.client.loadCollectionSync({
        collection_name: vectorStore.collectionName
    })

    if (loadResp.error_code !== ErrorCode.SUCCESS) {
        throw new Error(`Error loading collection: ${loadResp}`)
    }

    // Return every field except the embedding vector itself.
    const outputFields = vectorStore.fields.filter((field) => field !== vectorStore.vectorField)

    const searchResp = await vectorStore.client.search({
        collection_name: vectorStore.collectionName,
        search_params: {
            anns_field: vectorStore.vectorField,
            topk: k.toString(),
            metric_type: vectorStore.indexCreateParams.metric_type,
            params: vectorStore.indexSearchParams
        },
        output_fields: outputFields,
        vector_type: DataType.FloatVector,
        vectors: [query],
        filter: filterStr
    })
    if (searchResp.status.error_code !== ErrorCode.SUCCESS) {
        throw new Error(`Error searching data: ${JSON.stringify(searchResp)}`)
    }
    const results: [Document, number][] = []
    searchResp.results.forEach((result) => {
        const fields = {
            pageContent: '',
            metadata: {} as Record<string, any>
        }
        Object.keys(result).forEach((key) => {
            if (key === vectorStore.textField) {
                // The text field becomes the document body.
                fields.pageContent = result[key]
            } else if (vectorStore.fields.includes(key) || key === vectorStore.primaryField) {
                // Metadata objects were JSON.stringify'd on insert (see MilvusUpsert);
                // parse them back when the stored string is valid JSON.
                if (typeof result[key] === 'string') {
                    const { isJson, obj } = checkJsonString(result[key])
                    fields.metadata[key] = isJson ? obj : result[key]
                } else {
                    fields.metadata[key] = result[key]
                }
            }
        })
        results.push([new Document(fields), result.score])
    })
    return results
}
|
||||
|
||||
/**
 * Milvus subclass whose addVectors builds explicit insert rows for every
 * collection field (primary key, text, vector and metadata), creates an
 * AUTOINDEX on the vector field if the collection has no index yet, inserts
 * the rows, and flushes synchronously so the data is immediately searchable.
 */
class MilvusUpsert extends Milvus {
    async addVectors(vectors: number[][], documents: Document[]): Promise<void> {
        if (vectors.length === 0) {
            return
        }
        // Creates the collection (and schema) if it does not exist yet.
        await this.ensureCollection(vectors, documents)

        const insertDatas: InsertRow[] = []

        for (let index = 0; index < vectors.length; index++) {
            const vec = vectors[index]
            const doc = documents[index]
            const data: InsertRow = {
                [this.textField]: doc.pageContent,
                [this.vectorField]: vec
            }
            this.fields.forEach((field) => {
                switch (field) {
                    case this.primaryField:
                        // Only supply the primary key when the collection does not auto-generate it.
                        if (!this.autoId) {
                            if (doc.metadata[this.primaryField] === undefined) {
                                throw new Error(
                                    `The Collection's primaryField is configured with autoId=false, thus its value must be provided through metadata.`
                                )
                            }
                            data[field] = doc.metadata[this.primaryField]
                        }
                        break
                    case this.textField:
                        data[field] = doc.pageContent
                        break
                    case this.vectorField:
                        data[field] = vec
                        break
                    default: // metadata fields
                        if (doc.metadata[field] === undefined) {
                            throw new Error(`The field "${field}" is not provided in documents[${index}].metadata.`)
                        } else if (typeof doc.metadata[field] === 'object') {
                            // Milvus scalar fields cannot hold objects; store them as JSON strings
                            // (parsed back in similaritySearchVectorWithScore).
                            data[field] = JSON.stringify(doc.metadata[field])
                        } else {
                            data[field] = doc.metadata[field]
                        }
                        break
                }
            })

            insertDatas.push(data)
        }

        const descIndexResp = await this.client.describeIndex({
            collection_name: this.collectionName
        })

        // Create a default AUTOINDEX (L2 metric) when the collection has no index yet.
        if (descIndexResp.status.error_code === ErrorCode.IndexNotExist) {
            const resp = await this.client.createIndex({
                collection_name: this.collectionName,
                field_name: this.vectorField,
                index_name: `myindex_${Date.now().toString()}`,
                index_type: IndexType.AUTOINDEX,
                metric_type: MetricType.L2
            })
            if (resp.error_code !== ErrorCode.SUCCESS) {
                throw new Error(`Error creating index`)
            }
        }

        const insertResp = await this.client.insert({
            collection_name: this.collectionName,
            fields_data: insertDatas
        })

        if (insertResp.status.error_code !== ErrorCode.SUCCESS) {
            throw new Error(`Error inserting data: ${JSON.stringify(insertResp)}`)
        }

        // Synchronous flush so the inserted rows are queryable right after upsert.
        await this.client.flushSync({ collection_names: [this.collectionName] })
    }
}

module.exports = { nodeClass: Milvus_VectorStores }
|
||||
|
|
@ -13,6 +13,7 @@ class Milvus_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -27,6 +28,7 @@ class Milvus_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing collection from Milvus (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ class Milvus_Upsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -32,6 +33,7 @@ class Milvus_Upsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Milvus'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,194 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { MongoClient } from 'mongodb'
|
||||
import { MongoDBAtlasVectorSearch } from 'langchain/vectorstores/mongodb_atlas'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class MongoDBAtlas_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'MongoDB Atlas'
|
||||
this.name = 'mongoDBAtlas'
|
||||
this.version = 1.0
|
||||
this.description = `Upsert embedded data and perform similarity search upon query using MongoDB Atlas, a managed cloud mongodb database`
|
||||
this.type = 'MongoDB Atlas'
|
||||
this.icon = 'mongodb.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['mongoDBUrlApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'databaseName',
|
||||
placeholder: '<DB_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Collection Name',
|
||||
name: 'collectionName',
|
||||
placeholder: '<COLLECTION_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Index Name',
|
||||
name: 'indexName',
|
||||
placeholder: '<VECTOR_INDEX_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Content Field',
|
||||
name: 'textKey',
|
||||
description: 'Name of the field (column) that contains the actual content',
|
||||
type: 'string',
|
||||
default: 'text',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embedded Field',
|
||||
name: 'embeddingKey',
|
||||
description: 'Name of the field (column) that contains the Embedding',
|
||||
type: 'string',
|
||||
default: 'embedding',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'MongoDB Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'MongoDB Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(MongoDBAtlasVectorSearch)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const databaseName = nodeData.inputs?.databaseName as string
|
||||
const collectionName = nodeData.inputs?.collectionName as string
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
let textKey = nodeData.inputs?.textKey as string
|
||||
let embeddingKey = nodeData.inputs?.embeddingKey as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
|
||||
let mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData)
|
||||
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
const document = new Document(flattenDocs[i])
|
||||
finalDocs.push(document)
|
||||
}
|
||||
}
|
||||
|
||||
const mongoClient = new MongoClient(mongoDBConnectUrl)
|
||||
const collection = mongoClient.db(databaseName).collection(collectionName)
|
||||
|
||||
if (!textKey || textKey === '') textKey = 'text'
|
||||
if (!embeddingKey || embeddingKey === '') embeddingKey = 'embedding'
|
||||
|
||||
const mongoDBAtlasVectorSearch = new MongoDBAtlasVectorSearch(embeddings, {
|
||||
collection,
|
||||
indexName,
|
||||
textKey,
|
||||
embeddingKey
|
||||
})
|
||||
|
||||
try {
|
||||
await mongoDBAtlasVectorSearch.addDocuments(finalDocs)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const databaseName = nodeData.inputs?.databaseName as string
|
||||
const collectionName = nodeData.inputs?.collectionName as string
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
let textKey = nodeData.inputs?.textKey as string
|
||||
let embeddingKey = nodeData.inputs?.embeddingKey as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
const output = nodeData.outputs?.output as string
|
||||
|
||||
let mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData)
|
||||
|
||||
const mongoClient = new MongoClient(mongoDBConnectUrl)
|
||||
const collection = mongoClient.db(databaseName).collection(collectionName)
|
||||
|
||||
if (!textKey || textKey === '') textKey = 'text'
|
||||
if (!embeddingKey || embeddingKey === '') embeddingKey = 'embedding'
|
||||
|
||||
const vectorStore = new MongoDBAtlasVectorSearch(embeddings, {
|
||||
collection,
|
||||
indexName,
|
||||
textKey,
|
||||
embeddingKey
|
||||
})
|
||||
|
||||
if (output === 'retriever') {
|
||||
return vectorStore.asRetriever(k)
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: MongoDBAtlas_VectorStores }
|
||||
|
|
@ -0,0 +1,147 @@
|
|||
import {
|
||||
getBaseClasses,
|
||||
getCredentialData,
|
||||
getCredentialParam,
|
||||
ICommonObject,
|
||||
INodeData,
|
||||
INodeOutputsValue,
|
||||
INodeParams
|
||||
} from '../../../src'
|
||||
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { VectorStore } from 'langchain/vectorstores/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { MongoDBAtlasVectorSearch } from 'langchain/vectorstores/mongodb_atlas'
|
||||
import { Collection, MongoClient } from 'mongodb'
|
||||
|
||||
/**
 * Shared base for the (deprecating) MongoDB Atlas vector-store nodes.
 * Declares the common inputs/outputs and implements init(), which resolves
 * credentials, connects to the target collection and delegates store creation
 * to the subclass via constructVectorStore().
 */
export abstract class MongoDBSearchBase {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    badge: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams
    outputs: INodeOutputsValue[]
    // Client created in init(); kept open because the returned store uses its
    // collection handle. NOTE(review): no close() is visible in this block —
    // confirm the client lifetime is managed by the caller.
    mongoClient: MongoClient

    protected constructor() {
        this.type = 'MongoDB Atlas'
        this.icon = 'mongodb.png'
        this.category = 'Vector Stores'
        this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
        this.badge = 'DEPRECATING'
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['mongoDBUrlApi']
        }
        this.inputs = [
            {
                label: 'Embeddings',
                name: 'embeddings',
                type: 'Embeddings'
            },
            {
                label: 'Database',
                name: 'databaseName',
                placeholder: '<DB_NAME>',
                type: 'string'
            },
            {
                label: 'Collection Name',
                name: 'collectionName',
                placeholder: '<COLLECTION_NAME>',
                type: 'string'
            },
            {
                label: 'Index Name',
                name: 'indexName',
                placeholder: '<VECTOR_INDEX_NAME>',
                type: 'string'
            },
            {
                label: 'Content Field',
                name: 'textKey',
                description: 'Name of the field (column) that contains the actual content',
                type: 'string',
                default: 'text',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Embedded Field',
                name: 'embeddingKey',
                description: 'Name of the field (column) that contains the Embedding',
                type: 'string',
                default: 'embedding',
                additionalParams: true,
                optional: true
            },
            {
                label: 'Top K',
                name: 'topK',
                description: 'Number of top results to fetch. Default to 4',
                placeholder: '4',
                type: 'number',
                additionalParams: true,
                optional: true
            }
        ]
        this.outputs = [
            {
                label: 'MongoDB Retriever',
                name: 'retriever',
                baseClasses: this.baseClasses
            },
            {
                label: 'MongoDB Vector Store',
                name: 'vectorStore',
                baseClasses: [this.type, ...getBaseClasses(MongoDBAtlasVectorSearch)]
            }
        ]
    }

    // Subclasses decide how the store is built (e.g. from existing collection vs. upsert).
    abstract constructVectorStore(
        embeddings: Embeddings,
        collection: Collection,
        indexName: string,
        textKey: string,
        embeddingKey: string,
        docs: Document<Record<string, any>>[] | undefined
    ): Promise<VectorStore>

    /**
     * Resolves credentials, connects to the configured database/collection and
     * returns a retriever (output === 'retriever') or the vector store built by
     * the subclass (with `k` attached when output === 'vectorStore').
     */
    async init(nodeData: INodeData, _: string, options: ICommonObject, docs: Document<Record<string, any>>[] | undefined): Promise<any> {
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const databaseName = nodeData.inputs?.databaseName as string
        const collectionName = nodeData.inputs?.collectionName as string
        const indexName = nodeData.inputs?.indexName as string
        let textKey = nodeData.inputs?.textKey as string
        let embeddingKey = nodeData.inputs?.embeddingKey as string
        const embeddings = nodeData.inputs?.embeddings as Embeddings
        const topK = nodeData.inputs?.topK as string
        // topK arrives as a string from the UI; default to 4 results.
        const k = topK ? parseFloat(topK) : 4
        const output = nodeData.outputs?.output as string

        let mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData)

        this.mongoClient = new MongoClient(mongoDBConnectUrl)
        const collection = this.mongoClient.db(databaseName).collection(collectionName)
        // Fall back to the documented defaults when the optional fields are blank.
        if (!textKey || textKey === '') textKey = 'text'
        if (!embeddingKey || embeddingKey === '') embeddingKey = 'embedding'
        const vectorStore = await this.constructVectorStore(embeddings, collection, indexName, textKey, embeddingKey, docs)

        if (output === 'retriever') {
            return vectorStore.asRetriever(k)
        } else if (output === 'vectorStore') {
            ;(vectorStore as any).k = k
            return vectorStore
        }
        return vectorStore
    }
}
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
import { Collection } from 'mongodb'
|
||||
import { MongoDBAtlasVectorSearch } from 'langchain/vectorstores/mongodb_atlas'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { VectorStore } from 'langchain/vectorstores/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { MongoDBSearchBase } from './MongoDBSearchBase'
|
||||
import { ICommonObject, INode, INodeData } from '../../../src/Interface'
|
||||
|
||||
class MongoDBExisting_VectorStores extends MongoDBSearchBase implements INode {
|
||||
constructor() {
|
||||
super()
|
||||
this.label = 'MongoDB Atlas Load Existing Index'
|
||||
this.name = 'MongoDBIndex'
|
||||
this.version = 1.0
|
||||
this.description = 'Load existing data from MongoDB Atlas (i.e: Document has been upserted)'
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
return super.init(nodeData, _, options, undefined)
|
||||
}
|
||||
|
||||
async constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
collection: Collection,
|
||||
indexName: string,
|
||||
textKey: string,
|
||||
embeddingKey: string,
|
||||
_: Document<Record<string, any>>[] | undefined
|
||||
): Promise<VectorStore> {
|
||||
return new MongoDBAtlasVectorSearch(embeddings, {
|
||||
collection: collection,
|
||||
indexName: indexName,
|
||||
textKey: textKey,
|
||||
embeddingKey: embeddingKey
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: MongoDBExisting_VectorStores }
|
||||
|
|
@ -0,0 +1,59 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Collection } from 'mongodb'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { VectorStore } from 'langchain/vectorstores/base'
|
||||
import { MongoDBAtlasVectorSearch } from 'langchain/vectorstores/mongodb_atlas'
|
||||
import { ICommonObject, INode, INodeData } from '../../../src/Interface'
|
||||
import { MongoDBSearchBase } from './MongoDBSearchBase'
|
||||
|
||||
class MongoDBUpsert_VectorStores extends MongoDBSearchBase implements INode {
|
||||
constructor() {
|
||||
super()
|
||||
this.label = 'MongoDB Atlas Upsert Document'
|
||||
this.name = 'MongoDBUpsert'
|
||||
this.version = 1.0
|
||||
this.description = 'Upsert documents to MongoDB Atlas'
|
||||
this.inputs.unshift({
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true
|
||||
})
|
||||
}
|
||||
|
||||
async constructVectorStore(
|
||||
embeddings: Embeddings,
|
||||
collection: Collection,
|
||||
indexName: string,
|
||||
textKey: string,
|
||||
embeddingKey: string,
|
||||
docs: Document<Record<string, any>>[]
|
||||
): Promise<VectorStore> {
|
||||
const mongoDBAtlasVectorSearch = new MongoDBAtlasVectorSearch(embeddings, {
|
||||
collection: collection,
|
||||
indexName: indexName,
|
||||
textKey: textKey,
|
||||
embeddingKey: embeddingKey
|
||||
})
|
||||
await mongoDBAtlasVectorSearch.addDocuments(docs)
|
||||
return mongoDBAtlasVectorSearch
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
const document = new Document(flattenDocs[i])
|
||||
finalDocs.push(document)
|
||||
}
|
||||
}
|
||||
|
||||
return super.init(nodeData, _, options, finalDocs)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: MongoDBUpsert_VectorStores }
|
||||
|
After Width: | Height: | Size: 3.7 KiB |
|
|
@ -0,0 +1,139 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Client } from '@opensearch-project/opensearch'
|
||||
import { Document } from 'langchain/document'
|
||||
import { OpenSearchVectorStore } from 'langchain/vectorstores/opensearch'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses } from '../../../src/utils'
|
||||
|
||||
class OpenSearch_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'OpenSearch'
|
||||
this.name = 'openSearch'
|
||||
this.version = 1.0
|
||||
this.type = 'OpenSearch'
|
||||
this.icon = 'opensearch.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = `Upsert embedded data and perform similarity search upon query using OpenSearch, an open-source, all-in-one vector database`
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'OpenSearch URL',
|
||||
name: 'opensearchURL',
|
||||
type: 'string',
|
||||
placeholder: 'http://127.0.0.1:9200'
|
||||
},
|
||||
{
|
||||
label: 'Index Name',
|
||||
name: 'indexName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'OpenSearch Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'OpenSearch Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(OpenSearchVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData): Promise<void> {
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const opensearchURL = nodeData.inputs?.opensearchURL as string
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const client = new Client({
|
||||
nodes: [opensearchURL]
|
||||
})
|
||||
|
||||
try {
|
||||
await OpenSearchVectorStore.fromDocuments(finalDocs, embeddings, {
|
||||
client,
|
||||
indexName: indexName
|
||||
})
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData): Promise<any> {
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const opensearchURL = nodeData.inputs?.opensearchURL as string
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const client = new Client({
|
||||
nodes: [opensearchURL]
|
||||
})
|
||||
|
||||
const vectorStore = new OpenSearchVectorStore(embeddings, {
|
||||
client,
|
||||
indexName
|
||||
})
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: OpenSearch_VectorStores }
|
||||
|
|
@ -14,6 +14,7 @@ class OpenSearchUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
|
@ -27,6 +28,7 @@ class OpenSearchUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to OpenSearch'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
|
|
@ -12,6 +12,7 @@ class OpenSearch_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
outputs: INodeOutputsValue[]
|
||||
|
|
@ -25,6 +26,7 @@ class OpenSearch_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from OpenSearch (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Embeddings',
|
||||
|
Before Width: | Height: | Size: 5.1 KiB After Width: | Height: | Size: 5.1 KiB |
|
Before Width: | Height: | Size: 5.1 KiB |
|
|
@ -0,0 +1,189 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Pinecone } from '@pinecone-database/pinecone'
|
||||
import { PineconeLibArgs, PineconeStore } from 'langchain/vectorstores/pinecone'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class Pinecone_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Pinecone'
|
||||
this.name = 'pinecone'
|
||||
this.version = 1.0
|
||||
this.type = 'Pinecone'
|
||||
this.icon = 'pinecone.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = `Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database`
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['pineconeApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Index',
|
||||
name: 'pineconeIndex',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Namespace',
|
||||
name: 'pineconeNamespace',
|
||||
type: 'string',
|
||||
placeholder: 'my-first-namespace',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Metadata Filter',
|
||||
name: 'pineconeMetadataFilter',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Pinecone Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Pinecone Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(PineconeStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const index = nodeData.inputs?.pineconeIndex as string
|
||||
const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData)
|
||||
const pineconeEnv = getCredentialParam('pineconeEnv', credentialData, nodeData)
|
||||
|
||||
const client = new Pinecone({
|
||||
apiKey: pineconeApiKey,
|
||||
environment: pineconeEnv
|
||||
})
|
||||
|
||||
const pineconeIndex = client.Index(index)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const obj: PineconeLibArgs = {
|
||||
pineconeIndex
|
||||
}
|
||||
|
||||
if (pineconeNamespace) obj.namespace = pineconeNamespace
|
||||
|
||||
try {
|
||||
await PineconeStore.fromDocuments(finalDocs, embeddings, obj)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const index = nodeData.inputs?.pineconeIndex as string
|
||||
const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string
|
||||
const pineconeMetadataFilter = nodeData.inputs?.pineconeMetadataFilter
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData)
|
||||
const pineconeEnv = getCredentialParam('pineconeEnv', credentialData, nodeData)
|
||||
|
||||
const client = new Pinecone({
|
||||
apiKey: pineconeApiKey,
|
||||
environment: pineconeEnv
|
||||
})
|
||||
|
||||
const pineconeIndex = client.Index(index)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const obj: PineconeLibArgs = {
|
||||
pineconeIndex
|
||||
}
|
||||
|
||||
if (pineconeNamespace) obj.namespace = pineconeNamespace
|
||||
if (pineconeMetadataFilter) {
|
||||
const metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : JSON.parse(pineconeMetadataFilter)
|
||||
obj.filter = metadatafilter
|
||||
}
|
||||
|
||||
const vectorStore = await PineconeStore.fromExistingIndex(embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Pinecone_VectorStores }
|
||||
|
|
@ -12,6 +12,7 @@ class Pinecone_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -26,6 +27,7 @@ class Pinecone_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Pinecone (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { flatten } from 'lodash'
|
||||
import { Pinecone } from '@pinecone-database/pinecone'
|
||||
import { PineconeLibArgs, PineconeStore } from 'langchain/vectorstores/pinecone'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { flatten } from 'lodash'
|
||||
|
||||
class PineconeUpsert_VectorStores implements INode {
|
||||
label: string
|
||||
|
|
@ -14,6 +14,7 @@ class PineconeUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -28,6 +29,7 @@ class PineconeUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Pinecone'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,268 @@
|
|||
import { Pool } from 'pg'
|
||||
import { flatten } from 'lodash'
|
||||
import { DataSourceOptions } from 'typeorm'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { TypeORMVectorStore, TypeORMVectorStoreDocument } from 'langchain/vectorstores/typeorm'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class Postgres_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Postgres'
|
||||
this.name = 'postgres'
|
||||
this.version = 1.0
|
||||
this.type = 'Postgres'
|
||||
this.icon = 'postgres.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity search upon query using pgvector on Postgres'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['PostgresApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Host',
|
||||
name: 'host',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'database',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Port',
|
||||
name: 'port',
|
||||
type: 'number',
|
||||
placeholder: '6432',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string',
|
||||
placeholder: 'documents',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Additional Configuration',
|
||||
name: 'additionalConfig',
|
||||
type: 'json',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Postgres Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Postgres Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(TypeORMVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const user = getCredentialParam('user', credentialData, nodeData)
|
||||
const password = getCredentialParam('password', credentialData, nodeData)
|
||||
const _tableName = nodeData.inputs?.tableName as string
|
||||
const tableName = _tableName ? _tableName : 'documents'
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const additionalConfig = nodeData.inputs?.additionalConfig as string
|
||||
|
||||
let additionalConfiguration = {}
|
||||
if (additionalConfig) {
|
||||
try {
|
||||
additionalConfiguration = typeof additionalConfig === 'object' ? additionalConfig : JSON.parse(additionalConfig)
|
||||
} catch (exception) {
|
||||
throw new Error('Invalid JSON in the Additional Configuration: ' + exception)
|
||||
}
|
||||
}
|
||||
|
||||
const postgresConnectionOptions = {
|
||||
...additionalConfiguration,
|
||||
type: 'postgres',
|
||||
host: nodeData.inputs?.host as string,
|
||||
port: nodeData.inputs?.port as number,
|
||||
username: user,
|
||||
password: password,
|
||||
database: nodeData.inputs?.database as string
|
||||
}
|
||||
|
||||
const args = {
|
||||
postgresConnectionOptions: postgresConnectionOptions as DataSourceOptions,
|
||||
tableName: tableName
|
||||
}
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const vectorStore = await TypeORMVectorStore.fromDocuments(finalDocs, embeddings, args)
|
||||
|
||||
// Avoid Illegal invocation error
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => {
|
||||
return await similaritySearchVectorWithScore(query, k, tableName, postgresConnectionOptions, filter)
|
||||
}
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const user = getCredentialParam('user', credentialData, nodeData)
|
||||
const password = getCredentialParam('password', credentialData, nodeData)
|
||||
const _tableName = nodeData.inputs?.tableName as string
|
||||
const tableName = _tableName ? _tableName : 'documents'
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const additionalConfig = nodeData.inputs?.additionalConfig as string
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
let additionalConfiguration = {}
|
||||
if (additionalConfig) {
|
||||
try {
|
||||
additionalConfiguration = typeof additionalConfig === 'object' ? additionalConfig : JSON.parse(additionalConfig)
|
||||
} catch (exception) {
|
||||
throw new Error('Invalid JSON in the Additional Configuration: ' + exception)
|
||||
}
|
||||
}
|
||||
|
||||
const postgresConnectionOptions = {
|
||||
...additionalConfiguration,
|
||||
type: 'postgres',
|
||||
host: nodeData.inputs?.host as string,
|
||||
port: nodeData.inputs?.port as number,
|
||||
username: user,
|
||||
password: password,
|
||||
database: nodeData.inputs?.database as string
|
||||
}
|
||||
|
||||
const args = {
|
||||
postgresConnectionOptions: postgresConnectionOptions as DataSourceOptions,
|
||||
tableName: tableName
|
||||
}
|
||||
|
||||
const vectorStore = await TypeORMVectorStore.fromDataSource(embeddings, args)
|
||||
|
||||
// Rewrite the method to use pg pool connection instead of the default connection
|
||||
/* Otherwise a connection error is displayed when the chain tries to execute the function
|
||||
[chain/start] [1:chain:ConversationalRetrievalQAChain] Entering Chain run with input: { "question": "what the document is about", "chat_history": [] }
|
||||
[retriever/start] [1:chain:ConversationalRetrievalQAChain > 2:retriever:VectorStoreRetriever] Entering Retriever run with input: { "query": "what the document is about" }
|
||||
[ERROR]: uncaughtException: Illegal invocation TypeError: Illegal invocation at Socket.ref (node:net:1524:18) at Connection.ref (.../node_modules/pg/lib/connection.js:183:17) at Client.ref (.../node_modules/pg/lib/client.js:591:21) at BoundPool._pulseQueue (/node_modules/pg-pool/index.js:148:28) at .../node_modules/pg-pool/index.js:184:37 at process.processTicksAndRejections (node:internal/process/task_queues:77:11)
|
||||
*/
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => {
|
||||
return await similaritySearchVectorWithScore(query, k, tableName, postgresConnectionOptions, filter)
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
const similaritySearchVectorWithScore = async (
|
||||
query: number[],
|
||||
k: number,
|
||||
tableName: string,
|
||||
postgresConnectionOptions: ICommonObject,
|
||||
filter?: any
|
||||
) => {
|
||||
const embeddingString = `[${query.join(',')}]`
|
||||
const _filter = filter ?? '{}'
|
||||
|
||||
const queryString = `
|
||||
SELECT *, embedding <=> $1 as "_distance"
|
||||
FROM ${tableName}
|
||||
WHERE metadata @> $2
|
||||
ORDER BY "_distance" ASC
|
||||
LIMIT $3;`
|
||||
|
||||
const poolOptions = {
|
||||
host: postgresConnectionOptions.host,
|
||||
port: postgresConnectionOptions.port,
|
||||
user: postgresConnectionOptions.username,
|
||||
password: postgresConnectionOptions.password,
|
||||
database: postgresConnectionOptions.database
|
||||
}
|
||||
const pool = new Pool(poolOptions)
|
||||
const conn = await pool.connect()
|
||||
|
||||
const documents = await conn.query(queryString, [embeddingString, _filter, k])
|
||||
|
||||
conn.release()
|
||||
|
||||
const results = [] as [TypeORMVectorStoreDocument, number][]
|
||||
for (const doc of documents.rows) {
|
||||
if (doc._distance != null && doc.pageContent != null) {
|
||||
const document = new Document(doc) as TypeORMVectorStoreDocument
|
||||
document.id = doc.id
|
||||
results.push([document, doc._distance])
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Postgres_VectorStores }
|
||||
|
|
@ -14,6 +14,7 @@ class Postgres_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -28,6 +29,7 @@ class Postgres_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Postgres using pgvector (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
@ -15,6 +15,7 @@ class PostgresUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -29,6 +30,7 @@ class PostgresUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Postgres using pgvector'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
Before Width: | Height: | Size: 6.8 KiB After Width: | Height: | Size: 6.8 KiB |
|
Before Width: | Height: | Size: 6.8 KiB |
|
|
@ -0,0 +1,247 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { QdrantClient } from '@qdrant/js-client-rest'
|
||||
import { VectorStoreRetrieverInput } from 'langchain/vectorstores/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { QdrantVectorStore, QdrantLibArgs } from 'langchain/vectorstores/qdrant'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
type RetrieverConfig = Partial<VectorStoreRetrieverInput<QdrantVectorStore>>
|
||||
|
||||
class Qdrant_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Qdrant'
|
||||
this.name = 'qdrant'
|
||||
this.version = 1.0
|
||||
this.type = 'Qdrant'
|
||||
this.icon = 'qdrant.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description =
|
||||
'Upsert embedded data and perform similarity search upon query using Qdrant, a scalable open source vector database written in Rust'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Only needed when using Qdrant cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['qdrantApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Server URL',
|
||||
name: 'qdrantServerUrl',
|
||||
type: 'string',
|
||||
placeholder: 'http://localhost:6333'
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Collection Name',
|
||||
name: 'qdrantCollection',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Vector Dimension',
|
||||
name: 'qdrantVectorDimension',
|
||||
type: 'number',
|
||||
default: 1536,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Similarity',
|
||||
name: 'qdrantSimilarity',
|
||||
description: 'Similarity measure used in Qdrant.',
|
||||
type: 'options',
|
||||
default: 'Cosine',
|
||||
options: [
|
||||
{
|
||||
label: 'Cosine',
|
||||
name: 'Cosine'
|
||||
},
|
||||
{
|
||||
label: 'Euclid',
|
||||
name: 'Euclid'
|
||||
},
|
||||
{
|
||||
label: 'Dot',
|
||||
name: 'Dot'
|
||||
}
|
||||
],
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Additional Collection Cofiguration',
|
||||
name: 'qdrantCollectionConfiguration',
|
||||
description:
|
||||
'Refer to <a target="_blank" href="https://qdrant.tech/documentation/concepts/collections">collection docs</a> for more reference',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Search Filter',
|
||||
name: 'qdrantFilter',
|
||||
description: 'Only return points which satisfy the conditions',
|
||||
type: 'json',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Qdrant Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Qdrant Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(QdrantVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const qdrantServerUrl = nodeData.inputs?.qdrantServerUrl as string
|
||||
const collectionName = nodeData.inputs?.qdrantCollection as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const qdrantSimilarity = nodeData.inputs?.qdrantSimilarity
|
||||
const qdrantVectorDimension = nodeData.inputs?.qdrantVectorDimension
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData)
|
||||
|
||||
const client = new QdrantClient({
|
||||
url: qdrantServerUrl,
|
||||
apiKey: qdrantApiKey
|
||||
})
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const dbConfig: QdrantLibArgs = {
|
||||
client,
|
||||
url: qdrantServerUrl,
|
||||
collectionName,
|
||||
collectionConfig: {
|
||||
vectors: {
|
||||
size: qdrantVectorDimension ? parseInt(qdrantVectorDimension, 10) : 1536,
|
||||
distance: qdrantSimilarity ?? 'Cosine'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await QdrantVectorStore.fromDocuments(finalDocs, embeddings, dbConfig)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const qdrantServerUrl = nodeData.inputs?.qdrantServerUrl as string
|
||||
const collectionName = nodeData.inputs?.qdrantCollection as string
|
||||
let qdrantCollectionConfiguration = nodeData.inputs?.qdrantCollectionConfiguration
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const qdrantSimilarity = nodeData.inputs?.qdrantSimilarity
|
||||
const qdrantVectorDimension = nodeData.inputs?.qdrantVectorDimension
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
let queryFilter = nodeData.inputs?.queryFilter
|
||||
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData)
|
||||
|
||||
const client = new QdrantClient({
|
||||
url: qdrantServerUrl,
|
||||
apiKey: qdrantApiKey
|
||||
})
|
||||
|
||||
const dbConfig: QdrantLibArgs = {
|
||||
client,
|
||||
collectionName
|
||||
}
|
||||
|
||||
const retrieverConfig: RetrieverConfig = {
|
||||
k
|
||||
}
|
||||
|
||||
if (qdrantCollectionConfiguration) {
|
||||
qdrantCollectionConfiguration =
|
||||
typeof qdrantCollectionConfiguration === 'object'
|
||||
? qdrantCollectionConfiguration
|
||||
: JSON.parse(qdrantCollectionConfiguration)
|
||||
dbConfig.collectionConfig = {
|
||||
...qdrantCollectionConfiguration,
|
||||
vectors: {
|
||||
...qdrantCollectionConfiguration.vectors,
|
||||
size: qdrantVectorDimension ? parseInt(qdrantVectorDimension, 10) : 1536,
|
||||
distance: qdrantSimilarity ?? 'Cosine'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (queryFilter) {
|
||||
retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : JSON.parse(queryFilter)
|
||||
}
|
||||
|
||||
const vectorStore = await QdrantVectorStore.fromExistingCollection(embeddings, dbConfig)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(retrieverConfig)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Qdrant_VectorStores }
|
||||
|
|
@ -15,6 +15,7 @@ class Qdrant_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -29,6 +30,7 @@ class Qdrant_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Qdrant (i.e., documents have been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
@ -17,6 +17,7 @@ class QdrantUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -31,6 +32,7 @@ class QdrantUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Qdrant'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
Before Width: | Height: | Size: 11 KiB After Width: | Height: | Size: 11 KiB |
|
Before Width: | Height: | Size: 11 KiB |
|
|
@ -0,0 +1,327 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { createClient, SearchOptions } from 'redis'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { RedisVectorStore, RedisVectorStoreConfig } from 'langchain/vectorstores/redis'
|
||||
import { Document } from 'langchain/document'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { escapeAllStrings, escapeSpecialChars, unEscapeSpecialChars } from './utils'
|
||||
|
||||
class Redis_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Redis'
|
||||
this.name = 'redis'
|
||||
this.version = 1.0
|
||||
this.description =
|
||||
'Upsert embedded data and perform similarity search upon query using Redis, an open source, in-memory data structure store'
|
||||
this.type = 'Redis'
|
||||
this.icon = 'redis.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['redisCacheUrlApi', 'redisCacheApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Index Name',
|
||||
name: 'indexName',
|
||||
placeholder: '<VECTOR_INDEX_NAME>',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Replace Index on Upsert',
|
||||
name: 'replaceIndex',
|
||||
description: 'Selecting this option will delete the existing index and recreate a new one when upserting',
|
||||
default: false,
|
||||
type: 'boolean'
|
||||
},
|
||||
{
|
||||
label: 'Content Field',
|
||||
name: 'contentKey',
|
||||
description: 'Name of the field (column) that contains the actual content',
|
||||
type: 'string',
|
||||
default: 'content',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Metadata Field',
|
||||
name: 'metadataKey',
|
||||
description: 'Name of the field (column) that contains the metadata of the document',
|
||||
type: 'string',
|
||||
default: 'metadata',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Vector Field',
|
||||
name: 'vectorKey',
|
||||
description: 'Name of the field (column) that contains the vector',
|
||||
type: 'string',
|
||||
default: 'content_vector',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Redis Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Redis Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(RedisVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
let contentKey = nodeData.inputs?.contentKey as string
|
||||
let metadataKey = nodeData.inputs?.metadataKey as string
|
||||
let vectorKey = nodeData.inputs?.vectorKey as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const replaceIndex = nodeData.inputs?.replaceIndex as boolean
|
||||
|
||||
let redisUrl = getCredentialParam('redisUrl', credentialData, nodeData)
|
||||
if (!redisUrl || redisUrl === '') {
|
||||
const username = getCredentialParam('redisCacheUser', credentialData, nodeData)
|
||||
const password = getCredentialParam('redisCachePwd', credentialData, nodeData)
|
||||
const portStr = getCredentialParam('redisCachePort', credentialData, nodeData)
|
||||
const host = getCredentialParam('redisCacheHost', credentialData, nodeData)
|
||||
|
||||
redisUrl = 'redis://' + username + ':' + password + '@' + host + ':' + portStr
|
||||
}
|
||||
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
const document = new Document(flattenDocs[i])
|
||||
escapeAllStrings(document.metadata)
|
||||
finalDocs.push(document)
|
||||
}
|
||||
}
|
||||
|
||||
const redisClient = createClient({ url: redisUrl })
|
||||
await redisClient.connect()
|
||||
|
||||
try {
|
||||
const storeConfig: RedisVectorStoreConfig = {
|
||||
redisClient: redisClient,
|
||||
indexName: indexName
|
||||
}
|
||||
const isIndexExists = await checkIndexExists(redisClient, indexName)
|
||||
if (replaceIndex && isIndexExists) {
|
||||
let response = await redisClient.ft.dropIndex(indexName)
|
||||
if (process.env.DEBUG === 'true') {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Redis Vector Store :: Dropping index [${indexName}], Received Response [${response}]`)
|
||||
}
|
||||
}
|
||||
const vectorStore = await RedisVectorStore.fromDocuments(finalDocs, embeddings, storeConfig)
|
||||
|
||||
if (!contentKey || contentKey === '') contentKey = 'content'
|
||||
if (!metadataKey || metadataKey === '') metadataKey = 'metadata'
|
||||
if (!vectorKey || vectorKey === '') vectorKey = 'content_vector'
|
||||
|
||||
// Avoid Illegal invocation error
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => {
|
||||
return await similaritySearchVectorWithScore(
|
||||
query,
|
||||
k,
|
||||
indexName,
|
||||
metadataKey,
|
||||
vectorKey,
|
||||
contentKey,
|
||||
redisClient,
|
||||
filter
|
||||
)
|
||||
}
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const indexName = nodeData.inputs?.indexName as string
|
||||
let contentKey = nodeData.inputs?.contentKey as string
|
||||
let metadataKey = nodeData.inputs?.metadataKey as string
|
||||
let vectorKey = nodeData.inputs?.vectorKey as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
const output = nodeData.outputs?.output as string
|
||||
|
||||
let redisUrl = getCredentialParam('redisUrl', credentialData, nodeData)
|
||||
if (!redisUrl || redisUrl === '') {
|
||||
const username = getCredentialParam('redisCacheUser', credentialData, nodeData)
|
||||
const password = getCredentialParam('redisCachePwd', credentialData, nodeData)
|
||||
const portStr = getCredentialParam('redisCachePort', credentialData, nodeData)
|
||||
const host = getCredentialParam('redisCacheHost', credentialData, nodeData)
|
||||
|
||||
redisUrl = 'redis://' + username + ':' + password + '@' + host + ':' + portStr
|
||||
}
|
||||
|
||||
const redisClient = createClient({ url: redisUrl })
|
||||
await redisClient.connect()
|
||||
|
||||
const storeConfig: RedisVectorStoreConfig = {
|
||||
redisClient: redisClient,
|
||||
indexName: indexName
|
||||
}
|
||||
|
||||
const vectorStore = new RedisVectorStore(embeddings, storeConfig)
|
||||
|
||||
if (!contentKey || contentKey === '') contentKey = 'content'
|
||||
if (!metadataKey || metadataKey === '') metadataKey = 'metadata'
|
||||
if (!vectorKey || vectorKey === '') vectorKey = 'content_vector'
|
||||
|
||||
// Avoid Illegal invocation error
|
||||
vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => {
|
||||
return await similaritySearchVectorWithScore(query, k, indexName, metadataKey, vectorKey, contentKey, redisClient, filter)
|
||||
}
|
||||
|
||||
if (output === 'retriever') {
|
||||
return vectorStore.asRetriever(k)
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
const checkIndexExists = async (redisClient: ReturnType<typeof createClient>, indexName: string) => {
|
||||
try {
|
||||
await redisClient.ft.info(indexName)
|
||||
} catch (err: any) {
|
||||
if (err?.message.includes('unknown command')) {
|
||||
throw new Error(
|
||||
'Failed to run FT.INFO command. Please ensure that you are running a RediSearch-capable Redis instance: https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/redis#setup'
|
||||
)
|
||||
}
|
||||
// index doesn't exist
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
const buildQuery = (
|
||||
query: number[],
|
||||
k: number,
|
||||
metadataKey: string,
|
||||
vectorKey: string,
|
||||
contentKey: string,
|
||||
filter?: string[]
|
||||
): [string, SearchOptions] => {
|
||||
const vectorScoreField = 'vector_score'
|
||||
|
||||
let hybridFields = '*'
|
||||
// if a filter is set, modify the hybrid query
|
||||
if (filter && filter.length) {
|
||||
// `filter` is a list of strings, then it's applied using the OR operator in the metadata key
|
||||
hybridFields = `@${metadataKey}:(${filter.map(escapeSpecialChars).join('|')})`
|
||||
}
|
||||
|
||||
const baseQuery = `${hybridFields} => [KNN ${k} @${vectorKey} $vector AS ${vectorScoreField}]`
|
||||
const returnFields = [metadataKey, contentKey, vectorScoreField]
|
||||
|
||||
const options: SearchOptions = {
|
||||
PARAMS: {
|
||||
vector: Buffer.from(new Float32Array(query).buffer)
|
||||
},
|
||||
RETURN: returnFields,
|
||||
SORTBY: vectorScoreField,
|
||||
DIALECT: 2,
|
||||
LIMIT: {
|
||||
from: 0,
|
||||
size: k
|
||||
}
|
||||
}
|
||||
|
||||
return [baseQuery, options]
|
||||
}
|
||||
|
||||
const similaritySearchVectorWithScore = async (
|
||||
query: number[],
|
||||
k: number,
|
||||
indexName: string,
|
||||
metadataKey: string,
|
||||
vectorKey: string,
|
||||
contentKey: string,
|
||||
redisClient: ReturnType<typeof createClient>,
|
||||
filter?: string[]
|
||||
): Promise<[Document, number][]> => {
|
||||
const results = await redisClient.ft.search(indexName, ...buildQuery(query, k, metadataKey, vectorKey, contentKey, filter))
|
||||
const result: [Document, number][] = []
|
||||
|
||||
if (results.total) {
|
||||
for (const res of results.documents) {
|
||||
if (res.value) {
|
||||
const document = res.value
|
||||
if (document.vector_score) {
|
||||
const metadataString = unEscapeSpecialChars(document[metadataKey] as string)
|
||||
result.push([
|
||||
new Document({
|
||||
pageContent: document[contentKey] as string,
|
||||
metadata: JSON.parse(metadataString)
|
||||
}),
|
||||
Number(document.vector_score)
|
||||
])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Redis_VectorStores }
|
||||
|
|
@ -23,6 +23,7 @@ export abstract class RedisSearchBase {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -34,6 +35,7 @@ export abstract class RedisSearchBase {
|
|||
this.icon = 'redis.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -14,8 +14,8 @@ class RedisExisting_VectorStores extends RedisSearchBase implements INode {
|
|||
this.version = 1.0
|
||||
this.description = 'Load existing index from Redis (i.e: Document has been upserted)'
|
||||
|
||||
// Remove deleteIndex from inputs as it is not applicable while fetching data from Redis
|
||||
let input = this.inputs.find((i) => i.name === 'deleteIndex')
|
||||
// Remove replaceIndex from inputs as it is not applicable while fetching data from Redis
|
||||
let input = this.inputs.find((i) => i.name === 'replaceIndex')
|
||||
if (input) this.inputs.splice(this.inputs.indexOf(input), 1)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -56,7 +56,7 @@ class RedisUpsert_VectorStores extends RedisSearchBase implements INode {
|
|||
}
|
||||
}
|
||||
|
||||
return super.init(nodeData, _, options, flattenDocs)
|
||||
return super.init(nodeData, _, options, finalDocs)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,197 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { SingleStoreVectorStore, SingleStoreVectorStoreConfig } from 'langchain/vectorstores/singlestore'
|
||||
import { Document } from 'langchain/document'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class SingleStore_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'SingleStore'
|
||||
this.name = 'singlestore'
|
||||
this.version = 1.0
|
||||
this.type = 'SingleStore'
|
||||
this.icon = 'singlestore.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description =
|
||||
'Upsert embedded data and perform similarity search upon query using SingleStore, a fast and distributed cloud relational database'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Needed when using SingleStore cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['singleStoreApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Host',
|
||||
name: 'host',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Database',
|
||||
name: 'database',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string',
|
||||
placeholder: 'embeddings',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Content Column Name',
|
||||
name: 'contentColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'content',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Vector Column Name',
|
||||
name: 'vectorColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'vector',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Metadata Column Name',
|
||||
name: 'metadataColumnName',
|
||||
type: 'string',
|
||||
placeholder: 'metadata',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'SingleStore Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'SingleStore Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(SingleStoreVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const user = getCredentialParam('user', credentialData, nodeData)
|
||||
const password = getCredentialParam('password', credentialData, nodeData)
|
||||
|
||||
const singleStoreConnectionConfig = {
|
||||
connectionOptions: {
|
||||
host: nodeData.inputs?.host as string,
|
||||
port: 3306,
|
||||
user,
|
||||
password,
|
||||
database: nodeData.inputs?.database as string
|
||||
},
|
||||
...(nodeData.inputs?.tableName ? { tableName: nodeData.inputs.tableName as string } : {}),
|
||||
...(nodeData.inputs?.contentColumnName ? { contentColumnName: nodeData.inputs.contentColumnName as string } : {}),
|
||||
...(nodeData.inputs?.vectorColumnName ? { vectorColumnName: nodeData.inputs.vectorColumnName as string } : {}),
|
||||
...(nodeData.inputs?.metadataColumnName ? { metadataColumnName: nodeData.inputs.metadataColumnName as string } : {})
|
||||
} as SingleStoreVectorStoreConfig
|
||||
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig)
|
||||
vectorStore.addDocuments.bind(vectorStore)(finalDocs)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const user = getCredentialParam('user', credentialData, nodeData)
|
||||
const password = getCredentialParam('password', credentialData, nodeData)
|
||||
|
||||
const singleStoreConnectionConfig = {
|
||||
connectionOptions: {
|
||||
host: nodeData.inputs?.host as string,
|
||||
port: 3306,
|
||||
user,
|
||||
password,
|
||||
database: nodeData.inputs?.database as string
|
||||
},
|
||||
...(nodeData.inputs?.tableName ? { tableName: nodeData.inputs.tableName as string } : {}),
|
||||
...(nodeData.inputs?.contentColumnName ? { contentColumnName: nodeData.inputs.contentColumnName as string } : {}),
|
||||
...(nodeData.inputs?.vectorColumnName ? { vectorColumnName: nodeData.inputs.vectorColumnName as string } : {}),
|
||||
...(nodeData.inputs?.metadataColumnName ? { metadataColumnName: nodeData.inputs.metadataColumnName as string } : {})
|
||||
} as SingleStoreVectorStoreConfig
|
||||
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: SingleStore_VectorStores }
|
||||
|
|
@ -11,6 +11,7 @@ class SingleStoreExisting_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -25,6 +26,7 @@ class SingleStoreExisting_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing document from SingleStore'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
@ -13,6 +13,7 @@ class SingleStoreUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -27,6 +28,7 @@ class SingleStoreUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to SingleStore'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
Before Width: | Height: | Size: 2.8 KiB After Width: | Height: | Size: 2.8 KiB |
|
|
@ -1,20 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg width="256px" height="256px" viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMidYMid">
|
||||
<title>SingleStore</title>
|
||||
<defs>
|
||||
<linearGradient x1="67.3449258%" y1="-26.0044686%" x2="-18.5227789%" y2="22.9877555%" id="singleStoreLinearGradient-1">
|
||||
<stop stop-color="#FF7BFF" offset="0%"></stop>
|
||||
<stop stop-color="#AA00FF" offset="35.0158%"></stop>
|
||||
<stop stop-color="#8800CC" offset="100%"></stop>
|
||||
</linearGradient>
|
||||
<linearGradient x1="36.2591509%" y1="-19.3628763%" x2="111.72205%" y2="44.9975357%" id="singleStoreLinearGradient-2">
|
||||
<stop stop-color="#FF7BFF" offset="3.54358%"></stop>
|
||||
<stop stop-color="#8800CC" offset="57.6537%"></stop>
|
||||
<stop stop-color="#311B92" offset="100%"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<g>
|
||||
<path d="M133.793438,0 C161.220114,7.62806846 186.208847,26.8506986 196.569923,50.3452712 C212.416431,88.4856134 208.759637,136.695058 191.389506,165.376569 C176.761849,188.8709 154.211058,201.381085 128.308006,201.075829 C88.0823106,200.770814 55.4752171,168.732936 55.1704441,128.456768 C55.1704441,88.1803574 86.8634599,54.9221798 128.308006,54.9221798 C135.012288,54.9221798 144.679052,55.851955 155.649674,60.4286222 C155.649674,60.4286222 147.762766,55.757287 127.50695,52.6192355 C69.3015772,44.9912153 0.621898574,89.095884 16.4683968,190.701711 C38.409617,229.757339 80.4639504,256.303989 128.308006,255.997284 C198.703093,255.692994 256.299161,198.024717 255.996071,127.236226 C255.996071,59.498847 200.836263,1.83073691 133.793438,0 Z" fill="url(#singleStoreLinearGradient-1)"></path>
|
||||
<path d="M181.635561,54.0037552 C171.884031,33.5605356 151.771183,17.3889203 127.087223,10.9813448 C121.601791,9.45574074 115.811828,8.84547014 109.412318,8.540359 C99.9653199,8.540359 90.8230945,9.76087603 81.376096,12.2018618 C57.9109865,19.2196838 41.455174,32.950265 31.7034025,43.6293966 C19.20906,57.9701518 10.9810571,72.9211776 6.1052197,87.8722034 C6.1052197,88.1774594 5.8004708,88.4824739 5.8004708,89.0927445 C5.4957219,90.3132857 4.27677462,93.9746678 4.27677462,94.8901944 C3.97202572,95.500465 3.97202572,96.4157502 3.66730098,97.0260207 C3.36255208,98.2465619 3.05780318,99.4668616 2.75307844,100.687403 C2.75307844,100.992659 2.75307844,101.297673 2.44832954,101.602688 C-5.47492441,140.963571 7.68750379,176.286091 15.6107577,189.406305 C17.5925312,192.688049 19.2199033,195.425935 20.8508738,197.938019 C2.87119611,100.298588 54.558966,53.6984992 113.373885,52.477958 C144.152582,51.8679289 174.931278,64.3778723 189.863709,89.3980006 C188.949389,75.6672745 188.035312,68.0392543 181.635561,54.0037552 Z" fill="url(#singleStoreLinearGradient-2)"></path>
|
||||
</g>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 2.8 KiB |
|
|
@ -0,0 +1,171 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { createClient } from '@supabase/supabase-js'
|
||||
import { Document } from 'langchain/document'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
import { SupabaseLibArgs, SupabaseVectorStore } from 'langchain/vectorstores/supabase'
|
||||
|
||||
class Supabase_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Supabase'
|
||||
this.name = 'supabase'
|
||||
this.version = 1.0
|
||||
this.type = 'Supabase'
|
||||
this.icon = 'supabase.svg'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity search upon query using Supabase via pgvector extension'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['supabaseApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Supabase Project URL',
|
||||
name: 'supabaseProjUrl',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Table Name',
|
||||
name: 'tableName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Query Name',
|
||||
name: 'queryName',
|
||||
type: 'string'
|
||||
},
|
||||
{
|
||||
label: 'Supabase Metadata Filter',
|
||||
name: 'supabaseMetadataFilter',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Supabase Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Supabase Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(SupabaseVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const supabaseProjUrl = nodeData.inputs?.supabaseProjUrl as string
|
||||
const tableName = nodeData.inputs?.tableName as string
|
||||
const queryName = nodeData.inputs?.queryName as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const supabaseApiKey = getCredentialParam('supabaseApiKey', credentialData, nodeData)
|
||||
|
||||
const client = createClient(supabaseProjUrl, supabaseApiKey)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await SupabaseVectorStore.fromDocuments(finalDocs, embeddings, {
|
||||
client,
|
||||
tableName: tableName,
|
||||
queryName: queryName
|
||||
})
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const supabaseProjUrl = nodeData.inputs?.supabaseProjUrl as string
|
||||
const tableName = nodeData.inputs?.tableName as string
|
||||
const queryName = nodeData.inputs?.queryName as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const supabaseMetadataFilter = nodeData.inputs?.supabaseMetadataFilter
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const supabaseApiKey = getCredentialParam('supabaseApiKey', credentialData, nodeData)
|
||||
|
||||
const client = createClient(supabaseProjUrl, supabaseApiKey)
|
||||
|
||||
const obj: SupabaseLibArgs = {
|
||||
client,
|
||||
tableName,
|
||||
queryName
|
||||
}
|
||||
|
||||
if (supabaseMetadataFilter) {
|
||||
const metadatafilter = typeof supabaseMetadataFilter === 'object' ? supabaseMetadataFilter : JSON.parse(supabaseMetadataFilter)
|
||||
obj.filter = metadatafilter
|
||||
}
|
||||
|
||||
const vectorStore = await SupabaseVectorStore.fromExistingIndex(embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Supabase_VectorStores }
|
||||
|
|
@ -12,6 +12,7 @@ class Supabase_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -26,6 +27,7 @@ class Supabase_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Supabase (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
@ -14,6 +14,7 @@ class SupabaseUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -28,6 +29,7 @@ class SupabaseUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Supabase'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 1.1 KiB |
|
|
@ -1,15 +0,0 @@
|
|||
<svg width="109" height="113" viewBox="0 0 109 113" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M63.7076 110.284C60.8481 113.885 55.0502 111.912 54.9813 107.314L53.9738 40.0625L99.1935 40.0625C107.384 40.0625 111.952 49.5226 106.859 55.9372L63.7076 110.284Z" fill="url(#paint0_linear)"/>
|
||||
<path d="M63.7076 110.284C60.8481 113.885 55.0502 111.912 54.9813 107.314L53.9738 40.0625L99.1935 40.0625C107.384 40.0625 111.952 49.5226 106.859 55.9372L63.7076 110.284Z" fill="url(#paint1_linear)" fill-opacity="0.2"/>
|
||||
<path d="M45.317 2.07103C48.1765 -1.53037 53.9745 0.442937 54.0434 5.041L54.4849 72.2922H9.83113C1.64038 72.2922 -2.92775 62.8321 2.1655 56.4175L45.317 2.07103Z" fill="#3ECF8E"/>
|
||||
<defs>
|
||||
<linearGradient id="paint0_linear" x1="53.9738" y1="54.9738" x2="94.1635" y2="71.8293" gradientUnits="userSpaceOnUse">
|
||||
<stop stop-color="#249361"/>
|
||||
<stop offset="1" stop-color="#3ECF8E"/>
|
||||
</linearGradient>
|
||||
<linearGradient id="paint1_linear" x1="36.1558" y1="30.5779" x2="54.4844" y2="65.0804" gradientUnits="userSpaceOnUse">
|
||||
<stop/>
|
||||
<stop offset="1" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 1.1 KiB |
|
|
@ -0,0 +1,242 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { VectaraStore, VectaraLibArgs, VectaraFilter, VectaraContextConfig, VectaraFile } from 'langchain/vectorstores/vectara'
|
||||
import { Document } from 'langchain/document'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class Vectara_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Vectara'
|
||||
this.name = 'vectara'
|
||||
this.version = 1.0
|
||||
this.type = 'Vectara'
|
||||
this.icon = 'vectara.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert embedded data and perform similarity search upon query using Vectara, a LLM-powered search-as-a-service'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
credentialNames: ['vectaraApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'File',
|
||||
name: 'file',
|
||||
description:
|
||||
'File to upload to Vectara. Supported file types: https://docs.vectara.com/docs/api-reference/indexing-apis/file-upload/file-upload-filetypes',
|
||||
type: 'file',
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Metadata Filter',
|
||||
name: 'filter',
|
||||
description:
|
||||
'Filter to apply to Vectara metadata. Refer to the <a target="_blank" href="https://docs.flowiseai.com/vector-stores/vectara">documentation</a> on how to use Vectara filters with Flowise.',
|
||||
type: 'string',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Sentences Before',
|
||||
name: 'sentencesBefore',
|
||||
description: 'Number of sentences to fetch before the matched sentence. Defaults to 2.',
|
||||
type: 'number',
|
||||
default: 2,
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Sentences After',
|
||||
name: 'sentencesAfter',
|
||||
description: 'Number of sentences to fetch after the matched sentence. Defaults to 2.',
|
||||
type: 'number',
|
||||
default: 2,
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Lambda',
|
||||
name: 'lambda',
|
||||
description:
|
||||
'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Defaults to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Vectara Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Vectara Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(VectaraStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
const customerId = getCredentialParam('customerID', credentialData, nodeData)
|
||||
const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',')
|
||||
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = {} as Embeddings
|
||||
const vectaraMetadataFilter = nodeData.inputs?.filter as string
|
||||
const sentencesBefore = nodeData.inputs?.sentencesBefore as number
|
||||
const sentencesAfter = nodeData.inputs?.sentencesAfter as number
|
||||
const lambda = nodeData.inputs?.lambda as number
|
||||
const fileBase64 = nodeData.inputs?.file
|
||||
|
||||
const vectaraArgs: VectaraLibArgs = {
|
||||
apiKey: apiKey,
|
||||
customerId: customerId,
|
||||
corpusId: corpusId,
|
||||
source: 'flowise'
|
||||
}
|
||||
|
||||
const vectaraFilter: VectaraFilter = {}
|
||||
if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter
|
||||
if (lambda) vectaraFilter.lambda = lambda
|
||||
|
||||
const vectaraContextConfig: VectaraContextConfig = {}
|
||||
if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore
|
||||
if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter
|
||||
vectaraFilter.contextConfig = vectaraContextConfig
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
let files: string[] = []
|
||||
if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) {
|
||||
files = JSON.parse(fileBase64)
|
||||
} else {
|
||||
files = [fileBase64]
|
||||
}
|
||||
|
||||
const vectaraFiles: VectaraFile[] = []
|
||||
for (const file of files) {
|
||||
const splitDataURI = file.split(',')
|
||||
splitDataURI.pop()
|
||||
const bf = Buffer.from(splitDataURI.pop() || '', 'base64')
|
||||
const blob = new Blob([bf])
|
||||
vectaraFiles.push({ blob: blob, fileName: getFileName(file) })
|
||||
}
|
||||
|
||||
try {
|
||||
if (finalDocs.length) await VectaraStore.fromDocuments(finalDocs, embeddings, vectaraArgs)
|
||||
if (vectaraFiles.length) {
|
||||
const vectorStore = new VectaraStore(vectaraArgs)
|
||||
await vectorStore.addFiles(vectaraFiles)
|
||||
}
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
const customerId = getCredentialParam('customerID', credentialData, nodeData)
|
||||
const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',')
|
||||
|
||||
const vectaraMetadataFilter = nodeData.inputs?.filter as string
|
||||
const sentencesBefore = nodeData.inputs?.sentencesBefore as number
|
||||
const sentencesAfter = nodeData.inputs?.sentencesAfter as number
|
||||
const lambda = nodeData.inputs?.lambda as number
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const vectaraArgs: VectaraLibArgs = {
|
||||
apiKey: apiKey,
|
||||
customerId: customerId,
|
||||
corpusId: corpusId,
|
||||
source: 'flowise'
|
||||
}
|
||||
|
||||
const vectaraFilter: VectaraFilter = {}
|
||||
if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter
|
||||
if (lambda) vectaraFilter.lambda = lambda
|
||||
|
||||
const vectaraContextConfig: VectaraContextConfig = {}
|
||||
if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore
|
||||
if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter
|
||||
vectaraFilter.contextConfig = vectaraContextConfig
|
||||
|
||||
const vectorStore = new VectaraStore(vectaraArgs)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k, vectaraFilter)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
const getFileName = (fileBase64: string) => {
|
||||
let fileNames = []
|
||||
if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) {
|
||||
const files = JSON.parse(fileBase64)
|
||||
for (const file of files) {
|
||||
const splitDataURI = file.split(',')
|
||||
const filename = splitDataURI[splitDataURI.length - 1].split(':')[1]
|
||||
fileNames.push(filename)
|
||||
}
|
||||
return fileNames.join(', ')
|
||||
} else {
|
||||
const splitDataURI = fileBase64.split(',')
|
||||
const filename = splitDataURI[splitDataURI.length - 1].split(':')[1]
|
||||
return filename
|
||||
}
|
||||
}
|
||||
|
||||
// Registered by Flowise's node loader via the exported `nodeClass` key
module.exports = { nodeClass: Vectara_VectorStores }
|
||||
|
|
@ -10,6 +10,7 @@ class VectaraExisting_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -24,6 +25,7 @@ class VectaraExisting_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Vectara (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ class VectaraUpload_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -24,6 +25,7 @@ class VectaraUpload_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upload files to Vectara'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ class VectaraUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -27,6 +28,7 @@ class VectaraUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Vectara'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||
|
|
@ -0,0 +1,213 @@
|
|||
import { flatten } from 'lodash'
|
||||
import weaviate, { WeaviateClient, ApiKey } from 'weaviate-ts-client'
|
||||
import { WeaviateLibArgs, WeaviateStore } from 'langchain/vectorstores/weaviate'
|
||||
import { Document } from 'langchain/document'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class Weaviate_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Weaviate'
|
||||
this.name = 'weaviate'
|
||||
this.version = 1.0
|
||||
this.type = 'Weaviate'
|
||||
this.icon = 'weaviate.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description =
|
||||
'Upsert embedded data and perform similarity search upon query using Weaviate, a scalable open-source vector database'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
description: 'Only needed when using Weaviate cloud hosted',
|
||||
optional: true,
|
||||
credentialNames: ['weaviateApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Scheme',
|
||||
name: 'weaviateScheme',
|
||||
type: 'options',
|
||||
default: 'https',
|
||||
options: [
|
||||
{
|
||||
label: 'https',
|
||||
name: 'https'
|
||||
},
|
||||
{
|
||||
label: 'http',
|
||||
name: 'http'
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Host',
|
||||
name: 'weaviateHost',
|
||||
type: 'string',
|
||||
placeholder: 'localhost:8080'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Index',
|
||||
name: 'weaviateIndex',
|
||||
type: 'string',
|
||||
placeholder: 'Test'
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Text Key',
|
||||
name: 'weaviateTextKey',
|
||||
type: 'string',
|
||||
placeholder: 'text',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Metadata Keys',
|
||||
name: 'weaviateMetadataKeys',
|
||||
type: 'string',
|
||||
rows: 4,
|
||||
placeholder: `["foo"]`,
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Weaviate Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Weaviate Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(WeaviateStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const weaviateScheme = nodeData.inputs?.weaviateScheme as string
|
||||
const weaviateHost = nodeData.inputs?.weaviateHost as string
|
||||
const weaviateIndex = nodeData.inputs?.weaviateIndex as string
|
||||
const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string
|
||||
const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const weaviateApiKey = getCredentialParam('weaviateApiKey', credentialData, nodeData)
|
||||
|
||||
const clientConfig: any = {
|
||||
scheme: weaviateScheme,
|
||||
host: weaviateHost
|
||||
}
|
||||
if (weaviateApiKey) clientConfig.apiKey = new ApiKey(weaviateApiKey)
|
||||
|
||||
const client: WeaviateClient = weaviate.client(clientConfig)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const obj: WeaviateLibArgs = {
|
||||
client,
|
||||
indexName: weaviateIndex
|
||||
}
|
||||
|
||||
if (weaviateTextKey) obj.textKey = weaviateTextKey
|
||||
if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, ''))
|
||||
|
||||
try {
|
||||
await WeaviateStore.fromDocuments(finalDocs, embeddings, obj)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const weaviateScheme = nodeData.inputs?.weaviateScheme as string
|
||||
const weaviateHost = nodeData.inputs?.weaviateHost as string
|
||||
const weaviateIndex = nodeData.inputs?.weaviateIndex as string
|
||||
const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string
|
||||
const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const weaviateApiKey = getCredentialParam('weaviateApiKey', credentialData, nodeData)
|
||||
|
||||
const clientConfig: any = {
|
||||
scheme: weaviateScheme,
|
||||
host: weaviateHost
|
||||
}
|
||||
if (weaviateApiKey) clientConfig.apiKey = new ApiKey(weaviateApiKey)
|
||||
|
||||
const client: WeaviateClient = weaviate.client(clientConfig)
|
||||
|
||||
const obj: WeaviateLibArgs = {
|
||||
client,
|
||||
indexName: weaviateIndex
|
||||
}
|
||||
|
||||
if (weaviateTextKey) obj.textKey = weaviateTextKey
|
||||
if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, ''))
|
||||
|
||||
const vectorStore = await WeaviateStore.fromExistingIndex(embeddings, obj)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { nodeClass: Weaviate_VectorStores }
|
||||
|
|
@ -12,6 +12,7 @@ class Weaviate_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -26,6 +27,7 @@ class Weaviate_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Weaviate (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
@ -14,6 +14,7 @@ class WeaviateUpsert_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -28,6 +29,7 @@ class WeaviateUpsert_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Upsert documents to Weaviate'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
Before Width: | Height: | Size: 54 KiB After Width: | Height: | Size: 54 KiB |
|
Before Width: | Height: | Size: 54 KiB |
|
|
@ -0,0 +1,282 @@
|
|||
import { flatten } from 'lodash'
|
||||
import { IDocument, ZepClient } from '@getzep/zep-js'
|
||||
import { ZepVectorStore, IZepConfig } from 'langchain/vectorstores/zep'
|
||||
import { Embeddings } from 'langchain/embeddings/base'
|
||||
import { Document } from 'langchain/document'
|
||||
import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface'
|
||||
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
|
||||
|
||||
class Zep_VectorStores implements INode {
|
||||
label: string
|
||||
name: string
|
||||
version: number
|
||||
description: string
|
||||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
outputs: INodeOutputsValue[]
|
||||
|
||||
constructor() {
|
||||
this.label = 'Zep'
|
||||
this.name = 'zep'
|
||||
this.version = 1.0
|
||||
this.type = 'Zep'
|
||||
this.icon = 'zep.png'
|
||||
this.category = 'Vector Stores'
|
||||
this.description =
|
||||
'Upsert embedded data and perform similarity search upon query using Zep, a fast and scalable building block for LLM apps'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'NEW'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
type: 'credential',
|
||||
optional: true,
|
||||
description: 'Configure JWT authentication on your Zep instance (Optional)',
|
||||
credentialNames: ['zepMemoryApi']
|
||||
}
|
||||
this.inputs = [
|
||||
{
|
||||
label: 'Document',
|
||||
name: 'document',
|
||||
type: 'Document',
|
||||
list: true,
|
||||
optional: true
|
||||
},
|
||||
{
|
||||
label: 'Embeddings',
|
||||
name: 'embeddings',
|
||||
type: 'Embeddings'
|
||||
},
|
||||
{
|
||||
label: 'Base URL',
|
||||
name: 'baseURL',
|
||||
type: 'string',
|
||||
default: 'http://127.0.0.1:8000'
|
||||
},
|
||||
{
|
||||
label: 'Zep Collection',
|
||||
name: 'zepCollection',
|
||||
type: 'string',
|
||||
placeholder: 'my-first-collection'
|
||||
},
|
||||
{
|
||||
label: 'Zep Metadata Filter',
|
||||
name: 'zepMetadataFilter',
|
||||
type: 'json',
|
||||
optional: true,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Embedding Dimension',
|
||||
name: 'dimension',
|
||||
type: 'number',
|
||||
default: 1536,
|
||||
additionalParams: true
|
||||
},
|
||||
{
|
||||
label: 'Top K',
|
||||
name: 'topK',
|
||||
description: 'Number of top results to fetch. Default to 4',
|
||||
placeholder: '4',
|
||||
type: 'number',
|
||||
additionalParams: true,
|
||||
optional: true
|
||||
}
|
||||
]
|
||||
this.outputs = [
|
||||
{
|
||||
label: 'Zep Retriever',
|
||||
name: 'retriever',
|
||||
baseClasses: this.baseClasses
|
||||
},
|
||||
{
|
||||
label: 'Zep Vector Store',
|
||||
name: 'vectorStore',
|
||||
baseClasses: [this.type, ...getBaseClasses(ZepVectorStore)]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
vectorStoreMethods = {
|
||||
async upsert(nodeData: INodeData, options: ICommonObject): Promise<void> {
|
||||
const baseURL = nodeData.inputs?.baseURL as string
|
||||
const zepCollection = nodeData.inputs?.zepCollection as string
|
||||
const dimension = (nodeData.inputs?.dimension as number) ?? 1536
|
||||
const docs = nodeData.inputs?.document as Document[]
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
|
||||
const flattenDocs = docs && docs.length ? flatten(docs) : []
|
||||
const finalDocs = []
|
||||
for (let i = 0; i < flattenDocs.length; i += 1) {
|
||||
if (flattenDocs[i] && flattenDocs[i].pageContent) {
|
||||
finalDocs.push(new Document(flattenDocs[i]))
|
||||
}
|
||||
}
|
||||
|
||||
const zepConfig: IZepConfig = {
|
||||
apiUrl: baseURL,
|
||||
collectionName: zepCollection,
|
||||
embeddingDimensions: dimension,
|
||||
isAutoEmbedded: false
|
||||
}
|
||||
if (apiKey) zepConfig.apiKey = apiKey
|
||||
|
||||
try {
|
||||
await ZepVectorStore.fromDocuments(finalDocs, embeddings, zepConfig)
|
||||
} catch (e) {
|
||||
throw new Error(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
|
||||
const baseURL = nodeData.inputs?.baseURL as string
|
||||
const zepCollection = nodeData.inputs?.zepCollection as string
|
||||
const zepMetadataFilter = nodeData.inputs?.zepMetadataFilter
|
||||
const dimension = nodeData.inputs?.dimension as number
|
||||
const embeddings = nodeData.inputs?.embeddings as Embeddings
|
||||
const output = nodeData.outputs?.output as string
|
||||
const topK = nodeData.inputs?.topK as string
|
||||
const k = topK ? parseFloat(topK) : 4
|
||||
|
||||
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
|
||||
const apiKey = getCredentialParam('apiKey', credentialData, nodeData)
|
||||
|
||||
const zepConfig: IZepConfig & Partial<ZepFilter> = {
|
||||
apiUrl: baseURL,
|
||||
collectionName: zepCollection,
|
||||
embeddingDimensions: dimension,
|
||||
isAutoEmbedded: false
|
||||
}
|
||||
if (apiKey) zepConfig.apiKey = apiKey
|
||||
if (zepMetadataFilter) {
|
||||
const metadatafilter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : JSON.parse(zepMetadataFilter)
|
||||
zepConfig.filter = metadatafilter
|
||||
}
|
||||
|
||||
const vectorStore = await ZepExistingVS.fromExistingIndex(embeddings, zepConfig)
|
||||
|
||||
if (output === 'retriever') {
|
||||
const retriever = vectorStore.asRetriever(k)
|
||||
return retriever
|
||||
} else if (output === 'vectorStore') {
|
||||
;(vectorStore as any).k = k
|
||||
return vectorStore
|
||||
}
|
||||
return vectorStore
|
||||
}
|
||||
}
|
||||
|
||||
// Optional metadata filter mixed into the store config as
// `IZepConfig & Partial<ZepFilter>`; each key/value pair is translated into a
// Zep JSONPath "where" clause by ZepExistingVS.similaritySearchVectorWithScore.
interface ZepFilter {
    filter: Record<string, any>
}
|
||||
|
||||
function zepDocsToDocumentsAndScore(results: IDocument[]): [Document, number][] {
|
||||
return results.map((d) => [
|
||||
new Document({
|
||||
pageContent: d.content,
|
||||
metadata: d.metadata
|
||||
}),
|
||||
d.score ? d.score : 0
|
||||
])
|
||||
}
|
||||
|
||||
function assignMetadata(value: string | Record<string, unknown> | object | undefined): Record<string, unknown> | undefined {
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
return value as Record<string, unknown>
|
||||
}
|
||||
if (value !== undefined) {
|
||||
console.warn('Metadata filters must be an object, Record, or undefined.')
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
/**
 * Wrapper around `ZepVectorStore` for querying an *existing* Zep collection
 * with optional metadata filtering. Unlike the base class, the collection is
 * initialized lazily on the first similarity search instead of up front.
 */
class ZepExistingVS extends ZepVectorStore {
    // Metadata filter captured from the node config (see ZepFilter)
    filter?: Record<string, any>
    // Full config retained so the collection can be (re)initialized lazily
    args?: IZepConfig & Partial<ZepFilter>

    constructor(embeddings: Embeddings, args: IZepConfig & Partial<ZepFilter>) {
        super(embeddings, args)
        this.filter = args.filter
        this.args = args
    }

    // Connects the client and fetches the collection, creating it if absent.
    // NOTE(review): method name has a typo ("initalize"); kept as-is because
    // renaming would change the class's public surface.
    async initalizeCollection(args: IZepConfig & Partial<ZepFilter>) {
        this.client = await ZepClient.init(args.apiUrl, args.apiKey)
        try {
            this.collection = await this.client.document.getCollection(args.collectionName)
        } catch (err) {
            // A missing collection is created on the fly; other Errors bubble up.
            // NOTE(review): non-Error throwables are silently swallowed here.
            if (err instanceof Error) {
                if (err.name === 'NotFoundError') {
                    await this.createNewCollection(args)
                } else {
                    throw err
                }
            }
        }
    }

    // Creates the collection; embeddingDimensions is required so Zep can size the index.
    async createNewCollection(args: IZepConfig & Partial<ZepFilter>) {
        if (!args.embeddingDimensions) {
            throw new Error(
                `Collection ${args.collectionName} not found. You can create a new Collection by providing embeddingDimensions.`
            )
        }

        this.collection = await this.client.document.addCollection({
            name: args.collectionName,
            description: args.description,
            metadata: args.metadata,
            embeddingDimensions: args.embeddingDimensions,
            isAutoEmbedded: false // embeddings are supplied client-side
        })
    }

    /**
     * Similarity search against the Zep collection.
     *
     * Key/value filters are translated into AND-ed Zep JSONPath clauses,
     * e.g. `{ foo: 'bar' }` -> `$[*] ? (@.foo == "bar")`.
     *
     * @throws when both a call-site `filter` and the instance `this.filter` are set
     */
    async similaritySearchVectorWithScore(
        query: number[],
        k: number,
        filter?: Record<string, unknown> | undefined
    ): Promise<[Document, number][]> {
        if (filter && this.filter) {
            throw new Error('cannot provide both `filter` and `this.filter`')
        }
        const _filters = filter ?? this.filter
        const ANDFilters = []
        for (const filterKey in _filters) {
            // String values must be quoted inside the JSONPath expression
            let filterVal = _filters[filterKey]
            if (typeof filterVal === 'string') filterVal = `"${filterVal}"`
            ANDFilters.push({ jsonpath: `$[*] ? (@.${filterKey} == ${filterVal})` })
        }
        // NOTE(review): with no filters this still sends `{ and: [] }` instead of
        // omitting metadata entirely — confirm Zep treats an empty AND as "match all".
        const newfilter = {
            where: { and: ANDFilters }
        }
        // Lazy init on every search; errors are logged with context then rethrown
        await this.initalizeCollection(this.args!).catch((err) => {
            console.error('Error initializing collection:', err)
            throw err
        })
        const results = await this.collection.search(
            {
                embedding: new Float32Array(query),
                metadata: assignMetadata(newfilter)
            },
            k
        )
        return zepDocsToDocumentsAndScore(results)
    }

    // Factory mirroring ZepVectorStore.fromExistingIndex but returning this
    // lazy-initializing subclass; no network call happens here.
    static async fromExistingIndex(embeddings: Embeddings, dbConfig: IZepConfig & Partial<ZepFilter>): Promise<ZepVectorStore> {
        const instance = new this(embeddings, dbConfig)
        return instance
    }
}
|
||||
|
||||
// Flowise node registration: the node loader picks up `nodeClass` from this export
module.exports = { nodeClass: Zep_VectorStores }
|
||||
|
|
@ -13,6 +13,7 @@ class Zep_Existing_VectorStores implements INode {
|
|||
type: string
|
||||
icon: string
|
||||
category: string
|
||||
badge: string
|
||||
baseClasses: string[]
|
||||
inputs: INodeParams[]
|
||||
credential: INodeParams
|
||||
|
|
@ -27,6 +28,7 @@ class Zep_Existing_VectorStores implements INode {
|
|||
this.category = 'Vector Stores'
|
||||
this.description = 'Load existing index from Zep (i.e: Document has been upserted)'
|
||||
this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever']
|
||||
this.badge = 'DEPRECATING'
|
||||
this.credential = {
|
||||
label: 'Connect Credential',
|
||||
name: 'credential',
|
||||
|
|
|
|||