Compare commits

...

3 Commits

Author SHA1 Message Date
Henry Heng 54ff43e8f1
Bugfix/HF custom endpoint (#2811)
include fix for hf custom endpoint
2024-07-16 21:42:24 +01:00
Ong Chung Yau 95b2cf7b7f
Feature/extract import all (#2796)
* use existing route to get all chatflows

* add export all chatflows functionality

* add read exported all chatflows json file functionality

* add save chatflows functionality in server

* chore rename saveChatflows to importChatflows and others

* chore rewrite snackbar message

* fix import chatflows when no data in chatflows db

* add handle when import file array length is 0

* chore update and add meaningful comments in importChatflows

* update method of storing flowdata for importChatflows function

* Refresh/redirect to chatflows when import is successful

* fix lint

---------

Co-authored-by: Ilango <rajagopalilango@gmail.com>
2024-07-16 09:47:41 +08:00
Henry Heng 074bb738a3
Release/1.8.4 (#2805)
* 🥳 flowise release 1.8.4

* 🥳 flowise-components release 1.8.6
2024-07-15 15:34:33 +01:00
13 changed files with 334 additions and 68 deletions

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.8.3",
"version": "1.8.4",
"private": true,
"homepage": "https://flowiseai.com",
"workspaces": [

View File

@ -18,7 +18,7 @@ class ChatHuggingFace_ChatModels implements INode {
constructor() {
this.label = 'ChatHuggingFace'
this.name = 'chatHuggingFace'
this.version = 2.0
this.version = 3.0
this.type = 'ChatHuggingFace'
this.icon = 'HuggingFace.svg'
this.category = 'Chat Models'
@ -96,6 +96,16 @@ class ChatHuggingFace_ChatModels implements INode {
description: 'Frequency Penalty parameter may not apply to certain model. Please check available model parameters',
optional: true,
additionalParams: true
},
{
label: 'Stop Sequence',
name: 'stop',
type: 'string',
rows: 4,
placeholder: 'AI assistant:',
description: 'Sets the stop sequences to use. Use comma to seperate different sequences.',
optional: true,
additionalParams: true
}
]
}
@ -109,6 +119,7 @@ class ChatHuggingFace_ChatModels implements INode {
const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
const endpoint = nodeData.inputs?.endpoint as string
const cache = nodeData.inputs?.cache as BaseCache
const stop = nodeData.inputs?.stop as string
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const huggingFaceApiKey = getCredentialParam('huggingFaceApiKey', credentialData, nodeData)
@ -123,7 +134,11 @@ class ChatHuggingFace_ChatModels implements INode {
if (topP) obj.topP = parseFloat(topP)
if (hfTopK) obj.topK = parseFloat(hfTopK)
if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
if (endpoint) obj.endpoint = endpoint
if (endpoint) obj.endpointUrl = endpoint
if (stop) {
const stopSequences = stop.split(',')
obj.stopSequences = stopSequences
}
const huggingFace = new HuggingFaceInference(obj)
if (cache) huggingFace.cache = cache

View File

@ -1,32 +1,19 @@
import { LLM, BaseLLMParams } from '@langchain/core/language_models/llms'
import { getEnvironmentVariable } from '../../../src/utils'
import { GenerationChunk } from '@langchain/core/outputs'
import { CallbackManagerForLLMRun } from '@langchain/core/callbacks/manager'
export interface HFInput {
/** Model to use */
model: string
/** Sampling temperature to use */
temperature?: number
/**
* Maximum number of tokens to generate in the completion.
*/
maxTokens?: number
/** Total probability mass of tokens to consider at each step */
stopSequences?: string[]
topP?: number
/** Integer to define the top tokens considered within the sample operation to create new text. */
topK?: number
/** Penalizes repeated tokens according to frequency */
frequencyPenalty?: number
/** API key to use. */
apiKey?: string
/** Private endpoint to use. */
endpoint?: string
endpointUrl?: string
includeCredentials?: string | boolean
}
export class HuggingFaceInference extends LLM implements HFInput {
@ -40,6 +27,8 @@ export class HuggingFaceInference extends LLM implements HFInput {
temperature: number | undefined = undefined
stopSequences: string[] | undefined = undefined
maxTokens: number | undefined = undefined
topP: number | undefined = undefined
@ -50,7 +39,9 @@ export class HuggingFaceInference extends LLM implements HFInput {
apiKey: string | undefined = undefined
endpoint: string | undefined = undefined
endpointUrl: string | undefined = undefined
includeCredentials: string | boolean | undefined = undefined
constructor(fields?: Partial<HFInput> & BaseLLMParams) {
super(fields ?? {})
@ -58,11 +49,13 @@ export class HuggingFaceInference extends LLM implements HFInput {
this.model = fields?.model ?? this.model
this.temperature = fields?.temperature ?? this.temperature
this.maxTokens = fields?.maxTokens ?? this.maxTokens
this.stopSequences = fields?.stopSequences ?? this.stopSequences
this.topP = fields?.topP ?? this.topP
this.topK = fields?.topK ?? this.topK
this.frequencyPenalty = fields?.frequencyPenalty ?? this.frequencyPenalty
this.endpoint = fields?.endpoint ?? ''
this.apiKey = fields?.apiKey ?? getEnvironmentVariable('HUGGINGFACEHUB_API_KEY')
this.endpointUrl = fields?.endpointUrl
this.includeCredentials = fields?.includeCredentials
if (!this.apiKey) {
throw new Error(
'Please set an API key for HuggingFace Hub in the environment variable HUGGINGFACEHUB_API_KEY or in the apiKey field of the HuggingFaceInference constructor.'
@ -74,31 +67,65 @@ export class HuggingFaceInference extends LLM implements HFInput {
return 'hf'
}
/** @ignore */
async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
const { HfInference } = await HuggingFaceInference.imports()
const hf = new HfInference(this.apiKey)
const obj: any = {
invocationParams(options?: this['ParsedCallOptions']) {
return {
model: this.model,
parameters: {
// make it behave similar to openai, returning only the generated text
return_full_text: false,
temperature: this.temperature,
max_new_tokens: this.maxTokens,
stop: options?.stop ?? this.stopSequences,
top_p: this.topP,
top_k: this.topK,
repetition_penalty: this.frequencyPenalty
},
inputs: prompt
}
}
if (this.endpoint) {
hf.endpoint(this.endpoint)
} else {
obj.model = this.model
}
async *_streamResponseChunks(
prompt: string,
options: this['ParsedCallOptions'],
runManager?: CallbackManagerForLLMRun
): AsyncGenerator<GenerationChunk> {
const hfi = await this._prepareHFInference()
const stream = await this.caller.call(async () =>
hfi.textGenerationStream({
...this.invocationParams(options),
inputs: prompt
})
)
for await (const chunk of stream) {
const token = chunk.token.text
yield new GenerationChunk({ text: token, generationInfo: chunk })
await runManager?.handleLLMNewToken(token ?? '')
// stream is done
if (chunk.generated_text)
yield new GenerationChunk({
text: '',
generationInfo: { finished: true }
})
}
const res = await this.caller.callWithOptions({ signal: options.signal }, hf.textGeneration.bind(hf), obj)
}
/** @ignore */
// Single-shot text generation: prepares the HF client, merges the shared
// invocation parameters with the prompt, and returns only the generated text.
async _call(prompt: string, options: this['ParsedCallOptions']): Promise<string> {
const hfi = await this._prepareHFInference()
const args = { ...this.invocationParams(options), inputs: prompt }
// textGeneration is bound to hfi so its `this` survives the indirect call through caller;
// options.signal lets callers abort the request
const res = await this.caller.callWithOptions({ signal: options.signal }, hfi.textGeneration.bind(hfi), args)
return res.generated_text
}
/** @ignore */
// Lazily imports the @huggingface/inference SDK and builds a client.
// When `endpointUrl` is set, scopes the client to that custom/private
// endpoint; otherwise returns the default Hub client.
private async _prepareHFInference() {
const { HfInference } = await HuggingFaceInference.imports()
const hfi = new HfInference(this.apiKey, {
includeCredentials: this.includeCredentials
})
return this.endpointUrl ? hfi.endpoint(this.endpointUrl) : hfi
}
/** @ignore */
static async imports(): Promise<{
HfInference: typeof import('@huggingface/inference').HfInference

View File

@ -1,6 +1,6 @@
{
"name": "flowise-components",
"version": "1.8.5",
"version": "1.8.6",
"description": "Flowiseai Components",
"main": "dist/src/index",
"types": "dist/src/index.d.ts",

View File

@ -1,6 +1,6 @@
{
"name": "flowise",
"version": "1.8.3",
"version": "1.8.4",
"description": "Flowiseai Server",
"main": "dist/index",
"types": "dist/index.d.ts",

View File

@ -1,11 +1,11 @@
import { Request, Response, NextFunction } from 'express'
import chatflowsService from '../../services/chatflows'
import { ChatFlow } from '../../database/entities/ChatFlow'
import { createRateLimiter } from '../../utils/rateLimit'
import { getApiKey } from '../../utils/apiKey'
import { InternalFlowiseError } from '../../errors/internalFlowiseError'
import { NextFunction, Request, Response } from 'express'
import { StatusCodes } from 'http-status-codes'
import { ChatFlow } from '../../database/entities/ChatFlow'
import { InternalFlowiseError } from '../../errors/internalFlowiseError'
import { ChatflowType } from '../../Interface'
import chatflowsService from '../../services/chatflows'
import { getApiKey } from '../../utils/apiKey'
import { createRateLimiter } from '../../utils/rateLimit'
const checkIfChatflowIsValidForStreaming = async (req: Request, res: Response, next: NextFunction) => {
try {
@ -105,6 +105,16 @@ const saveChatflow = async (req: Request, res: Response, next: NextFunction) =>
}
}
// POST handler: bulk-import the chatflows array sent in the request payload
// (under the `Chatflows` key) and return the service's insert result as JSON.
const importChatflows = async (req: Request, res: Response, next: NextFunction) => {
    try {
        const incomingChatflows: Partial<ChatFlow>[] = req.body.Chatflows
        const apiResponse = await chatflowsService.importChatflows(incomingChatflows)
        return res.json(apiResponse)
    } catch (error) {
        next(error)
    }
}
const updateChatflow = async (req: Request, res: Response, next: NextFunction) => {
try {
if (typeof req.params === 'undefined' || !req.params.id) {
@ -167,6 +177,7 @@ export default {
getChatflowByApiKey,
getChatflowById,
saveChatflow,
importChatflows,
updateChatflow,
getSinglePublicChatflow,
getSinglePublicChatbotConfig

View File

@ -4,6 +4,7 @@ const router = express.Router()
// CREATE
router.post('/', chatflowsController.saveChatflow)
router.post('/importchatflows', chatflowsController.importChatflows)
// READ
router.get('/', chatflowsController.getAllChatflows)

View File

@ -1,10 +1,10 @@
import express from 'express'
import apikeyRouter from './apikey'
import assistantsRouter from './assistants'
import chatMessageRouter from './chat-messages'
import chatflowsRouter from './chatflows'
import chatflowsStreamingRouter from './chatflows-streaming'
import chatflowsUploadsRouter from './chatflows-uploads'
import chatMessageRouter from './chat-messages'
import componentsCredentialsRouter from './components-credentials'
import componentsCredentialsIconRouter from './components-credentials-icon'
import credentialsRouter from './credentials'
@ -12,10 +12,10 @@ import documentStoreRouter from './documentstore'
import feedbackRouter from './feedback'
import fetchLinksRouter from './fetch-links'
import flowConfigRouter from './flow-config'
import internalChatmessagesRouter from './internal-chat-messages'
import internalPredictionRouter from './internal-predictions'
import getUploadFileRouter from './get-upload-file'
import getUploadPathRouter from './get-upload-path'
import internalChatmessagesRouter from './internal-chat-messages'
import internalPredictionRouter from './internal-predictions'
import leadsRouter from './leads'
import loadPromptRouter from './load-prompts'
import marketplacesRouter from './marketplaces'
@ -27,18 +27,18 @@ import nodesRouter from './nodes'
import openaiAssistantsRouter from './openai-assistants'
import openaiAssistantsFileRouter from './openai-assistants-files'
import openaiAssistantsVectorStoreRouter from './openai-assistants-vector-store'
import pingRouter from './ping'
import predictionRouter from './predictions'
import promptListsRouter from './prompts-lists'
import publicChatbotRouter from './public-chatbots'
import publicChatflowsRouter from './public-chatflows'
import statsRouter from './stats'
import toolsRouter from './tools'
import upsertHistoryRouter from './upsert-history'
import variablesRouter from './variables'
import vectorRouter from './vectors'
import verifyRouter from './verify'
import versionRouter from './versions'
import upsertHistoryRouter from './upsert-history'
import pingRouter from './ping'
const router = express.Router()

View File

@ -1,19 +1,18 @@
import { StatusCodes } from 'http-status-codes'
import { InternalFlowiseError } from '../../errors/internalFlowiseError'
import { getRunningExpressApp } from '../../utils/getRunningExpressApp'
import { ChatflowType, IChatFlow } from '../../Interface'
import { ChatFlow } from '../../database/entities/ChatFlow'
import { getAppVersion, getTelemetryFlowObj, isFlowValidForStream, constructGraphs, getEndingNodes } from '../../utils'
import logger from '../../utils/logger'
import { removeFolderFromStorage } from 'flowise-components'
import { IReactFlowObject } from '../../Interface'
import { utilGetUploadsConfig } from '../../utils/getUploadsConfig'
import { StatusCodes } from 'http-status-codes'
import { ChatflowType, IChatFlow, IReactFlowObject } from '../../Interface'
import { ChatFlow } from '../../database/entities/ChatFlow'
import { ChatMessage } from '../../database/entities/ChatMessage'
import { ChatMessageFeedback } from '../../database/entities/ChatMessageFeedback'
import { UpsertHistory } from '../../database/entities/UpsertHistory'
import { containsBase64File, updateFlowDataWithFilePaths } from '../../utils/fileRepository'
import { InternalFlowiseError } from '../../errors/internalFlowiseError'
import { getErrorMessage } from '../../errors/utils'
import documentStoreService from '../../services/documentstore'
import { constructGraphs, getAppVersion, getEndingNodes, getTelemetryFlowObj, isFlowValidForStream } from '../../utils'
import { containsBase64File, updateFlowDataWithFilePaths } from '../../utils/fileRepository'
import { getRunningExpressApp } from '../../utils/getRunningExpressApp'
import { utilGetUploadsConfig } from '../../utils/getUploadsConfig'
import logger from '../../utils/logger'
// Check if chatflow valid for streaming
const checkIfChatflowIsValidForStreaming = async (chatflowId: string): Promise<any> => {
@ -198,6 +197,60 @@ const saveChatflow = async (newChatFlow: ChatFlow): Promise<any> => {
}
}
/**
 * Bulk-import chatflows (e.g. from an exported JSON file).
 *
 * Chatflows whose id already exists in the database are inserted with a
 * fresh id (and a " with new id" name suffix) instead of overwriting the
 * existing row. Every imported row is normalized to type CHATFLOW and its
 * flowData is re-serialized, which also validates that it is proper JSON.
 *
 * @param newChatflows parsed chatflow rows from the import file
 * @returns the TypeORM insert result
 * @throws InternalFlowiseError (500) wrapping any underlying failure
 */
const importChatflows = async (newChatflows: Partial<ChatFlow>[]): Promise<any> => {
    try {
        const appServer = getRunningExpressApp()

        // step 1 - reject an empty import file early
        if (newChatflows.length === 0) throw new Error('No chatflows in this file.')

        // step 2 - find which incoming ids already exist, using a
        // parameterized IN clause (never string-concatenate ids into SQL)
        const ids = newChatflows.map((newChatflow) => newChatflow.id).filter((id): id is string => !!id)
        const foundIds: string[] =
            ids.length === 0
                ? []
                : (
                      await appServer.AppDataSource.getRepository(ChatFlow)
                          .createQueryBuilder('cf')
                          .select('cf.id')
                          .where('cf.id IN (:...ids)', { ids })
                          .getMany()
                  ).map((row) => row.id)

        // step 3 - duplicates get their id cleared so the database assigns a
        // new one, with a name suffix so the user can tell the copies apart
        const prepChatflows: Partial<ChatFlow>[] = newChatflows.map((newChatflow) => {
            const id = newChatflow.id ?? ''
            const flowData = newChatflow.flowData ?? ''
            if (foundIds.includes(id)) {
                newChatflow.id = undefined
                newChatflow.name += ' with new id'
            }
            newChatflow.type = 'CHATFLOW'
            // round-trip through JSON.parse to reject malformed flowData before insert
            newChatflow.flowData = JSON.stringify(JSON.parse(flowData))
            return newChatflow
        })

        // step 4 - transactional insert of the whole batch
        return await appServer.AppDataSource.getRepository(ChatFlow).insert(prepChatflows)
    } catch (error) {
        throw new InternalFlowiseError(
            StatusCodes.INTERNAL_SERVER_ERROR,
            // fixed: message previously referenced "saveChatflows"
            `Error: chatflowsService.importChatflows - ${getErrorMessage(error)}`
        )
    }
}
const updateChatflow = async (chatflow: ChatFlow, updateChatFlow: ChatFlow): Promise<any> => {
try {
const appServer = getRunningExpressApp()
@ -299,6 +352,7 @@ export default {
getChatflowByApiKey,
getChatflowById,
saveChatflow,
importChatflows,
updateChatflow,
getSinglePublicChatflow,
getSinglePublicChatbotConfig

View File

@ -1,6 +1,6 @@
{
"name": "flowise-ui",
"version": "1.8.3",
"version": "1.8.4",
"license": "SEE LICENSE IN LICENSE.md",
"homepage": "https://flowiseai.com",
"author": {

View File

@ -10,6 +10,8 @@ const getSpecificChatflowFromPublicEndpoint = (id) => client.get(`/public-chatfl
const createNewChatflow = (body) => client.post(`/chatflows`, body)
const importChatflows = (body) => client.post(`/chatflows/importchatflows`, body)
const updateChatflow = (id, body) => client.put(`/chatflows/${id}`, body)
const deleteChatflow = (id) => client.delete(`/chatflows/${id}`)
@ -24,6 +26,7 @@ export default {
getSpecificChatflow,
getSpecificChatflowFromPublicEndpoint,
createNewChatflow,
importChatflows,
updateChatflow,
deleteChatflow,
getIsChatflowStreaming,

View File

@ -1,13 +1,15 @@
import { useState, useRef, useEffect } from 'react'
import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction, MENU_OPEN, REMOVE_DIRTY } from '@/store/actions'
import { sanitizeChatflows } from '@/utils/genericHelper'
import useNotifier from '@/utils/useNotifier'
import PropTypes from 'prop-types'
import { useSelector } from 'react-redux'
import { useEffect, useRef, useState } from 'react'
import { useDispatch, useSelector } from 'react-redux'
// material-ui
import { useTheme } from '@mui/material/styles'
import {
Box,
ButtonBase,
Avatar,
Box,
Button,
ButtonBase,
ClickAwayListener,
Divider,
List,
@ -18,20 +20,27 @@ import {
Popper,
Typography
} from '@mui/material'
import { useTheme } from '@mui/material/styles'
// third-party
import PerfectScrollbar from 'react-perfect-scrollbar'
// project imports
import MainCard from '@/ui-component/cards/MainCard'
import Transitions from '@/ui-component/extended/Transitions'
import AboutDialog from '@/ui-component/dialog/AboutDialog'
import Transitions from '@/ui-component/extended/Transitions'
// assets
import { IconLogout, IconSettings, IconInfoCircle } from '@tabler/icons-react'
import { IconFileExport, IconFileUpload, IconInfoCircle, IconLogout, IconSettings, IconX } from '@tabler/icons-react'
import './index.css'
//API
import chatFlowsApi from '@/api/chatflows'
// Hooks
import useApi from '@/hooks/useApi'
import { useLocation, useNavigate } from 'react-router-dom'
// ==============================|| PROFILE MENU ||============================== //
const ProfileSection = ({ username, handleLogout }) => {
@ -43,6 +52,17 @@ const ProfileSection = ({ username, handleLogout }) => {
const [aboutDialogOpen, setAboutDialogOpen] = useState(false)
const anchorRef = useRef(null)
const inputRef = useRef()
const navigate = useNavigate()
const location = useLocation()
// ==============================|| Snackbar ||============================== //
useNotifier()
const dispatch = useDispatch()
const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args))
const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args))
const handleClose = (event) => {
if (anchorRef.current && anchorRef.current.contains(event.target)) {
@ -55,6 +75,106 @@ const ProfileSection = ({ username, handleLogout }) => {
setOpen((prevOpen) => !prevOpen)
}
const errorFailed = (message) => {
enqueueSnackbar({
message: message,
options: {
key: new Date().getTime() + Math.random(),
variant: 'error',
persist: true,
action: (key) => (
<Button style={{ color: 'white' }} onClick={() => closeSnackbar(key)}>
<IconX />
</Button>
)
}
})
}
const importChatflowsApi = useApi(chatFlowsApi.importChatflows)
// Read the user-selected file as text, parse it as JSON, and hand the
// parsed chatflows to the import API.
const fileChange = (e) => {
    if (!e.target.files) return

    const file = e.target.files[0]
    const reader = new FileReader()
    reader.onload = (evt) => {
        if (!evt?.target?.result) {
            return
        }
        try {
            // JSON.parse throws on malformed input; without this guard the error
            // would be swallowed uncaught inside the FileReader callback
            const chatflows = JSON.parse(evt.target.result)
            importChatflowsApi.request(chatflows)
        } catch (error) {
            errorFailed(`Failed to import chatflows: selected file is not valid JSON`)
        }
    }
    reader.readAsText(file)
}
const importChatflowsSuccess = () => {
dispatch({ type: REMOVE_DIRTY })
enqueueSnackbar({
message: `Import chatflows successful`,
options: {
key: new Date().getTime() + Math.random(),
variant: 'success',
action: (key) => (
<Button style={{ color: 'white' }} onClick={() => closeSnackbar(key)}>
<IconX />
</Button>
)
}
})
}
// React to the import API settling: surface errors, or notify success and
// land the user on the chatflows list.
useEffect(() => {
    if (importChatflowsApi.error) {
        // errors without a server response (e.g. network failure) have no
        // `response` — fall back to the plain error message instead of throwing
        const message = importChatflowsApi.error.response?.data?.message ?? importChatflowsApi.error.message
        errorFailed(`Failed to import chatflows: ${message}`)
    }
    if (importChatflowsApi.data) {
        importChatflowsSuccess()
        // if current location is /chatflows, refresh the page
        if (location.pathname === '/chatflows') navigate(0)
        else {
            // if not redirect to /chatflows
            dispatch({ type: MENU_OPEN, id: 'chatflows' })
            navigate('/chatflows')
        }
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
}, [importChatflowsApi.error, importChatflowsApi.data])
// Opens the hidden file <input> so the user can pick a chatflows JSON file;
// the actual import happens in the input's onChange handler (fileChange).
const importAllChatflows = () => {
inputRef.current.click()
}
const getAllChatflowsApi = useApi(chatFlowsApi.getAllChatflows)
const exportChatflowsSuccess = () => {
dispatch({ type: REMOVE_DIRTY })
enqueueSnackbar({
message: `Export chatflows successful`,
options: {
key: new Date().getTime() + Math.random(),
variant: 'success',
action: (key) => (
<Button style={{ color: 'white' }} onClick={() => closeSnackbar(key)}>
<IconX />
</Button>
)
}
})
}
// React to the export API settling: surface errors, or serialize the sanitized
// chatflows to a pretty-printed JSON data URI and trigger a browser download.
useEffect(() => {
    if (getAllChatflowsApi.error) {
        // errors without a server response (e.g. network failure) have no
        // `response` — fall back to the plain error message instead of throwing
        const message = getAllChatflowsApi.error.response?.data?.message ?? getAllChatflowsApi.error.message
        errorFailed(`Failed to export Chatflows: ${message}`)
    }
    if (getAllChatflowsApi.data) {
        const sanitizedChatflows = sanitizeChatflows(getAllChatflowsApi.data)
        const dataStr = JSON.stringify({ Chatflows: sanitizedChatflows }, null, 2)
        const dataUri = 'data:application/json;charset=utf-8,' + encodeURIComponent(dataStr)

        const exportFileDefaultName = 'AllChatflows.json'

        // synthesize an <a download> click to save the file client-side
        const linkElement = document.createElement('a')
        linkElement.setAttribute('href', dataUri)
        linkElement.setAttribute('download', exportFileDefaultName)
        linkElement.click()
        exportChatflowsSuccess()
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
}, [getAllChatflowsApi.error, getAllChatflowsApi.data])
const prevOpen = useRef(open)
useEffect(() => {
if (prevOpen.current === true && open === false) {
@ -135,6 +255,29 @@ const ProfileSection = ({ username, handleLogout }) => {
}
}}
>
<ListItemButton
sx={{ borderRadius: `${customization.borderRadius}px` }}
onClick={() => {
getAllChatflowsApi.request()
}}
>
<ListItemIcon>
<IconFileExport stroke={1.5} size='1.3rem' />
</ListItemIcon>
<ListItemText primary={<Typography variant='body2'>Export Chatflows</Typography>} />
</ListItemButton>
<ListItemButton
sx={{ borderRadius: `${customization.borderRadius}px` }}
onClick={() => {
importAllChatflows()
}}
>
<ListItemIcon>
<IconFileUpload stroke={1.5} size='1.3rem' />
</ListItemIcon>
<ListItemText primary={<Typography variant='body2'>Import Chatflows</Typography>} />
</ListItemButton>
<input ref={inputRef} type='file' hidden onChange={fileChange} />
<ListItemButton
sx={{ borderRadius: `${customization.borderRadius}px` }}
onClick={() => {

View File

@ -340,6 +340,18 @@ export const getFolderName = (base64ArrayStr) => {
}
}
// Strip each chatflow down to the exportable fields (id, name, flowData),
// running its flowData through generateExportFlowData and re-serializing it
// pretty-printed for the export file.
export const sanitizeChatflows = (arrayChatflows) =>
    arrayChatflows.map(({ id, name, flowData }) => ({
        id,
        name,
        flowData: JSON.stringify(generateExportFlowData(JSON.parse(flowData)), null, 2)
    }))
export const generateExportFlowData = (flowData) => {
const nodes = flowData.nodes
const edges = flowData.edges