Feature/externalize model list (#2113)
* fixes

* test json

* test json

* externalize model list

* externalize model list

* correcting awsChatBedRock Name

* lint fixes

* externalize models

* externalize chat models

* externalize chat models

* externalize llm models

* externalize llm models

* lint fixes

* addition of models for ChatMistral

* updating version numbers for all impacted nodes.

* marketplace template updates

* update embeddings, marketplaces, add chatCohere

* config changes for new env variable

* removal of local models.json

* update models json file

---------

Co-authored-by: Henry <[email protected]>
vinodkiran and HenryHengZJ committed Apr 12, 2024
1 parent 4daf29d commit 788d40f
Showing 76 changed files with 789 additions and 4,044 deletions.
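
The recurring pattern in the node diffs below replaces hard-coded `options` arrays with `asyncOptions` fields that a shared loader (`getModels` / `getRegions` from `src/modelLoader`) resolves at runtime. The loader itself is among the 76 changed files but is not expanded in this view, so the following is only a hypothetical sketch of the behaviour the other changes imply: read `MODEL_LIST_CONFIG_JSON` when it is set, otherwise fall back to the packaged `models.json`. Only `getModels`, `getRegions` and `MODEL_TYPE` appear in the diffs; every other name and the file layout are assumptions.

```ts
// Hypothetical reconstruction of the shared model loader; not the actual
// packages/components/src/modelLoader.ts from this commit.
import { promises as fs } from 'fs'
import * as path from 'path'

export enum MODEL_TYPE {
    CHAT = 'chat',
    LLM = 'llm',
    EMBEDDING = 'embedding'
}

// Matches the INodeOptionsValue usage in the node diffs (shape assumed).
export interface INodeOptionsValue {
    label: string
    name: string
    description?: string
}

interface ModelConfigEntry {
    name: string // node name, e.g. 'awsChatBedrock'
    models: INodeOptionsValue[]
    regions?: INodeOptionsValue[]
}

// Config grouped by model type ('chat', 'llm', 'embedding'); assumed shape.
type ModelListConfig = Record<string, ModelConfigEntry[]>

async function loadModelListConfig(): Promise<ModelListConfig> {
    // New env variable from this commit: prefer the user's local file when set,
    // otherwise fall back to the models.json packaged with the components.
    const overridePath = process.env.MODEL_LIST_CONFIG_JSON
    const filePath = overridePath ?? path.join(__dirname, '..', 'models.json')
    const raw = await fs.readFile(filePath, 'utf8')
    return JSON.parse(raw) as ModelListConfig
}

export async function getModels(type: MODEL_TYPE, nodeName: string): Promise<INodeOptionsValue[]> {
    const config = await loadModelListConfig()
    return config[type]?.find((entry) => entry.name === nodeName)?.models ?? []
}

export async function getRegions(type: MODEL_TYPE, nodeName: string): Promise<INodeOptionsValue[]> {
    const config = await loadModelListConfig()
    return config[type]?.find((entry) => entry.name === nodeName)?.regions ?? []
}
```
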
1 change: 1 addition & 0 deletions CONTRIBUTING-ZH.md
@@ -141,6 +141,7 @@ Flowise 支持不同的环境变量来配置您的实例。您可以在 `package
| SECRETKEY_PATH | 保存加密密钥(用于加密/解密凭据)的位置 | 字符串 | `your-path/Flowise/packages/server` |
| FLOWISE_SECRETKEY_OVERWRITE | 加密密钥用于替代存储在 SECRETKEY_PATH 中的密钥 | 字符串 |
| DISABLE_FLOWISE_TELEMETRY | 关闭遥测 | 字符串 |
| MODEL_LIST_CONFIG_JSON | 加载模型的位置 | 字符串 | `/your_model_list_config_file_path` |

您也可以在使用 `npx` 时指定环境变量。例如:

1 change: 1 addition & 0 deletions CONTRIBUTING.md
@@ -147,6 +147,7 @@ Flowise support different environment variables to configure your instance. You
| SECRETKEY_PATH | Location where encryption key (used to encrypt/decrypt credentials) is saved | String | `your-path/Flowise/packages/server` |
| FLOWISE_SECRETKEY_OVERWRITE | Encryption key to be used instead of the key stored in SECRETKEY_PATH | String |
| DISABLE_FLOWISE_TELEMETRY | Turn off telemetry | Boolean |
| MODEL_LIST_CONFIG_JSON | File path to load list of models from your local config file | String | `/your_model_list_config_file_path` |

You can also specify the env variables when using `npx`. For example:

6 changes: 5 additions & 1 deletion docker/.env.example
@@ -33,4 +33,8 @@ BLOB_STORAGE_PATH=/root/.flowise/storage
# LANGCHAIN_API_KEY=your_api_key
# LANGCHAIN_PROJECT=your_project

# DISABLE_FLOWISE_TELEMETRY=true
# DISABLE_FLOWISE_TELEMETRY=true

# Uncomment the following line to enable model list config and load the list of models from your local config file
# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format
# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path
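
The comment above defers to the packaged `models.json` for the exact format. As a rough illustration only (the linked file is the source of truth), the `getModels(MODEL_TYPE.CHAT, 'awsChatBedrock')` and `getRegions(...)` calls in the node diffs below suggest entries grouped by model type and keyed by node name, each carrying `label`/`name` pairs; the values here are copied from the options removed in those diffs:

```ts
// Illustrative object only; a file referenced by MODEL_LIST_CONFIG_JSON would be
// the plain JSON equivalent of this. Check the linked models.json for the real schema.
const exampleModelListConfig = {
    chat: [
        {
            name: 'awsChatBedrock',
            regions: [{ label: 'us-east-1', name: 'us-east-1' }],
            models: [{ label: 'anthropic.claude-3-haiku', name: 'anthropic.claude-3-haiku-20240307-v1:0' }]
        },
        {
            name: 'chatAnthropic',
            models: [{ label: 'claude-3-haiku', name: 'claude-3-haiku-20240307' }]
        }
    ],
    llm: [],
    embedding: []
}

export default exampleModelListConfig
```
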
1 change: 1 addition & 0 deletions docker/docker-compose.yml
@@ -28,6 +28,7 @@ services:
- LOG_PATH=${LOG_PATH}
- BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH}
- DISABLE_FLOWISE_TELEMETRY=${DISABLE_FLOWISE_TELEMETRY}
- MODEL_LIST_CONFIG_JSON=${MODEL_LIST_CONFIG_JSON}
ports:
- '${PORT}:${PORT}'
volumes:
69 changes: 17 additions & 52 deletions packages/components/nodes/chatmodels/AWSBedrock/AWSChatBedrock.ts
@@ -1,9 +1,10 @@
import { BaseCache } from '@langchain/core/caches'
import { BaseChatModelParams } from '@langchain/core/language_models/chat_models'
import { BaseBedrockInput } from '@langchain/community/dist/utils/bedrock'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { BedrockChat } from './FlowiseAWSChatBedrock'
import { getModels, getRegions, MODEL_TYPE } from '../../../src/modelLoader'

/**
* @author Michael Connor <[email protected]>
@@ -23,7 +24,7 @@ class AWSChatBedrock_ChatModels implements INode {
constructor() {
this.label = 'AWS ChatBedrock'
this.name = 'awsChatBedrock'
this.version = 4.0
this.version = 5.0
this.type = 'AWSChatBedrock'
this.icon = 'aws.svg'
this.category = 'Chat Models'
@@ -46,61 +47,15 @@ class AWSChatBedrock_ChatModels implements INode {
{
label: 'Region',
name: 'region',
type: 'options',
options: [
{ label: 'af-south-1', name: 'af-south-1' },
{ label: 'ap-east-1', name: 'ap-east-1' },
{ label: 'ap-northeast-1', name: 'ap-northeast-1' },
{ label: 'ap-northeast-2', name: 'ap-northeast-2' },
{ label: 'ap-northeast-3', name: 'ap-northeast-3' },
{ label: 'ap-south-1', name: 'ap-south-1' },
{ label: 'ap-south-2', name: 'ap-south-2' },
{ label: 'ap-southeast-1', name: 'ap-southeast-1' },
{ label: 'ap-southeast-2', name: 'ap-southeast-2' },
{ label: 'ap-southeast-3', name: 'ap-southeast-3' },
{ label: 'ap-southeast-4', name: 'ap-southeast-4' },
{ label: 'ap-southeast-5', name: 'ap-southeast-5' },
{ label: 'ap-southeast-6', name: 'ap-southeast-6' },
{ label: 'ca-central-1', name: 'ca-central-1' },
{ label: 'ca-west-1', name: 'ca-west-1' },
{ label: 'cn-north-1', name: 'cn-north-1' },
{ label: 'cn-northwest-1', name: 'cn-northwest-1' },
{ label: 'eu-central-1', name: 'eu-central-1' },
{ label: 'eu-central-2', name: 'eu-central-2' },
{ label: 'eu-north-1', name: 'eu-north-1' },
{ label: 'eu-south-1', name: 'eu-south-1' },
{ label: 'eu-south-2', name: 'eu-south-2' },
{ label: 'eu-west-1', name: 'eu-west-1' },
{ label: 'eu-west-2', name: 'eu-west-2' },
{ label: 'eu-west-3', name: 'eu-west-3' },
{ label: 'il-central-1', name: 'il-central-1' },
{ label: 'me-central-1', name: 'me-central-1' },
{ label: 'me-south-1', name: 'me-south-1' },
{ label: 'sa-east-1', name: 'sa-east-1' },
{ label: 'us-east-1', name: 'us-east-1' },
{ label: 'us-east-2', name: 'us-east-2' },
{ label: 'us-gov-east-1', name: 'us-gov-east-1' },
{ label: 'us-gov-west-1', name: 'us-gov-west-1' },
{ label: 'us-west-1', name: 'us-west-1' },
{ label: 'us-west-2', name: 'us-west-2' }
],
type: 'asyncOptions',
loadMethod: 'listRegions',
default: 'us-east-1'
},
{
label: 'Model Name',
name: 'model',
type: 'options',
options: [
{ label: 'anthropic.claude-3-haiku', name: 'anthropic.claude-3-haiku-20240307-v1:0' },
{ label: 'anthropic.claude-3-sonnet', name: 'anthropic.claude-3-sonnet-20240229-v1:0' },
{ label: 'anthropic.claude-instant-v1', name: 'anthropic.claude-instant-v1' },
{ label: 'anthropic.claude-v2:1', name: 'anthropic.claude-v2:1' },
{ label: 'anthropic.claude-v2', name: 'anthropic.claude-v2' },
{ label: 'meta.llama2-13b-chat-v1', name: 'meta.llama2-13b-chat-v1' },
{ label: 'meta.llama2-70b-chat-v1', name: 'meta.llama2-70b-chat-v1' },
{ label: 'mistral.mistral-7b-instruct-v0:2', name: 'mistral.mistral-7b-instruct-v0:2' },
{ label: 'mistral.mixtral-8x7b-instruct-v0:1', name: 'mistral.mixtral-8x7b-instruct-v0:1' }
],
type: 'asyncOptions',
loadMethod: 'listModels',
default: 'anthropic.claude-3-haiku'
},
{
@@ -142,6 +97,16 @@ class AWSChatBedrock_ChatModels implements INode {
]
}

//@ts-ignore
loadMethods = {
async listModels(): Promise<INodeOptionsValue[]> {
return await getModels(MODEL_TYPE.CHAT, 'awsChatBedrock')
},
async listRegions(): Promise<INodeOptionsValue[]> {
return await getRegions(MODEL_TYPE.CHAT, 'awsChatBedrock')
}
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const iRegion = nodeData.inputs?.region as string
const iModel = nodeData.inputs?.model as string
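
The `asyncOptions` inputs in the AWS ChatBedrock diff above only name a `loadMethod`; the actual lookup happens outside the node. The fragment below is not Flowise's server or UI code, just a minimal hypothetical driver for the contract that diff relies on: find the named method on the node's `loadMethods` and use its result to populate the dropdown.

```ts
// Hypothetical resolver for an asyncOptions parameter; the types are simplified
// stand-ins, not Flowise's real INode/INodeParams interfaces.
interface OptionValue {
    label: string
    name: string
    description?: string
}

interface NodeParam {
    name: string
    type: string
    loadMethod?: string
}

interface NodeWithLoadMethods {
    inputs: NodeParam[]
    loadMethods?: Record<string, () => Promise<OptionValue[]>>
}

async function resolveAsyncOptions(node: NodeWithLoadMethods, paramName: string): Promise<OptionValue[]> {
    const param = node.inputs.find((p) => p.name === paramName)
    if (!param || param.type !== 'asyncOptions' || !param.loadMethod) return []
    const loader = node.loadMethods?.[param.loadMethod]
    return loader ? await loader() : []
}

// For the node above, resolveAsyncOptions(node, 'region') would end up calling
// listRegions(), and resolveAsyncOptions(node, 'model') would call listModels().
```
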
@@ -1,9 +1,10 @@
import { AzureOpenAIInput, ChatOpenAI as LangchainChatOpenAI, OpenAIChatInput } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatOpenAI } from '../ChatOpenAI/FlowiseChatOpenAI'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'

class AzureChatOpenAI_ChatModels implements INode {
label: string
@@ -20,7 +21,7 @@ class AzureChatOpenAI_ChatModels implements INode {
constructor() {
this.label = 'Azure ChatOpenAI'
this.name = 'azureChatOpenAI'
this.version = 3.0
this.version = 4.0
this.type = 'AzureChatOpenAI'
this.icon = 'Azure.svg'
this.category = 'Chat Models'
@@ -42,31 +43,8 @@ class AzureChatOpenAI_ChatModels implements INode {
{
label: 'Model Name',
name: 'modelName',
type: 'options',
options: [
{
label: 'gpt-4',
name: 'gpt-4'
},
{
label: 'gpt-4-32k',
name: 'gpt-4-32k'
},
{
label: 'gpt-35-turbo',
name: 'gpt-35-turbo'
},
{
label: 'gpt-35-turbo-16k',
name: 'gpt-35-turbo-16k'
},
{
label: 'gpt-4-vision-preview',
name: 'gpt-4-vision-preview'
}
],
default: 'gpt-35-turbo',
optional: true
type: 'asyncOptions',
loadMethod: 'listModels'
},
{
label: 'Temperature',
@@ -151,6 +129,13 @@ class AzureChatOpenAI_ChatModels implements INode {
]
}

//@ts-ignore
loadMethods = {
async listModels(): Promise<INodeOptionsValue[]> {
return await getModels(MODEL_TYPE.CHAT, 'azureChatOpenAI')
}
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const modelName = nodeData.inputs?.modelName as string
const temperature = nodeData.inputs?.temperature as string
@@ -1,6 +1,7 @@
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { OpenAI, ALL_AVAILABLE_OPENAI_MODELS } from 'llamaindex'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'

interface AzureOpenAIConfig {
apiKey?: string
@@ -25,7 +26,7 @@ class AzureChatOpenAI_LlamaIndex_ChatModels implements INode {
constructor() {
this.label = 'AzureChatOpenAI'
this.name = 'azureChatOpenAI_LlamaIndex'
this.version = 1.0
this.version = 2.0
this.type = 'AzureChatOpenAI'
this.icon = 'Azure.svg'
this.category = 'Chat Models'
@@ -42,27 +43,9 @@ class AzureChatOpenAI_LlamaIndex_ChatModels implements INode {
{
label: 'Model Name',
name: 'modelName',
type: 'options',
options: [
{
label: 'gpt-4',
name: 'gpt-4'
},
{
label: 'gpt-4-32k',
name: 'gpt-4-32k'
},
{
label: 'gpt-3.5-turbo',
name: 'gpt-3.5-turbo'
},
{
label: 'gpt-3.5-turbo-16k',
name: 'gpt-3.5-turbo-16k'
}
],
default: 'gpt-3.5-turbo-16k',
optional: true
type: 'asyncOptions',
loadMethod: 'listModels',
default: 'gpt-3.5-turbo-16k'
},
{
label: 'Temperature',
@@ -99,6 +82,13 @@ class AzureChatOpenAI_LlamaIndex_ChatModels implements INode {
]
}

//@ts-ignore
loadMethods = {
async listModels(): Promise<INodeOptionsValue[]> {
return await getModels(MODEL_TYPE.CHAT, 'azureChatOpenAI_LlamaIndex')
}
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const modelName = nodeData.inputs?.modelName as keyof typeof ALL_AVAILABLE_OPENAI_MODELS
const temperature = nodeData.inputs?.temperature as string
50 changes: 13 additions & 37 deletions packages/components/nodes/chatmodels/ChatAnthropic/ChatAnthropic.ts
@@ -1,9 +1,10 @@
import { AnthropicInput, ChatAnthropic as LangchainChatAnthropic } from '@langchain/anthropic'
import { BaseCache } from '@langchain/core/caches'
import { BaseLLMParams } from '@langchain/core/language_models/llms'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatAnthropic } from './FlowiseChatAnthropic'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'

class ChatAnthropic_ChatModels implements INode {
label: string
@@ -20,7 +21,7 @@ class ChatAnthropic_ChatModels implements INode {
constructor() {
this.label = 'ChatAnthropic'
this.name = 'chatAnthropic'
this.version = 5.0
this.version = 6.0
this.type = 'ChatAnthropic'
this.icon = 'Anthropic.svg'
this.category = 'Chat Models'
@@ -42,41 +43,9 @@ class ChatAnthropic_ChatModels implements INode {
{
label: 'Model Name',
name: 'modelName',
type: 'options',
options: [
{
label: 'claude-3-haiku',
name: 'claude-3-haiku-20240307',
description: 'Fastest and most compact model, designed for near-instant responsiveness'
},
{
label: 'claude-3-opus',
name: 'claude-3-opus-20240229',
description: 'Most powerful model for highly complex tasks'
},
{
label: 'claude-3-sonnet',
name: 'claude-3-sonnet-20240229',
description: 'Ideal balance of intelligence and speed for enterprise workloads'
},
{
label: 'claude-2.0 (legacy)',
name: 'claude-2.0',
description: 'Claude 2 latest major version, automatically get updates to the model as they are released'
},
{
label: 'claude-2.1 (legacy)',
name: 'claude-2.1',
description: 'Claude 2 latest full version'
},
{
label: 'claude-instant-1.2 (legacy)',
name: 'claude-instant-1.2',
description: 'Claude Instant latest major version, automatically get updates to the model as they are released'
}
],
default: 'claude-3-haiku',
optional: true
type: 'asyncOptions',
loadMethod: 'listModels',
default: 'claude-3-haiku'
},
{
label: 'Temperature',
@@ -122,6 +91,13 @@ class ChatAnthropic_ChatModels implements INode {
]
}

//@ts-ignore
loadMethods = {
async listModels(): Promise<INodeOptionsValue[]> {
return await getModels(MODEL_TYPE.CHAT, 'chatAnthropic')
}
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string