From 5598ba7980fb285a0f31414ca65483d23472fbfe Mon Sep 17 00:00:00 2001
From: shijianyue
Date: Wed, 7 Feb 2024 23:57:59 +0800
Subject: [PATCH] :sparkles: feat(llm): support Ollama AI Provider (local llm)

---
 .env.example                                  |  8 ++
 Dockerfile                                    |  3 +
 docs/Deployment/Environment-Variable.md       | 10 +++
 docs/Deployment/Environment-Variable.zh-CN.md | 10 +++
 src/app/api/chat/[provider]/agentRuntime.ts   | 14 ++++
 src/app/api/config/route.ts                   |  2 +
 src/app/api/errorResponse.ts                  |  3 +
 src/app/settings/llm/Ollama/index.tsx         | 75 +++++++++++++++++
 src/app/settings/llm/page.tsx                 |  7 ++
 src/components/ModelProviderIcon/index.tsx    |  6 +-
 src/config/modelProviders/index.ts            |  3 +
 src/config/modelProviders/ollama.ts           | 27 +++++++
 src/config/server/provider.ts                 |  6 ++
 src/const/settings.ts                         |  4 +
 src/libs/agent-runtime/error.ts               |  3 +
 src/libs/agent-runtime/index.ts               |  1 +
 src/libs/agent-runtime/ollama/index.ts        | 80 +++++++++++++++++++
 src/libs/agent-runtime/types/type.ts          |  1 +
 src/locales/default/common.ts                 |  1 +
 src/locales/default/setting.ts                | 16 ++++
 src/services/_auth.ts                         |  8 ++
 .../settings/selectors/modelProvider.ts       | 25 +++++-
 src/types/settings/modelProvider.ts           |  7 ++
 23 files changed, 317 insertions(+), 3 deletions(-)
 create mode 100644 src/app/settings/llm/Ollama/index.tsx
 create mode 100644 src/config/modelProviders/ollama.ts
 create mode 100644 src/libs/agent-runtime/ollama/index.ts

diff --git a/.env.example b/.env.example
index 80c71c6b77e4..80f55efd9c4d 100644
--- a/.env.example
+++ b/.env.example
@@ -59,6 +59,14 @@ OPENAI_API_KEY=sk-xxxxxxxxx
 #AWS_ACCESS_KEY_ID=xxxxxxxxxxxxxxxxxxx
 #AWS_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 
+########################################
+########### Ollama AI Service ##########
+########################################
+
+# You can use Ollama to pull and run LLMs locally; learn more at https://github.com/ollama/ollama
+# The local or remote Ollama service URL
+# OLLAMA_PROXY_URL=http://127.0.0.1:11434/v1
+
 ########################################
 ############ Market Service ############
 ########################################
diff --git a/Dockerfile b/Dockerfile
index 34ad12d2c921..798b7caae085 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -78,4 +78,7 @@ ENV ZHIPU_API_KEY ""
 # Moonshot
 ENV MOONSHOT_API_KEY ""
 
+# Ollama
+ENV OLLAMA_PROXY_URL ""
+
 CMD ["node", "server.js"]
diff --git a/docs/Deployment/Environment-Variable.md b/docs/Deployment/Environment-Variable.md
index fd05a4df69f2..866d9774aabc 100644
--- a/docs/Deployment/Environment-Variable.md
+++ b/docs/Deployment/Environment-Variable.md
@@ -18,6 +18,7 @@ LobeChat provides additional configuration options during deployment, which can
   - [Moonshot AI](#moonshot-ai)
   - [Google AI](#google-ai)
   - [AWS Bedrock](#aws-bedrock)
+  - [Ollama](#ollama)
 - [Plugin Service](#plugin-service)
   - [`PLUGINS_INDEX_URL`](#plugins_index_url)
   - [`PLUGIN_SETTINGS`](#plugin_settings)
@@ -208,6 +209,15 @@ If you need to use Azure OpenAI to provide model services, you can refer to the
 - Default Value: `us-east-1`
 - Example: `us-east-1`
 
+### Ollama
+
+#### `OLLAMA_PROXY_URL`
+
+- Type: Optional
+- Description: Used to enable the Ollama service provider. Once set, Ollama appears as a selectable model card on the language model settings page, where you can also specify custom language models.
+- Default Value: -
+- Example: `http://127.0.0.1:11434/v1`
+
 ## Plugin Service
 
 ### `PLUGINS_INDEX_URL`
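Since `OLLAMA_PROXY_URL` points at Ollama's OpenAI-compatible base URL, it is worth sanity-checking the value before wiring it into a deployment. A minimal sketch, assuming a locally running Ollama that exposes the standard OpenAI `GET /models` route (the URL below is the example value from the docs, not a required one):

```ts
// Probe an Ollama server through its OpenAI-compatible API.
const OLLAMA_PROXY_URL = 'http://127.0.0.1:11434/v1';

const listLocalModels = async (): Promise<string[]> => {
  const res = await fetch(`${OLLAMA_PROXY_URL}/models`);
  if (!res.ok) throw new Error(`Ollama unreachable: HTTP ${res.status}`);

  // OpenAI-compatible list shape: { object: 'list', data: [{ id: 'llama2', ... }] }
  const { data } = (await res.json()) as { data: { id: string }[] };
  return data.map((model) => model.id);
};

listLocalModels().then(console.log);
```

If this prints the models you have already pulled (e.g. via `ollama pull llama2`), the same URL should work as `OLLAMA_PROXY_URL`.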
diff --git a/docs/Deployment/Environment-Variable.zh-CN.md b/docs/Deployment/Environment-Variable.zh-CN.md
index 8d40e8aa98eb..ef699fc9e926 100644
--- a/docs/Deployment/Environment-Variable.zh-CN.md
+++ b/docs/Deployment/Environment-Variable.zh-CN.md
@@ -18,6 +18,7 @@ LobeChat 在部署时提供了一些额外的配置项,使用环境变量进
   - [Moonshot AI](#moonshot-ai)
   - [Google AI](#google-ai)
   - [AWS Bedrock](#aws-bedrock)
+  - [Ollama](#ollama)
 - [插件服务](#插件服务)
   - [`PLUGINS_INDEX_URL`](#plugins_index_url)
   - [`PLUGIN_SETTINGS`](#plugin_settings)
@@ -206,6 +207,15 @@ LobeChat 在部署时提供了一些额外的配置项,使用环境变量进
 - 默认值:`us-east-1`
 - 示例:`us-east-1`
 
+### Ollama
+
+#### `OLLAMA_PROXY_URL`
+
+- 类型:可选
+- 描述:用于启用 Ollama 服务,设置后可在语言模型列表内展示可选开源语言模型,也可以指定自定义语言模型
+- 默认值:-
+- 示例:`http://127.0.0.1:11434/v1`
+
 ## 插件服务
 
 ### `PLUGINS_INDEX_URL`
diff --git a/src/app/api/chat/[provider]/agentRuntime.ts b/src/app/api/chat/[provider]/agentRuntime.ts
index bd8bba2646ca..4e868df2d20a 100644
--- a/src/app/api/chat/[provider]/agentRuntime.ts
+++ b/src/app/api/chat/[provider]/agentRuntime.ts
@@ -6,6 +6,7 @@ import {
   LobeBedrockAI,
   LobeGoogleAI,
   LobeMoonshotAI,
+  LobeOllamaAI,
   LobeOpenAI,
   LobeRuntimeAI,
   LobeZhipuAI,
@@ -66,6 +67,12 @@ class AgentRuntime {
       case ModelProvider.Bedrock: {
         runtimeModel = this.initBedrock(payload);
+        break;
+      }
+
+      case ModelProvider.Ollama: {
+        runtimeModel = this.initOllama(payload);
+
         break;
       }
     }
@@ -138,6 +145,13 @@ class AgentRuntime {
 
     return new LobeBedrockAI({ accessKeyId, accessKeySecret, region });
   }
+
+  private static initOllama(payload: JWTPayload) {
+    const { OLLAMA_PROXY_URL } = getServerConfig();
+    const baseUrl = payload?.endpoint || OLLAMA_PROXY_URL;
+
+    return new LobeOllamaAI(baseUrl);
+  }
 }
 
 export default AgentRuntime;
diff --git a/src/app/api/config/route.ts b/src/app/api/config/route.ts
index a389ab182954..eea1978cbe90 100644
--- a/src/app/api/config/route.ts
+++ b/src/app/api/config/route.ts
@@ -14,6 +14,7 @@ export const GET = async () => {
     ENABLED_AWS_BEDROCK,
     ENABLED_GOOGLE,
     ENABLE_OAUTH_SSO,
+    ENABLE_OLLAMA,
   } = getServerConfig();
 
   const config: GlobalServerConfig = {
@@ -23,6 +24,7 @@ export const GET = async () => {
       bedrock: { enabled: ENABLED_AWS_BEDROCK },
       google: { enabled: ENABLED_GOOGLE },
       moonshot: { enabled: ENABLED_MOONSHOT },
+      ollama: { enabled: ENABLE_OLLAMA },
       zhipu: { enabled: ENABLED_ZHIPU },
     },
   };
diff --git a/src/app/api/errorResponse.ts b/src/app/api/errorResponse.ts
index 9dde96feb6ab..2a6748cbbe70 100644
--- a/src/app/api/errorResponse.ts
+++ b/src/app/api/errorResponse.ts
@@ -37,6 +37,9 @@ const getStatus = (errorType: ILobeAgentRuntimeErrorType | ErrorType) => {
     case AgentRuntimeErrorType.MoonshotBizError: {
       return 476;
     }
+    case AgentRuntimeErrorType.OllamaBizError: {
+      return 478;
+    }
   }
   return errorType as number;
 };
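`initOllama` above hands a single base URL to `LobeOllamaAI`, whose implementation (`src/libs/agent-runtime/ollama/index.ts`, 80 lines per the diffstat) is outside this excerpt. Because Ollama speaks the OpenAI wire protocol, the wrapper can plausibly reduce to the sketch below — the class body, error message, and streaming setup are illustrative, not the file's actual contents:

```ts
// Illustrative sketch only; the real LobeOllamaAI in this PR may differ.
import OpenAI from 'openai';

class LobeOllamaAI {
  private client: OpenAI;

  constructor(baseURL?: string) {
    if (!baseURL) throw new Error('Ollama base URL is required');

    // Ollama exposes an OpenAI-compatible API, so the official SDK can be
    // pointed at it directly; the key is unused but required by the SDK.
    this.client = new OpenAI({ apiKey: 'ollama', baseURL });
  }

  async chat(payload: { messages: OpenAI.Chat.ChatCompletionMessageParam[]; model: string }) {
    // Stream a completion from the locally served model, e.g. model: 'llama2'.
    return this.client.chat.completions.create({ ...payload, stream: true });
  }
}

export default LobeOllamaAI;
```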
diff --git a/src/app/settings/llm/Ollama/index.tsx b/src/app/settings/llm/Ollama/index.tsx
new file mode 100644
index 000000000000..d545c199eddc
--- /dev/null
+++ b/src/app/settings/llm/Ollama/index.tsx
@@ -0,0 +1,75 @@
+import { Ollama } from '@lobehub/icons';
+import { Form, type ItemGroup } from '@lobehub/ui';
+import { Form as AntForm, Input, Switch } from 'antd';
+import { useTheme } from 'antd-style';
+import { debounce } from 'lodash-es';
+import { memo } from 'react';
+import { useTranslation } from 'react-i18next';
+import { Flexbox } from 'react-layout-kit';
+
+import { FORM_STYLE } from '@/const/layoutTokens';
+import { ModelProvider } from '@/libs/agent-runtime';
+import { useGlobalStore } from '@/store/global';
+import { modelProviderSelectors } from '@/store/global/selectors';
+
+import Checker from '../Checker';
+import { LLMProviderBaseUrlKey, LLMProviderConfigKey } from '../const';
+import { useSyncSettings } from '../useSyncSettings';
+
+const providerKey = 'ollama';
+
+const OllamaProvider = memo(() => {
+  const { t } = useTranslation('setting');
+  const [form] = AntForm.useForm();
+  const theme = useTheme();
+  const [toggleProviderEnabled, setSettings] = useGlobalStore((s) => [
+    s.toggleProviderEnabled,
+    s.setSettings,
+  ]);
+  const enabled = useGlobalStore(modelProviderSelectors.enableOllama);
+
+  useSyncSettings(form);
+
+  const model: ItemGroup = {
+    children: [
+      {
+        children: <Input allowClear />,
+        desc: t('llm.Ollama.endpoint.desc'),
+        label: t('llm.Ollama.endpoint.title'),
+        name: [LLMProviderConfigKey, providerKey, LLMProviderBaseUrlKey],
+      },
+      {
+        children: <Input allowClear />,
+        desc: t('llm.Ollama.customModelName.desc'),
+        label: t('llm.Ollama.customModelName.title'),
+        name: [LLMProviderConfigKey, providerKey, 'customModelName'],
+      },
+      {
+        children: <Checker model={'llama2'} provider={ModelProvider.Ollama} />,
+        desc: t('llm.Ollama.checker.desc'),
+        label: t('llm.checker.title'),
+        minWidth: undefined,
+      },
+    ],
+    defaultActive: enabled,
+    extra: (
+      <Switch
+        onChange={(enabled) => {
+          toggleProviderEnabled(providerKey, enabled);
+        }}
+        value={enabled}
+      />
+    ),
+    title: (
+      <Flexbox align={'center'} gap={8} horizontal>
+        <Ollama.Combine color={theme.isDarkMode ? theme.colorText : Ollama.colorPrimary} size={24} />
+      </Flexbox>
+    ),
+  };
+
+  return (
+    <Form form={form} items={[model]} onValuesChange={debounce(setSettings, 100)} {...FORM_STYLE} />
+  );
+});
+
+export default OllamaProvider;
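The component reads `modelProviderSelectors.enableOllama`, and the settings page below reads `enableOllamaFromServerConfig`; the actual selector additions (`src/store/global/settings/selectors/modelProvider.ts`, +25 lines in the diffstat) are not part of this excerpt. A rough sketch of the shapes they would need, assuming the user toggle lives under `languageModel.ollama` and the `/api/config` payload is mirrored into `serverConfig` — the store paths here are assumptions:

```ts
// Assumed store shape; field names are illustrative, not the PR's actual types.
interface GlobalStore {
  serverConfig: { languageModel?: { ollama?: { enabled?: boolean } } };
  settings: { languageModel: { ollama: { enabled: boolean } } };
}

// User-facing toggle persisted in settings.
const enableOllama = (s: GlobalStore) => s.settings.languageModel.ollama.enabled;

// Deploy-time switch, true when OLLAMA_PROXY_URL was set on the server.
const enableOllamaFromServerConfig = (s: GlobalStore) =>
  s.serverConfig.languageModel?.ollama?.enabled ?? false;

export const modelProviderSelectors = { enableOllama, enableOllamaFromServerConfig };
```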
diff --git a/src/app/settings/llm/page.tsx b/src/app/settings/llm/page.tsx
index 935bc708059b..4efbc01282aa 100644
--- a/src/app/settings/llm/page.tsx
+++ b/src/app/settings/llm/page.tsx
@@ -7,17 +7,23 @@ import { Trans, useTranslation } from 'react-i18next';
 import Footer from '@/app/settings/features/Footer';
 import PageTitle from '@/components/PageTitle';
 import { MORE_MODEL_PROVIDER_REQUEST_URL } from '@/const/url';
+import { useGlobalStore } from '@/store/global';
 import { useSwitchSideBarOnInit } from '@/store/global/hooks/useSwitchSettingsOnInit';
 import { SettingsTabs } from '@/store/global/initialState';
+import { modelProviderSelectors } from '@/store/global/selectors';
 
 import Bedrock from './Bedrock';
 import Google from './Google';
 import Moonshot from './Moonshot';
+import Ollama from './Ollama';
 import OpenAI from './OpenAI';
 import Zhipu from './Zhipu';
 
 export default memo(() => {
   useSwitchSideBarOnInit(SettingsTabs.LLM);
+  const enableOllamaFromServerConfig = useGlobalStore(
+    modelProviderSelectors.enableOllamaFromServerConfig,
+  );
   const { t } = useTranslation('setting');
   return (
     <>
@@ -28,6 +34,7 @@ export default memo(() => {
       <Moonshot />
       <Google />
       <Bedrock />
+      {enableOllamaFromServerConfig ? <Ollama /> : null}
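The excerpt is truncated here; among the files not shown is the new provider card, `src/config/modelProviders/ollama.ts` (27 lines per the diffstat). Judging from the sibling providers, it would register a default open-source model list for the card rendered in settings — roughly as below, with the model entries and field names being assumptions:

```ts
// Hypothetical provider card; the models actually listed in the PR may differ.
import { ModelProviderCard } from '@/types/llm';

const Ollama: ModelProviderCard = {
  chatModels: [
    { displayName: 'Llama 2', id: 'llama2', tokens: 4096 },
    { displayName: 'Mistral', id: 'mistral', tokens: 8192 },
  ],
  id: 'ollama',
};

export default Ollama;
```

Any model pulled locally but missing from such a list can still be reached through the `customModelName` field added in the settings form above.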