diff --git a/frontend/.env.example b/frontend/.env.example
index 149cc8e8..e072464f 100644
--- a/frontend/.env.example
+++ b/frontend/.env.example
@@ -1,5 +1,5 @@
# add a access code to lock your lobe-chat application, you can set a long password to avoid leaking. If this value contains a comma, it is a password array.
-#ACCESS_CODE=lobe66
+# ACCESS_CODE=lobe66
# add your custom model name, multi model separate by comma. for example gpt-3.5-1106,gpt-4-1106
# CUSTOM_MODELS=model1,model2,model3
@@ -14,7 +14,7 @@
########################################
# you openai api key
OPENAI_API_KEY=sk-xxxxxxxxx
# use a proxy to connect to the OpenAI API
# OPENAI_PROXY_URL=https://api.openai.com/v1
@@ -40,27 +40,27 @@ OPENAI_API_KEY=sk-xxxxxxxxx
############ ZhiPu AI Service ##########
########################################
-#ZHIPU_API_KEY=xxxxxxxxxxxxxxxxxxx.xxxxxxxxxxxxx
+# ZHIPU_API_KEY=xxxxxxxxxxxxxxxxxxx.xxxxxxxxxxxxx
########################################
########## Moonshot AI Service #########
########################################
-#MOONSHOT_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# MOONSHOT_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
########################################
########### Google AI Service ##########
########################################
-#GOOGLE_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# GOOGLE_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
########################################
######### AWS Bedrock Service ##########
########################################
-#AWS_REGION=us-east-1
-#AWS_ACCESS_KEY_ID=xxxxxxxxxxxxxxxxxxx
-#AWS_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# AWS_REGION=us-east-1
+# AWS_ACCESS_KEY_ID=xxxxxxxxxxxxxxxxxxx
+# AWS_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
########################################
######### Ollama AI Service ##########
@@ -73,19 +73,19 @@ OPENAI_API_KEY=sk-xxxxxxxxx
########### Mistral AI Service ##########
########################################
-#MISTRAL_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# MISTRAL_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
########################################
######### Perplexity Service ##########
########################################
-#PERPLEXITY_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# PERPLEXITY_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
########################################
######### Anthropic Service ##########
########################################
-#ANTHROPIC_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+# ANTHROPIC_API_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxx
########################################
############ Market Service ############
@@ -98,6 +98,9 @@ OPENAI_API_KEY=sk-xxxxxxxxx
############ Plugin Service ############
########################################
+# The local or remote ChatChat service URL, if you want to use ChatChat
+CHATCHAT_PROXY_URL=http://localhost:7861/v1
+
# The LobeChat plugins store index url
# PLUGINS_INDEX_URL=https://chat-plugins.lobehub.com
diff --git a/frontend/src/app/api/chat/[provider]/agentRuntime.ts b/frontend/src/app/api/chat/[provider]/agentRuntime.ts
index d681036a..8bef5918 100644
--- a/frontend/src/app/api/chat/[provider]/agentRuntime.ts
+++ b/frontend/src/app/api/chat/[provider]/agentRuntime.ts
@@ -20,6 +20,7 @@ import {
LobePerplexityAI,
LobeRuntimeAI,
LobeZhipuAI,
+ LobeChatChatAI,
ModelProvider,
} from '@/libs/agent-runtime';
import { TraceClient } from '@/libs/traces';
@@ -167,6 +168,11 @@ class AgentRuntime {
runtimeModel = this.initMistral(payload);
break;
}
+
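+      // ChatChat: a local/remote ChatChat service addressed only by base URL (see CHATCHAT_PROXY_URL)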
+ case ModelProvider.ChatChat: {
+ runtimeModel = this.initChatChat(payload);
+ break;
+ }
}
return new AgentRuntime(runtimeModel);
@@ -268,6 +274,13 @@ class AgentRuntime {
return new LobeMistralAI({ apiKey });
}
+
+ private static initChatChat(payload: JWTPayload) {
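+    // Only a base URL is needed: prefer a user-supplied endpoint, then fall back to the server-side CHATCHAT_PROXY_URL.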
+ const { CHATCHAT_PROXY_URL } = getServerConfig();
+ const baseURL = payload?.endpoint || CHATCHAT_PROXY_URL;
+
+ return new LobeChatChatAI({ baseURL });
+ }
}
export default AgentRuntime;
diff --git a/frontend/src/app/chat/features/SessionListContent/ListItem/index.tsx b/frontend/src/app/chat/features/SessionListContent/ListItem/index.tsx
index e1b5cda6..a39bf1d0 100644
--- a/frontend/src/app/chat/features/SessionListContent/ListItem/index.tsx
+++ b/frontend/src/app/chat/features/SessionListContent/ListItem/index.tsx
@@ -3,6 +3,7 @@ import { useHover } from 'ahooks';
import { createStyles, useResponsive } from 'antd-style';
import { memo, useMemo, useRef } from 'react';
import Avatar from '@/components/Avatar';
+
const { Item } = List;
const useStyles = createStyles(({ css, token, responsive }) => {
diff --git a/frontend/src/app/settings/(mobile)/features/Header/Home.tsx b/frontend/src/app/settings/(mobile)/features/Header/Home.tsx
index 9d06c51e..a5814358 100644
--- a/frontend/src/app/settings/(mobile)/features/Header/Home.tsx
+++ b/frontend/src/app/settings/(mobile)/features/Header/Home.tsx
@@ -1,6 +1,7 @@
import { MobileNavBar } from '@lobehub/ui';
import { memo } from 'react';
import Logo from '@/components/Logo';
+
const Header = memo(() => {
  return <MobileNavBar center={<Logo type={'text'} />} />;
});
diff --git a/frontend/src/app/settings/llm/ChatChat/index.tsx b/frontend/src/app/settings/llm/ChatChat/index.tsx
new file mode 100644
index 00000000..5d7c82e8
--- /dev/null
+++ b/frontend/src/app/settings/llm/ChatChat/index.tsx
@@ -0,0 +1,65 @@
+import { Input, Flex } from 'antd';
+import { useTheme } from 'antd-style';
+import { memo } from 'react';
+import { useTranslation } from 'react-i18next';
+import Avatar from 'next/image';
+
+import { imageUrl } from '@/const/url';
+
+import { ModelProvider } from '@/libs/agent-runtime';
+
+import Checker from '../components/Checker';
+import ProviderConfig from '../components/ProviderConfig';
+import { LLMProviderBaseUrlKey, LLMProviderConfigKey } from '../const';
+
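+// Settings for this provider are persisted under [LLMProviderConfigKey, 'chatchat'] in the user's LLM config.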
+const providerKey = 'chatchat';
+
+const ChatChatProvider = memo(() => {
+ const { t } = useTranslation('setting');
+ const theme = useTheme();
+
+  return (
+    <ProviderConfig
+      configItems={[
+        {
+          children: <Input allowClear placeholder={'http://localhost:7861/v1'} />,
+          desc: t('llm.ChatChat.endpoint.desc'),
+          label: t('llm.ChatChat.endpoint.title'),
+          name: [LLMProviderConfigKey, providerKey, LLMProviderBaseUrlKey],
+        },
+        {
+          children: (
+            <Input allowClear placeholder={'model1,model2,model3'} />
+          ),
+          desc: t('llm.ChatChat.customModelName.desc'),
+          label: t('llm.ChatChat.customModelName.title'),
+          name: [LLMProviderConfigKey, providerKey, 'customModelName'],
+        },
+        {
+          // TODO: point the checker at a model actually served by your ChatChat instance
+          children: <Checker model={'chatglm3-6b'} provider={ModelProvider.ChatChat} />,
+          desc: t('llm.ChatChat.checker.desc'),
+          label: t('llm.checker.title'),
+          minWidth: undefined,
+        },
+      ]}
+      provider={providerKey}
+      title={
+        <Flex align={'center'} gap={8} style={{ color: theme.colorText }}>
+          {/* TODO: replace with the ChatChat logo asset */}
+          <Avatar alt={'ChatChat'} height={32} src={imageUrl('chatchat.png')} width={32} />
+          {'ChatChat'}
+        </Flex>
+      }
+    />
+  );
+});
+
+export default ChatChatProvider;
diff --git a/frontend/src/app/settings/llm/index.tsx b/frontend/src/app/settings/llm/index.tsx
index 72ba4868..c085bc39 100644
--- a/frontend/src/app/settings/llm/index.tsx
+++ b/frontend/src/app/settings/llm/index.tsx
@@ -17,6 +17,7 @@ import Ollama from './Ollama';
import OpenAI from './OpenAI';
import Perplexity from './Perplexity';
import Zhipu from './Zhipu';
+import ChatChat from './ChatChat';
export default memo<{ showOllama: boolean }>(({ showOllama }) => {
const { t } = useTranslation('setting');
@@ -34,6 +35,7 @@ export default memo<{ showOllama: boolean }>(({ showOllama }) => {
      {showOllama && <Ollama />}
+      <ChatChat />