[add] Add an endpoint for fetching the model list

This commit is contained in:
VLOU 2024-05-10 00:46:24 +08:00
parent 7061cb6297
commit e2f14482cb
11 changed files with 235 additions and 2 deletions

View File

@@ -0,0 +1,40 @@
import { getServerConfig } from '@/config/server';
import { createErrorResponse } from '@/app/api/errorResponse';
import { LOBE_CHAT_AUTH_HEADER, OAUTH_AUTHORIZED } from '@/const/auth';
import { getJWTPayload } from '../../chat/auth';
export const GET = async (req: Request) => {
// get Authorization from header
const authorization = req.headers.get(LOBE_CHAT_AUTH_HEADER);
const { CHATCHAT_PROXY_URL } = getServerConfig();
let baseURL = CHATCHAT_PROXY_URL;
// Parse the JWT directly here so the per-user endpoint can be read from it
if (authorization) {
const jwtPayload = await getJWTPayload(authorization);
if (jwtPayload.endpoint) {
baseURL = jwtPayload.endpoint;
}
}
let res: Response;
try {
console.log('get models from:', baseURL);
res = await fetch(`${baseURL}/models`);
if (!res.ok) {
return createErrorResponse(500, { error: `Failed to fetch models: ${res.status}` });
}
return res;
} catch (e) {
// Error instances serialize to `{}` in JSON, so return the message instead
return createErrorResponse(500, { error: (e as Error).message });
}
};
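For reference, a minimal sketch of calling this handler from the client, assuming the route file lives under app/api/models/[provider]/ so it is served at /api/models/{provider} (consistent with the API_ENDPOINTS change below); clientJWT is an assumed, pre-built token:

// Hypothetical smoke test for the new route; the path and token are assumptions.
const res = await fetch('/api/models/chatchat', {
headers: { [LOBE_CHAT_AUTH_HEADER]: clientJWT },
});
const payload = await res.json(); // proxied OpenAI-style list: { object: 'list', data: [...] }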

View File

@@ -11,6 +11,7 @@ import { ModelProvider } from '@/libs/agent-runtime';
import Checker from '../components/Checker';
import ProviderConfig from '../components/ProviderConfig';
import { LLMProviderBaseUrlKey, LLMProviderConfigKey } from '../const';
import ModelSelector from '../components/ModelSelector';
const providerKey = 'chatchat';
@@ -39,6 +40,11 @@ const ChatChatProvider = memo(() => {
label: t('llm.ChatChat.customModelName.title'),
name: [LLMProviderConfigKey, providerKey, 'customModelName'],
},
{
children: <ModelSelector provider={ModelProvider.ChatChat} />,
desc: t('llm.selectorModel.desc'),
label: t('llm.selectorModel.title'),
},
{
children: <Checker model={'glm-4'} provider={ModelProvider.ChatChat} />,
desc: t('llm.ChatChat.checker.desc'),

View File

@@ -0,0 +1,110 @@
import { CheckCircleFilled } from '@ant-design/icons';
import { Alert, Highlighter } from '@lobehub/ui';
import { Button } from 'antd';
import { useTheme } from 'antd-style';
import { memo, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { Flexbox } from 'react-layout-kit';
import { useIsMobile } from '@/hooks/useIsMobile';
import { ModelSelectorError } from '@/types/message';
import { modelsServer } from '@/services/models';
import { useGlobalStore } from '@/store/global';
import { GlobalLLMProviderKey } from '@/types/settings/modelProvider';
import { currentSettings } from '@/store/global/slices/settings/selectors/settings';
interface ModelSelectorProps {
provider: GlobalLLMProviderKey;
}
const ModelSelector = memo<ModelSelectorProps>(({ provider }) => {
const { t } = useTranslation('setting');
const [loading, setLoading] = useState(false);
const [pass, setPass] = useState(false);
const theme = useTheme();
const [error, setError] = useState<ModelSelectorError | undefined>();
const [setConfig, languageModel] = useGlobalStore((s) => [
s.setModelProviderConfig,
currentSettings(s).languageModel,
]);
const enable = languageModel[provider]?.enabled || false;
// Normalize entries into the ChatModelCard shape
const filterModel = (data: any[] = []) => {
return data.map((item) => {
return {
tokens: item?.tokens || 8000,
displayName: item.displayName || item.id,
functionCall: false, // plugins stay disabled by default; chatchat plugin support is not implemented yet
...item,
};
});
};
const processProviderModels = () => {
if (!enable) return;
setLoading(true);
modelsServer.getModels(provider).then((data) => {
if (data.error) {
setError({ message: data.error, type: 500 });
} else {
// Persist the fetched models into the provider config
setConfig(provider, { models: filterModel(data.data) });
setError(undefined);
setPass(true);
}
}).finally(() => {
setLoading(false);
});
};
const isMobile = useIsMobile();
return (
<Flexbox align={isMobile ? 'flex-start' : 'flex-end'} gap={8}>
<Flexbox align={'center'} direction={isMobile ? 'horizontal-reverse' : 'horizontal'} gap={12}>
{pass && (
<Flexbox gap={4} horizontal>
<CheckCircleFilled
style={{
color: theme.colorSuccess,
}}
/>
{t('llm.selectorModel.pass')}
</Flexbox>
)}
<Button loading={loading} onClick={processProviderModels}>
{t('llm.selectorModel.button')}
</Button>
</Flexbox>
{error && (
<Flexbox gap={8} style={{ maxWidth: '600px', width: '100%' }}>
<Alert
banner
extra={
<Flexbox>
<Highlighter copyButtonSize={'small'} language={'json'} type={'pure'}>
{JSON.stringify(error, null, 2)}
</Highlighter>
</Flexbox>
}
message={t(`response.${error.type}` as any, { ns: 'error' })}
showIcon
type={'error'}
/>
</Flexbox>
)}
</Flexbox>
);
});
export default ModelSelector;
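To make the normalization concrete, here is how a hypothetical entry from {baseURL}/models passes through filterModel:

const sample = { id: 'chatglm3-6b', object: 'model', created: 1715270784, owned_by: 'chatchat', platform_name: 'ChatChat' };
// filterModel([sample])[0] → the defaults first, then the raw fields spread over them:
// { tokens: 8000, displayName: 'chatglm3-6b', functionCall: false, ...sample }

Because ...item is spread last, any tokens or displayName the backend already supplies takes priority over the defaults.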

View File

@@ -44,7 +44,7 @@ const ModelSwitchPanel = memo<PropsWithChildren>(({ children }) => {
provider.chatModels
.filter((c) => !c.hidden)
.map((model) => ({
key: model.id,
key: `${provider.id}-${model.id}`,
label: <ModelItemRender {...model} />,
onClick: () => {
updateAgentConfig({ model: model.id, provider: provider.id });
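Scoping the key by provider id prevents duplicate React keys when two enabled providers expose a model with the same id, for example:

// before: ['gpt-4', 'gpt-4'] — OpenAI and a proxy both list gpt-4, so the keys collide
// after: ['openai-gpt-4', 'chatchat-gpt-4'] — unique per provider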

View File

@@ -198,6 +198,13 @@ export default {
},
},
selectorModel: {
button: '更新',
desc: '获取代理地址下的所有模型，默认从 /v1/models 获取',
pass: '更新成功',
title: '更新模型到本地',
},
checker: {
button: '检查',
desc: '测试 Api Key 与代理地址是否正确填写',

View File

@@ -36,6 +36,9 @@ export const API_ENDPOINTS = mapWithBasePath({
// image
images: '/api/openai/images',
// models
models: (provider: string) => withBasePath(`/api/models/${provider}`),
// TTS & STT
stt: '/api/openai/stt',
tts: '/api/openai/tts',
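Unlike the plain-string entries, models is a function value, presumably left untouched by mapWithBasePath, so it applies withBasePath itself. Usage:

const url = API_ENDPOINTS.models('chatchat'); // '/api/models/chatchat', prefixed with the base path when one is configured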

View File

@@ -0,0 +1,28 @@
import { getMessageError } from '@/utils/fetch';
import { API_ENDPOINTS } from './_url';
import { createHeaderWithAuth } from './_auth';
import { ModelsResponse } from '@/types/models';
import { GlobalLLMProviderKey } from '@/types/settings/modelProvider';
class ModelsServer {
getModels = async (provider: GlobalLLMProviderKey): Promise<ModelsResponse> => {
const headers = await createHeaderWithAuth({ provider, headers: { 'Content-Type': 'application/json' } });
try {
const res = await fetch(API_ENDPOINTS.models(provider), {
headers,
});
if (!res.ok) {
throw await getMessageError(res);
}
return res.json();
} catch (error) {
return { error: JSON.stringify(error) };
}
};
}
export const modelsServer = new ModelsServer();
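A minimal usage sketch; the ModelSelector component above follows the same pattern:

const result = await modelsServer.getModels('chatchat');
if (result.error) {
console.error('failed to fetch models:', result.error);
} else {
console.log(result.data?.map((m) => m.id)); // ids the proxy exposes
}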

View File

@@ -63,6 +63,7 @@ const anthropicAPIKey = (s: GlobalStore) => modelProvider(s).anthropic.apiKey;
const enableChatChat = (s: GlobalStore) => modelProvider(s).chatchat.enabled;
const chatChatProxyUrl = (s: GlobalStore) => modelProvider(s).chatchat.endpoint;
const chatChatModels = (s: GlobalStore) => modelProvider(s).chatchat.models || [];
// const azureModelList = (s: GlobalStore): ModelProviderCard => {
// const azure = azureConfig(s);
@@ -138,6 +139,12 @@ const modelSelectList = (s: GlobalStore): ModelProviderCard[] => {
const ollamaChatModels = processChatModels(ollamaModelConfig, OllamaProvider.chatModels);
const chatChatModelConfig = parseModelString(
currentSettings(s).languageModel.chatchat.customModelName,
);
const chatChatChatModels = processChatModels(chatChatModelConfig, chatChatModels(s));
return [
{
...OpenAIProvider,
@@ -152,7 +159,7 @@ const modelSelectList = (s: GlobalStore): ModelProviderCard[] => {
{ ...PerplexityProvider, enabled: enablePerplexity(s) },
{ ...AnthropicProvider, enabled: enableAnthropic(s) },
{ ...MistralProvider, enabled: enableMistral(s) },
{ ...ChatChatProvider, enabled: enableChatChat(s) },
{ ...ChatChatProvider, chatModels: chatChatChatModels, enabled: enableChatChat(s) },
];
};
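Assuming parseModelString follows LobeChat's usual custom-model syntax (+id to add, -id to hide, id=Name to retitle — an assumption, as the helper is not shown here), the merge lets customModelName adjust the fetched list:

// customModelName = '-chatglm2-6b,+qwen:7b=Qwen 7B'
// → chatChatChatModels hides chatglm2-6b from the fetched models and adds qwen:7b titled 'Qwen 7B'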

View File

@@ -17,6 +17,11 @@ export interface ChatMessageError {
type: ErrorType | IPluginErrorType | ILobeAgentRuntimeErrorType;
}
export interface ModelSelectorError {
message: string;
type: ErrorType;
}
export interface ChatTranslate extends Translate {
content?: string;
}

View File

@@ -0,0 +1,15 @@
interface Model {
id: string;
created: number; // Unix timestamp
platform_name: string;
owned_by: string;
object: string;
tokens?: number;
displayName?: string;
}
export interface ModelsResponse {
object?: 'list';
data?: Model[];
error?: string;
}
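For reference, a value matching this shape, with illustrative data:

const example: ModelsResponse = {
object: 'list',
data: [{ id: 'chatglm3-6b', created: 1715270784, platform_name: 'ChatChat', owned_by: 'chatchat', object: 'model' }],
};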

View File

@@ -1,3 +1,5 @@
import { ChatModelCard } from '../llm';
export type CustomModels = { displayName: string; id: string }[];
export interface OpenAIConfig {
@@ -22,23 +24,27 @@ export interface AzureOpenAIConfig {
deployments: string;
enabled: boolean;
endpoint?: string;
models?: ChatModelCard[];
}
export interface ZhiPuConfig {
apiKey?: string;
enabled: boolean;
endpoint?: string;
models?: ChatModelCard[];
}
export interface MoonshotConfig {
apiKey?: string;
enabled: boolean;
models?: ChatModelCard[];
}
export interface GoogleConfig {
apiKey?: string;
enabled: boolean;
endpoint?: string;
models?: ChatModelCard[];
}
export interface AWSBedrockConfig {
@@ -46,34 +52,40 @@ export interface AWSBedrockConfig {
enabled: boolean;
region?: string;
secretAccessKey?: string;
models?: ChatModelCard[];
}
export interface OllamaConfig {
customModelName?: string;
enabled?: boolean;
endpoint?: string;
models?: ChatModelCard[];
}
export interface PerplexityConfig {
apiKey?: string;
enabled: boolean;
endpoint?: string;
models?: ChatModelCard[];
}
export interface AnthropicConfig {
apiKey?: string;
enabled: boolean;
models?: ChatModelCard[];
}
export interface MistralConfig {
apiKey?: string;
enabled: boolean;
models?: ChatModelCard[];
}
export interface ChatChatConfig {
customModelName?: string;
enabled?: boolean;
endpoint?: string;
models?: ChatModelCard[];
}
export interface GlobalLLMConfig {