From ca2090d89b7fff3c7fd5fd6900f6cc72b74999f1 Mon Sep 17 00:00:00 2001
From: ceerrep
Date: Mon, 17 Feb 2025 00:11:54 +0800
Subject: [PATCH] feat: use model name in openai endpoint

---
 ktransformers/server/api/openai/endpoints/chat.py | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/ktransformers/server/api/openai/endpoints/chat.py b/ktransformers/server/api/openai/endpoints/chat.py
index 4da3bc9..52f1ff8 100644
--- a/ktransformers/server/api/openai/endpoints/chat.py
+++ b/ktransformers/server/api/openai/endpoints/chat.py
@@ -7,16 +7,13 @@ from ktransformers.server.utils.create_interface import get_interface
 from ktransformers.server.schemas.assistants.streaming import chat_stream_response
 from ktransformers.server.schemas.endpoints.chat import ChatCompletionCreate,ChatCompletionChunk,ChatCompletionObject
 from ktransformers.server.backend.base import BackendInterfaceBase
+from ktransformers.server.config.config import Config
 
 router = APIRouter()
 
-models = [
-    {"id": "0", "name": "ktranformers-model"},
-]
-
 @router.get('/models', tags=['openai'])
 async def list_models():
-    return models
+    return [{"id": Config().model_name, "name": Config().model_name}]
 
 
 @router.post('/chat/completions', tags=['openai'])
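
Note: for readability, below is the list_models handler as it reads after applying the hunk above, followed by a minimal client sketch. The handler is reconstructed directly from the diff; the client check is an assumption, since the host, port, and any /v1 route prefix are not specified in this patch and depend on how the server is deployed.

# Resulting endpoint after the patch (reconstructed from the diff, not new behavior)
from fastapi import APIRouter
from ktransformers.server.config.config import Config

router = APIRouter()

@router.get('/models', tags=['openai'])
async def list_models():
    # Report the configured model name instead of the previous hard-coded placeholder entry
    return [{"id": Config().model_name, "name": Config().model_name}]

# Hypothetical client check -- the base URL (localhost:10002) and /v1 prefix are assumptions
import requests
print(requests.get("http://localhost:10002/v1/models").json())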