From 584c7d563961092a9792de4686c3b8e53f6b7137 Mon Sep 17 00:00:00 2001
From: ceerrep
Date: Tue, 18 Feb 2025 23:49:51 +0800
Subject: [PATCH] fix: object type for non-streaming response

---
 ktransformers/server/api/openai/endpoints/chat.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ktransformers/server/api/openai/endpoints/chat.py b/ktransformers/server/api/openai/endpoints/chat.py
index 4cf04d7..f84538a 100644
--- a/ktransformers/server/api/openai/endpoints/chat.py
+++ b/ktransformers/server/api/openai/endpoints/chat.py
@@ -33,7 +33,7 @@ async def chat_completion(request:Request,create:ChatCompletionCreate):
             yield chunk
         return chat_stream_response(request,inner())
     else:
-        comp = ChatCompletionObject(id=id,object='chat.completion.chunk',created=int(time()))
+        comp = ChatCompletionObject(id=id,object='chat.completion',created=int(time()))
         comp.usage = Usage(completion_tokens=1, prompt_tokens=1, total_tokens=2)
         async for token in interface.inference(input_message,id):
            comp.append_token(token)
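For context, the OpenAI chat API uses `object='chat.completion'` for full (non-streaming) responses and `object='chat.completion.chunk'` only for streamed chunks, which is what this one-line change restores. Below is a minimal verification sketch; the server URL, port, and `requests`-based harness are assumptions for illustration and are not part of the patch.

```python
# Hypothetical check that the non-streaming endpoint now reports the correct
# `object` value. The base URL below is an assumed local ktransformers server;
# only the expected field values come from the OpenAI chat API convention.
import requests

BASE_URL = "http://localhost:10002/v1"  # assumption: local server address

def check_non_streaming_object_field() -> None:
    resp = requests.post(
        f"{BASE_URL}/chat/completions",
        json={
            "model": "any",
            "messages": [{"role": "user", "content": "hello"}],
            "stream": False,  # exercises the non-streaming branch patched above
        },
        timeout=60,
    )
    resp.raise_for_status()
    body = resp.json()
    # Before the patch the server reported 'chat.completion.chunk' here, which
    # strict OpenAI-compatible clients reject for non-streaming responses.
    assert body["object"] == "chat.completion", body["object"]

if __name__ == "__main__":
    check_non_streaming_object_field()
    print("non-streaming response carries object='chat.completion'")
```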