diff --git a/model-providers/model_providers.yaml b/model-providers/model_providers.yaml
index 86171fb1..d88736b3 100644
--- a/model-providers/model_providers.yaml
+++ b/model-providers/model_providers.yaml
@@ -24,6 +24,6 @@ xinference:
     model_type: 'llm'
     model_credentials:
       server_url: 'http://127.0.0.1:9997/'
-      model_uid: 'gpt-3.5-turbo'
+      model_uid: 'chatglm3-6b'
diff --git a/model-providers/model_providers/bootstrap_web/openai_bootstrap_web.py b/model-providers/model_providers/bootstrap_web/openai_bootstrap_web.py
index 31b2cb77..9e20d7aa 100644
--- a/model-providers/model_providers/bootstrap_web/openai_bootstrap_web.py
+++ b/model-providers/model_providers/bootstrap_web/openai_bootstrap_web.py
@@ -398,14 +398,16 @@ class RESTFulOpenAIBootstrapBaseWeb(OpenAIBootstrapBaseWeb):
             _convert_to_message(message) for message in chat_request.messages
         ]
-        tools = [
-            PromptMessageTool(
-                name=f.function.name,
-                description=f.function.description,
-                parameters=f.function.parameters,
-            )
-            for f in chat_request.tools
-        ]
+        tools = []
+        if chat_request.tools:
+            tools = [
+                PromptMessageTool(
+                    name=f.function.name,
+                    description=f.function.description,
+                    parameters=f.function.parameters,
+                )
+                for f in chat_request.tools
+            ]
         if chat_request.functions:
             tools.extend(
                 [
diff --git a/model-providers/model_providers/core/bootstrap/openai_protocol.py b/model-providers/model_providers/core/bootstrap/openai_protocol.py
index ec5ddc3f..2945c0ba 100644
--- a/model-providers/model_providers/core/bootstrap/openai_protocol.py
+++ b/model-providers/model_providers/core/bootstrap/openai_protocol.py
@@ -140,11 +140,11 @@ class ChatCompletionRequest(BaseModel):
     tools: Optional[List[FunctionAvailable]] = None
     functions: Optional[List[FunctionDefinition]] = None
     function_call: Optional[FunctionCallDefinition] = None
-    temperature: Optional[float] = None
-    top_p: Optional[float] = None
+    temperature: Optional[float] = 0.75
+    top_p: Optional[float] = 0.75
     top_k: Optional[float] = None
     n: int = 1
-    max_tokens: Optional[int] = None
+    max_tokens: Optional[int] = 256
     stop: Optional[list[str]] = None
     stream: Optional[bool] = False