Mirror of https://github.com/RYDE-WORK/Langchain-Chatchat.git
Synced 2026-01-19 21:37:20 +08:00

Commit 64ca17061a ("make formater")
Parent: d485edad6d
````diff
@@ -9,13 +9,16 @@ chatchat follows a monorepo layout; the project's codebase contains multiple packages
 ```shell
 .
-├── docs                  # documentation
-├── chatchat-server       # server
-│   └── tests
-│       ├── integration_tests   # integration tests (every package has them; omitted for brevity)
-│       └── unit_tests          # unit tests (every package has them; omitted for brevity)
-├── frontend              # frontend
-└── model-providers       # model platform
+├── docker
+├── docs                  # documentation
+├── frontend              # frontend
+├── libs
+│   ├── chatchat-server   # server
+│   │   └── tests
+│   │       ├── integration_tests   # integration tests (every package has them; omitted for brevity)
+│   │       └── unit_tests          # unit tests (every package has them; omitted for brevity)
+│   └── model-providers   # model platform
+
 
 
 ```
 The root directory also contains the following files:
@@ -29,8 +32,8 @@ Makefile: a file with shortcuts for building, linting, the docs, and the cookbook
 
 The code in the codebase is split into the following parts:
 
-- The /chatchat-server directory contains the chatchat server code.
-- The /frontend directory contains the chatchat front-end code.
-- The /model-providers directory contains the code that adapts vendor model providers.
+- The libs/chatchat-server directory contains the chatchat server code.
+- The libs/model-providers directory contains the code that adapts vendor model providers.
+- The frontend directory contains the chatchat front-end code.
 
 Detailed
````
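The directory tree above splits each package's tests into integration_tests and unit_tests. For orientation, here is a minimal sketch of driving those suites with pytest; pytest itself is grounded in the test hunks further down, but the exact entry point (a script versus a Makefile target) is an assumption.

```python
# A minimal sketch of running the chatchat-server test suites named in the
# tree above. Assumes pytest is installed and that the command is run from
# the repository root; the project may wrap this in a Makefile target instead.
import sys

import pytest

if __name__ == "__main__":
    sys.exit(
        pytest.main(
            [
                "libs/chatchat-server/tests/unit_tests",
                "libs/chatchat-server/tests/integration_tests",
                "-q",  # keep the output short
            ]
        )
    )
```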
```diff
@@ -25,8 +25,8 @@ if __name__ == "__main__":
     logging_conf = get_config_dict(
         "INFO",
         get_log_file(log_path="logs", sub_dir=f"local_{get_timestamp_ms()}"),
-        1024*1024*1024*3,
-        1024*1024*1024*3,
+        1024 * 1024 * 1024 * 3,
+        1024 * 1024 * 1024 * 3,
     )
     boot = (
         BootstrapWebBuilder()
```
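The two `1024 * 1024 * 1024 * 3` arguments are byte counts: 3 GiB. `get_config_dict` and `get_log_file` are project helpers whose implementations are not shown on this page; the stand-alone sketch below is a hypothetical equivalent built on the standard library, included only to make the numeric argument concrete.

```python
# Hypothetical stand-in for get_config_dict(): a standard-library dictConfig
# with a rotating file handler capped at 3 GiB. Only the byte arithmetic and
# the dictConfig call mirror the diff; every other detail is illustrative.
import logging
import logging.config
import os

THREE_GIB = 1024 * 1024 * 1024 * 3  # the value passed twice in the hunk above

os.makedirs("logs", exist_ok=True)

logging_conf = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "file": {
            "class": "logging.handlers.RotatingFileHandler",
            "filename": "logs/local.log",
            "maxBytes": THREE_GIB,  # rotate once the file reaches 3 GiB
            "backupCount": 3,
            "level": "INFO",
        }
    },
    "root": {"level": "INFO", "handlers": ["file"]},
}

logging.config.dictConfig(logging_conf)  # the same call the test modules use
logging.getLogger(__name__).info("logging configured")
```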
```diff
@@ -184,7 +184,9 @@ class RESTFulOpenAIBootstrapBaseWeb(OpenAIBootstrapBaseWeb):
             provider_model_bundle_llm = provider_manager.get_provider_model_bundle(
                 provider="zhipuai", model_type=model_type
             )
-            for model in provider_model_bundle_llm.configuration.custom_configuration.models:
+            for model in (
+                provider_model_bundle_llm.configuration.custom_configuration.models
+            ):
                 if model.model_type == model_type:
                     ai_models.append(
                         provider_model_bundle_llm.model_type_instance.get_model_schema(
@@ -198,7 +200,9 @@ class RESTFulOpenAIBootstrapBaseWeb(OpenAIBootstrapBaseWeb):
                     )
 
             # Fetch the predefined models
-            ai_models.extend(provider_model_bundle_llm.model_type_instance.predefined_models())
+            ai_models.extend(
+                provider_model_bundle_llm.model_type_instance.predefined_models()
+            )
 
         logger.info(f"ai_models: {ai_models}")
 
@@ -244,7 +248,6 @@ class RESTFulOpenAIBootstrapBaseWeb(OpenAIBootstrapBaseWeb):
                 else:
                     input += token
 
-
         else:
             input = embeddings_request.input
 
```
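Every hunk in this commit is whitespace or line-wrapping churn, which is the signature of an automatic formatter run. Assuming the formatter is black with its default 88-column limit (suggested by the `1024 * 1024 * 1024 * 3` operator spacing and the rewrapped `for` statements, though neither the tool nor its configuration is shown on this page), the effect can be reproduced like this:

```python
# A sketch of reproducing the rewraps above with black's Python API.
# Assumption: the project formats with black at the default 88-column limit.
import black

SRC = '''\
def collect(provider_model_bundle_llm, model_type, ai_models):
    if provider_model_bundle_llm is not None:
        for model in provider_model_bundle_llm.configuration.custom_configuration.models:
            if model.model_type == model_type:
                ai_models.append(model)
'''

# format_str parses the module and reflows any statement longer than
# Mode().line_length (88 by default), yielding wrapped forms like the
# "for model in (...)" / "for (model) in ..." variants seen in the hunks.
print(black.format_str(SRC, mode=black.Mode()))
```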
```diff
@@ -100,9 +100,8 @@ def logging_conf() -> dict:
     return get_config_dict(
         "INFO",
         get_log_file(log_path="logs", sub_dir=f"local_{get_timestamp_ms()}"),
-
-        1024*1024*1024*3,
-        1024*1024*1024*3,
+        1024 * 1024 * 1024 * 3,
+        1024 * 1024 * 1024 * 3,
     )
 
 
```
```diff
@@ -5,8 +5,8 @@ from typing import List
 import pytest
 from omegaconf import OmegaConf
 
-from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers import BootstrapWebBuilder, _to_custom_provide_configuration
+from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers.core.model_manager import ModelManager
 from model_providers.core.model_runtime.entities.model_entities import (
     AIModelEntity,
@@ -32,14 +32,15 @@ def test_provider_manager_models(logging_conf: dict, providers_file: str) -> None:
         provider_name_to_provider_model_records_dict=provider_name_to_provider_model_records_dict,
     )
 
-
     ai_models: List[AIModelEntity] = []
     for model_type in ModelType.__members__.values():
         try:
             provider_model_bundle_llm = provider_manager.get_provider_model_bundle(
                 provider="deepseek", model_type=model_type
             )
-            for model in provider_model_bundle_llm.configuration.custom_configuration.models:
+            for (
+                model
+            ) in provider_model_bundle_llm.configuration.custom_configuration.models:
                 if model.model_type == model_type:
                     ai_models.append(
                         provider_model_bundle_llm.model_type_instance.get_model_schema(
@@ -56,7 +57,6 @@ def test_provider_manager_models(logging_conf: dict, providers_file: str) -> None:
     logger.info(f"ai_models: {ai_models}")
 
 
-
 def test_provider_wrapper_models(logging_conf: dict, providers_file: str) -> None:
     logging.config.dictConfig(logging_conf)  # type: ignore
     # Read the configuration file
@@ -72,12 +72,9 @@ def test_provider_wrapper_models(logging_conf: dict, providers_file: str) -> None:
         provider_name_to_provider_model_records_dict=provider_name_to_provider_model_records_dict,
     )
 
-
     for model_type in ModelType.__members__.values():
         models_by_model_type = ProvidersWrapper(
             provider_manager=provider_manager
         ).get_models_by_model_type(model_type=model_type.to_origin_model_type())
 
         print(f"{model_type.to_origin_model_type()}:{models_by_model_type}")
-
-
```
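The provider tests above and below all iterate `ModelType.__members__.values()`. That is the standard `enum.Enum` idiom for visiting every member; a tiny self-contained illustration with a toy enum standing in for the project's ModelType:

```python
# Toy illustration of the ModelType.__members__ idiom used in these tests.
# ToyModelType is hypothetical; the real enum lives in
# model_providers.core.model_runtime.entities.model_entities.
from enum import Enum


class ToyModelType(Enum):
    LLM = "llm"
    TEXT_EMBEDDING = "text-embedding"


# __members__ maps member names to members, so iterating .values() visits
# every member; for an alias-free enum this matches iterating the class.
for model_type in ToyModelType.__members__.values():
    print(model_type.name, model_type.value)

assert list(ToyModelType.__members__.values()) == list(ToyModelType)
```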
```diff
@@ -4,8 +4,8 @@ import logging
 import pytest
 from omegaconf import OmegaConf
 
-from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers import BootstrapWebBuilder, _to_custom_provide_configuration
+from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers.core.model_manager import ModelManager
 from model_providers.core.model_runtime.entities.model_entities import ModelType
 from model_providers.core.provider_manager import ProviderManager
@@ -34,7 +34,9 @@ def test_provider_manager_models(logging_conf: dict, providers_file: str) -> None:
             provider_model_bundle_llm = provider_manager.get_provider_model_bundle(
                 provider="ollama", model_type=model_type
             )
-            for model in provider_model_bundle_llm.configuration.custom_configuration.models:
+            for (
+                model
+            ) in provider_model_bundle_llm.configuration.custom_configuration.models:
                 if model.model_type == model_type:
                     ai_models.append(
                         provider_model_bundle_llm.model_type_instance.get_model_schema(
@@ -51,8 +53,6 @@ def test_provider_manager_models(logging_conf: dict, providers_file: str) -> None:
     logger.info(f"ai_models: {ai_models}")
 
 
-
-
 def test_provider_wrapper_models(logging_conf: dict, providers_file: str) -> None:
     logging.config.dictConfig(logging_conf)  # type: ignore
     # Read the configuration file
@@ -68,12 +68,9 @@ def test_provider_wrapper_models(logging_conf: dict, providers_file: str) -> None:
         provider_name_to_provider_model_records_dict=provider_name_to_provider_model_records_dict,
     )
 
-
     for model_type in ModelType.__members__.values():
         models_by_model_type = ProvidersWrapper(
             provider_manager=provider_manager
         ).get_models_by_model_type(model_type=model_type.to_origin_model_type())
 
         print(f"{model_type.to_origin_model_type()}:{models_by_model_type}")
-
-
```
```diff
@@ -4,8 +4,8 @@ import logging
 import pytest
 from omegaconf import OmegaConf
 
-from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers import BootstrapWebBuilder, _to_custom_provide_configuration
+from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers.core.model_manager import ModelManager
 from model_providers.core.model_runtime.entities.model_entities import ModelType
 from model_providers.core.provider_manager import ProviderManager
@@ -28,14 +28,15 @@ def test_provider_manager_models(logging_conf: dict, providers_file: str) -> None:
         provider_name_to_provider_model_records_dict=provider_name_to_provider_model_records_dict,
     )
 
-
     ai_models: List[AIModelEntity] = []
     for model_type in ModelType.__members__.values():
         try:
             provider_model_bundle_llm = provider_manager.get_provider_model_bundle(
                 provider="openai", model_type=model_type
             )
-            for model in provider_model_bundle_llm.configuration.custom_configuration.models:
+            for (
+                model
+            ) in provider_model_bundle_llm.configuration.custom_configuration.models:
                 if model.model_type == model_type:
                     ai_models.append(
                         provider_model_bundle_llm.model_type_instance.get_model_schema(
@@ -67,11 +68,9 @@ def test_provider_wrapper_models(logging_conf: dict, providers_file: str) -> None:
         provider_name_to_provider_model_records_dict=provider_name_to_provider_model_records_dict,
     )
 
-
     for model_type in ModelType.__members__.values():
         models_by_model_type = ProvidersWrapper(
             provider_manager=provider_manager
         ).get_models_by_model_type(model_type=model_type.to_origin_model_type())
 
         print(f"{model_type.to_origin_model_type()}:{models_by_model_type}")
-
```
```diff
@@ -4,8 +4,8 @@ import logging
 import pytest
 from omegaconf import OmegaConf
 
-from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers import BootstrapWebBuilder, _to_custom_provide_configuration
+from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers.core.model_manager import ModelManager
 from model_providers.core.model_runtime.entities.model_entities import ModelType
 from model_providers.core.provider_manager import ProviderManager
@@ -34,7 +34,9 @@ def test_provider_manager_models(logging_conf: dict, providers_file: str) -> None:
             provider_model_bundle_llm = provider_manager.get_provider_model_bundle(
                 provider="xinference", model_type=model_type
             )
-            for model in provider_model_bundle_llm.configuration.custom_configuration.models:
+            for (
+                model
+            ) in provider_model_bundle_llm.configuration.custom_configuration.models:
                 if model.model_type == model_type:
                     ai_models.append(
                         provider_model_bundle_llm.model_type_instance.get_model_schema(
@@ -51,7 +53,6 @@ def test_provider_manager_models(logging_conf: dict, providers_file: str) -> None:
     logger.info(f"ai_models: {ai_models}")
 
 
-
 def test_provider_wrapper_models(logging_conf: dict, providers_file: str) -> None:
     logging.config.dictConfig(logging_conf)  # type: ignore
     # Read the configuration file
@@ -67,11 +68,9 @@ def test_provider_wrapper_models(logging_conf: dict, providers_file: str) -> None:
         provider_name_to_provider_model_records_dict=provider_name_to_provider_model_records_dict,
    )
 
-
     for model_type in ModelType.__members__.values():
         models_by_model_type = ProvidersWrapper(
             provider_manager=provider_manager
         ).get_models_by_model_type(model_type=model_type.to_origin_model_type())
 
         print(f"{model_type.to_origin_model_type()}:{models_by_model_type}")
-
```
```diff
@@ -4,8 +4,8 @@ import logging
 import pytest
 from omegaconf import OmegaConf
 
-from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers import BootstrapWebBuilder, _to_custom_provide_configuration
+from model_providers.core.bootstrap.providers_wapper import ProvidersWrapper
 from model_providers.core.model_manager import ModelManager
 from model_providers.core.model_runtime.entities.model_entities import ModelType
 from model_providers.core.provider_manager import ProviderManager
@@ -28,14 +28,15 @@ def test_provider_manager_models(logging_conf: dict, providers_file: str) -> None:
         provider_name_to_provider_model_records_dict=provider_name_to_provider_model_records_dict,
     )
 
-
     ai_models: List[AIModelEntity] = []
     for model_type in ModelType.__members__.values():
         try:
             provider_model_bundle_llm = provider_manager.get_provider_model_bundle(
                 provider="zhipuai", model_type=model_type
             )
-            for model in provider_model_bundle_llm.configuration.custom_configuration.models:
+            for (
+                model
+            ) in provider_model_bundle_llm.configuration.custom_configuration.models:
                 if model.model_type == model_type:
                     ai_models.append(
                         provider_model_bundle_llm.model_type_instance.get_model_schema(
@@ -67,11 +68,9 @@ def test_provider_wrapper_models(logging_conf: dict, providers_file: str) -> None:
         provider_name_to_provider_model_records_dict=provider_name_to_provider_model_records_dict,
     )
 
-
     for model_type in ModelType.__members__.values():
         models_by_model_type = ProvidersWrapper(
             provider_manager=provider_manager
         ).get_models_by_model_type(model_type=model_type.to_origin_model_type())
 
         print(f"{model_type.to_origin_model_type()}:{models_by_model_type}")
-
```