From e6b97f13cb201594ddbfbd39225e67b63a7b7cbe Mon Sep 17 00:00:00 2001
From: glide-the <2533736852@qq.com>
Date: Tue, 7 May 2024 20:15:56 +0800
Subject: [PATCH] Modify configuration loading behavior
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 chatchat-server/chatchat/init_database.py     |  2 +-
 .../chatchat/server/agent/container.py        |  4 +-
 .../server/agent/tools_factory/text2image.py  | 22 ++++----
 .../server/api_server/openai_routes.py        |  6 ++-
 .../chatchat/server/api_server/server_app.py  |  2 +-
 .../chatchat/server/api_server/tool_routes.py |  4 +-
 chatchat-server/chatchat/server/chat/chat.py  |  2 +-
 .../chatchat/server/chat/feedback.py          |  7 ++-
 chatchat-server/chatchat/server/chat/utils.py |  5 +-
 .../kb_service/es_kb_service.py               |  6 ++-
 .../kb_summary/summary_chunk.py               |  5 +-
 .../chatchat/server/knowledge_base/utils.py   |  5 +-
 .../chatchat/server/minx_chat_openai.py       | 51 -------------------
 chatchat-server/chatchat/server/utils.py      | 12 +++--
 chatchat-server/chatchat/webui_pages/utils.py |  7 ++-
 15 files changed, 60 insertions(+), 80 deletions(-)
 delete mode 100644 chatchat-server/chatchat/server/minx_chat_openai.py

diff --git a/chatchat-server/chatchat/init_database.py b/chatchat-server/chatchat/init_database.py
index e42046fc..67c28963 100644
--- a/chatchat-server/chatchat/init_database.py
+++ b/chatchat-server/chatchat/init_database.py
@@ -2,7 +2,7 @@ import sys
 sys.path.append("chatchat")
 from chatchat.server.knowledge_base.migrate import (create_tables, reset_tables, import_from_db,
                                                     folder2db, prune_db_docs, prune_folder_files)
-from chatchat.configs.model_config import DEFAULT_EMBEDDING_MODEL
+from chatchat.configs import DEFAULT_EMBEDDING_MODEL
 from datetime import datetime
 
 
diff --git a/chatchat-server/chatchat/server/agent/container.py b/chatchat-server/chatchat/server/agent/container.py
index 80217e93..e510d82c 100644
--- a/chatchat-server/chatchat/server/agent/container.py
+++ b/chatchat-server/chatchat/server/agent/container.py
@@ -1,6 +1,8 @@
-from chatchat.configs import logger
+import logging
 from chatchat.server.utils import get_tool_config
 
+logger = logging.getLogger(__name__)
+
 
 class ModelContainer:
     def __init__(self):
diff --git a/chatchat-server/chatchat/server/agent/tools_factory/text2image.py b/chatchat-server/chatchat/server/agent/tools_factory/text2image.py
index b190067b..6971b024 100644
--- a/chatchat-server/chatchat/server/agent/tools_factory/text2image.py
+++ b/chatchat-server/chatchat/server/agent/tools_factory/text2image.py
@@ -10,21 +10,21 @@ from chatchat.server.utils import get_tool_config
 from .tools_registry import regist_tool, BaseToolOutput
 import openai
 
-from chatchat.configs.basic_config import MEDIA_PATH
+from chatchat.configs import MEDIA_PATH
 from chatchat.server.utils import MsgType
 
 
 def get_image_model_config() -> dict:
-    from chatchat.configs.model_config import LLM_MODEL_CONFIG, ONLINE_LLM_MODEL
-
-    model = LLM_MODEL_CONFIG.get("image_model")
-    if model:
-        name = list(model.keys())[0]
-        if config := ONLINE_LLM_MODEL.get(name):
-            config = {**list(model.values())[0], **config}
-            config.setdefault("model_name", name)
-            return config
-
+    # from chatchat.configs import LLM_MODEL_CONFIG, ONLINE_LLM_MODEL
+    # TODO: the ONLINE_LLM_MODEL config has been removed; this logic needs to be reworked
+    # model = LLM_MODEL_CONFIG.get("image_model")
+    # if model:
+    #     name = list(model.keys())[0]
+    #     if config := ONLINE_LLM_MODEL.get(name):
+    #         config = {**list(model.values())[0], **config}
+    #         config.setdefault("model_name", name)
+    #     return config
+    pass
 
 
 @regist_tool(title="文生图", return_direct=True)
 def text2images(
diff --git a/chatchat-server/chatchat/server/api_server/openai_routes.py b/chatchat-server/chatchat/server/api_server/openai_routes.py
index 5e713073..108e144f 100644
--- a/chatchat-server/chatchat/server/api_server/openai_routes.py
+++ b/chatchat-server/chatchat/server/api_server/openai_routes.py
@@ -16,9 +16,13 @@ from openai.types.file_object import FileObject
 from sse_starlette.sse import EventSourceResponse
 
 from .api_schemas import *
-from chatchat.configs import logger, BASE_TEMP_DIR, log_verbose
+from chatchat.configs import BASE_TEMP_DIR, log_verbose
 from chatchat.server.utils import get_model_info, get_config_platforms, get_OpenAIClient
 
+import logging
+
+logger = logging.getLogger()
+
 DEFAULT_API_CONCURRENCIES = 5  # 默认单个模型最大并发数
 model_semaphores: Dict[Tuple[str, str], asyncio.Semaphore] = {}  # key: (model_name, platform)
 
diff --git a/chatchat-server/chatchat/server/api_server/server_app.py b/chatchat-server/chatchat/server/api_server/server_app.py
index 3689044f..fcd5d2e0 100644
--- a/chatchat-server/chatchat/server/api_server/server_app.py
+++ b/chatchat-server/chatchat/server/api_server/server_app.py
@@ -9,7 +9,7 @@ from starlette.responses import RedirectResponse
 import uvicorn
 
 from chatchat.configs import VERSION, MEDIA_PATH, CHATCHAT_ROOT
-from chatchat.configs.server_config import OPEN_CROSS_DOMAIN
+from chatchat.configs import OPEN_CROSS_DOMAIN
 from chatchat.server.api_server.chat_routes import chat_router
 from chatchat.server.api_server.kb_routes import kb_router
 from chatchat.server.api_server.openai_routes import openai_router
diff --git a/chatchat-server/chatchat/server/api_server/tool_routes.py b/chatchat-server/chatchat/server/api_server/tool_routes.py
index 3093b40e..0a55299c 100644
--- a/chatchat-server/chatchat/server/api_server/tool_routes.py
+++ b/chatchat-server/chatchat/server/api_server/tool_routes.py
@@ -4,9 +4,11 @@ from typing import List
 
 from fastapi import APIRouter, Request, Body
 
-from chatchat.configs import logger
 from chatchat.server.utils import BaseResponse, get_tool, get_tool_config
 
+import logging
+
+logger = logging.getLogger()
 
 tool_router = APIRouter(prefix="/tools", tags=["Toolkits"])
 
diff --git a/chatchat-server/chatchat/server/chat/chat.py b/chatchat-server/chatchat/server/chat/chat.py
index 6147b376..ba99acd0 100644
--- a/chatchat-server/chatchat/server/chat/chat.py
+++ b/chatchat-server/chatchat/server/chat/chat.py
@@ -13,7 +13,7 @@ from langchain.chains import LLMChain
 from langchain.prompts.chat import ChatPromptTemplate
 from langchain.prompts import PromptTemplate
 
-from chatchat.configs.model_config import LLM_MODEL_CONFIG
+from chatchat.configs import LLM_MODEL_CONFIG
 from chatchat.server.agent.agent_factory.agents_registry import agents_registry
 from chatchat.server.agent.container import container
 from chatchat.server.api_server.api_schemas import OpenAIChatOutput
diff --git a/chatchat-server/chatchat/server/chat/feedback.py b/chatchat-server/chatchat/server/chat/feedback.py
index d41d409d..56227ff0 100644
--- a/chatchat-server/chatchat/server/chat/feedback.py
+++ b/chatchat-server/chatchat/server/chat/feedback.py
@@ -1,8 +1,13 @@
 from fastapi import Body
-from chatchat.configs import logger, log_verbose
+from chatchat.configs import log_verbose
 from chatchat.server.utils import BaseResponse
 from chatchat.server.db.repository import feedback_message_to_db
+import logging
+
+logger = logging.getLogger()
+
+
 
 def chat_feedback(message_id: str = Body("", max_length=32, description="聊天记录id"),
                   score: int = Body(0, max=100, description="用户评分,满分100,越大表示评价越高"),
                   reason: str = Body("", description="用户评分理由,比如不符合事实等")
diff --git a/chatchat-server/chatchat/server/chat/utils.py b/chatchat-server/chatchat/server/chat/utils.py
index 71a16c58..38634cc8 100644
--- a/chatchat-server/chatchat/server/chat/utils.py
+++ b/chatchat-server/chatchat/server/chat/utils.py
@@ -1,9 +1,12 @@
 from functools import lru_cache
 from chatchat.server.pydantic_v2 import BaseModel, Field
 from langchain.prompts.chat import ChatMessagePromptTemplate
-from chatchat.configs import logger, log_verbose
 from typing import List, Tuple, Dict, Union
+import logging
+
+logger = logging.getLogger()
+
 
 
 class History(BaseModel):
     """
diff --git a/chatchat-server/chatchat/server/knowledge_base/kb_service/es_kb_service.py b/chatchat-server/chatchat/server/knowledge_base/kb_service/es_kb_service.py
index 9b65d81e..19813bf1 100644
--- a/chatchat-server/chatchat/server/knowledge_base/kb_service/es_kb_service.py
+++ b/chatchat-server/chatchat/server/knowledge_base/kb_service/es_kb_service.py
@@ -7,7 +7,11 @@ from chatchat.server.knowledge_base.kb_service.base import KBService, SupportedV
 from chatchat.server.knowledge_base.utils import KnowledgeFile
 from chatchat.server.utils import get_Embeddings
 from elasticsearch import Elasticsearch, BadRequestError
-from chatchat.configs import logger, kbs_config, KB_ROOT_PATH
+from chatchat.configs import kbs_config, KB_ROOT_PATH
+
+import logging
+
+logger = logging.getLogger()
 
 
 class ESKBService(KBService):
diff --git a/chatchat-server/chatchat/server/knowledge_base/kb_summary/summary_chunk.py b/chatchat-server/chatchat/server/knowledge_base/kb_summary/summary_chunk.py
index b7c63491..1e93317a 100644
--- a/chatchat-server/chatchat/server/knowledge_base/kb_summary/summary_chunk.py
+++ b/chatchat-server/chatchat/server/knowledge_base/kb_summary/summary_chunk.py
@@ -3,7 +3,6 @@ from typing import List, Optional
 from langchain.schema.language_model import BaseLanguageModel
 
 from chatchat.server.knowledge_base.model.kb_document_model import DocumentWithVSId
-from chatchat.configs import (logger)
 from langchain.chains import StuffDocumentsChain, LLMChain
 from langchain.prompts import PromptTemplate
 
@@ -14,6 +13,10 @@ from langchain.chains.combine_documents.map_reduce import ReduceDocumentsChain,
 import sys
 import asyncio
 
+import logging
+
+logger = logging.getLogger()
+
 
 class SummaryAdapter:
     _OVERLAP_SIZE: int
diff --git a/chatchat-server/chatchat/server/knowledge_base/utils.py b/chatchat-server/chatchat/server/knowledge_base/utils.py
index 423c56eb..c8ae0980 100644
--- a/chatchat-server/chatchat/server/knowledge_base/utils.py
+++ b/chatchat-server/chatchat/server/knowledge_base/utils.py
@@ -5,7 +5,6 @@ from chatchat.configs import (
     CHUNK_SIZE,
     OVERLAP_SIZE,
     ZH_TITLE_ENHANCE,
-    logger,
     log_verbose,
     text_splitter_dict,
     TEXT_SPLITTER_NAME,
@@ -22,6 +21,10 @@ from typing import List, Union, Dict, Tuple, Generator
 import chardet
 from langchain_community.document_loaders import JSONLoader, TextLoader
 
+import logging
+
+logger = logging.getLogger()
+
 
 def validate_kb_name(knowledge_base_id: str) -> bool:
     # 检查是否包含预期外的字符或路径攻击关键字
diff --git a/chatchat-server/chatchat/server/minx_chat_openai.py b/chatchat-server/chatchat/server/minx_chat_openai.py
deleted file mode 100644
index b5362bba..00000000
--- a/chatchat-server/chatchat/server/minx_chat_openai.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Tuple
-)
-import sys
-import logging
-
-logger = logging.getLogger(__name__)
-
-if TYPE_CHECKING:
-    import tiktoken
-
-
-class MinxChatOpenAI:
-
-    @staticmethod
-    def import_tiktoken() -> Any:
-        try:
-            import tiktoken
-        except ImportError:
-            raise ValueError(
-                "Could not import tiktoken python package. "
-                "This is needed in order to calculate get_token_ids. "
-                "Please install it with `pip install tiktoken`."
-            )
-        return tiktoken
-
-    @staticmethod
-    def get_encoding_model(self) -> Tuple[str, "tiktoken.Encoding"]:
-        tiktoken_ = MinxChatOpenAI.import_tiktoken()
-        if self.tiktoken_model_name is not None:
-            model = self.tiktoken_model_name
-        else:
-            model = self.model_name
-            if model == "gpt-3.5-turbo":
-                # gpt-3.5-turbo may change over time.
-                # Returning num tokens assuming gpt-3.5-turbo-0301.
-                model = "gpt-3.5-turbo-0301"
-            elif model == "gpt-4":
-                # gpt-4 may change over time.
-                # Returning num tokens assuming gpt-4-0314.
-                model = "gpt-4-0314"
-        # Returns the number of tokens used by a list of messages.
-        try:
-            encoding = tiktoken_.encoding_for_model(model)
-        except Exception as e:
-            logger.warning("Warning: model not found. Using cl100k_base encoding.")
-            model = "cl100k_base"
-            encoding = tiktoken_.get_encoding(model)
-        return model, encoding
diff --git a/chatchat-server/chatchat/server/utils.py b/chatchat-server/chatchat/server/utils.py
index 2dab65e2..8efcd0f8 100644
--- a/chatchat-server/chatchat/server/utils.py
+++ b/chatchat-server/chatchat/server/utils.py
@@ -24,13 +24,15 @@ from typing import (
     Tuple,
     Literal,
 )
-import logging
-from chatchat.configs import (logger, log_verbose, HTTPX_DEFAULT_TIMEOUT,
+from chatchat.configs import (log_verbose, HTTPX_DEFAULT_TIMEOUT,
                               DEFAULT_LLM_MODEL, DEFAULT_EMBEDDING_MODEL,
                               TEMPERATURE, MODEL_PLATFORMS)
 from chatchat.server.pydantic_v2 import BaseModel, Field
-from chatchat.server.minx_chat_openai import MinxChatOpenAI # TODO: still used?
+
+import logging
+
+logger = logging.getLogger()
 
 
 async def wrap_done(fn: Awaitable, event: asyncio.Event):
@@ -490,7 +492,7 @@ def MakeFastAPIOffline(
 
 
 def api_address() -> str:
-    from chatchat.configs.server_config import API_SERVER
+    from chatchat.configs import API_SERVER
 
     host = API_SERVER["host"]
     if host == "0.0.0.0":
@@ -500,7 +502,7 @@ def api_address() -> str:
 
 
 def webui_address() -> str:
-    from chatchat.configs.server_config import WEBUI_SERVER
+    from chatchat.configs import WEBUI_SERVER
 
     host = WEBUI_SERVER["host"]
     port = WEBUI_SERVER["port"]
diff --git a/chatchat-server/chatchat/webui_pages/utils.py b/chatchat-server/chatchat/webui_pages/utils.py
index 7b5ccb2b..7d8cc5e5 100644
--- a/chatchat-server/chatchat/webui_pages/utils.py
+++ b/chatchat-server/chatchat/webui_pages/utils.py
@@ -13,7 +13,7 @@ from chatchat.configs import (
     ZH_TITLE_ENHANCE,
     VECTOR_SEARCH_TOP_K,
     HTTPX_DEFAULT_TIMEOUT,
-    logger, log_verbose,
+    log_verbose,
 )
 import httpx
 import contextlib
@@ -22,7 +22,10 @@ import os
 from io import BytesIO
 
 from chatchat.server.utils import set_httpx_config, api_address, get_httpx_client
-from pprint import pprint
+
+import logging
+
+logger = logging.getLogger()
 
 
 set_httpx_config()
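
Note (not part of the patch): across these files the shared logger object that chatchat.configs used to export is replaced with loggers obtained from Python's standard logging module. A minimal sketch of that pattern, assuming the application configures handlers once at startup; the load_config function below is only illustrative and does not exist in the codebase:

    import logging

    # Module-scoped logger, as in agent/container.py; most other modules in this
    # patch call logging.getLogger() with no argument, which returns the root logger.
    logger = logging.getLogger(__name__)

    def load_config() -> None:
        # Records propagate to whatever handlers were configured at startup,
        # e.g. logging.basicConfig(level=logging.INFO).
        logger.info("configuration loaded")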