diff --git a/server/chat/knowledge_base_chat.py b/server/chat/knowledge_base_chat.py
index 7316e5e8..4c461aa6 100644
--- a/server/chat/knowledge_base_chat.py
+++ b/server/chat/knowledge_base_chat.py
@@ -58,9 +58,6 @@ async def knowledge_base_chat(query: str = Body(..., description="用户输入",
 
         prompt_template = get_prompt_template(prompt_name)
         input_msg = History(role="user", content=prompt_template).to_msg_template(False)
-        # 用户最后一个问题会进入PROMPT_TEMPLATE,不用再作为history 了
-        if len(history) >= 1:
-            history.pop()
         chat_prompt = ChatPromptTemplate.from_messages(
             [i.to_msg_template() for i in history] + [input_msg])
 
diff --git a/webui_pages/dialogue/dialogue.py b/webui_pages/dialogue/dialogue.py
index 17555362..a90d3c04 100644
--- a/webui_pages/dialogue/dialogue.py
+++ b/webui_pages/dialogue/dialogue.py
@@ -167,7 +167,6 @@ def dialogue_page(api: ApiRequest):
                 chat_box.update_msg(text, streaming=False)  # 更新最终的字符串,去除光标
 
         elif dialogue_mode == "知识库问答":
-            history = get_messages_history(history_len)
             chat_box.ai_say([
                 f"正在查询知识库 `{selected_kb}` ...",
                 Markdown("...", in_expander=True, title="知识库匹配结果"),
@@ -196,6 +195,7 @@ def dialogue_page(api: ApiRequest):
             for d in api.search_engine_chat(prompt,
                                             search_engine_name=search_engine,
                                             top_k=se_top_k,
+                                            history=history,
                                             model=llm_model,
                                             temperature=temperature):
                 if error_msg := check_error_msg(d):  # check whether error occured
diff --git a/webui_pages/utils.py b/webui_pages/utils.py
index 4095437d..176dbb4f 100644
--- a/webui_pages/utils.py
+++ b/webui_pages/utils.py
@@ -429,6 +429,7 @@ class ApiRequest:
         query: str,
         search_engine_name: str,
         top_k: int = SEARCH_ENGINE_TOP_K,
+        history: List[Dict] = [],
         stream: bool = True,
         model: str = LLM_MODEL,
         temperature: float = TEMPERATURE,
@@ -445,6 +446,7 @@ class ApiRequest:
             "query": query,
             "search_engine_name": search_engine_name,
             "top_k": top_k,
+            "history": history,
             "stream": stream,
             "model_name": model,
             "temperature": temperature,