Mirror of https://github.com/RYDE-WORK/Langchain-Chatchat.git (synced 2026-02-06 23:15:53 +08:00)
1. Search engine Q&A now supports chat history;
2. Fix a bug in how history is passed for knowledge base Q&A: the user's current input was being included in history. The root cause was the webui fetching the history messages a second time; the API's knowledge base chat endpoint itself was not at fault.
This commit is contained in:
parent: 902ba0c321
commit: bb7ce601fc
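To make item 2 of the message concrete, the sketch below (not the project's code; chat_log and this get_messages_history are simplified stand-ins, and the sample questions are illustrative) shows how fetching history after the current question has already been recorded leaks the user input into history:

# Illustrative only: why fetch order matters for the history payload.
chat_log = [
    {"role": "user", "content": "什么是向量库?"},
    {"role": "assistant", "content": "向量库用于存储和检索文本向量。"},
]

def get_messages_history(history_len, log):
    # return at most the last `history_len` recorded messages
    return log[-history_len:] if history_len > 0 else []

query = "它和倒排索引有什么区别?"

# Buggy order: record the question first, then fetch history.
chat_log.append({"role": "user", "content": query})
history = get_messages_history(10, chat_log)
assert history[-1]["content"] == query   # the current input leaks into history

# Fixed order: fetch history once, before recording the current question,
# and send `query` separately as the request's `query` field.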
@@ -58,9 +58,6 @@ async def knowledge_base_chat(query: str = Body(..., description="用户输入",
 
         prompt_template = get_prompt_template(prompt_name)
         input_msg = History(role="user", content=prompt_template).to_msg_template(False)
-        # 用户最后一个问题会进入PROMPT_TEMPLATE,不用再作为history 了
-        if len(history) >= 1:
-            history.pop()
         chat_prompt = ChatPromptTemplate.from_messages(
             [i.to_msg_template() for i in history] + [input_msg])
 
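With the client no longer sending the current question inside history, the server-side workaround of popping the last history entry can be dropped, and the prompt is built from the full history plus the templated input. A rough standalone sketch of that assembly, using plain LangChain prompt classes (the to_msg_template helper here approximates the project's History.to_msg_template(), and PROMPT_TEMPLATE is a placeholder, not the project's configured template):

from langchain.prompts.chat import (
    AIMessagePromptTemplate,
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
)

# Placeholder prompt; the real PROMPT_TEMPLATE comes from the project's configs.
PROMPT_TEMPLATE = "已知信息:{context}\n\n根据已知信息回答问题:{question}"

# History as received from the client -- it does not contain the current question.
history = [
    ("user", "你好"),
    ("assistant", "你好,请问有什么可以帮你?"),
]

def to_msg_template(role, text):
    # crude stand-in for History.to_msg_template(); the history entries here
    # contain no template variables, so plain message templates are safe
    cls = HumanMessagePromptTemplate if role == "user" else AIMessagePromptTemplate
    return cls.from_template(text)

input_msg = HumanMessagePromptTemplate.from_template(PROMPT_TEMPLATE)
chat_prompt = ChatPromptTemplate.from_messages(
    [to_msg_template(r, t) for r, t in history] + [input_msg]
)
print(chat_prompt.format(context="(检索到的文档片段)", question="什么是向量库?"))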
@@ -167,7 +167,6 @@ def dialogue_page(api: ApiRequest):
             chat_box.update_msg(text, streaming=False)  # 更新最终的字符串,去除光标
 
         elif dialogue_mode == "知识库问答":
-            history = get_messages_history(history_len)
             chat_box.ai_say([
                 f"正在查询知识库 `{selected_kb}` ...",
                 Markdown("...", in_expander=True, title="知识库匹配结果"),
@@ -196,6 +195,7 @@ def dialogue_page(api: ApiRequest):
             for d in api.search_engine_chat(prompt,
                                             search_engine_name=search_engine,
                                             top_k=se_top_k,
+                                            history=history,
                                             model=llm_model,
                                             temperature=temperature):
                 if error_msg := check_error_msg(d):  # check whether error occured
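The net effect of the two webui hunks above is that history is fetched once per turn and then reused by both the knowledge-base branch and the now history-aware search-engine branch. A condensed sketch of that flow (handle_turn and this get_messages_history are hypothetical stand-ins; the chat_box/streaming plumbing is omitted, call signatures are abbreviated, and argument values like "samples" and "duckduckgo" are illustrative):

from typing import Dict, List

def get_messages_history(history_len: int, messages: List[Dict]) -> List[Dict]:
    # simplified stand-in: the last `history_len` finished messages, which do
    # not include the question currently being asked
    return messages[-history_len:] if history_len > 0 else []

def handle_turn(api, prompt: str, dialogue_mode: str,
                messages: List[Dict], history_len: int = 3):
    # fetch history exactly once; the knowledge-base branch no longer refetches it
    history = get_messages_history(history_len, messages)

    if dialogue_mode == "知识库问答":
        return api.knowledge_base_chat(prompt,
                                       knowledge_base_name="samples",
                                       history=history)
    elif dialogue_mode == "搜索引擎问答":
        # new in this commit: the search-engine branch forwards history too
        return api.search_engine_chat(prompt,
                                      search_engine_name="duckduckgo",
                                      history=history)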
@@ -429,6 +429,7 @@ class ApiRequest:
         query: str,
         search_engine_name: str,
         top_k: int = SEARCH_ENGINE_TOP_K,
+        history: List[Dict] = [],
         stream: bool = True,
         model: str = LLM_MODEL,
         temperature: float = TEMPERATURE,
@@ -445,6 +446,7 @@ class ApiRequest:
             "query": query,
             "search_engine_name": search_engine_name,
             "top_k": top_k,
+            "history": history,
             "stream": stream,
             "model_name": model,
             "temperature": temperature,
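Client-side, ApiRequest.search_engine_chat now accepts a history list and forwards it in the request body. A rough standalone equivalent of that call using requests (the endpoint path, default values, and example arguments below are assumptions based on the project's conventions, not copied from this diff):

from typing import Dict, List, Optional
import requests

def search_engine_chat(base_url: str,
                       query: str,
                       search_engine_name: str,
                       top_k: int = 3,
                       history: Optional[List[Dict]] = None,
                       stream: bool = True,
                       model: str = "chatglm2-6b",
                       temperature: float = 0.7) -> requests.Response:
    data = {
        "query": query,
        "search_engine_name": search_engine_name,
        "top_k": top_k,
        "history": history or [],   # field added by this commit
        "stream": stream,
        "model_name": model,
        "temperature": temperature,
    }
    # the real client streams and post-processes the response; a plain POST is
    # enough to show the payload shape
    return requests.post(f"{base_url}/chat/search_engine_chat",
                         json=data, stream=stream)

# Example: history entries are dicts with "role" and "content" keys.
# resp = search_engine_chat("http://127.0.0.1:7861", "最新进展是什么?", "duckduckgo",
#                           history=[{"role": "user", "content": "你好"},
#                                    {"role": "assistant", "content": "你好!"}])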