mirror of
https://github.com/RYDE-WORK/Langchain-Chatchat.git
synced 2026-01-25 00:05:58 +08:00
- 重构 api.py:
- 按模块划分为不同的 router
- 添加 openai 兼容的转发接口,项目默认使用该接口以实现模型负载均衡
- 添加 /tools 接口,可以获取/调用编写的 agent tools
- 移除所有 EmbeddingFuncAdapter,统一改用 get_Embeddings
- 待办:
- /chat/chat 接口改为 openai 兼容
- 添加 /chat/kb_chat 接口,openai 兼容
- 改变 nltk/knowledge_base/logs 等数据目录位置
47 lines
1.4 KiB
Python
from functools import lru_cache
|
||
from server.pydantic_types import BaseModel, Field
|
||
from langchain.prompts.chat import ChatMessagePromptTemplate
|
||
from configs import logger, log_verbose
|
||
from typing import List, Tuple, Dict, Union
|
||
|
||
|
||
class History(BaseModel):
    """A single message in a conversation history.

    Can be constructed from a dict, e.g.:
        h = History(**{"role": "user", "content": "你好"})
    and converted to a LangChain-style tuple, e.g.:
        h.to_msg_tuple() == ("human", "你好")
    """
    # Sender role: OpenAI-style ("user"/"assistant") or
    # LangChain-style ("human"/"ai") strings are both accepted.
    role: str = Field(...)
    # Raw message text.
    content: str = Field(...)

    def to_msg_tuple(self) -> Tuple[str, str]:
        """Return the message as a LangChain-style ``(role, content)`` tuple.

        ``"assistant"`` is normalized to ``"ai"``; any other role maps to
        ``"human"``.
        """
        return "ai" if self.role == "assistant" else "human", self.content

    def to_msg_template(self, is_raw: bool = True) -> ChatMessagePromptTemplate:
        """Convert the message into a jinja2 ``ChatMessagePromptTemplate``.

        Args:
            is_raw: when True (default), wrap the content in
                ``{% raw %} ... {% endraw %}`` so literal braces in the
                history text are not treated as template variables.
        """
        # Map LangChain-style roles back to OpenAI-style ones; unknown
        # roles pass through unchanged.
        role_maps = {
            "ai": "assistant",
            "human": "user",
        }
        role = role_maps.get(self.role, self.role)
        if is_raw:  # 当前默认历史消息都是没有input_variable的文本。
            content = "{% raw %}" + self.content + "{% endraw %}"
        else:
            content = self.content

        return ChatMessagePromptTemplate.from_template(
            content,
            "jinja2",
            role=role,
        )

    @classmethod
    def from_data(cls, h: Union[List, Tuple, Dict]) -> "History":
        """Build a ``History`` from a ``(role, content)`` sequence or a dict.

        Inputs matching neither shape are returned unchanged (no error is
        raised, preserving the original best-effort behavior).
        """
        if isinstance(h, (list, tuple)) and len(h) >= 2:
            h = cls(role=h[0], content=h[1])
        elif isinstance(h, dict):
            h = cls(**h)

        return h
|