diff --git a/chains/llmchain_with_history.py b/chains/llmchain_with_history.py
index 2dbdff03..9707c00c 100644
--- a/chains/llmchain_with_history.py
+++ b/chains/llmchain_with_history.py
@@ -1,6 +1,6 @@
 from server.utils import get_ChatOpenAI
 from configs.model_config import LLM_MODEL, TEMPERATURE
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.prompts.chat import (
     ChatPromptTemplate,
     HumanMessagePromptTemplate,
diff --git a/server/agent/custom_template.py b/server/agent/custom_template.py
index e427c0d7..b59f9b33 100644
--- a/server/agent/custom_template.py
+++ b/server/agent/custom_template.py
@@ -45,7 +45,9 @@ Thought:
 from langchain.agents import Tool, AgentExecutor, LLMSingleActionAgent, AgentOutputParser
 from langchain.prompts import StringPromptTemplate
-from langchain import OpenAI, SerpAPIWrapper, LLMChain
+from langchain.llms import OpenAI
+from langchain.utilities import SerpAPIWrapper
+from langchain.chains import LLMChain
 from typing import List, Union
 from langchain.schema import AgentAction, AgentFinish, OutputParserException
 from server.agent.tools import tools
diff --git a/server/agent/math.py b/server/agent/math.py
index aa6c7593..a00667af 100644
--- a/server/agent/math.py
+++ b/server/agent/math.py
@@ -1,4 +1,4 @@
-from langchain import PromptTemplate
+from langchain.prompts import PromptTemplate
 from langchain.chains import LLMMathChain
 from server.utils import wrap_done, get_ChatOpenAI
 from configs.model_config import LLM_MODEL, TEMPERATURE
diff --git a/server/agent/translator.py b/server/agent/translator.py
index c740d0bf..65e79ae1 100644
--- a/server/agent/translator.py
+++ b/server/agent/translator.py
@@ -1,4 +1,5 @@
-from langchain import PromptTemplate, LLMChain
+from langchain.prompts import PromptTemplate
+from langchain.chains import LLMChain
 import sys
 import os
diff --git a/server/agent/weather.py b/server/agent/weather.py
index 60131731..9cf0bdf2 100644
--- a/server/agent/weather.py
+++ b/server/agent/weather.py
@@ -262,7 +262,7 @@ class LLMWeatherChain(Chain):
         return cls(llm_chain=llm_chain, **kwargs)
-from langchain import PromptTemplate
+from langchain.prompts import PromptTemplate
 _PROMPT_TEMPLATE = """用户将会向您咨询天气问题,您不需要自己回答天气问题,而是将用户提问的信息提取出来区,市和时间三个元素后使用我为你编写好的工具进行查询并返回结果,格式为 区+市+时间 每个元素用空格隔开。如果缺少信息,则用 None 代替。
 问题: ${{用户的问题}}
diff --git a/server/chat/agent_chat.py b/server/chat/agent_chat.py
index 80313d3d..58942396 100644
--- a/server/chat/agent_chat.py
+++ b/server/chat/agent_chat.py
@@ -7,7 +7,7 @@ from fastapi import Body
 from fastapi.responses import StreamingResponse
 from configs.model_config import LLM_MODEL, TEMPERATURE, HISTORY_LEN
 from server.utils import wrap_done, get_ChatOpenAI
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from typing import AsyncIterable
 import asyncio
 from langchain.prompts.chat import ChatPromptTemplate
diff --git a/server/chat/chat.py b/server/chat/chat.py
index 15a0250c..6d3c9ce5 100644
--- a/server/chat/chat.py
+++ b/server/chat/chat.py
@@ -2,7 +2,7 @@ from fastapi import Body
 from fastapi.responses import StreamingResponse
 from configs import LLM_MODEL, TEMPERATURE
 from server.utils import wrap_done, get_ChatOpenAI
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from typing import AsyncIterable
 import asyncio
diff --git a/server/chat/knowledge_base_chat.py b/server/chat/knowledge_base_chat.py
index f5cee99f..9c70ee59 100644
--- a/server/chat/knowledge_base_chat.py
+++ b/server/chat/knowledge_base_chat.py
@@ -3,7 +3,7 @@ from fastapi.responses import StreamingResponse
 from configs import (LLM_MODEL, VECTOR_SEARCH_TOP_K, SCORE_THRESHOLD, TEMPERATURE)
 from server.utils import wrap_done, get_ChatOpenAI
 from server.utils import BaseResponse, get_prompt_template
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from typing import AsyncIterable, List, Optional
 import asyncio
diff --git a/server/chat/search_engine_chat.py b/server/chat/search_engine_chat.py
index 24845aad..00708b72 100644
--- a/server/chat/search_engine_chat.py
+++ b/server/chat/search_engine_chat.py
@@ -6,7 +6,7 @@ from fastapi.responses import StreamingResponse
 from fastapi.concurrency import run_in_threadpool
 from server.utils import wrap_done, get_ChatOpenAI
 from server.utils import BaseResponse, get_prompt_template
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.callbacks import AsyncIteratorCallbackHandler
 from typing import AsyncIterable
 import asyncio
diff --git a/tests/agent/test_agent_function.py b/tests/agent/test_agent_function.py
index 0ee9863c..e860cb7a 100644
--- a/tests/agent/test_agent_function.py
+++ b/tests/agent/test_agent_function.py
@@ -3,7 +3,7 @@ import os
 sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
 from configs import LLM_MODEL, TEMPERATURE
 from server.utils import get_ChatOpenAI
-from langchain import LLMChain
+from langchain.chains import LLMChain
 from langchain.agents import LLMSingleActionAgent, AgentExecutor
 from server.agent.tools import tools, tool_names
 from langchain.memory import ConversationBufferWindowMemory
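
For reference, a minimal sketch (not part of the patch) of how the migrated import paths are used after this change; the prompt text and the OpenAI LLM below are illustrative assumptions, not code from this repository:

```python
# Hypothetical usage sketch: only the import paths change, chain construction stays the same.
from langchain.prompts import PromptTemplate   # previously: from langchain import PromptTemplate
from langchain.chains import LLMChain          # previously: from langchain import LLMChain
from langchain.llms import OpenAI              # previously: from langchain import OpenAI

# Build and run an LLMChain exactly as before the import migration.
prompt = PromptTemplate.from_template("Answer the question: {question}")  # illustrative prompt
chain = LLMChain(llm=OpenAI(temperature=0), prompt=prompt)
print(chain.run(question="What is LangChain?"))
```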