From 0b2c03b6e962ba542f9e32bd16bb0ae75f923cdf Mon Sep 17 00:00:00 2001
From: leehk
Date: Fri, 7 Mar 2025 21:42:57 +0800
Subject: [PATCH] update

---
 .gitignore                                |  4 ++-
 .../src/rag_cot_evaluation/python_env.yml |  1 +
 app/streamlit/Chatbot.py                  |  7 -----
 app/streamlit/Dockerfile                  |  5 +++-
 app/streamlit/requirements.txt            | 28 +++++++++----------
 5 files changed, 22 insertions(+), 23 deletions(-)

diff --git a/.gitignore b/.gitignore
index 655ed34..ffc3c5e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -206,4 +206,6 @@ data/*
 **/llm-template2/*
 **/llmops/outputs/*
 **/*.zip
-**/llm-examples/*
\ No newline at end of file
+**/llm-examples/*
+**/*.ipynb_checkpoints
+**/*.ipynb
\ No newline at end of file
diff --git a/app/llmops/src/rag_cot_evaluation/python_env.yml b/app/llmops/src/rag_cot_evaluation/python_env.yml
index 7d91512..35b30a6 100644
--- a/app/llmops/src/rag_cot_evaluation/python_env.yml
+++ b/app/llmops/src/rag_cot_evaluation/python_env.yml
@@ -12,6 +12,7 @@ build_dependencies:
   - langchain_google_genai
   - langchain-deepseek
   - langchain-community
+  - mlflow[genai]
 # Dependencies required to run the project.
 dependencies:
   - mlflow==2.8.1
\ No newline at end of file
diff --git a/app/streamlit/Chatbot.py b/app/streamlit/Chatbot.py
index ce971e5..00ae5ee 100644
--- a/app/streamlit/Chatbot.py
+++ b/app/streamlit/Chatbot.py
@@ -11,9 +11,6 @@ from langchain_community.llms.moonshot import Moonshot
 
 import torch
 torch.classes.__path__ = [os.path.join(torch.__path__[0], torch.classes.__file__)]
-# # # or simply:
-# torch.classes.__path__ = []
-
 os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
 GEMINI_API_KEY = config("GOOGLE_API_KEY", cast=str, default="123456")
@@ -31,15 +28,12 @@ if "messages" not in st.session_state:
 for msg in st.session_state.messages:
     st.chat_message(msg["role"]).write(msg["content"])
 
-print('i am here1')
 # Load data from ChromaDB
 chroma_client = chromadb.PersistentClient(path=INPUT_CHROMADB_LOCAL)
 collection = chroma_client.get_collection(name=COLLECTION_NAME)
-print('i am here2')
 
 # Initialize embedding model
 model = SentenceTransformer(EMBEDDING_MODEL)
-print('i am here3')
 
 if CHAT_MODEL_PROVIDER == "deepseek":
     # Initialize DeepSeek model
@@ -88,7 +82,6 @@ Provide the answer with language that is similar to the question asked.
 """
 answer_prompt = PromptTemplate(template=answer_template, input_variables=["cot", "question"])
 answer_chain = answer_prompt | llm
-print('i am here4')
 
 
 if prompt := st.chat_input():
diff --git a/app/streamlit/Dockerfile b/app/streamlit/Dockerfile
index 1c55f0c..d582af3 100644
--- a/app/streamlit/Dockerfile
+++ b/app/streamlit/Dockerfile
@@ -4,7 +4,8 @@ WORKDIR /app/streamlit
 
 COPY requirements.txt ./
 
-RUN pip install --no-cache-dir -r requirements.txt
+# RUN pip install --no-cache-dir -r requirements.txt
+RUN pip install -r requirements.txt
 
 COPY Chatbot.py .
 COPY .env .
@@ -13,6 +14,8 @@ COPY .env .
 
 COPY initialize_sentence_transformer.py .
 RUN python initialize_sentence_transformer.py
+COPY pages ./pages
+
 
 EXPOSE 8501
 ENTRYPOINT ["streamlit", "run", "Chatbot.py"]
\ No newline at end of file
diff --git a/app/streamlit/requirements.txt b/app/streamlit/requirements.txt
index a7df356..8a3f02d 100644
--- a/app/streamlit/requirements.txt
+++ b/app/streamlit/requirements.txt
@@ -1,14 +1,14 @@
-streamlit>=1.28
-langchain>=0.0.217
-openai>=1.2
-duckduckgo-search
-anthropic>=0.3.0
-trubrics>=1.4.3
-streamlit-feedback
-langchain-community
-chromadb
-python-decouple
-langchain_google_genai
-langchain-deepseek
-sentence_transformers
-watchdog
\ No newline at end of file
+streamlit==1.28.0
+langchain
+openai==1.65.4
+duckduckgo_search==7.5.0
+anthropic==0.49.0
+trubrics==1.8.3
+streamlit-feedback==0.1.4
+langchain-community==0.3.19
+chromadb==0.6.3
+python-decouple==3.8
+langchain-google-genai==2.0.10
+langchain-deepseek==0.1.2
+sentence-transformers==3.4.1
+watchdog==6.0.0
\ No newline at end of file