Mirror of https://github.com/aimingmed/aimingmed-ai.git (synced 2026-01-19 13:23:23 +08:00)

Commit e5cb3b73c7 ("update"), parent d681f86fea
.vscode/settings.json (vendored, new file, +7 lines)
@@ -0,0 +1,7 @@
+{
+    "python.testing.pytestArgs": [
+        "app"
+    ],
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
+}
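These workspace settings point the VS Code test explorer at pytest, with the app directory as the test root and the unittest runner disabled. Outside the editor the same discovery is just pytest run against app; a minimal sketch of an equivalent invocation is shown below (the helper file name is hypothetical and is not part of this commit).

    # run_tests.py (hypothetical helper): invoke pytest programmatically with the
    # same argument that "python.testing.pytestArgs" hands to the VS Code runner.
    # Assumes it is executed from the repository root, where the app/ directory lives.
    import sys

    import pytest

    if __name__ == "__main__":
        # Equivalent to running `pytest app` on the command line.
        sys.exit(pytest.main(["app"]))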
app/streamlit/.github/workflows/app-testing.yml (vendored, new file, +30 lines)
@@ -0,0 +1,30 @@
+name: App testing
+
+on:
+  push:
+    branches: [ "main" ]
+  pull_request:
+    branches: [ "main" ]
+
+permissions:
+  contents: read
+
+jobs:
+  streamlit:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.11'
+      - uses: streamlit/streamlit-app-action@v0.0.3
+        with:
+          app-path: Chatbot.py
+          ruff: true
+          pytest-args: -v --junit-xml=test-results.xml
+      - if: always()
+        uses: pmeier/pytest-results-action@v0.6.0
+        with:
+          path: test-results.xml
+          summary: true
+          display-options: fEX
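The streamlit-app-action step lints with ruff and runs the repository's pytest suite against Chatbot.py, and the pytest-results-action step then publishes test-results.xml as a job summary. As a rough local equivalent of the action's built-in smoke test, a sketch using Streamlit's AppTest harness follows; the file name test_smoke.py, the Streamlit version requirement, and the working directory are assumptions, not part of this commit.

    # test_smoke.py (hypothetical): a minimal smoke test in the spirit of
    # streamlit-app-action, assuming streamlit>=1.28 (which ships
    # streamlit.testing.v1.AppTest) and a working directory containing Chatbot.py.
    from streamlit.testing.v1 import AppTest


    def test_app_runs_without_exceptions():
        # Execute the app script once, allowing extra time for client setup.
        at = AppTest.from_file("Chatbot.py", default_timeout=30).run()
        # The script should complete without any uncaught exception.
        assert not at.exception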
@@ -1,5 +1,4 @@
 import os
-import subprocess
 import streamlit as st
 import chromadb
 from decouple import config
@@ -1,39 +1,47 @@
-import pytest
-import streamlit as st
-from unittest.mock import patch
-
-# add app/streamlit to sys.path
-import sys
-sys.path.insert(0, "/Users/leehongkai/projects/aimingmed/aimingmed-ai/app/streamlit")
-
-from unittest.mock import patch, MagicMock
-
-
-def test_title():
-    with patch("streamlit.title") as mock_title, \
-         patch("streamlit.session_state", new_callable=MagicMock) as mock_session_state:
-        import Chatbot
-        st.session_state["messages"] = []
-        mock_title.assert_called_once_with("💬 RAG AI for Medical Guideline")
-
-def test_caption():
-    with patch("streamlit.caption") as mock_caption, \
-         patch("streamlit.session_state", new_callable=MagicMock) as mock_session_state:
-        import Chatbot
-        st.session_state["messages"] = []
-        mock_caption.assert_called()
-
-def test_chat_input():
-    with patch("streamlit.chat_input", return_value="test_prompt") as mock_chat_input, \
-         patch("streamlit.session_state", new_callable=MagicMock) as mock_session_state:
-        import Chatbot
-        st.session_state["messages"] = []
-        mock_chat_input.assert_called_once()
-
-def test_chat_message():
-    with patch("streamlit.chat_message") as mock_chat_message, \
-         patch("streamlit.session_state", new_callable=MagicMock) as mock_session_state:
-        with patch("streamlit.chat_input", return_value="test_prompt"):
-            import Chatbot
-            st.session_state["messages"] = []
-            mock_chat_message.assert_called()
+import os
+import pytest
+import chromadb
+from langchain.prompts import PromptTemplate
+from langchain_google_genai import ChatGoogleGenerativeAI
+from langchain_deepseek import ChatDeepSeek
+from langchain_community.llms.moonshot import Moonshot
+
+import sys
+sys.path.append(".")
+import streamlit as st
+import pytest
+from unittest.mock import patch
+from Chatbot import CHAT_MODEL_PROVIDER, INPUT_CHROMADB_LOCAL, COLLECTION_NAME, cot_template, answer_template
+
+@pytest.fixture(autouse=True)
+def mock_session_state():
+    with patch.object(st, "session_state", {"messages": []}):
+        yield
+
+def test_prompt_templates():
+    # Test that the prompt templates are correctly formatted
+    assert "documents_text" in cot_template
+    assert "question" in cot_template
+    assert "cot" in answer_template
+    assert "question" in answer_template
+
+def test_chromadb_connection():
+    # Test that the ChromaDB client is initialized correctly
+    chroma_client = chromadb.PersistentClient(path=INPUT_CHROMADB_LOCAL)
+    collection = chroma_client.get_collection(name=COLLECTION_NAME)
+    assert collection is not None
+
+@pytest.mark.skipif(CHAT_MODEL_PROVIDER not in ["deepseek", "gemini", "moonshot"], reason="requires a valid CHAT_MODEL_PROVIDER")
+def test_llm_initialization():
+    # Test that the correct LLM is initialized based on the CHAT_MODEL_PROVIDER environment variable
+    if CHAT_MODEL_PROVIDER == "deepseek":
+        llm = ChatDeepSeek(model="deepseek-chat")
+        assert isinstance(llm, ChatDeepSeek)
+    elif CHAT_MODEL_PROVIDER == "gemini":
+        llm = ChatGoogleGenerativeAI(model="gemini-1.5-flash")
+        assert isinstance(llm, ChatGoogleGenerativeAI)
+    elif CHAT_MODEL_PROVIDER == "moonshot":
+        llm = Moonshot(model="moonshot-v1-128k")
+        assert isinstance(llm, Moonshot)
+        llm = Moonshot(model="moonshot-v1-128k")
+        assert isinstance(llm, Moonshot)
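The rewritten tests import module-level names from Chatbot (CHAT_MODEL_PROVIDER, INPUT_CHROMADB_LOCAL, COLLECTION_NAME, cot_template, answer_template) instead of mocking Streamlit widgets. For orientation, a hedged sketch of how those names might be defined in Chatbot.py follows; the values, environment-variable names, and template wording are assumptions for illustration and are not taken from this diff.

    # Hypothetical sketch of the Chatbot.py module-level names the tests rely on.
    # Only the identifiers come from the diff; every value below is assumed.
    from decouple import config

    # Which chat backend to build: "deepseek", "gemini", or "moonshot".
    CHAT_MODEL_PROVIDER = config("CHAT_MODEL_PROVIDER", default="deepseek")

    # Local ChromaDB persistence path and the collection holding guideline chunks.
    INPUT_CHROMADB_LOCAL = config("INPUT_CHROMADB_LOCAL", default="./chromadb")
    COLLECTION_NAME = config("COLLECTION_NAME", default="medical_guidelines")

    # Prompt templates; test_prompt_templates only checks that the placeholder
    # names appear in these strings.
    cot_template = (
        "Think step by step about the question using the retrieved context.\n"
        "Context: {documents_text}\n"
        "Question: {question}"
    )
    answer_template = (
        "Use the reasoning below to give a concise final answer.\n"
        "Reasoning: {cot}\n"
        "Question: {question}"
    )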