Merge pull request #61 from aimingmed/feature/backend-frontend-structure

try
This commit is contained in:
Hong Kai LEE 2025-04-17 13:13:01 +08:00 committed by GitHub
commit a49935b4d1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 109 additions and 95 deletions

View File

@ -1,4 +1,4 @@
name: Build
name: Build + CI
# Triggers: Equivalent to ADO trigger block
on:
@ -36,7 +36,7 @@ jobs:
image_config:
- IMAGE_NAME: backend-aimingmedai
BUILD_CONTEXT: ./app/backend
DOCKERFILE: ./app/backend/Dockerfile
DOCKERFILE: ./app/backend/Dockerfile.prod
- IMAGE_NAME: frontend-aimingmedai
BUILD_CONTEXT: ./app/frontend
DOCKERFILE: ./app/frontend/Dockerfile.test
@ -73,17 +73,17 @@ jobs:
testContainerName: tests-aimingmedai
# Pass test environment variables as JSON string
testEnvs: >
[
'[
"FRONTEND_URL=http://frontend:80",
"BACKEND_URL=http://backend:80",
"ENVIRONMENT=dev",
"TESTING=1",
]
]'
# Pass test directories as JSON string
tests: >
[
'[
"tests/integration/backend",
]
]'
# Pass image definitions for compose setup as JSON string
# Sensitive values should be passed via secrets and referenced within the template
images: >

View File

@ -146,8 +146,8 @@ jobs:
- name: Run Tests
shell: bash
run: |
TEST_DIRS="${{ inputs.tests }}"
TEST_ENVS_JSON="${{ inputs.testEnvs }}"
TEST_DIRS='["tests/integration/backend"]'
TEST_ENVS_JSON='["FRONTEND_URL=http://frontend:80","BACKEND_URL=http://backend:80","ENVIRONMENT=dev","TESTING=1"]'
RESULTS_PATH="${{ inputs.testResultsPath }}"
STAGING_DIR="${{ runner.temp }}/test-results" # Use runner temp dir for results
mkdir -p "$STAGING_DIR"
@ -156,10 +156,9 @@ jobs:
ENV_ARGS=""
if [[ "$TEST_ENVS_JSON" != "[]" ]]; then
# Convert JSON array string to individual env vars
IFS=',' read -r -a env_array <<< $(echo "$TEST_ENVS_JSON" | jq -r '.[][]')
for env in "${env_array[@]}"; do
ENV_ARGS+=" -e \"$env\""
done
while IFS= read -r line; do
ENV_ARGS+=" -e \"$line\""
done <<< $(echo "$TEST_ENVS_JSON" | jq -r '.[]')
else
# Add a dummy env var if none are provided, as required by original script logic
ENV_ARGS+=" -e DUMMY_ENV_TEST_RUN_ID=${{ github.run_id }}"

View File

@ -1,5 +1,5 @@
# pull official base image
FROM python:3.11-slim-bullseye AS base
FROM python:3.11-slim-bullseye
# create directory for the app user
RUN mkdir -p /home/app
@ -16,13 +16,15 @@ WORKDIR $APP_HOME
# set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV ENVIRONMENT=dev
ENV TESTING=1
# add app
COPY . $APP_HOME
# install python dependencies
RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pipenv && rm -rf ~/.cache/pip
RUN pipenv install --deploy
RUN pipenv install --deploy --dev
# chown all the files to the app user
RUN chown -R app:app $APP_HOME
@ -30,27 +32,10 @@ RUN chown -R app:app $APP_HOME
# change to the app user
USER app
# TEST
FROM base AS test
ENV ENVIRONMENT=dev
ENV TESTING=1
RUN pipenv install --deploy --dev
# run tests
# pytest
RUN pipenv run pytest tests --disable-warnings
# BUILD
FROM base AS builder
ENV ENVIRONMENT=prod
ENV TESTING=0
# expose the port the app runs on
EXPOSE 80

View File

@ -5,18 +5,14 @@
# pull official base image
FROM python:3.11-slim-bookworm AS builder
# set working directory
WORKDIR /usr/src/app
# set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# install system dependencies
# RUN apt-get update && apt-get -y install build-essential \
# && apt-get clean \
# && rm -rf /var/lib/apt/lists/*
ENV ENVIRONMENT=dev
ENV TESTING=1
# install python dependencies
RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pipenv && rm -rf ~/.cache/pip
@ -25,10 +21,10 @@ RUN pipenv install --deploy --dev
# add app
COPY . /usr/src/app
# RUN pipenv run pip install black==23.12.1 flake8==7.0.0 isort==5.13.2
# RUN pipenv run flake8 .
# RUN pipenv run black --exclude=migrations . --check
# RUN pipenv run isort . --check-only
RUN pipenv run pytest tests --disable-warnings
RUN pipenv run flake8 .
RUN pipenv run black --exclude=migrations . --check
RUN pipenv run isort . --check-only
#########
# FINAL #
@ -58,9 +54,9 @@ ENV TESTING=0
# install python dependencies
COPY --from=builder /usr/src/app/Pipfile .
RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pipenv && rm -rf ~/.cache/pip
RUN pipenv install --deploy --dev
COPY --from=builder /usr/src/app/Pipfile .
RUN pipenv install --deploy
RUN pipenv run pip install "uvicorn[standard]==0.26.0"
# add app
@ -73,7 +69,7 @@ RUN chown -R app:app $APP_HOME
USER app
# expose the port the app runs on
EXPOSE 8765
EXPOSE 80
# run uvicorn
CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "80"]
CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "80"]

View File

@ -5,8 +5,6 @@ from decouple import config
from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from langchain_deepseek import ChatDeepSeek
from models.adaptive_rag import grading, query, routing
from .utils import ConnectionManager
router = APIRouter()
@ -17,7 +15,7 @@ os.environ["TAVILY_API_KEY"] = config("TAVILY_API_KEY", cast=str)
# Initialize the DeepSeek chat model
llm_chat = ChatDeepSeek(
model="deepseek-chat",
model="deepseek-chat",
temperature=0,
max_tokens=None,
timeout=None,
@ -27,28 +25,36 @@ llm_chat = ChatDeepSeek(
# Initialize the connection manager
manager = ConnectionManager()
@router.websocket("/ws")
async def websocket_endpoint(websocket: WebSocket):
await manager.connect(websocket)
try:
while True:
data = await websocket.receive_text()
try:
data_json = json.loads(data)
if isinstance(data_json, list) and len(data_json) > 0 and 'content' in data_json[0]:
async for chunk in llm_chat.astream(data_json[0]['content']):
await manager.send_personal_message(json.dumps({"type": "message", "payload": chunk.content}), websocket)
else:
await manager.send_personal_message("Invalid message format", websocket)
data = await websocket.receive_text()
except json.JSONDecodeError:
await manager.broadcast("Invalid JSON message")
try:
data_json = json.loads(data)
if (
isinstance(data_json, list)
and len(data_json) > 0
and "content" in data_json[0]
):
async for chunk in llm_chat.astream(data_json[0]["content"]):
await manager.send_personal_message(
json.dumps({"type": "message", "payload": chunk.content}),
websocket,
)
else:
await manager.send_personal_message(
"Invalid message format", websocket
)
except json.JSONDecodeError:
await manager.broadcast("Invalid JSON message")
except WebSocketDisconnect:
manager.disconnect(websocket)
await manager.broadcast("Client disconnected")
except WebSocketDisconnect:
manager.disconnect(websocket)
await manager.broadcast("Client disconnected")

View File

@ -22,4 +22,3 @@ class ConnectionManager:
json_message = {"type": "message", "payload": message}
for connection in self.active_connections:
await connection.send_text(json.dumps(json_message))

View File

@ -14,4 +14,4 @@ class Settings(BaseSettings):
@lru_cache()
def get_settings() -> BaseSettings:
log.info("Loading config settings from the environment...")
return Settings()
return Settings()

View File

@ -1,21 +1,19 @@
import logging
import uvicorn
from fastapi import Depends, FastAPI
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from api import chatbot, ping
from config import Settings, get_settings
log = logging.getLogger("uvicorn")
origins = ["http://localhost:8004"]
def create_application() -> FastAPI:
application = FastAPI()
application.include_router(ping.router, tags=["ping"])
application.include_router(
chatbot.router, tags=["chatbot"])
application.include_router(chatbot.router, tags=["chatbot"])
return application
@ -28,7 +26,3 @@ app.add_middleware(
allow_methods=["*"],
allow_headers=["*"],
)
# if __name__ == "__main__":
# uvicorn.run("main:app", host="0.0.0.0", port=80, reload=True)

View File

@ -8,6 +8,7 @@ class GradeDocuments(BaseModel):
description="Documents are relevant to the question, 'yes' or 'no'"
)
class GradeHallucinations(BaseModel):
"""Binary score for hallucination present in generation answer."""
@ -15,9 +16,10 @@ class GradeHallucinations(BaseModel):
description="Answer is grounded in the facts, 'yes' or 'no'"
)
class GradeAnswer(BaseModel):
"""Binary score to assess answer addresses question."""
binary_score: str = Field(
description="Answer addresses the question, 'yes' or 'no'"
)
)

View File

@ -4,6 +4,6 @@ from pydantic import BaseModel, Field
class QueryRequest(BaseModel):
query: str = Field(..., description="The question to ask the model")
class QueryResponse(BaseModel):
response: str = Field(..., description="The model's response")

View File

@ -9,4 +9,4 @@ class RouteQuery(BaseModel):
datasource: Literal["vectorstore", "web_search"] = Field(
...,
description="Given a user question choose to route it to web search or a vectorstore.",
)
)

2
app/backend/setup.cfg Normal file
View File

@ -0,0 +1,2 @@
# flake8 lint configuration for the backend: raise the default 79-char
# line limit to 119 (matches the formatting used in this codebase).
[flake8]
max-line-length = 119

View File

@ -1,11 +0,0 @@
import json
import os
import sys
import unittest
from unittest.mock import AsyncMock, MagicMock
from fastapi import WebSocket, WebSocketDisconnect
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
from api.chatbot import llm_chat, manager, websocket_endpoint

View File

@ -5,11 +5,12 @@ from unittest.mock import AsyncMock, MagicMock
from fastapi import WebSocket
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
from api.utils import ConnectionManager
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")))
# Test for ConnectionManager class
class TestConnectionManager(unittest.IsolatedAsyncioTestCase):
async def asyncSetUp(self):
self.manager = ConnectionManager()
@ -38,8 +39,13 @@ class TestConnectionManager(unittest.IsolatedAsyncioTestCase):
self.manager.active_connections = [mock_websocket1, mock_websocket2]
message = "Broadcast message"
await self.manager.broadcast(message)
mock_websocket1.send_text.assert_awaited_once_with('{"type": "message", "payload": "Broadcast message"}')
mock_websocket2.send_text.assert_awaited_once_with('{"type": "message", "payload": "Broadcast message"}')
mock_websocket1.send_text.assert_awaited_once_with(
'{"type": "message", "payload": "Broadcast message"}'
)
mock_websocket2.send_text.assert_awaited_once_with(
'{"type": "message", "payload": "Broadcast message"}'
)
if __name__ == '__main__':
unittest.main()
if __name__ == "__main__":
unittest.main()

View File

@ -11,7 +11,7 @@ services:
build:
context: ./backend
dockerfile: Dockerfile
container_name: backend
container_name: backend-aimingmedai
platform: linux/amd64
# command: pipenv run uvicorn main:app --reload --workers 1 --host 0.0.0.0 --port 8765
volumes:
@ -26,7 +26,7 @@ services:
build:
context: ./frontend
dockerfile: Dockerfile.test
container_name: frontend
container_name: frontend-aimingmedai
volumes:
- ./frontend:/usr/src/app
- /usr/src/app/node_modules
@ -40,6 +40,10 @@ services:
tests:
build:
context: ./tests
container_name: tests-aimingmedai
# depends_on:
# - backend
# - frontend
environment:
FRONTEND_URL: http://frontend:80
BACKEND_URL: http://backend:80

View File

@ -11,6 +11,9 @@ evonik-dummy = "*"
pyrsistent = "*"
pyjwt = "*"
pydantic = "*"
websockets = "*"
pytest-asyncio = "*"
pytest-cov = "*"
[dev-packages]
autopep8 = "*"

View File

@ -0,0 +1,29 @@
import json

import pytest
import websockets


@pytest.mark.asyncio
async def test_chatbot_integration():
    """Integration test for the chatbot websocket endpoint.

    Connects to the backend container's /ws endpoint, sends one message in
    the expected ``[{"content": ...}]`` shape, and checks that the first
    streamed reply is a JSON envelope with ``type`` and ``payload`` keys.

    Fails (rather than errors) when the backend is unreachable or the reply
    is not valid JSON.
    """
    url = "ws://backend-aimingmedai:80/ws"
    message = [{"content": "Hello"}]

    # Only transport-level failures are converted to pytest.fail here.
    # The previous version caught bare Exception around the assertions,
    # which swallowed AssertionError and misreported genuine test failures
    # as "Request failed".
    try:
        async with websockets.connect(url) as websocket:
            await websocket.send(json.dumps(message))
            response = await websocket.recv()
    except (OSError, websockets.WebSocketException) as e:
        pytest.fail(f"Request failed: {e}")

    assert response is not None

    try:
        response_json = json.loads(response)
    except json.JSONDecodeError:
        pytest.fail("Invalid JSON response")

    assert "type" in response_json
    assert "payload" in response_json
    # NOTE(review): expects the FIRST streamed chunk to carry an empty
    # payload — presumably the model stream's leading chunk; confirm this
    # is intended rather than asserting on the concatenated reply.
    assert response_json["payload"] == ""