Merge pull request #72 from aimingmed/develop

Multi-container unit tests and CI included in the GitHub Actions pipeline
Hong Kai LEE 2025-04-19 09:01:32 +08:00 committed by GitHub
commit 6a7973a4a4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
31 changed files with 1216 additions and 312 deletions


.github/workflows/build.yml (rewritten, 142 → 115 lines; the previous version was moved to .github/workflows/obsolete/build.yml, shown below)

name: Build + CI

# Triggers: Equivalent to ADO trigger block
on:
  pull_request:
    branches:
      - develop
      # paths:
      #   - 'app/**'
      #   - '.github/workflows/**'

# Concurrency control: ensures only one run per branch at a time (equivalent to batch: true)
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  # This job defines the matrix and calls the reusable workflow for each image build
  build:
    name: Build ${{ matrix.image_config.IMAGE_NAME }}
    # Define necessary permissions if needed (e.g., for GitHub Packages)
    permissions:
      contents: read
      packages: write # If pushing to the GitHub Packages registry
    # Use secrets defined in the repository/organization settings;
    # 'inherit' makes all secrets available to the called workflow
    secrets: inherit
    # Define the matrix strategy based on the 'images' object from the original ADO build.yml
    strategy:
      fail-fast: false # Don't cancel other matrix jobs if one fails
      matrix:
        # We wrap the image configuration in a single 'image_config' key
        # to pass it more easily if needed, but primarily access sub-keys directly.
        image_config:
          - IMAGE_NAME: backend-aimingmedai
            BUILD_CONTEXT: ./app/backend
            DOCKERFILE: ./app/backend/Dockerfile.prod
          - IMAGE_NAME: frontend-aimingmedai
            BUILD_CONTEXT: ./app/frontend
            DOCKERFILE: ./app/frontend/Dockerfile.test
          - IMAGE_NAME: tests-aimingmedai
            BUILD_CONTEXT: ./app/tests
            DOCKERFILE: ./app/tests/Dockerfile
    # Call the reusable workflow
    uses: ./.github/workflows/template_build.yml # Path to the reusable workflow file
    # Pass inputs required by the reusable workflow
    with:
      # Pass values from the matrix context and global env
      project_name: aimingmed-ai
      image_repo: "ghcr.io/$(echo $GITHUB_REPOSITORY | tr '[A-Z]' '[a-z]')"
      image_name: ${{ matrix.image_config.IMAGE_NAME }}
      build_context: ${{ matrix.image_config.BUILD_CONTEXT }}
      dockerfile: ${{ matrix.image_config.DOCKERFILE }}
      build_id: ${{ github.run_id }}
      commit_sha: ${{ github.sha }}

  # TEST stage equivalent
  test:
    name: Run Integration Tests
    needs: build # Ensure this job runs after the build job
    # Define necessary permissions if needed (e.g., for GitHub Packages)
    permissions:
      contents: read
      packages: write # If pushing to the GitHub Packages registry
    # Call the reusable workflow for testing
    uses: ./.github/workflows/template_test.yml # Path to the reusable workflow file
    with:
      projectName: aimingmed-ai
      image_repo: ghcr.io/$(echo $GITHUB_REPOSITORY | tr '[A-Z]' '[a-z]')
      testContainerName: tests-aimingmedai
      # Todo: this is not working yet; the testEnvs are not being taken up correctly by Run Tests
      # Pass test environment variables as a JSON string
      testEnvs: >
        '[
          "FRONTEND_URL=http://frontend:80",
          "BACKEND_URL=http://backend:80",
          "ENVIRONMENT=dev",
          "TESTING=1",
        ]'
      # Todo: this is not working yet; the testEnvs are not being taken up correctly by Run Tests
      # Pass test directories as a JSON string
      tests: >
        '[
          "tests/integration/backend",
        ]'
      # Pass image definitions for the compose setup as a JSON string.
      # Sensitive values should be passed via secrets and referenced within the template.
      images: >
        [
          {
            "name": "backend-aimingmedai",
            "ports": ["8004:80"],
            "env": {
              "ENVIRONMENT": "dev",
              "TESTING": "1",
              "DEEPSEEK_API_KEY": "sk-XXXXXXXXXX",
              "TAVILY_API_KEY": "tvly-dev-wXXXXXX"
            }
          },
          {
            "name": "frontend-aimingmedai",
            "ports": ["3004:80"],
            "depends_on": ["backend-aimingmedai"],
            "env": {
              "ENVIRONMENT": "dev",
              "TESTING": "1",
              "LOG_LEVEL": "DEBUG"
            }
          }
        ]
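Note on the `image_repo` values above: GitHub Actions does not pass `with:` inputs through a shell, so the `$(echo $GITHUB_REPOSITORY | tr '[A-Z]' '[a-z]')` substitution reaches the reusable workflows as literal text. A minimal sketch of the usual workaround (hypothetical job and step names, not part of this PR) computes the lowercased path in a step and wires it through as a job output:

```yaml
jobs:
  prepare:
    runs-on: ubuntu-latest
    outputs:
      image_repo: ${{ steps.repo.outputs.image_repo }}
    steps:
      - name: Compute lowercase image repo
        id: repo
        # ${GITHUB_REPOSITORY,,} is bash's lowercasing parameter expansion
        run: echo "image_repo=ghcr.io/${GITHUB_REPOSITORY,,}" >> "$GITHUB_OUTPUT"

  build:
    needs: prepare
    uses: ./.github/workflows/template_build.yml
    with:
      image_repo: ${{ needs.prepare.outputs.image_repo }}
```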

.github/workflows/obsolete/build.yml (new file; the previous CI workflow, kept for reference)

@ -0,0 +1,142 @@
name: CI - build
on:
pull_request:
branches:
- develop
- main
env:
IMAGE: ghcr.io/$(echo $GITHUB_REPOSITORY | tr '[A-Z]' '[a-z]')/aimingmed-ai-backend
jobs:
build:
name: Build Docker Image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Check disk space
run: df -h
- name: Cleanup Docker resources
if: always()
run: |
docker system prune -a -f --volumes
- name: Remove unnecessary files
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Check disk space
run: df -h
- name: Checkout
uses: actions/checkout@v3
with:
ref: develop
- name: Log in to GitHub Packages
run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Pull image
run: |
docker pull ${{ env.IMAGE }}:latest || true
- name: Check disk space
if: always()
run: df -h
- name: Build image
run: |
docker build \
--cache-from ${{ env.IMAGE }}:latest \
--tag ${{ env.IMAGE }}:latest \
--file ./app/backend/Dockerfile.prod \
"./app/backend"
- name: Push image
run: |
docker push ${{ env.IMAGE }}:latest
- name: Check disk space
if: always()
run: df -h
- name: Cleanup Docker resources
if: always()
run: docker system prune -a -f --volumes
- name: Check disk space
if: always()
run: df -h
test:
name: Test Docker Image
runs-on: ubuntu-latest
needs: build
permissions:
contents: read
packages: write
steps:
- name: Check disk space
run: df -h
- name: Checkout
uses: actions/checkout@v3
with:
ref: develop
- name: Log in to GitHub Packages
run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Cleanup Docker resources
if: always()
run: docker system prune -a -f --volumes
- name: Remove unnecessary files
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Pull image
run: |
docker pull ${{ env.IMAGE }}:latest || true
- name: Check disk space
if: always()
run: df -h
- name: Build image
run: |
docker build \
--cache-from ${{ env.IMAGE }}:latest \
--tag ${{ env.IMAGE }}:latest \
--file ./app/backend/Dockerfile.prod \
"./app/backend"
- name: Check disk space
if: always()
run: df -h
- name: Validate Docker image
run: docker inspect ${{ env.IMAGE }}:latest
- name: Run container
run: |
docker run \
-d \
-e DEEPSEEK_API_KEY=${{ secrets.DEEPSEEK_API_KEY }} \
-e TAVILY_API_KEY=${{ secrets.TAVILY_API_KEY }} \
-e ENVIRONMENT=dev \
-e TESTING=0 \
-e PORT=8765 \
-e LOG_LEVEL=DEBUG \
--name backend-backend \
-p 8004:8765 \
${{ env.IMAGE }}:latest
- name: Monitor memory usage
run: free -h
- name: Get container logs
if: failure()
run: docker logs backend-backend
- name: Pytest
run: docker exec backend-backend pipenv run python -m pytest .
# - name: Flake8
# run: docker exec backend-backend pipenv run python -m flake8 .
# - name: Black
# run: docker exec backend-backend pipenv run python -m black . --check
- name: isort
if: always()
run: docker exec backend-backend pipenv run python -m isort . --check-only
- name: Cleanup container at end of job
if: always()
run: docker stop backend-backend || true && docker rm backend-backend || true

.github/workflows/template_build.yml (new file)

@ -0,0 +1,102 @@
name: Reusable Docker Build Template
# Define inputs expected from the calling workflow
on:
workflow_call:
inputs:
project_name:
required: true
type: string
image_repo:
required: true
type: string
image_name:
required: true
type: string
build_context:
required: true
type: string
dockerfile:
required: true
type: string
build_id:
required: true
type: string # Pass run_id as string
commit_sha:
required: true
type: string
jobs:
build-single-image:
# This job executes the build steps for the specific image configuration passed via inputs
name: Build ${{ inputs.image_name }}
runs-on: ubuntu-latest
timeout-minutes: 120 # From original ADO template
steps:
- name: Checkout repo
# Checks out the repository code
uses: actions/checkout@v3
with:
ref: develop # Use the branch specified in the calling workflow
- name: Set up Docker Buildx
# Recommended for improved build features and caching
uses: docker/setup-buildx-action@v3
- name: Log in to GitHub Packages
run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Nuke Docker Cache
# Equivalent to CmdLine@2 Nuke Cache
run: |
echo "Pruning Docker system..."
docker system prune -a -f --volumes
- name: Remove unnecessary files
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Check disk space
run: df -h
- name: Define Image Tags
# Define tags consistently using inputs
id: tags
run: |
echo "image_repo_path=${{ inputs.image_repo }}/${{ inputs.image_name }}" >> $GITHUB_OUTPUT
echo "tag_build_id=${{ inputs.build_id }}" >> $GITHUB_OUTPUT
echo "tag_commit_sha=${{ inputs.commit_sha }}" >> $GITHUB_OUTPUT
- name: Pull Latest Image for Cache
# Pulls the latest tag if it exists
continue-on-error: true # Mimics '|| true'
run: |
echo "Attempting to pull latest image for cache: ${{ steps.tags.outputs.image_repo_path }}:latest"
docker pull ${{ steps.tags.outputs.image_repo_path }}:latest || true
- name: Build Final Image
run: |
echo "Building final image without intermediate cache..."
docker build \
-f ${{ inputs.dockerfile }} \
--pull \
--cache-from type=registry,ref=${{ steps.tags.outputs.image_repo_path }}:latest \
-t ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_build_id }} \
-t ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_commit_sha }} \
-t ${{ steps.tags.outputs.image_repo_path }}:latest \
${{ inputs.build_context }}
- name: Push Final Image Tags
# Pushes the final tags (build id, commit sha, latest)
run: |
echo "Pushing final image tags..."
docker push ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_build_id }}
docker push ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_commit_sha }}
docker push ${{ steps.tags.outputs.image_repo_path }}:latest
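A hedged caveat on the build step above: `--cache-from type=registry,ref=...` is BuildKit/buildx syntax, and plain `docker build` only understands it when BuildKit is active. If cache pulls are silently ignored, an explicit buildx invocation of the same shape should work (placeholder variables standing in for the inputs above, not part of this PR):

```bash
# build with registry cache and load the result into the local docker engine
docker buildx build \
  -f "$DOCKERFILE" \
  --pull \
  --cache-from type=registry,ref="$IMAGE_REPO_PATH:latest" \
  -t "$IMAGE_REPO_PATH:latest" \
  --load \
  "$BUILD_CONTEXT"
```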

.github/workflows/template_test.yml (new file)

@ -0,0 +1,245 @@
name: Reusable Test Workflow
on:
workflow_call:
inputs:
projectName:
required: true
type: string
image_repo:
required: true
type: string
images: # JSON string defining services for compose
required: true
type: string
tests: # JSON string array of test directories/commands
required: true
type: string
testEnvs: # JSON string array of env vars for the test runner container
required: false
type: string
default: '[]'
testComposeFilePath: # Path where the generated compose file will be saved
required: false
type: string
default: ./test_compose.yml # Use .yml extension for docker compose v2
testContainerName:
required: false
type: string
default: tests # Name of the image containing the tests
testResultsPath: # Path inside the test container where results are stored
required: false
type: string
default: /usr/src/app/results
testResultsFilename:
required: false
type: string
default: results.xml
jobs:
compose_and_test:
name: Compose Services and Run Tests
runs-on: ubuntu-latest
env:
# Env vars needed for compose file generation/execution
IMAGE_REPO: ${{ inputs.image_repo }}
PROJECT_NAME: ${{ inputs.projectName }}
TAG: ${{ github.run_id }} # Use run_id as the build tag
steps:
- name: Checkout Repository
uses: actions/checkout@v4
with:
ref: develop
- name: Log in to GitHub Packages
run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Nuke Docker Cache
# Equivalent to CmdLine@2 Nuke Cache
run: |
echo "Pruning Docker system..."
docker system prune -a -f --volumes
- name: Remove unnecessary files
run: |
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- name: Define Image Repo and other tags
# Define tags consistently using inputs
id: tags
run: |
echo "image_repo_path=${{ inputs.image_repo }}" >> $GITHUB_OUTPUT
- name: Create Docker-Compose File from Inputs
id: create_compose
shell: pwsh
run: |
# Load Inputs (parse JSON strings)
$imagesJson = '${{ inputs.images }}'
Write-Host "Substituted Images JSON: $imagesJson" # Debugging - remove sensitive info if public
$images = $imagesJson | ConvertFrom-Json
$testComposePath = "${{ inputs.testComposeFilePath }}"
# create compose structure (using YAML structure for Docker Compose v2+)
$compose = @{ services = @{}; networks = @{} }
$compose.networks.test = @{ external = $false; name = "test-network-${{ github.run_id }}" } # Use unique network name per run
# Generate services Section Based on Images inputs
foreach ($img in $images) {
$serviceName = $img.name
$svc = @{}
$svc.container_name = $serviceName
$svc.image = "${{ steps.tags.outputs.image_repo_path }}/$($serviceName):${{ env.TAG }}" # Use run_id tag
if ($img.depends_on) {
$svc.depends_on = $img.depends_on
}
if ($img.env) {
$svc.environment = $img.env
} else {
$svc.environment = @{}
}
$svc.networks = @("test") # Assign service to the custom network
if ($img.ports) {
$svc.ports = $img.ports
}
$compose.services.$serviceName = $svc
}
# Convert PS object to YAML and write file
# Installing powershell-yaml module might be needed on some runners
# Install-Module -Name powershell-yaml -Force -Scope CurrentUser # Uncomment if needed
# Import-Module powershell-yaml # Uncomment if needed
# $compose | ConvertTo-Yaml | Out-File -Encoding utf8 $testComposePath
# Alternative: Convert to JSON then use a tool to convert JSON to YAML, or just use JSON with compose
# Using JSON with docker compose -f is often simpler
$compose | ConvertTo-Json -Depth 10 | Out-File -Encoding utf8 $testComposePath.replace('.yml','.json')
echo "COMPOSE_FILE_PATH=$($testComposePath.replace('.yml','.json'))" >> $env:GITHUB_OUTPUT
# Removed 'docker network create test' - using isolated compose network now
- name: Clean Docker Services (if any previous)
run: |
docker compose -f ${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }} down -v --remove-orphans || true
continue-on-error: true
- name: Start Docker Compose Services
run: |
echo "Using compose file: ${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}"
cat "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" # Print generated compose file (check secrets aren't exposed if public)
docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" up -d
- name: Print Service Logs on Failure or Success
if: always() # Always run this step
run: |
echo "Printing final logs from Docker Compose services..."
docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" logs
- name: Wait for Services
run: |
echo "Waiting 60 seconds for services to initialize..."
sleep 60
echo "Compose logs after wait:"
docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" logs
- name: Check Docker Services Health
run: |
echo "Checking health of Docker services..."
# Check if all services are healthy
docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" ps
- name: Debug Network Connections
if: always() # Run even if previous steps failed
run: |
echo "--- Inspecting network: test-network-${{ github.run_id }} ---"
docker network inspect test-network-${{ github.run_id }}
echo "--- Listing running containers (docker ps) ---"
docker ps -a --format "table {{.ID}}\t{{.Names}}\t{{.Image}}\t{{.Status}}\t{{.Ports}}\t{{.Networks}}"
echo "--- Backend Service Logs ---"
docker logs backend-aimingmedai || echo "Could not get logs for backend-aimingmedai" # Replace with actual service name
- name: Run Tests
shell: bash
run: |
TEST_DIRS='["tests/integration/backend"]'
TEST_ENVS_JSON='["ENVIRONMENT=dev","TESTING=1", "DEEPSEEK_API_KEY=sk-XXXXXXXXXX","TAVILY_API_KEY=tvly-dev-wXXXXXX"]'
RESULTS_PATH="${{ inputs.testResultsPath }}"
STAGING_DIR="${{ runner.temp }}/test-results" # Use runner temp dir for results
mkdir -p "$STAGING_DIR"
# Prepare environment variables for docker run
ENV_ARGS=""
if [[ "$TEST_ENVS_JSON" != "[]" ]]; then
# Convert JSON array string to individual env vars
while IFS= read -r line; do
ENV_ARGS+=" -e \"$line\""
done <<< $(echo "$TEST_ENVS_JSON" | jq -r '.[]')
else
# Add a dummy env var if none are provided, as required by original script logic
ENV_ARGS+=" -e DUMMY_ENV_TEST_RUN_ID=${{ github.run_id }}"
fi
echo "Env args: $ENV_ARGS" # Debugging
# Get the dynamically generated network name
COMPOSE_NETWORK_NAME=$(docker network ls --filter name=test-network-${{ github.run_id }} --format "{{.Name}}")
echo "Using Network: $COMPOSE_NETWORK_NAME"
# Loop through test directories and execute tests
echo "$TEST_DIRS" | jq -r '.[]' | while read test_dir; do
test_dir=$(echo $test_dir | sed 's/"//g') # Remove quotes
echo "Running test: $test_dir"
docker run \
--network "$COMPOSE_NETWORK_NAME" \
$ENV_ARGS \
-v "$STAGING_DIR:$RESULTS_PATH" \
--rm \
"${{ steps.tags.outputs.image_repo_path }}/${{ inputs.testContainerName }}:${{ github.run_id }}" \
"$test_dir"
# Add error handling if needed (e.g., exit script if a test run fails)
if [ $? -ne 0 ]; then
echo "Test failed: $test_dir"
# exit 1 # Uncomment to stop on first failure
fi
done
# Copy results file to expected location for upload artifact (adjust filename if needed)
# Assuming all test runs output to the *same* results file, overwriting previous ones.
# If they output to different files, adjust this copy/rename logic.
if [[ -f "$STAGING_DIR/${{ inputs.testResultsFilename }}" ]]; then
cp "$STAGING_DIR/${{ inputs.testResultsFilename }}" "${{ runner.temp }}/${{ inputs.testResultsFilename }}"
else
echo "Warning: Test results file ${{ inputs.testResultsFilename }} not found in $STAGING_DIR"
fi
- name: Upload Test Results Artifact
if: always() # Run even if tests fail
uses: actions/upload-artifact@v4
with:
name: test-results-${{ github.run_id }}
path: ${{ runner.temp }}/${{ inputs.testResultsFilename }} # Path to the results file on the runner
retention-days: 7
# Optional: Publish Test Results for UI display
- name: Publish Test Results
if: success() || failure() # always run even if the previous step fails
uses: mikepenz/action-junit-report@v5
with:
report_paths: ${{ runner.temp }}/${{ inputs.testResultsFilename }}
include_passed: true
- name: Docker Compose Down
if: always() # Always run cleanup
run: |
echo "Bringing down Docker Compose services..."
docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" down -v --remove-orphans
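One plausible cause of the testEnvs TODO noted in build.yml: the Run Tests step accumulates `-e "VAR=value"` flags in a single string and expands it unquoted, which makes the escaped quotes literal and splits values on whitespace. A more robust sketch (an assumed fix, not the template's current code; `$TEST_IMAGE` is a placeholder) keeps the flags in a bash array:

```bash
# collect one -e flag per VAR=value entry from the JSON array
ENV_ARGS=()
while IFS= read -r kv; do
  ENV_ARGS+=(-e "$kv")
done < <(echo "$TEST_ENVS_JSON" | jq -r '.[]')

# "${ENV_ARGS[@]}" expands each flag and its value as a separate, intact argument
docker run --rm "${ENV_ARGS[@]}" "$TEST_IMAGE" tests/integration/backend
```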

README.md

@ -1,4 +1,4 @@
-[![CI/CD - build](https://github.com/aimingmed/aimingmed-ai/actions/workflows/build.yml/badge.svg)](https://github.com/aimingmed/aimingmed-ai/actions/workflows/build.yml)
+[![Build + CI](https://github.com/aimingmed/aimingmed-ai/actions/workflows/build.yml/badge.svg)](https://github.com/aimingmed/aimingmed-ai/actions/workflows/build.yml)
 
 ## Important note:

app/backend/Dockerfile

@ -1,23 +1,43 @@
 # pull official base image
-FROM python:3.11-slim
+FROM python:3.11-slim-bullseye
 
-# set working directory
-WORKDIR /usr/src/app
+# create directory for the app user
+RUN mkdir -p /home/app
+
+# create the app user
+RUN addgroup --system app && adduser --system --group app
+
+# create the appropriate directories
+ENV HOME=/home/app
+ENV APP_HOME=/home/app/backend
+RUN mkdir $APP_HOME
+WORKDIR $APP_HOME
 
 # set environment variables
-ENV PYTHONDONTWRITEBYTECODE 1
-ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV ENVIRONMENT=dev
+ENV TESTING=1
 
-# install system dependencies
-RUN apt-get update \
-    && apt-get -y install build-essential netcat-traditional gcc \
-    && apt-get clean
-
-# install python dependencies
-RUN pip install --upgrade pip setuptools wheel -i https://pypi.tuna.tsinghua.edu.cn/simple
-RUN pip install pipenv -i https://pypi.tuna.tsinghua.edu.cn/simple
-COPY ./Pipfile .
-RUN pipenv install --deploy --dev
-
 # add app
-COPY . .
+COPY . $APP_HOME
+
+# install python dependencies
+RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pipenv && rm -rf ~/.cache/pip
+RUN pipenv install --deploy --dev
+
+# chown all the files to the app user
+RUN chown -R app:app $APP_HOME
+
+# change to the app user
+USER app
+
+# pytest
+RUN pipenv run pytest tests --disable-warnings
+
+# expose the port the app runs on
+EXPOSE 80
+
+# run uvicorn
+CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "80"]

app/backend/Dockerfile.prod

@ -5,18 +5,14 @@
 # pull official base image
 FROM python:3.11-slim-bookworm AS builder
 
 # set working directory
 WORKDIR /usr/src/app
 
 # set environment variables
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1
+ENV ENVIRONMENT=dev
+ENV TESTING=1
 
-# install system dependencies
-# RUN apt-get update && apt-get -y install build-essential \
-#    && apt-get clean \
-#    && rm -rf /var/lib/apt/lists/*
-
 # install python dependencies
 RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pipenv && rm -rf ~/.cache/pip

@ -25,10 +21,11 @@ RUN pipenv install --deploy --dev
 # add app
 COPY . /usr/src/app
 
-# RUN pipenv run pip install black==23.12.1 flake8==7.0.0 isort==5.13.2
-# RUN pipenv run flake8 .
-# RUN pipenv run black --exclude=migrations . --check
-# RUN pipenv run isort . --check-only
+RUN export DEEPSEEK_API_KEY=sk-XXXXXXXXXX; export TAVILY_API_KEY=tvly-dev-wXXXXXX;\
+    pipenv run pytest tests --disable-warnings
+RUN pipenv run flake8 .
+RUN pipenv run black --exclude=migrations . --check
+RUN pipenv run isort . --check-only
 
 #########
 # FINAL #

@ -56,16 +53,11 @@ ENV PYTHONUNBUFFERED=1
 ENV ENVIRONMENT=prod
 ENV TESTING=0
 
-# install system dependencies
-# RUN apt-get update \
-#    && apt-get -y install build-essential \
-#    && apt-get clean \
-#    && rm -rf /var/lib/apt/lists/*
-
 # install python dependencies
-COPY --from=builder /usr/src/app/Pipfile .
 RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pipenv && rm -rf ~/.cache/pip
-RUN pipenv install --deploy --dev
+COPY --from=builder /usr/src/app/Pipfile .
+RUN pipenv install --deploy
 RUN pipenv run pip install "uvicorn[standard]==0.26.0"
 
 # add app

@ -78,7 +70,7 @@ RUN chown -R app:app $APP_HOME
 USER app
 
 # expose the port the app runs on
-EXPOSE 8765
+EXPOSE 80
 
 # run uvicorn
-CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "8765"]
+CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "80"]

app/backend/Pipfile

@ -6,7 +6,7 @@ name = "pypi"
 
 [packages]
 fastapi = "==0.115.9"
 starlette = "==0.45.3"
-uvicorn = "==0.26.0"
+uvicorn = {version = "==0.26.0", extras = ["standard"]}
 pydantic-settings = "==2.1.0"
 gunicorn = "==21.0.1"
 python-decouple = "==3.8"

app/backend/Pipfile.lock (generated)

Regenerated lockfile; _meta hash a6778423… → 5d9bbaeb…. Version bumps, each with its sha256 artifact hashes refreshed accordingly:

- google-auth: 2.38.0 → 2.39.0
- googleapis-common-protos: 1.69.2 → 1.70.0
- langsmith: 0.3.30 → 0.3.31
- openai: 1.73.0 → 1.74.0
- pillow: 11.1.0 → 11.2.1
- transformers: 4.51.2 → 4.51.3

uvicorn stays pinned at 0.26.0; its "index": "pypi" marker was dropped.

app/backend/api/chatbot.py

@ -5,15 +5,17 @@ from decouple import config
 from fastapi import APIRouter, WebSocket, WebSocketDisconnect
 from langchain_deepseek import ChatDeepSeek
 
-from models.adaptive_rag import grading, query, routing
 from .utils import ConnectionManager
 
 router = APIRouter()
 
 # Load environment variables
-os.environ["DEEPSEEK_API_KEY"] = config("DEEPSEEK_API_KEY", cast=str)
-os.environ["TAVILY_API_KEY"] = config("TAVILY_API_KEY", cast=str)
+os.environ["DEEPSEEK_API_KEY"] = config(
+    "DEEPSEEK_API_KEY", cast=str, default="sk-XXXXXXXXXX"
+)
+os.environ["TAVILY_API_KEY"] = config(
+    "TAVILY_API_KEY", cast=str, default="tvly-dev-wXXXXXX"
+)
 
 # Initialize the DeepSeek chat model
 llm_chat = ChatDeepSeek(

@ -27,28 +29,36 @@ llm_chat = ChatDeepSeek(
 # Initialize the connection manager
 manager = ConnectionManager()
 
 @router.websocket("/ws")
 async def websocket_endpoint(websocket: WebSocket):
     await manager.connect(websocket)
     try:
         while True:
             data = await websocket.receive_text()
             try:
                 data_json = json.loads(data)
-                if isinstance(data_json, list) and len(data_json) > 0 and 'content' in data_json[0]:
-                    async for chunk in llm_chat.astream(data_json[0]['content']):
-                        await manager.send_personal_message(json.dumps({"type": "message", "payload": chunk.content}), websocket)
-                else:
-                    await manager.send_personal_message("Invalid message format", websocket)
+                if (
+                    isinstance(data_json, list)
+                    and len(data_json) > 0
+                    and "content" in data_json[0]
+                ):
+                    async for chunk in llm_chat.astream(data_json[0]["content"]):
+                        await manager.send_personal_message(
+                            json.dumps({"type": "message", "payload": chunk.content}),
+                            websocket,
+                        )
+                else:
+                    await manager.send_personal_message(
+                        "Invalid message format", websocket
+                    )
             except json.JSONDecodeError:
                 await manager.broadcast("Invalid JSON message")
             except WebSocketDisconnect:
                 manager.disconnect(websocket)
                 await manager.broadcast("Client disconnected")
     except WebSocketDisconnect:
         manager.disconnect(websocket)
         await manager.broadcast("Client disconnected")
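For reference, the reformatted handler still expects a JSON array whose first element carries a `content` key, and it streams chunks back as `{"type": "message", "payload": ...}`. A minimal client sketch (assumed usage, not part of this PR; host and port taken from the dev compose mapping):

```python
import asyncio
import json

import websockets  # the same client library the integration tests use


async def ask(question: str) -> None:
    # the endpoint expects a list of message objects with a "content" field
    async with websockets.connect("ws://localhost:8004/ws") as ws:
        await ws.send(json.dumps([{"content": question}]))
        # each streamed chunk arrives as {"type": "message", "payload": "..."}
        reply = json.loads(await ws.recv())
        print(reply["payload"])


asyncio.run(ask("Hello"))
```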

app/backend/api/utils.py

@ -22,4 +22,3 @@
         json_message = {"type": "message", "payload": message}
         for connection in self.active_connections:
             await connection.send_text(json.dumps(json_message))
-

app/backend/main.py

@ -1,21 +1,19 @@
 import logging
 
-import uvicorn
-from fastapi import Depends, FastAPI
+from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 
 from api import chatbot, ping
-from config import Settings, get_settings
 
 log = logging.getLogger("uvicorn")
 
 origins = ["http://localhost:8004"]
 
 def create_application() -> FastAPI:
     application = FastAPI()
     application.include_router(ping.router, tags=["ping"])
-    application.include_router(
-        chatbot.router, tags=["chatbot"])
+    application.include_router(chatbot.router, tags=["chatbot"])
 
     return application

@ -28,7 +26,3 @@ app.add_middleware(
     allow_methods=["*"],
     allow_headers=["*"],
 )
-
-# if __name__ == "__main__":
-#     uvicorn.run("main:app", host="0.0.0.0", port=8765, reload=True)

app/backend/models/adaptive_rag/grading.py (blank-line formatting only)

@ -8,6 +8,7 @@ class GradeDocuments(BaseModel):
         description="Documents are relevant to the question, 'yes' or 'no'"
     )
 
+
 class GradeHallucinations(BaseModel):
     """Binary score for hallucination present in generation answer."""

@ -15,6 +16,7 @@ class GradeHallucinations(BaseModel):
         description="Answer is grounded in the facts, 'yes' or 'no'"
     )
 
+
 class GradeAnswer(BaseModel):
     """Binary score to assess answer addresses question."""

app/backend/models/adaptive_rag/query.py (whitespace-only change)

@ -4,6 +4,6 @@ from pydantic import BaseModel, Field
 class QueryRequest(BaseModel):
     query: str = Field(..., description="The question to ask the model")
 
 class QueryResponse(BaseModel):
     response: str = Field(..., description="The model's response")

app/backend/setup.cfg (new file)

@ -0,0 +1,2 @@
[flake8]
max-line-length = 119

Deleted file (unit-test scaffold for the chatbot endpoint):

@ -1,11 +0,0 @@
import json
import os
import sys
import unittest
from unittest.mock import AsyncMock, MagicMock
from fastapi import WebSocket, WebSocketDisconnect
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
from api.chatbot import llm_chat, manager, websocket_endpoint


@ -5,11 +5,12 @@ from unittest.mock import AsyncMock, MagicMock
 
 from fastapi import WebSocket
 
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
-
 from api.utils import ConnectionManager
 
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")))
+
 
+# Test for ConnectionManager class
 class TestConnectionManager(unittest.IsolatedAsyncioTestCase):
     async def asyncSetUp(self):
         self.manager = ConnectionManager()

@ -38,8 +39,13 @@ class TestConnectionManager(unittest.IsolatedAsyncioTestCase):
         self.manager.active_connections = [mock_websocket1, mock_websocket2]
         message = "Broadcast message"
         await self.manager.broadcast(message)
-        mock_websocket1.send_text.assert_awaited_once_with('{"type": "message", "payload": "Broadcast message"}')
-        mock_websocket2.send_text.assert_awaited_once_with('{"type": "message", "payload": "Broadcast message"}')
+        mock_websocket1.send_text.assert_awaited_once_with(
+            '{"type": "message", "payload": "Broadcast message"}'
+        )
+        mock_websocket2.send_text.assert_awaited_once_with(
+            '{"type": "message", "payload": "Broadcast message"}'
+        )
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()

app/docker-compose.yml

@ -10,14 +10,14 @@ services:
   backend:
     build:
       context: ./backend
-      dockerfile: Dockerfile.prod
-    container_name: backend
+      dockerfile: Dockerfile
+    container_name: backend-aimingmedai
     platform: linux/amd64
     # command: pipenv run uvicorn main:app --reload --workers 1 --host 0.0.0.0 --port 8765
     volumes:
-      - ./backend:/usr/src/app
+      - ./backend:/home/app/backend
     ports:
-      - "8000:8765"
+      - "8004:80"
     environment:
       - ENVIRONMENT=dev
       - TESTING=0

@ -25,14 +25,25 @@
   frontend:
     build:
       context: ./frontend
-      dockerfile: Dockerfile.local
-    container_name: frontend
+      dockerfile: Dockerfile.test
+    container_name: frontend-aimingmedai
     volumes:
       - ./frontend:/usr/src/app
       - /usr/src/app/node_modules
     ports:
-      - "3000:5173"
+      - "3004:80"
     depends_on:
       - backend
     environment:
       LOG_LEVEL: "DEBUG"
+
+  tests:
+    build:
+      context: ./tests
+    container_name: tests-aimingmedai
+    # depends_on:
+    #   - backend
+    #   - frontend
+    environment:
+      FRONTEND_URL: http://frontend:80
+      BACKEND_URL: http://backend:80

app/frontend/.env.test (new file)

@ -0,0 +1 @@
REACT_APP_BASE_DOMAIN_NAME_PORT=localhost:8004

app/frontend/Dockerfile.test (new file)

@ -0,0 +1,18 @@
####### BUILDER IMAGE #######
# Build stage
FROM node:alpine
WORKDIR /usr/src/app
# Copy everything else, test and build
COPY . /usr/src/app
# Build the app with a specific .env file
ARG ENV_FILE=.env.test
COPY ${ENV_FILE} /usr/src/app/.env
# Install dependencies (package.json was copied in with the rest of the source)
RUN npm install && npm i --save-dev @types/jest
EXPOSE 80
CMD [ "npm", "run", "dev", "--", "--host", "0.0.0.0", "--port", "80" ]

app/frontend/src/App.tsx

@ -1,6 +1,6 @@
 import React, { useState, useEffect, useRef } from 'react';
 
-const BASE_DOMAIN_NAME = import.meta.env.REACT_APP_DOMAIN_NAME || 'localhost';
+const BASE_DOMAIN_NAME_PORT = import.meta.env.REACT_APP_DOMAIN_NAME_PORT || 'localhost:8004';
 
 interface Message {

@ -16,7 +16,7 @@ const App: React.FC = () => {
   useEffect(() => {
     mounted.current = true;
-    const ws = new WebSocket(`ws://${BASE_DOMAIN_NAME}:8000/ws`);
+    const ws = new WebSocket(`ws://${BASE_DOMAIN_NAME_PORT}/ws`);
     setSocket(ws);
     ws.onopen = () => {
       console.log('WebSocket connection opened');

app/tests/Dockerfile (new file)

@ -0,0 +1,14 @@
# TEST
FROM python:3.11-slim-bullseye
WORKDIR /usr/src/app
COPY Pipfile .
RUN pip3 install --upgrade pip &&\
pip3 install pipenv &&\
pipenv install --dev
COPY tests ./tests
ENTRYPOINT ["pipenv", "run", "pytest", "--junit-xml=results/results.xml"]

app/tests/Pipfile (new file)

@ -0,0 +1,22 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"
[packages]
pytest = "*"
requests = "*"
jsonschema = "*"
evonik-dummy = "*"
pyrsistent = "*"
pyjwt = "*"
pydantic = "*"
websockets = "*"
pytest-asyncio = "*"
pytest-cov = "*"
[dev-packages]
autopep8 = "*"
[requires]
python_version = "3.11"

app/tests/Pipfile.lock (generated, new file)

@ -0,0 +1,276 @@
{
"_meta": {
"hash": {
"sha256": "c1b2460cc178bc5b3ed94b7d759cc4838a5b71891722ee9de151e81d45237011"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.11"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.org/simple",
"verify_ssl": true
}
]
},
"default": {
"annotated-types": {
"hashes": [
"sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53",
"sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"
],
"markers": "python_version >= '3.8'",
"version": "==0.7.0"
},
"attrs": {
"hashes": [
"sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3",
"sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"
],
"markers": "python_version >= '3.8'",
"version": "==25.3.0"
},
"certifi": {
"hashes": [
"sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651",
"sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"
],
"markers": "python_version >= '3.6'",
"version": "==2025.1.31"
},
"charset-normalizer": {
"hashes": [
"sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3",
"sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125",
"sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"
],
"markers": "python_version >= '3.7'",
"version": "==3.4.1"
},
"evonik-dummy": {
"hashes": [
"sha256:7c6fc100f311c11d7396e1b434a62d1265539b6d63e735fa9586499341eeebeb",
"sha256:c22d45dcba7e14c0167575d8cd9b30c1b42321dbd7ca684521d38b3aaffa9a1e"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==0.0.13"
},
"exrex": {
"hashes": [
"sha256:1228f2e3afb008cacf6adc3ec20b098f1303886f4382b57cdf3b7259fb7c0ad3",
"sha256:3f582add0700d4141e279625252e7a0655f9ad81feab3b8ab9077966668a2c99"
],
"version": "==0.12.0"
},
"faker": {
"hashes": [
"sha256:ad9dc66a3b84888b837ca729e85299a96b58fdaef0323ed0baace93c9614af06",
"sha256:dc2f730be71cb770e9c715b13374d80dbcee879675121ab51f9683d262ae9a1c"
],
"markers": "python_version >= '3.9'",
"version": "==37.1.0"
},
"idna": {
"hashes": [
"sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9",
"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"
],
"markers": "python_version >= '3.6'",
"version": "==3.10"
},
"iniconfig": {
"hashes": [
"sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7",
"sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"
],
"markers": "python_version >= '3.8'",
"version": "==2.1.0"
},
"jsonschema": {
"hashes": [
"sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4",
"sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==4.23.0"
},
"jsonschema-specifications": {
"hashes": [
"sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272",
"sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"
],
"markers": "python_version >= '3.9'",
"version": "==2024.10.1"
},
"packaging": {
"hashes": [
"sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759",
"sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"
],
"markers": "python_version >= '3.8'",
"version": "==24.2"
},
"pluggy": {
"hashes": [
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1",
"sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"
],
"markers": "python_version >= '3.8'",
"version": "==1.5.0"
},
"pydantic": {
"hashes": [
"sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3",
"sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==2.11.3"
},
"pydantic-core": {
"hashes": [
"sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df",
"sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"
],
"markers": "python_version >= '3.9'",
"version": "==2.33.1"
},
"pyjwt": {
"hashes": [
"sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953",
"sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==2.10.1"
},
"pyrsistent": {
"hashes": [
"sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f",
"sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e",
"sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958",
"sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34",
"sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca",
"sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d",
"sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d",
"sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4",
"sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714",
"sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf",
"sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee",
"sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8",
"sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224",
"sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d",
"sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054",
"sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656",
"sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7",
"sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423",
"sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce",
"sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e",
"sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3",
"sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0",
"sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f",
"sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b",
"sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce",
"sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a",
"sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174",
"sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86",
"sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f",
"sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b",
"sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98",
"sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==0.20.0"
},
"pytest": {
"hashes": [
"sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820",
"sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==8.3.5"
},
"referencing": {
"hashes": [
"sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa",
"sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"
],
"markers": "python_version >= '3.9'",
"version": "==0.36.2"
},
"requests": {
"hashes": [
"sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760",
"sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==2.32.3"
},
"rpds-py": {
"hashes": [
"sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e",
"sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"
],
"markers": "python_version >= '3.9'",
"version": "==0.24.0"
},
"typing-extensions": {
"hashes": [
"sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c",
"sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"
],
"markers": "python_version >= '3.8'",
"version": "==4.13.2"
},
"typing-inspection": {
"hashes": [
"sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f",
"sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"
],
"markers": "python_version >= '3.9'",
"version": "==0.4.0"
},
"tzdata": {
"hashes": [
"sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8",
"sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"
],
"markers": "python_version >= '2'",
"version": "==2025.2"
},
"urllib3": {
"hashes": [
"sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466",
"sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"
],
"markers": "python_version >= '3.9'",
"version": "==2.4.0"
}
},
"develop": {
"autopep8": {
"hashes": [
"sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758",
"sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==2.3.2"
},
"pycodestyle": {
"hashes": [
"sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9",
"sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae"
],
"markers": "python_version >= '3.9'",
"version": "==2.13.0"
}
}
}

app/tests/README.md (new file)

@ -0,0 +1,66 @@
# tests
This directory provides integration tests for all components.
These integration tests can be run locally or via docker-compose.
## run using docker-compose
Run all tests against the backend and frontend services:
```bash
docker-compose up -d --build backend frontend
docker-compose up --build tests
```
Stop all containers:
```bash
docker-compose down -v
```
## run locally
You can start the backend and frontend services as follows:
```bash
docker-compose up -d --build backend frontend
```
To run the tests locally, you must specify the base URLs of the backend and frontend:
```bash
export BACKEND_URL="http://localhost:8004"
export FRONTEND_URL="http://localhost:3000"
```
All values default to `http://localhost:8080`.
### install dependencies
To install the dependencies for the tests, execute the following (in `/app/tests/`):
```bash
pipenv install
```
### run tests
To run the tests locally, execute the following:
```bash
pipenv run pytest tests/integration/
```
To execute tests for the backend only, execute the following:
```bash
pipenv run pytest tests/integration/backend
```
You can also run single groups of tests, e.g.:
```bash
pipenv run pytest tests/integration/backend/tests/test_constraint_types.py -k create
```

app/tests/tests/.gitkeep (new empty file)

New file under app/tests/tests/integration/backend/ (WebSocket connectivity test)

@ -0,0 +1,11 @@
import pytest
import websockets
@pytest.mark.asyncio
async def test_websocket_connection():
    url = "ws://backend-aimingmedai:80/ws"
    try:
        async with websockets.connect(url):
            assert True  # the connection was established, so the test passes
    except Exception as exc:  # surface the failure reason instead of a bare assert False
        pytest.fail(f"WebSocket connection to {url} failed: {exc}")
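A hedged variant (not part of this PR) that honors the `BACKEND_URL` convention from the tests README, so the same test can run both inside compose and locally:

```python
import os

import pytest
import websockets


@pytest.mark.asyncio
async def test_websocket_connection_env():
    # default mirrors the README; deriving ws:// from the http URL is an assumption
    base = os.environ.get("BACKEND_URL", "http://localhost:8080")
    url = base.replace("http://", "ws://") + "/ws"
    try:
        async with websockets.connect(url):
            pass  # connection established
    except Exception as exc:
        pytest.fail(f"WebSocket connection to {url} failed: {exc}")
```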