Merge pull request #60 from aimingmed/feature/backend-frontend-structure

update with github lowercase
Hong Kai LEE 2025-04-17 10:12:10 +08:00 committed by GitHub
commit 190210f0df
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
4 changed files with 45 additions and 36 deletions

View File

@@ -50,7 +50,7 @@ jobs:
     with:
       # Pass values from the matrix context and global env
       project_name: aimingmed-ai
-      image_repo: ghcr.io/$(echo $GITHUB_REPOSITORY | tr '[A-Z]' '[a-z]')
+      image_repo: "ghcr.io/$(echo $GITHUB_REPOSITORY | tr '[A-Z]' '[a-z]')"
       image_name: ${{ matrix.image_config.IMAGE_NAME }}
       build_context: ${{ matrix.image_config.BUILD_CONTEXT }}
       dockerfile: ${{ matrix.image_config.DOCKERFILE }}
@@ -70,6 +70,7 @@ jobs:
     with:
       projectName: aimingmed-ai
       image_repo: ghcr.io/$(echo $GITHUB_REPOSITORY | tr '[A-Z]' '[a-z]')
+      testContainerName: tests-aimingmedai
       # Pass test environment variables as JSON string
       testEnvs: >
         [
@@ -92,8 +93,8 @@ jobs:
             "env": {
              "ENVIRONMENT": "dev",
              "TESTING": "1",
-             "DEEPSEEK_API_KEY": "",
-             "TAVILY_API_KEY": ""
+             "DEEPSEEK_API_KEY": "sk-XXXXXXXXXX",
+             "TAVILY_API_KEY": "tvly-dev-wXXXXXX"
            }
          },
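
The `image_repo` input above is built by lowercasing `GITHUB_REPOSITORY`, since registries such as ghcr.io only accept all-lowercase image names while GitHub preserves the owner/repo casing as created. A minimal sketch of that substitution, using a hypothetical repository name rather than one taken from this diff:

```bash
#!/usr/bin/env bash
# Sketch of the image_repo expression passed to the reusable workflow above.
# The repository name below is a made-up example, not from this commit.
GITHUB_REPOSITORY="AimingMed/AimingMed-AI"
image_repo="ghcr.io/$(echo "$GITHUB_REPOSITORY" | tr '[A-Z]' '[a-z]')"
echo "$image_repo"   # prints: ghcr.io/aimingmed/aimingmed-ai
```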

View File

@@ -1,12 +0,0 @@
-name: Debug Environment
-on:
-  workflow_dispatch: # Allows manual triggering
-jobs:
-  debug:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Print GITHUB_REPOSITORY
-        run: |
-          echo "GITHUB_REPOSITORY: ${{ github.repository }}"

View File

@@ -71,18 +71,18 @@ jobs:
          sudo rm -rf "/usr/local/share/boost"
          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+      - name: Define Image Repo and other tags
+        # Define tags consistently using inputs
+        id: tags
+        run: |
+          echo "image_repo_path=${{ inputs.image_repo }}" >> $GITHUB_OUTPUT
      - name: Create Docker-Compose File from Inputs
        id: create_compose
        shell: pwsh
        run: |
          # Load Inputs (parse JSON strings)
          $imagesJson = '${{ inputs.images }}'
-          $deepseekApiKey = '${{ secrets.DEEPSEEK_API_KEY }}'
-          $tavilyApiKey = '${{ secrets.TAVILY_API_KEY }}'
-          # Substitute secrets *before* parsing JSON using environment variables
-          # Be very careful with escaping if secrets contain special JSON characters
-          $imagesJson = $imagesJson -replace '"DEEPSEEK_API_KEY": ""', "`"DEEPSEEK_API_KEY`": `"$deepseekApiKey`""
-          $imagesJson = $imagesJson -replace '"TAVILY_API_KEY": ""', "`"TAVILY_API_KEY`": `"$tavilyApiKey`""
          Write-Host "Substituted Images JSON: $imagesJson" # Debugging - remove sensitive info if public
          $images = $imagesJson | ConvertFrom-Json
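
The added "Define Image Repo and other tags" step publishes the repo path as a step output through the `GITHUB_OUTPUT` file, which later steps read back as `${{ steps.tags.outputs.image_repo_path }}` instead of `env.IMAGE_REPO`. A minimal sketch of that mechanism outside of a workflow run, where `GITHUB_OUTPUT` is pointed at a temporary file purely for illustration:

```bash
#!/usr/bin/env bash
# Outside of GitHub Actions the GITHUB_OUTPUT variable is unset, so use a
# temp file to stand in for the runner-provided output file.
export GITHUB_OUTPUT="${GITHUB_OUTPUT:-$(mktemp)}"

# What the new "tags" step writes (the value here is a hypothetical example):
echo "image_repo_path=ghcr.io/aimingmed/aimingmed-ai" >> "$GITHUB_OUTPUT"

# The runner parses key=value lines from this file and exposes them to later
# steps as ${{ steps.tags.outputs.image_repo_path }}.
cat "$GITHUB_OUTPUT"
```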
@@ -97,7 +97,7 @@ jobs:
          foreach ($img in $images) {
            $serviceName = $img.name
            $svc = @{}
-            $svc.image = "${{ env.IMAGE_REPO }}/$($serviceName):${{ env.TAG }}" # Use run_id tag
+            $svc.image = "${{ steps.tags.outputs.image_repo_path }}/$($serviceName):${{ env.TAG }}" # Use run_id tag
            if ($img.depends_on) {
              $svc.depends_on = $img.depends_on
@@ -146,8 +146,8 @@ jobs:
      - name: Run Tests
        shell: bash
        run: |
-          TEST_DIRS='${{ inputs.tests }}' # Get JSON array string
-          TEST_ENVS_JSON='${{ inputs.testEnvs }}' # Get JSON array string
+          TEST_DIRS="${{ inputs.tests }}"
+          TEST_ENVS_JSON="${{ inputs.testEnvs }}"
          RESULTS_PATH="${{ inputs.testResultsPath }}"
          STAGING_DIR="${{ runner.temp }}/test-results" # Use runner temp dir for results
          mkdir -p "$STAGING_DIR"
@@ -155,12 +155,14 @@ jobs:
          # Prepare environment variables for docker run
          ENV_ARGS=""
          if [[ "$TEST_ENVS_JSON" != "[]" ]]; then
-            while IFS= read -r line; do
-              ENV_ARGS+=" -e \"$line\""
-            done <<< $(echo "$TEST_ENVS_JSON" | jq -r '.[]')
+            # Convert JSON array string to individual env vars
+            IFS=',' read -r -a env_array <<< $(echo "$TEST_ENVS_JSON" | jq -r '.[][]')
+            for env in "${env_array[@]}"; do
+              ENV_ARGS+=" -e \"$env\""
+            done
          else
            # Add a dummy env var if none are provided, as required by original script logic
            ENV_ARGS+=" -e DUMMY_ENV_TEST_RUN_ID=${{ github.run_id }}"
          fi
          echo "Env args: $ENV_ARGS" # Debugging
@@ -169,7 +171,7 @@ jobs:
          echo "Using Network: $COMPOSE_NETWORK_NAME"
          # Loop through test directories and execute tests
-          echo "$TEST_DIRS" | jq -c '.[]' | while read test_dir; do
+          echo "$TEST_DIRS" | jq -r '.[]' | while read test_dir; do
            test_dir=$(echo $test_dir | sed 's/"//g') # Remove quotes
            echo "Running test: $test_dir"
            docker run \
@@ -177,7 +179,7 @@ jobs:
              $ENV_ARGS \
              -v "$STAGING_DIR:$RESULTS_PATH" \
              --rm \
-              "${{ env.IMAGE_REPO }}/${{ inputs.testContainerName }}:${{ github.run_id }}" \
+              "${{ steps.tags.outputs.image_repo_path }}/${{ inputs.testContainerName }}:${{ github.run_id }}" \
              "$test_dir"
            # Add error handling if needed (e.g., exit script if a test run fails)
            if [ $? -ne 0 ]; then
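
In the test loop above, `jq -c '.[]'` was switched to `jq -r '.[]'`: with `-c` each directory name is still emitted as a JSON string wrapped in double quotes (which is why the `sed 's/"//g'` line exists), while `-r` prints the raw string. A small sketch of the difference, using a made-up `tests` input:

```bash
#!/usr/bin/env bash
# TEST_DIRS below is an illustrative value, not taken from the workflow inputs.
TEST_DIRS='["tests/unit", "tests/integration"]'

echo "$TEST_DIRS" | jq -c '.[]'   # old behaviour: "tests/unit" (JSON-encoded, keeps quotes)
echo "$TEST_DIRS" | jq -r '.[]'   # new behaviour: tests/unit   (raw string, no quotes)
```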

View File

@@ -1,5 +1,5 @@
 # pull official base image
-FROM python:3.11-slim-bullseye
+FROM python:3.11-slim-bullseye AS base
 # create directory for the app user
 RUN mkdir -p /home/app
@@ -16,16 +16,13 @@ WORKDIR $APP_HOME
 # set environment variables
 ENV PYTHONDONTWRITEBYTECODE=1
 ENV PYTHONUNBUFFERED=1
-ENV ENVIRONMENT=prod
-ENV TESTING=0
 # add app
 COPY . $APP_HOME
 # install python dependencies
 RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pipenv && rm -rf ~/.cache/pip
-RUN pipenv install --deploy --dev
+RUN pipenv install --deploy
 # chown all the files to the app user
 RUN chown -R app:app $APP_HOME
@@ -33,8 +30,29 @@ RUN chown -R app:app $APP_HOME
 # change to the app user
 USER app
+# TEST
+FROM base AS test
+ENV ENVIRONMENT=dev
+ENV TESTING=1
+RUN pipenv install --deploy --dev
+# run tests
+RUN pipenv run pytest tests --disable-warnings
+# BUILD
+FROM base AS builder
+ENV ENVIRONMENT=prod
+ENV TESTING=0
 # expose the port the app runs on
 EXPOSE 80
 # run uvicorn
 CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "80"]
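
With the Dockerfile split into `base`, `test`, and `builder` stages, tests can run as part of an image build by targeting the `test` stage, while the production image comes from `builder`. A hedged usage sketch; the image tags below are illustrative, not taken from this repository:

```bash
#!/usr/bin/env bash
# Build only the test stage: the `RUN pipenv run pytest ...` step fails the
# build if any test fails.
docker build --target test -t backend-tests .

# Build the production stage (ENVIRONMENT=prod, TESTING=0, no dev dependencies).
docker build --target builder -t backend:latest .

# Run the production image; uvicorn listens on port 80 inside the container.
docker run --rm -p 8000:80 backend:latest
```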