diff --git a/.github/workflows/build_new.yml b/.github/workflows/build_new.yml
new file mode 100644
index 0000000..c617713
--- /dev/null
+++ b/.github/workflows/build_new.yml
@@ -0,0 +1,68 @@
+# .github/workflows/build_new.yml
+# This workflow triggers the build process and calls the reusable template.
+
+# Note: expressions are not evaluated in the top-level 'name', so the
+# dynamic title goes in 'run-name' instead.
+name: Build aimingmed-ai
+run-name: Build aimingmed-ai ${{ github.ref_name }}+${{ github.run_id }}
+
+# Triggers: equivalent to the ADO trigger block
+on:
+  push:
+    branches:
+      - development
+    paths:
+      - 'app/**'
+      - '.github/workflows/**'
+  # Allow manual triggering from the GitHub UI
+  workflow_dispatch:
+
+# Concurrency: equivalent to batch: true
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  # This job defines the matrix and calls the reusable workflow for each image
+  call-build-template:
+    name: Build ${{ matrix.image_config.IMAGE_NAME }}
+    # Permissions needed to push to GitHub Packages (ghcr.io)
+    permissions:
+      contents: read
+      packages: write
+
+    # 'inherit' makes all repository/organization secrets available to the
+    # called workflow
+    secrets: inherit
+
+    # Matrix strategy based on the 'images' object from the original ADO build.yml
+    strategy:
+      fail-fast: false # Don't cancel other matrix jobs if one fails
+      matrix:
+        # Each entry is wrapped in a single 'image_config' key so related
+        # values stay together; sub-keys are accessed directly below.
+        image_config:
+          - IMAGE_NAME: backend-aimingmedai
+            BUILD_CONTEXT: ./app/backend
+            DOCKERFILE: ./app/backend/Dockerfile
+            INTERMEDIATE_CONTAINER: builder
+            ARGS: "" # Default empty ARGS
+
+    # Call the reusable workflow. Local reusable workflows must be referenced
+    # with a path rooted at the repository ('./').
+    # NOTE: GitHub expects reusable workflows directly under .github/workflows;
+    # if the templates/ subdirectory is rejected, move the file up one level
+    # and drop 'templates/' from this path.
+    uses: ./.github/workflows/templates/template_build.yml
+    # Pass inputs required by the reusable workflow. The 'env' context is not
+    # available in 'with:' for reusable workflow calls, so the project name
+    # and registry namespace are inlined here.
+    with:
+      project_name: aimingmed-ai
+      # Registry namespace: the template logs in to and pushes to ghcr.io,
+      # so this must be an image path prefix, not a https:// URL
+      repo: ghcr.io/aimingmed
+      image_name: ${{ matrix.image_config.IMAGE_NAME }}
+      build_context: ${{ matrix.image_config.BUILD_CONTEXT }}
+      dockerfile: ${{ matrix.image_config.DOCKERFILE }}
+      # Provide default empty strings if matrix values are null/undefined
+      intermediate_container: ${{ matrix.image_config.INTERMEDIATE_CONTAINER || '' }}
+      args: ${{ matrix.image_config.ARGS || '' }}
+      # Run-specific context needed for tagging
+      build_id: ${{ github.run_id }}
+      commit_sha: ${{ github.sha }}
\ No newline at end of file
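The matrix currently carries a single image, and adding more is just a matter of appending entries. A minimal sketch of what a second entry could look like — the `frontend-aimingmedai` name, context, and build arg are illustrative assumptions, not part of this PR:

```yaml
        image_config:
          - IMAGE_NAME: backend-aimingmedai
            BUILD_CONTEXT: ./app/backend
            DOCKERFILE: ./app/backend/Dockerfile
            INTERMEDIATE_CONTAINER: builder
            ARGS: ""
          # Hypothetical second image, shown only to illustrate the fan-out
          - IMAGE_NAME: frontend-aimingmedai
            BUILD_CONTEXT: ./app/frontend
            DOCKERFILE: ./app/frontend/Dockerfile
            INTERMEDIATE_CONTAINER: "" # no multi-stage target
            ARGS: "--build-arg API_URL=http://backend:8765"
```

Each entry becomes an independent job via `strategy.matrix`, so a failure in one image does not cancel the others (`fail-fast: false`).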
diff --git a/.github/workflows/templates/template_build.yml b/.github/workflows/templates/template_build.yml
new file mode 100644
index 0000000..32892b4
--- /dev/null
+++ b/.github/workflows/templates/template_build.yml
@@ -0,0 +1,170 @@
+# This is a reusable workflow template containing the Docker build logic for one image.
+# It is called by build_new.yml for each item in the matrix.
+
+name: Reusable Docker Build Template
+
+# Define inputs expected from the calling workflow
+on:
+  workflow_call:
+    inputs:
+      project_name:
+        required: true
+        type: string
+      repo:
+        required: true
+        type: string
+      image_name:
+        required: true
+        type: string
+      build_context:
+        required: true
+        type: string
+      dockerfile:
+        required: true
+        type: string
+      intermediate_container:
+        required: false # Optional input
+        type: string
+        default: ''
+      args:
+        required: false # Optional input
+        type: string
+        default: ''
+      build_id:
+        required: true
+        type: string # run_id is passed as a string
+      commit_sha:
+        required: true
+        type: string
+    # Secrets declared for parity with the ADO pipeline. The ghcr.io login
+    # below uses the built-in GITHUB_TOKEN instead, so these are optional;
+    # the caller provides them via 'secrets: inherit' or explicit mapping.
+    secrets:
+      ARTIFACTORY_USER:
+        required: false
+      ARTIFACTORY_PASSWORD:
+        required: false
+      # ARTIFACT_FEED_READ_TOKEN_B64 is needed by the caller to construct
+      # 'args', but not used directly in this template. Add it here if that
+      # changes.
+
+jobs:
+  build-single-image:
+    # Executes the build steps for the image configuration passed via inputs
+    name: Build ${{ inputs.image_name }}
+    runs-on: ubuntu-latest
+    timeout-minutes: 120 # From the original ADO template
+
+    steps:
+      - name: Checkout repo
+        # No 'ref' is set: in a reusable workflow, checkout defaults to the
+        # commit that triggered the calling workflow, which is what we want.
+        uses: actions/checkout@v4
+
+      - name: Set up Docker Buildx
+        # Recommended for improved build features and caching
+        uses: docker/setup-buildx-action@v3
+
+      - name: Log in to GitHub Packages
+        run: echo "${GITHUB_TOKEN}" | docker login -u "${GITHUB_ACTOR}" --password-stdin ghcr.io
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Nuke Docker Cache
+        # Equivalent to CmdLine@2 Nuke Cache. As in the ADO pipeline, the
+        # cache is cleared to keep it from filling the runner's disk.
+        run: |
+          echo "Pruning Docker system..."
+          docker system prune -a -f --volumes
+      - name: Remove unnecessary files
+        # Free additional disk space on the runner before building
+        run: |
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf "/usr/local/share/boost"
+          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+
+      - name: Check disk space
+        run: df -h
+
+      - name: Define Image Tags
+        # Define tags consistently using inputs
+        id: tags
+        run: |
+          echo "image_repo_path=${{ inputs.repo }}/${{ inputs.project_name }}/${{ inputs.image_name }}" >> $GITHUB_OUTPUT
+          echo "tag_build_id=${{ inputs.build_id }}" >> $GITHUB_OUTPUT
+          echo "tag_commit_sha=${{ inputs.commit_sha }}" >> $GITHUB_OUTPUT
+
+      - name: Pull Latest Image for Cache
+        # Pulls the latest tag if it exists
+        continue-on-error: true # Mimics '|| true'
+        run: |
+          echo "Attempting to pull latest image for cache: ${{ steps.tags.outputs.image_repo_path }}:latest"
+          docker pull ${{ steps.tags.outputs.image_repo_path }}:latest
+
+      - name: Pull Intermediate Image for Cache
+        # Pulls the intermediate tag if specified and it exists
+        if: inputs.intermediate_container != ''
+        continue-on-error: true # Mimics '|| true'
+        run: |
+          echo "Attempting to pull intermediate image for cache: ${{ steps.tags.outputs.image_repo_path }}:${{ inputs.intermediate_container }}"
+          docker pull ${{ steps.tags.outputs.image_repo_path }}:${{ inputs.intermediate_container }}
+
+      # Note: registry cache-from only produces cache hits when the referenced
+      # images carry cache metadata, so BUILDKIT_INLINE_CACHE=1 is set on
+      # every build below to embed it.
+      - name: Build Intermediate Image
+        # Builds the intermediate target if specified
+        if: inputs.intermediate_container != ''
+        run: |
+          echo "Building intermediate image: ${{ steps.tags.outputs.image_repo_path }}:${{ inputs.intermediate_container }}"
+          docker build \
+            -f ${{ inputs.dockerfile }} \
+            --pull \
+            --build-arg BUILDKIT_INLINE_CACHE=1 \
+            --cache-from type=registry,ref=${{ steps.tags.outputs.image_repo_path }}:${{ inputs.intermediate_container }} \
+            -t ${{ steps.tags.outputs.image_repo_path }}:${{ inputs.intermediate_container }} \
+            --target ${{ inputs.intermediate_container }} \
+            ${{ inputs.args }} \
+            ${{ inputs.build_context }}
+
+      - name: Build Final Image (with Intermediate Cache)
+        # Builds the final image using the intermediate cache if specified
+        if: inputs.intermediate_container != ''
+        run: |
+          echo "Building final image with intermediate cache..."
+          docker build \
+            -f ${{ inputs.dockerfile }} \
+            --pull \
+            --build-arg BUILDKIT_INLINE_CACHE=1 \
+            --cache-from type=registry,ref=${{ steps.tags.outputs.image_repo_path }}:latest \
+            --cache-from type=registry,ref=${{ steps.tags.outputs.image_repo_path }}:${{ inputs.intermediate_container }} \
+            -t ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_build_id }} \
+            -t ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_commit_sha }} \
+            -t ${{ steps.tags.outputs.image_repo_path }}:latest \
+            ${{ inputs.args }} \
+            ${{ inputs.build_context }}
+
+      - name: Build Final Image (without Intermediate Cache)
+        # Builds the final image without intermediate cache if not specified
+        if: inputs.intermediate_container == ''
+        run: |
+          echo "Building final image without intermediate cache..."
+          docker build \
+            -f ${{ inputs.dockerfile }} \
+            --pull \
+            --build-arg BUILDKIT_INLINE_CACHE=1 \
+            --cache-from type=registry,ref=${{ steps.tags.outputs.image_repo_path }}:latest \
+            -t ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_build_id }} \
+            -t ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_commit_sha }} \
+            -t ${{ steps.tags.outputs.image_repo_path }}:latest \
+            ${{ inputs.args }} \
+            ${{ inputs.build_context }}
+      - name: Push Final Image Tags
+        # Pushes the final tags (build id, commit sha, latest)
+        run: |
+          echo "Pushing final image tags..."
+          docker push ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_build_id }}
+          docker push ${{ steps.tags.outputs.image_repo_path }}:${{ steps.tags.outputs.tag_commit_sha }}
+          docker push ${{ steps.tags.outputs.image_repo_path }}:latest
+
+      - name: Push Intermediate Image
+        # Pushes the intermediate tag if it was built
+        if: inputs.intermediate_container != ''
+        run: |
+          echo "Pushing intermediate image tag..."
+          docker push ${{ steps.tags.outputs.image_repo_path }}:${{ inputs.intermediate_container }}
\ No newline at end of file
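For comparison, the pull/build/push choreography above could later collapse into `docker/build-push-action`, which handles registry cache natively. A sketch under the same inputs — not part of this PR, and note the free-form `args` input has no direct equivalent (it would need to be mapped onto `build-args`), which is why the CLI form is kept:

```yaml
      - name: Build and push (alternative sketch)
        uses: docker/build-push-action@v5
        with:
          context: ${{ inputs.build_context }}
          file: ${{ inputs.dockerfile }}
          push: true
          tags: |
            ${{ steps.tags.outputs.image_repo_path }}:${{ inputs.build_id }}
            ${{ steps.tags.outputs.image_repo_path }}:${{ inputs.commit_sha }}
            ${{ steps.tags.outputs.image_repo_path }}:latest
          # Reuse layer cache from the last published image
          cache-from: type=registry,ref=${{ steps.tags.outputs.image_repo_path }}:latest
          # Embed cache metadata in the pushed image for the next run
          cache-to: type=inline
```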
diff --git a/.github/workflows/templates/template_test.yml b/.github/workflows/templates/template_test.yml
new file mode 100644
index 0000000..4f19491
--- /dev/null
+++ b/.github/workflows/templates/template_test.yml
@@ -0,0 +1,276 @@
+# This is a reusable workflow template for setting up Docker Compose and running integration tests.
+
+name: Reusable Integration Test Template
+
+# Define inputs expected from the calling workflow
+on:
+  workflow_call:
+    inputs:
+      project_name:
+        required: true
+        type: string
+      repo:
+        required: true
+        type: string
+      # JSON string representing the list of image configurations for docker compose
+      images_config_json:
+        required: true
+        type: string
+      # JSON string representing the list of test directories to execute
+      tests_to_run_json:
+        required: true
+        type: string
+      # Newline-separated string of environment variables for the test runner container
+      test_envs_newline:
+        required: false
+        type: string
+        default: ""
+      test_container_name:
+        required: false
+        type: string
+        default: "tests" # Default from the ADO template
+      test_results_path_in_container:
+        required: false
+        type: string
+        default: "/usr/src/app/results" # Default from the ADO template
+      test_results_filename:
+        required: false
+        type: string
+        default: "results.xml" # Default from the ADO template
+      # The build ID (run_id) used to tag the images pulled/run
+      build_id:
+        required: true
+        type: string # run_id is passed as a string
+
+    # Secrets required by this reusable workflow
+    secrets:
+      ARTIFACTORY_USER:
+        required: true
+      ARTIFACTORY_PASSWORD:
+        required: true
+
+jobs:
+  compose-and-test:
+    name: Run Integration Tests
+    runs-on: ubuntu-latest
+    # Default to pwsh for the compose-generation step; bash steps opt in below
+    defaults:
+      run:
+        shell: pwsh
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 1 # As in the original ADO checkout
+
+      - name: Create Docker-Compose File from JSON input
+        # Translates the PowerShell script from the ADO template_test.yml.
+        # Uses PowerShell Core (pwsh), available on ubuntu-latest runners.
+        env:
+          # Pass inputs as environment variables for the script
+          IMAGES_JSON: ${{ inputs.images_config_json }}
+          REPO_VAR: ${{ inputs.repo }}
+          PROJECT_NAME_VAR: ${{ inputs.project_name }}
+          TAG_VAR: ${{ inputs.build_id }}
+        run: |
+          # Load the images parameter from the environment variable
+          # ($env: avoids Get-Content quirks on the Environment provider)
+          $imagesJson = $env:IMAGES_JSON
+          try {
+            $images = $imagesJson | ConvertFrom-Json
+          } catch {
+            Write-Error "Failed to parse IMAGES_JSON: $($_.Exception.Message)"
+            Write-Error "JSON content was: $imagesJson"
+            exit 1
+          }
+
+          # Create the compose document skeleton
+          $compose = @{version = "3.6"; services = @{}}
+          # Define the network (matches the ADO template)
+          $compose.networks = @{test = @{ external = $true; name = "test" }}
+
+          # Generate the services section from the images input
+          foreach ($imageInfo in $images) {
+            $svc = [ordered]@{} # Use an ordered dictionary for stable output
+
+            # Use the explicit image if provided, otherwise construct the path
+            if ($imageInfo.PSObject.Properties['image']) {
+              $svc.image = $imageInfo.image
+            } else {
+              # Construct the image path as REPO/PROJECT_NAME/name:TAG
+              $svc.image = "${env:REPO_VAR}/${env:PROJECT_NAME_VAR}/$($imageInfo.name):${env:TAG_VAR}"
+            }
+
+            # Add depends_on if present in the input
+            if ($imageInfo.PSObject.Properties['depends_on']) {
+              $svc.depends_on = $imageInfo.depends_on
+            }
+
+            # Add environment variables if present, otherwise an empty object
+            $svc.environment = if ($imageInfo.PSObject.Properties['env']) { $imageInfo.env } else { @{} }
+
+            # Assign the service to the test network
+            $svc.networks = @("test")
+
+            # Add the service definition to the compose structure
+            $compose.services[$imageInfo.name] = $svc
+          }
+
+          # docker compose expects YAML (JSON would also parse, since JSON is
+          # a YAML subset, but YAML is standard practice). The powershell-yaml
+          # module provides ConvertTo-Yaml.
+          Install-Module -Name powershell-yaml -Force -Scope CurrentUser
+          Import-Module powershell-yaml
+
+          try {
+            $yamlOutput = $compose | ConvertTo-Yaml
+            $yamlOutput | Out-File -Encoding UTF8 ./test_compose.yml
+            Write-Host "Successfully generated test_compose.yml"
+          } catch {
+            Write-Error "Failed to convert to YAML or write the file: $($_.Exception.Message)"
+            exit 1
+          }
+
+      - name: Login to Artifactory
+        # Uses secrets passed from the calling workflow via inherit.
+        # ('shell' is not a valid key on a 'uses' step, so none is set here.)
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ inputs.repo }}
+          username: ${{ secrets.ARTIFACTORY_USER }}
+          password: ${{ secrets.ARTIFACTORY_PASSWORD }}
+
+      - name: Create Docker Test Network
+        # Equivalent to the ADO script task; '|| true' becomes continue-on-error
+        shell: bash
+        continue-on-error: true
+        run: docker network create test
+
+      - name: Clean Docker Services Before Test
+        # Equivalent to the ADO script task (Compose v2 syntax; the standalone
+        # docker-compose v1 binary has been removed from GitHub runner images)
+        shell: bash
+        run: docker compose -f test_compose.yml down -v --remove-orphans
+
+      - name: Start Docker Compose Services
+        # The generated compose file already bakes the full image paths and
+        # tags in, so no TAG/REPO/PROJECT_NAME variables are needed here.
+        shell: bash
+        run: |
+          echo "--- Generated test_compose.yml ---"
+          cat test_compose.yml
+          echo "----------------------------------"
+          docker compose -f test_compose.yml up -d
+          echo "Docker containers started."
+      - name: Wait for Services
+        # Equivalent to the ADO sleep task
+        shell: bash
+        run: |
+          echo "Waiting 60 seconds for services to initialize..."
+          sleep 60
+          echo "Wait complete. Checking container status:"
+          docker ps -a
+
+      - name: Prepare Test Environment File
+        # Build an env file for 'docker run --env-file'. The input is passed
+        # through an environment variable rather than interpolated into the
+        # script, which avoids shell injection via the input string.
+        shell: bash
+        env:
+          TEST_ENVS: ${{ inputs.test_envs_newline }}
+        run: |
+          printf '%s\n' "$TEST_ENVS" > ./test_env_vars.env
+          echo "Created test_env_vars.env file."
+          # Add the build ID as a default test env var, as in the ADO coalesce
+          echo "DUMMY_ENV_TEST_RUN_ID=${{ inputs.build_id }}" >> ./test_env_vars.env
+
+      - name: Create Results Directory on Runner
+        shell: bash
+        run: mkdir -p ${{ runner.temp }}/test-results
+
+      - name: Run Tests Iteratively
+        # Iterates through the test directories provided in the JSON input
+        shell: bash
+        env:
+          TESTS_JSON: ${{ inputs.tests_to_run_json }}
+          TEST_CONTAINER_IMAGE: ${{ inputs.repo }}/${{ inputs.project_name }}/${{ inputs.test_container_name }}:${{ inputs.build_id }}
+          RESULTS_PATH_HOST: ${{ runner.temp }}/test-results
+          RESULTS_PATH_CONTAINER: ${{ inputs.test_results_path_in_container }}
+        run: |
+          echo "Running tests for image: $TEST_CONTAINER_IMAGE"
+          echo "Host results dir: $RESULTS_PATH_HOST"
+          echo "Container results dir: $RESULTS_PATH_CONTAINER"
+
+          # jq is preinstalled on ubuntu-latest, but install it if missing
+          if ! command -v jq &> /dev/null
+          then
+            echo "jq could not be found, installing..."
+            sudo apt-get update && sudo apt-get install -y jq
+          fi
+
+          # Parse the JSON array of test directories.
+          # readarray is safer than word-splitting command output.
+          readarray -t test_dirs < <(jq -r '.[]' <<< "$TESTS_JSON")
+
+          if [ ${#test_dirs[@]} -eq 0 ]; then
+            echo "Warning: No test directories found in TESTS_JSON input."
+            exit 0 # Exit successfully if no tests are specified
+          fi
+
+          echo "Found ${#test_dirs[@]} test directories to run."
+
+          # Run every directory, recording failures so one failing suite does
+          # not stop the others, then report the overall status at the end.
+          status=0
+          for test_dir in "${test_dirs[@]}"; do
+            echo "--- Running test: $test_dir ---"
+            docker run \
+              --network test \
+              --env-file ./test_env_vars.env \
+              -v "$RESULTS_PATH_HOST":"$RESULTS_PATH_CONTAINER" \
+              --rm \
+              "$TEST_CONTAINER_IMAGE" \
+              "$test_dir" || { echo "Test run failed for $test_dir"; status=1; }
+            echo "--- Finished test: $test_dir ---"
+          done
+          echo "All specified test runs attempted."
+          exit $status
+
+      - name: Publish Test Results
+        # Equivalent to PublishTestResults@2; looks for JUnit XML files in the
+        # specified path.
+        if: always() # Run even if previous steps fail
+        uses: dorny/test-reporter@v1
+        with:
+          name: Integration Test Results (${{ inputs.project_name }})
+          # The results live under runner.temp, not the workspace root, so the
+          # pattern points at the mounted host directory
+          path: ${{ runner.temp }}/test-results/${{ inputs.test_results_filename }}
+          reporter: java-junit # Specify the format
+          fail-on-error: false # Don't fail this step if parsing/upload fails
+
+      - name: Print Docker Logs on Failure/Completion
+        # Equivalent to the ADO CmdLine@2 Print Logs task
+        if: always() # Run even if previous steps fail
+        shell: bash
+        run: |
+          echo "--- Printing Docker Compose logs ---"
+          docker compose -f test_compose.yml logs
+          echo "------------------------------------"
+
+      - name: Clean Docker Services After Test
+        # Clean up containers, volumes, and orphans after the run
+        if: always()
+        shell: bash
+        run: |
+          echo "Cleaning up docker compose services..."
+          docker compose -f test_compose.yml down -v --remove-orphans
+          echo "Cleanup complete."
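No caller for this test template is included in the PR yet. For reference, a hypothetical caller might look like the sketch below; the service name, test directories, and env var are illustrative assumptions:

```yaml
jobs:
  integration-tests:
    uses: ./.github/workflows/templates/template_test.yml
    secrets: inherit
    with:
      project_name: aimingmed-ai
      repo: ghcr.io/aimingmed
      build_id: ${{ github.run_id }}
      # JSON string: one entry per compose service (names are illustrative)
      images_config_json: >-
        [{"name": "backend-aimingmedai", "env": {"ENVIRONMENT": "test"}}]
      # JSON string: test directories passed one-by-one to the test container
      tests_to_run_json: '["tests/integration", "tests/smoke"]'
      test_envs_newline: |
        BACKEND_URL=http://backend-aimingmedai:8765
```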
diff --git a/app/backend/Dockerfile b/app/backend/Dockerfile
index 5ccef9c..f22deb9 100644
--- a/app/backend/Dockerfile
+++ b/app/backend/Dockerfile
@@ -1,23 +1,40 @@
 # pull official base image
-FROM python:3.11-slim
+FROM python:3.11-slim-bullseye
 
-# set working directory
-WORKDIR /usr/src/app
+# create directory for the app user
+RUN mkdir -p /home/app
+
+# create the app user
+RUN addgroup --system app && adduser --system --group app
+
+# create the appropriate directories
+ENV HOME=/home/app
+ENV APP_HOME=/home/app/backend
+RUN mkdir $APP_HOME
+WORKDIR $APP_HOME
 
 # set environment variables
-ENV PYTHONDONTWRITEBYTECODE 1
-ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV ENVIRONMENT=prod
+ENV TESTING=0
 
-# install system dependencies
-RUN apt-get update \
-    && apt-get -y install build-essential netcat-traditional gcc \
-    && apt-get clean
-
-# install python dependencies
-RUN pip install --upgrade pip setuptools wheel -i https://pypi.tuna.tsinghua.edu.cn/simple
-RUN pip install pipenv -i https://pypi.tuna.tsinghua.edu.cn/simple
-COPY ./Pipfile .
-RUN pipenv install --deploy --dev
+# install python dependencies first, so the dependency layers are cached
+# independently of application code changes
+COPY ./Pipfile ./Pipfile.lock ./
+RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple pipenv && rm -rf ~/.cache/pip
+RUN pipenv install --deploy --dev
+
 # add app
-COPY . .
+COPY . $APP_HOME
+
+# chown everything under $HOME to the app user, including the pipenv
+# virtualenv created under $HOME/.local, not just the app code
+RUN chown -R app:app $HOME
+
+# change to the app user
+USER app
+
+# expose the port the app runs on
+EXPOSE 8765
+
+# run uvicorn (with --reload: this Dockerfile is the dev image and
+# docker-compose mounts the source tree over $APP_HOME)
+CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "8765"]
\ No newline at end of file
diff --git a/app/backend/Dockerfile.prod b/app/backend/Dockerfile.prod
index c653e01..1279116 100644
--- a/app/backend/Dockerfile.prod
+++ b/app/backend/Dockerfile.prod
@@ -56,11 +56,6 @@ ENV PYTHONUNBUFFERED=1
 ENV ENVIRONMENT=prod
 ENV TESTING=0
 
-# install system dependencies
-# RUN apt-get update \
-#     && apt-get -y install build-essential \
-#     && apt-get clean \
-# && rm -rf /var/lib/apt/lists/*
 
 # install python dependencies
 COPY --from=builder /usr/src/app/Pipfile .
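Since the backend now exposes port 8765, the fixed `sleep 60` in the test template could eventually give way to a compose healthcheck and `depends_on: condition: service_healthy`. A sketch, assuming a `/health` endpoint that this PR does not define (the slim base image lacks curl, so python's urllib is used):

```yaml
services:
  backend:
    image: ghcr.io/aimingmed/aimingmed-ai/backend-aimingmedai:latest
    healthcheck:
      # Hypothetical endpoint -- adjust to whatever the FastAPI app exposes
      test: ["CMD-SHELL", "python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8765/health')\""]
      interval: 10s
      timeout: 5s
      retries: 12
```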
diff --git a/app/backend/Pipfile b/app/backend/Pipfile index 7e292f1..9ab5c9d 100644 --- a/app/backend/Pipfile +++ b/app/backend/Pipfile @@ -6,7 +6,7 @@ name = "pypi" [packages] fastapi = "==0.115.9" starlette = "==0.45.3" -uvicorn = "==0.26.0" +uvicorn = {version = "==0.26.0", extras = ["standard"]} pydantic-settings = "==2.1.0" gunicorn = "==21.0.1" python-decouple = "==3.8" diff --git a/app/backend/Pipfile.lock b/app/backend/Pipfile.lock index 83958d5..102f2b1 100644 --- a/app/backend/Pipfile.lock +++ b/app/backend/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "a6778423552ff7c5442034df8bdbfa526ab8ccc841ccb5bb37c1afd3abb3f3be" + "sha256": "5d9bbaeb520a4c6fc604de7dbc6ee0d9a087b0a07610eba4d66e4dcc89d468e2" }, "pipfile-spec": 6, "requires": { @@ -364,19 +364,19 @@ }, "google-auth": { "hashes": [ - "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4", - "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a" + "sha256:0150b6711e97fb9f52fe599f55648950cc4540015565d8fbb31be2ad6e1548a2", + "sha256:73222d43cdc35a3aeacbfdcaf73142a97839f10de930550d89ebfe1d0a00cde7" ], "markers": "python_version >= '3.7'", - "version": "==2.38.0" + "version": "==2.39.0" }, "googleapis-common-protos": { "hashes": [ - "sha256:0b30452ff9c7a27d80bfc5718954063e8ab53dd3697093d3bc99581f5fd24212", - "sha256:3e1b904a27a33c821b4b749fd31d334c0c9c30e6113023d495e48979a3dc9c5f" + "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", + "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8" ], "markers": "python_version >= '3.7'", - "version": "==1.69.2" + "version": "==1.70.0" }, "grpcio": { "hashes": [ @@ -739,11 +739,11 @@ }, "langsmith": { "hashes": [ - "sha256:4588aad24623320cdf355f7594e583874c27e70460e6e6446a416ebb702b8cf7", - "sha256:80d591a4c62c14950ba497bb8b565ad9bd8d07e102b643916f0d2af1a7b2daaf" + "sha256:8d20bd08fa6c3bce54cb600ddc521cd218a1c3410f90d9266179bf83a7ff0897", + "sha256:ee780ae3eac69998c336817c0b9f5ccfecaaaa3e67d94b7ef726b58ab3e72a25" ], "markers": "python_version >= '3.9' and python_version < '4.0'", - "version": "==0.3.30" + "version": "==0.3.31" }, "markdown-it-py": { "hashes": [ @@ -1032,11 +1032,11 @@ }, "openai": { "hashes": [ - "sha256:b58ea39ba589de07db85c9905557ac12d2fc77600dcd2b92a08b99c9a3dce9e0", - "sha256:f52d1f673fb4ce6069a40d544a80fcb062eba1b3f489004fac4f9923a074c425" + "sha256:592c25b8747a7cad33a841958f5eb859a785caea9ee22b9e4f4a2ec062236526", + "sha256:aff3e0f9fb209836382ec112778667027f4fd6ae38bdb2334bc9e173598b092a" ], "markers": "python_version >= '3.8'", - "version": "==1.73.0" + "version": "==1.74.0" }, "opentelemetry-api": { "hashes": [ @@ -1210,80 +1210,90 @@ }, "pillow": { "hashes": [ - "sha256:015c6e863faa4779251436db398ae75051469f7c903b043a48f078e437656f83", - "sha256:0a2f91f8a8b367e7a57c6e91cd25af510168091fb89ec5146003e424e1558a96", - "sha256:11633d58b6ee5733bde153a8dafd25e505ea3d32e261accd388827ee987baf65", - "sha256:2062ffb1d36544d42fcaa277b069c88b01bb7298f4efa06731a7fd6cc290b81a", - "sha256:31eba6bbdd27dde97b0174ddf0297d7a9c3a507a8a1480e1e60ef914fe23d352", - "sha256:3362c6ca227e65c54bf71a5f88b3d4565ff1bcbc63ae72c34b07bbb1cc59a43f", - "sha256:368da70808b36d73b4b390a8ffac11069f8a5c85f29eff1f1b01bcf3ef5b2a20", - "sha256:36ba10b9cb413e7c7dfa3e189aba252deee0602c86c309799da5a74009ac7a1c", - "sha256:3764d53e09cdedd91bee65c2527815d315c6b90d7b8b79759cc48d7bf5d4f114", - "sha256:3a5fe20a7b66e8135d7fd617b13272626a28278d0e578c98720d9ba4b2439d49", - 
"sha256:3cdcdb0b896e981678eee140d882b70092dac83ac1cdf6b3a60e2216a73f2b91", - "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0", - "sha256:4db853948ce4e718f2fc775b75c37ba2efb6aaea41a1a5fc57f0af59eee774b2", - "sha256:4dd43a78897793f60766563969442020e90eb7847463eca901e41ba186a7d4a5", - "sha256:54251ef02a2309b5eec99d151ebf5c9904b77976c8abdcbce7891ed22df53884", - "sha256:54ce1c9a16a9561b6d6d8cb30089ab1e5eb66918cb47d457bd996ef34182922e", - "sha256:593c5fd6be85da83656b93ffcccc2312d2d149d251e98588b14fbc288fd8909c", - "sha256:5bb94705aea800051a743aa4874bb1397d4695fb0583ba5e425ee0328757f196", - "sha256:67cd427c68926108778a9005f2a04adbd5e67c442ed21d95389fe1d595458756", - "sha256:70ca5ef3b3b1c4a0812b5c63c57c23b63e53bc38e758b37a951e5bc466449861", - "sha256:73ddde795ee9b06257dac5ad42fcb07f3b9b813f8c1f7f870f402f4dc54b5269", - "sha256:758e9d4ef15d3560214cddbc97b8ef3ef86ce04d62ddac17ad39ba87e89bd3b1", - "sha256:7d33d2fae0e8b170b6a6c57400e077412240f6f5bb2a342cf1ee512a787942bb", - "sha256:7fdadc077553621911f27ce206ffcbec7d3f8d7b50e0da39f10997e8e2bb7f6a", - "sha256:8000376f139d4d38d6851eb149b321a52bb8893a88dae8ee7d95840431977081", - "sha256:837060a8599b8f5d402e97197d4924f05a2e0d68756998345c829c33186217b1", - "sha256:89dbdb3e6e9594d512780a5a1c42801879628b38e3efc7038094430844e271d8", - "sha256:8c730dc3a83e5ac137fbc92dfcfe1511ce3b2b5d7578315b63dbbb76f7f51d90", - "sha256:8e275ee4cb11c262bd108ab2081f750db2a1c0b8c12c1897f27b160c8bd57bbc", - "sha256:9044b5e4f7083f209c4e35aa5dd54b1dd5b112b108648f5c902ad586d4f945c5", - "sha256:93a18841d09bcdd774dcdc308e4537e1f867b3dec059c131fde0327899734aa1", - "sha256:9409c080586d1f683df3f184f20e36fb647f2e0bc3988094d4fd8c9f4eb1b3b3", - "sha256:96f82000e12f23e4f29346e42702b6ed9a2f2fea34a740dd5ffffcc8c539eb35", - "sha256:9aa9aeddeed452b2f616ff5507459e7bab436916ccb10961c4a382cd3e03f47f", - "sha256:9ee85f0696a17dd28fbcfceb59f9510aa71934b483d1f5601d1030c3c8304f3c", - "sha256:a07dba04c5e22824816b2615ad7a7484432d7f540e6fa86af60d2de57b0fcee2", - "sha256:a3cd561ded2cf2bbae44d4605837221b987c216cff94f49dfeed63488bb228d2", - "sha256:a697cd8ba0383bba3d2d3ada02b34ed268cb548b369943cd349007730c92bddf", - "sha256:a76da0a31da6fcae4210aa94fd779c65c75786bc9af06289cd1c184451ef7a65", - "sha256:a85b653980faad27e88b141348707ceeef8a1186f75ecc600c395dcac19f385b", - "sha256:a8d65b38173085f24bc07f8b6c505cbb7418009fa1a1fcb111b1f4961814a442", - "sha256:aa8dd43daa836b9a8128dbe7d923423e5ad86f50a7a14dc688194b7be5c0dea2", - "sha256:ab8a209b8485d3db694fa97a896d96dd6533d63c22829043fd9de627060beade", - "sha256:abc56501c3fd148d60659aae0af6ddc149660469082859fa7b066a298bde9482", - "sha256:ad5db5781c774ab9a9b2c4302bbf0c1014960a0a7be63278d13ae6fdf88126fe", - "sha256:ae98e14432d458fc3de11a77ccb3ae65ddce70f730e7c76140653048c71bfcbc", - "sha256:b20be51b37a75cc54c2c55def3fa2c65bb94ba859dde241cd0a4fd302de5ae0a", - "sha256:b523466b1a31d0dcef7c5be1f20b942919b62fd6e9a9be199d035509cbefc0ec", - "sha256:b5d658fbd9f0d6eea113aea286b21d3cd4d3fd978157cbf2447a6035916506d3", - "sha256:b6123aa4a59d75f06e9dd3dac5bf8bc9aa383121bb3dd9a7a612e05eabc9961a", - "sha256:bd165131fd51697e22421d0e467997ad31621b74bfc0b75956608cb2906dda07", - "sha256:bf902d7413c82a1bfa08b06a070876132a5ae6b2388e2712aab3a7cbc02205c6", - "sha256:c12fc111ef090845de2bb15009372175d76ac99969bdf31e2ce9b42e4b8cd88f", - "sha256:c1eec9d950b6fe688edee07138993e54ee4ae634c51443cfb7c1e7613322718e", - "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192", - "sha256:cc1331b6d5a6e144aeb5e626f4375f5b7ae9934ba620c0ac6b3e43d5e683a0f0", - 
"sha256:cfd5cd998c2e36a862d0e27b2df63237e67273f2fc78f47445b14e73a810e7e6", - "sha256:d3d8da4a631471dfaf94c10c85f5277b1f8e42ac42bade1ac67da4b4a7359b73", - "sha256:d44ff19eea13ae4acdaaab0179fa68c0c6f2f45d66a4d8ec1eda7d6cecbcc15f", - "sha256:dd0052e9db3474df30433f83a71b9b23bd9e4ef1de13d92df21a52c0303b8ab6", - "sha256:dd0e081319328928531df7a0e63621caf67652c8464303fd102141b785ef9547", - "sha256:dda60aa465b861324e65a78c9f5cf0f4bc713e4309f83bc387be158b077963d9", - "sha256:e06695e0326d05b06833b40b7ef477e475d0b1ba3a6d27da1bb48c23209bf457", - "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8", - "sha256:e267b0ed063341f3e60acd25c05200df4193e15a4a5807075cd71225a2386e26", - "sha256:e5449ca63da169a2e6068dd0e2fcc8d91f9558aba89ff6d02121ca8ab11e79e5", - "sha256:e63e4e5081de46517099dc30abe418122f54531a6ae2ebc8680bcd7096860eab", - "sha256:f189805c8be5ca5add39e6f899e6ce2ed824e65fb45f3c28cb2841911da19070", - "sha256:f7955ecf5609dee9442cbface754f2c6e541d9e6eda87fad7f7a989b0bdb9d71", - "sha256:f86d3a7a9af5d826744fabf4afd15b9dfef44fe69a98541f666f66fbb8d3fef9", - "sha256:fbd43429d0d7ed6533b25fc993861b8fd512c42d04514a0dd6337fb3ccf22761" + "sha256:014ca0050c85003620526b0ac1ac53f56fc93af128f7546623cc8e31875ab928", + "sha256:036e53f4170e270ddb8797d4c590e6dd14d28e15c7da375c18978045f7e6c37b", + "sha256:062b7a42d672c45a70fa1f8b43d1d38ff76b63421cbbe7f88146b39e8a558d91", + "sha256:0c3e6d0f59171dfa2e25d7116217543310908dfa2770aa64b8f87605f8cacc97", + "sha256:0c7b29dbd4281923a2bfe562acb734cee96bbb129e96e6972d315ed9f232bef4", + "sha256:0f5c7eda47bf8e3c8a283762cab94e496ba977a420868cb819159980b6709193", + "sha256:127bf6ac4a5b58b3d32fc8289656f77f80567d65660bc46f72c0d77e6600cc95", + "sha256:14e33b28bf17c7a38eede290f77db7c664e4eb01f7869e37fa98a5aa95978941", + "sha256:14f73f7c291279bd65fda51ee87affd7c1e097709f7fdd0188957a16c264601f", + "sha256:191955c55d8a712fab8934a42bfefbf99dd0b5875078240943f913bb66d46d9f", + "sha256:1d535df14716e7f8776b9e7fee118576d65572b4aad3ed639be9e4fa88a1cad3", + "sha256:208653868d5c9ecc2b327f9b9ef34e0e42a4cdd172c2988fd81d62d2bc9bc044", + "sha256:21e1470ac9e5739ff880c211fc3af01e3ae505859392bf65458c224d0bf283eb", + "sha256:225c832a13326e34f212d2072982bb1adb210e0cc0b153e688743018c94a2681", + "sha256:25a5f306095c6780c52e6bbb6109624b95c5b18e40aab1c3041da3e9e0cd3e2d", + "sha256:2728567e249cdd939f6cc3d1f049595c66e4187f3c34078cbc0a7d21c47482d2", + "sha256:2b490402c96f907a166615e9a5afacf2519e28295f157ec3a2bb9bd57de638cb", + "sha256:312c77b7f07ab2139924d2639860e084ec2a13e72af54d4f08ac843a5fc9c79d", + "sha256:31df6e2d3d8fc99f993fd253e97fae451a8db2e7207acf97859732273e108406", + "sha256:35ca289f712ccfc699508c4658a1d14652e8033e9b69839edf83cbdd0ba39e70", + "sha256:3692b68c87096ac6308296d96354eddd25f98740c9d2ab54e1549d6c8aea9d79", + "sha256:36d6b82164c39ce5482f649b437382c0fb2395eabc1e2b1702a6deb8ad647d6e", + "sha256:39ad2e0f424394e3aebc40168845fee52df1394a4673a6ee512d840d14ab3013", + "sha256:3e645b020f3209a0181a418bffe7b4a93171eef6c4ef6cc20980b30bebf17b7d", + "sha256:3fe735ced9a607fee4f481423a9c36701a39719252a9bb251679635f99d0f7d2", + "sha256:4b835d89c08a6c2ee7781b8dd0a30209a8012b5f09c0a665b65b0eb3560b6f36", + "sha256:4d375eb838755f2528ac8cbc926c3e31cc49ca4ad0cf79cff48b20e30634a4a7", + "sha256:4eb92eca2711ef8be42fd3f67533765d9fd043b8c80db204f16c8ea62ee1a751", + "sha256:5119225c622403afb4b44bad4c1ca6c1f98eed79db8d3bc6e4e160fc6339d66c", + "sha256:562d11134c97a62fe3af29581f083033179f7ff435f78392565a1ad2d1c2c45c", + "sha256:598174aef4589af795f66f9caab87ba4ff860ce08cd5bb447c6fc553ffee603c", + 
"sha256:63b5dff3a68f371ea06025a1a6966c9a1e1ee452fc8020c2cd0ea41b83e9037b", + "sha256:6ebce70c3f486acf7591a3d73431fa504a4e18a9b97ff27f5f47b7368e4b9dd1", + "sha256:738db0e0941ca0376804d4de6a782c005245264edaa253ffce24e5a15cbdc7bd", + "sha256:7491cf8a79b8eb867d419648fff2f83cb0b3891c8b36da92cc7f1931d46108c8", + "sha256:74ee3d7ecb3f3c05459ba95eed5efa28d6092d751ce9bf20e3e253a4e497e691", + "sha256:750f96efe0597382660d8b53e90dd1dd44568a8edb51cb7f9d5d918b80d4de14", + "sha256:78092232a4ab376a35d68c4e6d5e00dfd73454bd12b230420025fbe178ee3b0b", + "sha256:78afba22027b4accef10dbd5eed84425930ba41b3ea0a86fa8d20baaf19d807f", + "sha256:7bdb5e09068332578214cadd9c05e3d64d99e0e87591be22a324bdbc18925be0", + "sha256:80f1df8dbe9572b4b7abdfa17eb5d78dd620b1d55d9e25f834efdbee872d3aed", + "sha256:85d27ea4c889342f7e35f6d56e7e1cb345632ad592e8c51b693d7b7556043ce0", + "sha256:8b02d8f9cb83c52578a0b4beadba92e37d83a4ef11570a8688bbf43f4ca50909", + "sha256:8ce2e8411c7aaef53e6bb29fe98f28cd4fbd9a1d9be2eeea434331aac0536b22", + "sha256:8f4f3724c068be008c08257207210c138d5f3731af6c155a81c2b09a9eb3a788", + "sha256:9622e3b6c1d8b551b6e6f21873bdcc55762b4b2126633014cea1803368a9aa16", + "sha256:9b7b0d4fd2635f54ad82785d56bc0d94f147096493a79985d0ab57aedd563156", + "sha256:9bc7ae48b8057a611e5fe9f853baa88093b9a76303937449397899385da06fad", + "sha256:9db98ab6565c69082ec9b0d4e40dd9f6181dab0dd236d26f7a50b8b9bfbd5076", + "sha256:9ee66787e095127116d91dea2143db65c7bb1e232f617aa5957c0d9d2a3f23a7", + "sha256:a0a6709b47019dff32e678bc12c63008311b82b9327613f534e496dacaefb71e", + "sha256:a64dd61998416367b7ef979b73d3a85853ba9bec4c2925f74e588879a58716b6", + "sha256:aa442755e31c64037aa7c1cb186e0b369f8416c567381852c63444dd666fb772", + "sha256:ad275964d52e2243430472fc5d2c2334b4fc3ff9c16cb0a19254e25efa03a155", + "sha256:b0e130705d568e2f43a17bcbe74d90958e8a16263868a12c3e0d9c8162690830", + "sha256:b10428b3416d4f9c61f94b494681280be7686bda15898a3a9e08eb66a6d92d67", + "sha256:b2dbea1012ccb784a65349f57bbc93730b96e85b42e9bf7b01ef40443db720b4", + "sha256:b4ba4be812c7a40280629e55ae0b14a0aafa150dd6451297562e1764808bbe61", + "sha256:b93a07e76d13bff9444f1a029e0af2964e654bfc2e2c2d46bfd080df5ad5f3d8", + "sha256:bf2c33d6791c598142f00c9c4c7d47f6476731c31081331664eb26d6ab583e01", + "sha256:c27476257b2fdcd7872d54cfd119b3a9ce4610fb85c8e32b70b42e3680a29a1e", + "sha256:c8bd62331e5032bc396a93609982a9ab6b411c05078a52f5fe3cc59234a3abd1", + "sha256:c97209e85b5be259994eb5b69ff50c5d20cca0f458ef9abd835e262d9d88b39d", + "sha256:cc1c3bc53befb6096b84165956e886b1729634a799e9d6329a0c512ab651e579", + "sha256:cc5d875d56e49f112b6def6813c4e3d3036d269c008bf8aef72cd08d20ca6df6", + "sha256:d189ba1bebfbc0c0e529159631ec72bb9e9bc041f01ec6d3233d6d82eb823bc1", + "sha256:d4e5c5edee874dce4f653dbe59db7c73a600119fbea8d31f53423586ee2aafd7", + "sha256:d57a75d53922fc20c165016a20d9c44f73305e67c351bbc60d1adaf662e74047", + "sha256:da3104c57bbd72948d75f6a9389e6727d2ab6333c3617f0a89d72d4940aa0443", + "sha256:dd6b20b93b3ccc9c1b597999209e4bc5cf2853f9ee66e3fc9a400a78733ffc9a", + "sha256:e0409af9f829f87a2dfb7e259f78f317a5351f2045158be321fd135973fff7bf", + "sha256:e0b55f27f584ed623221cfe995c912c61606be8513bfa0e07d2c674b4516d9dd", + "sha256:e616e7154c37669fc1dfc14584f11e284e05d1c650e1c0f972f281c4ccc53193", + "sha256:e6def7eed9e7fa90fde255afaf08060dc4b343bbe524a8f69bdd2a2f0018f600", + "sha256:ea926cfbc3957090becbcbbb65ad177161a2ff2ad578b5a6ec9bb1e1cd78753c", + "sha256:f0d3348c95b766f54b76116d53d4cb171b52992a1027e7ca50c81b43b9d9e363", + "sha256:f6b0c664ccb879109ee3ca702a9272d877f4fcd21e5eb63c26422fd6e415365e", + 
"sha256:f781dcb0bc9929adc77bad571b8621ecb1e4cdef86e940fe2e5b5ee24fd33b35", + "sha256:f91ebf30830a48c825590aede79376cb40f110b387c17ee9bd59932c961044f9", + "sha256:fdec757fea0b793056419bca3e9932eb2b0ceec90ef4813ea4c1e072c389eb28", + "sha256:fe15238d3798788d00716637b3d4e7bb6bde18b26e5d08335a96e88564a36b6b" ], "markers": "python_version >= '3.9'", - "version": "==11.1.0" + "version": "==11.2.1" }, "posthog": { "hashes": [ @@ -2151,11 +2161,11 @@ }, "transformers": { "hashes": [ - "sha256:5cb8259098b75ff4b5dd04533a318f7c4750d5307d9617e6d0593526432c404d", - "sha256:ed221c31581e97127cff5de775b05f05d19698b439d7d638ff445502a7f37331" + "sha256:e292fcab3990c6defe6328f0f7d2004283ca81a7a07b2de9a46d67fd81ea1409", + "sha256:fd3279633ceb2b777013234bbf0b4f5c2d23c4626b05497691f00cfda55e8a83" ], "markers": "python_full_version >= '3.9.0'", - "version": "==4.51.2" + "version": "==4.51.3" }, "typer": { "hashes": [ @@ -2197,7 +2207,6 @@ "sha256:48bfd350fce3c5c57af5fb4995fded8fb50da3b4feb543eb18ad7e0d54589602", "sha256:cdb58ef6b8188c6c174994b2b1ba2150a9a8ae7ea5fb2f1b856b94a815d6071d" ], - "index": "pypi", "markers": "python_version >= '3.8'", "version": "==0.26.0" }, diff --git a/app/docker-compose.yml b/app/docker-compose.yml index e93d213..0677bd8 100644 --- a/app/docker-compose.yml +++ b/app/docker-compose.yml @@ -10,12 +10,12 @@ services: backend: build: context: ./backend - dockerfile: Dockerfile.prod + dockerfile: Dockerfile container_name: backend platform: linux/amd64 # command: pipenv run uvicorn main:app --reload --workers 1 --host 0.0.0.0 --port 8765 volumes: - - ./backend:/usr/src/app + - ./backend:/home/app/backend ports: - "8000:8765" environment: