mirror of https://github.com/aimingmed/aimingmed-ai.git (synced 2026-01-19 13:23:23 +08:00)

Merge pull request #56 from aimingmed/feature/backend-frontend-structure
Feature/backend frontend structure

commit ab9c7b28cb
2 .github/workflows/build.yml (vendored)
@@ -139,4 +139,4 @@ jobs:
        run: docker exec backend-backend pipenv run python -m isort . --check-only
      - name: Cleanup container at end of job
        if: always()
        run: docker stop backend-backend || true && docker rm backend-backend || true
58 .github/workflows/build_new.yml (vendored)
@@ -2,14 +2,14 @@ name: Build

 # Triggers: Equivalent to ADO trigger block
 on:
   push:
   pull_request:
     branches:
       - develop
   # paths:
   #   - 'app/**'
   #   - '.github/workflows/**'

-# Concurrency: Equivalent to batch: true
+# Concurrency control: Ensures only one run per branch at a time, equivalent to batch: true
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
@@ -20,8 +20,8 @@ env:
   REPO: https://github.com/aimingmed # Updated repo for aimingmed-ai

 jobs:
-  # This job defines the matrix and calls the reusable workflow for each image
-  call-build-template:
+  # This job defines the matrix and calls the reusable workflow for each image build
+  build:
     name: Build ${{ matrix.image_config.IMAGE_NAME }}
     # Define necessary permissions if needed (e.g., for GitHub Packages)
     permissions:
@@ -57,4 +57,52 @@ jobs:
      build_context: ${{ matrix.image_config.BUILD_CONTEXT }}
      dockerfile: ${{ matrix.image_config.DOCKERFILE }}
      build_id: ${{ github.run_id }}
      commit_sha: ${{ github.sha }}

  # TEST Stage equivalent
  test:
    name: Run Tests
    needs: build # Ensure this job runs after the build job

    # Call the reusable workflow for testing
    uses: ./.github/workflows/template_test.yml # Path to the reusable workflow file
    with:
      projectName: ${{ env.PROJECT_NAME }}
      repo: ${{ env.ARTIFACTORY_REPO }}
      # Pass test environment variables as JSON string
      testEnvs: >
        [
          "FRONTEND_URL=http://frontend:80",
          "BACKEND_URL=http://backend:80",
          "ENVIRONMENT=dev",
          "TESTING=1"
        ]
      # Pass test directories as JSON string
      tests: >
        [
          "tests/integration/backend"
        ]
      # Pass image definitions for compose setup as JSON string
      # Sensitive values should be passed via secrets and referenced within the template
      images: >
        [
          {
            "name": "backend-aimingmedai",
            "env": {
              "ENVIRONMENT": "dev",
              "TESTING": "1",
              "DEEPSEEK_API_KEY": "${{ secrets.DEEPSEEK_API_KEY }}",
              "TAVILY_API_KEY": "${{ secrets.TAVILY_API_KEY }}"
            }
          },
          {
            "name": "frontend-aimingmedai",
            "depends_on": ["backend-aimingmedai"],
            "env": {
              "ENVIRONMENT": "dev",
              "TESTING": "1",
              "LOG_LEVEL": "DEBUG"
            }
          }
        ]
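For reference, `testEnvs` and `tests` above are plain JSON strings; the reusable test workflow later splits `testEnvs` into one `-e` flag per entry for `docker run`. A minimal bash sketch of that expansion, with values copied from the block above and a loop shaped like the template's Run Tests step (requires jq):

```bash
# Expand a testEnvs-style JSON array into `docker run -e` flags.
TEST_ENVS_JSON='["FRONTEND_URL=http://frontend:80","BACKEND_URL=http://backend:80","ENVIRONMENT=dev","TESTING=1"]'

ENV_ARGS=()
while IFS= read -r kv; do
  ENV_ARGS+=(-e "$kv")   # one -e flag per KEY=VALUE entry; the array keeps values intact
done < <(jq -r '.[]' <<< "$TEST_ENVS_JSON")

printf '%s ' "${ENV_ARGS[@]}"; echo   # -e FRONTEND_URL=... -e BACKEND_URL=... -e ENVIRONMENT=dev -e TESTING=1
```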
6 .github/workflows/template_build.yml (vendored)
@@ -28,7 +28,11 @@ on:
      commit_sha:
        required: true
        type: string

    secrets:
      DEEPSEEK_API_KEY:
        required: true
      TAVILY_API_KEY:
        required: true
jobs:
  build-single-image:
    # This job executes the build steps for the specific image configuration passed via inputs
207 .github/workflows/template_test.yml (vendored, Normal file)
@@ -0,0 +1,207 @@
name: Reusable Test Workflow

on:
  workflow_call:
    inputs:
      projectName:
        required: true
        type: string
      repo:
        required: true
        type: string
      images: # JSON string defining services for compose
        required: true
        type: string
      tests: # JSON string array of test directories/commands
        required: true
        type: string
      testEnvs: # JSON string array of env vars for the test runner container
        required: false
        type: string
        default: '[]'
      testComposeFilePath: # Path where the generated compose file will be saved
        required: false
        type: string
        default: ./test_compose.yml # Use .yml extension for docker compose v2
      testContainerName:
        required: false
        type: string
        default: tests # Name of the image containing the tests
      testResultsPath: # Path inside the test container where results are stored
        required: false
        type: string
        default: /usr/src/app/results
      testResultsFilename:
        required: false
        type: string
        default: results.xml

jobs:
  compose_and_test:
    name: Compose Services and Run Tests
    runs-on: ubuntu-latest
    env:
      # Env vars needed for compose file generation/execution
      REPO: ${{ inputs.repo }}
      PROJECT_NAME: ${{ inputs.projectName }}
      TAG: ${{ github.run_id }} # Use run_id as the build tag

    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          ref: develop
          fetch-depth: 1

      - name: Log in to GitHub Packages
        run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Create Docker-Compose File from Inputs
        id: create_compose
        shell: pwsh
        run: |
          # Load Inputs (parse JSON strings)
          $imagesJson = '${{ inputs.images }}'
          # Substitute secrets *before* parsing JSON using environment variables
          # Be very careful with escaping if secrets contain special JSON characters

          Write-Host "Substituted Images JSON: $imagesJson" # Debugging - remove sensitive info if public
          $images = $imagesJson | ConvertFrom-Json

          $testComposePath = "${{ inputs.testComposeFilePath }}"

          # create compose structure (using YAML structure for Docker Compose v2+)
          $compose = @{ version = "3.8"; services = @{}; networks = @{} }
          $compose.networks.test = @{ external = $false; name = "test-network-${{ github.run_id }}" } # Use unique network name per run

          # Generate services Section Based on Images inputs
          foreach ($img in $images) {
            $serviceName = $img.name
            $svc = @{}
            $svc.image = "${{ env.REPO }}/${{ env.PROJECT_NAME }}/$($serviceName):${{ env.TAG }}" # Use run_id tag
            if ($img.depends_on) {
              $svc.depends_on = $img.depends_on
            }
            if ($img.env) {
              $svc.environment = $img.env
            } else {
              $svc.environment = @{}
            }
            $svc.networks = @("test") # Assign service to the custom network

            $compose.services.$serviceName = $svc
          }

          # Convert PS object to YAML and write file
          # Installing powershell-yaml module might be needed on some runners
          # Install-Module -Name powershell-yaml -Force -Scope CurrentUser # Uncomment if needed
          # Import-Module powershell-yaml # Uncomment if needed
          # $compose | ConvertTo-Yaml | Out-File -Encoding utf8 $testComposePath

          # Alternative: Convert to JSON then use a tool to convert JSON to YAML, or just use JSON with compose
          # Using JSON with docker compose -f is often simpler
          $compose | ConvertTo-Json -Depth 10 | Out-File -Encoding utf8 $testComposePath.replace('.yml','.json')
          echo "COMPOSE_FILE_PATH=$($testComposePath.replace('.yml','.json'))" >> $env:GITHUB_OUTPUT

          # Removed 'docker network create test' - using isolated compose network now

      - name: Clean Docker Services (if any previous)
        run: |
          docker compose -f ${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }} down -v --remove-orphans || true
        continue-on-error: true

      - name: Start Docker Compose Services
        run: |
          echo "Using compose file: ${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}"
          cat "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" # Print generated compose file (check secrets aren't exposed if public)
          docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" up -d

      - name: Wait for Services
        run: |
          echo "Waiting 60 seconds for services to initialize..."
          sleep 60
          echo "Compose logs after wait:"
          docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" logs

      - name: Run Tests
        shell: bash
        run: |
          TEST_DIRS='${{ inputs.tests }}' # Get JSON array string
          TEST_ENVS_JSON='${{ inputs.testEnvs }}' # Get JSON array string
          RESULTS_PATH="${{ inputs.testResultsPath }}"
          STAGING_DIR="${{ runner.temp }}/test-results" # Use runner temp dir for results
          mkdir -p "$STAGING_DIR"

          # Prepare environment variables for docker run.
          # Use a bash array so each KEY=VALUE pair survives word splitting intact.
          ENV_ARGS=()
          if [[ "$TEST_ENVS_JSON" != "[]" ]]; then
            while IFS= read -r line; do
              ENV_ARGS+=(-e "$line")
            done < <(echo "$TEST_ENVS_JSON" | jq -r '.[]')
          else
            # Add a dummy env var if none are provided, as required by original script logic
            ENV_ARGS+=(-e "DUMMY_ENV_TEST_RUN_ID=${{ github.run_id }}")
          fi
          echo "Env args: ${ENV_ARGS[*]}" # Debugging

          # Get the dynamically generated network name
          COMPOSE_NETWORK_NAME=$(docker network ls --filter name=test-network-${{ github.run_id }} --format "{{.Name}}")
          echo "Using Network: $COMPOSE_NETWORK_NAME"

          # Loop through test directories and execute tests
          echo "$TEST_DIRS" | jq -c '.[]' | while read test_dir; do
            test_dir=$(echo $test_dir | sed 's/"//g') # Remove quotes
            echo "Running test: $test_dir"
            docker run \
              --network "$COMPOSE_NETWORK_NAME" \
              "${ENV_ARGS[@]}" \
              -v "$STAGING_DIR:$RESULTS_PATH" \
              --rm \
              "${{ inputs.repo }}/${{ inputs.projectName }}/${{ inputs.testContainerName }}:${{ github.run_id }}" \
              "$test_dir"
            # Add error handling if needed (e.g., exit script if a test run fails)
            if [ $? -ne 0 ]; then
              echo "Test failed: $test_dir"
              # exit 1 # Uncomment to stop on first failure
            fi
          done

          # Copy results file to expected location for upload artifact (adjust filename if needed)
          # Assuming all test runs output to the *same* results file, overwriting previous ones.
          # If they output to different files, adjust this copy/rename logic.
          if [[ -f "$STAGING_DIR/${{ inputs.testResultsFilename }}" ]]; then
            cp "$STAGING_DIR/${{ inputs.testResultsFilename }}" "${{ runner.temp }}/${{ inputs.testResultsFilename }}"
          else
            echo "Warning: Test results file ${{ inputs.testResultsFilename }} not found in $STAGING_DIR"
          fi

      - name: Upload Test Results Artifact
        if: always() # Run even if tests fail
        uses: actions/upload-artifact@v4
        with:
          name: test-results-${{ github.run_id }}
          path: ${{ runner.temp }}/${{ inputs.testResultsFilename }} # Path to the results file on the runner
          retention-days: 7

      # Optional: Publish Test Results for UI display
      - name: Publish Test Results
        if: always() # Run even if tests fail
        uses: dorny/test-reporter@v1
        with:
          name: JUnit Test Report
          path: ${{ runner.temp }}/${{ inputs.testResultsFilename }} # Path to the JUnit XML file
          reporter: java-junit # Specify the format

      - name: Print Service Logs on Failure or Success
        if: always() # Always run this step
        run: |
          echo "Printing final logs from Docker Compose services..."
          docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" logs

      - name: Docker Compose Down
        if: always() # Always run cleanup
        run: |
          echo "Bringing down Docker Compose services..."
          docker compose -f "${{ steps.create_compose.outputs.COMPOSE_FILE_PATH }}" down -v --remove-orphans
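To make the compose-generation and test steps above concrete, here is a hedged sketch of what a generated compose file and one test invocation plausibly look like for the two images passed in by build_new.yml. The registry path (`ghcr.io/aimingmed/aimingmed-ai/...`), the tag `12345`, and the exact JSON layout are illustrative assumptions; the real values come from the `REPO`/`PROJECT_NAME`/`TAG` env vars and the `ConvertTo-Json` call above. JSON is valid YAML, which is why `docker compose -f` can consume the `.json` file directly, as the step's comments note.

```bash
# Illustrative only: approximate shape of the JSON compose file the pwsh step emits.
cat > test_compose.json <<'EOF'
{
  "version": "3.8",
  "services": {
    "backend-aimingmedai": {
      "image": "ghcr.io/aimingmed/aimingmed-ai/backend-aimingmedai:12345",
      "environment": { "ENVIRONMENT": "dev", "TESTING": "1" },
      "networks": ["test"]
    },
    "frontend-aimingmedai": {
      "image": "ghcr.io/aimingmed/aimingmed-ai/frontend-aimingmedai:12345",
      "depends_on": ["backend-aimingmedai"],
      "environment": { "ENVIRONMENT": "dev", "TESTING": "1", "LOG_LEVEL": "DEBUG" },
      "networks": ["test"]
    }
  },
  "networks": { "test": { "external": false, "name": "test-network-12345" } }
}
EOF
docker compose -f test_compose.json config   # validate the file without starting anything
docker compose -f test_compose.json up -d    # what the workflow's start step runs

# One iteration of the Run Tests loop, replayed by hand.
# (Note: on this network, services resolve by their compose service names.)
mkdir -p /tmp/test-results
docker run \
  --network "test-network-12345" \
  -e "BACKEND_URL=http://backend:80" \
  -e "FRONTEND_URL=http://frontend:80" \
  -v "/tmp/test-results:/usr/src/app/results" \
  --rm \
  ghcr.io/aimingmed/aimingmed-ai/tests:12345 \
  tests/integration/backend
```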
276 .github/workflows/templates/template_test.yml (vendored)
@@ -1,276 +0,0 @@
# This is a reusable workflow template for setting up Docker Compose and running integration tests.

name: Reusable Integration Test Template

# Define inputs expected from the calling workflow
on:
  workflow_call:
    inputs:
      project_name:
        required: true
        type: string
      repo:
        required: true
        type: string
      # JSON string representing the list of image configurations for docker-compose
      images_config_json:
        required: true
        type: string
      # JSON string representing the list of test directories to execute
      tests_to_run_json:
        required: true
        type: string
      # Newline-separated string of environment variables for the test runner container
      test_envs_newline:
        required: false
        type: string
        default: ""
      test_container_name:
        required: false
        type: string
        default: "tests" # Default from ADO template
      test_results_path_in_container:
        required: false
        type: string
        default: "/usr/src/app/results" # Default from ADO template
      test_results_filename:
        required: false
        type: string
        default: "results.xml" # Default from ADO template
      # Use the build ID (run_id) for tagging images pulled/run
      build_id:
        required: true
        type: string # Pass run_id as string

    # Define secrets required by this reusable workflow
    secrets:
      ARTIFACTORY_USER:
        required: true
      ARTIFACTORY_PASSWORD:
        required: true

jobs:
  compose-and-test:
    name: Run Integration Tests
    runs-on: ubuntu-latest
    # Set default shell to pwsh for the compose generation step
    defaults:
      run:
        shell: pwsh
    steps:
      - name: Checkout repo
        # Checks out the repository code
        uses: actions/checkout@v4
        with:
          fetch-depth: 1 # As per original ADO checkout

      - name: Create Docker-Compose File from JSON input
        # Translates the PowerShell script from ADO template_test.yml
        # Uses PowerShell Core (pwsh) available on ubuntu-latest runners
        env:
          # Pass inputs as environment variables for the script
          IMAGES_JSON: ${{ inputs.images_config_json }}
          REPO_VAR: ${{ inputs.repo }}
          PROJECT_NAME_VAR: ${{ inputs.project_name }}
          TAG_VAR: ${{ inputs.build_id }}
        run: |
          # Load Images Parameter from environment variable
          # Use -Raw to prevent PowerShell from potentially misinterpreting complex JSON
          $imagesJson = Get-Content -Raw -Path Env:IMAGES_JSON
          # Write-Host "DEBUG: Received images JSON: $imagesJson" # Debugging line
          try {
            $images = $imagesJson | ConvertFrom-Json
          } catch {
            Write-Error "Failed to parse IMAGES_JSON: $($_.Exception.Message)"
            Write-Error "JSON content was: $imagesJson"
            exit 1
          }

          # create compose Outlet
          $compose = @{version = "3.6"; services = @{}}
          # Define network (matches ADO template)
          $compose.networks = @{test = @{ external = $true; name = "test" }}

          # Generate services Section Based on Images inputs
          foreach ($imageInfo in $images) {
            $svc = [ordered]@{} # Use ordered dictionary for consistency

            # Determine image name - use explicit image if provided, otherwise construct it
            if ($imageInfo.PSObject.Properties['image']) {
              $svc.image = $imageInfo.image
            } else {
              # Construct image path using REPO, PROJECT_NAME, image name, and TAG
              $svc.image = "${env:REPO_VAR}/${env:PROJECT_NAME_VAR}/$($imageInfo.name):${env:TAG_VAR}"
            }

            # Add depends_on if present in the input
            if ($imageInfo.PSObject.Properties['depends_on']) {
              $svc.depends_on = $imageInfo.depends_on
            }

            # Add environment variables if present, otherwise empty object
            $svc.environment = if ($imageInfo.PSObject.Properties['env']) { $imageInfo.env } else { @{} }

            # Assign service to test network
            $svc.networks = @("test")

            # Add the service definition to the compose structure
            $compose.services.($imageInfo.name) = $svc
          }

          # Write File as YAML (standard for docker-compose)
          # Use ConvertTo-Yaml function (requires installing module) or ConvertTo-Json
          # Let's stick to JSON for direct translation, assuming docker-compose handles it,
          # but rename to .yml as that's standard practice.
          # Update: docker-compose CLI generally expects YAML. Let's output YAML.
          # We need to install a module for ConvertTo-Yaml.
          Install-Module -Name PSYaml -Force -Scope CurrentUser -ErrorAction SilentlyContinue
          Import-Module PSYaml -ErrorAction SilentlyContinue

          try {
            $yamlOutput = $compose | ConvertTo-Yaml -Depth 10
            # Write-Host "DEBUG: Generated YAML: $yamlOutput" # Debugging line
            $yamlOutput | Out-File -Encoding UTF8 ./test_compose.yml
            Write-Host "Successfully generated test_compose.yml"
          } catch {
            Write-Error "Failed to convert to YAML or write file: $($_.Exception.Message)"
            # Fallback or alternative if YAML fails - write JSON
            # Write-Host "Falling back to JSON output as test_compose.json"
            # $compose | ConvertTo-Json -Depth 10 | Out-File -Encoding UTF8 ./test_compose.json
            exit 1
          }
      - name: Login to Artifactory
        # Uses secrets passed from the calling workflow via inherit
        uses: docker/login-action@v3
        # Switch back to default shell (bash) if needed for subsequent steps
        shell: bash
        with:
          registry: ${{ inputs.repo }}
          username: ${{ secrets.ARTIFACTORY_USER }}
          password: ${{ secrets.ARTIFACTORY_PASSWORD }}

      - name: Create Docker Test Network
        # Equivalent to ADO script task, || true becomes continue-on-error
        shell: bash
        continue-on-error: true
        run: docker network create test

      - name: Clean Docker Services Before Test
        # Equivalent to ADO script task
        shell: bash
        run: docker-compose -f test_compose.yml down -v --remove-orphans

      - name: Start Docker Compose Services
        # Equivalent to ADO script task
        shell: bash
        env:
          # Pass variables needed by compose file if image tag wasn't hardcoded during generation
          # Note: The PowerShell script above now bakes the full image path with tag in.
          # TAG: ${{ inputs.build_id }}
          # REPO: ${{ inputs.repo }}
          # PROJECT_NAME: ${{ inputs.project_name }}
        run: |
          echo "--- Generated docker-compose.yml ---"
          cat test_compose.yml
          echo "------------------------------------"
          docker-compose -f test_compose.yml up -d
          echo "Docker containers started."

      - name: Wait for Services
        # Equivalent to ADO sleep task
        shell: bash
        run: |
          echo "Waiting 60 seconds for services to initialize..."
          sleep 60
          echo "Wait complete. Checking container status:"
          docker ps -a

      - name: Prepare Test Environment File
        # Create a file from the newline-separated input string for docker run --env-file
        shell: bash
        run: |
          echo "${{ inputs.test_envs_newline }}" > ./test_env_vars.env
          echo "Created test_env_vars.env file."
          # Add build ID as a default test env var like in ADO coalesce
          echo "DUMMY_ENV_TEST_RUN_ID=${{ inputs.build_id }}" >> ./test_env_vars.env

      - name: Create Results Directory on Runner
        shell: bash
        run: mkdir -p ${{ runner.temp }}/test-results

      - name: Run Tests Iteratively
        # This step iterates through the test directories provided in the JSON input
        shell: bash
        env:
          TESTS_JSON: ${{ inputs.tests_to_run_json }}
          TEST_CONTAINER_IMAGE: ${{ inputs.repo }}/${{ inputs.project_name }}/${{ inputs.test_container_name }}:${{ inputs.build_id }}
          RESULTS_PATH_HOST: ${{ runner.temp }}/test-results
          RESULTS_PATH_CONTAINER: ${{ inputs.test_results_path_in_container }}
        run: |
          echo "Running tests for image: $TEST_CONTAINER_IMAGE"
          echo "Host results dir: $RESULTS_PATH_HOST"
          echo "Container results dir: $RESULTS_PATH_CONTAINER"

          # Check if jq is installed, install if not
          if ! command -v jq &> /dev/null
          then
            echo "jq could not be found, installing..."
            sudo apt-get update && sudo apt-get install -y jq
          fi

          # Parse the JSON array of test directories
          # Use mapfile/readarray for safer parsing than simple iteration
          readarray -t test_dirs < <(echo $TESTS_JSON | jq -c -r '.[]')

          if [ ${#test_dirs[@]} -eq 0 ]; then
            echo "Warning: No test directories found in TESTS_JSON input."
            exit 0 # Exit successfully if no tests are specified
          fi

          echo "Found ${#test_dirs[@]} test directories to run."

          # Loop through the array and run docker for each test directory
          for test_dir in "${test_dirs[@]}"; do
            echo "--- Running test: $test_dir ---"
            docker run \
              --network test \
              --env-file ./test_env_vars.env \
              -v "$RESULTS_PATH_HOST":"$RESULTS_PATH_CONTAINER" \
              --rm \
              "$TEST_CONTAINER_IMAGE" \
              "$test_dir" || echo "Test run failed for $test_dir" # Continue even if one test fails
            echo "--- Finished test: $test_dir ---"
          done
          echo "All specified test runs attempted."

      - name: Publish Test Results
        # Uses a dedicated action to publish results, equivalent to PublishTestResults@2
        # This action looks for JUnit XML files in the specified path.
        if: always() # Run this even if previous steps fail
        uses: dorny/test-reporter@v1
        with:
          name: Integration Test Results (${{ inputs.project_name }})
          # Path is relative to the workspace root, but we put results in runner.temp
          # Need to adjust path pattern to match the mounted host directory
          path: ${{ runner.temp }}/test-results/${{ inputs.test_results_filename }}
          reporter: java-junit # Specify the format
          fail-on-error: false # Don't fail the workflow step if parsing/upload fails

      - name: Print Docker Logs on Failure/Completion
        # Equivalent to ADO CmdLine@2 Print Logs
        if: always() # Run this even if previous steps fail
        shell: bash
        run: |
          echo "--- Printing Docker Compose logs ---"
          docker-compose -f test_compose.yml logs
          echo "------------------------------------"

      - name: Clean Docker Services After Test
        # Optional: Clean up containers/networks after run
        if: always()
        shell: bash
        run: |
          echo "Cleaning up docker-compose services..."
          docker-compose -f test_compose.yml down -v --remove-orphans
          echo "Cleanup complete."
@@ -34,7 +34,7 @@ RUN chown -R app:app $APP_HOME
 USER app

 # expose the port the app runs on
-EXPOSE 8765
+EXPOSE 80

 # run uvicorn
-CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "8765"]
+CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "80"]
@@ -76,4 +76,4 @@ USER app
 EXPOSE 8765

 # run uvicorn
-CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "8765"]
+CMD ["pipenv", "run", "uvicorn", "main:app", "--reload", "--workers", "1", "--host", "0.0.0.0", "--port", "80"]
@@ -31,4 +31,4 @@ app.add_middleware(


 # if __name__ == "__main__":
-#     uvicorn.run("main:app", host="0.0.0.0", port=8765, reload=True)
+#     uvicorn.run("main:app", host="0.0.0.0", port=80, reload=True)
@@ -17,7 +17,7 @@ services:
    volumes:
      - ./backend:/home/app/backend
    ports:
-      - "8000:8765"
+      - "8004:80"
    environment:
      - ENVIRONMENT=dev
      - TESTING=0
@@ -31,8 +31,15 @@ services:
      - ./frontend:/usr/src/app
      - /usr/src/app/node_modules
    ports:
-      - "3000:5173"
+      - "3004:80"
    depends_on:
      - backend
    environment:
      LOG_LEVEL: "DEBUG"

  tests:
    build:
      context: ./tests
    environment:
      FRONTEND_URL: http://frontend:80
      BACKEND_URL: http://backend:80
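A quick smoke test of the remapped ports from the host (the `/` routes are assumptions; any route the services actually serve will do):

```bash
docker-compose up -d --build backend frontend
curl -i http://localhost:8004/   # backend: host port 8004 -> container port 80
curl -i http://localhost:3004/   # frontend dev server: host port 3004 -> container port 80
```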
@@ -1 +1 @@
-REACT_APP_BASE_DOMAIN_NAME=localhost
+REACT_APP_BASE_DOMAIN_NAME_PORT=localhost:8004
@@ -14,5 +14,5 @@ COPY ${ENV_FILE} /usr/src/app/.env
 # Copy dependency files and install dependencies
 RUN npm install && npm i --save-dev @types/jest

-EXPOSE 5173
-CMD [ "npm", "run", "dev" ]
+EXPOSE 80
+CMD [ "npm", "run", "dev", "--", "--host", "0.0.0.0", "--port", "80" ]
@@ -1,6 +1,6 @@
 import React, { useState, useEffect, useRef } from 'react';

-const BASE_DOMAIN_NAME = import.meta.env.REACT_APP_DOMAIN_NAME || 'localhost';
+const BASE_DOMAIN_NAME_PORT = import.meta.env.REACT_APP_DOMAIN_NAME_PORT || 'localhost:8004';


 interface Message {
@@ -16,7 +16,7 @@ const App: React.FC = () => {

   useEffect(() => {
     mounted.current = true;
-    const ws = new WebSocket(`ws://${BASE_DOMAIN_NAME}:8000/ws`);
+    const ws = new WebSocket(`ws://${BASE_DOMAIN_NAME_PORT}/ws`);
     setSocket(ws);
     ws.onopen = () => {
       console.log('WebSocket connection opened');
14 app/tests/Dockerfile (Normal file)
@@ -0,0 +1,14 @@
# TEST
FROM python:3.11-slim-bullseye

WORKDIR /usr/src/app

COPY Pipfile .

RUN pip3 install --upgrade pip &&\
    pip3 install pipenv &&\
    pipenv install --dev

COPY tests ./tests

ENTRYPOINT ["pipenv", "run", "pytest", "--junit-xml=results/results.xml"]
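A plausible way to exercise this image by hand from `app/tests/` (the image tag and the BACKEND_URL value are assumptions; the bind mount matches the `--junit-xml=results/results.xml` path in the ENTRYPOINT):

```bash
cd app/tests
docker build -t aimingmed-tests:local .
mkdir -p results
docker run --rm \
  -e BACKEND_URL=http://host.docker.internal:8004 \
  -v "$PWD/results:/usr/src/app/results" \
  aimingmed-tests:local tests/integration/backend
cat results/results.xml   # JUnit report written by the pytest ENTRYPOINT
```

On Linux, `host.docker.internal` additionally needs `--add-host=host.docker.internal:host-gateway`.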
19 app/tests/Pipfile (Normal file)
@@ -0,0 +1,19 @@
[[source]]
url = "https://pypi.org/simple"
verify_ssl = true
name = "pypi"

[packages]
pytest = "*"
requests = "*"
jsonschema = "*"
evonik-dummy = "*"
pyrsistent = "*"
pyjwt = "*"
pydantic = "*"

[dev-packages]
autopep8 = "*"

[requires]
python_version = "3.11"
276 app/tests/Pipfile.lock (generated, Normal file)
@@ -0,0 +1,276 @@
{
    "_meta": {
        "hash": {
            "sha256": "c1b2460cc178bc5b3ed94b7d759cc4838a5b71891722ee9de151e81d45237011"
        },
        "pipfile-spec": 6,
        "requires": {
            "python_version": "3.11"
        },
        "sources": [
            {
                "name": "pypi",
                "url": "https://pypi.org/simple",
                "verify_ssl": true
            }
        ]
    },
    "default": {
        "annotated-types": {
            "hashes": [
                "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53",
                "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"
            ],
            "markers": "python_version >= '3.8'",
            "version": "==0.7.0"
        },
        "attrs": {
            "hashes": [
                "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3",
                "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"
            ],
            "markers": "python_version >= '3.8'",
            "version": "==25.3.0"
        },
        "certifi": {
            "hashes": [
                "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651",
                "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"
            ],
            "markers": "python_version >= '3.6'",
            "version": "==2025.1.31"
        },
        "charset-normalizer": {
            "hashes": [
                "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3",
                "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125",
                "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"
            ],
            "markers": "python_version >= '3.7'",
            "version": "==3.4.1"
        },
        "evonik-dummy": {
            "hashes": [
                "sha256:7c6fc100f311c11d7396e1b434a62d1265539b6d63e735fa9586499341eeebeb",
                "sha256:c22d45dcba7e14c0167575d8cd9b30c1b42321dbd7ca684521d38b3aaffa9a1e"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.7'",
            "version": "==0.0.13"
        },
        "exrex": {
            "hashes": [
                "sha256:1228f2e3afb008cacf6adc3ec20b098f1303886f4382b57cdf3b7259fb7c0ad3",
                "sha256:3f582add0700d4141e279625252e7a0655f9ad81feab3b8ab9077966668a2c99"
            ],
            "version": "==0.12.0"
        },
        "faker": {
            "hashes": [
                "sha256:ad9dc66a3b84888b837ca729e85299a96b58fdaef0323ed0baace93c9614af06",
                "sha256:dc2f730be71cb770e9c715b13374d80dbcee879675121ab51f9683d262ae9a1c"
            ],
            "markers": "python_version >= '3.9'",
            "version": "==37.1.0"
        },
        "idna": {
            "hashes": [
                "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9",
                "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"
            ],
            "markers": "python_version >= '3.6'",
            "version": "==3.10"
        },
        "iniconfig": {
            "hashes": [
                "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7",
                "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"
            ],
            "markers": "python_version >= '3.8'",
            "version": "==2.1.0"
        },
        "jsonschema": {
            "hashes": [
                "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4",
                "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.8'",
            "version": "==4.23.0"
        },
        "jsonschema-specifications": {
            "hashes": [
                "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272",
                "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"
            ],
            "markers": "python_version >= '3.9'",
            "version": "==2024.10.1"
        },
        "packaging": {
            "hashes": [
                "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759",
                "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"
            ],
            "markers": "python_version >= '3.8'",
            "version": "==24.2"
        },
        "pluggy": {
            "hashes": [
                "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1",
                "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"
            ],
            "markers": "python_version >= '3.8'",
            "version": "==1.5.0"
        },
        "pydantic": {
            "hashes": [
                "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3",
                "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.9'",
            "version": "==2.11.3"
        },
        "pydantic-core": {
            "hashes": [
                "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df",
                "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"
            ],
            "markers": "python_version >= '3.9'",
            "version": "==2.33.1"
        },
        "pyjwt": {
            "hashes": [
                "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953",
                "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.9'",
            "version": "==2.10.1"
        },
        "pyrsistent": {
            "hashes": [
                "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f",
                "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e",
                "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958",
                "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34",
                "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca",
                "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d",
                "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d",
                "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4",
                "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714",
                "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf",
                "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee",
                "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8",
                "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224",
                "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d",
                "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054",
                "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656",
                "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7",
                "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423",
                "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce",
                "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e",
                "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3",
                "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0",
                "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f",
                "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b",
                "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce",
                "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a",
                "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174",
                "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86",
                "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f",
                "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b",
                "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98",
                "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.8'",
            "version": "==0.20.0"
        },
        "pytest": {
            "hashes": [
                "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820",
                "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.8'",
            "version": "==8.3.5"
        },
        "referencing": {
            "hashes": [
                "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa",
                "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"
            ],
            "markers": "python_version >= '3.9'",
            "version": "==0.36.2"
        },
        "requests": {
            "hashes": [
                "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760",
                "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.8'",
            "version": "==2.32.3"
        },
        "rpds-py": {
            "hashes": [
                "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e",
                "sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"
            ],
            "markers": "python_version >= '3.9'",
            "version": "==0.24.0"
        },
        "typing-extensions": {
            "hashes": [
                "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c",
                "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"
            ],
            "markers": "python_version >= '3.8'",
            "version": "==4.13.2"
        },
        "typing-inspection": {
            "hashes": [
                "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f",
                "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"
            ],
            "markers": "python_version >= '3.9'",
            "version": "==0.4.0"
        },
        "tzdata": {
            "hashes": [
                "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8",
                "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"
            ],
            "markers": "python_version >= '2'",
            "version": "==2025.2"
        },
        "urllib3": {
            "hashes": [
                "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466",
                "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"
            ],
            "markers": "python_version >= '3.9'",
            "version": "==2.4.0"
        }
    },
    "develop": {
        "autopep8": {
            "hashes": [
                "sha256:89440a4f969197b69a995e4ce0661b031f455a9f776d2c5ba3dbd83466931758",
                "sha256:ce8ad498672c845a0c3de2629c15b635ec2b05ef8177a6e7c91c74f3e9b51128"
            ],
            "index": "pypi",
            "markers": "python_version >= '3.9'",
            "version": "==2.3.2"
        },
        "pycodestyle": {
            "hashes": [
                "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9",
                "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae"
            ],
            "markers": "python_version >= '3.9'",
            "version": "==2.13.0"
        }
    }
}
66 app/tests/README.md (Normal file)
@@ -0,0 +1,66 @@
# tests

Here, we provide integration tests for (all) components.

These integration tests can be run locally or via docker-compose.

## run using docker-compose

Run all tests using the dummy implementations of backend and frontend:

```bash
docker-compose up -d --build backend frontend
docker-compose up --build tests
```

Stop all containers:

```bash
docker-compose down -v
```

## run locally

You can start the dummy implementations as follows:

```bash
docker-compose up -d --build backend frontend
```

To run the tests locally, you must specify the base URLs of the backend and frontend APIs:

```bash
export BACKEND_URL="http://localhost:8004"
export FRONTEND_URL="http://localhost:3000"
```

All values default to `http://localhost:8080`.

### install dependencies

To install the dependencies for the tests, execute the following (in `/app/tests/`):

```bash
pipenv install
```

### run tests

To run the tests locally, execute the following:

```bash
pipenv run pytest tests/integration/
```

To execute tests for jobs / backend only, execute the following:

```bash
pipenv run pytest tests/integration/jobs
pipenv run pytest tests/integration/backend
```

You can also run single groups of tests, e.g.:

```bash
pipenv run pytest tests/integration/backend/tests/test_constraint_types.py -k create
```