diff --git a/.github/actions/setup-python-project/action.yaml b/.github/actions/setup-python-project/action.yaml
index 8db10a3d..edf5cba7 100644
--- a/.github/actions/setup-python-project/action.yaml
+++ b/.github/actions/setup-python-project/action.yaml
@@ -14,7 +14,7 @@ runs:
     - name: "Install Poetry"
       shell: bash
       run: |
-        curl -sSL https://install.python-poetry.org | python3 -
+        curl --proto "=https" -sSL https://install.python-poetry.org | python3 -
        echo "$HOME/.local/bin" >> $GITHUB_PATH
     - name: "Install dependencies"
       shell: bash
diff --git a/.github/workflows/cicd-1-pull-request.yaml b/.github/workflows/cicd-1-pull-request.yaml
index e40173c4..e5c39b6e 100644
--- a/.github/workflows/cicd-1-pull-request.yaml
+++ b/.github/workflows/cicd-1-pull-request.yaml
@@ -78,14 +78,19 @@ jobs:
       python_version: "${{ needs.metadata.outputs.python_version }}"
       terraform_version: "${{ needs.metadata.outputs.terraform_version }}"
       version: "${{ needs.metadata.outputs.version }}"
-    secrets: inherit
+    secrets:
+      IDP_AWS_REPORT_UPLOAD_ACCOUNT_ID: ${{ secrets.IDP_AWS_REPORT_UPLOAD_ACCOUNT_ID }}
+      IDP_AWS_REPORT_UPLOAD_REGION: ${{ secrets.IDP_AWS_REPORT_UPLOAD_REGION }}
+      IDP_AWS_REPORT_UPLOAD_ROLE_NAME: ${{ secrets.IDP_AWS_REPORT_UPLOAD_ROLE_NAME }}
+      IDP_AWS_REPORT_UPLOAD_BUCKET_ENDPOINT: ${{ secrets.IDP_AWS_REPORT_UPLOAD_BUCKET_ENDPOINT }}
   test-stage: # Recommended maximum execution time is 5 minutes
     name: "Test stage"
     needs: [metadata, commit-stage]
     uses: ./.github/workflows/stage-2-test.yaml
     with:
       python_version: "${{ needs.metadata.outputs.python_version }}"
-    secrets: inherit
+    secrets:
+      SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
   build-stage: # Recommended maximum execution time is 3 minutes
     name: "Build stage"
     needs: [metadata, test-stage]
@@ -99,7 +104,6 @@ jobs:
       python_version: "${{ needs.metadata.outputs.python_version }}"
       terraform_version: "${{ needs.metadata.outputs.terraform_version }}"
       version: "${{ needs.metadata.outputs.version }}"
-    secrets: inherit
   acceptance-stage: # Recommended maximum execution time is 10 minutes
     name: "Acceptance stage"
     needs: [metadata, build-stage]
@@ -113,4 +117,3 @@ jobs:
       python_version: "${{ needs.metadata.outputs.python_version }}"
       terraform_version: "${{ needs.metadata.outputs.terraform_version }}"
       version: "${{ needs.metadata.outputs.version }}"
-    secrets: inherit
diff --git a/.github/workflows/cicd-2-publish.yaml b/.github/workflows/cicd-2-publish.yaml
index 13edc3a8..72b78689 100644
--- a/.github/workflows/cicd-2-publish.yaml
+++ b/.github/workflows/cicd-2-publish.yaml
@@ -96,7 +96,7 @@ jobs:
         run: echo "secret_exist=${{ secrets.TEAMS_NOTIFICATION_WEBHOOK_URL != '' }}" >> $GITHUB_OUTPUT
       - name: "Notify on publishing packages"
         if: steps.check.outputs.secret_exist == 'true'
-        uses: nhs-england-tools/notify-msteams-action@v1.0.0
+        uses: nhs-england-tools/notify-msteams-action@450ab55046abcfb0ea7d0943e04aa1b950f08e01
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           teams-webhook-url: ${{ secrets.TEAMS_NOTIFICATION_WEBHOOK_URL }}
diff --git a/.github/workflows/preview-env.yml b/.github/workflows/preview-env.yml
index 28901d2a..0b91a4b2 100644
--- a/.github/workflows/preview-env.yml
+++ b/.github/workflows/preview-env.yml
@@ -11,6 +11,7 @@ env:
   TF_STATE_BUCKET: "cds-cdg-dev-tfstate-900119715266"
   CORE_STATE_KEY: "dev/terraform.tfstate"
   PREVIEW_STATE_PREFIX: "dev/preview/"
+  python_version: "3.14"
 
 jobs:
   preview:
@@ -36,9 +37,9 @@ jobs:
 
       # Configure AWS credentials (OIDC recommended)
       - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@56d6a583f00f6bad6d19d91d53a7bc3b8143d0e9
+        uses: aws-actions/configure-aws-credentials@4c2b9cc816c86555b61460789ac95da17d7e829b
         with:
-          role-to-assume: ${{ env.AWS_ROLE_ARN }}
+          role-to-assume: ${{ env.AWS_ROLE_ARN }}
           aws-region: ${{ env.AWS_REGION }}
 
       - name: Login to Amazon ECR
@@ -85,21 +86,21 @@ jobs:
           fi
           echo "alb_rule_priority=$PRIORITY" >> $GITHUB_OUTPUT
 
+      - name: Setup Python project
+        if: github.event.action != 'closed'
+        uses: ./.github/actions/setup-python-project
+        with:
+          python-version: ${{ env.python_version }}
+
       - name: Build Docker image
         if: github.event.action != 'closed'
+        env:
+          PYTHON_VERSION: ${{ env.python_version }}
         run: |
           IMAGE_TAG="${{ steps.meta.outputs.branch_name }}"
           ECR_URL="${{ steps.meta.outputs.ecr_url }}"
 
-          # For now we are going to simply pull a test image from ECR and re-tag
-          # ready for pushing to the preview env repo.
-          # This is just so the pipeline can be tested while final build steps are worked out.
-          docker pull "${ECR_URL}:latest"
-          docker tag "${ECR_URL}:latest" "${ECR_URL}:${IMAGE_TAG}"
-
-          # docker build \
-          #   --tag "${ECR_URL}:${IMAGE_TAG}" \
-          #   .
+          make build IMAGE_TAG="${IMAGE_TAG}" ECR_URL="${ECR_URL}"
 
       - name: Push Docker image to ECR
         if: github.event.action != 'closed'
diff --git a/.github/workflows/stage-1-commit.yaml b/.github/workflows/stage-1-commit.yaml
index e1ba638e..d366def5 100644
--- a/.github/workflows/stage-1-commit.yaml
+++ b/.github/workflows/stage-1-commit.yaml
@@ -31,6 +31,19 @@ on:
         description: "Version of the software, set by the CI/CD pipeline workflow"
         required: true
         type: string
+    secrets:
+      IDP_AWS_REPORT_UPLOAD_ACCOUNT_ID:
+        description: "AWS Account ID for report upload"
+        required: true
+      IDP_AWS_REPORT_UPLOAD_REGION:
+        description: "AWS Region for report upload"
+        required: true
+      IDP_AWS_REPORT_UPLOAD_ROLE_NAME:
+        description: "AWS Role Name for report upload"
+        required: true
+      IDP_AWS_REPORT_UPLOAD_BUCKET_ENDPOINT:
+        description: "AWS Bucket Endpoint for report upload"
+        required: true
 
 jobs:
   scan-secrets:
diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml
index 1004bb4b..66e4a240 100644
--- a/.github/workflows/stage-2-test.yaml
+++ b/.github/workflows/stage-2-test.yaml
@@ -11,6 +11,10 @@ on:
         description: "Python version, set by the CI/CD pipeline workflow"
         required: true
         type: string
+    secrets:
+      SONAR_TOKEN:
+        description: "SonarCloud token for authentication"
+        required: true
 
 jobs:
   create-coverage-name:
@@ -222,7 +226,7 @@ jobs:
           name: ${{ needs.create-coverage-name.outputs.coverage-name }}
           path: coverage-reports/
       - name: "SonarCloud Scan"
-        uses: SonarSource/sonarqube-scan-action@v7
+        uses: SonarSource/sonarqube-scan-action@a31c9398be7ace6bbfaf30c0bd5d415f843d45e9
         env:
           SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
         with:
diff --git a/Makefile b/Makefile
index a201000d..3e634518 100644
--- a/Makefile
+++ b/Makefile
@@ -9,6 +9,15 @@ docker := doas docker
 else
 docker := docker
 endif
+
+IMAGE_REPOSITORY ?= localhost/gateway-api-image
+IMAGE_TAG ?= latest
+
+ifdef ECR_URL
+IMAGE_REPOSITORY := ${ECR_URL}
+endif
+
+IMAGE_NAME := ${IMAGE_REPOSITORY}:${IMAGE_TAG}
 
 # ==============================================================================
 # Example CI/CD targets are: dependencies, build, publish, deploy, clean, etc.
@@ -25,7 +34,7 @@ build-gateway-api: dependencies
 	@poetry run mypy --no-namespace-packages .
 	@echo "Packaging dependencies..."
 	@poetry build --format=wheel
-	@pip install "dist/gateway_api-0.1.0-py3-none-any.whl" --target "./target/gateway-api"
+	@pip install "dist/gateway_api-0.1.0-py3-none-any.whl" --target "./target/gateway-api" --platform musllinux_1_1_x86_64 --only-binary=:all:
 	# Copy main file separately as it is not included within the package.
 	@cp lambda_handler.py ./target/gateway-api/
 	@rm -rf ../infrastructure/images/gateway-api/resources/build/
@@ -36,9 +45,9 @@ build-gateway-api: dependencies
 
 .PHONY: build
 build: build-gateway-api # Build the project artefact @Pipeline
-	@echo "Building Docker image using Docker. Utilising python version: ${PYTHON_VERSION} ..."
-	@$(docker) buildx build --load --provenance=false --build-arg PYTHON_VERSION=${PYTHON_VERSION} -t localhost/gateway-api-image infrastructure/images/gateway-api
-	@echo "Docker image 'gateway-api-image' built successfully!"
+	@echo "Building Docker x86 image using Docker. Utilising python version: ${PYTHON_VERSION} ..."
+	@$(docker) buildx build --platform linux/amd64 --load --provenance=false --build-arg PYTHON_VERSION=${PYTHON_VERSION} -t ${IMAGE_NAME} infrastructure/images/gateway-api
+	@echo "Docker image '${IMAGE_NAME}' built successfully!"
 
 publish: # Publish the project artefact @Pipeline
 	# TODO: Implement the artefact publishing step
@@ -46,9 +55,9 @@ publish: # Publish the project artefact @Pipeline
 deploy: clean build # Deploy the project artefact to the target environment @Pipeline
 	@if [[ -n "$${IN_BUILD_CONTAINER}" ]]; then \
 		echo "Starting using local docker network ..." ; \
-		$(docker) run --name gateway-api -p 5000:8080 --network gateway-local -d localhost/gateway-api-image ; \
+		$(docker) run --platform linux/amd64 --name gateway-api -p 5000:8080 --network gateway-local -d ${IMAGE_NAME} ; \
 	else \
-		$(docker) run --name gateway-api -p 5000:8080 -d localhost/gateway-api-image ; \
+		$(docker) run --platform linux/amd64 --name gateway-api -p 5000:8080 -d ${IMAGE_NAME} ; \
 	fi
 
 clean:: stop # Clean-up project resources (main) @Operations
diff --git a/infrastructure/environments/preview/main.tf b/infrastructure/environments/preview/main.tf
index cfa32753..30dfdbb2 100644
--- a/infrastructure/environments/preview/main.tf
+++ b/infrastructure/environments/preview/main.tf
@@ -42,8 +42,11 @@ locals {
   # e.g. "feature-123.dev.endpoints.clinical-data-gateway.national.nhs.uk"
   effective_host_name = "${var.branch_name}.${local.base_domain}"
 
-  branch_safe    = replace(replace(var.branch_name, "/", "-"), " ", "-")
-  log_group_name = "/ecs/preview/${local.branch_safe}"
+  branch_after_feature = startswith(var.branch_name, "feature-") ? substr(var.branch_name, length("feature-"), length(var.branch_name) - length("feature-")) : var.branch_name
+  branch_after_bug     = startswith(local.branch_after_feature, "bug-") ? substr(local.branch_after_feature, length("bug-"), length(local.branch_after_feature) - length("bug-")) : local.branch_after_feature
+  branch_source        = length(local.branch_after_bug) > 0 ? local.branch_after_bug : var.branch_name
+  branch_safe          = replace(replace(local.branch_source, "/", "-"), " ", "-")
+  log_group_name       = "/ecs/preview/${local.branch_safe}"
 
   # Default image tag to branch_name if not provided
   effective_image_tag = length(var.image_tag) > 0 ? var.image_tag : var.branch_name
@@ -62,20 +65,20 @@ locals {
 ############################
 
 resource "aws_lb_target_group" "branch" {
-  name        = trim(substr(replace(var.branch_name, ".", "-"), 0, 32), "-")
+  name        = trim(substr(replace(local.branch_safe, ".", "-"), 0, 32), "-")
   port        = var.container_port
   protocol    = "HTTP"
   target_type = "ip"
   vpc_id      = local.vpc_id
 
-  # health_check {
-  #   path                = "/"
-  #   matcher             = "200-399"
-  #   interval            = 30
-  #   timeout             = 5
-  #   unhealthy_threshold = 2
-  #   healthy_threshold   = 2
-  # }
+  health_check {
+    path                = "/"
+    matcher             = "200-499"
+    interval            = 30
+    timeout             = 5
+    unhealthy_threshold = 2
+    healthy_threshold   = 2
+  }
 }
 
 resource "aws_lb_listener_rule" "branch" {
diff --git a/infrastructure/environments/preview/variables.tf b/infrastructure/environments/preview/variables.tf
index 0f674d82..15ddaedb 100644
--- a/infrastructure/environments/preview/variables.tf
+++ b/infrastructure/environments/preview/variables.tf
@@ -18,7 +18,7 @@ variable "image_tag" {
 variable "container_port" {
   description = "The port on which the container listens."
   type        = number
-  default     = 80
+  default     = 8080
 }
 
 variable "desired_count" {
diff --git a/infrastructure/images/build-container/Dockerfile b/infrastructure/images/build-container/Dockerfile
index b029cdfe..e062232e 100644
--- a/infrastructure/images/build-container/Dockerfile
+++ b/infrastructure/images/build-container/Dockerfile
@@ -26,9 +26,8 @@ else \
     rm -r /resources/dev-certificates; \
 fi
 
-RUN apk update
-
-RUN apk add --no-cache --update bash \
+RUN apk update && \
+    apk add --no-cache --update bash \
     # Required to manage user permissions.
     doas \
     shadow \
@@ -63,14 +62,11 @@ RUN echo "permit :wheel" >> /etc/doas.conf \
 ENV PYENV_ROOT="/.pyenv"
 ENV PATH="$PYENV_ROOT/bin:$PATH"
 
-# Install Python (via pyenv)
-RUN curl -fsSL https://pyenv.run | bash
-
-RUN /.pyenv/bin/pyenv install ${PYTHON_VERSION}
-RUN /.pyenv/bin/pyenv global ${PYTHON_VERSION}
-
-# Initialise pyenv for use.
-RUN /.pyenv/bin/pyenv init -
+# Install Python (via pyenv) and initialise it.
+RUN curl --proto "=https" -fsSL https://pyenv.run | bash && \
+    /.pyenv/bin/pyenv install ${PYTHON_VERSION} && \
+    /.pyenv/bin/pyenv global ${PYTHON_VERSION} && \
+    /.pyenv/bin/pyenv init -
 
 COPY /resources/.bashrc /root/.bashrc
 
@@ -83,17 +79,17 @@ WORKDIR /asdf
 
 # If we're running on an arm64 architecture download the arm64 executeable.
 RUN if [ "$(uname -m)" = "aarch64" ] ; then \
-    echo "Installing ARM asdf executable..." ; \
-    wget -O asdf.tar.gz "$ASDF_DOWNLOAD_URL/asdf-v0.18.0-linux-arm64.tar.gz"; \
-else \
-    echo "Installing x86 asdf executable..." ; \
-    wget -O asdf.tar.gz "$ASDF_DOWNLOAD_URL/asdf-v0.18.0-linux-amd64.tar.gz"; \
-fi
+        echo "Installing ARM asdf executable..." && \
+        wget -O asdf.tar.gz "$ASDF_DOWNLOAD_URL/asdf-v0.18.0-linux-arm64.tar.gz"; \
+    else \
+        echo "Installing x86 asdf executable..." && \
+        wget -O asdf.tar.gz "$ASDF_DOWNLOAD_URL/asdf-v0.18.0-linux-amd64.tar.gz"; \
+    fi && \
+    tar -xvf asdf.tar.gz && \
 
-RUN tar -xvf asdf.tar.gz
+    # Install editorconfig-checker to validate formatting.
+    mkdir /editorconfig
 
-# Install editorconfig-checker to validate formatting.
-RUN mkdir /editorconfig
 WORKDIR /editorconfig
 
 RUN if [ "$(uname -m)" = "aarch64" ] ; then \
@@ -116,34 +112,37 @@ WORKDIR /resources
 
 # Run install other development plugins via asdf.
 RUN bash "./install-asdf-plugins.sh"
 
-WORKDIR /workspace/clinical-data-gateway-api
+WORKDIR /workspaces/clinical-data-gateway-api
 
 # Install pipx and poetry
 ENV PIPX_BIN_DIR="/pipx/bin"
 ENV PIPX_HOME="/pipx/lib"
-RUN bash -c "source ~/.bashrc && pip install --root-user-action ignore pipx && pipx install poetry && pipx install ruff"
+RUN bash -c "source ~/.bashrc && \
+    pip install --root-user-action ignore pipx && \
+    pipx install poetry && \
+    pipx install ruff"
 
 ENV PATH="$PIPX_BIN_DIR:$PATH"
 
 # Create a virtual environment for development
-RUN bash -c "source ~/.bashrc && pyenv virtualenv ${PYTHON_VERSION} gateway" \
+RUN bash -c "source ~/.bashrc && pyenv virtualenv ${PYTHON_VERSION} gateway" && \
     # Install markdownlint-cli
-    && npm install -g --ignore-scripts markdownlint-cli \
-    && export PATH \
-    && addgroup dev \
+    npm install -g --ignore-scripts markdownlint-cli && \
+    export PATH && \
+    addgroup dev && \
     # Allow access to pyenv tools for dev users.
-    && chown -R root:dev /.pyenv \
-    && chmod -R g+w /.pyenv \
+    chown -R root:dev /.pyenv && \
+    chmod -R g+w /.pyenv && \
     # Allow access to asdf tools for dev users.
-    && chown -R root:dev /.asdf \
-    && chmod -R g+w /.asdf \
+    chown -R root:dev /.asdf && \
+    chmod -R g+w /.asdf && \
     # Create a new user based on the default vscode user.
-    && groupmod -n gateway-dev vscode \
-    && usermod -l gateway-dev -d /home/gateway-dev -m vscode \
-    && addgroup gateway-dev dev \
-    && addgroup gateway-dev wheel \
+    groupmod -n gateway-dev vscode && \
+    usermod -l gateway-dev -d /home/gateway-dev -m vscode && \
+    addgroup gateway-dev dev && \
+    addgroup gateway-dev wheel \
     # Change default shell to bash for gateway-dev user.
     && chsh -s /bin/bash gateway-dev
diff --git a/scripts/docker/docker.mk b/scripts/docker/docker.mk
index afa8bca5..1ded670a 100644
--- a/scripts/docker/docker.mk
+++ b/scripts/docker/docker.mk
@@ -47,12 +47,6 @@ docker-shellscript-lint: # Lint all Docker module shell scripts @Quality
 		file=$${file} scripts/shellscript-linter.sh
 	done
 
-# ==============================================================================
-# Module tests and examples - please DO NOT edit this section!
-
-docker-test-suite-run: # Run Docker test suite @ExamplesAndTests
-	scripts/docker/tests/docker.test.sh
-
 # ==============================================================================
 
 ${VERBOSE}.SILENT: \
@@ -64,4 +58,3 @@ ${VERBOSE}.SILENT: \
 	docker-push \
 	docker-run \
 	docker-shellscript-lint \
-	docker-test-suite-run \
diff --git a/scripts/docker/tests/Dockerfile b/scripts/docker/tests/Dockerfile
deleted file mode 100644
index b5ea5606..00000000
--- a/scripts/docker/tests/Dockerfile
+++ /dev/null
@@ -1,3 +0,0 @@
-# `*:latest` will be replaced with a corresponding version stored in the '.tool-versions' file
-# hadolint ignore=DL3007
-FROM python:latest
diff --git a/scripts/githooks/scan-secrets.sh b/scripts/githooks/scan-secrets.sh
index 3fda1ed0..271c0b81 100755
--- a/scripts/githooks/scan-secrets.sh
+++ b/scripts/githooks/scan-secrets.sh
@@ -52,6 +52,10 @@ function get-cmd-to-run() {
     "staged-changes")
       cmd="protect --source $dir --verbose --staged"
       ;;
+    *)
+      echo "Unknown check value: '$check'. Expected one of whole-history, last-commit, staged-changes." >&2
+      exit 126
+      ;;
   esac
   # Include base line file if it exists
   if [[ -f "$dir/scripts/config/.gitleaks-baseline.json" ]]; then
diff --git a/scripts/tests/run-test.sh b/scripts/tests/run-test.sh
index b29d2a1e..d2c3177c 100755
--- a/scripts/tests/run-test.sh
+++ b/scripts/tests/run-test.sh
@@ -6,7 +6,7 @@ set -euo pipefail
 # Usage: run-test.sh <test-type>
 # Where test-type is one of: unit, integration, contract, schema, acceptance
 
-if [ $# -ne 1 ]; then
+if [[ $# -ne 1 ]]; then
   echo "Usage: $0 <test-type>"
   echo "Where test-type is one of: unit, integration, contract, schema, acceptance"
   exit 1
@@ -16,15 +16,15 @@ TEST_TYPE="$1"
 
 # Validate test type early
 if [[ ! "$TEST_TYPE" =~ ^(unit|integration|contract|schema|acceptance)$ ]]; then
-  echo "Error: Unknown test type '$TEST_TYPE'"
-  echo "Valid types are: unit, integration, contract, schema, acceptance"
+  echo "Error: Unknown test type '$TEST_TYPE'" >&2
+  echo "Valid types are: unit, integration, contract, schema, acceptance" >&2
   exit 1
 fi
 
 cd "$(git rev-parse --show-toplevel)"
 
 # Determine test path based on test type
-if [ "$TEST_TYPE" = "unit" ]; then
+if [[ "$TEST_TYPE" = "unit" ]]; then
   TEST_PATH="test_*.py src/*/test_*.py"
 else
   TEST_PATH="tests/${TEST_TYPE}/"
@@ -36,7 +36,7 @@ mkdir -p test-artefacts
 echo "Running ${TEST_TYPE} tests..."
 
 # Set coverage path based on test type
-if [ "$TEST_TYPE" = "unit" ]; then
+if [[ "$TEST_TYPE" = "unit" ]]; then
   COV_PATH="."
 else
   COV_PATH="src/gateway_api"
diff --git a/sonar-project.properties b/sonar-project.properties
index 4ce10922..46c5f586 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -5,9 +5,14 @@ sonar.host.url=https://sonarcloud.io
 sonar.qualitygate.wait=true
 sonar.sourceEncoding=UTF-8
+# source files to be analyzed
 sonar.sources=.github,gateway-api,infrastructure,scripts,Makefile
-sonar.tests=tests,gateway-api/tests
-sonar.exclusions=docs/**,gateway-api/test-artefacts/**,gateway-api/coverage-html/**,gateway-api/tests/**,**/__pycache__/**,**/.venv/**,**/*.drawio
+sonar.exclusions=docs/**,gateway-api/test-artefacts/**,gateway-api/coverage-html/**,gateway-api/src/**/test_*.py,**/__pycache__/**,**/.venv/**
+
+# test files to be analyzed
+sonar.tests=gateway-api
+sonar.test.inclusions=gateway-api/**/test_*.py, gateway-api/tests/**
+
 sonar.coverage.exclusions=**/tests/**,**/features/**,**/test_*.py,**/stubs/**
 
 # Set Python version for more precise analysis
 sonar.python.version=3.14
diff --git a/tests/.gitkeep b/tests/.gitkeep
deleted file mode 100644
index e69de29b..00000000
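
Usage sketch (illustrative, not part of the diff): how the parameterised Makefile image variables introduced above are expected to be driven, mirroring the preview workflow's `make build IMAGE_TAG="${IMAGE_TAG}" ECR_URL="${ECR_URL}"` step. The registry URL, tag and Python version below are hypothetical placeholders; with ECR_URL and IMAGE_TAG unset, the image name falls back to localhost/gateway-api-image:latest.

    # Hypothetical values - substitute a real ECR repository URL and branch tag.
    export PYTHON_VERSION="3.14"

    # Builds and tags the linux/amd64 image as "<ECR_URL>:<IMAGE_TAG>".
    make build \
      IMAGE_TAG="feature-123" \
      ECR_URL="123456789012.dkr.ecr.eu-west-2.amazonaws.com/gateway-api"

    # Runs the freshly built image locally, exposing container port 8080 on port 5000.
    make deploy \
      IMAGE_TAG="feature-123" \
      ECR_URL="123456789012.dkr.ecr.eu-west-2.amazonaws.com/gateway-api"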