diff --git a/.github/workflows/deploy-backend.yml b/.github/workflows/deploy-backend.yml index ddf0d8145..5c7e87955 100644 --- a/.github/workflows/deploy-backend.yml +++ b/.github/workflows/deploy-backend.yml @@ -37,6 +37,11 @@ on: required: false type: boolean default: true + deploy_mock_pds: + description: Deploy the mock PDS Lambda for perf testing. Ref only. + required: false + type: boolean + default: false environment: required: true type: string @@ -99,6 +104,11 @@ on: required: true type: boolean default: false + deploy_mock_pds: + description: Deploy the mock PDS Lambda for perf testing. Ref only. + required: false + type: boolean + default: false sub_environment: type: string description: Set the sub environment name e.g. pr-xxx, or green/blue in higher environments @@ -170,6 +180,28 @@ jobs: image_tag_prefix: ${{ inputs.sub_environment }}- allow_implicit_tag_prefix_reuse: ${{ inputs.sub_environment == 'internal-dev' || startsWith(inputs.sub_environment, 'pr-') }} + deploy-mock-pds-image: + name: Deploy mock_pds image + if: ${{ inputs.deploy_mock_pds && inputs.sub_environment == 'ref' }} + uses: ./.github/workflows/deploy-lambda-artifact.yml + with: + lambda_name: mock_pds + tf_var_suffix: mock_pds + environment: ${{ inputs.environment }} + sub_environment: ${{ inputs.sub_environment }} + build_image: ${{ fromJson(inputs.lambda_build_flags).mock_pds || false }} + image_version: ${{ fromJson(inputs.lambda_image_overrides).mock_pds || '' }} + run_diff_check: ${{ inputs.run_diff_check }} + diff_base_sha: ${{ inputs.diff_base_sha }} + diff_head_sha: ${{ inputs.diff_head_sha }} + lambda_paths: tests/perf_tests/mock_pds/ + shared_paths: lambdas/shared/src/common/ + docker_context_path: tests/perf_tests + dockerfile_path: tests/perf_tests/mock_pds/Dockerfile + ecr_repository: imms-mock-pds-repo + image_tag_prefix: ${{ inputs.sub_environment }}- + allow_implicit_tag_prefix_reuse: false + terraform-plan: permissions: actions: read @@ -177,12 +209,15 @@ jobs: 
contents: read needs: - deploy-lambda-images - if: ${{ !cancelled() && needs.deploy-lambda-images.result == 'success' }} + - deploy-mock-pds-image + if: ${{ !cancelled() && needs.deploy-lambda-images.result == 'success' && (needs.deploy-mock-pds-image.result == 'success' || needs.deploy-mock-pds-image.result == 'skipped') }} outputs: image_uris_json: ${{ steps.lambda-images.outputs.image_uris_json }} runs-on: ubuntu-latest environment: name: ${{ inputs.environment }} + env: + TF_VAR_mock_pds_enabled: ${{ inputs.deploy_mock_pds && inputs.sub_environment == 'ref' }} steps: - name: Checkout uses: actions/checkout@0c366fd6a839edf440554fa01a7085ccba70ac98 @@ -271,6 +306,8 @@ jobs: runs-on: ubuntu-latest environment: name: ${{ inputs.environment }} + env: + TF_VAR_mock_pds_enabled: ${{ inputs.deploy_mock_pds && inputs.sub_environment == 'ref' }} steps: - name: Checkout uses: actions/checkout@0c366fd6a839edf440554fa01a7085ccba70ac98 diff --git a/.github/workflows/quality-checks.yml b/.github/workflows/quality-checks.yml index 5566bb2b3..722a6ef7e 100644 --- a/.github/workflows/quality-checks.yml +++ b/.github/workflows/quality-checks.yml @@ -184,6 +184,18 @@ jobs: poetry run coverage run --source=src -m unittest discover || echo "mns_publisher tests failed" >> ../../failed_tests.txt poetry run coverage xml -o ../../mns_publisher-coverage.xml + - name: Run unittest with coverage-mock-pds + working-directory: tests/perf_tests/mock_pds + id: mock_pds + env: + POETRY_INSTALLER_ONLY_BINARY: ":all:" + PYTHONPATH: ${{ github.workspace }}/tests/perf_tests/mock_pds/src + continue-on-error: true + run: | + poetry install + poetry run coverage run --source=src -m unittest discover -s tests -v || echo "mock_pds tests failed" >> ../../../failed_tests.txt + poetry run coverage xml -o ../../../mock_pds-coverage.xml + - name: Run unittest with coverage-mns-subscription working-directory: lambdas/mns_subscription id: mns_subscription diff --git a/README.md b/README.md index 
0d174ca15..2b4dbf282 100644 --- a/README.md +++ b/README.md @@ -62,9 +62,10 @@ GitHub Actions for our entire pipeline. ### Tests -| Folder | Description | -| ---------------- | ----------------------------------------------------------------------------- | -| `e2e_automation` | End-to-end tests executed during PR pipelines using the pytest-bdd framework. | +| Folder | Description | +| ---------------- | --------------------------------------------------------------------------------------------------------- | +| `e2e_automation` | End-to-end tests executed during PR pipelines using the pytest-bdd framework. | +| `perf_tests` | Locust performance tests and the test-only mock PDS Lambda package used for rate-limit validation in ref. | --- diff --git a/infrastructure/account/lambda_ecr_repos.tf b/infrastructure/account/lambda_ecr_repos.tf index 6a7933fbf..05e02537a 100644 --- a/infrastructure/account/lambda_ecr_repos.tf +++ b/infrastructure/account/lambda_ecr_repos.tf @@ -38,6 +38,10 @@ locals { name = "imms-mns-publisher-repo" lambda_source_names = ["*-mns-publisher-lambda"] } + mock_pds = { + name = "imms-mock-pds-repo" + lambda_source_names = ["*-mock-pds-lambda"] + } ack_backend = { name = "imms-ackbackend-repo" lambda_source_names = ["*-ack-lambda"] diff --git a/infrastructure/instance/environments/dev/pr/variables.tfvars b/infrastructure/instance/environments/dev/pr/variables.tfvars index 130fea83c..d7a8bf9b1 100644 --- a/infrastructure/instance/environments/dev/pr/variables.tfvars +++ b/infrastructure/instance/environments/dev/pr/variables.tfvars @@ -6,3 +6,4 @@ mns_environment = "dev" error_alarm_notifications_enabled = false create_mesh_processor = false has_sub_environment_scope = true +mock_pds_enabled = false diff --git a/infrastructure/instance/environments/dev/ref/variables.tfvars b/infrastructure/instance/environments/dev/ref/variables.tfvars index e6256cc11..4f213bd7b 100644 --- a/infrastructure/instance/environments/dev/ref/variables.tfvars +++ 
b/infrastructure/instance/environments/dev/ref/variables.tfvars @@ -3,6 +3,9 @@ immunisation_account_id = "345594581768" dspp_core_account_id = "603871901111" pds_environment = "ref" mns_environment = "dev" +mock_pds_enabled = false +mock_pds_average_rate_limit = 125 +mock_pds_spike_rate_limit = 450 error_alarm_notifications_enabled = true create_mesh_processor = false has_sub_environment_scope = true diff --git a/infrastructure/instance/mock_pds.tf b/infrastructure/instance/mock_pds.tf new file mode 100644 index 000000000..afb30a270 --- /dev/null +++ b/infrastructure/instance/mock_pds.tf @@ -0,0 +1,161 @@ +locals { + mock_pds_lambda_name = "${local.short_prefix}-mock-pds-lambda" +} + +resource "aws_iam_role" "mock_pds_lambda_exec_role" { + count = var.mock_pds_enabled ? 1 : 0 + + name = "${local.mock_pds_lambda_name}-exec-role" + assume_role_policy = jsonencode({ + Version = "2012-10-17", + Statement = [{ + Effect = "Allow", + Sid = "", + Principal = { + Service = "lambda.amazonaws.com" + }, + Action = "sts:AssumeRole" + }] + }) +} + +resource "aws_iam_policy" "mock_pds_lambda_exec_policy" { + count = var.mock_pds_enabled ? 1 : 0 + + name = "${local.mock_pds_lambda_name}-exec-policy" + policy = jsonencode({ + Version = "2012-10-17", + Statement = [ + { + Effect = "Allow" + Action = [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents" + ] + Resource = "arn:aws:logs:${var.aws_region}:${var.immunisation_account_id}:log-group:/aws/lambda/${local.mock_pds_lambda_name}:*" + }, + { + Effect = "Allow", + Action = [ + "ec2:CreateNetworkInterface", + "ec2:DescribeNetworkInterfaces", + "ec2:DeleteNetworkInterface" + ], + Resource = "*" + } + ] + }) +} + +resource "aws_iam_policy" "mock_pds_lambda_kms_access_policy" { + count = var.mock_pds_enabled ? 
1 : 0 + + name = "${local.mock_pds_lambda_name}-kms-policy" + description = "Allow mock PDS Lambda to decrypt environment variables" + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Action = [ + "kms:Decrypt" + ] + Resource = data.aws_kms_key.existing_lambda_encryption_key.arn + } + ] + }) +} + +resource "aws_iam_role_policy_attachment" "mock_pds_lambda_exec_policy_attachment" { + count = var.mock_pds_enabled ? 1 : 0 + + role = aws_iam_role.mock_pds_lambda_exec_role[0].name + policy_arn = aws_iam_policy.mock_pds_lambda_exec_policy[0].arn +} + +resource "aws_iam_role_policy_attachment" "mock_pds_lambda_kms_policy_attachment" { + count = var.mock_pds_enabled ? 1 : 0 + + role = aws_iam_role.mock_pds_lambda_exec_role[0].name + policy_arn = aws_iam_policy.mock_pds_lambda_kms_access_policy[0].arn +} + +resource "aws_cloudwatch_log_group" "mock_pds_lambda_log_group" { + count = var.mock_pds_enabled ? 1 : 0 + + name = "/aws/lambda/${local.mock_pds_lambda_name}" + retention_in_days = 30 +} + +resource "aws_lambda_function" "mock_pds_lambda" { + count = var.mock_pds_enabled ? 
1 : 0 + + function_name = local.mock_pds_lambda_name + role = aws_iam_role.mock_pds_lambda_exec_role[0].arn + package_type = "Image" + image_uri = var.mock_pds_image_uri + architectures = ["x86_64"] + timeout = 30 + + vpc_config { + subnet_ids = local.private_subnet_ids + security_group_ids = [data.aws_security_group.existing_securitygroup.id] + } + + environment { + variables = { + REDIS_HOST = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address + REDIS_PORT = tostring(data.aws_elasticache_cluster.existing_redis.port) + MOCK_PDS_AVERAGE_LIMIT = tostring(var.mock_pds_average_rate_limit) + MOCK_PDS_AVERAGE_WINDOW_SECONDS = tostring(var.mock_pds_average_window_seconds) + MOCK_PDS_SPIKE_LIMIT = tostring(var.mock_pds_spike_rate_limit) + MOCK_PDS_SPIKE_WINDOW_SECONDS = tostring(var.mock_pds_spike_window_seconds) + MOCK_PDS_GP_ODS_CODE = var.mock_pds_gp_ods_code + } + } + + kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn + + depends_on = [ + aws_cloudwatch_log_group.mock_pds_lambda_log_group, + aws_iam_policy.mock_pds_lambda_exec_policy + ] +} + +resource "aws_lambda_function_url" "mock_pds_lambda_url" { + count = var.mock_pds_enabled ? 1 : 0 + + function_name = aws_lambda_function.mock_pds_lambda[0].function_name + authorization_type = "NONE" +} + +resource "aws_lambda_permission" "mock_pds_lambda_url_invoke" { + count = var.mock_pds_enabled ? 1 : 0 + + statement_id = "AllowPublicInvokeFunctionUrl" + action = "lambda:InvokeFunctionUrl" + function_name = aws_lambda_function.mock_pds_lambda[0].function_name + principal = "*" + function_url_auth_type = "NONE" +} + +resource "aws_cloudwatch_log_metric_filter" "mock_pds_throttle_logs" { + count = var.mock_pds_enabled ? 
1 : 0 + + name = "${local.short_prefix}-MockPdsThrottleLogs" + pattern = "Mock PDS rate limit exceeded" + log_group_name = aws_cloudwatch_log_group.mock_pds_lambda_log_group[0].name + + metric_transformation { + name = "${local.short_prefix}-MockPdsThrottleRequests" + namespace = "${local.short_prefix}-MockPds" + value = "1" + } +} + +output "mock_pds_function_url" { + value = var.mock_pds_enabled ? aws_lambda_function_url.mock_pds_lambda_url[0].function_url : null + description = "Function URL for the mock PDS endpoint." +} \ No newline at end of file diff --git a/infrastructure/instance/variables.tf b/infrastructure/instance/variables.tf index 9aa6e05a1..1c89b7b22 100644 --- a/infrastructure/instance/variables.tf +++ b/infrastructure/instance/variables.tf @@ -79,6 +79,42 @@ variable "mns_environment" { default = "int" } +variable "mock_pds_enabled" { + description = "Enable the mock PDS Lambda in test environments." + type = bool + default = false +} + +variable "mock_pds_average_rate_limit" { + description = "Average mock PDS request rate, in requests per second." + type = number + default = 125 +} + +variable "mock_pds_average_window_seconds" { + description = "Average mock PDS rate limiting window in seconds." + type = number + default = 60 +} + +variable "mock_pds_spike_rate_limit" { + description = "Spike mock PDS request rate, in requests per second." + type = number + default = 450 +} + +variable "mock_pds_spike_window_seconds" { + description = "Spike mock PDS rate limiting window in seconds." + type = number + default = 1 +} + +variable "mock_pds_gp_ods_code" { + description = "Deterministic GP ODS code returned by the mock PDS service." + type = string + default = "Y12345" +} + variable "mesh_no_invocation_period_seconds" { description = "The maximum duration the MESH Processor Lambda can go without being invoked before the no-invocation alarm is triggered." 
type = number @@ -136,6 +172,17 @@ variable "ack_backend_image_uri" { } } +variable "mock_pds_image_uri" { + description = "Immutable URI of the mock PDS Lambda container image in ECR. Required when mock_pds_enabled is true; supplied by CI/CD." + type = string + default = "" + + validation { + condition = !var.mock_pds_enabled || trimspace(var.mock_pds_image_uri) != "" + error_message = "mock_pds_image_uri must be provided when mock_pds_enabled is true." + } +} + variable "batch_processor_filter_image_uri" { description = "Immutable URI of the batch processor filter Lambda container image in ECR. Must be supplied by CI/CD." type = string diff --git a/tests/perf_tests/Makefile b/tests/perf_tests/Makefile index bf8fd833f..4c23a580d 100644 --- a/tests/perf_tests/Makefile +++ b/tests/perf_tests/Makefile @@ -1,7 +1,31 @@ +LOCUST_FILE ?= src/locustfile.py +LOCUST ?= poetry run locust -f $(LOCUST_FILE) +MOCK_LOCUST_FILE ?= src/locustfile_pds_rate_limit.py +MOCK_LOCUST ?= poetry run locust -f $(MOCK_LOCUST_FILE) + test: - poetry run locust -f src/locustfile.py + $(LOCUST) test-read-only: - poetry run locust -f src/locustfile.py SearchUser + $(LOCUST) SearchUser + +mockpds: + $(MOCK_LOCUST) MockPdsUser + +mockpdstest-average: + PERF_LOAD_PROFILE=average PERF_MOCK_PDS_AVERAGE_RPS=125 $(MOCK_LOCUST) MockPdsUser + +mockpdstest-boundary: + PERF_LOAD_PROFILE=average PERF_MOCK_PDS_AVERAGE_RPS=130 $(MOCK_LOCUST) MockPdsUser + +mockpdstest-spike: + PERF_LOAD_PROFILE=spike $(MOCK_LOCUST) MockPdsUser + +mockpdstest-ui: + @if [ "$(PERF_LOAD_PROFILE)" != "average" ] && [ "$(PERF_LOAD_PROFILE)" != "spike" ]; then \ + echo "PERF_LOAD_PROFILE must be set to average or spike"; \ + exit 1; \ + fi + PERF_LOAD_PROFILE=$(PERF_LOAD_PROFILE) $(MOCK_LOCUST) MockPdsUser -.PHONY: test test-read-only +.PHONY: test test-read-only mockpds mockpdstest-average mockpdstest-boundary mockpdstest-spike mockpdstest-ui diff --git a/tests/perf_tests/README.md b/tests/perf_tests/README.md index 
5ddede2dd..dbca7e6cb 100644 --- a/tests/perf_tests/README.md +++ b/tests/perf_tests/README.md @@ -4,6 +4,43 @@ This project contains Locust performance tests for the Immunisation FHIR API. To run them, ensure you have the `APIGEE_ENVIRONMENT` : Currently, only the ref environment is supported. +`PERF_SUPPLIER_SYSTEM` : `EMIS` or `TPP` `PERF_CREATE_RPS_PER_USER` : numeric env vars set, and call `make test`. + +For read-only search load, use `make test-read-only` (runs the `SearchUser` Locust profile). + +For direct mock-PDS rate limit testing, deploy the mock PDS Lambda from `tests/perf_tests/mock_pds` only for the perf test run and target its Function URL. + +1. Run `Deploy Backend` for `dev/ref` with `deploy_mock_pds=true`. +2. Retrieve the mock PDS Lambda Function URL from AWS after the deploy completes. +3. Export `MOCK_PDS_BASE_URL` to that Function URL. +4. Run mock rate tests: + `make mockpdstest-average`, `make mockpdstest-boundary`, or `make mockpdstest-spike` + +The rate presets are baked in: + +- `make mockpdstest-average` runs at `125 rps` +- `make mockpdstest-boundary` runs at `130 rps` + +Or run the Locust UI against the deployed Function URL: +`MOCK_PDS_BASE_URL=https://abc123.lambda-url.eu-west-2.on.aws PERF_LOAD_PROFILE=average make mockpdstest-ui` +or +`MOCK_PDS_BASE_URL=https://abc123.lambda-url.eu-west-2.on.aws PERF_LOAD_PROFILE=spike make mockpdstest-ui` + +`src/locustfile_pds_rate_limit.py` targets the deployed mock PDS Lambda URL. 
+ +Mock PDS profile defaults are tuned for parity with earlier ref checks: + +- Average profile duration default: `180s` +- Spike profile stages default: `10s warmup + 20s spike + 10s recovery` + +Supported environment variables: + +- `MOCK_PDS_BASE_URL`: deployed mock PDS Lambda Function URL +- `PERF_LOAD_PROFILE`: `average` or `spike` +- `PERF_MOCK_PDS_RPS_PER_USER` +- `PERF_MOCK_PDS_AVERAGE_RPS`, `PERF_MOCK_PDS_AVERAGE_DURATION_SECONDS` +- `PERF_MOCK_PDS_SPIKE_WARMUP_RPS`, `PERF_MOCK_PDS_SPIKE_RPS`, `PERF_MOCK_PDS_SPIKE_WARMUP_SECONDS`, `PERF_MOCK_PDS_SPIKE_DURATION_SECONDS`, `PERF_MOCK_PDS_SPIKE_RECOVERY_RPS`, `PERF_MOCK_PDS_SPIKE_RECOVERY_SECONDS` +- `MOCK_PDS_VERIFY_TLS` diff --git a/tests/perf_tests/mock_pds/Dockerfile b/tests/perf_tests/mock_pds/Dockerfile new file mode 100644 index 000000000..723846f03 --- /dev/null +++ b/tests/perf_tests/mock_pds/Dockerfile @@ -0,0 +1,26 @@ +FROM public.ecr.aws/lambda/python:3.11 AS base + +RUN mkdir -p /home/appuser && \ + echo 'appuser:x:1001:1001::/home/appuser:/sbin/nologin' >> /etc/passwd && \ + echo 'appuser:x:1001:' >> /etc/group && \ + chown -R 1001:1001 /home/appuser && \ + pip install --no-cache-dir --only-binary :all: "poetry==2.1.4" + +COPY ./mock_pds/poetry.lock ./mock_pds/pyproject.toml ./ + +WORKDIR /var/task +RUN poetry config virtualenvs.create false && \ + poetry config installer.only-binary :all: && \ + poetry install --no-interaction --no-ansi --no-root --only main + +FROM base AS build + +WORKDIR /var/task + +COPY ./mock_pds/src . + +RUN chmod 644 $(find . -type f) && chmod 755 $(find . 
-type d) + +USER 1001:1001 + +CMD ["lambda_handler.lambda_handler"] \ No newline at end of file diff --git a/tests/perf_tests/mock_pds/README.md b/tests/perf_tests/mock_pds/README.md new file mode 100644 index 000000000..d0202ff0a --- /dev/null +++ b/tests/perf_tests/mock_pds/README.md @@ -0,0 +1,18 @@ +# Mock PDS Lambda + +This Lambda exposes a deterministic mock PDS endpoint for ref-only integration and performance testing. + +It supports: + +- `GET /Patient/{nhs_number}` for patient lookups used by MNS and id-sync. +- Redis-backed average and spike rate limiting with a fixed response contract. + +Environment variables: + +- `MOCK_PDS_AVERAGE_LIMIT` +- `MOCK_PDS_AVERAGE_WINDOW_SECONDS` +- `MOCK_PDS_SPIKE_LIMIT` +- `MOCK_PDS_SPIKE_WINDOW_SECONDS` +- `MOCK_PDS_GP_ODS_CODE` +- `REDIS_HOST` +- `REDIS_PORT` diff --git a/tests/perf_tests/mock_pds/poetry.lock b/tests/perf_tests/mock_pds/poetry.lock new file mode 100644 index 000000000..c1f2fd3b7 --- /dev/null +++ b/tests/perf_tests/mock_pds/poetry.lock @@ -0,0 +1,158 @@ +# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. 
+ +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_full_version < \"3.11.3\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + +[[package]] +name = "coverage" +version = "7.13.5" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +files = [ + {file = "coverage-7.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5"}, + {file = "coverage-7.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:704de6328e3d612a8f6c07000a878ff38181ec3263d5a11da1db294fa6a9bdf8"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a1a6d79a14e1ec1832cabc833898636ad5f3754a678ef8bb4908515208bf84f4"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79060214983769c7ba3f0cee10b54c97609dca4d478fa1aa32b914480fd5738d"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:356e76b46783a98c2a2fe81ec79df4883a1e62895ea952968fb253c114e7f930"}, + {file = "coverage-7.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0cef0cdec915d11254a7f549c1170afecce708d30610c6abdded1f74e581666d"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:dc022073d063b25a402454e5712ef9e007113e3a676b96c5f29b2bda29352f40"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9b74db26dfea4f4e50d48a4602207cd1e78be33182bc9cbf22da94f332f99878"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ad146744ca4fd09b50c482650e3c1b1f4dfa1d4792e0a04a369c7f23336f0400"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c555b48be1853fe3997c11c4bd521cdd9a9612352de01fa4508f16ec341e6fe0"}, + {file = "coverage-7.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7034b5c56a58ae5e85f23949d52c14aca2cfc6848a31764995b7de88f13a1ea0"}, + {file = "coverage-7.13.5-cp310-cp310-win32.whl", hash = "sha256:eb7fdf1ef130660e7415e0253a01a7d5a88c9c4d158bcf75cbbd922fd65a5b58"}, + {file = "coverage-7.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:3e1bb5f6c78feeb1be3475789b14a0f0a5b47d505bfc7267126ccbd50289999e"}, + {file = "coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d"}, + {file = "coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743"}, + {file = "coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd"}, + {file = "coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8"}, + {file = "coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf"}, + {file = "coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9"}, + {file = "coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028"}, + {file = "coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01"}, + {file = "coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256"}, + {file = "coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf"}, + {file = "coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c"}, + {file = "coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf"}, + {file = "coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810"}, + {file = "coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de"}, + {file = "coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1"}, + {file = 
"coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a"}, + {file = "coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6"}, + {file = "coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17"}, + {file = "coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85"}, + {file = 
"coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b"}, + {file = "coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664"}, + {file = "coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d"}, + {file = "coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd"}, + {file = "coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87"}, + {file = 
"coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479"}, + {file = "coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2"}, + {file = "coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a"}, + {file = "coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819"}, + {file = "coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911"}, + {file = "coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f"}, + {file = "coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6"}, + {file = "coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0"}, + {file = 
"coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0"}, + {file = "coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0"}, + {file = "coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc"}, + {file = "coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633"}, + {file = "coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8"}, + {file = "coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b"}, + {file = "coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90"}, + {file = "coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea"}, + {file = "coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a"}, + {file = "coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215"}, + {file = "coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43"}, + {file = "coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45"}, + {file = "coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61"}, + {file = "coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "redis" +version = "6.4.0" 
+description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "redis-6.4.0-py3-none-any.whl", hash = "sha256:f0544fa9604264e9464cdf4814e7d4830f74b165d52f2a330a760a88dd248b7f"}, + {file = "redis-6.4.0.tar.gz", hash = "sha256:b01bc7282b8444e28ec36b261df5375183bb47a07eb9c603f284e89cbc5ef010"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} + +[package.extras] +hiredis = ["hiredis (>=3.2.0)"] +jwt = ["pyjwt (>=2.9.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (>=20.0.1)", "requests (>=2.31.0)"] + +[metadata] +lock-version = "2.1" +python-versions = "~3.11" +content-hash = "acb4e79114b4551bc3e3461e2a5562e1140e665a22a23db81eca2a56038de5de" diff --git a/tests/perf_tests/mock_pds/pyproject.toml b/tests/perf_tests/mock_pds/pyproject.toml new file mode 100644 index 000000000..9ca813e03 --- /dev/null +++ b/tests/perf_tests/mock_pds/pyproject.toml @@ -0,0 +1,16 @@ +[tool.poetry] +name = "mock-pds" +version = "1.0.0" +description = "Mock PDS endpoint for ref integration and performance testing" +authors = ["VED Team "] +readme = "README.md" +packages = [{include = "src"}] + +[tool.poetry.dependencies] +python = "~3.11" +redis = "^6.1.0" +coverage = "^7.13.5" + +[build-system] +requires = ["poetry-core >= 1.5.0"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/tests/perf_tests/mock_pds/src/lambda_handler.py b/tests/perf_tests/mock_pds/src/lambda_handler.py new file mode 100644 index 000000000..a1d3cd47a --- /dev/null +++ b/tests/perf_tests/mock_pds/src/lambda_handler.py @@ -0,0 +1,37 @@ +import logging +import os + +import redis + +from mock_pds_service import MockPdsService +from rate_limiter import FixedWindowRateLimiter + +logger = logging.getLogger() +logger.setLevel(logging.INFO) + +_redis_client = redis.Redis( + host=os.environ["REDIS_HOST"], + 
def lambda_handler(event, context):
    """AWS Lambda entry point for the mock PDS.

    Delegates the event to the module-level service instance; any unexpected
    error is logged with a traceback and converted into a generic JSON 500
    so the caller never sees a raw Lambda failure.
    """
    try:
        response = _mock_pds_service.handle(event)
    except Exception:
        # Log the full traceback, then degrade to a stable error envelope.
        logger.exception("Mock PDS failed to handle request")
        response = {
            "statusCode": 500,
            "headers": {"Content-Type": "application/json"},
            "body": '{"code": 500, "message": "Mock PDS encountered an unexpected error"}',
        }
    return response
limit=%s window_seconds=%s", + decision.window_name, + decision.count, + decision.limit, + decision.window_seconds, + ) + return self._error(HTTPStatus.TOO_MANY_REQUESTS, RATE_LIMIT_MESSAGE) + + logger.info("Mock PDS served patient lookup for nhs_number=%s", nhs_number) + return self._empty_response(HTTPStatus.OK) + + @staticmethod + def _get_method(event: dict) -> str: + return event.get("requestContext", {}).get("http", {}).get("method") or event.get("httpMethod") or "GET" + + @staticmethod + def _extract_patient_id(event: dict) -> str | None: + path = ( + event.get("rawPath") or event.get("path") or event.get("requestContext", {}).get("http", {}).get("path", "") + ).rstrip("/") + if "/Patient/" not in path: + return None + return path.rsplit("/Patient/", maxsplit=1)[-1] or None + + @classmethod + def _error(cls, status: HTTPStatus, message: str) -> dict: + return cls._response(status, {"code": int(status), "message": message}) + + @staticmethod + def _empty_response(status_code: int) -> dict: + return {"statusCode": status_code, "body": ""} + + @staticmethod + def _response(status_code: int, body: dict, content_type: str = "application/json") -> dict: + return { + "statusCode": status_code, + "headers": {"Content-Type": content_type}, + "body": json.dumps(body), + } diff --git a/tests/perf_tests/mock_pds/src/rate_limiter.py b/tests/perf_tests/mock_pds/src/rate_limiter.py new file mode 100644 index 000000000..84c760a22 --- /dev/null +++ b/tests/perf_tests/mock_pds/src/rate_limiter.py @@ -0,0 +1,54 @@ +import time +from dataclasses import dataclass + +import redis + + +@dataclass(frozen=True) +class RateLimitDecision: + allowed: bool + window_name: str + count: int + limit: int + window_seconds: int + + +class FixedWindowRateLimiter: + def __init__( + self, + redis_client: redis.Redis, + key_prefix: str, + average_limit: int, + average_window_seconds: int, + spike_limit: int, + spike_window_seconds: int, + ): + self.redis_client = redis_client + self.key_prefix = 
key_prefix + self._windows = ( + ("average", average_limit * average_window_seconds, average_window_seconds), + ("spike", spike_limit, spike_window_seconds), + ) + + def check(self, scope: str) -> RateLimitDecision: + decision = None + for name, limit, seconds in self._windows: + decision = self._evaluate_window(scope, name, limit, seconds) + if not decision.allowed: + break + return decision + + def _evaluate_window(self, scope: str, window_name: str, limit: int, window_seconds: int) -> RateLimitDecision: + key = f"{self.key_prefix}:{scope}:{window_name}:{int(time.time() // window_seconds)}" + pipeline = self.redis_client.pipeline() + pipeline.incr(key) + pipeline.expire(key, window_seconds + 1) + count, _ = pipeline.execute() + + return RateLimitDecision( + allowed=count <= limit, + window_name=window_name, + count=count, + limit=limit, + window_seconds=window_seconds, + ) diff --git a/tests/perf_tests/mock_pds/tests/test_mock_pds_service.py b/tests/perf_tests/mock_pds/tests/test_mock_pds_service.py new file mode 100644 index 000000000..52993f545 --- /dev/null +++ b/tests/perf_tests/mock_pds/tests/test_mock_pds_service.py @@ -0,0 +1,122 @@ +import importlib +import json +import os +import unittest +from unittest.mock import Mock, patch + +os.environ.setdefault("REDIS_HOST", "test-redis-host") +os.environ.setdefault("REDIS_PORT", "6379") + +import lambda_handler as lambda_handler_module +from mock_pds_service import RATE_LIMIT_MESSAGE, MockPdsService +from rate_limiter import FixedWindowRateLimiter, RateLimitDecision + + +def _event(method: str = "GET", nhs_number: str = "9481152782") -> dict: + return {"rawPath": f"/Patient/{nhs_number}", "requestContext": {"http": {"method": method}}} + + +def _decision(allowed: bool, count: int) -> RateLimitDecision: + return RateLimitDecision(allowed=allowed, window_name="spike", count=count, limit=450, window_seconds=1) + + +class TestMockPdsService(unittest.TestCase): + def setUp(self): + self.rate_limiter = 
Mock(spec=FixedWindowRateLimiter) + self.rate_limiter.check.return_value = _decision(True, 1) + self.service = MockPdsService(self.rate_limiter, "Y12345") + + def test_returns_empty_200_response(self): + response = self.service.handle(_event()) + + self.assertEqual(response["statusCode"], 200) + self.assertEqual(response["body"], "") + + def test_returns_429_when_rate_limit_exceeded(self): + self.rate_limiter.check.return_value = _decision(False, 451) + + response = self.service.handle(_event()) + + self.assertEqual(response["statusCode"], 429) + self.assertEqual(json.loads(response["body"]), {"code": 429, "message": RATE_LIMIT_MESSAGE}) + + def test_rejects_non_get_requests(self): + response = self.service.handle(_event(method="POST")) + + self.assertEqual(response["statusCode"], 405) + + def test_returns_400_when_patient_id_missing(self): + response = self.service.handle({"rawPath": "/Patient/", "requestContext": {"http": {"method": "GET"}}}) + + self.assertEqual(response["statusCode"], 400) + self.assertEqual(json.loads(response["body"]), {"code": 400, "message": "Patient id is required"}) + + def test_returns_400_when_path_has_no_patient_segment(self): + response = self.service.handle({"rawPath": "/metadata", "requestContext": {"http": {"method": "GET"}}}) + + self.assertEqual(response["statusCode"], 400) + + def test_accepts_http_method_from_api_gateway_rest_shape(self): + event = {"path": "/Patient/9481152782", "httpMethod": "GET"} + response = self.service.handle(event) + + self.assertEqual(response["statusCode"], 200) + + def test_defaults_to_get_when_method_absent(self): + event = {"rawPath": "/Patient/9481152782"} + response = self.service.handle(event) + + self.assertEqual(response["statusCode"], 200) + + def test_extracts_patient_from_path_when_raw_path_missing(self): + event = {"path": "/Patient/9912003888", "httpMethod": "GET"} + response = self.service.handle(event) + + self.assertEqual(response["statusCode"], 200) + 
class TestLambdaHandler(unittest.TestCase):
    """Tests for the lambda_handler module: wiring at import time and error shielding."""

    def tearDown(self):
        # Re-import the module so that objects patched during a test do not
        # leak into other tests through the cached module-level globals.
        importlib.reload(lambda_handler_module)

    # NOTE: @patch decorators apply bottom-up, so the first mock argument
    # (mock_redis) corresponds to @patch("redis.Redis") and the second
    # (mock_pds_cls) to @patch("mock_pds_service.MockPdsService").
    @patch.dict(
        "os.environ",
        {
            "REDIS_HOST": "mock-redis",
            "MOCK_PDS_AVERAGE_LIMIT": "125",
            "MOCK_PDS_AVERAGE_WINDOW_SECONDS": "60",
            "MOCK_PDS_SPIKE_LIMIT": "450",
            "MOCK_PDS_SPIKE_WINDOW_SECONDS": "1",
        },
        clear=False,
    )
    @patch("mock_pds_service.MockPdsService")
    @patch("redis.Redis")
    def test_lambda_handler_uses_cached_service(self, mock_redis, mock_pds_cls):
        """Reloading wires the service once; repeated invocations reuse it (Redis built exactly once)."""
        mock_service = Mock()
        mock_service.handle.return_value = {"statusCode": 200}
        mock_pds_cls.return_value = mock_service

        # Reload re-executes the module-level wiring while the patches are
        # active, so the cached client/service are built from the mocks.
        importlib.reload(lambda_handler_module)
        first_response = lambda_handler_module.lambda_handler(_event(nhs_number="123"), None)
        second_response = lambda_handler_module.lambda_handler(_event(nhs_number="456"), None)

        self.assertEqual(first_response, {"statusCode": 200})
        self.assertEqual(second_response, {"statusCode": 200})
        # Two invocations but a single Redis construction proves the client is cached.
        mock_redis.assert_called_once_with(host="mock-redis", port=6379, decode_responses=True)

    def test_lambda_handler_returns_500_on_unhandled_error(self):
        """Any exception escaping the service is converted into a generic 500 response."""
        mock_svc = Mock()
        mock_svc.handle.side_effect = RuntimeError("boom")
        with patch.object(lambda_handler_module, "_mock_pds_service", mock_svc):
            response = lambda_handler_module.lambda_handler(_event(nhs_number="123"), None)

        self.assertEqual(response["statusCode"], 500)
class TestFixedWindowRateLimiter(unittest.TestCase):
    """Unit tests for FixedWindowRateLimiter using mocked Redis pipelines."""

    def _limiter(self, redis_client: MagicMock, **kwargs) -> FixedWindowRateLimiter:
        # Build a limiter with small, test-friendly defaults; individual tests
        # override specific limits via kwargs.
        defaults = {
            "key_prefix": "pfx",
            "average_limit": 2,
            "average_window_seconds": 60,
            "spike_limit": 10,
            "spike_window_seconds": 1,
        }
        defaults.update(kwargs)
        return FixedWindowRateLimiter(redis_client, **defaults)

    def _pipeline_mock(self, execute_result: list):
        # Mimic redis-py's fluent pipeline: incr/expire return the pipeline
        # itself, execute() returns the queued command results in order.
        pipeline = MagicMock()
        pipeline.incr.return_value = pipeline
        pipeline.expire.return_value = pipeline
        pipeline.execute.return_value = execute_result
        return pipeline

    def test_check_allows_when_both_windows_under_limit(self):
        """Both windows under limit: decision comes from the last (spike) window."""
        redis_client = MagicMock()
        pipe_avg = self._pipeline_mock([1, True])
        pipe_spike = self._pipeline_mock([2, True])
        # side_effect order matches the limiter's window order: average, then spike.
        redis_client.pipeline.side_effect = [pipe_avg, pipe_spike]

        limiter = self._limiter(redis_client)
        decision = limiter.check("scope-a")

        self.assertTrue(decision.allowed)
        self.assertEqual(decision.window_name, "spike")
        self.assertEqual(decision.count, 2)
        self.assertEqual(redis_client.pipeline.call_count, 2)

    def test_check_denies_on_average_window(self):
        """Average-window breach denies immediately without evaluating the spike window."""
        redis_client = MagicMock()
        pipe_avg = self._pipeline_mock([121, True])
        redis_client.pipeline.return_value = pipe_avg

        # Effective average limit = average_limit * window_seconds = 120.
        limiter = self._limiter(redis_client, average_limit=2, average_window_seconds=60)
        decision = limiter.check("scope-b")

        self.assertFalse(decision.allowed)
        self.assertEqual(decision.window_name, "average")
        self.assertEqual(decision.count, 121)
        self.assertEqual(decision.limit, 120)
        self.assertEqual(decision.window_seconds, 60)
        # Short-circuit: spike window never queried.
        redis_client.pipeline.assert_called_once()

    def test_check_denies_on_spike_after_average_passes(self):
        """Spike-window breach is reported even when the average window allowed the request."""
        redis_client = MagicMock()
        pipe_avg = self._pipeline_mock([1, True])
        pipe_spike = self._pipeline_mock([11, True])
        redis_client.pipeline.side_effect = [pipe_avg, pipe_spike]

        limiter = self._limiter(redis_client, spike_limit=10)
        decision = limiter.check("scope-c")

        self.assertFalse(decision.allowed)
        self.assertEqual(decision.window_name, "spike")
        self.assertEqual(decision.count, 11)
        self.assertEqual(decision.limit, 10)

    def test_evaluate_window_uses_time_bucket_in_key(self):
        """The Redis key embeds the time bucket and the TTL outlives the window by one second."""
        redis_client = MagicMock()
        pipeline = self._pipeline_mock([1, True])
        redis_client.pipeline.return_value = pipeline

        limiter = self._limiter(redis_client)
        fixed_t = 1_700_000_000
        window_seconds = 60
        expected_bucket = int(fixed_t // window_seconds)

        # Freeze time.time as seen from the rate_limiter module.
        with patch("rate_limiter.time.time", return_value=fixed_t):
            decision = limiter._evaluate_window("s", "average", 120, window_seconds)

        self.assertTrue(decision.allowed)
        self.assertEqual(decision.count, 1)
        pipeline.incr.assert_called_once()
        incr_key = pipeline.incr.call_args[0][0]
        self.assertIn(f":s:average:{expected_bucket}", incr_key)
        pipeline.expire.assert_called_once_with(incr_key, window_seconds + 1)
+ +This module drives two profiles against the deployed mock PDS Lambda: +- average: sustained load around the threshold +- spike: warmup, burst, and recovery phases +""" + +import math +import os +import random +from urllib.parse import urlparse + +from locust import HttpUser, LoadTestShape, constant_throughput, task + +PERF_LOAD_PROFILE = os.getenv("PERF_LOAD_PROFILE", "").strip().lower() +MOCK_PDS_BASE_URL = os.getenv("MOCK_PDS_BASE_URL", "").strip().rstrip("/") + + +def _validate_mock_pds_base_url(base_url: str) -> str: + """Validate that the target URL is an absolute HTTP(S) endpoint.""" + if not base_url: + raise ValueError( + "MOCK_PDS_BASE_URL must be set to the deployed mock PDS Lambda Function URL, " + "for example https://abc123.lambda-url.eu-west-2.on.aws" + ) + + if "<" in base_url or ">" in base_url: + raise ValueError( + "MOCK_PDS_BASE_URL still contains a placeholder. Set it to the real Lambda Function URL, " + "for example https://abc123.lambda-url.eu-west-2.on.aws" + ) + + parsed = urlparse(base_url) + if parsed.scheme not in {"http", "https"} or not parsed.netloc: + raise ValueError( + "MOCK_PDS_BASE_URL must be a valid absolute URL, for example https://abc123.lambda-url.eu-west-2.on.aws" + ) + + return base_url + + +MOCK_PDS_BASE_URL = _validate_mock_pds_base_url(MOCK_PDS_BASE_URL) + +PERF_MOCK_PDS_RPS_PER_USER = float(os.getenv("PERF_MOCK_PDS_RPS_PER_USER", "1")) +MOCK_PDS_VERIFY_TLS = os.getenv("MOCK_PDS_VERIFY_TLS", "false").strip().lower() in {"1", "true", "yes"} + +PERF_MOCK_PDS_AVERAGE_RPS = int(os.getenv("PERF_MOCK_PDS_AVERAGE_RPS", "140")) +PERF_MOCK_PDS_AVERAGE_DURATION_SECONDS = int(os.getenv("PERF_MOCK_PDS_AVERAGE_DURATION_SECONDS", "180")) + +PERF_MOCK_PDS_SPIKE_WARMUP_RPS = int(os.getenv("PERF_MOCK_PDS_SPIKE_WARMUP_RPS", "125")) +PERF_MOCK_PDS_SPIKE_RPS = int(os.getenv("PERF_MOCK_PDS_SPIKE_RPS", "460")) +PERF_MOCK_PDS_SPIKE_WARMUP_SECONDS = int(os.getenv("PERF_MOCK_PDS_SPIKE_WARMUP_SECONDS", "10")) 
class MockPdsRateLimitShape(LoadTestShape):
    """Custom load shape driving the average and spike rate-limit scenarios."""

    # An abstract shape is ignored by Locust, so this class only takes effect
    # when an explicit supported profile has been selected.
    abstract = PERF_LOAD_PROFILE not in {"average", "spike"}

    def _current_target_rps(self, run_time):
        """Return the RPS target for *run_time*, or None once the scenario has ended."""
        if PERF_LOAD_PROFILE == "average":
            if run_time < PERF_MOCK_PDS_AVERAGE_DURATION_SECONDS:
                return PERF_MOCK_PDS_AVERAGE_RPS
            return None

        if PERF_LOAD_PROFILE == "spike":
            spike_end = PERF_MOCK_PDS_SPIKE_WARMUP_SECONDS + PERF_MOCK_PDS_SPIKE_DURATION_SECONDS
            recovery_end = spike_end + PERF_MOCK_PDS_SPIKE_RECOVERY_SECONDS
            if run_time < PERF_MOCK_PDS_SPIKE_WARMUP_SECONDS:
                return PERF_MOCK_PDS_SPIKE_WARMUP_RPS
            if run_time < spike_end:
                return PERF_MOCK_PDS_SPIKE_RPS
            if run_time < recovery_end:
                return PERF_MOCK_PDS_SPIKE_RECOVERY_RPS

        return None

    def tick(self):
        """Return `(user_count, spawn_rate)` for the current stage, or None to stop."""
        target_rps = self._current_target_rps(self.get_run_time())
        if target_rps is None:
            return None
        user_count = _users_for_target_rps(target_rps)
        return user_count, user_count
+ @staticmethod + def _random_nhs_number() -> str: + """Generate a pseudo NHS number for request variation during load tests.""" + return f"99{random.randint(10_000_000, 99_999_999)}" + + @task + def get_patient(self): + """Execute one lookup request and classify outcomes for test reporting.""" + with self.client.get( + f"/Patient/{self._random_nhs_number()}", + headers={"Accept": "application/fhir+json"}, + name="Mock PDS Patient Lookup", + catch_response=True, + ) as response: + if response.status_code == 200: + response.success() + return + + if response.status_code == 429: + try: + payload = response.json() + except ValueError: + response.failure(f"429 response was not valid JSON: {response.text}") + return + + if payload.get("code") == 429 and payload.get("message") == RATE_LIMIT_MESSAGE: + response.failure(f"HTTP {response.status_code}: {RATE_LIMIT_MESSAGE}") + else: + response.failure(f"Unexpected 429 payload: {response.text}") + return + + if response.status_code == 0: + error_detail = getattr(response, "error", None) + response.failure( + "Connection failed before reaching mock PDS. " + f"Check MOCK_PDS_BASE_URL={self.host}. " + f"TLS verification enabled={MOCK_PDS_VERIFY_TLS}. " + f"Underlying error: {error_detail!r}" + ) + return + + response.failure(f"Unexpected response: {response.status_code} {response.text}")