diff --git a/.github/actions/start-local-lambda/action.yaml b/.github/actions/start-local-lambda/action.yaml index f8e1aaa..f7a8dbc 100644 --- a/.github/actions/start-local-lambda/action.yaml +++ b/.github/actions/start-local-lambda/action.yaml @@ -5,10 +5,6 @@ inputs: description: "Command to start local Lambda" required: false default: "make deploy" - health-path: - description: "Health probe path to POST" - required: false - default: "/2015-03-31/functions/function/invocations" max-seconds: description: "Maximum seconds to wait for readiness" required: false @@ -26,25 +22,4 @@ runs: run: | set -euo pipefail echo "Starting local Lambda: '${{ inputs.deploy-command }}'" - nohup ${{ inputs.deploy-command }} >/tmp/lambda.log 2>&1 & - echo $! > /tmp/lambda.pid - echo "PID: $(cat /tmp/lambda.pid)" - - name: "Wait for Lambda to be ready" - shell: bash - run: | - set -euo pipefail - BASE_URL="${BASE_URL:-http://localhost:5001}" - HEALTH_URL="${BASE_URL}${{ inputs.health-path }}" - MAX="${{ inputs.max-seconds }}" - echo "Waiting for Lambda at ${HEALTH_URL} (max ${MAX}s)..." - for i in $(seq 1 "${MAX}"); do - if curl -sSf -X POST "${HEALTH_URL}" -d '{}' >/dev/null; then - echo "Lambda is ready" - exit 0 - fi - sleep 1 - done - echo "Lambda did not become ready in time" - echo "---- recent lambda log ----" - tail -n 200 /tmp/lambda.log || true - exit 1 + bash -c "${{ inputs.deploy-command }}" diff --git a/.github/workflows/preview-env.yaml b/.github/workflows/preview-env.yaml index ebd803b..a5326c8 100644 --- a/.github/workflows/preview-env.yaml +++ b/.github/workflows/preview-env.yaml @@ -16,8 +16,9 @@ permissions: env: AWS_REGION: eu-west-2 PREVIEW_PREFIX: pr- + PYTHON_VERSION: 3.14 LAMBDA_RUNTIME: python3.14 - LAMBDA_HANDLER: handler.handler + LAMBDA_HANDLER: lambda_handler.handler jobs: pr-preview: @@ -34,13 +35,16 @@ jobs: - name: Set up Python uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 with: - python-version: "3.14" + python-version: "${{ env.PYTHON_VERSION }}" + + - name: "Setup Python project" + uses: ./.github/actions/setup-python-project + with: + python-version: ${{ env.PYTHON_VERSION }} - name: Package artifact run: | - cd infrastructure/environments/preview - rm -f artifact.zip - zip -r artifact.zip . + make build - name: Select AWS role inputs id: role-select @@ -86,7 +90,7 @@ jobs: - name: Create or update preview Lambda (on open/sync/reopen) if: github.event.action != 'closed' run: | - cd infrastructure/environments/preview + cd pathology-api/target/ FN="${{ steps.names.outputs.function_name }}" echo "Deploying preview function: $FN" wait_for_lambda_ready() { diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml index 9575159..fe8b512 100644 --- a/.github/workflows/stage-2-test.yaml +++ b/.github/workflows/stage-2-test.yaml @@ -1,7 +1,7 @@ name: "Test stage" env: - BASE_URL: "http://localhost:5001" + BASE_URL: "http://localhost:5002" HOST: "localhost" on: diff --git a/.gitignore b/.gitignore index c470317..5d571d6 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,4 @@ pathology-api/test-artefacts/ **/.env **/.DS_Store +**/.coverage diff --git a/Makefile b/Makefile index 49e91ad..7136f30 100644 --- a/Makefile +++ b/Makefile @@ -9,6 +9,9 @@ docker := doas docker else docker := docker endif + +dockerNetwork := pathology-local + # ============================================================================== # Example CI/CD targets are: dependencies, build, publish, deploy, clean, etc. 
@@ -17,49 +20,66 @@ endif dependencies: # Install dependencies needed to build and test the project @Pipeline cd pathology-api && poetry sync -.PHONY: build-pathology-api -build-pathology-api: dependencies +.PHONY: build +build: clean-artifacts dependencies @cd pathology-api @echo "Running type checks..." @rm -rf target && rm -rf dist @poetry run mypy --no-namespace-packages . @echo "Packaging dependencies..." @poetry build --format=wheel - @pip install "dist/pathology_api-0.1.0-py3-none-any.whl" --target "./target/pathology-api" - # Copy main file separately as it is not included within the package. + VERSION=$$(poetry version -s) + @pip install "dist/pathology_api-$$VERSION-py3-none-any.whl" --target "./target/pathology-api" --platform manylinux2014_x86_64 --only-binary=:all: + # Copy lambda_handler file separately as it is not included within the package. @cp lambda_handler.py ./target/pathology-api/ - @rm -rf ../infrastructure/images/pathology-api/resources/build/ - @mkdir ../infrastructure/images/pathology-api/resources/build/ - @cp -r ./target/pathology-api ../infrastructure/images/pathology-api/resources/build/ - # Remove temporary build artefacts once build has completed - @rm -rf target && rm -rf dist + @cd ./target/pathology-api + @zip -r "../artifact.zip" . + +.PHONY: build-images +build-images: build # Build the project artefact @Pipeline + @mkdir infrastructure/images/pathology-api/resources/build/ + @cp pathology-api/target/artifact.zip infrastructure/images/pathology-api/resources/build/ + @mkdir infrastructure/images/pathology-api/resources/build/pathology-api + @unzip infrastructure/images/pathology-api/resources/build/artifact.zip -d infrastructure/images/pathology-api/resources/build/pathology-api -.PHONY: build -build: build-pathology-api # Build the project artefact @Pipeline @echo "Building Docker image using Docker. Utilising python version: ${PYTHON_VERSION} ..." - @$(docker) buildx build --load --provenance=false --build-arg PYTHON_VERSION=${PYTHON_VERSION} -t localhost/pathology-api-image infrastructure/images/pathology-api + @$(docker) buildx build --load --platform=linux/amd64 --provenance=false --build-arg PYTHON_VERSION=${PYTHON_VERSION} -t localhost/pathology-api-image infrastructure/images/pathology-api @echo "Docker image 'pathology-api-image' built successfully!" + @echo "Building api-gateway-mock using Docker. Utilising python version: ${PYTHON_VERSION} ..." + @$(docker) buildx build --load --build-arg PYTHON_VERSION=${PYTHON_VERSION} -t localhost/api-gateway-mock-image infrastructure/images/api-gateway-mock + @echo "Docker image 'api-gateway-mock-image' built successfully!" + publish: # Publish the project artefact @Pipeline # TODO: Implement the artefact publishing step -deploy: clean build # Deploy the project artefact to the target environment @Pipeline - @if [[ -n "$${IN_BUILD_CONTAINER}" ]]; then \ - echo "Starting using local docker network ..." ; \ - $(docker) run --name pathology-api -p 5001:8080 --network pathology-local -d localhost/pathology-api-image ; \ - else \ - $(docker) run --name pathology-api -p 5001:8080 -d localhost/pathology-api-image ; \ - fi +deploy: clean-docker build-images # Deploy the project artefact to the target environment @Pipeline + $(docker) network create $(dockerNetwork) || echo "Docker network '$(dockerNetwork)' already exists." 
+ $(docker) run --platform linux/amd64 --name pathology-api -p 5001:8080 --network $(dockerNetwork) -d localhost/pathology-api-image + $(docker) run --name api-gateway-mock -p 5002:5000 --network $(dockerNetwork) -d localhost/api-gateway-mock-image + +clean-artifacts: + @echo "Removing build artefacts..." + @rm -rf infrastructure/images/pathology-api/resources/build/ + @rm -rf pathology-api/target && rm -rf pathology-api/dist -clean:: stop # Clean-up project resources (main) @Operations +clean-docker: stop @echo "Removing pathology API container..." @$(docker) rm pathology-api || echo "No pathology API container currently exists." + @echo "Removing api-gateway-mock container..." + @$(docker) rm api-gateway-mock || echo "No api-gateway-mock container currently exists." + +clean:: clean-artifacts clean-docker # Clean-up project resources (main) @Operations + .PHONY: stop stop: @echo "Stopping pathology API container..." @$(docker) stop pathology-api || echo "No pathology API container currently running." + @echo "Stopping api-gateway-mock container..." + @$(docker) stop api-gateway-mock || echo "No api-gateway-mock container currently running." + config:: # Configure development environment (main) @Configuration # Configure poetry to trust dev certificate if specified @if [[ -n "$${DEV_CERTS_INCLUDED}" ]]; then \ diff --git a/infrastructure/environments/preview/handler.py b/infrastructure/environments/preview/handler.py deleted file mode 100644 index bf432f9..0000000 --- a/infrastructure/environments/preview/handler.py +++ /dev/null @@ -1,28 +0,0 @@ -import json -import logging -from typing import Any - -logger = logging.getLogger() -logger.setLevel(logging.INFO) - - -def handler(event: dict[str, Any], context): - headers = event.get("headers", {}) or {} - - # Log headers to CloudWatch - logger.info("Incoming request headers:") - for k, v in headers.items(): - logger.info("%s: %s", k, v) - - response_body = { - "message": "ok", - "headers": headers, - "requestContext": event.get("requestContext", {}), - } - - return { - "statusCode": 200, - "headers": {"content-type": "application/json"}, - "body": json.dumps(response_body, indent=2), - "isBase64Encoded": False, - } diff --git a/infrastructure/images/api-gateway-mock/Dockerfile b/infrastructure/images/api-gateway-mock/Dockerfile new file mode 100644 index 0000000..614ca12 --- /dev/null +++ b/infrastructure/images/api-gateway-mock/Dockerfile @@ -0,0 +1,12 @@ +# Retrieve the python version from build arguments, deliberately set to "invalid" by default to highlight when no version is provided when building the container. +ARG PYTHON_VERSION=invalid +# Use the specified python version to retrieve the required base lambda image. 
+ARG url=python:${PYTHON_VERSION}-alpine3.23
+FROM $url
+
+COPY resources/ /resources
+WORKDIR /resources
+
+RUN pip install --no-cache-dir -r requirements.txt
+
+ENTRYPOINT ["flask", "--app", "server", "run", "--host=0.0.0.0"]
diff --git a/infrastructure/images/api-gateway-mock/resources/requirements.txt b/infrastructure/images/api-gateway-mock/resources/requirements.txt
new file mode 100644
index 0000000..816a85f
--- /dev/null
+++ b/infrastructure/images/api-gateway-mock/resources/requirements.txt
@@ -0,0 +1,2 @@
+flask==3.1.2
+requests==2.32.5
diff --git a/infrastructure/images/api-gateway-mock/resources/server.py b/infrastructure/images/api-gateway-mock/resources/server.py
new file mode 100644
index 0000000..c7a6c9b
--- /dev/null
+++ b/infrastructure/images/api-gateway-mock/resources/server.py
@@ -0,0 +1,72 @@
+from logging.config import dictConfig
+
+import requests
+from flask import Flask, request
+
+# Very simple logging configuration taken from https://flask.palletsprojects.com/en/stable/logging/
+dictConfig(
+    {
+        "version": 1,
+        "formatters": {
+            "default": {
+                "format": "[%(asctime)s] %(levelname)s in %(module)s: %(message)s",
+            },
+        },
+        "handlers": {
+            "wsgi": {
+                "class": "logging.StreamHandler",
+                "stream": "ext://sys.stdout",
+                "formatter": "default",
+            }
+        },
+        "root": {"level": "INFO", "handlers": ["wsgi"]},
+    }
+)
+
+app = Flask(__name__)
+
+
+@app.route("/", methods=["POST", "GET"], defaults={"path_params": None})
+@app.route("/<path:path_params>", methods=["POST", "GET"])
+def forward_request(path_params):
+    app.logger.info("received request with data: %s", request.get_data(as_text=True))
+
+    response = requests.post(
+        "http://pathology-api:8080/2015-03-31/functions/function/invocations",
+        json={
+            "body": request.get_data(as_text=True).replace("\n", "").replace(" ", ""),
+            "requestContext": {
+                "http": {
+                    "path": f"/{path_params}",
+                    "method": request.method,
+                },
+                "requestId": "request-id",
+                "stage": "$default",
+            },
+            "httpMethod": request.method,
+            "rawPath": f"/{path_params}",
+            "rawQueryString": "",
+            "pathParameters": {"proxy": path_params},
+        },
+        headers={"Content-Type": "application/json"},
+        timeout=120,
+    )
+
+    app.logger.info(
+        "response: status_code=%s, body=%s", response.status_code, response.text
+    )
+
+    app.logger.info("response: %s", response.text)
+    response_data = response.json()
+
+    output = (
+        (
+            response_data["body"],
+            response_data["statusCode"],
+            response_data["headers"],
+        )
+        if "body" in response_data
+        else (response_data, 500, {"Content-Type": "text/plain"})
+    )
+
+    return output
diff --git a/pathology-api/lambda_handler.py b/pathology-api/lambda_handler.py
index 043e05c..15f36b0 100644
--- a/pathology-api/lambda_handler.py
+++ b/pathology-api/lambda_handler.py
@@ -1,38 +1,99 @@
-from typing import TypedDict
+import json
+import logging
+import logging.config
+from functools import reduce
+from typing import Any

-from pathology_api.handler import User, greet
+from aws_lambda_powertools.event_handler import (
+    APIGatewayHttpResolver,
+    Response,
+)
+from aws_lambda_powertools.utilities.typing import LambdaContext
+from pathology_api.fhir.r4.resources import Bundle
+from pathology_api.handler import handle_request
+from pydantic import ValidationError

+_INVALID_PAYLOAD_MESSAGE = "Invalid payload provided."
-class LambdaResponse[T](TypedDict): - """A lambda response including a body with a generic type.""" +logging.config.dictConfig( + { + "version": 1, + "formatters": { + "default": { + "format": "[%(asctime)s] %(levelname)s - %(module)s: %(message)s", + }, + }, + "handlers": { + "stdout": { + "class": "logging.StreamHandler", + "stream": "ext://sys.stdout", + "formatter": "default", + } + }, + "root": {"level": "DEBUG", "handlers": ["stdout"]}, + } +) - statusCode: int - headers: dict[str, str] - body: T +_logger = logging.getLogger(__name__) +app = APIGatewayHttpResolver() -def _with_default_headers[T](status_code: int, body: T) -> LambdaResponse[T]: - return { - "statusCode": status_code, - "headers": {"Content-Type": "application/json"}, - "body": body, - } +def _with_default_headers(status_code: int, body: str) -> Response[str]: + content_type = "application/fhir+json" if status_code == 200 else "text/plain" + return Response( + status_code=status_code, + headers={"Content-Type": content_type}, + body=body, + ) -def handler(event: dict[str, str], context: dict[str, str]) -> LambdaResponse[str]: - print(f"Received event: {event}") - if "payload" not in event: - return _with_default_headers(status_code=400, body="Name is required") +@app.get("/_status") +def status() -> Response[str]: + _logger.debug("Status check endpoint called") + return Response(status_code=200, body="OK", headers={"Content-Type": "text/plain"}) - name = event["payload"] - if not name: - return _with_default_headers(status_code=400, body="Name cannot be empty") - user = User(name=name) +@app.post("/FHIR/R4/Bundle") +def post_result() -> Response[str]: + _logger.debug("Post result endpoint called.") try: - return _with_default_headers(status_code=200, body=f"{greet(user)}") - except ValueError: + payload = app.current_event.json_body + except json.JSONDecodeError as err: + _logger.error("Error decoding JSON payload. error: %s", err) + return _with_default_headers(status_code=400, body=_INVALID_PAYLOAD_MESSAGE) + _logger.debug("Payload received: %s", payload) + + if not payload: + _logger.error("No payload provided.") + return _with_default_headers(status_code=400, body="No payload provided.") + + try: + bundle = Bundle.model_validate(payload, by_alias=True) + except ValidationError as err: + _logger.error( + "Error parsing payload. error: %s issues: %s", + err, + reduce(lambda acc, e: acc + "," + str(e), err.errors(), ""), + ) + return _with_default_headers(status_code=400, body=_INVALID_PAYLOAD_MESSAGE) + except TypeError as err: + _logger.error("Error parsing payload. error: %s", err) + return _with_default_headers(status_code=400, body=_INVALID_PAYLOAD_MESSAGE) + + try: + response = handle_request(bundle) + + return _with_default_headers( + status_code=200, + body=response.model_dump_json(by_alias=True, exclude_none=True), + ) + except ValueError as err: + _logger.error("Error processing payload. error: %s", err) return _with_default_headers( - status_code=404, body=f"Provided name cannot be found. name={name}" + status_code=400, body="Error processing provided bundle." 
) + + +def handler(data: dict[str, Any], context: LambdaContext) -> dict[str, Any]: + return app.resolve(data, context) diff --git a/pathology-api/openapi.yaml b/pathology-api/openapi.yaml index 3ff0a1e..baaa0e3 100644 --- a/pathology-api/openapi.yaml +++ b/pathology-api/openapi.yaml @@ -6,286 +6,145 @@ info: contact: name: API Support servers: - - url: http://localhost:5001 + - url: http://localhost:5002 description: Local development server paths: - /2015-03-31/functions/function/invocations: + /FHIR/R4/Bundle: post: - summary: Get hello world message - description: Returns a simple hello world message - operationId: postHelloWorld + summary: Provide a new test result + description: Provide a new test result to the pathology API + operationId: postBundle requestBody: - required: false + required: true content: - application/json: + application/fhir+json: schema: type: object + required: + - resourceType + - type + - entry properties: - payload: + resourceType: type: string - description: The payload to be processed - responses: - '200': - description: Successful response - content: - text/plain: - schema: - type: object - properties: - status_code: - type: integer - description: Status code of the interaction - body: - type: string - description: The output of the interaction - errorMessage: - type: string - description: Any error messages relating to errors encountered with the interaction - errorType: - type: string - description: The type of error encountered during the interaction, if an error has occurred - requestId: - type: string - format: uuid - description: The unique request ID for the interaction - stacktrace: - type: array - items: - type: string - description: The stack trace of the error, if an error has occurred - get: - summary: Get hello world message - description: Returns a simple hello world message - operationId: postHelloWorld - requestBody: - required: false - content: - application/json: - schema: - type: object - properties: - payload: + enum: + - Bundle + description: FHIR resource type + type: type: string - description: The payload to be processed - responses: - '200': - description: Successful response - content: - text/plain: - schema: - type: object - properties: - status_code: - type: integer - description: Status code of the interaction - body: - type: string - description: The output of the interaction - errorMessage: - type: string - description: Any error messages relating to errors encountered with the interaction - errorType: - type: string - description: The type of error encountered during the interaction, if an error has occurred - requestId: - type: string - format: uuid - description: The unique request ID for the interaction - stacktrace: - type: array - items: - type: string - description: The stack trace of the error, if an error has occurred + enum: + - document + - transaction + description: The type of the bundle + example: document + entry: + type: array + description: Entries in the bundle + minItems: 1 + maxItems: 1 + items: + type: object + required: + - fullUrl + - resource + properties: + fullUrl: + type: string + description: URI for resource + example: "patient" + resource: + type: object + required: + - resourceType + - identifier + properties: + resourceType: + type: string + description: Type of FHIR resource (Always "Patient") + enum: + - Patient + example: Patient + identifier: + type: object + required: + - system + - value + properties: + system: + type: string + enum: + - "https://fhir.nhs.uk/Id/nhs-number" + example: 
"https://fhir.nhs.uk/Id/nhs-number" + value: + type: string + example: "9999999999" - '404': - description: Route not found - content: - text/html: - schema: - type: string - put: - summary: Get hello world message - description: Returns a simple hello world message - operationId: postHelloWorld - requestBody: - required: false - content: - application/json: - schema: - type: object - properties: - payload: - type: string - description: The payload to be processed responses: '200': description: Successful response content: - text/plain: + application/fhir+json: schema: - type: object - properties: - status_code: - type: integer - description: Status code of the interaction - body: - type: string - description: The output of the interaction - errorMessage: - type: string - description: Any error messages relating to errors encountered with the interaction - errorType: - type: string - description: The type of error encountered during the interaction, if an error has occurred - requestId: - type: string - format: uuid - description: The unique request ID for the interaction - stacktrace: - type: array - items: - type: string - description: The stack trace of the error, if an error has occurred - - '404': - description: Route not found - content: - text/html: - schema: - type: string - patch: - summary: Get hello world message - description: Returns a simple hello world message - operationId: postHelloWorld - requestBody: - required: false - content: - application/json: - schema: + schema: type: object + required: + - resourceType + - type properties: - payload: + resourceType: type: string - description: The payload to be processed - responses: - '200': - description: Successful response - content: - text/plain: - schema: - type: object - properties: - status_code: - type: integer - description: Status code of the interaction - body: - type: string - description: The output of the interaction - errorMessage: - type: string - description: Any error messages relating to errors encountered with the interaction - errorType: - type: string - description: The type of error encountered during the interaction, if an error has occurred - requestId: - type: string - format: uuid - description: The unique request ID for the interaction - stacktrace: - type: array - items: - type: string - description: The stack trace of the error, if an error has occurred - - '404': - description: Route not found - content: - text/html: - schema: - type: string - delete: - summary: Get hello world message - description: Returns a simple hello world message - operationId: postHelloWorld - requestBody: - required: false - content: - application/json: - schema: - type: object - properties: - payload: + enum: + - Bundle + description: FHIR resource type (always "Bundle") + meta: + type: object + description: Metadata about the resource + nullable: true + type: type: string - description: The payload to be processed - responses: - '200': - description: Successful response - content: - text/plain: - schema: - type: object - properties: - status_code: - type: integer - description: Status code of the interaction - body: - type: string - description: The output of the interaction - errorMessage: - type: string - description: Any error messages relating to errors encountered with the interaction - errorType: - type: string - description: The type of error encountered during the interaction, if an error has occurred - requestId: - type: string - format: uuid - description: The unique request ID for the interaction - stacktrace: 
- type: array - items: + enum: + - document + - transaction + description: The type of the bundle + identifier: + type: object + nullable: true + description: Persistent identifier for the bundle (UUID) + properties: + system: type: string - description: The stack trace of the error, if an error has occurred - trace: - summary: Get hello world message - description: Returns a simple hello world message - operationId: postHelloWorld - requestBody: - required: false - content: - application/json: - schema: - type: object - properties: - payload: - type: string - description: The payload to be processed - responses: - '200': - description: Successful response + format: uri + value: + type: string + format: uuid + entry: + type: array + nullable: true + description: Entries in the bundle + items: + type: object + required: + - fullUrl + - resource + properties: + fullUrl: + type: string + description: URI for resource + resource: + type: object + required: + - resourceType + description: The Patient a test result is for + properties: + resourceType: + type: string + description: Type of FHIR resource (always "Patient") + enum: + - Patient + example: Patient + '400': + description: Invalid request content: text/plain: schema: - type: object - properties: - status_code: - type: integer - description: Status code of the interaction - body: - type: string - description: The output of the interaction - errorMessage: - type: string - description: Any error messages relating to errors encountered with the interaction - errorType: - type: string - description: The type of error encountered during the interaction, if an error has occurred - requestId: - type: string - format: uuid - description: The unique request ID for the interaction - stacktrace: - type: array - items: - type: string - description: The stack trace of the error, if an error has occurred + type: string diff --git a/pathology-api/poetry.lock b/pathology-api/poetry.lock index 215abe6..8d5a292 100644 --- a/pathology-api/poetry.lock +++ b/pathology-api/poetry.lock @@ -1,5 +1,17 @@ # This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + [[package]] name = "anyio" version = "4.11.0" @@ -51,6 +63,35 @@ files = [ {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, ] +[[package]] +name = "aws-lambda-powertools" +version = "3.24.0" +description = "Powertools for AWS Lambda (Python) is a developer toolkit to implement Serverless best practices and increase developer velocity." 
+optional = false +python-versions = "<4.0.0,>=3.10" +groups = ["main"] +files = [ + {file = "aws_lambda_powertools-3.24.0-py3-none-any.whl", hash = "sha256:9c9002856f61b86f49271a9d7efa0dad322ecd22719ddc1c6bb373e57ee0421a"}, + {file = "aws_lambda_powertools-3.24.0.tar.gz", hash = "sha256:9f86959c4aeac9669da799999aae5feac7a3a86e642b52473892eaa4273d3cc3"}, +] + +[package.dependencies] +jmespath = ">=1.0.1,<2.0.0" +typing-extensions = ">=4.11.0,<5.0.0" + +[package.extras] +all = ["aws-encryption-sdk (>=3.1.1,<5.0.0)", "aws-xray-sdk (>=2.8.0,<3.0.0)", "fastjsonschema (>=2.14.5,<3.0.0)", "jsonpath-ng (>=1.6.0,<2.0.0)", "pydantic (>=2.4.0,<3.0.0)", "pydantic-settings (>=2.6.1,<3.0.0)"] +aws-sdk = ["boto3 (>=1.34.32,<2.0.0)"] +datadog = ["datadog-lambda (>=8.114.0,<9.0.0)"] +datamasking = ["aws-encryption-sdk (>=3.1.1,<5.0.0)", "jsonpath-ng (>=1.6.0,<2.0.0)"] +kafka-consumer-avro = ["avro (>=1.12.0,<2.0.0)"] +kafka-consumer-protobuf = ["protobuf (>=6.30.2,<7.0.0)"] +parser = ["pydantic (>=2.4.0,<3.0.0)"] +redis = ["redis (>=4.4,<8.0)"] +tracer = ["aws-xray-sdk (>=2.8.0,<3.0.0)"] +validation = ["fastjsonschema (>=2.14.5,<3.0.0)"] +valkey = ["valkey-glide (>=1.3.5,<3.0)"] + [[package]] name = "backoff" version = "2.2.1" @@ -670,6 +711,18 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + [[package]] name = "jsonpointer" version = "3.0.0" @@ -1139,14 +1192,14 @@ files = [ [[package]] name = "pact-python" -version = "3.1.0" +version = "3.2.1" description = "Tool for creating and verifying consumer-driven contracts using the Pact framework." 
optional = false python-versions = ">=3.10" groups = ["dev"] files = [ - {file = "pact_python-3.1.0-py3-none-any.whl", hash = "sha256:382e2dea3a8e4c956bd9fed6960e76678ba3f37c329581e3f83bc4a46b901c06"}, - {file = "pact_python-3.1.0.tar.gz", hash = "sha256:176c03c74f0095cf7832826e26d42c83e7ec08d507e7f45a017f01632c3ab50e"}, + {file = "pact_python-3.2.1-py3-none-any.whl", hash = "sha256:2902fe74da09093269cf69d007227ae15db854abb22be7520ed643efa182b42e"}, + {file = "pact_python-3.2.1.tar.gz", hash = "sha256:bf6d9709999650feae49a085080fca95d2c1d6f94913fff8f2a6d6b9446b978d"}, ] [package.dependencies] @@ -1154,7 +1207,7 @@ pact-python-ffi = ">=0.4.0,<0.5.0" yarl = ">=1.0,<2.0" [package.extras] -compat-v2 = ["click (>=8.0,<9.0)", "pact-python-cli (>=2.0,<3.0)", "psutil (>=7.0,<8.0)", "requests (>=2.0,<3.0)", "six (>=1.0,<2.0)"] +compat-v2 = ["click (>=8.0,<9.0)", "pact-python-cli (>=2.5,<3.0)", "psutil (>=7.0,<8.0)", "requests (>=2.0,<3.0)", "six (>=1.0,<2.0)"] [[package]] name = "pact-python-ffi" @@ -1384,6 +1437,162 @@ files = [ {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, ] +[[package]] +name = "pydantic" +version = "2.12.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"}, + {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.41.5" +typing-extensions = ">=4.14.1" +typing-inspection = ">=0.4.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"}, + {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"}, + {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"}, + {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"}, + {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"}, + {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"}, + {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"}, + {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"}, + {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"}, + {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"}, + {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"}, + {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"}, + {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"}, + {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = 
"sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"}, + {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"}, + {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"}, + {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"}, + {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"}, + {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"}, + {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"}, + {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"}, + {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"}, + {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = 
"sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"}, + {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"}, + {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"}, + {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"}, + {file = 
"pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"}, + {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"}, + {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"}, +] + +[package.dependencies] +typing-extensions = ">=4.14.1" + [[package]] name = "pygments" version = "2.19.2" @@ -1998,12 +2207,27 @@ version = "4.15.0" description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, ] +[[package]] +name = "typing-inspection" +version = "0.4.2" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"}, + {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "tzdata" version = "2025.2" @@ -2227,4 +2451,4 @@ propcache = ">=0.2.1" [metadata] lock-version = "2.1" python-versions = ">3.13,<4.0.0" -content-hash = "b1f7ca89a1d19d8d860a88040999a785e36e52110abb98e15dbedcb5c78a7073" +content-hash = "16c10c515c7ceb2070612adce26f5c9caa927e32950de99b0fc78209520c6d29" diff --git a/pathology-api/pyproject.toml b/pathology-api/pyproject.toml index 36a0034..abb6c7b 100644 --- a/pathology-api/pyproject.toml +++ b/pathology-api/pyproject.toml @@ -8,6 +8,8 @@ authors = [ readme = "README.md" requires-python = ">3.13,<4.0.0" dependencies = [ + "aws-lambda-powertools (>=3.24.0,<4.0.0)", + "pydantic (>=2.12.5,<3.0.0)" ] [tool.poetry] diff --git a/pathology-api/src/pathology_api/fhir/__init__.py b/pathology-api/src/pathology_api/fhir/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pathology-api/src/pathology_api/fhir/r4/__init__.py b/pathology-api/src/pathology_api/fhir/r4/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pathology-api/src/pathology_api/fhir/r4/elements.py b/pathology-api/src/pathology_api/fhir/r4/elements.py new file mode 100644 index 0000000..b337bdf --- /dev/null +++ b/pathology-api/src/pathology_api/fhir/r4/elements.py @@ -0,0 +1,71 @@ +import datetime +import uuid +from abc import ABC +from dataclasses import dataclass +from typing import Annotated, ClassVar + +from pydantic import Field, model_validator + + +@dataclass(frozen=True) +class Meta: + """ + A FHIR R4 Meta element. See https://hl7.org/fhir/R4/datatypes.html#Meta. + Attributes: + version_id: The version id of the resource. + last_updated: The last updated timestamp of the resource. 
+    """
+
+    last_updated: Annotated[datetime.datetime | None, Field(alias="lastUpdated")] = None
+    version_id: Annotated[str | None, Field(alias="versionId")] = None
+
+    @classmethod
+    def with_last_updated(cls, last_updated: datetime.datetime | None = None) -> "Meta":
+        """
+        Create a Meta instance with the provided last_updated timestamp.
+        Args:
+            last_updated: The last updated timestamp.
+        Returns:
+            A Meta instance with the specified last_updated.
+        """
+        return cls(
+            last_updated=last_updated or datetime.datetime.now(tz=datetime.timezone.utc)
+        )
+
+
+@dataclass(frozen=True)
+class Identifier(ABC):
+    """
+    A FHIR R4 Identifier element. See https://hl7.org/fhir/R4/datatypes.html#Identifier.
+    Attributes:
+        system: The namespace for the identifier value.
+        value: The value that is unique within the system.
+    """
+
+    _expected_system: ClassVar[str] = "__unknown__"
+
+    value: str
+    system: str
+
+    @model_validator(mode="after")
+    def validate_system(self) -> "Identifier":
+        if self.system != self._expected_system:
+            raise ValueError(
+                f"Identifier system '{self.system}' does not match expected "
+                f"system '{self._expected_system}'."
+            )
+        return self
+
+    @classmethod
+    def __init_subclass__(cls, expected_system: str) -> None:
+        cls._expected_system = expected_system
+
+
+class UUIDIdentifier(Identifier, expected_system="https://tools.ietf.org/html/rfc4122"):
+    """A UUID identifier utilising the standard RFC 4122 system."""
+
+    def __init__(self, value: uuid.UUID | None = None):
+        super().__init__(
+            value=str(value or uuid.uuid4()),
+            system=self._expected_system,
+        )
diff --git a/pathology-api/src/pathology_api/fhir/r4/py.typed b/pathology-api/src/pathology_api/fhir/r4/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/pathology-api/src/pathology_api/fhir/r4/resources.py b/pathology-api/src/pathology_api/fhir/r4/resources.py
new file mode 100644
index 0000000..2834630
--- /dev/null
+++ b/pathology-api/src/pathology_api/fhir/r4/resources.py
@@ -0,0 +1,125 @@
+from typing import Annotated, Any, ClassVar, Literal, Self
+
+from pydantic import (
+    BaseModel,
+    Field,
+    SerializeAsAny,
+    ValidatorFunctionWrapHandler,
+    field_validator,
+    model_validator,
+)
+
+from .elements import Identifier, Meta, UUIDIdentifier
+
+
+class Resource(BaseModel):
+    """A FHIR R4 Resource base class."""
+
+    # class variable to hold class mappings per resource_type
+    __resource_types: ClassVar[dict[str, type["Resource"]]] = {}
+    __expected_resource_type: ClassVar[dict[type["Resource"], str]] = {}
+
+    meta: Annotated[Meta | None, Field(alias="meta", frozen=True)] = None
+    resource_type: str = Field(alias="resourceType", frozen=True)
+
+    def __init_subclass__(cls, resource_type: str, **kwargs: Any) -> None:
+        cls.__resource_types[resource_type] = cls
+        cls.__expected_resource_type[cls] = resource_type
+
+        super().__init_subclass__(**kwargs)
+
+    @model_validator(mode="wrap")
+    @classmethod
+    def validate_with_subtype(
+        cls, value: dict[str, Any], handler: ValidatorFunctionWrapHandler
+    ) -> Any:
+        """
+        Provides a model validator that instantiates the correct Resource subclass
+        based on its defined resource_type.
+        """
+        # If we're not currently acting on a top level Resource, or we've not been
+        # provided a generic dictionary object, delegate to the normal handler.
+ if cls != Resource or not isinstance(value, dict): + return handler(value) + + if "resourceType" not in value or value["resourceType"] is None: + raise TypeError("resourceType is required for Resource validation.") + + resource_type = value["resourceType"] + + subclass = cls.__resource_types.get(resource_type) + if subclass is None: + raise TypeError(f"Unknown resource type: {resource_type}") + + # Instantiate the subclass using the dictionary values. + return subclass.model_validate(value) + + @classmethod + def create(cls, **kwargs: Any) -> Self: + """Create a Resource instance with the correct resourceType.""" + return cls(resourceType=cls.__expected_resource_type[cls], **kwargs) + + @field_validator("resource_type", mode="after") + @classmethod + def _validate_resource_type(cls, value: str) -> str: + expected_resource_type = cls.__expected_resource_type[cls] + if value != expected_resource_type: + raise ValueError( + f"Resource type '{value}' does not match expected " + f"resource type '{expected_resource_type}'." + ) + return value + + +type BundleType = Literal["document", "transaction"] + + +class Bundle(Resource, resource_type="Bundle"): + """A FHIR R4 Bundle resource.""" + + bundle_type: BundleType = Field(alias="type", frozen=True) + identifier: Annotated[UUIDIdentifier | None, Field(frozen=True)] = None + entries: list["Bundle.Entry"] | None = Field(None, frozen=True, alias="entry") + + class Entry(BaseModel): + full_url: str = Field(..., alias="fullUrl", frozen=True) + resource: Annotated[SerializeAsAny[Resource], Field(frozen=True)] + + def find_resources[T: Resource](self, t: type[T]) -> list[T]: + """ + Find all resources of a given type in the bundle entries. If the bundle has no + entries, an empty list is returned. + Args: + t: The resource type to search for. + Returns: + A list of resources of the specified type. 
+ """ + return [ + entry.resource + for entry in self.entries or [] + if isinstance(entry.resource, t) + ] + + @classmethod + def empty(cls, bundle_type: BundleType) -> "Bundle": + """Create an empty Bundle of the specified type.""" + return cls.create(type=bundle_type, entry=None) + + +class Patient(Resource, resource_type="Patient"): + """A FHIR R4 Patient resource.""" + + class PatientIdentifier( + Identifier, expected_system="https://fhir.nhs.uk/Id/nhs-number" + ): + """A FHIR R4 Patient Identifier utilising the NHS Number system.""" + + def __init__(self, value: str): + super().__init__(value=value, system=self._expected_system) + + @classmethod + def from_nhs_number(cls, nhs_number: str) -> "Patient.PatientIdentifier": + """Create a PatientIdentifier from an NHS number.""" + return cls(value=nhs_number) + + identifier: Annotated[PatientIdentifier, Field(frozen=True)] diff --git a/pathology-api/src/pathology_api/fhir/r4/test_elements.py b/pathology-api/src/pathology_api/fhir/r4/test_elements.py new file mode 100644 index 0000000..628e0f1 --- /dev/null +++ b/pathology-api/src/pathology_api/fhir/r4/test_elements.py @@ -0,0 +1,97 @@ +import datetime +import uuid + +import pytest +from pydantic import BaseModel + +from .elements import Identifier, Meta, UUIDIdentifier + + +class TestMeta: + def test_create(self) -> None: + """Test creating a Meta element.""" + meta = Meta( + version_id="1", + last_updated=datetime.datetime.fromisoformat("2023-10-01T12:00:00Z"), + ) + assert meta.version_id == "1" + assert meta.last_updated == datetime.datetime.fromisoformat( + "2023-10-01T12:00:00Z" + ) + + def test_create_without_last_updated(self) -> None: + """Test creating a Meta element without last_updated.""" + meta = Meta(version_id="2") + + assert meta.version_id == "2" + assert meta.last_updated is None + + def test_create_without_version(self) -> None: + """Test creating a Meta element without version_id.""" + meta = Meta( + last_updated=datetime.datetime.fromisoformat("2023-10-01T12:00:00Z") + ) + + assert meta.version_id is None + assert meta.last_updated == datetime.datetime.fromisoformat( + "2023-10-01T12:00:00Z" + ) + + def test_with_last_updated(self) -> None: + """Test creating a Meta element using with_last_updated class method.""" + last_updated = datetime.datetime.fromisoformat("2023-10-01T12:00:00Z") + meta = Meta.with_last_updated(last_updated) + + assert meta.last_updated == last_updated + assert meta.version_id is None + + def test_with_last_updated_defaults_to_now(self) -> None: + """Test creating a Meta element with current time when last_updated is None.""" + before_create = datetime.datetime.now(tz=datetime.timezone.utc) + meta = Meta.with_last_updated(None) + after_create = datetime.datetime.now(tz=datetime.timezone.utc) + + assert meta.last_updated is not None + assert meta.version_id is None + + assert before_create <= meta.last_updated + assert meta.last_updated <= after_create + + +class TestUUIDIdentifier: + def test_create_with_value(self) -> None: + """Test creating a UUIDIdentifier with a specific UUID value.""" + expected_uuid = uuid.UUID("12345678-1234-5678-1234-567812345678") + identifier = UUIDIdentifier(value=expected_uuid) + + assert identifier.system == "https://tools.ietf.org/html/rfc4122" + assert identifier.value == str(expected_uuid) + + def test_create_without_value(self) -> None: + """Test creating a UUIDIdentifier without providing a UUID value.""" + identifier = UUIDIdentifier() + + assert identifier.system == "https://tools.ietf.org/html/rfc4122" + # 
Validates that value is a valid UUID v4 + parsed_uuid = uuid.UUID(identifier.value) + assert parsed_uuid.version == 4 + + +class TestIdentifier: + def test_invalid_system(self) -> None: + """Test that creating an Identifier with an invalid system raises ValueError.""" + + class _TestIdentifier(Identifier, expected_system="expected-system"): + pass + + class _TestContainer(BaseModel): + identifier: _TestIdentifier + + with pytest.raises( + ValueError, + match="Identifier system 'invalid-system' does not match expected " + "system 'expected-system'.", + ): + _TestContainer.model_validate( + {"identifier": {"system": "invalid-system", "value": "some-value"}} + ) diff --git a/pathology-api/src/pathology_api/fhir/r4/test_resources.py b/pathology-api/src/pathology_api/fhir/r4/test_resources.py new file mode 100644 index 0000000..b0632ac --- /dev/null +++ b/pathology-api/src/pathology_api/fhir/r4/test_resources.py @@ -0,0 +1,222 @@ +import json +from typing import Any + +import pytest +from pydantic import BaseModel + +from .resources import Bundle, Patient, Resource + + +class TestResource: + class _TestContainer(BaseModel): + resource: Resource + + def test_resource_deserialisation(self) -> None: + expected_system = "https://fhir.nhs.uk/Id/nhs-number" + expected_nhs_number = "nhs_number" + example_json = json.dumps( + { + "resource": { + "resourceType": "Patient", + "identifier": { + "system": expected_system, + "value": expected_nhs_number, + }, + } + } + ) + + created_object = self._TestContainer.model_validate_json(example_json) + assert isinstance(created_object.resource, Patient) + + created_patient = created_object.resource + assert created_patient.identifier is not None + assert created_patient.identifier.system == expected_system + assert created_patient.identifier.value == expected_nhs_number + + def test_resource_deserialisation_unknown_resource(self) -> None: + expected_resource_type = "UnknownResourceType" + example_json = json.dumps( + { + "resource": { + "resourceType": expected_resource_type, + } + } + ) + + with pytest.raises( + TypeError, + match=f"Unknown resource type: {expected_resource_type}", + ): + self._TestContainer.model_validate_json(example_json) + + @pytest.mark.parametrize( + "value", + [ + pytest.param({"resource": {}}, id="No resourceType key"), + pytest.param( + {"resource": {"resourceType": None}}, + id="resourceType is defined as None", + ), + ], + ) + def test_resource_deserialisation_without_resource_type( + self, value: dict[str, Any] + ) -> None: + example_json = json.dumps(value) + + with pytest.raises( + TypeError, + match="resourceType is required for Resource validation.", + ): + self._TestContainer.model_validate_json(example_json) + + @pytest.mark.parametrize( + ("json", "expected_error_message"), + [ + pytest.param( + json.dumps({"resourceType": "invalid", "type": "document"}), + "Value error, Resource type 'invalid' does not match expected " + "resource type 'Bundle'.", + id="Invalid resource type", + ), + pytest.param( + json.dumps({"resourceType": None, "type": "document"}), + "1 validation error for Bundle\nresourceType\n " + "Input should be a valid string", + id="Input should be a valid string", + ), + pytest.param( + json.dumps({"type": "document"}), + "1 validation error for Bundle\nresourceType\n Field required", + id="Missing resource type", + ), + ], + ) + def test_deserialise_wrong_resource_type( + self, json: str, expected_error_message: str + ) -> None: + with pytest.raises( + ValueError, + match=expected_error_message, + ): + 
Bundle.model_validate_json(json, strict=True) + + +class TestBundle: + def test_create(self) -> None: + """Test creating a Bundle resource.""" + expected_entry = Bundle.Entry( + fullUrl="full", + resource=Patient.create( + identifier=Patient.PatientIdentifier.from_nhs_number("nhs_number") + ), + ) + + bundle = Bundle.create( + type="document", + entry=[expected_entry], + ) + + assert bundle.bundle_type == "document" + assert bundle.identifier is None + assert bundle.entries == [expected_entry] + + def test_create_without_entries(self) -> None: + """Test creating a Bundle resource without entries.""" + bundle = Bundle.empty("document") + + assert bundle.bundle_type == "document" + assert bundle.identifier is None + assert bundle.entries is None + + expected_resource = Patient.create( + identifier=Patient.PatientIdentifier.from_nhs_number("nhs_number") + ) + + @pytest.mark.parametrize( + ("entries", "expected_results"), + [ + pytest.param( + [ + Bundle.Entry( + fullUrl="fullUrl", + resource=expected_resource, + ), + Bundle.Entry( + fullUrl="fullUrl", + resource=expected_resource, + ), + ], + [expected_resource, expected_resource], + id="Duplicate resources", + ), + pytest.param( + [ + Bundle.Entry( + fullUrl="fullUrl", + resource=expected_resource, + ), + ], + [expected_resource], + id="Single resource", + ), + ], + ) + def test_find_resources( + self, entries: list[Bundle.Entry], expected_results: list[Resource] + ) -> None: + bundle = Bundle.create(type="document", entry=entries) + + result = bundle.find_resources(Patient) + assert result == expected_results + + @pytest.mark.parametrize( + "bundle", + [ + pytest.param(Bundle.empty("document"), id="Bundle has no entries at all"), + pytest.param( + Bundle.create(type="document", entry=[]), + id="Bundle has an empty entries list", + ), + pytest.param( + Bundle.create( + type="document", + entry=[ + Bundle.Entry( + fullUrl="fullUrl", + resource=Bundle.empty("document"), + ), + ], + ), + id="different_resource_type", + ), + ], + ) + def test_find_resources_returns_empty_list(self, bundle: Bundle) -> None: + """ + Test that find_resources returns an empty list when no matching resources exist. 
+ """ + result = bundle.find_resources(Patient) + assert result == [] + + +class TestPatient: + def test_create(self) -> None: + """Test creating a Patient resource.""" + nhs_number = "1234567890" + + expected_identifier = Patient.PatientIdentifier.from_nhs_number(nhs_number) + patient = Patient.create(identifier=expected_identifier) + + assert patient.identifier == expected_identifier + + +class TestPatientIdentifier: + def test_create_from_nhs_number(self) -> None: + """Test creating a PatientIdentifier from an NHS number.""" + nhs_number = "1234567890" + identifier = Patient.PatientIdentifier.from_nhs_number(nhs_number) + + assert identifier.system == "https://fhir.nhs.uk/Id/nhs-number" + assert identifier.value == nhs_number diff --git a/pathology-api/src/pathology_api/handler.py b/pathology-api/src/pathology_api/handler.py index 940e382..f9cc0f0 100644 --- a/pathology-api/src/pathology_api/handler.py +++ b/pathology-api/src/pathology_api/handler.py @@ -1,13 +1,50 @@ -class User: - def __init__(self, name: str): - self._name = name +import logging +from collections.abc import Callable - @property - def name(self) -> str: - return self._name +from pathology_api.fhir.r4.elements import Meta, UUIDIdentifier +from pathology_api.fhir.r4.resources import Bundle, Patient +_logger = logging.getLogger(__name__) -def greet(user: User) -> str: - if user.name == "nonexistent": - raise ValueError("nonexistent user provided.") - return f"Hello, {user.name}!" + +def _ensure_test_result_references_patient(bundle: Bundle) -> None: + patient_references = [ + patient.identifier for patient in bundle.find_resources(t=Patient) + ] + if not patient_references: + raise ValueError( + "Test Result Bundle must reference at least one Patient resource." + ) + + _logger.debug("Bundle.entries %s", bundle.entries) + _logger.debug("Patient references found: %s", patient_references) + + if len(patient_references) > 1: + raise ValueError( + "Test Result Bundle must not reference more than one Patient resource." 
+ ) + + +type ValidationFunction = Callable[[Bundle], None] +_validation_functions: list[ValidationFunction] = [ + _ensure_test_result_references_patient, +] + + +def handle_request(bundle: Bundle) -> Bundle: + if bundle.identifier: + raise ValueError("Bundle with identifier is not allowed.") + + for validate_function in _validation_functions: + validate_function(bundle) + + _logger.debug("Bundle entries: %s", bundle.entries) + return_bundle = Bundle.create( + meta=Meta.with_last_updated(), + identifier=UUIDIdentifier(), + type=bundle.bundle_type, + entry=bundle.entries, + ) + _logger.debug("Return bundle: %s", return_bundle) + + return return_bundle diff --git a/pathology-api/src/pathology_api/test_handler.py b/pathology-api/src/pathology_api/test_handler.py index f403b7b..a7d15e8 100644 --- a/pathology-api/src/pathology_api/test_handler.py +++ b/pathology-api/src/pathology_api/test_handler.py @@ -1,54 +1,129 @@ +import datetime + import pytest -from pathology_api.handler import User, greet - - -class TestUser: - """Test suite for the User class.""" - - @pytest.mark.parametrize( - "name", - [ - "Alice", - "Bob", - "", - "O'Brien", - ], - ) - def test_user_initialization(self, name: str) -> None: - """Test that a User can be initialized with various names.""" - user = User(name) - assert user.name == name - - def test_user_name_is_immutable(self) -> None: - """Test that the name property cannot be directly modified.""" - user = User("Charlie") - with pytest.raises(AttributeError): - user.name = "David" # type: ignore[misc] - - -class TestGreet: - """Test suite for the greet function.""" - - @pytest.mark.parametrize( - ("name", "expected_greeting"), - [ - ("Alice", "Hello, Alice!"), - ("Bob", "Hello, Bob!"), - ("", "Hello, !"), - ("O'Brien", "Hello, O'Brien!"), - ("Nonexistent", "Hello, Nonexistent!"), - ("nonexistent ", "Hello, nonexistent !"), - ], - ) - def test_greet_with_valid_users(self, name: str, expected_greeting: str) -> None: - """Test that greet returns the correct greeting for various valid users.""" - user = User(name) - result = greet(user) - assert result == expected_greeting - - def test_greet_with_nonexistent_user_raises_value_error(self) -> None: - """Test that greet raises ValueError for nonexistent user.""" - user = User("nonexistent") - with pytest.raises(ValueError, match="nonexistent user provided."): - greet(user) +from pathology_api.fhir.r4.elements import UUIDIdentifier +from pathology_api.fhir.r4.resources import Bundle, Patient +from pathology_api.handler import handle_request + + +class TestHandleRequest: + """Test suite for the handle_request function.""" + + def test_handle_request(self) -> None: + """Test that handle_request processes a valid bundle correctly.""" + # Arrange + bundle = Bundle.create( + type="transaction", + entry=[ + Bundle.Entry( + fullUrl="patient", + resource=Patient.create( + identifier=Patient.PatientIdentifier.from_nhs_number( + "nhs_number" + ) + ), + ) + ], + ) + + # Act + before_call = datetime.datetime.now(tz=datetime.timezone.utc) + result_bundle = handle_request(bundle) + after_call = datetime.datetime.now(tz=datetime.timezone.utc) + + # Assert + assert result_bundle is not None + + assert result_bundle.identifier is not None + result_identifier = result_bundle.identifier + assert result_identifier.system == "https://tools.ietf.org/html/rfc4122" + + assert result_bundle.bundle_type == bundle.bundle_type + assert result_bundle.entries == bundle.entries + + # Verify last_updated field + assert result_bundle.meta is not None + 
created_meta = result_bundle.meta + + assert created_meta.last_updated is not None + assert before_call <= created_meta.last_updated + assert created_meta.last_updated <= after_call + + assert created_meta.version_id is None + + def test_handle_request_raises_error_when_no_patient_resource(self) -> None: + """ + Test that handle_request raises ValueError when bundle has no Patient resource. + """ + # Arrange + bundle = Bundle.create( + type="transaction", + entry=[], + ) + + # Act & Assert + with pytest.raises( + ValueError, + match="Test Result Bundle must reference at least one Patient resource.", + ): + handle_request(bundle) + + def test_handle_request_raises_error_when_multiple_patient_resources( + self, + ) -> None: + """ + Test that handle_request raises ValueError when bundle has multiple Patient + resources. + """ + # Arrange + patient = Patient.create( + identifier=Patient.PatientIdentifier.from_nhs_number("nhs_number_1") + ) + + bundle = Bundle.create( + type="transaction", + entry=[ + Bundle.Entry( + fullUrl="patient1", + resource=patient, + ), + Bundle.Entry( + fullUrl="patient2", + resource=patient, + ), + ], + ) + + # Act & Assert + with pytest.raises( + ValueError, + match="Test Result Bundle must not reference more than one Patient " + "resource.", + ): + handle_request(bundle) + + def test_handle_request_bundle_includes_identifier( + self, + ) -> None: + """ + Test that handle_request raises ValueError when bundle includes identifier + resources. + """ + # Arrange + patient = Patient.create( + identifier=Patient.PatientIdentifier.from_nhs_number("nhs_number_1") + ) + + bundle = Bundle.create( + identifier=UUIDIdentifier(), + type="transaction", + entry=[Bundle.Entry(fullUrl="patient1", resource=patient)], + ) + + # Act & Assert + with pytest.raises( + ValueError, + match="Bundle with identifier is not allowed.", + ): + handle_request(bundle) diff --git a/pathology-api/test_lambda_handler.py b/pathology-api/test_lambda_handler.py index df38367..0115e16 100644 --- a/pathology-api/test_lambda_handler.py +++ b/pathology-api/test_lambda_handler.py @@ -1,63 +1,226 @@ +import json +from typing import Any +from unittest.mock import patch + import pytest +from aws_lambda_powertools.utilities.typing import LambdaContext from lambda_handler import handler +from pathology_api.fhir.r4.resources import Bundle, Patient +from pydantic import ValidationError class TestHandler: """Unit tests for the Lambda handler function.""" + def _create_test_event( + self, + body: str | None = None, + path_params: str | None = None, + request_method: str | None = None, + ) -> dict[str, Any]: + return { + "body": body, + "requestContext": { + "http": { + "path": f"/{path_params}", + "method": request_method, + }, + "requestId": "request-id", + "stage": "$default", + }, + "httpMethod": request_method, + "rawPath": f"/{path_params}", + "rawQueryString": "", + "pathParameters": {"proxy": path_params}, + } + + def test_create_test_result_success(self) -> None: + """Test create test result returns 200 with processed bundle for valid input.""" + bundle = Bundle.create( + type="transaction", + entry=[ + Bundle.Entry( + fullUrl="patient", + resource=Patient.create( + identifier=Patient.PatientIdentifier.from_nhs_number( + "nhs_number" + ) + ), + ) + ], + ) + event = self._create_test_event( + body=bundle.model_dump_json(by_alias=True), + path_params="FHIR/R4/Bundle", + request_method="POST", + ) + context = LambdaContext() + + # Act + response = handler(event, context) + + # Assert + assert 
response["statusCode"] == 200 + assert response["headers"] == {"Content-Type": "application/fhir+json"} + + response_body = response["body"] + assert isinstance(response_body, str) + + response_bundle = Bundle.model_validate_json(response_body, by_alias=True) + assert response_bundle.bundle_type == bundle.bundle_type + assert response_bundle.entries == bundle.entries + + assert response_bundle.identifier is not None + assert ( + response_bundle.identifier.system == "https://tools.ietf.org/html/rfc4122" + ) + # A UUID value so can only check its presence. + assert response_bundle.identifier.value is not None + + def test_create_test_result_no_payload(self) -> None: + """Test create test result returns 400 when no payload is provided.""" + # Arrange + event = self._create_test_event( + path_params="FHIR/R4/Bundle", request_method="POST" + ) + context = LambdaContext() + + # Act + response = handler(event, context) + + # Assert + assert response["statusCode"] == 400 + assert response["body"] == "No payload provided." + assert response["headers"] == {"Content-Type": "text/plain"} + + def test_create_test_result_empty_payload(self) -> None: + """Test create test result returns 400 when empty payload is provided.""" + # Arrange + event = self._create_test_event( + body="{}", path_params="FHIR/R4/Bundle", request_method="POST" + ) + context = LambdaContext() + + # Act + response = handler(event, context) + + # Assert + assert response["statusCode"] == 400 + assert response["body"] == "No payload provided." + assert response["headers"] == {"Content-Type": "text/plain"} + + def test_create_test_result_invalid_json(self) -> None: + """Test create test result handles invalid JSON payload.""" + # Arrange + event = self._create_test_event( + body="invalid json", path_params="FHIR/R4/Bundle", request_method="POST" + ) + context = LambdaContext() + + response = handler(event, context) + + # Assert + assert response["statusCode"] == 400 + assert response["body"] == "Invalid payload provided." + assert response["headers"] == {"Content-Type": "text/plain"} + + def test_create_test_result_processing_error(self) -> None: + """Test create test result returns 400 when handle_request raises ValueError.""" + # Arrange + bundle = Bundle.empty(bundle_type="transaction") + event = self._create_test_event( + body=bundle.model_dump_json(by_alias=True), + path_params="FHIR/R4/Bundle", + request_method="POST", + ) + context = LambdaContext() + error_message = "Test processing error" + + expected_error = ValueError(error_message) + with patch("lambda_handler.handle_request", side_effect=expected_error): + # Act + response = handler(event, context) + + # Assert + assert response["statusCode"] == 400 + assert response["body"] == "Error processing provided bundle." 
+        assert response["headers"] == {"Content-Type": "text/plain"}
+
     @pytest.mark.parametrize(
-        ("name", "expected_greeting"),
+        "expected_error",
         [
-            ("Alice", "Hello, Alice!"),
-            ("Bob", "Hello, Bob!"),
-            ("John Doe", "Hello, John Doe!"),
-            ("user123", "Hello, user123!"),
+            pytest.param(
+                TypeError("Test type error"),
+                id="TypeError",
+            ),
+            pytest.param(
+                ValidationError("Test validation error", []),
+                id="ValidationError",
+            ),
         ],
-        ids=["simple_name_alice", "simple_name_bob", "name_with_space", "alphanumeric"],
     )
-    def test_handler_success(self, name: str, expected_greeting: str) -> None:
-        """Test handler returns 200 with greeting for valid names."""
+    def test_create_test_result_parse_json_error(
+        self, expected_error: Exception
+    ) -> None:
+        """Test create test result returns 400 when Bundle parsing raises an error."""
+        # Arrange
+        bundle = Bundle.empty(bundle_type="transaction")
+        event = self._create_test_event(
+            body=bundle.model_dump_json(by_alias=True),
+            path_params="FHIR/R4/Bundle",
+            request_method="POST",
+        )
+        context = LambdaContext()
+
+        with patch(
+            "pathology_api.fhir.r4.resources.Bundle.model_validate",
+            side_effect=expected_error,
+        ):
+            # Act
+            response = handler(event, context)
+
+        # Assert
+        assert response["statusCode"] == 400
+        assert response["body"] == "Invalid payload provided."
+        assert response["headers"] == {"Content-Type": "text/plain"}
+
+    def test_status_success(self) -> None:
+        """Test status function returns 200 OK."""
         # Arrange
-        event = {"payload": name}
-        context: dict[str, str] = {}
+        event = self._create_test_event(path_params="_status", request_method="GET")
+        context = LambdaContext()
 
         # Act
         response = handler(event, context)
 
         # Assert
         assert response["statusCode"] == 200
-        assert response["body"] == expected_greeting
-        assert response["headers"] == {"Content-Type": "application/json"}
+        assert response["body"] == "OK"
+        assert response["headers"] == {"Content-Type": "text/plain"}
 
     @pytest.mark.parametrize(
-        ("event", "expected_status", "expected_body"),
+        ("request_method", "request_parameter"),
         [
-            ({"other_key": "value"}, 400, "Name is required"),
-            ({"payload": ""}, 400, "Name cannot be empty"),
-            ({"payload": None}, 400, "Name cannot be empty"),
-            (
-                {"payload": "nonexistent"},
-                404,
-                "Provided name cannot be found. name=nonexistent",
-            ),
-        ],
-        ids=[
-            "missing_payload_key",
-            "empty_payload",
-            "none_payload",
-            "nonexistent_user",
+            pytest.param("GET", "unknown_path", id="Unknown path"),
+            pytest.param("GET", "FHIR/R4/Bundle", id="Unknown GET method"),
+            pytest.param("POST", "_status", id="Unknown POST method"),
         ],
     )
-    def test_handler_error_cases(
-        self, event: dict[str, str], expected_status: int, expected_body: str
-    ) -> None:
-        """Test handler returns appropriate error responses for invalid or
-        nonexistent input.
-        """
+    def test_invalid_request(self, request_method: str, request_parameter: str) -> None:
+        """Test that unknown request methods and paths return a 404."""
+        # Arrange
+        event = self._create_test_event(
+            path_params=request_parameter, request_method=request_method
+        )
+        context = LambdaContext()
+
         # Act
-        response = handler(event, {})
+        response = handler(event, context)
 
         # Assert
-        assert response["statusCode"] == expected_status
-        assert response["body"] == expected_body
+        assert response["statusCode"] == 404
+        assert json.loads(response["body"]) == {
+            "statusCode": 404,
+            "message": "Not found",
+        }
         assert response["headers"] == {"Content-Type": "application/json"}
diff --git a/pathology-api/tests/acceptance/features/bundle_endpoint.feature b/pathology-api/tests/acceptance/features/bundle_endpoint.feature
new file mode 100644
index 0000000..2644faf
--- /dev/null
+++ b/pathology-api/tests/acceptance/features/bundle_endpoint.feature
@@ -0,0 +1,16 @@
+Feature: pathology Bundle API
+  As an API consumer
+  I want to interact with the pathology API
+  So that I can verify it responds correctly to valid and invalid requests
+
+  Background: The API is running
+    Given the API is running
+
+  Scenario: Send a valid Bundle
+    When I send a valid Bundle to the Pathology API
+    Then the response status code should be 200
+    And the response should contain a valid "document" Bundle
+
+  Scenario: Sending an invalid bundle
+    When I send an invalid Bundle to the Pathology API
+    Then the response status code should be 400
diff --git a/pathology-api/tests/acceptance/features/hello_world.feature b/pathology-api/tests/acceptance/features/hello_world.feature
deleted file mode 100644
index 831c204..0000000
--- a/pathology-api/tests/acceptance/features/hello_world.feature
+++ /dev/null
@@ -1,16 +0,0 @@
-Feature: pathology API Hello World
-  As an API consumer
-  I want to interact with the pathology API
-  So that I can verify it responds correctly to valid and invalid requests
-
-  Background: The API is running
-    Given the API is running
-
-  Scenario: Get hello world message
-    When I send "World" to the endpoint
-    Then the response status code should be 200
-    And the response should contain "Hello, World!"
-
-  Scenario: Accessing a non-existent endpoint returns a 404
-    When I send "nonexistent" to the endpoint
-    Then the response status code should be 404
diff --git a/pathology-api/tests/acceptance/scenarios/test_bundle_endpoint.py b/pathology-api/tests/acceptance/scenarios/test_bundle_endpoint.py
new file mode 100644
index 0000000..588dcb4
--- /dev/null
+++ b/pathology-api/tests/acceptance/scenarios/test_bundle_endpoint.py
@@ -0,0 +1,19 @@
+"""
+Provides the scenario bindings for the bundle endpoint feature file.
+"""
+
+from pytest_bdd import scenario
+
+from tests.acceptance.steps.bundle_endpoint_steps import *  # noqa: F403,S2208 - Required to import all bundle endpoint steps.
+
+
+@scenario("bundle_endpoint.feature", "Send a valid Bundle")
+def test_send_valid_bundle() -> None:
+    # No body required here as this method simply provides a binding to the BDD step
+    pass
+
+
+@scenario("bundle_endpoint.feature", "Sending an invalid bundle")
+def test_sending_invalid_bundle() -> None:
+    # No body required here as this method simply provides a binding to the BDD step
+    pass
diff --git a/pathology-api/tests/acceptance/scenarios/test_hello_world.py b/pathology-api/tests/acceptance/scenarios/test_hello_world.py
deleted file mode 100644
index 93ed4a1..0000000
--- a/pathology-api/tests/acceptance/scenarios/test_hello_world.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"""
-Provides the scenario bindings for the hello world feature file.
-"""
-
-from pytest_bdd import scenario
-
-from tests.acceptance.steps.hello_world_steps import *  # noqa: F403,S2208 - Required to import all hello world steps.
-
-
-@scenario("hello_world.feature", "Get hello world message")
-def test_hello_world() -> None:
-    # No body required here as this method simply provides a binding to the BDD step
-    pass
-
-
-@scenario("hello_world.feature", "Accessing a non-existent endpoint returns a 404")
-def test_nonexistent_route() -> None:
-    # No body required here as this method simply provides a binding to the BDD step
-    pass
diff --git a/pathology-api/tests/acceptance/steps/bundle_endpoint_steps.py b/pathology-api/tests/acceptance/steps/bundle_endpoint_steps.py
new file mode 100644
index 0000000..3068888
--- /dev/null
+++ b/pathology-api/tests/acceptance/steps/bundle_endpoint_steps.py
@@ -0,0 +1,130 @@
+"""Step definitions for the pathology API bundle endpoint feature."""
+
+import requests
+from pathology_api.fhir.r4.resources import Bundle, BundleType, Patient
+from pytest_bdd import given, parsers, then, when
+
+from tests.acceptance.conftest import ResponseContext
+from tests.conftest import Client
+
+
+@given("the API is running")
+def step_api_is_running(client: Client) -> None:
+    """Verify the API test client is available.
+
+    Args:
+        client: Test client from conftest.py
+    """
+    response = client.send_without_payload(path="_status", request_method="GET")
+    assert response.text == "OK"
+    assert response.status_code == 200
+
+
+@when("I send a valid Bundle to the Pathology API")
+def step_send_valid_bundle(client: Client, response_context: ResponseContext) -> None:
+    """
+    Send a valid Bundle to the API.
+
+    Args:
+        client: Test client
+        response_context: Context to store the response
+    """
+    response_context.response = client.send(
+        path="FHIR/R4/Bundle",
+        request_method="POST",
+        data=Bundle.create(
+            type="document",
+            entry=[
+                Bundle.Entry(
+                    fullUrl="patient",
+                    resource=Patient.create(
+                        identifier=Patient.PatientIdentifier.from_nhs_number(
+                            "nhs_number"
+                        )
+                    ),
+                )
+            ],
+        ).model_dump_json(by_alias=True, exclude_none=True),
+    )
+
+
+@when("I send an invalid Bundle to the Pathology API")
+def step_send_invalid_bundle(client: Client, response_context: ResponseContext) -> None:
+    """
+    Send an invalid request to the API.
+
+    Args:
+        client: Test client
+        response_context: Context to store the response
+    """
+    bundle = Bundle.empty(bundle_type="document").model_dump_json(
+        by_alias=True, exclude_none=True
+    )
+
+    response_context.response = client.send(
+        path="FHIR/R4/Bundle", request_method="POST", data=bundle
+    )
+
+
+# fmt: off
+@then(parsers.cfparse("the response status code should be {expected_status:d}",extra_types={"expected_status": int}))  # noqa: E501 - BDD steps must be declared on a single line.
+# fmt: on
+def step_check_status_code(
+    response_context: ResponseContext, expected_status: int
+) -> None:
+    """Verify the response status code matches expected value.
+
+    Args:
+        response_context: Context containing the response
+        expected_status: Expected HTTP status code
+    """
+    response = _validate_response_set(response_context)
+
+    assert response.status_code == expected_status, (
+        f"Expected status {expected_status}, "
+        f"got {response.status_code}"
+    )
+
+
+@then(parsers.cfparse('the response should contain "{expected_text}"'))
+def step_check_response_contains(
+    response_context: ResponseContext, expected_text: str
+) -> None:
+    """Verify the response contains the expected text.
+
+    Args:
+        response_context: Context containing the response
+        expected_text: Text that should be in the response
+    """
+    response = _validate_response_set(response_context)
+
+    assert expected_text in response.text, (
+        f"Expected '{expected_text}' in response, got: {response.text}"
+    )
+
+@then(parsers.cfparse('the response should contain a valid "{expected_type}" Bundle'))
+def step_check_response_contains_valid_bundle(
+    response_context: ResponseContext,
+    expected_type: BundleType
+) -> None:
+    """Verify the response contains a valid FHIR Bundle.
+
+    Args:
+        response_context: Context containing the response
+    """
+    response = _validate_response_set(response_context)
+
+    response_data = response.json()
+    bundle = Bundle.model_validate(response_data, by_alias=True)
+
+    assert bundle.bundle_type == expected_type, (
+        f"Expected bundle type '{expected_type}', got: '{bundle.bundle_type}'"
+    )
+
+    assert bundle.identifier is not None, "Bundle identifier is missing."
+    assert bundle.identifier.system == "https://tools.ietf.org/html/rfc4122"
+    assert bundle.identifier.value is not None, "Bundle identifier value is missing."
+
+def _validate_response_set(response_context: ResponseContext) -> requests.Response:
+    assert response_context.response is not None, "Response has not been set."
+    return response_context.response
diff --git a/pathology-api/tests/acceptance/steps/hello_world_steps.py b/pathology-api/tests/acceptance/steps/hello_world_steps.py
deleted file mode 100644
index 88edff5..0000000
--- a/pathology-api/tests/acceptance/steps/hello_world_steps.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""Step definitions for pathology API hello world feature."""
-
-from pytest_bdd import given, parsers, then, when
-
-from tests.acceptance.conftest import ResponseContext
-from tests.conftest import Client
-
-
-@given("the API is running")
-def step_api_is_running(client: Client) -> None:
-    """Verify the API test client is available.
-
-    Args:
-        client: Test client from conftest.py
-    """
-    response = client.send("test")
-    assert response.text is not None
-    assert response.status_code == 200
-
-
-@when(parsers.cfparse('I send "{message}" to the endpoint'))
-def step_send_get_request(
-    client: Client, message: str, response_context: ResponseContext
-) -> None:
-    """Send a GET request to the specified endpoint.
-
-    Args:
-        client: Test client
-        endpoint: The API endpoint path to request
-    """
-    response_context.response = client.send(message)
-
-
-# fmt: off
-@then(parsers.cfparse("the response status code should be {expected_status:d}",extra_types={"expected_status": int}))  # noqa: E501 - BDD steps must be declared on a singular line.
-# fmt: on
-def step_check_status_code(
-    response_context: ResponseContext, expected_status: int
-) -> None:
-    """Verify the response status code matches expected value.
- - Args: - context: Behave context containing the response - expected_status: Expected HTTP status code - """ - assert response_context.response, "Response has not been set." - - data = response_context.response.json() - - assert data["statusCode"] == expected_status, ( - f"Expected status {expected_status}, " - f"got {response_context.response.status_code}" - ) - - -@then(parsers.cfparse('the response should contain "{expected_text}"')) -def step_check_response_contains( - response_context: ResponseContext, expected_text: str -) -> None: - """Verify the response contains the expected text. - - Args: - context: Behave context containing the response - expected_text: Text that should be in the response - """ - assert response_context.response, "Response has not been set." - - assert expected_text in response_context.response.text, ( - f"Expected '{expected_text}' in response, got: {response_context.response.text}" - ) diff --git a/pathology-api/tests/conftest.py b/pathology-api/tests/conftest.py index 1fced17..7882853 100644 --- a/pathology-api/tests/conftest.py +++ b/pathology-api/tests/conftest.py @@ -1,9 +1,8 @@ """Pytest configuration and shared fixtures for pathology API tests.""" -import json import os from datetime import timedelta -from typing import cast +from typing import Literal, cast import pytest import requests @@ -19,33 +18,58 @@ def __init__(self, lambda_url: str, timeout: timedelta = timedelta(seconds=1)): self._lambda_url = lambda_url self._timeout = timeout.total_seconds() - def send(self, data: str) -> requests.Response: + def send( + self, data: str, path: str, request_method: Literal["GET", "POST"] + ) -> requests.Response: """ Send a request to the APIs with some given parameters. Args: data: The data to send in the request payload + path: The path to send the request to + request_method: The HTTP method to use for the request Returns: Response object from the request """ - return self._send(data=data, include_payload=True) + return self._send( + data=data, path=path, include_payload=True, request_method=request_method + ) - def send_without_payload(self) -> requests.Response: + def send_without_payload( + self, path: str, request_method: Literal["GET", "POST"] + ) -> requests.Response: """ Send a request to the APIs without a payload. 
+ Args: + path: The path to send the request to + request_method: The HTTP method to use for the request Returns: Response object from the request """ - return self._send(data=None, include_payload=False) - - def _send(self, data: str | None, include_payload: bool) -> requests.Response: - json_data = {"payload": data} if include_payload else {} - - return requests.post( - f"{self._lambda_url}/2015-03-31/functions/function/invocations", - data=json.dumps(json_data), - timeout=self._timeout, + return self._send( + data=None, path=path, include_payload=False, request_method=request_method ) + def _send( + self, + data: str | None, + path: str, + include_payload: bool, + request_method: Literal["GET", "POST"], + ) -> requests.Response: + match request_method: + case "POST": + return requests.post( + f"{self._lambda_url}/{path}", + data=data if include_payload else None, + timeout=self._timeout, + ) + case "GET": + return requests.get( + f"{self._lambda_url}/{path}", + timeout=self._timeout, + data=data if include_payload else None, + ) + @pytest.fixture(scope="module") def client(base_url: str) -> Client: diff --git a/pathology-api/tests/contract/pacts/PathologyAPIConsumer-PathologyAPIProvider.json b/pathology-api/tests/contract/pacts/PathologyAPIConsumer-PathologyAPIProvider.json index f682b72..771dc4f 100644 --- a/pathology-api/tests/contract/pacts/PathologyAPIConsumer-PathologyAPIProvider.json +++ b/pathology-api/tests/contract/pacts/PathologyAPIConsumer-PathologyAPIProvider.json @@ -16,12 +16,25 @@ "type": "Synchronous/HTTP" }, { - "description": "a request for the hello world message", + "description": "a request for the Bundle endpoint", "pending": false, "request": { "body": { "content": { - "payload": "World" + "entry": [ + { + "fullUrl": "patient", + "resource": { + "identifier": { + "system": "https://fhir.nhs.uk/Id/nhs-number", + "value": "nhs_number" + }, + "resourceType": "Patient" + } + } + ], + "resourceType": "Bundle", + "type": "document" }, "contentType": "application/json", "encoded": false @@ -32,17 +45,90 @@ ] }, "method": "POST", - "path": "/2015-03-31/functions/function/invocations" + "path": "/FHIR/R4/Bundle" }, "response": { "body": { - "content": "{\"statusCode\": 200, \"headers\": {\"Content-Type\": \"application/json\"}, \"body\": \"Hello, World!\"}", - "contentType": "text/plain;charset=utf-8", + "content": { + "entry": [ + { + "fullUrl": "patient", + "resource": { + "identifier": { + "system": "https://fhir.nhs.uk/Id/nhs-number", + "value": "nhs_number" + }, + "resourceType": "Patient" + } + } + ], + "identifier": { + "system": "https://tools.ietf.org/html/rfc4122", + "value": null + }, + "meta": { + "lastUpdated": "2026-01-16T12:00:00.000Z" + }, + "resourceType": "Bundle", + "type": "document" + }, + "contentType": "application/fhir+json", + "encoded": false + }, + "generators": { + "body": { + "$.identifier.value": { + "type": "Uuid" + } + } + }, + "headers": { + "Content-Type": [ + "application/fhir+json" + ] + }, + "matchingRules": { + "body": { + "$.identifier.value": { + "combine": "AND", + "matchers": [ + { + "match": "regex", + "regex": "^([0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}|[0-9A-F]{8}(-[0-9A-F]{4}){3}-[0-9A-F]{12})$" + } + ] + }, + "$.meta.lastUpdated": { + "combine": "AND", + "matchers": [ + { + "format": "yyyy-MM-dd'T'HH:mm:ss.SSSSSS'Z'", + "match": "datetime" + } + ] + } + } + }, + "status": 200 + }, + "type": "Synchronous/HTTP" + }, + { + "description": "a request for the status endpoint", + "pending": false, + "request": { + "method": "GET", + 
"path": "/_status" + }, + "response": { + "body": { + "content": "OK", + "contentType": "text/plain", "encoded": false }, "headers": { "Content-Type": [ - "text/plain;charset=utf-8" + "text/plain" ] }, "status": 200 diff --git a/pathology-api/tests/contract/test_consumer_contract.py b/pathology-api/tests/contract/test_consumer_contract.py index 354bf8c..7c44873 100644 --- a/pathology-api/tests/contract/test_consumer_contract.py +++ b/pathology-api/tests/contract/test_consumer_contract.py @@ -5,38 +5,76 @@ """ import requests -from pact import Pact +from pact import Pact, match class TestConsumerContract: """Consumer contract tests to define expected API behavior.""" - def test_get_hello_world(self) -> None: - """Test the consumer's expectation of the hello world endpoint. + def test_post_bundle(self) -> None: + """Test the consumer's expectation of the Bundle endpoint. This test defines the contract: when the consumer requests - GET/PUT/POST/PATCH/TRACE/DELETE to the - /2015-03-31/functions/function/invocations endpoint, with a payload of "World", - a 200 response containing "Hello, World!" is returned. + POST to the Bundle endpoint, with a valid Bundle, + a 200 response containing the newly created Bundle is returned. """ pact = Pact(consumer="PathologyAPIConsumer", provider="PathologyAPIProvider") + request_body = { + "resourceType": "Bundle", + "type": "document", + "entry": [ + { + "fullUrl": "patient", + "resource": { + "resourceType": "Patient", + "identifier": { + "system": "https://fhir.nhs.uk/Id/nhs-number", + "value": "nhs_number", + }, + }, + } + ], + } + + response_body = { + "resourceType": "Bundle", + "type": "document", + "entry": [ + { + "fullUrl": "patient", + "resource": { + "resourceType": "Patient", + "identifier": { + "system": "https://fhir.nhs.uk/Id/nhs-number", + "value": "nhs_number", + }, + }, + } + ], + "identifier": { + "system": "https://tools.ietf.org/html/rfc4122", + "value": match.uuid(), + }, + "meta": { + "lastUpdated": match.datetime( + "2026-01-16T12:00:00.000Z", format="%Y-%m-%dT%H:%M:%S.%fZ" + ), + }, + } + # Define the expected interaction ( - pact.upon_receiving("a request for the hello world message") - .with_body({"payload": "World"}) + pact.upon_receiving("a request for the Bundle endpoint") + .with_body(request_body) .with_request( method="POST", - path="/2015-03-31/functions/function/invocations", + path="/FHIR/R4/Bundle", ) .will_respond_with(status=200) .with_body( - { - "statusCode": 200, - "headers": {"Content-Type": "application/json"}, - "body": "Hello, World!", - }, - content_type="text/plain;charset=utf-8", + response_body, + content_type="application/fhir+json", ) ) @@ -44,17 +82,46 @@ def test_get_hello_world(self) -> None: with pact.serve() as server: # Make the actual request to the mock provider response = requests.post( - f"{server.url}/2015-03-31/functions/function/invocations", - json={"payload": "World"}, + f"{server.url}/FHIR/R4/Bundle", + json=request_body, timeout=10, ) # Verify the response matches expectations assert response.status_code == 200 - body = response.json() - assert body["body"] == "Hello, World!" - assert body["statusCode"] == 200 - assert body["headers"] == {"Content-Type": "application/json"} + assert response.headers["Content-Type"] == "application/fhir+json" + + # Write the pact file after the test + pact.write_file("tests/contract/pacts") + + def test_status(self) -> None: + """Test the consumer's expectation of the status endpoint. 
+ + This test defines the contract: when the consumer requests + GET to the status endpoint, a 200 response with "OK" body is returned. + """ + pact = Pact(consumer="PathologyAPIConsumer", provider="PathologyAPIProvider") + + # Define the expected interaction + ( + pact.upon_receiving("a request for the status endpoint") + .with_request(method="GET", path="/_status") + .will_respond_with(status=200) + .with_body( + "OK", + content_type="text/plain", + ) + ) + + # Start the mock server and execute the test + with pact.serve() as server: + # Make the actual request to the mock provider + response = requests.get(f"{server.url}/_status", timeout=10) + + # Verify the response matches expectations + assert response.status_code == 200 + assert response.text == "OK" + assert response.headers["Content-Type"] == "text/plain" # Write the pact file after the test pact.write_file("tests/contract/pacts") diff --git a/pathology-api/tests/integration/test_main.py b/pathology-api/tests/integration/test_main.py index 5ccad8e..283d378 100644 --- a/pathology-api/tests/integration/test_main.py +++ b/pathology-api/tests/integration/test_main.py @@ -1,49 +1,121 @@ """Integration tests for the pathology API using pytest.""" -from tests.conftest import Client +from pathology_api.fhir.r4.resources import Bundle, Patient +from tests.conftest import Client -class TestHelloWorld: - """Test suite for the hello world endpoint.""" - def test_hello_world_returns_200(self, client: Client) -> None: - """Test that the root endpoint returns a 200 status code.""" - response = client.send("world") - assert response.status_code == 200 +class TestBundleEndpoint: + """Test suite for the bundle endpoint.""" - def test_hello_world_returns_correct_message(self, client: Client) -> None: - """Test that the root endpoint returns the correct message.""" - response = client.send("World") - assert response.json()["body"] == "Hello, World!" + def test_bundle_returns_200(self, client: Client) -> None: + """Test that the bundle endpoint returns a 200 status code.""" + bundle = Bundle.create( + type="document", + entry=[ + Bundle.Entry( + fullUrl="patient", + resource=Patient.create( + identifier=Patient.PatientIdentifier.from_nhs_number( + "nhs_number" + ) + ), + ) + ], + ) - def test_hello_world_content_type(self, client: Client) -> None: - """Test that the response has the correct content type.""" - response = client.send("world") - assert "text/plain" in response.headers["Content-Type"] + response = client.send( + data=bundle.model_dump_json(by_alias=True), + path="FHIR/R4/Bundle", + request_method="POST", + ) - def test_nonexistent_returns_error(self, client: Client) -> None: - """Test that non-existent routes return 404.""" - response = client.send("nonexistent") assert response.status_code == 200 + assert response.headers["Content-Type"] == "application/fhir+json" + + response_data = response.json() + response_bundle = Bundle.model_validate(response_data, by_alias=True) - body = response.json().get("body") - assert body == "Provided name cannot be found. name=nonexistent" + assert response_bundle.bundle_type == bundle.bundle_type + assert response_bundle.entries == bundle.entries - status_code = response.json().get("statusCode") - assert status_code == 404 + assert response_bundle.identifier is not None + response_identifier = response_bundle.identifier + assert response_identifier.system == "https://tools.ietf.org/html/rfc4122" + # A UUID value so can only check its presence. 
+ assert response_identifier.value is not None + + assert response_bundle.meta is not None + response_meta = response_bundle.meta + assert response_meta.last_updated is not None + assert response_meta.version_id is None def test_no_payload_returns_error(self, client: Client) -> None: """Test that an error is returned when no payload is provided.""" - response = client.send_without_payload() - assert response.status_code == 200 + response = client.send_without_payload( + request_method="POST", path="FHIR/R4/Bundle" + ) + assert response.status_code == 400 - body = response.json().get("body") - assert body == "Name is required" + response_data = response.text + assert response_data == "No payload provided." + + assert response.status_code == 400 def test_empty_name_returns_error(self, client: Client) -> None: """Test that an error is returned when an empty name is provided.""" - response = client.send("") + response = client.send(data="", request_method="POST", path="FHIR/R4/Bundle") + assert response.status_code == 400 + + response_data = response.text + assert response_data == "No payload provided." + + def test_invalid_request_method(self, client: Client) -> None: + """Test that an error is returned when an invalid request method is used.""" + + bundle = Bundle.create( + type="document", + entry=[ + Bundle.Entry( + fullUrl="patient", + resource=Patient.create( + identifier=Patient.PatientIdentifier.from_nhs_number( + "nhs_number" + ) + ), + ) + ], + ) + + response = client.send( + data=bundle.model_dump_json(by_alias=True), + request_method="GET", + path="FHIR/R4/Bundle", + ) + assert response.status_code == 404 + assert response.headers["Content-Type"] == "application/json" + assert response.json() == {"message": "Not found", "statusCode": 404} + + +class TestStatusEndpoint: + """Test suite for the status endpoint.""" + + def test_status_returns_200(self, client: Client) -> None: + """Test that the status endpoint returns a 200 status code.""" + response = client.send_without_payload(request_method="GET", path="_status") assert response.status_code == 200 + assert response.headers["Content-Type"] == "text/plain" + + response_data = response.text + assert response_data == "OK" - body = response.json().get("body") - assert body == "Name cannot be empty" + def test_invalid_request_method(self, client: Client) -> None: + """Test that an error is returned when an invalid request method is used.""" + response = client.send( + data="", + request_method="POST", + path="_status", + ) + assert response.status_code == 404 + assert response.headers["Content-Type"] == "application/json" + assert response.json() == {"message": "Not found", "statusCode": 404} diff --git a/scripts/tests/run-test.sh b/scripts/tests/run-test.sh index bfe48d6..0b972eb 100755 --- a/scripts/tests/run-test.sh +++ b/scripts/tests/run-test.sh @@ -25,7 +25,7 @@ cd "$(git rev-parse --show-toplevel)" # Determine test path based on test type if [ "$TEST_TYPE" = "unit" ]; then - TEST_PATH="test_*.py src/*/test_*.py" + TEST_PATH="test_*.py src/" else TEST_PATH="tests/${TEST_TYPE}/" fi
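For manual smoke-testing, the endpoints introduced in this change can be exercised with a few lines of Python. This is only a sketch mirroring the request and response shapes used by the integration and contract tests above: the base URL is a placeholder for wherever the local Lambda (or API gateway mock) happens to be exposed, and the NHS number is the same dummy value the tests use.

import requests

BASE_URL = "http://localhost:5001"  # placeholder: use your local Lambda / API mock address

# The status endpoint returns a plain-text "OK" on GET.
status = requests.get(f"{BASE_URL}/_status", timeout=5)
assert status.status_code == 200 and status.text == "OK"

# A minimal FHIR R4 Bundle referencing a single Patient with an NHS-number identifier.
bundle = {
    "resourceType": "Bundle",
    "type": "document",
    "entry": [
        {
            "fullUrl": "patient",
            "resource": {
                "resourceType": "Patient",
                "identifier": {
                    "system": "https://fhir.nhs.uk/Id/nhs-number",
                    "value": "nhs_number",
                },
            },
        }
    ],
}

response = requests.post(f"{BASE_URL}/FHIR/R4/Bundle", json=bundle, timeout=5)
assert response.status_code == 200
assert response.headers["Content-Type"] == "application/fhir+json"
# The handler echoes the entries back and adds a generated RFC 4122 identifier
# plus a meta.lastUpdated timestamp.
print(response.json()["identifier"]["value"])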