diff --git a/.coverage b/.coverage
deleted file mode 100644
index ade9ec2f..00000000
Binary files a/.coverage and /dev/null differ
diff --git a/.github/workflows/stage-2-test.yaml b/.github/workflows/stage-2-test.yaml
index ee95a848..3d4a8fa2 100644
--- a/.github/workflows/stage-2-test.yaml
+++ b/.github/workflows/stage-2-test.yaml
@@ -72,9 +72,10 @@ jobs:
- uses: actions/setup-node@v6
with:
node-version: 24.10.0
- - uses: actions/setup-python@v6
+ - name: "Setup Python"
+ uses: actions/setup-python@v6
with:
- python-version: '3.14'
+ python-version: ${{ inputs.python_version }}
- name: "Run unit test suite"
run: |
make test-unit
@@ -94,7 +95,10 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: python-coverage-reports
- path: "src/**/coverage.xml"
+ path: |
+ src/**/coverage.xml
+ utils/**/coverage.xml
+ lambdas/**/coverage.xml
test-lint:
name: "Linting"
runs-on: ubuntu-latest
@@ -102,6 +106,10 @@ jobs:
steps:
- name: "Checkout code"
uses: actions/checkout@v5
+ - name: "Setup Python"
+ uses: actions/setup-python@v6
+ with:
+ python-version: ${{ inputs.python_version }}
- uses: actions/setup-node@v6
with:
node-version: 24.10.0
@@ -156,7 +164,7 @@ jobs:
uses: actions/download-artifact@v5
with:
name: python-coverage-reports
- path: src/
+ path: .
- name: "Perform static analysis"
uses: ./.github/actions/perform-static-analysis
with:
diff --git a/.gitignore b/.gitignore
index 498acdd9..cf7d4b6a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,6 +12,15 @@ version.json
# Please, add your custom content below!
+# Don't track ephemeral local build files
+target/
+
+# Python build artifacts
+*.egg-info/
+.eggs/
+build/
+dist/
+
# dependencies
node_modules
.node-version
@@ -25,6 +34,7 @@ dist
output
/schemas
.env
+**/__pycache__
# Python
__pycache__/
diff --git a/.gitleaksignore b/.gitleaksignore
index 5def75c1..d160f02d 100644
--- a/.gitleaksignore
+++ b/.gitleaksignore
@@ -16,3 +16,5 @@ d1c0a37078cbed4fbedae044e5cbafac71717af0:utils/utils/src/__tests__/key-generatio
d1c0a37078cbed4fbedae044e5cbafac71717af0:utils/utils/src/__tests__/key-generation/get-private-key.test.ts:private-key:23
d1c0a37078cbed4fbedae044e5cbafac71717af0:utils/utils/src/__tests__/key-generation/get-private-key.test.ts:private-key:30
d1c0a37078cbed4fbedae044e5cbafac71717af0:utils/utils/src/__tests__/key-generation/get-private-key.test.ts:private-key:46
+f0eebf1356a699213340a45f64c6b990afcbb869:infrastructure/terraform/components/dl/ssm_parameter_mesh.tf:hashicorp-tf-password:11
+f0eebf1356a699213340a45f64c6b990afcbb869:infrastructure/terraform/components/dl/ssm_parameter_mesh.tf:hashicorp-tf-password:16
diff --git a/.tool-versions b/.tool-versions
index a89da095..9a400271 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -8,8 +8,6 @@ terraform 1.10.1
terraform-docs 0.19.0
trivy 0.61.0
vale 3.6.0
-
-
# ==============================================================================
# The section below is reserved for Docker image versions.
diff --git a/Makefile b/Makefile
index 6f643bdb..348ca347 100644
--- a/Makefile
+++ b/Makefile
@@ -9,15 +9,26 @@ include scripts/init.mk
quick-start: config clean build serve-docs # Quick start target to setup, build and serve docs @Pipeline
-dependencies: # Install dependencies needed to build and test the project @Pipeline
- # TODO: Implement installation of your project dependencies
+dependencies:: # Install dependencies needed to build and test the project @Pipeline
+ $(MAKE) -C src/cloudevents install
+ $(MAKE) -C src/eventcatalogasyncapiimporter install
+ $(MAKE) -C lambdas/mesh-poll install
+ $(MAKE) -C lambdas/mesh-download install
+ $(MAKE) -C utils/metric-publishers install
+ $(MAKE) -C utils/event-publisher-py install
+ $(MAKE) -C utils/py-mock-mesh install
+ npm install --workspaces
+ $(MAKE) generate
-generate: # Generate any autogenerated output @Pipeline
- npm run generate-dependencies
+dependencies-docs:: # Install documentation dependencies @Pipeline
+ $(MAKE) -C docs install
-build: # Build the project artefact @Pipeline
+build: dependencies-docs # Build the project artefact @Pipeline
$(MAKE) -C docs build
+generate: # Generate any autogenerated output @Pipeline
+ npm run generate-dependencies
+
debug:
$(MAKE) -C docs debug
@@ -32,16 +43,16 @@ clean:: # Clean-up project resources (main) @Operations
$(MAKE) -C src/cloudevents clean && \
$(MAKE) -C src/eventcatalogasyncapiimporter clean && \
$(MAKE) -C src/eventcatalogasyncapiimporter clean-output && \
+ $(MAKE) -C lambdas/mesh-poll clean && \
+ $(MAKE) -C lambdas/mesh-download clean && \
+ $(MAKE) -C utils/metric-publishers clean && \
+ $(MAKE) -C utils/event-publisher-py clean && \
+ $(MAKE) -C utils/py-mock-mesh clean && \
$(MAKE) -C src/python-schema-generator clean && \
rm -f .version
npm run clean
-config:: _install-dependencies version # Configure development environment (main) @Configuration
- $(MAKE) -C docs install
- $(MAKE) -C src/cloudevents install
- $(MAKE) -C src/eventcatalogasyncapiimporter install
- npm install
- $(MAKE) generate
+config:: _install-dependencies version dependencies # Configure development environment (main) @Configuration
serve-docs:
$(MAKE) -C docs s
diff --git a/infrastructure/terraform/components/dl/README.md b/infrastructure/terraform/components/dl/README.md
index db9e38e7..4d00a5f6 100644
--- a/infrastructure/terraform/components/dl/README.md
+++ b/infrastructure/terraform/components/dl/README.md
@@ -17,6 +17,7 @@ No requirements.
| [component](#input\_component) | The variable encapsulating the name of this component | `string` | `"dl"` | no |
| [default\_tags](#input\_default\_tags) | A map of default tags to apply to all taggable resources within the component | `map(string)` | `{}` | no |
| [enable\_dynamodb\_delete\_protection](#input\_enable\_dynamodb\_delete\_protection) | Enable DynamoDB Delete Protection on all Tables | `bool` | `true` | no |
+| [enable\_mock\_mesh](#input\_enable\_mock\_mesh) | Enable mock MESH access (dev only). Grants the MESH lambdas access to the mock-mesh prefix in the non-pii bucket. | `bool` | `false` | no |
| [environment](#input\_environment) | The name of the tfscaffold environment | `string` | n/a | yes |
| [force\_destroy](#input\_force\_destroy) | Flag to force deletion of S3 buckets | `bool` | `false` | no |
| [force\_lambda\_code\_deploy](#input\_force\_lambda\_code\_deploy) | If the lambda package in s3 has the same commit id tag as the terraform build branch, the lambda will not update automatically. Set to True if making changes to Lambda code from on the same commit for example during development | `bool` | `false` | no |
@@ -24,7 +25,7 @@ No requirements.
| [kms\_deletion\_window](#input\_kms\_deletion\_window) | When a kms key is deleted, how long should it wait in the pending deletion state? | `string` | `"30"` | no |
| [log\_level](#input\_log\_level) | The log level to be used in lambda functions within the component. Any log with a lower severity than the configured value will not be logged: https://docs.python.org/3/library/logging.html#levels | `string` | `"INFO"` | no |
| [log\_retention\_in\_days](#input\_log\_retention\_in\_days) | The retention period in days for the Cloudwatch Logs events to be retained, default of 0 is indefinite | `number` | `0` | no |
-| [mesh\_poll\_schedule](#input\_mesh\_poll\_schedule) | Schedule to poll MESH for messages | `string` | `"cron(0,30 8-16 ? * MON-FRI *)"` | no |
+| [mesh\_poll\_schedule](#input\_mesh\_poll\_schedule) | Schedule to poll MESH for messages | `string` | `"rate(5 minutes)"` | no |
| [parent\_acct\_environment](#input\_parent\_acct\_environment) | Name of the environment responsible for the acct resources used, affects things like DNS zone. Useful for named dev environments | `string` | `"main"` | no |
| [project](#input\_project) | The name of the tfscaffold project | `string` | n/a | yes |
| [queue\_batch\_size](#input\_queue\_batch\_size) | maximum number of queue items to process | `number` | `10` | no |
@@ -39,11 +40,15 @@ No requirements.
| [kms](#module\_kms) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-kms.zip | n/a |
| [lambda\_apim\_key\_generation](#module\_lambda\_apim\_key\_generation) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-lambda.zip | n/a |
| [lambda\_lambda\_apim\_refresh\_token](#module\_lambda\_lambda\_apim\_refresh\_token) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-lambda.zip | n/a |
+| [mesh\_download](#module\_mesh\_download) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-lambda.zip | n/a |
| [mesh\_poll](#module\_mesh\_poll) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-lambda.zip | n/a |
| [s3bucket\_cf\_logs](#module\_s3bucket\_cf\_logs) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| [s3bucket\_letters](#module\_s3bucket\_letters) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
+| [s3bucket\_non\_pii\_data](#module\_s3bucket\_non\_pii\_data) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
+| [s3bucket\_pii\_data](#module\_s3bucket\_pii\_data) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| [s3bucket\_static\_assets](#module\_s3bucket\_static\_assets) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip | n/a |
| [sqs\_event\_publisher\_errors](#module\_sqs\_event\_publisher\_errors) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
+| [sqs\_mesh\_download](#module\_sqs\_mesh\_download) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
| [sqs\_ttl](#module\_sqs\_ttl) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
| [sqs\_ttl\_handle\_expiry\_errors](#module\_sqs\_ttl\_handle\_expiry\_errors) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip | n/a |
| [ttl\_create](#module\_ttl\_create) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-lambda.zip | n/a |
diff --git a/infrastructure/terraform/components/dl/cloudwatch_event_rule_mesh_inbox_message_received.tf b/infrastructure/terraform/components/dl/cloudwatch_event_rule_mesh_inbox_message_received.tf
new file mode 100644
index 00000000..0b6f7a4b
--- /dev/null
+++ b/infrastructure/terraform/components/dl/cloudwatch_event_rule_mesh_inbox_message_received.tf
@@ -0,0 +1,24 @@
+resource "aws_cloudwatch_event_rule" "mesh_inbox_message_received" {
+ name = "${local.csi}-mesh-inbox-message-received"
+ description = "Route MESHInboxMessageReceived events from mesh-poll lambda to mesh-download queue"
+ event_bus_name = aws_cloudwatch_event_bus.main.name
+
+ event_pattern = jsonencode({
+ "detail" : {
+ "type" : [
+ "uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1"
+ ],
+ "dataschemaversion" : [{
+ "prefix" : "1."
+ }]
+ }
+ })
+}
+
+# EventBridge target to send events to SQS queue
+resource "aws_cloudwatch_event_target" "mesh_download_sqs" {
+ rule = aws_cloudwatch_event_rule.mesh_inbox_message_received.name
+ target_id = "mesh-inbox-message-received-sqs-target"
+ arn = module.sqs_mesh_download.sqs_queue_arn
+ event_bus_name = aws_cloudwatch_event_bus.main.name
+}
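
The pattern above matches on two detail fields: "type" must equal the listed value and "dataschemaversion" must begin with "1." (EventBridge prefix matching). A minimal sketch of a detail payload the rule would match, with placeholder values:

    # Sketch only: a detail payload that the rule above would match.
    # "1.0" and "example-message-id" are placeholders.
    matching_detail = {
        "type": "uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1",
        "dataschemaversion": "1.0",
        "data": {"meshMessageId": "example-message-id"},
    }
    assert matching_detail["dataschemaversion"].startswith("1.")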
diff --git a/infrastructure/terraform/components/dl/lambda_event_source_mapping_mesh_download.tf b/infrastructure/terraform/components/dl/lambda_event_source_mapping_mesh_download.tf
new file mode 100644
index 00000000..86c3ecf8
--- /dev/null
+++ b/infrastructure/terraform/components/dl/lambda_event_source_mapping_mesh_download.tf
@@ -0,0 +1,10 @@
+resource "aws_lambda_event_source_mapping" "mesh_download" {
+ event_source_arn = module.sqs_mesh_download.sqs_queue_arn
+ function_name = module.mesh_download.function_name
+ batch_size = var.queue_batch_size
+ maximum_batching_window_in_seconds = var.queue_batch_window_seconds
+
+ function_response_types = [
+ "ReportBatchItemFailures"
+ ]
+}
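
With ReportBatchItemFailures enabled, SQS retries only the records the function reports back. A minimal sketch of the response shape the mesh-download handler returns (the messageId is a placeholder):

    # Partial-batch response contract: only the listed messageIds are retried;
    # an empty list means the whole batch succeeded.
    response = {
        "batchItemFailures": [
            {"itemIdentifier": "example-message-id"},
        ]
    }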
diff --git a/infrastructure/terraform/components/dl/locals.tf b/infrastructure/terraform/components/dl/locals.tf
index 0424bd1c..ef76f4c4 100644
--- a/infrastructure/terraform/components/dl/locals.tf
+++ b/infrastructure/terraform/components/dl/locals.tf
@@ -5,6 +5,8 @@ locals {
apim_api_key_ssm_parameter_name = "/${var.component}/${var.environment}/apim/api_key"
apim_private_key_ssm_parameter_name = "/${var.component}/${var.environment}/apim/private_key"
apim_keystore_s3_bucket = "nhs-${var.aws_account_id}-${var.region}-${var.environment}-${var.component}-static-assets"
+ ssm_mesh_prefix = "/${var.component}/${var.environment}/mesh"
+ mock_mesh_endpoint = "s3://${module.s3bucket_non_pii_data.bucket}/mock-mesh"
root_domain_name = "${var.environment}.${local.acct.route53_zone_names["digital-letters"]}"
root_domain_id = local.acct.route53_zone_ids["digital-letters"]
ttl_shard_count = 3
diff --git a/infrastructure/terraform/components/dl/module_lambda_mesh_download.tf b/infrastructure/terraform/components/dl/module_lambda_mesh_download.tf
new file mode 100644
index 00000000..2c51315b
--- /dev/null
+++ b/infrastructure/terraform/components/dl/module_lambda_mesh_download.tf
@@ -0,0 +1,178 @@
+module "mesh_download" {
+ source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-lambda.zip"
+
+ function_name = "mesh-download"
+ description = "A lambda function for downloading MESH messages and storing in S3"
+
+ aws_account_id = var.aws_account_id
+ component = local.component
+ environment = var.environment
+ project = var.project
+ region = var.region
+ group = var.group
+
+ log_retention_in_days = var.log_retention_in_days
+ kms_key_arn = module.kms.key_arn
+
+ iam_policy_document = {
+ body = data.aws_iam_policy_document.mesh_download_lambda.json
+ }
+
+ function_s3_bucket = local.acct.s3_buckets["lambda_function_artefacts"]["id"]
+ function_code_base_path = local.aws_lambda_functions_dir_path
+ function_code_dir = "mesh-download/target/dist"
+ function_include_common = true
+ function_module_name = "mesh_download"
+ handler_function_name = "handler.handler"
+ runtime = "python3.14"
+ memory = 256
+ timeout = 60
+ log_level = var.log_level
+
+ force_lambda_code_deploy = var.force_lambda_code_deploy
+ enable_lambda_insights = false
+
+ send_to_firehose = true
+ log_destination_arn = local.log_destination_arn
+ log_subscription_role_arn = local.acct.log_subscription_role_arn
+
+ lambda_env_vars = {
+ CERTIFICATE_EXPIRY_METRIC_NAME = "mesh-download-client-certificate-near-expiry"
+ CERTIFICATE_EXPIRY_METRIC_NAMESPACE = "dl-mesh-download"
+ DOWNLOAD_METRIC_NAME = "mesh-download-successful-downloads"
+ DOWNLOAD_METRIC_NAMESPACE = "dl-mesh-download"
+ ENVIRONMENT = var.environment
+ EVENT_PUBLISHER_DLQ_URL = module.sqs_event_publisher_errors.sqs_queue_url
+ EVENT_PUBLISHER_EVENT_BUS_ARN = aws_cloudwatch_event_bus.main.arn
+ PII_BUCKET = module.s3bucket_pii_data.bucket
+    SSM_PREFIX                          = local.ssm_mesh_prefix
+ USE_MESH_MOCK = var.enable_mock_mesh ? "true" : "false"
+ }
+
+}
+
+data "aws_iam_policy_document" "mesh_download_lambda" {
+ # Mock S3 ListBucket only when enabled
+ dynamic "statement" {
+ for_each = var.enable_mock_mesh ? [1] : []
+ content {
+ sid = "MockMeshListBucket"
+ effect = "Allow"
+
+ actions = [
+ "s3:ListBucket"
+ ]
+
+ resources = [
+ module.s3bucket_non_pii_data.arn
+ ]
+
+ condition {
+ test = "StringLike"
+ variable = "s3:prefix"
+ values = ["mock-mesh/*"]
+ }
+ }
+ }
+
+ # Mock S3 GetObject only when enabled
+ dynamic "statement" {
+ for_each = var.enable_mock_mesh ? [1] : []
+ content {
+ sid = "MockMeshGetObject"
+ effect = "Allow"
+
+ actions = [
+ "s3:GetObject"
+ ]
+
+ resources = [
+ "${module.s3bucket_non_pii_data.arn}/mock-mesh/*"
+ ]
+ }
+ }
+
+ statement {
+ sid = "KMSPermissions"
+ effect = "Allow"
+
+ actions = [
+ "kms:Decrypt",
+ "kms:GenerateDataKey",
+ ]
+
+ resources = [
+ module.kms.key_arn,
+ ]
+ }
+
+ statement {
+ sid = "S3BucketPermissions"
+ effect = "Allow"
+
+ actions = [
+ "s3:PutObject",
+ "s3:GetObject",
+ ]
+
+ resources = [
+ "${module.s3bucket_pii_data.arn}/*",
+ ]
+ }
+
+ statement {
+ sid = "SQSPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:ReceiveMessage",
+ "sqs:DeleteMessage",
+ "sqs:GetQueueAttributes",
+ ]
+
+ resources = [
+ module.sqs_mesh_download.sqs_queue_arn,
+ ]
+ }
+
+ statement {
+ sid = "EventBridgePermissions"
+ effect = "Allow"
+
+ actions = [
+ "events:PutEvents",
+ ]
+
+ resources = [
+ aws_cloudwatch_event_bus.main.arn,
+ ]
+ }
+
+ statement {
+ sid = "DLQPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:SendMessage",
+ "sqs:SendMessageBatch",
+ ]
+
+ resources = [
+ module.sqs_event_publisher_errors.sqs_queue_arn,
+ ]
+ }
+
+ statement {
+ sid = "SSMPermissions"
+ effect = "Allow"
+
+ actions = [
+ "ssm:GetParameter",
+ "ssm:GetParametersByPath",
+ ]
+
+ resources = [
+ "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_mesh_prefix}/*"
+ ]
+ }
+}
diff --git a/infrastructure/terraform/components/dl/module_lambda_mesh_poll.tf b/infrastructure/terraform/components/dl/module_lambda_mesh_poll.tf
index 1b986107..6bcf16bf 100644
--- a/infrastructure/terraform/components/dl/module_lambda_mesh_poll.tf
+++ b/infrastructure/terraform/components/dl/module_lambda_mesh_poll.tf
@@ -20,12 +20,13 @@ module "mesh_poll" {
function_s3_bucket = local.acct.s3_buckets["lambda_function_artefacts"]["id"]
function_code_base_path = local.aws_lambda_functions_dir_path
- function_code_dir = "mesh-poll/dist"
+ function_code_dir = "mesh-poll/target/dist"
function_include_common = true
- handler_function_name = "handler"
- runtime = "nodejs22.x"
+ function_module_name = "mesh_poll"
+ handler_function_name = "handler.handler"
+ runtime = "python3.14"
memory = 128
- timeout = 5
+ timeout = 300
log_level = var.log_level
schedule = var.mesh_poll_schedule
@@ -37,10 +38,62 @@ module "mesh_poll" {
log_subscription_role_arn = local.acct.log_subscription_role_arn
lambda_env_vars = {
+ CERTIFICATE_EXPIRY_METRIC_NAME = "mesh-poll-client-certificate-near-expiry"
+ CERTIFICATE_EXPIRY_METRIC_NAMESPACE = "dl-mesh-poll"
+ ENVIRONMENT = var.environment
+ EVENT_PUBLISHER_DLQ_URL = module.sqs_event_publisher_errors.sqs_queue_url
+ EVENT_PUBLISHER_EVENT_BUS_ARN = aws_cloudwatch_event_bus.main.arn
+ MAXIMUM_RUNTIME_MILLISECONDS = "240000" # 4 minutes (Lambda has 5 min timeout)
+ POLLING_METRIC_NAME = "mesh-poll-successful-polls"
+ POLLING_METRIC_NAMESPACE = "dl-mesh-poll"
+    SSM_PREFIX                          = local.ssm_mesh_prefix
+ USE_MESH_MOCK = var.enable_mock_mesh ? "true" : "false"
}
+
}
data "aws_iam_policy_document" "mesh_poll_lambda" {
+ # Mock S3 ListBucket only when enabled
+ dynamic "statement" {
+ for_each = var.enable_mock_mesh ? [1] : []
+ content {
+ sid = "MockMeshListBucket"
+ effect = "Allow"
+
+ actions = [
+ "s3:ListBucket"
+ ]
+
+ resources = [
+ module.s3bucket_non_pii_data.arn
+ ]
+
+ condition {
+ test = "StringLike"
+ variable = "s3:prefix"
+ values = ["mock-mesh/*"]
+ }
+ }
+ }
+
+ dynamic "statement" {
+ for_each = var.enable_mock_mesh ? [1] : []
+ content {
+ sid = "AllowMockMeshActions"
+ effect = "Allow"
+
+ actions = [
+ "s3:PutObject",
+ "s3:GetObject",
+ "s3:DeleteObject"
+ ]
+
+ resources = [
+ "${module.s3bucket_non_pii_data.arn}/mock-mesh/*"
+ ]
+ }
+ }
+
statement {
sid = "KMSPermissions"
effect = "Allow"
@@ -54,4 +107,45 @@ data "aws_iam_policy_document" "mesh_poll_lambda" {
module.kms.key_arn,
]
}
+
+ statement {
+ sid = "EventBridgePermissions"
+ effect = "Allow"
+
+ actions = [
+ "events:PutEvents",
+ ]
+
+ resources = [
+ aws_cloudwatch_event_bus.main.arn,
+ ]
+ }
+
+ statement {
+ sid = "DLQPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:SendMessage",
+ "sqs:SendMessageBatch",
+ ]
+
+ resources = [
+ module.sqs_event_publisher_errors.sqs_queue_arn,
+ ]
+ }
+
+ statement {
+ sid = "SSMPermissions"
+ effect = "Allow"
+
+ actions = [
+ "ssm:GetParameter",
+ "ssm:GetParametersByPath",
+ ]
+
+ resources = [
+ "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_mesh_prefix}/*"
+ ]
+ }
}
diff --git a/infrastructure/terraform/components/dl/module_s3bucket_non_pii_data.tf b/infrastructure/terraform/components/dl/module_s3bucket_non_pii_data.tf
new file mode 100644
index 00000000..a44acda5
--- /dev/null
+++ b/infrastructure/terraform/components/dl/module_s3bucket_non_pii_data.tf
@@ -0,0 +1,58 @@
+module "s3bucket_non_pii_data" {
+ source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip"
+
+ name = "non-pii-data"
+
+ aws_account_id = var.aws_account_id
+ region = var.region
+ project = var.project
+ environment = var.environment
+ component = local.component
+
+ kms_key_arn = module.kms.key_arn
+
+ policy_documents = [data.aws_iam_policy_document.s3bucket_non_pii_data.json]
+}
+
+data "aws_iam_policy_document" "s3bucket_non_pii_data" {
+ statement {
+ sid = "AllowManagedAccountsToList"
+ effect = "Allow"
+
+ actions = [
+ "s3:ListBucket",
+ ]
+
+ resources = [
+ module.s3bucket_non_pii_data.arn,
+ ]
+
+ principals {
+ type = "AWS"
+ identifiers = [
+ "arn:aws:iam::${var.aws_account_id}:root"
+ ]
+ }
+ }
+
+ statement {
+ sid = "AllowManagedAccountsToGet"
+ effect = "Allow"
+
+ actions = [
+ "s3:GetObject",
+ "s3:PutObject",
+ ]
+
+ resources = [
+ "${module.s3bucket_non_pii_data.arn}/*",
+ ]
+
+ principals {
+ type = "AWS"
+ identifiers = [
+ "arn:aws:iam::${var.aws_account_id}:root"
+ ]
+ }
+ }
+}
diff --git a/infrastructure/terraform/components/dl/module_s3bucket_pii_data.tf b/infrastructure/terraform/components/dl/module_s3bucket_pii_data.tf
new file mode 100644
index 00000000..af9aef56
--- /dev/null
+++ b/infrastructure/terraform/components/dl/module_s3bucket_pii_data.tf
@@ -0,0 +1,58 @@
+module "s3bucket_pii_data" {
+ source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-s3bucket.zip"
+
+ name = "pii-data"
+
+ aws_account_id = var.aws_account_id
+ region = var.region
+ project = var.project
+ environment = var.environment
+ component = local.component
+
+ kms_key_arn = module.kms.key_arn
+
+ policy_documents = [data.aws_iam_policy_document.s3bucket_pii_data.json]
+}
+
+data "aws_iam_policy_document" "s3bucket_pii_data" {
+ statement {
+ sid = "AllowManagedAccountsToList"
+ effect = "Allow"
+
+ actions = [
+ "s3:ListBucket",
+ ]
+
+ resources = [
+ module.s3bucket_pii_data.arn,
+ ]
+
+ principals {
+ type = "AWS"
+ identifiers = [
+ "arn:aws:iam::${var.aws_account_id}:root"
+ ]
+ }
+ }
+
+ statement {
+ sid = "AllowManagedAccountsToGet"
+ effect = "Allow"
+
+ actions = [
+ "s3:GetObject",
+ "s3:PutObject",
+ ]
+
+ resources = [
+ "${module.s3bucket_pii_data.arn}/*",
+ ]
+
+ principals {
+ type = "AWS"
+ identifiers = [
+ "arn:aws:iam::${var.aws_account_id}:root"
+ ]
+ }
+ }
+}
diff --git a/infrastructure/terraform/components/dl/module_sqs_mesh_download.tf b/infrastructure/terraform/components/dl/module_sqs_mesh_download.tf
new file mode 100644
index 00000000..f33e7881
--- /dev/null
+++ b/infrastructure/terraform/components/dl/module_sqs_mesh_download.tf
@@ -0,0 +1,38 @@
+module "sqs_mesh_download" {
+ source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.24/terraform-sqs.zip"
+
+ aws_account_id = var.aws_account_id
+ component = local.component
+ environment = var.environment
+ project = var.project
+ region = var.region
+ name = "mesh-download"
+
+ sqs_kms_key_arn = module.kms.key_arn
+
+ visibility_timeout_seconds = 60
+
+ create_dlq = true
+
+ sqs_policy_overload = data.aws_iam_policy_document.sqs_mesh_download.json
+}
+
+data "aws_iam_policy_document" "sqs_mesh_download" {
+ statement {
+ sid = "AllowEventBridgeToSendMessage"
+ effect = "Allow"
+
+ principals {
+ type = "Service"
+ identifiers = ["events.amazonaws.com"]
+ }
+
+ actions = [
+ "sqs:SendMessage"
+ ]
+
+ resources = [
+ "arn:aws:sqs:${var.region}:${var.aws_account_id}:${var.project}-${var.environment}-${local.component}-mesh-download-queue"
+ ]
+ }
+}
diff --git a/infrastructure/terraform/components/dl/pre.sh b/infrastructure/terraform/components/dl/pre.sh
index 870803e0..373d14b0 100755
--- a/infrastructure/terraform/components/dl/pre.sh
+++ b/infrastructure/terraform/components/dl/pre.sh
@@ -9,3 +9,9 @@ npm ci
npm run generate-dependencies
npm run lambda-build --workspaces --if-present
+
+# Build Python lambdas
+ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../.." && pwd)"
+
+make -C "$ROOT/lambdas/mesh-poll" package
+make -C "$ROOT/lambdas/mesh-download" package
diff --git a/infrastructure/terraform/components/dl/ssm_parameter_mesh.tf b/infrastructure/terraform/components/dl/ssm_parameter_mesh.tf
new file mode 100644
index 00000000..35045c18
--- /dev/null
+++ b/infrastructure/terraform/components/dl/ssm_parameter_mesh.tf
@@ -0,0 +1,67 @@
+# MESH Configuration SSM Parameter
+resource "aws_ssm_parameter" "mesh_config" {
+ name = "${local.ssm_mesh_prefix}/config"
+ description = "MESH configuration"
+ type = "SecureString"
+
+ value = var.enable_mock_mesh ? jsonencode({
+ mesh_endpoint = local.mock_mesh_endpoint
+ mesh_mailbox = "mock-mailbox"
+ mesh_mailbox_password = "mock-password"
+ mesh_shared_key = "mock-shared-key"
+ }) : jsonencode({
+ mesh_endpoint = "UNSET"
+ mesh_mailbox = "UNSET"
+ mesh_mailbox_password = "UNSET"
+ mesh_shared_key = "UNSET"
+ })
+
+ tags = merge(local.default_tags, {
+ Backup = "true"
+ Description = "MESH configuration"
+ })
+
+ lifecycle {
+ ignore_changes = [
+ value
+ ]
+ }
+}
+
+# MESH Client Certificate SSM Parameter
+resource "aws_ssm_parameter" "mesh_client_cert" {
+ name = "${local.ssm_mesh_prefix}/client-cert"
+ description = "MESH client certificate"
+ type = "SecureString"
+ value = var.enable_mock_mesh ? "mock-cert" : "UNSET"
+
+ tags = merge(local.default_tags, {
+ Backup = "true"
+ Description = "MESH client certificate"
+ })
+
+ lifecycle {
+ ignore_changes = [
+ value
+ ]
+ }
+}
+
+# MESH Client Private Key SSM Parameter
+resource "aws_ssm_parameter" "mesh_client_key" {
+ name = "${local.ssm_mesh_prefix}/client-key"
+ description = "MESH client private key"
+ type = "SecureString"
+ value = var.enable_mock_mesh ? "mock-key" : "UNSET"
+
+ tags = merge(local.default_tags, {
+ Backup = "true"
+ Description = "MESH client private key"
+ })
+
+ lifecycle {
+ ignore_changes = [
+ value
+ ]
+ }
+}
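
The lambdas receive the SSM_PREFIX environment variable and resolve these parameters at runtime through the shared config (BaseMeshConfig). A minimal, illustrative sketch of the equivalent boto3 lookup, assuming the parameter names created above:

    import json
    import os

    import boto3

    ssm = boto3.client("ssm")
    prefix = os.environ["SSM_PREFIX"]  # "/<component>/<environment>/mesh", per local.ssm_mesh_prefix

    # SecureString values come back decrypted when WithDecryption is set
    mesh_config = json.loads(
        ssm.get_parameter(Name=f"{prefix}/config", WithDecryption=True)["Parameter"]["Value"]
    )
    client_cert = ssm.get_parameter(Name=f"{prefix}/client-cert", WithDecryption=True)["Parameter"]["Value"]
    client_key = ssm.get_parameter(Name=f"{prefix}/client-key", WithDecryption=True)["Parameter"]["Value"]

    mesh_endpoint = mesh_config["mesh_endpoint"]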
diff --git a/infrastructure/terraform/components/dl/variables.tf b/infrastructure/terraform/components/dl/variables.tf
index ef2081c3..ecc5f9a2 100644
--- a/infrastructure/terraform/components/dl/variables.tf
+++ b/infrastructure/terraform/components/dl/variables.tf
@@ -89,7 +89,13 @@ variable "parent_acct_environment" {
variable "mesh_poll_schedule" {
type = string
description = "Schedule to poll MESH for messages"
- default = "cron(0,30 8-16 ? * MON-FRI *)" # Every 30 minutes between 8am and 4:30pm Mon-Fri
+ default = "rate(5 minutes)" # Every 5 minutes
+}
+
+variable "enable_mock_mesh" {
+  description = "Enable mock MESH access (dev only). Grants the MESH lambdas access to the mock-mesh prefix in the non-pii bucket."
+ type = bool
+ default = false
}
variable "queue_batch_size" {
diff --git a/lambdas/mesh-download/Makefile b/lambdas/mesh-download/Makefile
new file mode 100644
index 00000000..22279cac
--- /dev/null
+++ b/lambdas/mesh-download/Makefile
@@ -0,0 +1,34 @@
+PACKAGE=mesh_download
+VERSION=0.1.0
+
+install:
+ pip install -r requirements.txt
+
+install-dev:
+ pip install -r requirements-dev.txt
+
+test:
+ cd ../.. && PYTHONPATH=lambdas/mesh-download:$$PYTHONPATH pytest lambdas/mesh-download/mesh_download/__tests__/ -v
+
+coverage:
+ cd ../.. && PYTHONPATH=lambdas/mesh-download:$$PYTHONPATH pytest lambdas/mesh-download/mesh_download/__tests__/ \
+ --cov=lambdas/mesh-download/mesh_download \
+ --cov-config=lambdas/mesh-download/pytest.ini \
+ --cov-report=html:lambdas/mesh-download/htmlcov \
+ --cov-report=term-missing \
+ --cov-report=xml:lambdas/mesh-download/coverage.xml \
+ --cov-branch
+
+lint:
+ pylint mesh_download
+
+format:
+ autopep8 -ri .
+
+package:
+ ./package_python_lambda.sh meshdownloadlambda
+
+clean:
+ rm -rf target
+
+.PHONY: install install-dev test coverage lint format package clean
diff --git a/lambdas/mesh-download/mesh_download/__init__.py b/lambdas/mesh-download/mesh_download/__init__.py
new file mode 100644
index 00000000..fc3ce40e
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/__init__.py
@@ -0,0 +1,11 @@
+"""
+MESH Download Lambda
+
+This module downloads MESH messages referenced by SQS events published by the
+mesh-poll lambda and stores them in S3.
+"""
+
+__version__ = '0.1.0'
+from .handler import *
+from .processor import *
+from .errors import *
diff --git a/lambdas/mesh-download/mesh_download/__tests__/__init__.py b/lambdas/mesh-download/mesh_download/__tests__/__init__.py
new file mode 100644
index 00000000..43222971
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/__tests__/__init__.py
@@ -0,0 +1,3 @@
+"""
+Tests for mesh-download Lambda
+"""
diff --git a/lambdas/mesh-download/mesh_download/__tests__/test_document_store.py b/lambdas/mesh-download/mesh_download/__tests__/test_document_store.py
new file mode 100644
index 00000000..9126c3da
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/__tests__/test_document_store.py
@@ -0,0 +1,54 @@
+"""Tests for DocumentStore"""
+import pytest
+from unittest.mock import Mock
+from mesh_download.document_store import DocumentStore, IntermediaryBodyStoreError
+
+
+class TestDocumentStore:
+ """Test suite for DocumentStore"""
+
+ def test_store_document_success(self):
+ """Successfully stores document and returns S3 key"""
+ mock_s3_client = Mock()
+ mock_s3_client.put_object.return_value = {
+ 'ResponseMetadata': {'HTTPStatusCode': 200}
+ }
+
+ config = Mock()
+ config.s3_client = mock_s3_client
+ config.transactional_data_bucket = 'test-pii-bucket'
+
+ store = DocumentStore(config)
+
+ result = store.store_document(
+ sender_id='SENDER_001',
+ message_reference='ref_123',
+ content=b'test content'
+ )
+
+ assert result == 'document-reference/SENDER_001_ref_123'
+ mock_s3_client.put_object.assert_called_once_with(
+ Bucket='test-pii-bucket',
+ Key='document-reference/SENDER_001_ref_123',
+ Body=b'test content'
+ )
+
+ def test_store_document_s3_failure_raises_error(self):
+ """Raises IntermediaryBodyStoreError when S3 put_object fails"""
+ mock_s3_client = Mock()
+ mock_s3_client.put_object.return_value = {
+ 'ResponseMetadata': {'HTTPStatusCode': 500}
+ }
+
+ config = Mock()
+ config.s3_client = mock_s3_client
+ config.transactional_data_bucket = 'test-pii-bucket'
+
+ store = DocumentStore(config)
+
+ with pytest.raises(IntermediaryBodyStoreError):
+ store.store_document(
+ sender_id='SENDER_001',
+ message_reference='ref_123',
+ content=b'test content'
+ )
diff --git a/lambdas/mesh-download/mesh_download/__tests__/test_handler.py b/lambdas/mesh-download/mesh_download/__tests__/test_handler.py
new file mode 100644
index 00000000..cf6b2d21
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/__tests__/test_handler.py
@@ -0,0 +1,282 @@
+"""
+Tests for Lambda handler
+"""
+import pytest
+from unittest.mock import Mock, patch, MagicMock
+
+
+def setup_mocks():
+ """
+ Create all mock objects needed for handler testing
+ """
+ mock_context = Mock()
+
+ mock_config = MagicMock()
+ mock_config.mesh_client = Mock()
+
+ mock_processor = Mock()
+ mock_processor.process_sqs_message = Mock()
+
+ return (
+ mock_context,
+ mock_config,
+ mock_processor
+ )
+
+
+def create_sqs_event(num_records=1, event_source='aws:sqs'):
+ """
+ Create a mock SQS event for testing
+ """
+ records = []
+ for i in range(num_records):
+ records.append({
+ 'messageId': f'msg-{i}',
+ 'eventSource': event_source,
+ 'body': '{"detail": {"data": {"meshMessageId": "test_id"}}}'
+ })
+
+ return {'Records': records}
+
+
+class TestHandler:
+ """Test suite for Lambda handler"""
+
+ @patch('mesh_download.handler.EventPublisher')
+ @patch('mesh_download.handler.DocumentStore')
+ @patch('mesh_download.handler.Config')
+ @patch('mesh_download.handler.MeshDownloadProcessor')
+ def test_handler_success_single_message(self, mock_processor_class, mock_config_class, mock_doc_store_class, mock_event_pub_class):
+ """Test successful handler execution with single SQS message"""
+ from mesh_download.handler import handler
+
+ (mock_context, mock_config, mock_processor) = setup_mocks()
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_processor_class.return_value = mock_processor
+
+ mock_doc_store = Mock()
+ mock_doc_store_class.return_value = mock_doc_store
+ mock_event_pub = Mock()
+ mock_event_pub_class.return_value = mock_event_pub
+
+ event = create_sqs_event(num_records=1)
+
+ result = handler(event, mock_context)
+
+ mock_config_class.assert_called_once()
+ mock_config_class.return_value.__enter__.assert_called_once()
+
+ # Verify MeshDownloadProcessor was created with correct parameters
+ mock_processor_class.assert_called_once()
+ call_kwargs = mock_processor_class.call_args[1]
+ assert call_kwargs['config'] == mock_config
+ assert call_kwargs['log'] is not None
+
+ mock_processor.process_sqs_message.assert_called_once()
+
+ assert result == {"batchItemFailures": []}
+
+ @patch('mesh_download.handler.EventPublisher')
+ @patch('mesh_download.handler.DocumentStore')
+ @patch('mesh_download.handler.Config')
+ @patch('mesh_download.handler.MeshDownloadProcessor')
+ def test_handler_success_multiple_messages(self, mock_processor_class, mock_config_class, mock_doc_store_class, mock_event_pub_class):
+ """Test successful handler execution with multiple SQS messages"""
+ from mesh_download.handler import handler
+
+ (mock_context, mock_config, mock_processor) = setup_mocks()
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_processor_class.return_value = mock_processor
+
+ mock_doc_store_class.return_value = Mock()
+ mock_event_pub_class.return_value = Mock()
+
+ event = create_sqs_event(num_records=3)
+
+ result = handler(event, mock_context)
+
+ # Verify process_sqs_message was called 3 times
+ assert mock_processor.process_sqs_message.call_count == 3
+
+ # Verify return value (no failures)
+ assert result == {"batchItemFailures": []}
+
+ @patch('mesh_download.handler.EventPublisher')
+ @patch('mesh_download.handler.DocumentStore')
+ @patch('mesh_download.handler.Config')
+ @patch('mesh_download.handler.MeshDownloadProcessor')
+ def test_handler_config_cleanup_on_success(self, mock_processor_class, mock_config_class, mock_doc_store_class, mock_event_pub_class):
+ """Test that Config context manager cleanup is called on success"""
+ from mesh_download.handler import handler
+
+ (mock_context, mock_config, mock_processor) = setup_mocks()
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_exit = Mock(return_value=None)
+ mock_config_class.return_value.__exit__ = mock_exit
+ mock_processor_class.return_value = mock_processor
+
+ mock_doc_store_class.return_value = Mock()
+ mock_event_pub_class.return_value = Mock()
+
+ event = create_sqs_event(num_records=1)
+
+ handler(event, mock_context)
+
+ mock_exit.assert_called_once()
+ assert mock_exit.call_args[0] == (None, None, None)
+
+ @patch('mesh_download.handler.EventPublisher')
+ @patch('mesh_download.handler.DocumentStore')
+ @patch('mesh_download.handler.Config')
+ @patch('mesh_download.handler.MeshDownloadProcessor')
+ def test_handler_partial_batch_failure(self, mock_processor_class, mock_config_class, mock_doc_store_class, mock_event_pub_class):
+ """Test handler handles partial batch failures correctly"""
+ from mesh_download.handler import handler
+
+ (mock_context, mock_config, mock_processor) = setup_mocks()
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_processor_class.return_value = mock_processor
+
+ mock_doc_store_class.return_value = Mock()
+ mock_event_pub_class.return_value = Mock()
+
+ # Make second message fail
+ mock_processor.process_sqs_message.side_effect = [
+ None,
+ Exception("Test error"),
+ None
+ ]
+
+ event = create_sqs_event(num_records=3)
+
+ result = handler(event, mock_context)
+
+ # Verify only the failed message is in batch item failures
+ assert len(result["batchItemFailures"]) == 1
+ assert result["batchItemFailures"][0]["itemIdentifier"] == "msg-1"
+
+ @patch('mesh_download.handler.EventPublisher')
+ @patch('mesh_download.handler.DocumentStore')
+ @patch('mesh_download.handler.Config')
+ @patch('mesh_download.handler.MeshDownloadProcessor')
+ def test_handler_skips_non_sqs_records(self, mock_processor_class, mock_config_class, mock_doc_store_class, mock_event_pub_class):
+ """Test handler skips records that are not from SQS"""
+ from mesh_download.handler import handler
+
+ (mock_context, mock_config, mock_processor) = setup_mocks()
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_processor_class.return_value = mock_processor
+
+ mock_doc_store_class.return_value = Mock()
+ mock_event_pub_class.return_value = Mock()
+
+ event = create_sqs_event(num_records=1, event_source='aws:dynamodb')
+
+ result = handler(event, mock_context)
+
+ mock_processor.process_sqs_message.assert_not_called()
+
+ assert result == {"batchItemFailures": []}
+
+ @patch('mesh_download.handler.EventPublisher')
+ @patch('mesh_download.handler.DocumentStore')
+ @patch('mesh_download.handler.Config')
+ @patch('mesh_download.handler.MeshDownloadProcessor')
+ def test_handler_config_cleanup_on_exception(self, mock_processor_class, mock_config_class, mock_doc_store_class, mock_event_pub_class):
+ """Test that Config context manager cleanup is called even on exception"""
+ from mesh_download.handler import handler
+
+ (mock_context, mock_config, mock_processor) = setup_mocks()
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_exit = Mock(return_value=None)
+ mock_config_class.return_value.__exit__ = mock_exit
+
+ mock_doc_store_class.return_value = Mock()
+ mock_event_pub_class.return_value = Mock()
+
+ test_exception = RuntimeError("Processing error")
+ mock_processor.process_sqs_message.side_effect = test_exception
+ mock_processor_class.return_value = mock_processor
+
+ event = create_sqs_event(num_records=1)
+
+ result = handler(event, mock_context)
+
+ mock_exit.assert_called_once()
+
+ # Verify the failed message is in batch failures
+ assert len(result["batchItemFailures"]) == 1
+
+ @patch('mesh_download.handler.EventPublisher')
+ @patch('mesh_download.handler.DocumentStore')
+ @patch('mesh_download.handler.Config')
+ @patch('mesh_download.handler.MeshDownloadProcessor')
+ def test_handler_returns_empty_failures_on_empty_event(self, mock_processor_class, mock_config_class, mock_doc_store_class, mock_event_pub_class):
+ """Test handler handles empty event gracefully"""
+ from mesh_download.handler import handler
+
+ (mock_context, mock_config, mock_processor) = setup_mocks()
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_processor_class.return_value = mock_processor
+
+ mock_doc_store_class.return_value = Mock()
+ mock_event_pub_class.return_value = Mock()
+
+ event = {'Records': []}
+
+ result = handler(event, mock_context)
+
+ mock_processor.process_sqs_message.assert_not_called()
+
+ assert result == {"batchItemFailures": []}
+
+ @patch('mesh_download.handler.EventPublisher')
+ @patch('mesh_download.handler.DocumentStore')
+ @patch('mesh_download.handler.Config')
+ @patch('mesh_download.handler.MeshDownloadProcessor')
+ def test_handler_passes_correct_parameters_to_processor(self, mock_processor_class, mock_config_class, mock_doc_store_class, mock_event_pub_class):
+ """Test that handler passes all required parameters to MeshDownloadProcessor"""
+ from mesh_download.handler import handler
+
+ (mock_context, mock_config, mock_processor) = setup_mocks()
+
+ mock_mesh_client = Mock()
+ mock_download_metric = Mock()
+ mock_config.mesh_client = mock_mesh_client
+ mock_config.download_metric = mock_download_metric
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_processor_class.return_value = mock_processor
+
+ mock_doc_store = Mock()
+ mock_doc_store_class.return_value = mock_doc_store
+ mock_event_pub = Mock()
+ mock_event_pub_class.return_value = mock_event_pub
+
+ event = create_sqs_event(num_records=1)
+
+ handler(event, mock_context)
+
+ mock_processor_class.assert_called_once()
+ call_kwargs = mock_processor_class.call_args[1]
+
+ # Handler now passes the entire config object and dependencies
+ assert call_kwargs['config'] == mock_config
+ assert call_kwargs['mesh_client'] == mock_mesh_client
+ assert call_kwargs['download_metric'] == mock_download_metric
+ assert call_kwargs['document_store'] == mock_doc_store
+ assert call_kwargs['event_publisher'] == mock_event_pub
+ assert 'log' in call_kwargs
diff --git a/lambdas/mesh-download/mesh_download/__tests__/test_processor.py b/lambdas/mesh-download/mesh_download/__tests__/test_processor.py
new file mode 100644
index 00000000..1901ee18
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/__tests__/test_processor.py
@@ -0,0 +1,350 @@
+"""
+Tests for mesh-download MeshDownloadProcessor
+Following the pattern from mesh-poll tests
+"""
+import json
+from uuid import uuid4
+import pytest
+from unittest.mock import Mock, patch
+from datetime import datetime, timezone
+from pydantic import ValidationError
+
+
+def setup_mocks():
+ """
+ Create all mock objects needed for processor testing
+ """
+ config = Mock()
+ # Set up default config attributes
+ config.mesh_client = Mock()
+ config.download_metric = Mock()
+ config.s3_client = Mock()
+ config.environment = 'development'
+ config.transactional_data_bucket = 'test-pii-bucket'
+ config.use_mesh_mock = False
+
+ log = Mock()
+ event_publisher = Mock()
+ document_store = Mock()
+
+ return config, log, event_publisher, document_store
+
+
+def create_valid_cloud_event():
+ """
+ Create a valid CloudEvent for testing
+ """
+ return {
+ 'id': str(uuid4()),
+ 'specversion': '1.0',
+ 'source': '/nhs/england/notify/development/primary/data-plane/digitalletters/mesh',
+ 'subject': 'customer/00000000-0000-0000-0000-000000000000/recipient/00000000-0000-0000-0000-000000000000',
+ 'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1',
+ 'time': '2023-01-01T12:00:00Z',
+ 'recordedtime': '2023-01-01T12:00:00Z',
+ 'severitynumber': 2,
+ 'severitytext': 'INFO',
+ 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ 'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-received-data.schema.json',
+ 'data': {
+ 'meshMessageId': 'test_message_123',
+ 'senderId': 'TEST_SENDER',
+ 'messageReference': 'ref_001'
+ }
+ }
+
+
+def create_sqs_record(cloud_event=None):
+ """
+ Create a mock SQS record containing a CloudEvent
+ """
+ if cloud_event is None:
+ cloud_event = create_valid_cloud_event()
+
+ return {
+ 'messageId': 'sqs-msg-123',
+ 'eventSource': 'aws:sqs',
+ 'body': json.dumps({'detail': cloud_event})
+ }
+
+
+def create_mesh_message(message_id='test_123', sender='SENDER_001', local_id='ref_001'):
+ """
+ Create a mock MESH message object
+ """
+ message = Mock()
+ message.id.return_value = message_id
+ message.sender = sender
+ message.local_id = local_id
+ message.subject = 'test_document.pdf'
+ message.workflow_id = 'TEST_WORKFLOW'
+ message.message_type = 'DATA'
+ message.read.return_value = b'Test message content'
+ message.acknowledge = Mock()
+ return message
+
+
+class TestMeshDownloadProcessor:
+ """Test suite for MeshDownloadProcessor"""
+
+ def test_processor_initialization_calls_mesh_handshake(self):
+ """Processor initializes and handshakes mesh client"""
+ from mesh_download.processor import MeshDownloadProcessor
+
+ config, log, event_publisher, document_store = setup_mocks()
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ config.mesh_client.handshake.assert_called_once()
+
+ @patch('mesh_download.processor.datetime')
+ def test_process_sqs_message_success(self, mock_datetime):
+ """Successful end-to-end: validate, download, store via document_store, publish, acknowledge"""
+ from mesh_download.processor import MeshDownloadProcessor
+
+ config, log, event_publisher, document_store = setup_mocks()
+
+ fixed_time = datetime(2025, 11, 19, 15, 30, 45, tzinfo=timezone.utc)
+ mock_datetime.now.return_value = fixed_time
+
+ document_store.store_document.return_value = 'document-reference/SENDER_001_ref_001'
+
+ event_publisher.send_events.return_value = []
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ mesh_message = create_mesh_message()
+ config.mesh_client.retrieve_message.return_value = mesh_message
+
+ sqs_record = create_sqs_record()
+
+ processor.process_sqs_message(sqs_record)
+
+ config.mesh_client.retrieve_message.assert_called_once_with('test_message_123')
+
+ mesh_message.read.assert_called_once()
+
+ document_store.store_document.assert_called_once_with(
+ sender_id='TEST_SENDER',
+ message_reference='ref_001',
+ content=b'Test message content'
+ )
+
+ mesh_message.acknowledge.assert_called_once()
+
+ config.download_metric.record.assert_called_once_with(1)
+
+ event_publisher.send_events.assert_called_once()
+
+ # Verify the published event content
+ published_events = event_publisher.send_events.call_args[0][0]
+ assert len(published_events) == 1
+
+ published_event = published_events[0]
+
+ # Verify CloudEvent envelope fields
+ assert published_event['type'] == 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1'
+ assert published_event['source'] == '/nhs/england/notify/development/primary/data-plane/digitalletters/mesh'
+ assert published_event['subject'] == 'customer/00000000-0000-0000-0000-000000000000/recipient/00000000-0000-0000-0000-000000000000'
+ assert published_event['time'] == '2025-11-19T15:30:45+00:00'
+ assert 'id' in published_event
+
+ # Verify CloudEvent data payload
+ event_data = published_event['data']
+ assert event_data['senderId'] == 'TEST_SENDER'
+ assert event_data['messageReference'] == 'ref_001'
+ assert event_data['messageUri'] == 's3://test-pii-bucket/document-reference/SENDER_001_ref_001'
+ assert set(event_data.keys()) == {'senderId', 'messageReference', 'messageUri'}
+
+ def test_process_sqs_message_validation_failure(self):
+ """Malformed CloudEvents should be rejected by pydantic and not trigger downloads"""
+ from mesh_download.processor import MeshDownloadProcessor
+
+ config, log, event_publisher, document_store = setup_mocks()
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ # Create broken cloud event
+ invalid_event = {'id': 'test-id'} # missing required fields
+ sqs_record = create_sqs_record(cloud_event=invalid_event)
+
+ with pytest.raises(ValidationError):
+ processor.process_sqs_message(sqs_record)
+
+ config.mesh_client.retrieve_message.assert_not_called()
+
+ def test_process_sqs_message_missing_mesh_message_id(self):
+ """Event missing meshMessageId should not be processed"""
+ from mesh_download.processor import MeshDownloadProcessor
+
+ config, log, event_publisher, document_store = setup_mocks()
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ event = create_valid_cloud_event()
+ del event['data']['meshMessageId']
+ sqs_record = create_sqs_record(cloud_event=event)
+
+ # Should raise ValidationError for missing required field
+ with pytest.raises(ValidationError, match="meshMessageId"):
+ processor.process_sqs_message(sqs_record)
+
+ config.mesh_client.retrieve_message.assert_not_called()
+
+ def test_download_and_store_message_not_found(self):
+ """If MESH returns None, nothing is stored or published"""
+ from mesh_download.processor import MeshDownloadProcessor
+
+ config, log, event_publisher, document_store = setup_mocks()
+ bound_logger = Mock()
+ log.bind.return_value = bound_logger
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ config.mesh_client.retrieve_message.return_value = None
+ sqs_record = create_sqs_record()
+ processor.process_sqs_message(sqs_record)
+ config.mesh_client.retrieve_message.assert_called_once_with('test_message_123')
+
+ document_store.store_document.assert_not_called()
+ event_publisher.send_events.assert_not_called()
+ config.download_metric.record.assert_not_called()
+
+ bound_logger.error.assert_called_once_with("Message not found in MESH inbox")
+
+ def test_document_store_failure_prevents_ack_and_raises(self):
+ """If storing fails the processor should raise and not acknowledge the MESH message"""
+ from mesh_download.processor import MeshDownloadProcessor
+
+ config, log, event_publisher, document_store = setup_mocks()
+
+ document_store.store_document.side_effect = Exception("document store failure")
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ mesh_message = create_mesh_message()
+ config.mesh_client.retrieve_message.return_value = mesh_message
+ sqs_record = create_sqs_record()
+
+ with pytest.raises(Exception, match="document store failure"):
+ processor.process_sqs_message(sqs_record)
+
+ # ensure we did not acknowledge the message if storage failed
+ mesh_message.acknowledge.assert_not_called()
+
+ @patch('mesh_download.processor.datetime')
+ def test_bucket_selection_with_mesh_mock_enabled(self, mock_datetime):
+ """When use_mesh_mock=True, processor uses PII bucket for storage"""
+ from mesh_download.processor import MeshDownloadProcessor
+
+ config, log, event_publisher, document_store = setup_mocks()
+ # Configure for mock mesh
+ config.use_mesh_mock = True
+ config.transactional_data_bucket = 'test-pii-bucket'
+
+ fixed_time = datetime(2025, 11, 19, 15, 30, 45, tzinfo=timezone.utc)
+ mock_datetime.now.return_value = fixed_time
+
+ document_store.store_document.return_value = 'document-reference/SENDER_001_ref_001'
+ event_publisher.send_events.return_value = []
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ mesh_message = create_mesh_message()
+ config.mesh_client.retrieve_message.return_value = mesh_message
+ sqs_record = create_sqs_record()
+
+ processor.process_sqs_message(sqs_record)
+
+ # Verify event was published with PII bucket in URI
+ event_publisher.send_events.assert_called_once()
+ published_events = event_publisher.send_events.call_args[0][0]
+ assert len(published_events) == 1
+ message_uri = published_events[0]['data']['messageUri']
+ assert message_uri.startswith('s3://test-pii-bucket/')
+
+ @patch('mesh_download.processor.datetime')
+ def test_bucket_selection_with_mesh_mock_disabled(self, mock_datetime):
+ """When use_mesh_mock=False, processor uses PII bucket for storage"""
+ from mesh_download.processor import MeshDownloadProcessor
+
+ config, log, event_publisher, document_store = setup_mocks()
+ # Configure for production (PII bucket)
+ config.use_mesh_mock = False
+ config.transactional_data_bucket = 'test-pii-bucket'
+
+ fixed_time = datetime(2025, 11, 19, 15, 30, 45, tzinfo=timezone.utc)
+ mock_datetime.now.return_value = fixed_time
+
+ document_store.store_document.return_value = 'document-reference/SENDER_001_ref_001'
+ event_publisher.send_events.return_value = []
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ mesh_message = create_mesh_message()
+ config.mesh_client.retrieve_message.return_value = mesh_message
+ sqs_record = create_sqs_record()
+
+ processor.process_sqs_message(sqs_record)
+
+ event_publisher.send_events.assert_called_once()
+ published_events = event_publisher.send_events.call_args[0][0]
+ assert len(published_events) == 1
+ message_uri = published_events[0]['data']['messageUri']
+ assert message_uri.startswith('s3://test-pii-bucket/')
diff --git a/lambdas/mesh-download/mesh_download/config.py b/lambdas/mesh-download/mesh_download/config.py
new file mode 100644
index 00000000..ab4a400b
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/config.py
@@ -0,0 +1,57 @@
+"""
+Module for configuring MESH Download application
+"""
+from event_publisher import BaseMeshConfig, log
+from metric_publishers.metric_client import Metric
+
+
+_REQUIRED_ENV_VAR_MAP = {
+ "ssm_prefix": "SSM_PREFIX",
+ "environment": "ENVIRONMENT",
+ "certificate_expiry_metric_name": "CERTIFICATE_EXPIRY_METRIC_NAME",
+ "certificate_expiry_metric_namespace": "CERTIFICATE_EXPIRY_METRIC_NAMESPACE",
+ "download_metric_name": "DOWNLOAD_METRIC_NAME",
+ "download_metric_namespace": "DOWNLOAD_METRIC_NAMESPACE",
+ "event_publisher_event_bus_arn": "EVENT_PUBLISHER_EVENT_BUS_ARN",
+ "event_publisher_dlq_url": "EVENT_PUBLISHER_DLQ_URL",
+ "pii_bucket": "PII_BUCKET"
+}
+
+
+class Config(BaseMeshConfig):
+ """
+ Represents the configuration of the MESH Download application.
+ Inherits common MESH configuration from BaseMeshConfig.
+ """
+
+ _REQUIRED_ENV_VAR_MAP = _REQUIRED_ENV_VAR_MAP
+
+ def __init__(self, ssm=None, s3_client=None):
+ super().__init__(ssm=ssm, s3_client=s3_client)
+
+ self.download_metric = None
+
+ def __enter__(self):
+ super().__enter__()
+
+ # Build download metric
+ self.download_metric = self.build_download_metric()
+
+ return self
+
+ def build_download_metric(self):
+ """
+ Returns a custom metric to record messages successfully downloaded and processed
+ """
+ return Metric(
+ name=self.download_metric_name,
+ namespace=self.download_metric_namespace,
+ dimensions={"Environment": self.environment}
+ )
+
+ @property
+ def transactional_data_bucket(self):
+ """
+ Returns the S3 bucket for storing downloaded messages.
+ """
+ return self.pii_bucket
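
Config is used as a context manager by the handler below; a minimal usage sketch, assuming the environment variables listed in _REQUIRED_ENV_VAR_MAP are set:

    from mesh_download.config import Config

    # __enter__ builds the download metric and the inherited MESH/S3 clients
    with Config() as config:
        bucket = config.transactional_data_bucket  # resolves to PII_BUCKET
        metric = config.download_metric            # Metric from metric_publishers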
diff --git a/lambdas/mesh-download/mesh_download/document_store.py b/lambdas/mesh-download/mesh_download/document_store.py
new file mode 100644
index 00000000..eb5612a1
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/document_store.py
@@ -0,0 +1,35 @@
+"""Module for storing document references in S3"""
+
+
+class IntermediaryBodyStoreError(Exception):
+ """Error to represent any failure to upload document to intermediate location"""
+
+
+class DocumentStoreConfig:
+ """Configuration holder for DocumentStore"""
+ def __init__(self, s3_client, transactional_data_bucket):
+ self.s3_client = s3_client
+ self.transactional_data_bucket = transactional_data_bucket
+
+
+class DocumentStore: # pylint: disable=too-few-public-methods
+ """Class for storing document references in S3"""
+
+ def __init__(self, config):
+ self.config = config
+
+ def store_document(self, sender_id, message_reference, content):
+ """store document reference in S3"""
+
+ s3_key = f"document-reference/{sender_id}_{message_reference}"
+
+ s3_response = self.config.s3_client.put_object(
+ Bucket=self.config.transactional_data_bucket,
+ Key=s3_key,
+ Body=content
+ )
+
+ if s3_response['ResponseMetadata']['HTTPStatusCode'] != 200:
+ raise IntermediaryBodyStoreError(s3_response)
+
+ return s3_key
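
Illustrative use of DocumentStore with a boto3 S3 client; the bucket name and payload are placeholders, and the resulting key format is the one asserted in the tests above:

    import boto3

    from mesh_download.document_store import DocumentStore, DocumentStoreConfig

    store = DocumentStore(DocumentStoreConfig(
        s3_client=boto3.client("s3"),
        transactional_data_bucket="example-pii-bucket",
    ))

    key = store.store_document(
        sender_id="SENDER_001",
        message_reference="ref_123",
        content=b"example content",
    )
    # key == "document-reference/SENDER_001_ref_123"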
diff --git a/lambdas/mesh-download/mesh_download/errors.py b/lambdas/mesh-download/mesh_download/errors.py
new file mode 100644
index 00000000..d8b1932c
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/errors.py
@@ -0,0 +1,13 @@
+"""
+Module representing possible errors within this application
+"""
+
+import traceback
+
+
+def format_exception(exception):
+ """
+ Returns a nicely formatted exception string
+ """
+ return ''.join(traceback.format_exception(
+ type(exception), exception, exception.__traceback__))
diff --git a/lambdas/mesh-download/mesh_download/handler.py b/lambdas/mesh-download/mesh_download/handler.py
new file mode 100644
index 00000000..de46bf59
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/handler.py
@@ -0,0 +1,79 @@
+"""lambda handler for mesh download"""
+
+from event_publisher import EventPublisher
+
+from .config import Config, log
+from .processor import MeshDownloadProcessor
+from .document_store import DocumentStore, DocumentStoreConfig
+
+
+def handler(event, context):
+ """
+ lambda handler for mesh download
+ Processes SQS events from mesh-download queue
+ Returns batch item failures for partial batch failure handling
+ """
+
+ log.info("Received SQS event", record_count=len(event.get('Records', [])))
+
+ batch_item_failures = []
+ processed = {
+ 'retrieved': 0,
+ 'downloaded': 0,
+ 'failed': 0
+ }
+
+ try:
+ with Config() as config:
+ doc_store_config = DocumentStoreConfig(
+ s3_client=config.s3_client,
+ transactional_data_bucket=config.transactional_data_bucket
+ )
+ document_store = DocumentStore(doc_store_config)
+
+ event_publisher = EventPublisher(
+ event_bus_arn=config.event_publisher_event_bus_arn,
+ dlq_url=config.event_publisher_dlq_url,
+ logger=log
+ )
+
+ processor = MeshDownloadProcessor(
+ config=config,
+ log=log,
+ mesh_client=config.mesh_client,
+ download_metric=config.download_metric,
+ document_store=document_store,
+ event_publisher=event_publisher
+ )
+
+ # Process each SQS record
+ for record in event.get('Records', []):
+ processed['retrieved'] += 1
+ message_id = record.get('messageId')
+
+ if record.get('eventSource') != 'aws:sqs':
+ log.warn("Skipping non-SQS record", message_id=message_id)
+ continue
+
+ try:
+ processor.process_sqs_message(record)
+ processed['downloaded'] += 1
+
+ except Exception as exc:
+ processed['failed'] += 1
+ log.error("Failed to process SQS message",
+ message_id=message_id,
+ error=str(exc))
+ batch_item_failures.append({"itemIdentifier": message_id})
+
+ log.info("Processed SQS event",
+ retrieved=processed['retrieved'],
+ downloaded=processed['downloaded'],
+ failed=processed['failed'])
+
+ return {"batchItemFailures": batch_item_failures}
+
+ except Exception as exc:
+ log.error("Error in mesh download handler", error=str(exc))
+ raise
diff --git a/lambdas/mesh-download/mesh_download/processor.py b/lambdas/mesh-download/mesh_download/processor.py
new file mode 100644
index 00000000..7ae51ea0
--- /dev/null
+++ b/lambdas/mesh-download/mesh_download/processor.py
@@ -0,0 +1,145 @@
+"""
+Module for processing MESH download requests received via SQS
+"""
+
+import json
+from datetime import datetime, timezone
+from uuid import uuid4
+
+from pydantic import ValidationError
+from event_publisher.models import MeshInboxMessageEvent, MeshDownloadMessageEvent
+
+
+class MeshDownloadProcessor:
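+    """
+    Class that downloads MESH messages, stores their content in S3 and publishes downloaded events
+    """
+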
+ def __init__(self, **kwargs):
+ self.__config = kwargs['config']
+ self.__log = kwargs['log']
+ self.__mesh_client = kwargs['mesh_client']
+ self.__download_metric = kwargs['download_metric']
+ self.__document_store = kwargs['document_store']
+ self.__event_publisher = kwargs['event_publisher']
+
+ self.__mesh_client.handshake()
+
+ self.__storage_bucket = self.__config.transactional_data_bucket
+
+ def process_sqs_message(self, sqs_record):
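+        """
+        Processes a single SQS record containing a MESHInboxMessageReceived event
+        """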
+ try:
+ validated_event = self._parse_and_validate_event(sqs_record)
+ logger = self.__log.bind(mesh_message_id=validated_event.data.meshMessageId)
+
+ logger.info("Processing MESH download request")
+ self._handle_download(validated_event, logger)
+
+ except Exception as exc:
+ self.__log.error(
+ "Error processing SQS message",
+ error=str(exc),
+ sqs_record=sqs_record
+ )
+ raise
+
+ def _parse_and_validate_event(self, sqs_record):
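+        """
+        Parses the SQS record body and validates the embedded CloudEvent
+        """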
+ message_body = json.loads(sqs_record['body'])
+ event_detail = message_body.get('detail', {})
+
+ try:
+ event = MeshInboxMessageEvent(**event_detail)
+ self.__log.debug("CloudEvent validation passed")
+ return event
+ except ValidationError as e:
+ self.__log.error(
+ "CloudEvent validation failed",
+ validation_errors=str(e),
+ event_detail=event_detail
+ )
+ raise
+
+ def _handle_download(self, event, logger):
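+        """
+        Retrieves the MESH message, stores its content and publishes a downloaded event
+        """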
+ data = event.data
+
+ message = self.__mesh_client.retrieve_message(data.meshMessageId)
+ if not message:
+ logger.error("Message not found in MESH inbox")
+ return
+
+ logger.info(
+ "Retrieved MESH message",
+ sender=getattr(message, 'sender', ''),
+ local_id=getattr(message, 'local_id', ''),
+ workflow_id=getattr(message, 'workflow_id', ''),
+ subject=getattr(message, 'subject', ''),
+ message_type=getattr(message, 'message_type', '')
+ )
+
+ content = message.read()
+ logger.info("Downloaded MESH message content")
+
+ uri = self._store_message_content(
+ sender_id=data.senderId,
+ message_reference=data.messageReference,
+ message_content=content,
+ logger=logger
+ )
+
+ self._publish_downloaded_event(
+ incoming_event=event,
+ message_uri=uri
+ )
+
+ message.acknowledge()
+ logger.info("Acknowledged message")
+
+ self.__download_metric.record(1)
+
+ def _store_message_content(self, sender_id, message_reference, message_content, logger):
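+        """
+        Stores the message content via the document store and returns its S3 URI
+        """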
+ s3_key = self.__document_store.store_document(
+ sender_id=sender_id,
+ message_reference=message_reference,
+ content=message_content,
+ )
+
+ message_uri = f"s3://{self.__storage_bucket}/{s3_key}"
+ logger.info("Stored MESH message in S3",
+ s3_bucket=self.__storage_bucket,
+ s3_key=s3_key)
+
+ return message_uri
+
+ def _publish_downloaded_event(self, incoming_event, message_uri):
+ """
+ Publishes a MESHInboxMessageDownloaded event.
+ """
+ now = datetime.now(timezone.utc).isoformat()
+
+ cloud_event = {
+ **incoming_event.model_dump(),
+ 'id': str(uuid4()),
+ 'time': now,
+ 'recordedtime': now,
+ 'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.downloaded.v1',
+ 'dataschema': (
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/'
+ 'digital-letters-mesh-inbox-message-downloaded-data.schema.json'
+ ),
+ 'data': {
+ 'senderId': incoming_event.data.senderId,
+ 'messageReference': incoming_event.data.messageReference,
+ 'messageUri': message_uri,
+ }
+ }
+
+ try:
+ MeshDownloadMessageEvent(**cloud_event)
+ except ValidationError as e:
+ self.__log.error("Invalid MeshDownloadMessageEvent", error=str(e))
+ raise
+
+ failed = self.__event_publisher.send_events([cloud_event])
+ if failed:
+ msg = f"Failed to publish MESHInboxMessageDownloaded event: {failed}"
+ self.__log.error(msg, failed_count=len(failed))
+ raise RuntimeError(msg)
+
+ self.__log.info(
+ "Published MESHInboxMessageDownloaded event",
+ sender_id=incoming_event.data.senderId,
+ message_uri=message_uri,
+ message_reference=incoming_event.data.messageReference
+ )
diff --git a/lambdas/mesh-download/package_python_lambda.sh b/lambdas/mesh-download/package_python_lambda.sh
new file mode 100755
index 00000000..a9834152
--- /dev/null
+++ b/lambdas/mesh-download/package_python_lambda.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+set -e
+
+component_name="$1"
+
+rootdir=$(realpath "$(dirname "$0")/../..")
+source ${rootdir}/utils/get_version.sh
+
+dist_dir="${PWD}/target/dist"
+rm -rf "${dist_dir}"
+mkdir -p "${dist_dir}"
+
+# Extract internal (editable "-e" local packages) and external dependencies from requirements.txt
+grep -E '^-e ' requirements.txt | sed 's|^-e ||' > target/internal_requirements.txt || true
+grep -vE '^-e ' requirements.txt > target/external_requirements.txt || true
+
+# Install external dependencies (from PyPI)
+pip install --platform manylinux2014_x86_64 --only-binary=:all: -r target/external_requirements.txt --target ${dist_dir} --python-version 3.14 --implementation cp
+
+# Install internal dependencies (local packages)
+pip install -r target/internal_requirements.txt --target ${dist_dir}
+
+# Bundle application code
+pip install . --no-deps --target ${dist_dir}
diff --git a/lambdas/mesh-download/pytest.ini b/lambdas/mesh-download/pytest.ini
new file mode 100644
index 00000000..303659aa
--- /dev/null
+++ b/lambdas/mesh-download/pytest.ini
@@ -0,0 +1,15 @@
+[pytest]
+testpaths = mesh_download/__tests__
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+addopts = -v --tb=short
+
+[coverage:run]
+relative_files = True
+omit =
+    */mesh_download/__tests__/*
+ */test_*.py
+ */__pycache__/*
+ */venv/*
+ */env/*
diff --git a/lambdas/mesh-download/requirements-dev.txt b/lambdas/mesh-download/requirements-dev.txt
new file mode 100644
index 00000000..3d0bd76e
--- /dev/null
+++ b/lambdas/mesh-download/requirements-dev.txt
@@ -0,0 +1,5 @@
+-r requirements.txt
+autopep8>=2.0.2
+pylint>=2.17.4
+pytest>=7.0.1
+pytest-cov>=4.0.0
diff --git a/lambdas/mesh-download/requirements.txt b/lambdas/mesh-download/requirements.txt
new file mode 100644
index 00000000..e817b2f5
--- /dev/null
+++ b/lambdas/mesh-download/requirements.txt
@@ -0,0 +1,13 @@
+certifi>=2023.07.22
+mesh-client>=3.2.3
+structlog>=21.5.0
+orjson>=3.9.15
+pydantic>=2.0.0
+boto3>=1.28.62
+urllib3>=1.26.19,<2.0.0
+idna>=3.7
+requests>=2.32.0
+pyopenssl>=24.2.1
+-e ../../utils/event-publisher-py
+-e ../../utils/py-mock-mesh
+-e ../../utils/metric-publishers
diff --git a/lambdas/mesh-download/setup.py b/lambdas/mesh-download/setup.py
new file mode 100644
index 00000000..cb104505
--- /dev/null
+++ b/lambdas/mesh-download/setup.py
@@ -0,0 +1,7 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="mesh-download",
+ version="0.1.0",
+ packages=find_packages(),
+)
diff --git a/lambdas/mesh-poll/.eslintignore b/lambdas/mesh-poll/.eslintignore
deleted file mode 100644
index 1521c8b7..00000000
--- a/lambdas/mesh-poll/.eslintignore
+++ /dev/null
@@ -1 +0,0 @@
-dist
diff --git a/lambdas/mesh-poll/.gitignore b/lambdas/mesh-poll/.gitignore
index 80323f7c..9f7550b1 100644
--- a/lambdas/mesh-poll/.gitignore
+++ b/lambdas/mesh-poll/.gitignore
@@ -1,4 +1,2 @@
-coverage
-node_modules
-dist
-.reports
+__pycache__
+.venv
diff --git a/lambdas/mesh-poll/Makefile b/lambdas/mesh-poll/Makefile
new file mode 100644
index 00000000..21759ef3
--- /dev/null
+++ b/lambdas/mesh-poll/Makefile
@@ -0,0 +1,34 @@
+PACKAGE=mesh_poll
+VERSION=0.1.0
+
+install:
+ pip install -r requirements.txt
+
+install-dev:
+ pip install -r requirements-dev.txt
+
+test:
+ cd ../.. && PYTHONPATH=lambdas/mesh-poll:$$PYTHONPATH pytest lambdas/mesh-poll/mesh_poll/__tests__/ -v
+
+coverage:
+ cd ../.. && PYTHONPATH=lambdas/mesh-poll:$$PYTHONPATH pytest lambdas/mesh-poll/mesh_poll/__tests__/ \
+ --cov=lambdas/mesh-poll/mesh_poll \
+ --cov-config=lambdas/mesh-poll/pytest.ini \
+ --cov-report=html:lambdas/mesh-poll/htmlcov \
+ --cov-report=term-missing \
+ --cov-report=xml:lambdas/mesh-poll/coverage.xml \
+ --cov-branch
+
+lint:
+ pylint mesh_poll
+
+format:
+ autopep8 -ri .
+
+package:
+ ./package_python_lambda.sh meshpolllambda
+
+clean:
+ rm -rf target
+
+.PHONY: install install-dev test coverage lint format package clean
diff --git a/lambdas/mesh-poll/jest.config.ts b/lambdas/mesh-poll/jest.config.ts
deleted file mode 100644
index c02601ae..00000000
--- a/lambdas/mesh-poll/jest.config.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import { baseJestConfig } from '../../jest.config.base';
-
-const config = baseJestConfig;
-
-export default config;
diff --git a/lambdas/mesh-poll/mesh_poll/__init__.py b/lambdas/mesh-poll/mesh_poll/__init__.py
new file mode 100644
index 00000000..21ae4dbf
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/__init__.py
@@ -0,0 +1,12 @@
+"""
+MESH Poll Lambda
+
+This module handles polling MESH inbox for new messages and publishing events.
+"""
+
+__version__ = '0.1.0'
+from .config import *
+from .handler import *
+from .processor import *
+from .sender_lookup import *
+from .errors import *
diff --git a/lambdas/mesh-poll/mesh_poll/__tests__/__init__.py b/lambdas/mesh-poll/mesh_poll/__tests__/__init__.py
new file mode 100644
index 00000000..3be5b3aa
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/__tests__/__init__.py
@@ -0,0 +1 @@
+# Test package init
diff --git a/lambdas/mesh-poll/mesh_poll/__tests__/test_handler.py b/lambdas/mesh-poll/mesh_poll/__tests__/test_handler.py
new file mode 100644
index 00000000..0ef47383
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/__tests__/test_handler.py
@@ -0,0 +1,116 @@
+"""
+Tests for Lambda handler
+"""
+import pytest
+from unittest.mock import Mock, patch, MagicMock
+
+
+def setup_mocks():
+ """
+ Create all mock objects needed for handler testing
+ """
+ mock_context = Mock()
+ mock_context.get_remaining_time_in_millis = Mock(return_value=300000)
+
+ mock_config = MagicMock()
+ mock_config.mesh_client = Mock()
+ mock_config.polling_metric = Mock()
+
+ mock_ssm = Mock()
+
+ mock_sender_lookup = Mock()
+
+ mock_processor = Mock()
+ mock_processor.process_messages = Mock()
+
+ return (
+ mock_context,
+ mock_config,
+ mock_ssm,
+ mock_sender_lookup,
+ mock_processor
+ )
+
+
+class TestHandler:
+ """Test suite for Lambda handler"""
+
+ @patch('mesh_poll.handler.Config')
+ @patch('mesh_poll.handler.SenderLookup')
+ @patch('mesh_poll.handler.MeshMessageProcessor')
+ @patch('mesh_poll.handler.client')
+ def test_handler_success(self, mock_boto_client, mock_processor_class, mock_sender_lookup_class, mock_config_class):
+ """Test successful handler execution"""
+ from mesh_poll.handler import handler
+
+ (mock_context, mock_config, mock_ssm,
+ mock_sender_lookup, mock_processor) = setup_mocks()
+
+ # Wire up the mocks
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_boto_client.return_value = mock_ssm
+ mock_sender_lookup_class.return_value = mock_sender_lookup
+ mock_processor_class.return_value = mock_processor
+
+ # Execute handler
+ handler(None, mock_context)
+
+ # Verify Config was created and used as context manager
+ mock_config_class.assert_called_once()
+ mock_config_class.return_value.__enter__.assert_called_once()
+
+ # Verify SSM client was created
+ mock_boto_client.assert_called_once_with('ssm')
+
+ # Verify SenderLookup was created with correct parameters
+ mock_sender_lookup_class.assert_called_once()
+ call_args = mock_sender_lookup_class.call_args
+ assert call_args[0][0] == mock_ssm
+ assert call_args[0][1] == mock_config
+
+ # Verify MeshMessageProcessor was created with correct parameters
+ mock_processor_class.assert_called_once()
+ call_kwargs = mock_processor_class.call_args[1]
+ assert call_kwargs['config'] == mock_config
+ assert call_kwargs['sender_lookup'] == mock_sender_lookup
+ assert call_kwargs['mesh_client'] == mock_config.mesh_client
+ assert call_kwargs['get_remaining_time_in_millis'] == mock_context.get_remaining_time_in_millis
+ assert call_kwargs['polling_metric'] == mock_config.polling_metric
+ assert 'log' in call_kwargs
+
+ # Verify process_messages was called
+ mock_processor.process_messages.assert_called_once()
+
+ @patch('mesh_poll.handler.Config')
+ @patch('mesh_poll.handler.SenderLookup')
+ @patch('mesh_poll.handler.MeshMessageProcessor')
+ @patch('mesh_poll.handler.client')
+ def test_handler_config_cleanup_on_exception(self, mock_boto_client, mock_processor_class, mock_sender_lookup_class, mock_config_class):
+ """Test that Config context manager cleanup is called even on exception"""
+ from mesh_poll.handler import handler
+
+ (mock_context, mock_config, mock_ssm,
+ mock_sender_lookup, mock_processor) = setup_mocks()
+
+ # Make processor raise an exception
+ test_exception = RuntimeError("Test error")
+ mock_processor.process_messages.side_effect = test_exception
+
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_exit = Mock(return_value=None)
+ mock_config_class.return_value.__exit__ = mock_exit
+ mock_boto_client.return_value = mock_ssm
+ mock_sender_lookup_class.return_value = mock_sender_lookup
+ mock_processor_class.return_value = mock_processor
+
+ # Handler should raise the exception
+ with pytest.raises(RuntimeError, match="Test error"):
+ handler(None, mock_context)
+
+ # Verify __exit__ was still called (cleanup happened)
+ mock_exit.assert_called_once()
+ # __exit__ should be called with exception info on error
+ call_args = mock_exit.call_args[0]
+ assert call_args[0] == RuntimeError
+ assert call_args[1] == test_exception
diff --git a/lambdas/mesh-poll/mesh_poll/__tests__/test_processor.py b/lambdas/mesh-poll/mesh_poll/__tests__/test_processor.py
new file mode 100644
index 00000000..142f5654
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/__tests__/test_processor.py
@@ -0,0 +1,232 @@
+"""
+Tests for mesh-poll MeshMessageProcessor
+Following the pattern from backend comms-mgr mesh-poll tests
+"""
+from unittest.mock import Mock, patch
+from mesh_client import MeshClient
+from mesh_poll.processor import MeshMessageProcessor
+
+
+def setup_mocks():
+ """
+ Create all mock objects needed for processor testing
+ """
+ config = Mock()
+ config.maximum_runtime_milliseconds = "500"
+ config.ssm_prefix = "/dl/test/mesh"
+ config.environment = "development"
+
+ sender_lookup = Mock()
+ sender_lookup.is_valid_sender.return_value = True # Default to valid sender
+ sender_lookup.get_sender_id.return_value = "test-sender-id"
+
+ mesh_client = Mock(spec=MeshClient)
+
+ log = Mock()
+
+ polling_metric = Mock()
+
+ return (
+ config,
+ sender_lookup,
+ mesh_client,
+ log,
+ polling_metric
+ )
+
+
+def setup_message_data(test_id="0"):
+ """
+ Create test message data
+ """
+ from mesh_client import Message
+
+ message = Mock(spec=Message)
+ message.sender = f"TEST_SENDER_{test_id}"
+ message.subject = f"test_subject_{test_id}"
+ message.local_id = f"test_local_id_{test_id}"
+ message.workflow_id = "NHS_NOTIFY_SEND_REQUEST"
+ message.message_type = "DATA"
+ message.id.return_value = f"test_message_id_{test_id}"
+ message.read.return_value = b"test_message_%s_contents" % test_id.encode()
+
+ return message
+
+
+def get_remaining_time_in_millis():
+ return 1000
+
+
+def get_remaining_time_in_millis_near_timeout():
+ return 100
+
+
+@patch('mesh_poll.processor.EventPublisher')
+class TestMeshMessageProcessor:
+ """Test suite for MeshMessageProcessor"""
+
+ def test_process_messages_iterates_through_inbox(self, mock_event_publisher_class):
+ """Test that processor iterates through all messages in MESH inbox"""
+ (config, sender_lookup, mesh_client, log, polling_metric) = setup_mocks()
+ message1 = setup_message_data("1")
+ message2 = setup_message_data("2")
+
+ processor = MeshMessageProcessor(
+ config=config,
+ sender_lookup=sender_lookup,
+ mesh_client=mesh_client,
+ get_remaining_time_in_millis=get_remaining_time_in_millis,
+ log=log,
+ polling_metric=polling_metric
+ )
+
+ mesh_client.iterate_all_messages.side_effect = [
+ [message1, message2], []]
+ sender_lookup.is_valid_sender.return_value = True
+
+ processor.process_messages()
+
+ mesh_client.handshake.assert_called_once()
+ assert mesh_client.iterate_all_messages.call_count == 2
+ polling_metric.record.assert_called_once()
+
+ def test_process_messages_stops_near_timeout(self, mock_event_publisher_class):
+ """Test that processor stops processing when near timeout"""
+ (config, sender_lookup, mesh_client, log, polling_metric) = setup_mocks()
+ message1 = setup_message_data("1")
+
+ mock_event_publisher = Mock()
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ processor = MeshMessageProcessor(
+ config=config,
+ sender_lookup=sender_lookup,
+ mesh_client=mesh_client,
+ get_remaining_time_in_millis=get_remaining_time_in_millis_near_timeout,
+ log=log,
+ polling_metric=polling_metric
+ )
+
+ mesh_client.iterate_all_messages.return_value = [message1]
+
+ processor.process_messages()
+
+ sender_lookup.is_valid_sender.assert_not_called()
+ mock_event_publisher.send_events.assert_not_called() # No events published when timeout
+ polling_metric.record.assert_called_once()
+
+ def test_process_message_with_valid_sender(self, mock_event_publisher_class):
+ """Test processing a single message from valid sender"""
+ (config, sender_lookup, mesh_client, log, polling_metric) = setup_mocks()
+ message = setup_message_data("1")
+
+ mock_event_publisher = Mock()
+ mock_event_publisher.send_events.return_value = [] # No failed events
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ processor = MeshMessageProcessor(
+ config=config,
+ sender_lookup=sender_lookup,
+ mesh_client=mesh_client,
+ get_remaining_time_in_millis=get_remaining_time_in_millis,
+ log=log,
+ polling_metric=polling_metric
+ )
+
+ sender_lookup.is_valid_sender.return_value = True
+
+ processor.process_message(message)
+
+ mesh_client.handshake.assert_called_once()
+ sender_lookup.is_valid_sender.assert_called_once_with(message.sender)
+ mock_event_publisher.send_events.assert_called_once()
+ message.acknowledge.assert_not_called() # Only acknowledged on auth error
+
+ def test_process_message_with_unknown_sender(self, mock_event_publisher_class):
+ """Test that messages from unknown senders are rejected silently"""
+ (config, sender_lookup, mesh_client, log, polling_metric) = setup_mocks()
+ message = setup_message_data("1")
+
+ mock_event_publisher = Mock()
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ # Invalid sender
+ sender_lookup.is_valid_sender.return_value = False
+
+ processor = MeshMessageProcessor(
+ config=config,
+ sender_lookup=sender_lookup,
+ mesh_client=mesh_client,
+ get_remaining_time_in_millis=get_remaining_time_in_millis,
+ log=log,
+ polling_metric=polling_metric
+ )
+
+ processor.process_message(message)
+
+ sender_lookup.is_valid_sender.assert_called_once_with(message.sender)
+ message.acknowledge.assert_called_once()
+ mock_event_publisher.send_events.assert_not_called() # No event published for invalid sender
+
+ def test_process_message_logs_error_on_event_publish_failure(self, mock_event_publisher_class):
+ """Test that processor logs error when event publishing fails and does not acknowledge message"""
+ (config, sender_lookup, mesh_client, log, polling_metric) = setup_mocks()
+ message = setup_message_data("1")
+
+ mock_event_publisher = Mock()
+ mock_event_publisher.send_events.return_value = [{"id": "failed-event-1"}]
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ sender_lookup.is_valid_sender.return_value = True
+ sender_lookup.get_sender_id.return_value = "test_sender_id"
+
+ processor = MeshMessageProcessor(
+ config=config,
+ sender_lookup=sender_lookup,
+ mesh_client=mesh_client,
+ get_remaining_time_in_millis=get_remaining_time_in_millis,
+ log=log,
+ polling_metric=polling_metric
+ )
+
+ processor.process_message(message)
+
+ mock_event_publisher.send_events.assert_called_once()
+ message.acknowledge.assert_not_called()
+ # Verify error was logged
+ log.error.assert_called()
+
+ def test_process_messages_across_multiple_iterations(self, mock_event_publisher_class):
+ """Test that processor continues polling until no messages remain"""
+ (config, sender_lookup, mesh_client, log, polling_metric) = setup_mocks()
+ message1 = setup_message_data("1")
+ message2 = setup_message_data("2")
+ message3 = setup_message_data("3")
+
+ mock_event_publisher = Mock()
+ mock_event_publisher.send_events.return_value = [] # No failed events
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ processor = MeshMessageProcessor(
+ config=config,
+ sender_lookup=sender_lookup,
+ mesh_client=mesh_client,
+ get_remaining_time_in_millis=get_remaining_time_in_millis,
+ log=log,
+ polling_metric=polling_metric
+ )
+
+ mesh_client.iterate_all_messages.side_effect = [
+ [message1, message2], # First iteration
+ [message3], # Second iteration
+ [] # Third iteration - empty, stops
+ ]
+ sender_lookup.is_valid_sender.return_value = True
+
+ processor.process_messages()
+
+ mesh_client.handshake.assert_called_once()
+ assert mesh_client.iterate_all_messages.call_count == 3
+ assert sender_lookup.is_valid_sender.call_count == 3
+ assert mock_event_publisher.send_events.call_count == 3 # Events published for all 3 messages
+ polling_metric.record.assert_called_once()
diff --git a/lambdas/mesh-poll/mesh_poll/__tests__/test_sender_lookup.py b/lambdas/mesh-poll/mesh_poll/__tests__/test_sender_lookup.py
new file mode 100644
index 00000000..a2a3ad53
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/__tests__/test_sender_lookup.py
@@ -0,0 +1,332 @@
+"""
+Tests for SenderLookup
+"""
+import json
+from unittest.mock import Mock, call
+from mesh_poll.sender_lookup import SenderLookup
+
+
+def setup_mocks():
+ ssm = Mock()
+
+ config = Mock()
+ config.ssm_prefix = "/dl/test/mesh"
+
+ logger = Mock()
+
+ return ssm, config, logger
+
+
+def create_sender_parameter(sender_id, mailbox_id):
+ return {
+ "Name": f"/dl/test/mesh/senders/{sender_id}",
+ "Value": json.dumps({
+ "senderId": sender_id,
+ "meshMailboxSenderId": mailbox_id,
+ "name": f"Test Sender {sender_id}"
+ })
+ }
+
+
+class TestSenderLookup:
+ """Test suite for SenderLookup"""
+
+ def test_load_valid_senders_single_page(self):
+ """Test loading valid senders from SSM (single page)"""
+
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ create_sender_parameter("sender2", "MAILBOX_002"),
+ create_sender_parameter("sender3", "MAILBOX_003"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ ssm.get_parameters_by_path.assert_called_once_with(
+ Path="/dl/test/mesh/senders/",
+ WithDecryption=True
+ )
+ assert sender_lookup.is_valid_sender("MAILBOX_001")
+ assert sender_lookup.is_valid_sender("MAILBOX_002")
+ assert sender_lookup.is_valid_sender("MAILBOX_003")
+ assert not sender_lookup.is_valid_sender("UNKNOWN_MAILBOX")
+
+ def test_load_valid_senders_multiple_pages(self):
+ """Test loading valid senders from SSM with pagination"""
+
+ ssm, config, logger = setup_mocks()
+
+ # Simulate paginated response
+ ssm.get_parameters_by_path.side_effect = [
+ {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ create_sender_parameter("sender2", "MAILBOX_002"),
+ ],
+ "NextToken": "token123"
+ },
+ {
+ "Parameters": [
+ create_sender_parameter("sender3", "MAILBOX_003"),
+ ],
+ }
+ ]
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert ssm.get_parameters_by_path.call_count == 2
+ ssm.get_parameters_by_path.assert_has_calls([
+ call(Path="/dl/test/mesh/senders/", WithDecryption=True),
+ call(Path="/dl/test/mesh/senders/", WithDecryption=True, NextToken="token123")
+ ], any_order=False)
+ assert sender_lookup.is_valid_sender("MAILBOX_001")
+ assert sender_lookup.is_valid_sender("MAILBOX_002")
+ assert sender_lookup.is_valid_sender("MAILBOX_003")
+
+ def test_is_valid_sender_case_insensitive(self):
+ """Test that sender validation is case-insensitive"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "Mailbox_MixedCase"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.is_valid_sender("Mailbox_MixedCase")
+ assert sender_lookup.is_valid_sender("MAILBOX_MIXEDCASE")
+ assert sender_lookup.is_valid_sender("mailbox_mixedcase")
+ assert sender_lookup.is_valid_sender("mAiLbOx_MiXeDcAsE")
+
+ def test_is_valid_sender_returns_false_for_empty_mailbox_id(self):
+ """Test that empty mailbox IDs are rejected"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert not sender_lookup.is_valid_sender("")
+ assert not sender_lookup.is_valid_sender(None)
+
+ def test_load_valid_senders_handles_malformed_json(self):
+ """Test that malformed JSON in parameters is handled gracefully"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ {
+ "Name": "/dl/test/mesh/senders/bad_sender",
+ "Value": "not valid json {{"
+ },
+ create_sender_parameter("sender3", "MAILBOX_003"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.is_valid_sender("MAILBOX_001")
+ assert sender_lookup.is_valid_sender("MAILBOX_003")
+ assert logger.warn.called
+
+ def test_load_valid_senders_handles_missing_mailbox_id(self):
+ """Test that parameters without meshMailboxSenderId are skipped"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ {
+ "Name": "/dl/test/mesh/senders/incomplete_sender",
+ "Value": json.dumps({
+ "senderId": "incomplete",
+ "name": "Incomplete Sender"
+ # Missing meshMailboxSenderId
+ })
+ },
+ create_sender_parameter("sender3", "MAILBOX_003"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.is_valid_sender("MAILBOX_001")
+ assert sender_lookup.is_valid_sender("MAILBOX_003")
+
+ def test_load_valid_senders_handles_empty_mailbox_id(self):
+ """Test that empty meshMailboxSenderId values are skipped"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ {
+ "Name": "/dl/test/mesh/senders/empty_mailbox",
+ "Value": json.dumps({
+ "senderId": "empty",
+ "meshMailboxSenderId": "", # Empty string
+ "name": "Empty Mailbox Sender"
+ })
+ },
+ create_sender_parameter("sender3", "MAILBOX_003"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.is_valid_sender("MAILBOX_001")
+ assert sender_lookup.is_valid_sender("MAILBOX_003")
+ assert not sender_lookup.is_valid_sender("")
+
+ def test_load_valid_senders_with_trailing_slash_in_path(self):
+ """Test that paths with trailing slashes are handled correctly"""
+ ssm, config, logger = setup_mocks()
+ config.ssm_prefix = "/dl/test/mesh/" # Trailing slash
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ ssm.get_parameters_by_path.assert_called_once_with(
+ Path="/dl/test/mesh/senders/",
+ WithDecryption=True
+ )
+ assert sender_lookup.is_valid_sender("MAILBOX_001")
+
+ def test_load_valid_senders_handles_empty_response(self):
+ """Test that empty SSM response is handled correctly"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": []
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert not sender_lookup.is_valid_sender("ANY_MAILBOX")
+ logger.debug.assert_called_once()
+ call_args = logger.debug.call_args[0][0]
+ assert "0" in call_args # Should log count of 0
+
+ def test_get_sender_id_returns_correct_sender_id(self):
+ """Test that get_sender_id returns correct sender ID for valid mailbox IDs"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ create_sender_parameter("sender2", "MAILBOX_002"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_sender_id("MAILBOX_001") == "sender1"
+ assert sender_lookup.get_sender_id("MAILBOX_002") == "sender2"
+
+ def test_get_sender_id_case_insensitive(self):
+ """Test that get_sender_id lookup is case-insensitive"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "Mailbox_MixedCase"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_sender_id("Mailbox_MixedCase") == "sender1"
+ assert sender_lookup.get_sender_id("MAILBOX_MIXEDCASE") == "sender1"
+ assert sender_lookup.get_sender_id("mailbox_mixedcase") == "sender1"
+
+ def test_get_sender_id_returns_none_for_unknown_mailbox(self):
+ """Test that get_sender_id returns None for unknown mailbox IDs"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_sender_id("UNKNOWN_MAILBOX") is None
+
+ def test_get_sender_id_returns_none_for_empty_mailbox_id(self):
+ """Test that get_sender_id returns None for empty/None mailbox IDs"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ assert sender_lookup.get_sender_id("") is None
+ assert sender_lookup.get_sender_id(None) is None
+
+ def test_load_valid_senders_skips_entries_with_missing_sender_id(self):
+ """Test that entries without senderId are skipped from validation and mapping"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ {
+ "Name": "/dl/test/mesh/senders/incomplete",
+ "Value": json.dumps({
+ "meshMailboxSenderId": "MAILBOX_BAD",
+ "name": "Incomplete"
+ # Missing senderId
+ })
+ },
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ # Entry with missing senderId should not be valid or mapped
+ assert not sender_lookup.is_valid_sender("MAILBOX_BAD")
+ assert sender_lookup.get_sender_id("MAILBOX_BAD") is None
+
+ assert sender_lookup.is_valid_sender("MAILBOX_001")
+ assert sender_lookup.get_sender_id("MAILBOX_001") == "sender1"
+
+ def test_load_valid_senders_skips_entries_with_empty_sender_id(self):
+ """Test that entries with empty senderId are skipped from validation and mapping"""
+ ssm, config, logger = setup_mocks()
+
+ ssm.get_parameters_by_path.return_value = {
+ "Parameters": [
+ create_sender_parameter("sender1", "MAILBOX_001"),
+ create_sender_parameter("", "MAILBOX_002"),
+ ]
+ }
+
+ sender_lookup = SenderLookup(ssm, config, logger)
+
+ # Entry with empty senderId should not be valid or mapped
+ assert not sender_lookup.is_valid_sender("MAILBOX_002")
+ assert sender_lookup.get_sender_id("MAILBOX_002") is None
+
+ assert sender_lookup.is_valid_sender("MAILBOX_001")
+ assert sender_lookup.get_sender_id("MAILBOX_001") == "sender1"
diff --git a/lambdas/mesh-poll/mesh_poll/config.py b/lambdas/mesh-poll/mesh_poll/config.py
new file mode 100644
index 00000000..4db5508a
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/config.py
@@ -0,0 +1,50 @@
+"""
+Module for configuring Mesh Poll application
+"""
+from event_publisher import BaseMeshConfig, log
+from metric_publishers.metric_client import Metric
+
+
+_REQUIRED_ENV_VAR_MAP = {
+ "ssm_prefix": "SSM_PREFIX",
+ "maximum_runtime_milliseconds": "MAXIMUM_RUNTIME_MILLISECONDS",
+ "environment": "ENVIRONMENT",
+ "event_bus_arn": "EVENT_PUBLISHER_EVENT_BUS_ARN",
+ "event_publisher_dlq_url": "EVENT_PUBLISHER_DLQ_URL",
+ "certificate_expiry_metric_name": "CERTIFICATE_EXPIRY_METRIC_NAME",
+ "certificate_expiry_metric_namespace": "CERTIFICATE_EXPIRY_METRIC_NAMESPACE",
+ "polling_metric_name": "POLLING_METRIC_NAME",
+ "polling_metric_namespace": "POLLING_METRIC_NAMESPACE"
+}
+
+
+class Config(BaseMeshConfig):
+ """
+ Represents the configuration of the Mesh Poll application.
+ Inherits common MESH configuration from BaseMeshConfig.
+ """
+
+ _REQUIRED_ENV_VAR_MAP = _REQUIRED_ENV_VAR_MAP
+
+ def __init__(self, ssm=None):
+ super().__init__(ssm=ssm)
+
+ self.polling_metric = None
+
+ def __enter__(self):
+ super().__enter__()
+
+ # Build polling metric
+ self.polling_metric = self.build_polling_metric()
+
+ return self
+
+ def build_polling_metric(self):
+ """
+ Returns a custom metric to record messages found in the MESH inbox during polling
+ """
+ return Metric(
+ name=self.polling_metric_name,
+ namespace=self.polling_metric_namespace,
+ dimensions={"Environment": self.environment}
+ )
diff --git a/lambdas/mesh-poll/mesh_poll/errors.py b/lambdas/mesh-poll/mesh_poll/errors.py
new file mode 100644
index 00000000..1e8be801
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/errors.py
@@ -0,0 +1,31 @@
+"""
+Module representing possible errors within this application
+"""
+
+import traceback
+
+
+class AuthorizationError(Exception):
+ """
+ Error representing when a sender is not authorized to perform the requested action
+ """
+
+
+class InvalidMeshEndpointError(Exception):
+ """
+ Indicates an invalid MESH endpoint in configuration
+ """
+
+
+class InvalidEnvironmentVariableError(Exception):
+ """
+ Indicates an invalid environment variable
+ """
+
+
+def format_exception(exception):
+ """
+ Returns a nicely formatted exception string
+ """
+ return ''.join(traceback.format_exception(
+ type(exception), exception, exception.__traceback__))
diff --git a/lambdas/mesh-poll/mesh_poll/handler.py b/lambdas/mesh-poll/mesh_poll/handler.py
new file mode 100644
index 00000000..df449e86
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/handler.py
@@ -0,0 +1,20 @@
+"""lambda handler for mesh poll application"""
+
+from boto3 import client
+from .sender_lookup import SenderLookup
+from .config import Config, log
+from .processor import MeshMessageProcessor
+
+
+def handler(_, context):
+ """lambda handler for mesh poll application"""
+ with Config() as config:
+ processor = MeshMessageProcessor(
+ config=config,
+ sender_lookup=SenderLookup(client('ssm'), config, log),
+ mesh_client=config.mesh_client,
+ get_remaining_time_in_millis=context.get_remaining_time_in_millis,
+ log=log,
+ polling_metric=config.polling_metric)
+
+ processor.process_messages()
diff --git a/lambdas/mesh-poll/mesh_poll/processor.py b/lambdas/mesh-poll/mesh_poll/processor.py
new file mode 100644
index 00000000..8fc82278
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/processor.py
@@ -0,0 +1,160 @@
+"""
+Module for processing messages from a MESH mailbox
+"""
+
+from datetime import datetime, timezone
+from uuid import uuid4
+
+from event_publisher import EventPublisher
+
+from .errors import AuthorizationError, format_exception
+
+ACKNOWLEDGED_MESSAGE = "acknowledged message"
+PROCESSING_MESSAGE = "processing message"
+
+
+class MeshMessageProcessor: # pylint: disable=too-many-instance-attributes
+ """
+ Class that processes messages from a MESH inbox
+ """
+
+ def __init__(self, **kwargs):
+ self.__config = kwargs['config']
+ self.__mesh_client = kwargs['mesh_client']
+ self.__sender_lookup = kwargs['sender_lookup']
+ self.__log = kwargs['log']
+ self.__get_remaining_time_in_millis = kwargs['get_remaining_time_in_millis']
+ self.__mesh_client.handshake()
+ self.__polling_metric = kwargs['polling_metric']
+
+ deployment = 'primary'
+ plane = 'data-plane'
+ self.__cloud_event_source = f'/nhs/england/notify/{self.__config.environment}/{deployment}/{plane}/digitalletters/mesh'
+
+ # Initialize EventPublisher
+ self.__event_publisher = EventPublisher(
+ event_bus_arn=self.__config.event_bus_arn,
+ dlq_url=self.__config.event_publisher_dlq_url,
+ logger=self.__log
+ )
+
+ def is_enough_time_to_process_message(self):
+ """
+ Determines whether the lambda should continue to process messages
+ """
+ remaining_time_in_millis = self.__get_remaining_time_in_millis()
+
+ return int(self.__config.maximum_runtime_milliseconds) \
+ < remaining_time_in_millis
+
+ def process_messages(self):
+ """
+ Iterates over and processes messages in a MESH inbox
+ """
+ is_message_iterator_empty = False
+
+ while not is_message_iterator_empty:
+ self.__log.info('Polling for messages')
+
+ # if iterate_all_messages does not return any items, we will exit the loop
+ is_message_iterator_empty = True
+
+ # Initial processing of each message
+ for message in self.__mesh_client.iterate_all_messages():
+ is_message_iterator_empty = False
+ if not self.is_enough_time_to_process_message():
+ self.__log.info(
+ 'Not enough time to process more files. Exiting')
+ self.__polling_metric.record(1)
+ return
+
+ self.process_message(message)
+
+ self.__log.info('No new messages found. Exiting')
+ self.__polling_metric.record(1)
+
+ def process_message(self, message):
+ """
+ Processes an individual message from a MESH inbox - validates sender and publishes event
+ """
+
+ message_type = getattr(message, 'message_type', '')
+ sender_mailbox_id = getattr(message, "sender", "")
+ workflow_id = getattr(message, "workflow_id", "")
+ subject = getattr(message, "subject", "")
+ message_reference = getattr(message, "local_id", "")
+
+ logger = self.__log.bind(
+ message_id=message.id(),
+ sender=sender_mailbox_id,
+ workflow_id=workflow_id,
+ subject=subject,
+ local_id=message_reference,
+ message_type=message_type,
+ )
+
+ logger.info(PROCESSING_MESSAGE)
+
+ try:
+ # Basic sender validation - only publish events for known senders
+ if not self.__sender_lookup.is_valid_sender(sender_mailbox_id):
+ raise AuthorizationError(
+ f'Cannot authorize sender with mailbox ID "{sender_mailbox_id}"')
+
+ # Get the corresponding sender ID
+ sender_id = self.__sender_lookup.get_sender_id(sender_mailbox_id)
+
+ # Publish event for valid sender
+ message_id = message.id()
+ event_detail = {
+ "data": {
+ "meshMessageId": message_id,
+ "senderId": sender_id,
+ "messageReference": message_reference
+ }
+ }
+
+ self._publish_mesh_inbox_message_received_event(event_detail)
+ logger.info(
+ "published MESHInboxMessageReceived event for valid sender")
+
+ except AuthorizationError as exception:
+ logger.error(format_exception(exception))
+ message.acknowledge() # Remove from inbox - no notification to sender
+ logger.info(ACKNOWLEDGED_MESSAGE)
+ return
+
+ except Exception as exc: # pylint: disable=broad-except
+ logger.error(format_exception(exc))
+
+ def _publish_mesh_inbox_message_received_event(self, event_detail):
+ """
+ Publishes a MESHInboxMessageReceived event for the retriever component.
+ """
+ now = datetime.now(timezone.utc).isoformat()
+
+ cloud_event = {
+ 'id': str(uuid4()),
+ 'specversion': '1.0',
+ 'source': self.__cloud_event_source,
+ 'subject': 'customer/00000000-0000-0000-0000-000000000000/recipient/00000000-0000-0000-0000-000000000000',
+ 'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1',
+ 'time': now,
+ 'recordedtime': now,
+ 'severitynumber': 2,
+ 'severitytext': 'INFO',
+ 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ 'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-mesh-inbox-message-received-data.schema.json',
+ 'data': event_detail.get('data', {}),
+ }
+
+ failed_events = self.__event_publisher.send_events([cloud_event])
+
+ if failed_events:
+ error_msg = f"Failed to publish MESHInboxMessageReceived event: {failed_events}"
+ self.__log.error(error_msg, failed_count=len(failed_events))
+ raise RuntimeError(error_msg)
+
+ self.__log.info("Published MESHInboxMessageReceived event",
+ mesh_message_id=event_detail["data"]["meshMessageId"],
+ sender_id=event_detail["data"]["senderId"])
diff --git a/lambdas/mesh-poll/mesh_poll/sender_lookup.py b/lambdas/mesh-poll/mesh_poll/sender_lookup.py
new file mode 100644
index 00000000..f44b69de
--- /dev/null
+++ b/lambdas/mesh-poll/mesh_poll/sender_lookup.py
@@ -0,0 +1,120 @@
+"""
+Module for looking up valid MESH senders and their sender IDs from SSM parameters
+"""
+
+import json
+from .errors import format_exception
+
+
+class SenderLookup:
+ """
+ Lightweight sender lookup for basic sender validation and sender ID extraction
+ """
+
+ def __init__(self, ssm, config, logger):
+ self.__ssm = ssm
+ self.__config = config
+ self.__logger = logger
+ self.__valid_senders = set()
+ self.__mailbox_to_sender = {}
+ self.load_valid_senders()
+
+ def is_valid_sender(self, mailbox_id):
+ """
+ Check if a MESH mailbox ID is from a known sender
+ """
+ if not mailbox_id:
+ return False
+
+ return mailbox_id.upper() in self.__valid_senders
+
+ def get_sender_id(self, mailbox_id):
+ """
+ Get the sender ID for a given MESH mailbox ID
+ """
+ if not mailbox_id:
+ return None
+
+ return self.__mailbox_to_sender.get(mailbox_id.upper())
+
+ def load_valid_senders(self):
+ """
+ Loads mailbox IDs and their corresponding sender IDs into memory
+ """
+ mailbox_ids = set()
+ mailbox_to_sender = {}
+ next_token = ""
+ page_number = 0
+
+ while next_token or page_number < 1:
+ (page_mailbox_ids, page_mapping, token) = self.__get_page(next_token)
+ mailbox_ids.update(page_mailbox_ids)
+ mailbox_to_sender.update(page_mapping)
+ next_token = token
+ page_number += 1
+
+ self.__valid_senders = mailbox_ids
+ self.__mailbox_to_sender = mailbox_to_sender
+ self.__logger.debug(
+ f"Loaded {len(self.__valid_senders)} valid sender mailbox IDs")
+
+ def __get_page(self, next_token=""):
+ """
+ Loads a page of sender data and extracts mailbox IDs and sender IDs
+ """
+ senders_path = f"{self.__config.ssm_prefix.rstrip('/')}/senders/"
+
+ if len(next_token) == 0:
+ response = self.__ssm.get_parameters_by_path(
+ Path=senders_path,
+ WithDecryption=True,
+ )
+ else:
+ response = self.__ssm.get_parameters_by_path(
+ Path=senders_path,
+ WithDecryption=True,
+ NextToken=next_token,
+ )
+
+ mailbox_ids = set()
+ mailbox_to_sender = {}
+
+ if "Parameters" in response:
+ for parameter in response["Parameters"]:
+ mailbox_id = self.__extract_mailbox_id(parameter)
+ sender_id = self.__extract_sender_id(parameter)
+ if mailbox_id and sender_id:
+ mailbox_id_upper = mailbox_id.upper()
+ mailbox_ids.add(mailbox_id_upper)
+ mailbox_to_sender[mailbox_id_upper] = sender_id
+
+ new_next_token = response.get("NextToken", "")
+ return (mailbox_ids, mailbox_to_sender, new_next_token)
+
+ def __extract_mailbox_id(self, parameter):
+ """
+ Extract just the meshMailboxSenderId from a sender parameter
+ """
+ if "Value" not in parameter:
+ return None
+
+ try:
+ sender_config = json.loads(parameter["Value"])
+ return sender_config.get("meshMailboxSenderId", "")
+ except (ValueError, AttributeError) as exception:
+ self.__logger.warn(
+ f"Failed to parse mailbox ID from parameter {parameter['Name']}")
+ self.__logger.error(format_exception(exception))
+ return None
+
+ def __extract_sender_id(self, parameter):
+ """
+ Extract just the sender ID from a sender parameter
+ """
+ if "Value" not in parameter:
+ return None
+
+ try:
+ sender_config = json.loads(parameter["Value"])
+ return sender_config.get("senderId", "")
+ except (ValueError, AttributeError) as exception:
+ self.__logger.warn(
+ f"Failed to parse sender ID from parameter {parameter['Name']}")
+ self.__logger.error(format_exception(exception))
+ return None
diff --git a/lambdas/mesh-poll/package.json b/lambdas/mesh-poll/package.json
deleted file mode 100644
index c8dbf17e..00000000
--- a/lambdas/mesh-poll/package.json
+++ /dev/null
@@ -1,24 +0,0 @@
-{
- "dependencies": {
- "aws-lambda": "^1.0.7",
- "digital-letters-events": "^0.0.1"
- },
- "devDependencies": {
- "@tsconfig/node22": "^22.0.2",
- "@types/aws-lambda": "^8.10.155",
- "@types/jest": "^29.5.14",
- "jest": "^29.7.0",
- "jest-mock-extended": "^4.0.0",
- "typescript": "^5.9.3"
- },
- "name": "nhs-notify-digital-letters-mesh-poll",
- "private": true,
- "scripts": {
- "lambda-build": "rm -rf dist && npx esbuild --bundle --minify --sourcemap --target=es2020 --platform=node --loader:.node=file --entry-names=[name] --outdir=dist src/index.ts",
- "lint": "echo 'placeholder to be removed'",
- "lint:fix": "echo 'placeholder to be removed'",
- "test:unit": "echo 'placeholder to be removed'",
- "typecheck": "echo 'placeholder to be removed'"
- },
- "version": "0.0.1"
-}
diff --git a/lambdas/mesh-poll/package_python_lambda.sh b/lambdas/mesh-poll/package_python_lambda.sh
new file mode 100755
index 00000000..a9834152
--- /dev/null
+++ b/lambdas/mesh-poll/package_python_lambda.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+set -e
+
+component_name="$1"
+
+rootdir=$(realpath "$(dirname "$0")/../..")
+source ${rootdir}/utils/get_version.sh
+
+dist_dir="${PWD}/target/dist"
+rm -rf "${dist_dir}"
+mkdir -p "${dist_dir}"
+
+# Extract internal (editable "-e" local packages) and external dependencies from requirements.txt
+grep -E '^-e ' requirements.txt | sed 's|^-e ||' > target/internal_requirements.txt || true
+grep -vE '^-e ' requirements.txt > target/external_requirements.txt || true
+
+# Install external dependencies (from PyPI)
+pip install --platform manylinux2014_x86_64 --only-binary=:all: -r target/external_requirements.txt --target ${dist_dir} --python-version 3.14 --implementation cp
+
+# Install internal dependencies (local packages)
+pip install -r target/internal_requirements.txt --target ${dist_dir}
+
+# Bundle application code
+pip install . --no-deps --target ${dist_dir}
diff --git a/lambdas/mesh-poll/pytest.ini b/lambdas/mesh-poll/pytest.ini
new file mode 100644
index 00000000..93372031
--- /dev/null
+++ b/lambdas/mesh-poll/pytest.ini
@@ -0,0 +1,16 @@
+[pytest]
+testpaths = mesh_poll/__tests__
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+addopts = -v --tb=short
+
+[coverage:run]
+relative_files = True
+omit =
+ */mesh_poll/__tests__/*
+ */test_*.py
+ */__pycache__/*
+ */venv/*
+ */.venv/*
+ */env/*
diff --git a/lambdas/mesh-poll/requirements-dev.txt b/lambdas/mesh-poll/requirements-dev.txt
new file mode 100644
index 00000000..1f257452
--- /dev/null
+++ b/lambdas/mesh-poll/requirements-dev.txt
@@ -0,0 +1,6 @@
+-r requirements.txt
+autopep8>=2.0.2
+pylint>=2.17.4
+pytest>=7.0.1
+pytest-cov>=4.0.0
+jake>=3.0.1
diff --git a/lambdas/mesh-poll/requirements.txt b/lambdas/mesh-poll/requirements.txt
new file mode 100644
index 00000000..8550d255
--- /dev/null
+++ b/lambdas/mesh-poll/requirements.txt
@@ -0,0 +1,13 @@
+certifi>=2023.07.22
+mesh-client>=3.2.3
+structlog>=21.5.0
+orjson>=3.9.15
+boto3>=1.28.62
+urllib3>=1.26.19,<2.0.0
+idna>=3.7
+requests>=2.32.0
+pyopenssl>=24.2.1
+pydantic>=2.0.0
+-e ../../utils/metric-publishers
+-e ../../utils/event-publisher-py
+-e ../../utils/py-mock-mesh
diff --git a/lambdas/mesh-poll/setup.py b/lambdas/mesh-poll/setup.py
new file mode 100644
index 00000000..eead957f
--- /dev/null
+++ b/lambdas/mesh-poll/setup.py
@@ -0,0 +1,7 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="mesh-poll",
+ version="0.1.0",
+ packages=find_packages(),
+)
diff --git a/lambdas/mesh-poll/src/__tests__/index.test.ts b/lambdas/mesh-poll/src/__tests__/index.test.ts
deleted file mode 100644
index 52398e0f..00000000
--- a/lambdas/mesh-poll/src/__tests__/index.test.ts
+++ /dev/null
@@ -1,55 +0,0 @@
-import type { Context } from 'aws-lambda';
-import { mockDeep } from 'jest-mock-extended';
-import { PDMResourceSubmitted } from 'digital-letters-events';
-import { handler } from '..';
-
-const context = mockDeep();
-const callback = jest.fn();
-
-describe('event-logging Lambda', () => {
- it('logs the input event and returns 200', async () => {
- const event: PDMResourceSubmitted = {
- type: 'uk.nhs.notify.digital.letters.pdm.resource.submitted.v1',
- source:
- '/nhs/england/notify/staging/dev-647563337/data-plane/digitalletters/pdm',
- dataschema:
- 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-pdm-resource-submitted-data.schema.json',
- specversion: '1.0',
- id: '0249e529-f947-4012-819e-b634eb71be79',
- subject:
- 'customer/7ff8ed41-cd5f-20e4-ef4e-34f96d8cc8ac/75027ace-9b8c-bcfe-866e-6c24242cffc3/q58dnxk5e/4cbek805wwx/yiaw7bl0d/her/1ccb7eb8-c6fe-0a42-279a-2a0e48ff1ca9/zk',
- time: '2025-11-21T16:01:52.268Z',
- datacontenttype: 'application/json',
- traceparent: '00-ee4790eb6821064c645406abe918b3da-3a4e6957ce2a15de-01',
- tracestate: 'nisi quis',
- partitionkey: 'customer-7ff8ed41',
- recordedtime: '2025-11-21T16:01:53.268Z',
- sampledrate: 1,
- sequence: '00000000000350773861',
- severitytext: 'INFO',
- severitynumber: 2,
- dataclassification: 'restricted',
- dataregulation: 'ISO-27001',
- datacategory: 'non-sensitive',
- data: {
- messageReference: 'incididunt Ut aute laborum',
- senderId: 'officia voluptate culpa Ut dolor',
- resourceId: 'a2bcbb42-ab7e-42b6-88d6-74f8d3ca4a09',
- retryCount: 97_903_257,
- },
- };
-
- const result = await handler(event, context, callback);
-
- expect(result).toEqual({
- statusCode: 200,
- body: 'Event logged',
- });
- });
-
- it('throws an error if an invalid event is provided', async () => {
- const invalidEvent = { foo: 'bar' };
-
- await expect(handler(invalidEvent, context, callback)).rejects.toThrow();
- });
-});
diff --git a/lambdas/mesh-poll/src/index.ts b/lambdas/mesh-poll/src/index.ts
deleted file mode 100644
index 29f227a4..00000000
--- a/lambdas/mesh-poll/src/index.ts
+++ /dev/null
@@ -1,59 +0,0 @@
-/* eslint-disable no-console -- Allowing console logging as this is an example file. */
-// Replace me with the actual code for your Lambda function
-import { Handler } from 'aws-lambda';
-import { PDMResourceSubmitted } from 'digital-letters-events';
-import eventValidator from 'digital-letters-events/PDMResourceSubmitted.js';
-
-export const handler: Handler = async (event: PDMResourceSubmitted) => {
- console.log('Received event:', event);
-
- // We can build a new PDMResourceSubmitted event object like this:
- const pdmResourceSubmittedEvent: PDMResourceSubmitted = {
- type: 'uk.nhs.notify.digital.letters.pdm.resource.submitted.v1',
- source:
- '/nhs/england/notify/staging/dev-647563337/data-plane/digitalletters/pdm',
- dataschema:
- 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-pdm-resource-submitted-data.schema.json',
- specversion: '1.0',
- id: '0249e529-f947-4012-819e-b634eb71be79',
- subject:
- 'customer/7ff8ed41-cd5f-20e4-ef4e-34f96d8cc8ac/75027ace-9b8c-bcfe-866e-6c24242cffc3/q58dnxk5e/4cbek805wwx/yiaw7bl0d/her/1ccb7eb8-c6fe-0a42-279a-2a0e48ff1ca9/zk',
- time: '2025-11-21T16:01:52.268Z',
- datacontenttype: 'application/json',
- traceparent: '00-ee4790eb6821064c645406abe918b3da-3a4e6957ce2a15de-01',
- tracestate: 'nisi quis',
- partitionkey: 'customer-7ff8ed41',
- recordedtime: '2025-11-21T16:01:53.268Z',
- sampledrate: 1,
- sequence: '00000000000350773861',
- severitytext: 'INFO',
- severitynumber: 2,
- dataclassification: 'restricted',
- dataregulation: 'ISO-27001',
- datacategory: 'non-sensitive',
- data: {
- messageReference: 'incididunt Ut aute laborum',
- senderId: 'officia voluptate culpa Ut dolor',
- resourceId: 'a2bcbb42-ab7e-42b6-88d6-74f8d3ca4a09',
- retryCount: 97_903_257,
- },
- };
-
- console.log('PDM resource submitted event:', pdmResourceSubmittedEvent);
-
- // We can validate an event like this:
- const isEventValid = eventValidator(event);
- if (isEventValid) {
- console.log('pdmResourceSubmittedEvent is valid!');
- } else {
- console.error('Validation failure!', eventValidator.errors);
- throw new Error('Event validation failed');
- }
-
- return {
- statusCode: 200,
- body: 'Event logged',
- };
-};
-
-export default handler;
diff --git a/lambdas/mesh-poll/tsconfig.json b/lambdas/mesh-poll/tsconfig.json
deleted file mode 100644
index 19a99d17..00000000
--- a/lambdas/mesh-poll/tsconfig.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "compilerOptions": {
- "allowJs": true,
- "isolatedModules": true
- },
- "extends": "@tsconfig/node22/tsconfig.json",
- "include": [
- "src/**/*",
- "jest.config.ts"
- ]
-}
diff --git a/package-lock.json b/package-lock.json
index 04ccd2e2..e0cd05c8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,14 +1,15 @@
{
"name": "nhs-notify-digital-letters",
+ "version": "0.0.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "nhs-notify-digital-letters",
+ "version": "0.0.1",
"workspaces": [
"lambdas/key-generation",
"lambdas/refresh-apim-access-token",
- "lambdas/mesh-poll",
"lambdas/ttl-create-lambda",
"lambdas/ttl-handle-expiry-lambda",
"lambdas/ttl-poll-lambda",
@@ -149,6 +150,7 @@
"lambdas/mesh-poll": {
"name": "nhs-notify-digital-letters-mesh-poll",
"version": "0.0.1",
+ "extraneous": true,
"dependencies": {
"aws-lambda": "^1.0.7",
"digital-letters-events": "^0.0.1"
@@ -162,44 +164,6 @@
"typescript": "^5.9.3"
}
},
- "lambdas/mesh-poll/node_modules/@types/jest": {
- "version": "29.5.14",
- "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz",
- "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "expect": "^29.0.0",
- "pretty-format": "^29.0.0"
- }
- },
- "lambdas/mesh-poll/node_modules/jest": {
- "version": "29.7.0",
- "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz",
- "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "@jest/core": "^29.7.0",
- "@jest/types": "^29.6.3",
- "import-local": "^3.0.2",
- "jest-cli": "^29.7.0"
- },
- "bin": {
- "jest": "bin/jest.js"
- },
- "engines": {
- "node": "^14.15.0 || ^16.10.0 || >=18.0.0"
- },
- "peerDependencies": {
- "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0"
- },
- "peerDependenciesMeta": {
- "node-notifier": {
- "optional": true
- }
- }
- },
"lambdas/refresh-apim-access-token": {
"version": "0.0.1",
"dependencies": {
@@ -14684,10 +14648,6 @@
"resolved": "tests/playwright",
"link": true
},
- "node_modules/nhs-notify-digital-letters-mesh-poll": {
- "resolved": "lambdas/mesh-poll",
- "link": true
- },
"node_modules/nhs-notify-digital-letters-ttl-create-lambda": {
"resolved": "lambdas/ttl-create-lambda",
"link": true
diff --git a/package.json b/package.json
index df6a61d8..5da2123f 100644
--- a/package.json
+++ b/package.json
@@ -52,10 +52,10 @@
"test:unit": "npm run test:unit --workspaces",
"typecheck": "npm run typecheck --workspaces"
},
+ "version": "0.0.1",
"workspaces": [
"lambdas/key-generation",
"lambdas/refresh-apim-access-token",
- "lambdas/mesh-poll",
"lambdas/ttl-create-lambda",
"lambdas/ttl-handle-expiry-lambda",
"lambdas/ttl-poll-lambda",
diff --git a/project.code-workspace b/project.code-workspace
index b43fdd0b..b7407a2a 100644
--- a/project.code-workspace
+++ b/project.code-workspace
@@ -81,7 +81,6 @@
"shellcheck.run": "onSave",
"jest.virtualFolders": [
{ "name": "key-generation", "rootPath": "lambdas/key-generation" },
- { "name": "mesh-poll", "rootPath": "lambdas/mesh-poll" },
{ "name": "refresh-apim-access-token", "rootPath": "lambdas/refresh-apim-access-token" },
{ "name": "python-schema-generator", "rootPath": "src/python-schema-generator" },
{ "name": "ttl-create-lambda", "rootPath": "lambdas/ttl-create-lambda/" },
diff --git a/scripts/config/sonar-scanner.properties b/scripts/config/sonar-scanner.properties
index 8335bc3b..ce9a8392 100644
--- a/scripts/config/sonar-scanner.properties
+++ b/scripts/config/sonar-scanner.properties
@@ -3,13 +3,33 @@
sonar.host.url=https://sonarcloud.io
sonar.qualitygate.wait=true
sonar.sourceEncoding=UTF-8
-sonar.sources=.
-sonar.tests=tests/, src/asyncapigenerator/tests, src/cloudeventjekylldocs/tests, src/eventcatalogasyncapiimporter/tests, src/python-schema-generator/tests, src/cloudevents/tools/builder/__tests__, src/cloudevents/tools/cache/__tests__, src/cloudevents/tools/generator/__tests__, lambdas/mesh-poll/src/__tests__, lambdas/ttl-create-lambda/src/__tests__, lambdas/ttl-poll-lambda/src/__tests__, utils/utils/src/__tests__, utils/sender-management/src/__tests__
-sonar.test.inclusions=tests/**, src/**/tests/**, src/**/__tests__/**, lambdas/**/src/__tests__/**, utils/utils/src/__tests__/**, utils/sender-management/src/__tests__/**
+sonar.sources=src,lambdas,utils,tests
+sonar.tests=tests
+sonar.test.inclusions=tests/**, lambdas/*/src/__tests__/**, utils/utils/src/__tests__/**, src/**/tests/**, src/**/__tests__/**, src/cloudevents/tools/**/__tests__/**, utils/sender-management/src/__tests__/**
sonar.terraform.provider.aws.version=5.54.1
sonar.cpd.exclusions=**.test.*
-sonar.coverage.exclusions=tests/**, src/**/tests/**, src/**/__tests__/**, **/*.dev.*, lambdas/**/src/__tests__/**, **/jest.config.ts, **/jest.config.cjs, scripts/**/*.*, docs/**/*.*, utils/utils/src/__tests__/**, src/asyncapigenerator/example_usage.py, src/asyncapigenerator/test_generator.py, src/eventcatalogasyncapiimporter/examples.py, src/digital-letters-events/**
-sonar.python.coverage.reportPaths=src/asyncapigenerator/coverage.xml,src/cloudeventjekylldocs/coverage.xml,src/eventcatalogasyncapiimporter/coverage.xml,src/python-schema-generator/coverage.xml
+# Coverage exclusions
+sonar.coverage.exclusions=\
+ tests/**, \
+ **/*.dev.*, \
+ **/__tests__/**, \
+ **/test_*.py, \
+ **/jest.config.ts, \
+ **/jest.config.cjs, \
+ **/setup.py, \
+ scripts/**/*.*, \
+ docs/**/*.*, \
+ utils/py-mock-mesh/**, \
+ utils/event-publisher-py/event_publisher/mesh_config.py, \
+ lambdas/mesh-download/mesh_download/config.py, \
+ lambdas/mesh-download/mesh_download/errors.py, \
+ lambdas/mesh-poll/mesh_poll/config.py, \
+ src/asyncapigenerator/example_usage.py, \
+ src/asyncapigenerator/test_generator.py, \
+ src/eventcatalogasyncapiimporter/examples.py
+
+# Coverage reports
+sonar.python.coverage.reportPaths=.coverage/coverage.xml,src/asyncapigenerator/coverage.xml,src/cloudeventjekylldocs/coverage.xml,src/eventcatalogasyncapiimporter/coverage.xml,utils/event-publisher-py/coverage.xml,utils/metric-publishers/coverage.xml,lambdas/mesh-poll/coverage.xml,lambdas/mesh-download/coverage.xml,src/python-schema-generator/coverage.xml
sonar.javascript.lcov.reportPaths=lcov.info,src/cloudevents/coverage/lcov.info
sonar.typescript.lcov.reportPaths=lcov.info,src/cloudevents/coverage/lcov.info
diff --git a/scripts/tests/unit.sh b/scripts/tests/unit.sh
index e08ea30c..b9035e69 100755
--- a/scripts/tests/unit.sh
+++ b/scripts/tests/unit.sh
@@ -34,6 +34,25 @@ echo "Setting up and running eventcatalogasyncapiimporter tests..."
make -C ./src/eventcatalogasyncapiimporter install-dev
make -C ./src/eventcatalogasyncapiimporter coverage # Run with coverage to generate coverage.xml for SonarCloud
+# Python utility packages - event-publisher-py
+echo "Setting up and running event-publisher-py tests..."
+make -C ./utils/event-publisher-py install-dev
+make -C ./utils/event-publisher-py coverage # Run with coverage to generate coverage.xml for SonarCloud
+
+# Python utility packages - metric-publishers
+echo "Setting up and running metric-publishers tests..."
+make -C ./utils/metric-publishers install-dev
+make -C ./utils/metric-publishers coverage # Run with coverage to generate coverage.xml for SonarCloud
+
+# Python Lambda - mesh-poll
+echo "Setting up and running mesh-poll tests..."
+make -C ./lambdas/mesh-poll install-dev
+make -C ./lambdas/mesh-poll coverage # Run with coverage to generate coverage.xml for SonarCloud
+
+# Python Lambda - mesh-download
+echo "Setting up and running mesh-download tests..."
+make -C ./lambdas/mesh-download install-dev
+make -C ./lambdas/mesh-download coverage # Run with coverage to generate coverage.xml for SonarCloud
# Python projects - python-schema-generator
echo "Setting up and running python-schema-generator tests..."
make -C ./src/python-schema-generator install-dev
diff --git a/utils/event-publisher-py/Makefile b/utils/event-publisher-py/Makefile
new file mode 100644
index 00000000..a9ee1437
--- /dev/null
+++ b/utils/event-publisher-py/Makefile
@@ -0,0 +1,24 @@
+.PHONY: install install-dev test coverage clean
+
+install:
+ pip install -r requirements.txt
+
+install-dev: install
+ pip install -r requirements-dev.txt
+
+test:
+ cd ../.. && PYTHONPATH=utils/event-publisher-py:$$PYTHONPATH pytest utils/event-publisher-py/event_publisher/__tests__/ -v
+
+coverage:
+ cd ../.. && PYTHONPATH=utils/event-publisher-py:$$PYTHONPATH pytest utils/event-publisher-py/event_publisher/__tests__/ \
+ --cov=utils/event-publisher-py/event_publisher \
+ --cov-config=utils/event-publisher-py/pytest.ini \
+ --cov-report=html:utils/event-publisher-py/htmlcov \
+ --cov-report=term-missing \
+ --cov-report=xml:utils/event-publisher-py/coverage.xml \
+ --cov-branch
+
+clean:
+ rm -rf dist/ .coverage htmlcov/ .pytest_cache/ coverage.xml
+ find . -type d -name __pycache__ -exec rm -rf {} +
+ find . -type f -name '*.pyc' -delete
diff --git a/utils/event-publisher-py/event_publisher/__init__.py b/utils/event-publisher-py/event_publisher/__init__.py
new file mode 100644
index 00000000..d618812f
--- /dev/null
+++ b/utils/event-publisher-py/event_publisher/__init__.py
@@ -0,0 +1,26 @@
+"""
+Event Publisher for AWS EventBridge with DLQ support.
+
+This module provides a Python equivalent of the TypeScript EventPublisher
+for publishing CloudEvents to EventBridge.
+"""
+
+from .event_publisher import EventPublisher
+from . import models
+from .mesh_config import (
+ BaseMeshConfig,
+ InvalidMeshEndpointError,
+ InvalidEnvironmentVariableError,
+ store_file,
+ log
+)
+
+__all__ = [
+ 'EventPublisher',
+ 'models',
+ 'BaseMeshConfig',
+ 'InvalidMeshEndpointError',
+ 'InvalidEnvironmentVariableError',
+ 'store_file',
+ 'log'
+]
diff --git a/utils/event-publisher-py/event_publisher/__tests__/test_event_publisher.py b/utils/event-publisher-py/event_publisher/__tests__/test_event_publisher.py
new file mode 100644
index 00000000..91ba77fd
--- /dev/null
+++ b/utils/event-publisher-py/event_publisher/__tests__/test_event_publisher.py
@@ -0,0 +1,310 @@
+import json
+import pytest
+from unittest.mock import Mock, MagicMock, call
+from uuid import uuid4
+from botocore.exceptions import ClientError
+
+from event_publisher import EventPublisher
+
+
+@pytest.fixture
+def mock_logger():
+ logger = Mock()
+ logger.info = Mock()
+ logger.warning = Mock()
+ logger.error = Mock()
+ return logger
+
+
+@pytest.fixture
+def mock_events_client():
+ return Mock()
+
+
+@pytest.fixture
+def mock_sqs_client():
+ return Mock()
+
+
+@pytest.fixture
+def test_config(mock_logger, mock_events_client, mock_sqs_client):
+ return {
+ 'event_bus_arn': 'arn:aws:events:us-east-1:123456789012:event-bus/test-bus',
+ 'dlq_url': 'https://sqs.us-east-1.amazonaws.com/123456789012/test-dlq',
+ 'logger': mock_logger,
+ 'events_client': mock_events_client,
+ 'sqs_client': mock_sqs_client,
+ }
+
+
+@pytest.fixture
+def valid_cloud_event():
+ return {
+ 'profileversion': '1.0.0',
+ 'profilepublished': '2025-10',
+ 'id': '550e8400-e29b-41d4-a716-446655440001',
+ 'specversion': '1.0',
+ 'source': '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
+ 'subject': 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
+ 'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1',
+ 'time': '2023-06-20T12:00:00Z',
+ 'recordedtime': '2023-06-20T12:00:00.250Z',
+ 'severitynumber': 2,
+ 'severitytext': 'INFO',
+ 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ 'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10/digital-letters-mesh-inbox-message-received-data.schema.json',
+ 'dataschemaversion': '1.0',
+ 'data': {
+ 'meshMessageId': 'test-123',
+ 'senderId': 'sender1',
+ 'messageReference': 'ref_001'
+ },
+ }
+
+
+@pytest.fixture
+def valid_cloud_event2():
+ return {
+ 'profileversion': '1.0.0',
+ 'profilepublished': '2025-10',
+ 'id': '550e8400-e29b-41d4-a716-446655440002',
+ 'specversion': '1.0',
+ 'source': '/nhs/england/notify/development/primary/data-plane/digitalletters/mesh',
+ 'subject': 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
+ 'type': 'uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1',
+ 'time': '2023-06-20T12:00:00Z',
+ 'recordedtime': '2023-06-20T12:00:00.250Z',
+ 'severitynumber': 2,
+ 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ 'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10/digital-letters-mesh-inbox-message-received-data.schema.json',
+ 'dataschemaversion': '1.0',
+ 'data': {
+ 'meshMessageId': 'test-123',
+ 'senderId': 'sender1',
+ 'messageReference': 'ref_001'
+ },
+ }
+
+
+@pytest.fixture
+def invalid_cloud_event():
+ return {
+ 'type': 'data',
+ 'id': 'missing-source',
+ }
+
+
+class TestEventPublishing:
+
+ def test_should_return_empty_array_when_no_events_provided(self, test_config, mock_events_client, mock_sqs_client):
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events([])
+
+ assert result == []
+ mock_events_client.put_events.assert_not_called()
+ mock_sqs_client.send_message_batch.assert_not_called()
+
+ def test_should_send_valid_events_to_eventbridge(self, test_config, mock_events_client, mock_sqs_client, valid_cloud_event, valid_cloud_event2):
+ mock_events_client.put_events.return_value = {
+ 'FailedEntryCount': 0,
+ 'Entries': [{'EventId': 'event-1'}]
+ }
+ mock_sqs_client.send_message_batch.return_value = {
+ 'Successful': []
+ }
+
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events([valid_cloud_event, valid_cloud_event2])
+
+ assert result == []
+ assert mock_events_client.put_events.call_count == 1
+
+ call_args = mock_events_client.put_events.call_args[1]
+ assert len(call_args['Entries']) == 2
+ assert call_args['Entries'][0]['Source'] == valid_cloud_event['source']
+ assert call_args['Entries'][0]['DetailType'] == valid_cloud_event['type']
+ assert call_args['Entries'][0]['Detail'] == json.dumps(valid_cloud_event)
+ assert call_args['Entries'][0]['EventBusName'] == test_config['event_bus_arn']
+
+ def test_should_send_invalid_events_directly_to_dlq(self, test_config, mock_sqs_client, invalid_cloud_event):
+ mock_sqs_client.send_message_batch.return_value = {
+ 'Successful': [{'Id': 'msg-1', 'MessageId': 'success-1', 'MD5OfMessageBody': 'hash1'}]
+ }
+
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events([invalid_cloud_event])
+
+ assert result == []
+ assert mock_sqs_client.send_message_batch.call_count == 1
+
+ call_args = mock_sqs_client.send_message_batch.call_args[1]
+ assert call_args['QueueUrl'] == test_config['dlq_url']
+ assert len(call_args['Entries']) == 1
+ assert call_args['Entries'][0]['MessageBody'] == json.dumps(invalid_cloud_event)
+ assert call_args['Entries'][0]['MessageAttributes']['DlqReason']['StringValue'] == 'INVALID_EVENT'
+
+ def test_should_send_failed_eventbridge_events_to_dlq(self, test_config, mock_events_client, mock_sqs_client, valid_cloud_event, valid_cloud_event2):
+ mock_events_client.put_events.return_value = {
+ 'FailedEntryCount': 1,
+ 'Entries': [
+ {'ErrorCode': 'InternalFailure', 'ErrorMessage': 'Internal error'},
+ {'EventId': 'event-2'}
+ ]
+ }
+ mock_sqs_client.send_message_batch.return_value = {
+ 'Successful': [{'Id': 'msg-1', 'MessageId': 'success-1', 'MD5OfMessageBody': 'hash1'}]
+ }
+
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events([valid_cloud_event, valid_cloud_event2])
+
+ assert result == []
+ assert mock_events_client.put_events.call_count == 1
+ # Should call DLQ once for the failed event
+ assert mock_sqs_client.send_message_batch.call_count == 1
+
+ # Verify EventBridge was called with both events
+ eventbridge_call_args = mock_events_client.put_events.call_args[1]
+ assert len(eventbridge_call_args['Entries']) == 2
+
+ # Verify DLQ gets the failed event (first one)
+ dlq_call_args = mock_sqs_client.send_message_batch.call_args[1]
+ assert len(dlq_call_args['Entries']) == 1
+ assert dlq_call_args['Entries'][0]['MessageBody'] == json.dumps(valid_cloud_event)
+ assert dlq_call_args['Entries'][0]['MessageAttributes']['DlqReason']['StringValue'] == 'EVENTBRIDGE_FAILURE'
+
+ def test_should_handle_eventbridge_send_error_and_send_all_events_to_dlq(self, test_config, mock_events_client, mock_sqs_client, valid_cloud_event, valid_cloud_event2):
+ mock_events_client.put_events.side_effect = ClientError(
+ {'Error': {'Code': 'InternalError', 'Message': 'EventBridge error'}},
+ 'PutEvents'
+ )
+ mock_sqs_client.send_message_batch.return_value = {
+ 'Successful': [{'Id': 'msg-1', 'MessageId': 'success-1', 'MD5OfMessageBody': 'hash1'}]
+ }
+
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events([valid_cloud_event, valid_cloud_event2])
+
+ assert result == []
+ assert mock_events_client.put_events.call_count == 1
+ # Should call DLQ once for all events after EventBridge failure
+ assert mock_sqs_client.send_message_batch.call_count == 1
+
+ def test_should_return_failed_events_when_dlq_also_fails(self, test_config, mock_sqs_client, invalid_cloud_event):
+ def mock_send_message_batch(**kwargs):
+ first_entry_id = kwargs['Entries'][0]['Id']
+ return {
+ 'Failed': [{
+ 'Id': first_entry_id,
+ 'Code': 'SenderFault',
+ 'Message': 'Invalid message',
+ 'SenderFault': True
+ }]
+ }
+
+ mock_sqs_client.send_message_batch.side_effect = mock_send_message_batch
+
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events([invalid_cloud_event])
+
+ assert result == [invalid_cloud_event]
+ assert mock_sqs_client.send_message_batch.call_count == 1
+
+ def test_should_handle_dlq_send_error_and_return_all_events_as_failed(self, test_config, mock_sqs_client, invalid_cloud_event):
+ mock_sqs_client.send_message_batch.side_effect = ClientError(
+ {'Error': {'Code': 'InternalError', 'Message': 'DLQ error'}},
+ 'SendMessageBatch'
+ )
+
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events([invalid_cloud_event])
+
+ assert result == [invalid_cloud_event]
+ assert mock_sqs_client.send_message_batch.call_count == 1
+
+ def test_should_send_to_eventbridge_in_batches(self, test_config, mock_events_client, valid_cloud_event):
+ large_event_array = [
+ {**valid_cloud_event, 'id': str(uuid4())}
+ for _ in range(25)
+ ]
+
+ mock_events_client.put_events.return_value = {
+ 'FailedEntryCount': 0,
+ 'Entries': [{'EventId': 'success'}]
+ }
+
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events(large_event_array)
+
+ assert result == []
+ assert mock_events_client.put_events.call_count == 3
+
+ # Verify batch sizes: 10, 10, 5
+ calls = mock_events_client.put_events.call_args_list
+ assert len(calls[0][1]['Entries']) == 10
+ assert len(calls[1][1]['Entries']) == 10
+ assert len(calls[2][1]['Entries']) == 5
+
+ def test_should_send_to_dlq_in_batches(self, test_config, mock_sqs_client, invalid_cloud_event):
+ large_event_array = [
+ {**invalid_cloud_event, 'id': str(uuid4())}
+ for _ in range(25)
+ ]
+
+ def mock_send_message_batch(**kwargs):
+ return {
+ 'Failed': [{
+ 'Id': entry['Id'],
+ 'Code': 'SenderFault',
+ 'Message': 'Invalid message',
+ 'SenderFault': True
+ } for entry in kwargs['Entries']]
+ }
+
+ mock_sqs_client.send_message_batch.side_effect = mock_send_message_batch
+
+ publisher = EventPublisher(**test_config)
+ result = publisher.send_events(large_event_array)
+
+ assert len(result) == 25
+ assert mock_sqs_client.send_message_batch.call_count == 3
+
+ # Verify batch sizes: 10, 10, 5
+ calls = mock_sqs_client.send_message_batch.call_args_list
+ assert len(calls[0][1]['Entries']) == 10
+ assert len(calls[1][1]['Entries']) == 10
+ assert len(calls[2][1]['Entries']) == 5
+
+
+class TestEventPublisherClass:
+
+ def test_should_throw_error_when_event_bus_arn_is_missing(self, test_config):
+ test_config['event_bus_arn'] = ''
+ with pytest.raises(ValueError, match='event_bus_arn has not been specified'):
+ EventPublisher(**test_config)
+
+ def test_should_throw_error_when_dlq_url_is_missing(self, test_config):
+ test_config['dlq_url'] = ''
+ with pytest.raises(ValueError, match='dlq_url has not been specified'):
+ EventPublisher(**test_config)
+
+ def test_should_be_reusable_for_multiple_calls(self, test_config, mock_events_client, mock_sqs_client, valid_cloud_event, valid_cloud_event2):
+ mock_events_client.put_events.return_value = {
+ 'FailedEntryCount': 0,
+ 'Entries': [{'EventId': 'event-1'}]
+ }
+ mock_sqs_client.send_message_batch.return_value = {
+ 'Successful': []
+ }
+
+ publisher = EventPublisher(**test_config)
+
+ # First call
+ result1 = publisher.send_events([valid_cloud_event])
+ assert result1 == []
+
+ # Second call with same publisher instance
+ result2 = publisher.send_events([valid_cloud_event2])
+ assert result2 == []
+
+ assert mock_events_client.put_events.call_count == 2
diff --git a/utils/event-publisher-py/event_publisher/__tests__/test_models.py b/utils/event-publisher-py/event_publisher/__tests__/test_models.py
new file mode 100644
index 00000000..dea3fdf2
--- /dev/null
+++ b/utils/event-publisher-py/event_publisher/__tests__/test_models.py
@@ -0,0 +1,72 @@
+import pytest
+from pydantic import ValidationError
+from event_publisher.models import CloudEvent, MeshInboxMessageEvent
+
+
+class TestCloudEvent:
+ """Test CloudEvent validation"""
+
+ @pytest.fixture
+ def valid_event(self):
+ return {
+ 'id': '550e8400-e29b-41d4-a716-446655440001',
+ 'specversion': '1.0',
+ 'source': '/nhs/england/notify/production/primary/data-plane/digitalletters/mesh',
+ 'subject': 'customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
+ 'type': 'uk.nhs.notify.digital.letters.example.v1',
+ 'time': '2024-07-10T14:30:00Z',
+ 'recordedtime': '2024-07-10T14:30:00.250Z',
+ 'severitynumber': 2,
+ 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ 'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10/digital-letter-base-data.schema.json',
+ 'data': {
+ 'digital-letter-id': '123e4567-e89b-12d3-a456-426614174000',
+ 'messageReference': 'ref1',
+ 'senderId': 'sender1',
+ },
+ }
+
+ def test_parses_valid_cloud_event(self, valid_event):
+ event = CloudEvent(**valid_event)
+ assert str(event.id) == valid_event['id']
+ assert event.source == valid_event['source']
+ assert event.subject == valid_event['subject']
+ assert event.type == valid_event['type']
+
+ def test_fails_for_missing_required_fields(self):
+ with pytest.raises(ValidationError):
+ CloudEvent(**{})
+
+ def test_fails_for_invalid_source_pattern(self, valid_event):
+ invalid = valid_event.copy()
+ invalid['source'] = 'invalid-source'
+ with pytest.raises(ValidationError) as exc_info:
+ CloudEvent(**invalid)
+ assert 'source' in str(exc_info.value).lower()
+
+ def test_fails_for_invalid_subject_pattern(self, valid_event):
+ invalid = valid_event.copy()
+ invalid['subject'] = 'invalid-subject'
+ with pytest.raises(ValidationError) as exc_info:
+ CloudEvent(**invalid)
+ assert 'subject' in str(exc_info.value).lower()
+
+ def test_fails_for_invalid_type_pattern(self, valid_event):
+ invalid = valid_event.copy()
+ invalid['type'] = 'invalid.type'
+ with pytest.raises(ValidationError) as exc_info:
+ CloudEvent(**invalid)
+ assert 'type' in str(exc_info.value).lower()
+
+ def test_allows_any_data_structure(self, valid_event):
+ """Base CloudEvent accepts any dict as data, but specific event types validate data structure"""
+ event_with_empty_data = valid_event.copy()
+ event_with_empty_data['data'] = {}
+ # Base CloudEvent accepts any dict
+ event = CloudEvent(**event_with_empty_data)
+ assert event.data == {}
+
+        # But MeshInboxMessageEvent should reject empty data
+        with pytest.raises(ValidationError) as exc_info:
+            MeshInboxMessageEvent(**event_with_empty_data)
+        assert 'meshmessageid' in str(exc_info.value).lower() or 'field required' in str(exc_info.value).lower()
diff --git a/utils/event-publisher-py/event_publisher/event_publisher.py b/utils/event-publisher-py/event_publisher/event_publisher.py
new file mode 100644
index 00000000..83e22226
--- /dev/null
+++ b/utils/event-publisher-py/event_publisher/event_publisher.py
@@ -0,0 +1,272 @@
+"""
+EventPublisher - Python implementation for publishing CloudEvents to EventBridge.
+
+This module provides a Python equivalent of the TypeScript EventPublisher class.
+"""
+
+import json
+import logging
+from typing import List, Dict, Any, Optional, Literal
+from uuid import uuid4
+import boto3
+from botocore.exceptions import ClientError
+from pydantic import ValidationError
+from .models import CloudEvent
+
+
+DlqReason = Literal['INVALID_EVENT', 'EVENTBRIDGE_FAILURE']
+MAX_BATCH_SIZE = 10
+
+
+class EventPublisher:
+ """
+ Publisher for CloudEvents to AWS EventBridge with DLQ support.
+
+ Validates events, sends them to EventBridge in batches, and routes
+ failed events to a Dead Letter Queue (DLQ) for later processing.
+ """
+
+ def __init__(
+ self,
+ event_bus_arn: str,
+ dlq_url: str,
+ logger: Optional[logging.Logger] = None,
+ events_client: Optional[Any] = None,
+ sqs_client: Optional[Any] = None
+ ):
+ """
+ Initialize the EventPublisher.
+ """
+ if not event_bus_arn:
+ raise ValueError('event_bus_arn has not been specified')
+ if not dlq_url:
+ raise ValueError('dlq_url has not been specified')
+
+ self.event_bus_arn = event_bus_arn
+ self.dlq_url = dlq_url
+ self.logger = logger or logging.getLogger(__name__)
+ self.events_client = events_client or boto3.client('events')
+ self.sqs_client = sqs_client or boto3.client('sqs')
+
+ def _validate_cloud_event(self, event: Dict[str, Any]) -> tuple[bool, Optional[str]]:
+ """
+ Validate event using Pydantic CloudEvent model.
+ """
+ try:
+ CloudEvent(**event)
+ return (True, None)
+ except ValidationError as e:
+ return (False, str(e))
+
+ def _send_to_event_bridge(self, events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+ """
+ Send events to EventBridge in batches.
+ """
+ failed_events = []
+
+ self.logger.info(
+ f"Sending {len(events)} events to EventBridge",
+ extra={
+ 'event_bus_arn': self.event_bus_arn,
+ 'event_count': len(events)
+ }
+ )
+
+ for i in range(0, len(events), MAX_BATCH_SIZE):
+ batch = events[i:i + MAX_BATCH_SIZE]
+
+ self.logger.info(
+ f"Sending batch of {len(batch)} events to EventBridge",
+ extra={
+ 'event_bus_arn': self.event_bus_arn,
+ 'batch_size': len(batch)
+ }
+ )
+
+ try:
+ entries = [
+ {
+ 'Source': event['source'],
+ 'DetailType': event['type'],
+ 'Detail': json.dumps(event),
+ 'EventBusName': self.event_bus_arn
+ }
+ for event in batch
+ ]
+
+ response = self.events_client.put_events(Entries=entries)
+
+ failed_count = response.get('FailedEntryCount', 0)
+ success_count = len(batch) - failed_count
+
+ self.logger.info(
+ 'EventBridge batch sent',
+ extra={
+ 'batch_size': len(batch),
+ 'failed_entry_count': failed_count,
+ 'successful_count': success_count
+ }
+ )
+
+ # Track failed entries
+ if failed_count > 0 and 'Entries' in response:
+ for idx, entry in enumerate(response['Entries']):
+ if 'ErrorCode' in entry:
+ self.logger.warning(
+ 'Event failed to send to EventBridge',
+ extra={
+ 'error_code': entry.get('ErrorCode'),
+ 'error_message': entry.get('ErrorMessage'),
+ 'event_id': batch[idx].get('id')
+ }
+ )
+ failed_events.append(batch[idx])
+
+ except ClientError as error:
+ self.logger.warning(
+ 'EventBridge send error',
+ extra={
+ 'error': str(error),
+ 'batch_size': len(batch)
+ }
+ )
+ failed_events.extend(batch)
+
+ return failed_events
+
+ def _send_to_dlq(
+ self,
+ events: List[Dict[str, Any]],
+ reason: DlqReason
+ ) -> List[Dict[str, Any]]:
+ """
+ Send failed events to the Dead Letter Queue.
+ """
+ failed_dlqs = []
+
+ self.logger.warning(
+ 'Sending failed events to DLQ',
+ extra={
+ 'dlq_url': self.dlq_url,
+ 'event_count': len(events),
+ 'reason': reason
+ }
+ )
+
+ for i in range(0, len(events), MAX_BATCH_SIZE):
+ batch = events[i:i + MAX_BATCH_SIZE]
+ id_to_event_map = {}
+
+ entries = []
+ for event in batch:
+ entry_id = str(uuid4())
+ id_to_event_map[entry_id] = event
+ entries.append({
+ 'Id': entry_id,
+ 'MessageBody': json.dumps(event),
+ 'MessageAttributes': {
+ 'DlqReason': {
+ 'DataType': 'String',
+ 'StringValue': reason
+ }
+ }
+ })
+
+ try:
+ response = self.sqs_client.send_message_batch(
+ QueueUrl=self.dlq_url,
+ Entries=entries
+ )
+
+ # Track failed DLQ sends
+ if 'Failed' in response:
+ for failed_entry in response['Failed']:
+ entry_id = failed_entry.get('Id')
+ if entry_id and entry_id in id_to_event_map:
+ failed_event = id_to_event_map[entry_id]
+ self.logger.warning(
+ 'Event failed to send to DLQ',
+ extra={
+ 'error_code': failed_entry.get('Code'),
+ 'error_message': failed_entry.get('Message'),
+ 'event_id': failed_event.get('id')
+ }
+ )
+ failed_dlqs.append(failed_event)
+
+ except ClientError as error:
+ self.logger.warning(
+ 'DLQ send error',
+ extra={
+ 'error': str(error),
+ 'dlq_url': self.dlq_url,
+ 'batch_size': len(batch)
+ }
+ )
+ failed_dlqs.extend(batch)
+
+ if failed_dlqs:
+ self.logger.error(
+ 'Failed to send events to DLQ',
+ extra={
+ 'failed_event_count': len(failed_dlqs),
+ 'dlq_url': self.dlq_url
+ }
+ )
+
+ return failed_dlqs
+
+ def send_events(self, events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+ """
+ Send CloudEvents to EventBridge with validation and DLQ support.
+
+ 1. Validates events against CloudEvent schema
+ 2. Sends valid events to EventBridge
+ 3. Routes failed events to DLQ
+ """
+ if not events:
+ self.logger.info('No events to send')
+ return []
+
+ valid_events = []
+ invalid_events = []
+
+ # Validate events using Pydantic
+ for event in events:
+ is_valid, error_msg = self._validate_cloud_event(event)
+ if is_valid:
+ valid_events.append(event)
+ else:
+ invalid_events.append(event)
+ self.logger.warning(
+ 'CloudEvent validation failed',
+ extra={
+ 'event_id': event.get('id', 'unknown'),
+ 'validation_error': error_msg
+ }
+ )
+
+ self.logger.info(
+ 'Event validation completed',
+ extra={
+ 'valid_event_count': len(valid_events),
+ 'invalid_event_count': len(invalid_events),
+ 'total_event_count': len(events)
+ }
+ )
+
+ total_failed_events = []
+
+ # Send invalid events to DLQ
+ if invalid_events:
+ failed_dlq_sends = self._send_to_dlq(invalid_events, 'INVALID_EVENT')
+ total_failed_events.extend(failed_dlq_sends)
+
+ # Send valid events to EventBridge
+ if valid_events:
+ failed_sends = self._send_to_event_bridge(valid_events)
+ if failed_sends:
+ failed_dlq_sends = self._send_to_dlq(failed_sends, 'EVENTBRIDGE_FAILURE')
+ total_failed_events.extend(failed_dlq_sends)
+
+ return total_failed_events
diff --git a/utils/event-publisher-py/event_publisher/mesh_config.py b/utils/event-publisher-py/event_publisher/mesh_config.py
new file mode 100644
index 00000000..64b9ef2c
--- /dev/null
+++ b/utils/event-publisher-py/event_publisher/mesh_config.py
@@ -0,0 +1,186 @@
+"""
+Base configuration module for MESH client applications
+"""
+import json
+import os
+import tempfile
+import boto3
+import structlog
+import mesh_client
+from py_mock_mesh.mesh_client import MockMeshClient
+from metric_publishers.certificate_monitor import report_expiry_time
+
+structlog.configure(processors=[structlog.processors.JSONRenderer()])
+log = structlog.get_logger()
+
+
+class InvalidMeshEndpointError(Exception):
+ """
+ Indicates an invalid MESH endpoint in configuration
+ """
+
+
+class InvalidEnvironmentVariableError(Exception):
+ """
+ Indicates an invalid environment variable
+ """
+
+
+def store_file(content):
+ """
+ Writes a temp file and returns the name
+ """
+ with tempfile.NamedTemporaryFile(delete=False) as file:
+ file.write(content)
+ file.close()
+ return file.name
+
+
+class BaseMeshConfig: # pylint: disable=too-many-instance-attributes
+ """
+ Base configuration class for MESH client applications.
+ """
+
+ _OPTIONAL_ENV_VAR_MAP = {
+ "use_mesh_mock": "USE_MESH_MOCK"
+ }
+
+ def __init__(self, ssm=None, s3_client=None):
+ """
+ Initialize base MESH configuration.
+ """
+ self.ssm = ssm if ssm is not None else boto3.client('ssm')
+ self.s3_client = s3_client if s3_client is not None else boto3.client('s3')
+
+ # MESH connection attributes
+ self.mesh_endpoint = None
+ self.mesh_mailbox = None
+ self.mesh_mailbox_password = None
+ self.mesh_shared_key = None
+ self.client_cert = None
+ self.client_key = None
+ self.mesh_client = None
+
+ # Common configuration attributes
+ self.ssm_prefix = None
+ self.environment = None
+ self.certificate_expiry_metric_name = None
+ self.certificate_expiry_metric_namespace = None
+ self.polling_metric_name = None
+ self.polling_metric_namespace = None
+ self.use_mesh_mock = False
+
+ self._load_required_env_vars()
+
+ self._load_optional_env_vars()
+
+ def _load_required_env_vars(self):
+ """
+ Load required environment variables.
+ """
+ if not hasattr(self, '_REQUIRED_ENV_VAR_MAP'):
+ raise NotImplementedError()
+
+ missing_env_vars = []
+ for attr, key in self._REQUIRED_ENV_VAR_MAP.items():
+ if key not in os.environ:
+ missing_env_vars.append(f'"{key}"')
+ else:
+ setattr(self, attr, os.environ[key])
+
+ if len(missing_env_vars) > 0:
+ raise InvalidEnvironmentVariableError(
+ f"Required environment variables {', '.join(missing_env_vars)} not set.")
+
+ def _load_optional_env_vars(self):
+ """
+ Load optional environment variables.
+ """
+ for attr, key in self._OPTIONAL_ENV_VAR_MAP.items():
+ if key in os.environ:
+ value = os.environ[key]
+ if attr == "use_mesh_mock":
+ # Convert string to boolean
+ setattr(self, attr, value.lower() in ('true', '1', 'yes', 'on'))
+ else:
+ setattr(self, attr, value)
+
+ def __enter__(self):
+ # Load MESH configuration from SSM
+ ssm_response = self.ssm.get_parameter(
+ Name=self.ssm_prefix + '/config',
+ WithDecryption=True
+ )
+ mesh_config = json.loads(ssm_response['Parameter']['Value'])
+
+ self.mesh_endpoint = mesh_config['mesh_endpoint']
+ self.mesh_mailbox = mesh_config['mesh_mailbox']
+ self.mesh_mailbox_password = mesh_config['mesh_mailbox_password']
+ self.mesh_shared_key = mesh_config['mesh_shared_key'].encode('ascii')
+
+ # Load client certificates from SSM
+ client_cert_parameter = self.ssm.get_parameter(
+ Name=self.ssm_prefix + '/client-cert',
+ WithDecryption=True
+ )
+ client_key_parameter = self.ssm.get_parameter(
+ Name=self.ssm_prefix + '/client-key',
+ WithDecryption=True
+ )
+
+ self.client_cert = store_file(
+ client_cert_parameter['Parameter']['Value'].encode('utf-8')
+ )
+ self.client_key = store_file(
+ client_key_parameter['Parameter']['Value'].encode('utf-8')
+ )
+
+ # Build MESH client
+ self.mesh_client = self.build_mesh_client()
+
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ log.info('Cleaning up temporary files')
+ if self.client_cert:
+ os.unlink(self.client_cert)
+ if self.client_key:
+ os.unlink(self.client_key)
+ if self.mesh_client:
+ self.mesh_client.close()
+
+ def lookup_endpoint(self, endpoint_identifier):
+ variable_name = f"{endpoint_identifier}_ENDPOINT"
+
+ if hasattr(mesh_client, variable_name):
+ return getattr(mesh_client, variable_name)
+
+ raise InvalidMeshEndpointError(
+ f"mesh_client module has no such endpoint {variable_name}")
+
+ def build_mesh_client(self):
+ if self.use_mesh_mock:
+ mock_endpoint = self.mesh_endpoint
+ return MockMeshClient(
+ boto3.client('s3'),
+ mock_endpoint,
+ self.mesh_mailbox,
+ log
+ )
+
+ # Use real MESH client
+ report_expiry_time(
+ self.client_cert,
+ self.certificate_expiry_metric_name,
+ self.certificate_expiry_metric_namespace,
+ self.environment
+ )
+
+ return mesh_client.MeshClient(
+ self.lookup_endpoint(self.mesh_endpoint),
+ self.mesh_mailbox,
+ self.mesh_mailbox_password,
+ self.mesh_shared_key,
+ transparent_compress=True,
+ cert=(self.client_cert, self.client_key)
+ )
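+
+
+# Illustrative sketch of a concrete configuration (the subclass name and the
+# environment-variable keys below are examples only; real subclasses define
+# their own _REQUIRED_ENV_VAR_MAP and related attributes):
+#
+#     class PollConfig(BaseMeshConfig):
+#         _REQUIRED_ENV_VAR_MAP = {
+#             'ssm_prefix': 'SSM_PREFIX',
+#             'environment': 'ENVIRONMENT',
+#         }
+#
+#     with PollConfig() as config:
+#         for message in config.mesh_client.iterate_all_messages():
+#             ...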
diff --git a/utils/event-publisher-py/event_publisher/models.py b/utils/event-publisher-py/event_publisher/models.py
new file mode 100644
index 00000000..2fa5e889
--- /dev/null
+++ b/utils/event-publisher-py/event_publisher/models.py
@@ -0,0 +1,202 @@
+from typing import Any, Literal, Optional
+from pydantic import BaseModel, ConfigDict, Field, field_validator
+
+
+class CloudEvent(BaseModel):
+ # Required fields - NHS Notify CloudEvents profile
+ specversion: Literal['1.0'] = Field(
+ default='1.0',
+ description='CloudEvents specification version'
+ )
+ id: str = Field(
+ ...,
+ description='Unique identifier for this event instance (UUID)'
+ )
+ source: str = Field(
+ ...,
+ description='Event source for digital letters domain'
+ )
+ subject: str = Field(
+ ...,
+ description='Path in the form customer/{id}/recipient/{id} where each {id} is a UUID'
+ )
+ type: str = Field(
+ ...,
+ description='Concrete versioned event type string'
+ )
+ time: str = Field(
+ ...,
+ description='Timestamp when the event occurred (RFC 3339)'
+ )
+ recordedtime: str = Field(
+ ...,
+ description='Timestamp when the event was recorded/persisted'
+ )
+ severitynumber: int = Field(
+ ...,
+ ge=0,
+ le=5,
+ description='Numeric severity (TRACE=0, DEBUG=1, INFO=2, WARN=3, ERROR=4, FATAL=5)'
+ )
+ traceparent: str = Field(
+ ...,
+ description='W3C Trace Context traceparent header value'
+ )
+ data: dict[str, Any] = Field(
+ ...,
+ description='Digital letters payload'
+ )
+
+ # Optional fields
+ dataschema: Optional[str] = Field(
+ None,
+ description='Canonical URI of the event data schema'
+ )
+ datacontenttype: Optional[Literal['application/json']] = Field(
+ None,
+ description='Media type for the data field'
+ )
+ severitytext: Optional[Literal['TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL']] = Field(
+ None,
+ description='Log severity level name'
+ )
+ tracestate: Optional[str] = Field(
+ None,
+ description='Optional W3C Trace Context tracestate header value'
+ )
+ partitionkey: Optional[str] = Field(
+ None,
+ min_length=1,
+ max_length=64,
+ description='Partition / ordering key'
+ )
+ sequence: Optional[str] = Field(
+ None,
+ description='Zero-padded 20 digit numeric sequence'
+ )
+ sampledrate: Optional[int] = Field(
+ None,
+ ge=1,
+ description='Sampling factor: number of similar occurrences this event represents'
+ )
+ dataclassification: Optional[Literal['public', 'internal', 'confidential', 'restricted']] = Field(
+ None,
+ description='Data sensitivity classification'
+ )
+ dataregulation: Optional[Literal['GDPR', 'HIPAA', 'PCI-DSS', 'ISO-27001', 'NIST-800-53', 'CCPA']] = Field(
+ None,
+ description='Regulatory regime tag'
+ )
+ datacategory: Optional[Literal['non-sensitive', 'standard', 'sensitive', 'special-category']] = Field(
+ None,
+ description='Data category classification'
+ )
+
+ @field_validator('source')
+ @classmethod
+ def validate_source(cls, v: str) -> str:
+ if not v:
+ raise ValueError('Source cannot be empty')
+ import re
+ # Must match NHS Notify CloudEvents pattern
+ pattern = r'^/nhs/england/notify/(production|staging|development|uat)/(primary|secondary|dev-\d+)/data-plane/digitalletters/mesh$'
+
+ if not re.match(pattern, v):
+ raise ValueError(
+ f'Invalid source pattern: {v}. '
+                'Must match /nhs/england/notify/{environment}/{instance}/data-plane/digitalletters/mesh'
+ )
+ return v
+
+ @field_validator('subject')
+ @classmethod
+ def validate_subject(cls, v: str) -> str:
+ import re
+ if not re.match(
+ r'^customer/[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}/recipient/[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$',
+ v
+ ):
+ raise ValueError('Subject must be in format customer/{uuid}/recipient/{uuid}')
+ return v
+
+ @field_validator('type')
+ @classmethod
+ def validate_type(cls, v: str) -> str:
+ import re
+ if not re.match(r'^uk\.nhs\.notify\.digital\.letters\.[a-z0-9.]+\.v\d+$', v):
+ raise ValueError(f'Invalid type pattern: {v}')
+ return v
+
+ @field_validator('traceparent')
+ @classmethod
+ def validate_traceparent(cls, v: str) -> str:
+ import re
+ if not re.match(r'^00-[0-9a-f]{32}-[0-9a-f]{16}-[0-9a-f]{2}$', v):
+ raise ValueError('Invalid traceparent format')
+ return v
+
+ @field_validator('partitionkey')
+ @classmethod
+ def validate_partitionkey(cls, v: Optional[str]) -> Optional[str]:
+ if v is None:
+ return v
+ import re
+ if not re.match(r'^[a-z0-9-]+$', v):
+ raise ValueError('Partition key must only contain lowercase letters, numbers, and hyphens')
+ return v
+
+ @field_validator('sequence')
+ @classmethod
+ def validate_sequence(cls, v: Optional[str]) -> Optional[str]:
+ if v is None:
+ return v
+ import re
+ if not re.match(r'^\d{20}$', v):
+ raise ValueError('Sequence must be exactly 20 digits')
+ return v
+
+ model_config = ConfigDict(extra='allow')
+
+
+class MeshInboxMessageData(BaseModel):
+ """Data payload for MESH inbox message received event"""
+ meshMessageId: str = Field(..., min_length=1)
+ senderId: str = Field(..., min_length=1)
+ messageReference: str = Field(..., min_length=1)
+
+
+class MeshInboxMessageEvent(CloudEvent):
+ """Complete CloudEvent for MESH inbox message received"""
+ data: MeshInboxMessageData
+
+ @field_validator('data', mode='before')
+ @classmethod
+ def validate_data(cls, v: Any) -> MeshInboxMessageData:
+ """Ensure data is validated as MeshInboxMessageData"""
+ if isinstance(v, MeshInboxMessageData):
+ return v
+ if isinstance(v, dict):
+ return MeshInboxMessageData(**v)
+        raise ValueError('data must be a dict with meshMessageId, senderId and messageReference')
+
+
+class MeshDownloadMessageData(BaseModel):
+ """Data payload for MESH inbox message downloaded event"""
+ messageReference: str = Field(..., min_length=1)
+ senderId: str = Field(..., min_length=1)
+ messageUri: str = Field(..., min_length=1)
+
+
+class MeshDownloadMessageEvent(CloudEvent):
+ """Complete CloudEvent for MESH inbox message downloaded"""
+ data: MeshDownloadMessageData
+
+ @field_validator('data', mode='before')
+ @classmethod
+ def validate_data(cls, v: Any) -> MeshDownloadMessageData:
+ """Ensure data is validated as MeshDownloadMessageData"""
+ if isinstance(v, MeshDownloadMessageData):
+ return v
+ if isinstance(v, dict):
+ return MeshDownloadMessageData(**v)
+ raise ValueError('data must be a dict with messageReference, senderId, and messageUri')
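+
+
+# Illustrative construction (values chosen only to satisfy the validators above;
+# they do not refer to real customers or messages):
+#
+#     MeshInboxMessageEvent(
+#         id='550e8400-e29b-41d4-a716-446655440001',
+#         source='/nhs/england/notify/development/primary/data-plane/digitalletters/mesh',
+#         subject='customer/920fca11-596a-4eca-9c47-99f624614658/recipient/769acdd4-6a47-496f-999f-76a6fd2c3959',
+#         type='uk.nhs.notify.digital.letters.mesh.inbox.message.received.v1',
+#         time='2024-07-10T14:30:00Z',
+#         recordedtime='2024-07-10T14:30:00.250Z',
+#         severitynumber=2,
+#         traceparent='00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+#         data={'meshMessageId': 'abc', 'senderId': 'sender1', 'messageReference': 'ref1'},
+#     )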
diff --git a/utils/event-publisher-py/pytest.ini b/utils/event-publisher-py/pytest.ini
new file mode 100644
index 00000000..826ac6e2
--- /dev/null
+++ b/utils/event-publisher-py/pytest.ini
@@ -0,0 +1,16 @@
+[pytest]
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+testpaths = event_publisher/__tests__
+addopts = -v --tb=short
+
+[coverage:run]
+relative_files = True
+omit =
+ */event_publisher/__tests__/*
+ */test_*.py
+ */__pycache__/*
+ */venv/*
+ */.venv/*
+ */env/*
diff --git a/utils/event-publisher-py/requirements-dev.txt b/utils/event-publisher-py/requirements-dev.txt
new file mode 100644
index 00000000..1cf3ca7f
--- /dev/null
+++ b/utils/event-publisher-py/requirements-dev.txt
@@ -0,0 +1,5 @@
+-r requirements.txt
+pytest>=8.0.0
+pytest-cov>=6.0.0
+pytest-mock>=3.14.0
+moto>=5.0.0
diff --git a/utils/event-publisher-py/requirements.txt b/utils/event-publisher-py/requirements.txt
new file mode 100644
index 00000000..671c270d
--- /dev/null
+++ b/utils/event-publisher-py/requirements.txt
@@ -0,0 +1,7 @@
+boto3>=1.40.70
+pydantic>=2.0.0
+structlog>=21.5.0
+mesh-client>=3.2.3
+pyopenssl>=24.0.0
+-e ../metric-publishers
+-e ../py-mock-mesh
diff --git a/utils/event-publisher-py/setup.py b/utils/event-publisher-py/setup.py
new file mode 100644
index 00000000..8475aa40
--- /dev/null
+++ b/utils/event-publisher-py/setup.py
@@ -0,0 +1,7 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="event-publisher-py",
+ version="0.1.0",
+ packages=find_packages(),
+)
diff --git a/utils/get_version.sh b/utils/get_version.sh
new file mode 100755
index 00000000..9e53c095
--- /dev/null
+++ b/utils/get_version.sh
@@ -0,0 +1,14 @@
+#!/usr/bin/env bash
+
+if [[ "${BASH_SOURCE[0]}" -ef "$0" ]]; then
+ echo "get_version.sh should be sourced, not executed!"
+ exit 1
+fi
+
+root_package_json="$(dirname "${BASH_SOURCE[0]}")/../package.json"
+version="$(jq -r ".version" "${root_package_json}")"
+POINT_VERSION="${version}"."${CI_PIPELINE_IID:-0}"
+BRANCH_AND_VERSION="${CI_COMMIT_REF_NAME:-$(git branch --show-current)}"_"${POINT_VERSION}";
+
+export POINT_VERSION
+export BRANCH_AND_VERSION
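+
+# Example (illustrative): source this file from the repository root, then read
+# the exported variables:
+#
+#   source utils/get_version.sh
+#   echo "${POINT_VERSION} ${BRANCH_AND_VERSION}"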
diff --git a/utils/metric-publishers/Makefile b/utils/metric-publishers/Makefile
new file mode 100644
index 00000000..1a78108a
--- /dev/null
+++ b/utils/metric-publishers/Makefile
@@ -0,0 +1,31 @@
+PACKAGE=metric_publishers
+VERSION=0.1.0
+
+install:
+ pip install -r requirements.txt
+
+install-dev: install
+ pip install -r requirements-dev.txt
+
+test:
+ cd ../.. && PYTHONPATH=utils/metric-publishers:$$PYTHONPATH pytest utils/metric-publishers/tests/ -v
+
+coverage:
+ cd ../.. && PYTHONPATH=utils/metric-publishers:$$PYTHONPATH pytest utils/metric-publishers/tests/ \
+ --cov=utils/metric-publishers/metric_publishers \
+ --cov-config=utils/metric-publishers/pytest.ini \
+ --cov-report=html:utils/metric-publishers/htmlcov \
+ --cov-report=term-missing \
+ --cov-report=xml:utils/metric-publishers/coverage.xml \
+ --cov-branch
+
+lint:
+ pylint metric_publishers
+
+format:
+ autopep8 -ri .
+
+clean:
+ rm -rf target
+
+.PHONY: audit install install-dev test coverage lint format clean
diff --git a/utils/metric-publishers/README.md b/utils/metric-publishers/README.md
new file mode 100644
index 00000000..7ddd6d53
--- /dev/null
+++ b/utils/metric-publishers/README.md
@@ -0,0 +1,22 @@
+# metric-publishers module
+
+This module reports the certificate expiry time as a CloudWatch metric; currently it is used to monitor the MeshClient certificate expiry.
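+
+A minimal usage sketch (the certificate path, metric name, namespace and environment below are illustrative values, not defaults of this package):
+
+```python
+from metric_publishers.certificate_monitor import report_expiry_time
+
+# Reads the PEM certificate at the given path and prints a CloudWatch Embedded
+# Metric Format (EMF) log line with the number of days until it expires.
+report_expiry_time(
+    "/tmp/client-cert.pem",
+    "CertificateDaysToExpiry",
+    "DigitalLetters/CertificateMonitoring",
+    "dev",
+)
+```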
+
+## Dependencies
+
+- make
+- pip - used by the Makefile targets to install dependencies from the requirements files
+
+## Test, Build and Package
+
+`make install` - install runtime dependencies with pip into the active environment (e.g. a local `.venv`); use `make install-dev` to add the test tooling
+
+`make test` - run unit tests for the package
+
+`make clean` - remove generated files from the project
+
+## Configuration
+
+### VSCode
+
+If using VSCode, after running `make install`, ensure your Python interpreter is set to the `.venv` directory (cmd+shift+p, "Python: Select Interpreter")
diff --git a/utils/metric-publishers/metric_publishers/__init__.py b/utils/metric-publishers/metric_publishers/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/utils/metric-publishers/metric_publishers/certificate_monitor.py b/utils/metric-publishers/metric_publishers/certificate_monitor.py
new file mode 100644
index 00000000..681cfafe
--- /dev/null
+++ b/utils/metric-publishers/metric_publishers/certificate_monitor.py
@@ -0,0 +1,54 @@
+"""
+Module for parsing certificate expiry and reporting metric
+"""
+from datetime import datetime
+import OpenSSL
+import OpenSSL.crypto
+from .metric_client import Metric
+
+
+def report_expiry_time(client_cert, metric_name, metric_namespace, environment):
+ """
+ Report the days till expiry date for the given certificate
+ """
+ certificate_metric = CertificateExpiryMonitor(
+ client_cert=client_cert,
+ metric_client=Metric(name=metric_name,
+ namespace=metric_namespace,
+ dimensions={"Environment": environment}))
+
+ certificate_metric.report_expiry_time()
+
+
+class CertificateExpiryMonitor: # pylint: disable=too-few-public-methods
+ """Class for storing report metrics in Cloudwatch"""
+
+ def __init__(self, **kwargs):
+ self.client_cert = kwargs['client_cert']
+ self.metric_client = kwargs['metric_client']
+
+ def get_expiry_date(self):
+ """
+ Gets the expiry date from the certificate
+ """
+        with open(self.client_cert, encoding="utf-8") as cert_file:
+            cert = OpenSSL.crypto.load_certificate(
+                OpenSSL.crypto.FILETYPE_PEM, cert_file.read())
+ not_after = cert.get_notAfter()
+ timestamp = not_after.decode('utf-8')
+ return datetime.strptime(timestamp, '%Y%m%d%H%M%S%z').date()
+
+ def days_to_date(self, date):
+ """
+ Calculates number of days till date
+ """
+ now = datetime.now().date()
+ return (date - now).days
+
+ def report_expiry_time(self):
+ """
+        Reads the certificate and reports a metric with the number of days until it expires.
+ """
+ expiry_date = self.get_expiry_date()
+ days_left = self.days_to_date(expiry_date)
+ self.metric_client.record(days_left)
diff --git a/utils/metric-publishers/metric_publishers/metric_client.py b/utils/metric-publishers/metric_publishers/metric_client.py
new file mode 100644
index 00000000..dfbdb1ba
--- /dev/null
+++ b/utils/metric-publishers/metric_publishers/metric_client.py
@@ -0,0 +1,41 @@
+"""
+Module for reporting metrics
+"""
+import json
+import time
+
+
+class Metric: # pylint: disable=too-few-public-methods
+ """
+ Class for reporting metrics
+ """
+
+ def __init__(self, **kwargs):
+ self.name = kwargs['name']
+ self.namespace = kwargs['namespace']
+ self.dimensions = kwargs.get("dimensions", {})
+ self.unit = kwargs.get("unit", 'Count')
+
+ def record(self, value):
+ """
+ method for reporting metric
+ """
+ print(json.dumps({
+ "_aws": {
+ "Timestamp": int(time.time() * 1000),
+ "CloudWatchMetrics": [{
+ "Namespace": self.namespace,
+ "Dimensions": [
+ list(self.dimensions.keys())
+ ],
+ "Metrics": [
+ {
+ "Name": self.name,
+ "Unit": self.unit,
+ }
+ ]
+ }],
+ },
+ **self.dimensions,
+ self.name: value,
+ }))
diff --git a/utils/metric-publishers/package.json b/utils/metric-publishers/package.json
new file mode 100644
index 00000000..7b112beb
--- /dev/null
+++ b/utils/metric-publishers/package.json
@@ -0,0 +1,15 @@
+{
+ "description": "python metrics library",
+ "name": "@comms/metric-publishers",
+ "private": true,
+ "scripts": {
+ "audit": "make audit",
+ "lint": "make lint",
+ "lint:fmt": "make format",
+ "python-install": "make install",
+ "test:unit": "make test",
+ "test:unit:coverage": "make test-coverage",
+ "typecheck": "echo this package contains no typescript"
+ },
+ "version": "1.0.0"
+}
diff --git a/utils/metric-publishers/pytest.ini b/utils/metric-publishers/pytest.ini
new file mode 100644
index 00000000..20b86c5d
--- /dev/null
+++ b/utils/metric-publishers/pytest.ini
@@ -0,0 +1,19 @@
+[pytest]
+testpaths = tests
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+addopts = -v --tb=short
+env =
+ AWS_ACCESS_KEY_ID=testing
+ AWS_SECRET_ACCESS_KEY=testing
+ AWS_DEFAULT_REGION=eu-west-2
+
+[coverage:run]
+relative_files = True
+omit =
+ */tests/*
+ */test_*.py
+ */__pycache__/*
+ */venv/*
+ */env/*
diff --git a/utils/metric-publishers/requirements-dev.txt b/utils/metric-publishers/requirements-dev.txt
new file mode 100644
index 00000000..8a4c6ef1
--- /dev/null
+++ b/utils/metric-publishers/requirements-dev.txt
@@ -0,0 +1,6 @@
+-r requirements.txt
+pylint>=2.17.5
+pytest>=7.4.0
+pytest-cov>=4.1.0
+autopep8>=2.0.2
+jake>=3.0.1
diff --git a/utils/metric-publishers/requirements.txt b/utils/metric-publishers/requirements.txt
new file mode 100644
index 00000000..5def5b4a
--- /dev/null
+++ b/utils/metric-publishers/requirements.txt
@@ -0,0 +1,5 @@
+boto3>=1.28.62
+urllib3>=1.26.19,<2.0.0
+idna>=3.7
+requests>=2.32.0
+pyopenssl>=24.2.1
diff --git a/utils/metric-publishers/setup.py b/utils/metric-publishers/setup.py
new file mode 100644
index 00000000..391b6ad1
--- /dev/null
+++ b/utils/metric-publishers/setup.py
@@ -0,0 +1,7 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="metric-publishers",
+ version="0.1.0",
+ packages=find_packages(),
+)
diff --git a/utils/metric-publishers/tests/test_certificate_monitor.py b/utils/metric-publishers/tests/test_certificate_monitor.py
new file mode 100644
index 00000000..8fcbcc6d
--- /dev/null
+++ b/utils/metric-publishers/tests/test_certificate_monitor.py
@@ -0,0 +1,110 @@
+from unittest.mock import Mock, patch
+from metric_publishers.certificate_monitor import CertificateExpiryMonitor, report_expiry_time
+from metric_publishers.metric_client import Metric
+import OpenSSL
+import OpenSSL.crypto
+import tempfile
+import pytest
+import os
+import json
+
+
+def create_certificate(not_after):
+ cert = OpenSSL.crypto.X509()
+ k = OpenSSL.crypto.PKey()
+ k.generate_key(OpenSSL.crypto.TYPE_RSA, 4096)
+ cert.get_subject().C = "UK"
+ cert.get_subject().ST = "stateOrProvinceName"
+ cert.get_subject().L = "localityName"
+ cert.get_subject().O = "stateOrProvinceName"
+ cert.get_subject().OU = "organizationUnitName"
+ cert.get_subject().CN = "commonName"
+ cert.get_subject().emailAddress = "emailAddress"
+ cert.set_serial_number(0)
+ cert.gmtime_adj_notBefore(0)
+ cert.gmtime_adj_notAfter(not_after)
+ cert.set_issuer(cert.get_subject())
+ cert.set_pubkey(k)
+ cert.sign(k, 'sha512')
+
+ return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, cert)
+
+
+def days_to_seconds(days):
+ return 60*60*24 * days
+
+
+def test_certificate_monitor_valid_cert():
+ cert_bytes = create_certificate(days_to_seconds(10))
+ with tempfile.NamedTemporaryFile(delete=False) as file:
+ file.write(cert_bytes)
+ file.close()
+ certificate_monitor_valid_cert_test(file.name)
+ os.unlink(file.name)
+
+
+def certificate_monitor_valid_cert_test(cert_filename):
+ metric_client = Mock(spec=Metric)
+
+ monitor = CertificateExpiryMonitor(
+ client_cert=cert_filename, metric_client=metric_client)
+ monitor.report_expiry_time()
+
+ metric_client.record.assert_called_once_with(10)
+
+
+def test_certificate_monitor_invalid_cert():
+ cert_bytes = bytes("jkh", encoding="utf-8")
+ with tempfile.NamedTemporaryFile(delete=False) as file:
+ file.write(cert_bytes)
+ file.close()
+ certificate_monitor_invalid_cert_test(file.name)
+ os.unlink(file.name)
+
+
+def certificate_monitor_invalid_cert_test(cert_filename):
+ metric_client = Mock(spec=Metric)
+
+ monitor = CertificateExpiryMonitor(
+ client_cert=cert_filename, metric_client=metric_client)
+ with pytest.raises(OpenSSL.crypto.Error):
+ monitor.report_expiry_time()
+
+ metric_client.record.assert_not_called()
+
+
+@patch('builtins.print')
+@patch('time.time', Mock(return_value=1234567890))
+def test_report_expiry_time_valid_cert(mock_print):
+ cert_bytes = create_certificate(days_to_seconds(10))
+ with tempfile.NamedTemporaryFile(delete=False) as file:
+ file.write(cert_bytes)
+ file.close()
+ report_expiry_time(file.name,
+ 'Test_alarm_2', 'test_alarm_namespace_2', 'de-test2')
+
+ mock_print.assert_called_once()
+
+ arg = mock_print.call_args[0][0]
+
+ assert json.loads(arg) == {
+ "_aws": {
+ "Timestamp": 1234567890000,
+ "CloudWatchMetrics": [{
+ "Namespace": "test_alarm_namespace_2",
+ "Dimensions": [
+ ["Environment"]
+ ],
+ "Metrics": [
+ {
+ "Name": "Test_alarm_2",
+ "Unit": "Count",
+ }
+ ]
+ }],
+ },
+ "Environment": "de-test2",
+ "Test_alarm_2": 10,
+ }
+
+ os.unlink(file.name)
diff --git a/utils/metric-publishers/tests/test_metric_client.py b/utils/metric-publishers/tests/test_metric_client.py
new file mode 100644
index 00000000..cb422184
--- /dev/null
+++ b/utils/metric-publishers/tests/test_metric_client.py
@@ -0,0 +1,37 @@
+import json
+from unittest.mock import Mock, patch
+from metric_publishers.metric_client import Metric
+
+
+@patch('builtins.print')
+@patch('time.time', Mock(return_value=1234567890))
+def test_metric(mock_print):
+
+ m2 = Metric(name='Test_alarm_1',
+ namespace='test_alarm_namespace_1',
+ dimensions={"Environment": 'de-test1'})
+ m2.record(56)
+
+ mock_print.assert_called_once()
+
+ arg = mock_print.call_args[0][0]
+
+ assert json.loads(arg) == {
+ "_aws": {
+ "Timestamp": 1234567890000,
+ "CloudWatchMetrics": [{
+ "Namespace": "test_alarm_namespace_1",
+ "Dimensions": [
+ ["Environment"]
+ ],
+ "Metrics": [
+ {
+ "Name": "Test_alarm_1",
+ "Unit": "Count",
+ }
+ ]
+ }],
+ },
+ "Environment": "de-test1",
+ "Test_alarm_1": 56,
+ }
diff --git a/utils/py-mock-mesh/Makefile b/utils/py-mock-mesh/Makefile
new file mode 100644
index 00000000..a3b16747
--- /dev/null
+++ b/utils/py-mock-mesh/Makefile
@@ -0,0 +1,25 @@
+PACKAGE=py_mock_mesh
+VERSION=0.1.0
+
+install:
+ pip install -r requirements.txt
+
+install-dev: install
+ pip install -r requirements-dev.txt
+
+test:
+ @echo "No tests for py-mock-mesh"
+
+test-coverage:
+ @echo "No tests for py-mock-mesh"
+
+lint:
+ pylint py_mock_mesh
+
+format:
+ autopep8 -ri .
+
+clean:
+ rm -rf target
+
+.PHONY: audit install install-dev test test-coverage lint format clean
diff --git a/utils/py-mock-mesh/README.md b/utils/py-mock-mesh/README.md
new file mode 100644
index 00000000..6ac0b551
--- /dev/null
+++ b/utils/py-mock-mesh/README.md
@@ -0,0 +1,33 @@
+# py-mock-mesh
+
+## Overview
+
+The py-mock-mesh library provides a mock implementation of the Digital Letters MESH API using AWS S3, allowing developers to simulate interactions with the MESH service for testing and development purposes.
+
+This library does the following:
+
+- Simulates the sending and receiving of messages via the MESH API.
+- Stores messages in AWS S3 buckets for retrieval and processing.
+- Provides endpoints to interact with the mock MESH service.
+- Supports testing of MESH API integrations without needing access to the live service.
+- Facilitates development by mimicking the behaviour of the actual MESH service.
+- Ensures compatibility with existing MESH API clients.
+- Allows configuration of message delivery delays and errors for testing purposes.
+- Includes logging and monitoring features for debugging and analysis.
+
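+A minimal sketch of driving the mock from Python (the bucket name and mailbox id are illustrative):
+
+```python
+import boto3
+import structlog
+
+from py_mock_mesh import MockMeshClient
+
+log = structlog.get_logger()
+client = MockMeshClient(
+    boto3.client("s3"),
+    "s3://example-bucket/mock-mesh",  # bucket plus key prefix backing the mock mailboxes
+    "example-mailbox",                # mailbox id used to build the in/ and out/ prefixes
+    log,
+)
+
+# Iterate over messages waiting under the mailbox's inbox prefix
+for message in client.iterate_all_messages():
+    ...  # each item is a MockMeshMessage wrapping one S3 object
+
+# Write a message into the outbox prefix for a recipient mailbox
+client.send_message(
+    "recipient-mailbox",
+    b"column1,column2\n",
+    local_id="example-local-id",
+    workflow_id="example-workflow",
+)
+```
+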
+## Sending and Receiving Messages
+
+1. **Upload a test message** (CSV file) to the S3 bucket with required metadata:
+
+ ```bash
+ mesh_message_id=$(uuidgen)
+ aws s3 cp \
+ s3://nhs--eu-west-2--dl-non-pii-data/mock-mesh//in/$mesh_message_id \
+ --metadata "{\"subject\":\"\",\"sender\":\"\",\"workflow_id\":\"\",\"local_id\":\"\"}"
+ ```
+
+ **Note:** The input file must be a CSV (comma-delimited). Ensure the sender mailbox ID exists in SSM Parameter Store at `/dl//mesh/senders/` with valid sender configuration.
+
+2. **Trigger the MESH poll lambda** by pressing 'Test' in the AWS console, or wait for the scheduled poll (every 5 minutes).
+
+3. **Check CloudWatch logs** for the MESH poll lambda first, then the MESH download lambda to verify message processing.
diff --git a/utils/py-mock-mesh/package.json b/utils/py-mock-mesh/package.json
new file mode 100644
index 00000000..27b4951c
--- /dev/null
+++ b/utils/py-mock-mesh/package.json
@@ -0,0 +1,15 @@
+{
+ "description": "python library to simulate mesh with an s3 backend",
+ "name": "@comms/py-mock-mesh",
+ "private": true,
+ "scripts": {
+ "audit": "make audit",
+ "lint": "make lint",
+ "lint:fmt": "make format",
+ "python-install": "make install",
+ "test:unit": "make test",
+ "test:unit:coverage": "make test-coverage",
+ "typecheck": "echo this package contains no typescript"
+ },
+ "version": "1.0.0"
+}
diff --git a/utils/py-mock-mesh/py_mock_mesh/__init__.py b/utils/py-mock-mesh/py_mock_mesh/__init__.py
new file mode 100644
index 00000000..7c46bae4
--- /dev/null
+++ b/utils/py-mock-mesh/py_mock_mesh/__init__.py
@@ -0,0 +1,6 @@
+"""
+Mock MESH client for testing
+"""
+
+from .mesh_client import MockMeshClient
+from .mesh_message import MockMeshMessage, InvalidHeaderException
diff --git a/utils/py-mock-mesh/py_mock_mesh/mesh_client.py b/utils/py-mock-mesh/py_mock_mesh/mesh_client.py
new file mode 100644
index 00000000..5daa8fb2
--- /dev/null
+++ b/utils/py-mock-mesh/py_mock_mesh/mesh_client.py
@@ -0,0 +1,81 @@
+"""
+Module implementing a fake MESH mailbox using AWS S3
+"""
+
+import uuid
+from .mesh_message import MockMeshMessage
+
+
+class MockMeshClient: # pylint: disable=too-many-arguments
+ """
+ Implements a fake MESH mailbox using AWS S3
+ """
+
+ def __init__(self, s3_client, s3_path, mailbox, log):
+ self.__log = log
+ self.s3_client = s3_client
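+        # Split "s3://<bucket>/<prefix>" into the bucket name and key prefix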
+ _, _, self.s3_bucket, self.s3_prefix = s3_path.split('/', 3)
+
+ self.inbox_prefix = f"{self.s3_prefix}/{mailbox}/in/"
+ self.outbox_prefix = f"{self.s3_prefix}/{mailbox}/out/"
+
+ def iterate_all_messages(self):
+ """
+ Iterates over all available messages in a mailbox
+ """
+
+ response = self.s3_client.list_objects_v2(
+ Bucket=self.s3_bucket,
+ Prefix=self.inbox_prefix)
+
+ message_count = 0
+
+ has_objects = True
+ while has_objects:
+ for s3_object in response.get('Contents', []):
+ message_count += 1
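+                # Assumed safety cap: stop after 500 messages so a single poll stays bounded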
+ if message_count > 500:
+ return
+
+ yield MockMeshMessage(self.s3_client, self.s3_bucket, s3_object, self.__log)
+
+ # pagination
+ has_objects = response['IsTruncated']
+ if has_objects:
+ continuation_token = response['NextContinuationToken']
+ response = self.s3_client.list_objects_v2(
+ Bucket=self.s3_bucket,
+ Prefix=self.inbox_prefix,
+ ContinuationToken=continuation_token
+ )
+
+ def send_message(self, recipient, data, **kwargs):
+ """
+ Sends a message to a mailbox
+ """
+
+ local_id = kwargs['local_id']
+ message_id = f"{local_id}_{uuid.uuid1()}"
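+        # Messages are written to <prefix>/<mailbox>/out/<recipient>/<local_id>_<uuid>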
+ message_key = f"{self.outbox_prefix}{recipient}/{message_id}"
+
+        # Round-trip the payload through UTF-8 so non-UTF-8 data fails fast
+        body = data.decode('utf-8').encode('utf-8')
+
+        self.s3_client.put_object(
+            Bucket=self.s3_bucket,
+            Key=message_key,
+            Body=body,
+ Metadata=kwargs
+ )
+
+    def close(self):
+        """
+        No-op; present so the mock exposes the same interface as the real MESH client
+        """
+
+    def handshake(self):
+        """
+        No-op; present so the mock exposes the same interface as the real MESH client
+        """
diff --git a/utils/py-mock-mesh/py_mock_mesh/mesh_message.py b/utils/py-mock-mesh/py_mock_mesh/mesh_message.py
new file mode 100644
index 00000000..af627744
--- /dev/null
+++ b/utils/py-mock-mesh/py_mock_mesh/mesh_message.py
@@ -0,0 +1,107 @@
+"""
+Module for mock MESH messages
+"""
+
+
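+# Metadata keys accepted on mock messages; these appear to mirror the
+# headers exposed by the real MESH client's message objects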
+_ALLOWED_HEADERS = [
+ 'local_id',
+ 'message_id',
+ 'message_type',
+ 'partner_id',
+ 'recipient',
+ 'recipient_smtp',
+ 'sender',
+ 'sender_smtp',
+ 'subject',
+ 'version',
+ 'workflow_id'
+]
+
+
+class InvalidHeaderException(Exception):
+ """
+ Indicates an invalid header on a MESH message
+ """
+ def __init__(self, header_key, header_value):
+ self.header_key = header_key
+ self.header_value = header_value
+ super().__init__(f"Invalid header: {header_key}={header_value}")
+
+
+class MockMeshMessage(): # pylint: disable=too-many-instance-attributes
+ """
+ Represents an S3-backed MESH Message
+ """
+
+ def __init__(self, s3_client, s3_bucket, s3_object, log):
+ self.subject = None
+ self.sender = None
+ self.local_id = None
+ self.__log = log
+ self.headers = {}
+
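+        # Conditional GET: IfMatch on the listing ETag makes the read fail if the
+        # object changed between listing and retrieval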
+ s3_response = s3_client.get_object(
+ Bucket=s3_bucket,
+ Key=s3_object['Key'],
+ IfMatch=s3_object['ETag']
+ )
+
+ self.__log.info(f"Read S3 object {s3_object['Key']}")
+
+ for header_key, header_value in s3_response['Metadata'].items():
+ self.__log.info(
+ f"Read header key {header_key}, value {header_value}")
+
+ self.headers[header_key] = header_value
+
+ if header_key not in _ALLOWED_HEADERS:
+ raise InvalidHeaderException(header_key, header_value)
+
+ setattr(self, header_key, header_value)
+
+ self.full_body = s3_response['Body']
+
+ self._msg_id = s3_object['Key'].rsplit('/', 2)[-1]
+ self._s3_client = s3_client
+ self._s3_bucket = s3_bucket
+ self._s3_key = s3_object['Key']
+
+ def read(self, byte_count=None):
+ """
+ Reads bytes from the message
+ """
+ return self.full_body.read(byte_count)
+
+ def id(self): # pylint: disable=invalid-name
+ """
+ Returns the message id
+ """
+
+ return self._msg_id
+
+    def mex_headers(self):
+        """
+        Returns the (key, value) pairs for all message headers
+        """
+        return self.headers.items()
+
+    def __repr__(self):
+        return f"MockMeshMessage(id={self._msg_id})"
+
+ def acknowledge(self):
+ """
+ Acknowledge the message and delete it from the inbox
+ """
+
+ self._s3_client.delete_object(
+ Bucket=self._s3_bucket,
+ Key=self._s3_key
+ )
+
+ def __iter__(self):
+ return self.full_body.iter_lines()
diff --git a/utils/py-mock-mesh/requirements-dev.txt b/utils/py-mock-mesh/requirements-dev.txt
new file mode 100644
index 00000000..8a4c6ef1
--- /dev/null
+++ b/utils/py-mock-mesh/requirements-dev.txt
@@ -0,0 +1,6 @@
+-r requirements.txt
+pylint>=2.17.5
+pytest>=7.4.0
+pytest-cov>=4.1.0
+autopep8>=2.0.2
+jake>=3.0.1
diff --git a/utils/py-mock-mesh/requirements.txt b/utils/py-mock-mesh/requirements.txt
new file mode 100644
index 00000000..5b6e0013
--- /dev/null
+++ b/utils/py-mock-mesh/requirements.txt
@@ -0,0 +1,5 @@
+boto3>=1.28.62
+urllib3>=1.26.19,<2.0.0
+idna>=3.7
+requests>=2.32.0
+setuptools>=78.1.1
diff --git a/utils/py-mock-mesh/setup.py b/utils/py-mock-mesh/setup.py
new file mode 100644
index 00000000..457134c2
--- /dev/null
+++ b/utils/py-mock-mesh/setup.py
@@ -0,0 +1,7 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="py-mock-mesh",
+ version="0.1.0",
+ packages=find_packages(),
+)