diff --git a/Makefile b/Makefile
index c61d2fd5..321de929 100644
--- a/Makefile
+++ b/Makefile
@@ -12,11 +12,12 @@ quick-start: config clean build serve-docs # Quick start target to setup, build
dependencies:: # Install dependencies needed to build and test the project @Pipeline
$(MAKE) -C src/cloudevents install
$(MAKE) -C src/eventcatalogasyncapiimporter install
- $(MAKE) -C lambdas/mesh-acknowledge install
$(MAKE) -C utils/py-utils install
+ $(MAKE) -C utils/py-mock-mesh install
+ $(MAKE) -C lambdas/mesh-acknowledge install
$(MAKE) -C lambdas/mesh-poll install
$(MAKE) -C lambdas/mesh-download install
- $(MAKE) -C utils/py-mock-mesh install
+ $(MAKE) -C lambdas/report-sender install
./scripts/set-github-token.sh
npm install --workspaces
$(MAKE) generate
@@ -48,6 +49,7 @@ clean:: # Clean-up project resources (main) @Operations
$(MAKE) -C utils/py-utils clean && \
$(MAKE) -C lambdas/mesh-poll clean && \
$(MAKE) -C lambdas/mesh-download clean && \
+ $(MAKE) -C lambdas/report-sender clean && \
$(MAKE) -C utils/py-mock-mesh clean && \
$(MAKE) -C src/python-schema-generator clean && \
rm -f .version
diff --git a/infrastructure/terraform/components/dl/README.md b/infrastructure/terraform/components/dl/README.md
index ac8f25b4..73b0051a 100644
--- a/infrastructure/terraform/components/dl/README.md
+++ b/infrastructure/terraform/components/dl/README.md
@@ -66,6 +66,7 @@ No requirements.
| [print\_status\_handler](#module\_print\_status\_handler) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| [report\_event\_transformer](#module\_report\_event\_transformer) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| [report\_scheduler](#module\_report\_scheduler) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
+| [report\_sender](#module\_report\_sender) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip | n/a |
| [s3bucket\_cf\_logs](#module\_s3bucket\_cf\_logs) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-s3bucket.zip | n/a |
| [s3bucket\_file\_quarantine](#module\_s3bucket\_file\_quarantine) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-s3bucket.zip | n/a |
| [s3bucket\_file\_safe](#module\_s3bucket\_file\_safe) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-s3bucket.zip | n/a |
@@ -84,6 +85,7 @@ No requirements.
| [sqs\_print\_analyser](#module\_sqs\_print\_analyser) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-sqs.zip | n/a |
| [sqs\_print\_sender](#module\_sqs\_print\_sender) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-sqs.zip | n/a |
| [sqs\_print\_status\_handler](#module\_sqs\_print\_status\_handler) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-sqs.zip | n/a |
+| [sqs\_report\_sender](#module\_sqs\_report\_sender) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-sqs.zip | n/a |
| [sqs\_scanner](#module\_sqs\_scanner) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-sqs.zip | n/a |
| [sqs\_ttl](#module\_sqs\_ttl) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-sqs.zip | n/a |
| [sqs\_ttl\_handle\_expiry\_errors](#module\_sqs\_ttl\_handle\_expiry\_errors) | https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-sqs.zip | n/a |
diff --git a/infrastructure/terraform/components/dl/cloudwatch_event_rule_report_generated.tf b/infrastructure/terraform/components/dl/cloudwatch_event_rule_report_generated.tf
new file mode 100644
index 00000000..d8c1b694
--- /dev/null
+++ b/infrastructure/terraform/components/dl/cloudwatch_event_rule_report_generated.tf
@@ -0,0 +1,20 @@
+resource "aws_cloudwatch_event_rule" "report_generated" {
+ name = "${local.csi}-report-generated"
+ description = "Route ReportGenerated events from report-generation lambda to report-sender queue"
+ event_bus_name = aws_cloudwatch_event_bus.main.name
+
+ event_pattern = jsonencode({
+ "detail" : {
+ "type" : [
+ "uk.nhs.notify.digital.letters.reporting.report.generated.v1"
+ ]
+ }
+ })
+}
+
+# EventBridge target to send events to SQS queue
+resource "aws_cloudwatch_event_target" "report_sender_sqs" {
+ rule = aws_cloudwatch_event_rule.report_generated.name
+ arn = module.sqs_report_sender.sqs_queue_arn
+ event_bus_name = aws_cloudwatch_event_bus.main.name
+}
diff --git a/infrastructure/terraform/components/dl/lambda_event_source_mapping_report_sender.tf b/infrastructure/terraform/components/dl/lambda_event_source_mapping_report_sender.tf
new file mode 100644
index 00000000..e9b182d0
--- /dev/null
+++ b/infrastructure/terraform/components/dl/lambda_event_source_mapping_report_sender.tf
@@ -0,0 +1,10 @@
+resource "aws_lambda_event_source_mapping" "report_sender" {
+ event_source_arn = module.sqs_report_sender.sqs_queue_arn
+ function_name = module.report_sender.function_name
+ batch_size = var.queue_batch_size
+ maximum_batching_window_in_seconds = var.queue_batch_window_seconds
+
+ function_response_types = [
+ "ReportBatchItemFailures"
+ ]
+}
diff --git a/infrastructure/terraform/components/dl/module_lambda_report_sender.tf b/infrastructure/terraform/components/dl/module_lambda_report_sender.tf
new file mode 100644
index 00000000..f02cc5ae
--- /dev/null
+++ b/infrastructure/terraform/components/dl/module_lambda_report_sender.tf
@@ -0,0 +1,165 @@
+module "report_sender" {
+ source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.29/terraform-lambda.zip"
+
+ function_name = "report-sender"
+ description = "A lambda function for sending reports to Trusts via MESH messages"
+ aws_account_id = var.aws_account_id
+ component = local.component
+ environment = var.environment
+ project = var.project
+ region = var.region
+ group = var.group
+
+ log_retention_in_days = var.log_retention_in_days
+ kms_key_arn = module.kms.key_arn
+
+ iam_policy_document = {
+ body = data.aws_iam_policy_document.report_sender_lambda.json
+ }
+
+ function_s3_bucket = local.acct.s3_buckets["lambda_function_artefacts"]["id"]
+ function_code_base_path = local.aws_lambda_functions_dir_path
+ function_code_dir = "report-sender/target/dist"
+ function_include_common = true
+ function_module_name = "report_sender"
+ handler_function_name = "handler.handler"
+ runtime = "python3.14"
+ memory = 128
+ timeout = 5
+ log_level = var.log_level
+
+ force_lambda_code_deploy = var.force_lambda_code_deploy
+ enable_lambda_insights = false
+
+ log_destination_arn = local.log_destination_arn
+ log_subscription_role_arn = local.acct.log_subscription_role_arn
+
+ lambda_env_vars = {
+ REPORT_SENDER_METRIC_NAME = "report-sender-successful-sends"
+ REPORT_SENDER_METRIC_NAMESPACE = "dl-report-sender"
+ DLQ_URL = module.sqs_report_sender.sqs_dlq_url
+ ENVIRONMENT = var.environment
+ EVENT_PUBLISHER_DLQ_URL = module.sqs_event_publisher_errors.sqs_queue_url
+ EVENT_PUBLISHER_EVENT_BUS_ARN = aws_cloudwatch_event_bus.main.arn
+ MOCK_MESH_BUCKET = module.s3bucket_non_pii_data.bucket
+ SSM_MESH_PREFIX = "${local.ssm_mesh_prefix}"
+ SSM_SENDERS_PREFIX = "${local.ssm_senders_prefix}"
+ USE_MESH_MOCK = var.enable_mock_mesh ? "true" : "false"
+ }
+
+}
+
+data "aws_iam_policy_document" "report_sender_lambda" {
+ statement {
+ sid = "KMSPermissions"
+ effect = "Allow"
+
+ actions = [
+ "kms:Decrypt",
+ "kms:GenerateDataKey",
+ ]
+
+ resources = [
+ module.kms.key_arn,
+ ]
+ }
+
+ statement {
+ sid = "SQSPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:ReceiveMessage",
+ "sqs:DeleteMessage",
+ "sqs:GetQueueAttributes",
+ ]
+
+ resources = [
+ module.sqs_report_sender.sqs_queue_arn,
+ ]
+ }
+
+ statement {
+ sid = "SQSDLQPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:SendMessage",
+ ]
+
+ resources = [
+ module.sqs_report_sender.sqs_dlq_arn,
+ ]
+ }
+
+ statement {
+ sid = "EventBridgePermissions"
+ effect = "Allow"
+
+ actions = [
+ "events:PutEvents",
+ ]
+
+ resources = [
+ aws_cloudwatch_event_bus.main.arn,
+ ]
+ }
+
+ statement {
+ sid = "DLQPermissions"
+ effect = "Allow"
+
+ actions = [
+ "sqs:SendMessage",
+ "sqs:SendMessageBatch",
+ ]
+
+ resources = [
+ module.sqs_event_publisher_errors.sqs_queue_arn,
+ ]
+ }
+
+ statement {
+ sid = "SSMPermissions"
+ effect = "Allow"
+
+ actions = [
+ "ssm:GetParameter",
+ "ssm:GetParametersByPath",
+ ]
+
+ resources = [
+ "arn:aws:ssm:${var.region}:${var.aws_account_id}:parameter${local.ssm_prefix}/*"
+ ]
+ }
+
+ statement {
+ sid = "S3BucketPermissions"
+ effect = "Allow"
+
+ actions = [
+ "s3:GetObject",
+ ]
+
+ resources = [
+ "${module.s3bucket_reporting.arn}/*",
+ ]
+ }
+
+ # Grant S3 PutObject permissions for the mock-mesh directory only when the mock is enabled
+ dynamic "statement" {
+ for_each = var.enable_mock_mesh ? [1] : []
+ content {
+ sid = "MockMeshPutObject"
+ effect = "Allow"
+
+ actions = [
+ "s3:PutObject",
+ ]
+
+ resources = [
+ "${module.s3bucket_non_pii_data.arn}/mock-mesh/*"
+ ]
+ }
+ }
+}
diff --git a/infrastructure/terraform/components/dl/module_sqs_report_sender.tf b/infrastructure/terraform/components/dl/module_sqs_report_sender.tf
new file mode 100644
index 00000000..020e7e6f
--- /dev/null
+++ b/infrastructure/terraform/components/dl/module_sqs_report_sender.tf
@@ -0,0 +1,44 @@
+module "sqs_report_sender" {
+ source = "https://github.com/NHSDigital/nhs-notify-shared-modules/releases/download/v2.0.30/terraform-sqs.zip"
+
+ aws_account_id = var.aws_account_id
+ component = local.component
+ environment = var.environment
+ project = var.project
+ region = var.region
+ name = "report-sender"
+
+ sqs_kms_key_arn = module.kms.key_arn
+
+ visibility_timeout_seconds = 60
+
+ create_dlq = true
+
+ sqs_policy_overload = data.aws_iam_policy_document.sqs_report_sender.json
+}
+
+data "aws_iam_policy_document" "sqs_report_sender" {
+ statement {
+ sid = "AllowEventBridgeToSendMessage"
+ effect = "Allow"
+
+ principals {
+ type = "Service"
+ identifiers = ["events.amazonaws.com"]
+ }
+
+ actions = [
+ "sqs:SendMessage"
+ ]
+
+ resources = [
+ "arn:aws:sqs:${var.region}:${var.aws_account_id}:${local.csi}-report-sender-queue"
+ ]
+
+ condition {
+ test = "ArnLike"
+ variable = "aws:SourceArn"
+ values = [aws_cloudwatch_event_rule.report_generated.arn]
+ }
+ }
+}
diff --git a/infrastructure/terraform/components/dl/pre.sh b/infrastructure/terraform/components/dl/pre.sh
index fd613c06..2be65625 100755
--- a/infrastructure/terraform/components/dl/pre.sh
+++ b/infrastructure/terraform/components/dl/pre.sh
@@ -26,3 +26,4 @@ ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../.." && pwd)"
make -C "$ROOT/lambdas/mesh-acknowledge" package
make -C "$ROOT/lambdas/mesh-poll" package
make -C "$ROOT/lambdas/mesh-download" package
+make -C "$ROOT/lambdas/report-sender" package
diff --git a/lambdas/report-sender/.gitignore b/lambdas/report-sender/.gitignore
new file mode 100644
index 00000000..9f7550b1
--- /dev/null
+++ b/lambdas/report-sender/.gitignore
@@ -0,0 +1,2 @@
+__pycache__
+.venv
diff --git a/lambdas/report-sender/Makefile b/lambdas/report-sender/Makefile
new file mode 100644
index 00000000..6efe2188
--- /dev/null
+++ b/lambdas/report-sender/Makefile
@@ -0,0 +1,34 @@
+PACKAGE=report_sender
+VERSION=0.1.0
+
+install:
+ pip install -r requirements.txt
+
+install-dev:
+ pip install -r requirements-dev.txt
+
+test:
+ cd ../.. && PYTHONPATH=lambdas/report-sender:$$PYTHONPATH pytest lambdas/report-sender/report_sender/__tests__/ -v
+
+coverage:
+ cd ../.. && PYTHONPATH=lambdas/report-sender:$$PYTHONPATH pytest lambdas/report-sender/report_sender/__tests__/ \
+ --cov=lambdas/report-sender/report_sender \
+ --cov-config=lambdas/report-sender/pytest.ini \
+ --cov-report=html:lambdas/report-sender/htmlcov \
+ --cov-report=term-missing \
+ --cov-report=xml:lambdas/report-sender/coverage.xml \
+ --cov-branch
+
+lint:
+ pylint report_sender
+
+format:
+ autopep8 -ri .
+
+package:
+ ./package_python_lambda.sh report_sender
+
+clean:
+ rm -rf target
+
+.PHONY: install install-dev test coverage lint format package clean
diff --git a/lambdas/report-sender/package_python_lambda.sh b/lambdas/report-sender/package_python_lambda.sh
new file mode 100755
index 00000000..a9834152
--- /dev/null
+++ b/lambdas/report-sender/package_python_lambda.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+set -e
+
+component_name="$1"
+
+rootdir=$(realpath "$(dirname "$0")/../..")
+source ${rootdir}/utils/get_version.sh
+
+dist_dir="${PWD}/target/dist"
+rm -rf "${dist_dir}"
+mkdir -p "${dist_dir}"
+
+# Extract internal (file://) and external dependencies from requirements.txt
+grep -E '^-e ' requirements.txt | sed 's|^-e ||' > target/internal_requirements.txt || true
+grep -vE '^-e ' requirements.txt > target/external_requirements.txt || true
+
+# Install external dependencies (from PyPI)
+pip install --platform manylinux2014_x86_64 --only-binary=:all: -r target/external_requirements.txt --target ${dist_dir} --python-version 3.14 --implementation cp
+
+# Install internal dependencies (local packages)
+pip install -r target/internal_requirements.txt --target ${dist_dir}
+
+# Bundle application code
+pip install . --no-deps --target ${dist_dir}
diff --git a/lambdas/report-sender/pytest.ini b/lambdas/report-sender/pytest.ini
new file mode 100644
index 00000000..91879c29
--- /dev/null
+++ b/lambdas/report-sender/pytest.ini
@@ -0,0 +1,16 @@
+[pytest]
+testpaths = report_sender/__tests__
+python_files = test_*.py
+python_classes = Test*
+python_functions = test_*
+addopts = -v --tb=short
+
+[coverage:run]
+relative_files = True
+omit =
+ */report_sender/__tests__/*
+ */test_*.py
+ */__pycache__/*
+ */venv/*
+ */.venv/*
+ */env/*
diff --git a/lambdas/report-sender/report_sender/__init__.py b/lambdas/report-sender/report_sender/__init__.py
new file mode 100644
index 00000000..3cd267ca
--- /dev/null
+++ b/lambdas/report-sender/report_sender/__init__.py
@@ -0,0 +1,15 @@
+"""
+Report Sender Lambda
+
+This module handles ReportGenerated events: it fetches the generated report from the S3 bucket and sends it to the sender's reporting mailbox via MESH.
+"""
+
+__version__ = '0.1.0'
+from .config import *
+from .handler import *
+from .report_sender_processor import *
+from .sender_lookup import *
+from .errors import *
+from .reports_store import *
+from .mesh_report_sender import *
+# NOTE: sender_lookup is already re-exported above; duplicate import removed.
diff --git a/lambdas/report-sender/report_sender/__tests__/__init__.py b/lambdas/report-sender/report_sender/__tests__/__init__.py
new file mode 100644
index 00000000..3be5b3aa
--- /dev/null
+++ b/lambdas/report-sender/report_sender/__tests__/__init__.py
@@ -0,0 +1 @@
+# Test package init
diff --git a/lambdas/report-sender/report_sender/__tests__/test_handler.py b/lambdas/report-sender/report_sender/__tests__/test_handler.py
new file mode 100644
index 00000000..e8fa8106
--- /dev/null
+++ b/lambdas/report-sender/report_sender/__tests__/test_handler.py
@@ -0,0 +1,301 @@
+"""
+Tests for Lambda handler
+"""
+import pytest
+from unittest.mock import Mock, patch, MagicMock
+from report_sender.reports_store import ReportsStore
+from report_sender.handler import handler
+from report_sender.mesh_report_sender import MeshReportsSender
+
+
+def setup_mocks():
+ """
+ Create all mock objects needed for handler testing
+ """
+ mock_context = Mock()
+
+ mock_config = MagicMock()
+ mock_config.mesh_client = Mock()
+ mock_config.s3_client = Mock()
+ mock_config.send_metric = Mock()
+
+ mock_ssm = Mock()
+
+ mock_sender_lookup = Mock()
+
+ mock_processor = Mock()
+ mock_processor.process_sqs_message = Mock()
+
+ return (
+ mock_context,
+ mock_config,
+ mock_ssm,
+ mock_sender_lookup,
+ mock_processor
+ )
+
+
+def create_sqs_event(num_records=1, event_source='aws:sqs'):
+ """
+ Create a mock SQS event for testing
+ """
+ records = []
+ for i in range(num_records):
+ records.append({
+ 'messageId': f'msg-{i}',
+ 'eventSource': event_source,
+ 'body': '{"detail": {"data": {"meshMessageId": "test_id"}}}'
+ })
+
+ return {'Records': records}
+
+
+class TestHandler:
+ """Test suite for Lambda handler"""
+
+ @patch('report_sender.handler.client')
+ @patch('report_sender.handler.EventPublisher')
+ @patch('report_sender.handler.SenderLookup')
+ @patch('report_sender.handler.ReportSenderProcessor')
+ @patch('report_sender.handler.Config')
+ def test_handler_success_single_record_on_event(
+ self,
+ mock_config_class,
+ mock_processor_class,
+ mock_sender_lookup_class,
+ mock_event_publisher_class,
+ mock_boto_client
+ ):
+ """Test successful handler execution"""
+
+ (mock_context, mock_config, mock_ssm,
+ mock_sender_lookup, mock_processor) = setup_mocks()
+ mock_event_publisher = Mock()
+ # Wire up the mocks
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_boto_client.return_value = mock_ssm
+ mock_sender_lookup_class.return_value = mock_sender_lookup
+ mock_processor_class.return_value = mock_processor
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ event = create_sqs_event(num_records=1)
+
+ result = handler(event, mock_context)
+
+ self.assert_object_creation(
+ mock_config_class,
+ mock_boto_client,
+ mock_sender_lookup_class,
+ mock_event_publisher_class,
+ mock_processor_class,
+ mock_ssm,
+ mock_config,
+ mock_sender_lookup,
+ mock_event_publisher
+ )
+
+ assert result == {"batchItemFailures": []}
+ mock_processor.process_sqs_message.assert_called_once()
+
+ @patch('report_sender.handler.client')
+ @patch('report_sender.handler.EventPublisher')
+ @patch('report_sender.handler.SenderLookup')
+ @patch('report_sender.handler.ReportSenderProcessor')
+ @patch('report_sender.handler.Config')
+ def test_handler_returns_empty_failures_on_empty_event(
+ self,
+ mock_config_class,
+ mock_processor_class,
+ mock_sender_lookup_class,
+ mock_event_publisher_class,
+ mock_boto_client
+ ):
+ """Test handler handles empty event gracefully"""
+
+ (mock_context, mock_config, mock_ssm,
+ mock_sender_lookup, mock_processor) = setup_mocks()
+ mock_event_publisher = Mock()
+
+ # Wire up the mocks
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_boto_client.return_value = mock_ssm
+ mock_sender_lookup_class.return_value = mock_sender_lookup
+ mock_processor_class.return_value = mock_processor
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ event = create_sqs_event(num_records=0)
+
+ result = handler(event, mock_context)
+
+ self.assert_object_creation(
+ mock_config_class,
+ mock_boto_client,
+ mock_sender_lookup_class,
+ mock_event_publisher_class,
+ mock_processor_class,
+ mock_ssm,
+ mock_config,
+ mock_sender_lookup,
+ mock_event_publisher
+ )
+
+ assert result == {"batchItemFailures": []}
+ mock_processor.process_sqs_message.assert_not_called()
+
+ @patch('report_sender.handler.client')
+ @patch('report_sender.handler.EventPublisher')
+ @patch('report_sender.handler.SenderLookup')
+ @patch('report_sender.handler.ReportSenderProcessor')
+ @patch('report_sender.handler.Config')
+ def test_handler_success_multiple_success_error_records_in_event(
+ self,
+ mock_config_class,
+ mock_processor_class,
+ mock_sender_lookup_class,
+ mock_event_publisher_class,
+ mock_boto_client
+ ):
+ """Test successful handler execution with multiple records, some failing"""
+
+ (mock_context, mock_config, mock_ssm,
+ mock_sender_lookup, mock_processor) = setup_mocks()
+ mock_event_publisher = Mock()
+
+ # Wire up the mocks
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_boto_client.return_value = mock_ssm
+ mock_sender_lookup_class.return_value = mock_sender_lookup
+ mock_processor_class.return_value = mock_processor
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ # Make second and fourth message fail
+ mock_processor.process_sqs_message.side_effect = [
+ None,
+ Exception("Test error"),
+ None,
+ Exception("Test error 2"),
+ None
+ ]
+
+ event = create_sqs_event(num_records=5)
+
+ result = handler(event, mock_context)
+
+ self.assert_object_creation(
+ mock_config_class,
+ mock_boto_client,
+ mock_sender_lookup_class,
+ mock_event_publisher_class,
+ mock_processor_class,
+ mock_ssm,
+ mock_config,
+ mock_sender_lookup,
+ mock_event_publisher
+ )
+
+ assert result == {"batchItemFailures": [
+ {
+ 'itemIdentifier': 'msg-1',
+ },
+ {
+ 'itemIdentifier': 'msg-3',
+ }
+ ]}
+ assert mock_processor.process_sqs_message.call_count == 5
+
+ @patch('report_sender.handler.client')
+ @patch('report_sender.handler.EventPublisher')
+ @patch('report_sender.handler.SenderLookup')
+ @patch('report_sender.handler.ReportSenderProcessor')
+ @patch('report_sender.handler.Config')
+ def test_handler_skips_non_sqs_records(
+ self,
+ mock_config_class,
+ mock_processor_class,
+ mock_sender_lookup_class,
+ mock_event_publisher_class,
+ mock_boto_client
+ ):
+ """Test that handler skips non-SQS records"""
+
+ (mock_context, mock_config, mock_ssm,
+ mock_sender_lookup, mock_processor) = setup_mocks()
+ mock_event_publisher = Mock()
+
+ # Wire up the mocks
+ mock_config_class.return_value.__enter__.return_value = mock_config
+ mock_config_class.return_value.__exit__ = Mock(return_value=None)
+ mock_boto_client.return_value = mock_ssm
+ mock_sender_lookup_class.return_value = mock_sender_lookup
+ mock_processor_class.return_value = mock_processor
+ mock_event_publisher_class.return_value = mock_event_publisher
+
+ event = create_sqs_event(num_records=1, event_source='aws:sns')
+
+ result = handler(event, mock_context)
+
+ mock_processor.process_sqs_message.assert_not_called()
+ assert result == {"batchItemFailures": []}
+
+ @patch('report_sender.handler.Config')
+ def test_handler_raises_exception_on_config_failure(
+ self,
+ mock_config_class,
+ ):
+ """Test that handler raises exception when Config initialization fails"""
+
+ mock_context = Mock()
+ mock_config_class.return_value.__enter__.side_effect = Exception("Config error")
+
+ event = create_sqs_event(num_records=1)
+
+ with pytest.raises(Exception, match="Config error"):
+ handler(event, mock_context)
+
+ def assert_object_creation(
+ self,
+ mock_config_class,
+ mock_boto_client,
+ mock_sender_lookup_class,
+ mock_event_publisher_class,
+ mock_processor_class,
+ mock_ssm,
+ mock_config,
+ mock_sender_lookup,
+ mock_event_publisher
+ ):
+ """Helper method to assert object creation and initialization"""
+
+ # Verify Config was created and used as context manager
+ mock_config_class.assert_called_once()
+ mock_config_class.return_value.__enter__.assert_called_once()
+
+ # Verify SSM client was created
+ mock_boto_client.assert_called_once_with('ssm')
+
+ # Verify EventPublisher was created with correct parameters
+ mock_event_publisher_class.assert_called_once()
+ ep_kwargs = mock_event_publisher_class.call_args[1]
+ assert ep_kwargs['event_bus_arn'] == mock_config.event_publisher_event_bus_arn
+ assert ep_kwargs['dlq_url'] == mock_config.event_publisher_dlq_url
+ assert 'logger' in ep_kwargs
+
+ # Verify SenderLookup was created with correct parameters (positional args)
+ mock_sender_lookup_class.assert_called_once()
+ sl_args = mock_sender_lookup_class.call_args[0] # Positional args
+ assert sl_args[0] == mock_ssm
+ assert sl_args[1] == mock_config
+
+ # Verify ReportSenderProcessor was created with correct parameters
+ mock_processor_class.assert_called_once()
+ mock_processor_args = mock_processor_class.call_args[1]
+ assert mock_processor_args['config'] == mock_config
+ assert mock_processor_args['sender_lookup'] == mock_sender_lookup
+ assert isinstance(mock_processor_args['mesh_report_sender'], MeshReportsSender)
+ assert isinstance(mock_processor_args['reports_store'], ReportsStore)
+ assert mock_processor_args['event_publisher'] == mock_event_publisher
+ assert mock_processor_args['send_metric'] == mock_config.send_metric
+ assert 'log' in mock_processor_args
diff --git a/lambdas/report-sender/report_sender/__tests__/test_mesh_report_sender.py b/lambdas/report-sender/report_sender/__tests__/test_mesh_report_sender.py
new file mode 100644
index 00000000..4c01e7cb
--- /dev/null
+++ b/lambdas/report-sender/report_sender/__tests__/test_mesh_report_sender.py
@@ -0,0 +1,70 @@
+"""
+Tests for MeshReportsSender class
+"""
+import pytest
+from unittest.mock import Mock
+from report_sender.mesh_report_sender import MeshReportsSender
+
+SENT_MESH_MESSAGE_ID = "MSG123456"
+
+@pytest.fixture(name='mock_mesh_client')
+def create_mock_mesh_client():
+ """Create a mock MeshClient for testing"""
+ client = Mock()
+ client.handshake = Mock()
+ client.send_message = Mock(return_value=SENT_MESH_MESSAGE_ID)
+ return client
+
+@pytest.fixture(name='mock_logger')
+def create_mock_logger():
+ """Create a mock logger for testing"""
+ logger = Mock()
+ logger.debug = Mock()
+ return logger
+
+@pytest.fixture(name='mesh_report_sender')
+def create_mesh_report_sender(mock_mesh_client, mock_logger):
+ """Create a MeshReportsSender instance with mocked dependencies"""
+ return MeshReportsSender(mock_mesh_client, mock_logger)
+
+class TestMeshReportsSender:
+ """Test suite for MeshReportsSender class"""
+
+ def test_init_performs_handshake(self, mock_mesh_client, mock_logger):
+ """Test that __init__ performs a MESH handshake"""
+ MeshReportsSender(mock_mesh_client, mock_logger)
+
+ mock_mesh_client.handshake.assert_called_once()
+
+ def test_send_report_sends_correct_message(
+ self, mesh_report_sender, mock_mesh_client
+ ):
+ """Test that send_report sends the correct message via MESH"""
+ reporting_mailbox = "MAILBOX001"
+ report_bytes = b"report content"
+ report_date = "2026-02-03"
+ report_reference = "report-reference-123"
+
+ mesh_report_sender.send_report(reporting_mailbox, report_bytes, report_date, report_reference)
+
+ mock_mesh_client.send_message.assert_called_once_with(
+ reporting_mailbox,
+ report_bytes,
+ workflow_id='NHS_NOTIFY_DIGITAL_LETTERS_DAILY_REPORT',
+ subject=report_date,
+ local_id=report_reference
+ )
+
+ def test_send_report_raises_error_if_mesh_send_fails(
+ self, mesh_report_sender, mock_mesh_client
+ ):
+ """Test that send_report raises an error if MESH send_message fails"""
+ reporting_mailbox = "MAILBOX001"
+ report_bytes = b"report content"
+ report_date = "2026-02-03"
+ report_reference = "report-reference-123"
+
+ mock_mesh_client.send_message.side_effect = Exception("MESH send failed")
+
+ with pytest.raises(Exception, match="MESH send failed"):
+ mesh_report_sender.send_report(reporting_mailbox, report_bytes, report_date, report_reference)
diff --git a/lambdas/report-sender/report_sender/__tests__/test_report_sender_processor.py b/lambdas/report-sender/report_sender/__tests__/test_report_sender_processor.py
new file mode 100644
index 00000000..339f72ad
--- /dev/null
+++ b/lambdas/report-sender/report_sender/__tests__/test_report_sender_processor.py
@@ -0,0 +1,266 @@
+"""
+Tests for ReportSenderProcessor class
+"""
+import json
+import pytest
+from unittest.mock import ANY, Mock
+from pydantic import ValidationError
+from report_sender.report_sender_processor import ReportSenderProcessor
+from report_sender.errors import InvalidSenderDetailsError
+from digital_letters_events import ReportSent
+
+
+@pytest.fixture(name='mock_logger')
+def create_mock_logger():
+ """Create a mock logger for testing"""
+ logger = Mock()
+ logger.info = Mock()
+ logger.debug = Mock()
+ logger.error = Mock()
+ return logger
+
+
+@pytest.fixture(name='mock_sender_lookup')
+def create_mock_sender_lookup():
+ """Create a mock sender lookup for testing"""
+ sender_lookup = Mock()
+ sender_lookup.get_mesh_mailbox_reports_id_from_sender = Mock()
+ return sender_lookup
+
+
+@pytest.fixture(name='mock_reports_store')
+def create_mock_reports_store():
+ """Create a mock reports store for testing"""
+ reports_store = Mock()
+ reports_store.download_report = Mock()
+ return reports_store
+
+
+@pytest.fixture(name='mock_event_publisher')
+def create_mock_event_publisher():
+ """Create a mock event publisher for testing"""
+ event_publisher = Mock()
+ event_publisher.send_events = Mock(return_value=[])
+ return event_publisher
+
+
+@pytest.fixture(name='mock_send_metric')
+def create_mock_send_metric():
+ """Create a mock send metric for testing"""
+ send_metric = Mock()
+ send_metric.record = Mock()
+ return send_metric
+
+
+@pytest.fixture(name='mock_mesh_report_sender')
+def create_mock_mesh_report_sender():
+ """Create a mock MESH reports sender for testing"""
+ mesh_sender = Mock()
+ mesh_sender.send_report = Mock()
+ return mesh_sender
+
+
+@pytest.fixture(name='processor')
+def create_processor(
+ mock_logger,
+ mock_sender_lookup,
+ mock_reports_store,
+ mock_event_publisher,
+ mock_send_metric,
+ mock_mesh_report_sender
+):
+ """Create a ReportSenderProcessor instance with mocked dependencies"""
+ mock_config = Mock()
+ return ReportSenderProcessor(
+ config=mock_config,
+ log=mock_logger,
+ sender_lookup=mock_sender_lookup,
+ reports_store=mock_reports_store,
+ event_publisher=mock_event_publisher,
+ send_metric=mock_send_metric,
+ mesh_report_sender=mock_mesh_report_sender
+ )
+
+SENDER_ID = "test-sender-1"
+REPORT_URI = "s3://bucket/report-2026-02-03.csv"
+
+def create_valid_sqs_record(sender_id=SENDER_ID, report_uri=REPORT_URI):
+ """Helper to create a valid SQS record"""
+ return {
+ 'messageId': 'msg-123',
+ 'body': json.dumps({
+ 'detail': {
+ 'id': '6f1c2a53-3d54-4a0a-9a0b-0e9ae2d4c111',
+ 'specversion': '1.0',
+ 'source': '/nhs/england/notify/development/primary/data-plane/digitalletters/reporting',
+ 'subject': 'customer/920fca11-596a-4eca-9c47-99f624614658',
+ 'type': 'uk.nhs.notify.digital.letters.reporting.report.generated.v1',
+ 'time': '2026-02-03T10:00:00Z',
+ 'datacontenttype': 'application/json',
+ 'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-reporting-report-generated-data.schema.json',
+ 'data': {
+ 'senderId': sender_id,
+ 'reportUri': report_uri
+ },
+ 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ 'recordedtime': '2026-02-03T10:00:00.250Z',
+ 'severitynumber': 2,
+ 'severitytext': 'INFO'
+ }
+ })
+ }
+
+
+class TestReportSenderProcessor:
+ """Test suite for ReportSenderProcessor class"""
+
+ def test_parse_and_validate_event_success(self, processor):
+ """Test successful parsing and validation of CloudEvent"""
+ sqs_record = create_valid_sqs_record()
+
+ result = processor._parse_and_validate_event(sqs_record)
+
+ assert result.data.senderId == SENDER_ID
+ assert str(result.data.reportUri) == REPORT_URI
+
+ def test_parse_and_validate_event_validation_error(self, processor, mock_logger):
+ """Test that validation errors are handled correctly"""
+ sqs_record = {
+ 'messageId': 'msg-123',
+ 'body': json.dumps({
+ 'detail': {
+ 'id': 'event-123',
+ 'specversion': '1.0',
+ 'source': '/test/source',
+ 'type': 'uk.nhs.notify.digital.letters.reporting.report.generated.v1',
+ 'time': '2026-02-03T10:00:00Z',
+ 'data': {
+ # Missing required fields
+ }
+ }
+ })
+ }
+
+ with pytest.raises(ValidationError):
+ processor._parse_and_validate_event(sqs_record)
+
+ mock_logger.error.assert_called()
+
+ def test_process_sqs_message_success(
+ self,
+ processor,
+ mock_sender_lookup,
+ mock_reports_store,
+ mock_mesh_report_sender,
+ mock_event_publisher,
+ mock_send_metric,
+ ):
+ """Test successful processing of SQS message"""
+ sqs_record = create_valid_sqs_record()
+ mock_sender_lookup.get_mesh_mailbox_reports_id_from_sender.return_value = 'MAILBOX001'
+ mock_reports_store.download_report.return_value = b'report content'
+ mock_event_publisher.send_events.return_value = []
+
+ processor.process_sqs_message(sqs_record)
+
+ # Verify all steps were called
+ mock_sender_lookup.get_mesh_mailbox_reports_id_from_sender.assert_called_once_with(SENDER_ID)
+ mock_reports_store.download_report.assert_called_once_with(REPORT_URI)
+ mock_mesh_report_sender.send_report.assert_called_once_with(
+ 'MAILBOX001',
+ b'report content',
+ '2026-02-03',
+ ANY,
+ )
+ mock_event_publisher.send_events.assert_called_once()
+ mock_send_metric.record.assert_called_once_with(1)
+
+ def test_process_sqs_message_sender_lookup_fails(
+ self,
+ processor,
+ mock_sender_lookup,
+ ):
+ """Test processing fails when sender lookup fails"""
+ sqs_record = create_valid_sqs_record()
+ mock_sender_lookup.get_mesh_mailbox_reports_id_from_sender.side_effect = InvalidSenderDetailsError("Failed to parse mailbox ID")
+
+ with pytest.raises(InvalidSenderDetailsError):
+ processor.process_sqs_message(sqs_record)
+
+ def test_process_sqs_message_reports_store_fails(
+ self,
+ processor,
+ mock_sender_lookup,
+ mock_reports_store
+ ):
+ """Test processing fails when reports store fails"""
+ sqs_record = create_valid_sqs_record()
+ mock_sender_lookup.get_mesh_mailbox_reports_id_from_sender.return_value = 'MAILBOX001'
+ mock_reports_store.download_report.side_effect = Exception("S3 error")
+
+ with pytest.raises(Exception, match="S3 error"):
+ processor.process_sqs_message(sqs_record)
+
+ def test_process_sqs_message_mesh_send_fails(
+ self,
+ processor,
+ mock_sender_lookup,
+ mock_reports_store,
+ mock_mesh_report_sender
+ ):
+ """Test processing fails when MESH send fails"""
+ sqs_record = create_valid_sqs_record()
+ mock_sender_lookup.get_mesh_mailbox_reports_id_from_sender.return_value = 'MAILBOX001'
+ mock_reports_store.download_report.return_value = b'report content'
+ mock_mesh_report_sender.send_report.side_effect = Exception("MESH error")
+
+ with pytest.raises(Exception, match="MESH error"):
+ processor.process_sqs_message(sqs_record)
+
+ def test_publish_report_sent_event_success(
+ self,
+ processor,
+ mock_event_publisher,
+ mock_logger
+ ):
+ """Test successful publishing of ReportSent event"""
+ mesh_mailbox_reports_id = "MAILBOX001"
+ report_reference = "report-reference-123"
+
+ processor._publish_report_sent_event(SENDER_ID, mesh_mailbox_reports_id, report_reference)
+
+ # Verify event was published
+ mock_event_publisher.send_events.assert_called_once()
+ call_args = mock_event_publisher.send_events.call_args[0][0]
+ assert len(call_args) == 1
+ event = call_args[0]
+
+ # Verify event structure
+ assert event['type'] == 'uk.nhs.notify.digital.letters.reporting.report.sent.v1'
+ assert event['subject'] == f'customer/{SENDER_ID}'
+ assert event['data']['senderId'] == SENDER_ID
+ assert event['data']['meshMailboxReportsId'] == mesh_mailbox_reports_id
+ assert event['specversion'] == '1.0'
+ assert 'id' in event
+ assert 'time' in event
+ assert 'recordedtime' in event
+ ReportSent.model_validate(event) # Validate against schema
+
+ mock_logger.info.assert_called()
+
+ def test_publish_report_sent_event_failure(
+ self,
+ processor,
+ mock_event_publisher,
+ mock_logger
+ ):
+ """Test error handling when event publishing fails"""
+ mesh_mailbox_reports_id = "MAILBOX001"
+ mock_event_publisher.send_events.return_value = [{'id': 'failed-event'}]
+ report_reference = "report-reference-123"
+
+ with pytest.raises(RuntimeError) as exc_info:
+ processor._publish_report_sent_event(SENDER_ID, mesh_mailbox_reports_id, report_reference)
+
+ assert "Failed to publish ReportingReportSent event" in str(exc_info.value)
+ mock_logger.error.assert_called()
diff --git a/lambdas/report-sender/report_sender/__tests__/test_reports_store.py b/lambdas/report-sender/report_sender/__tests__/test_reports_store.py
new file mode 100644
index 00000000..eab3adba
--- /dev/null
+++ b/lambdas/report-sender/report_sender/__tests__/test_reports_store.py
@@ -0,0 +1,60 @@
+"""Tests for ReportsStore"""
+import pytest
+from unittest.mock import Mock
+from report_sender.reports_store import ReportsStore
+from report_sender.errors import ReportNotFoundError
+
+@pytest.fixture(name='mock_s3_client')
+def create_mock_s3_client():
+ """Create a mock S3 client for testing"""
+ s3_client = Mock()
+ return s3_client
+
+@pytest.fixture(name='reports_store')
+def create_reports_store(mock_s3_client):
+ """Create ReportsStore instance with mocked S3 client for testing"""
+ return ReportsStore(mock_s3_client)
+
+class TestReportsStore:
+ """Test suite for ReportsStore"""
+
+ def test_download_report_success(self, reports_store, mock_s3_client):
+ """Successfully downloads report content from S3"""
+
+ mock_s3_client.get_object.return_value = {
+ 'Body': Mock(read=Mock(return_value=b'report content')),
+ 'ResponseMetadata': {'HTTPStatusCode': 200}
+ }
+
+ bucket_name = 'test-bucket'
+ key_name = 'report-key'
+ s3_uri = f's3://{bucket_name}/{key_name}'
+
+ result = reports_store.download_report(s3_uri)
+
+ assert result == b'report content'
+ mock_s3_client.get_object.assert_called_once_with(
+ Bucket=bucket_name,
+ Key=key_name
+ )
+
+ def test_download_report_s3_failure_raises_error(self, reports_store, mock_s3_client):
+ """Propagates the underlying exception when S3 get_object fails"""
+ mock_s3_client.get_object.side_effect = Exception("S3 error")
+
+ with pytest.raises(Exception):
+ reports_store.download_report(
+ s3_uri='s3://test-bucket/report-key'
+ )
+
+ def test_download_report_non_200_status_code_raises_error(self, reports_store, mock_s3_client):
+ """Raises Exception when S3 returns non-200 status code"""
+ mock_s3_client.get_object.return_value = {
+ 'Body': Mock(read=Mock(return_value=b'report content')),
+ 'ResponseMetadata': {'HTTPStatusCode': 404}
+ }
+
+ with pytest.raises(ReportNotFoundError, match="Failed to fetch report from S3"):
+ reports_store.download_report(
+ s3_uri='s3://test-bucket/report-key'
+ )
diff --git a/lambdas/report-sender/report_sender/__tests__/test_sender_lookup.py b/lambdas/report-sender/report_sender/__tests__/test_sender_lookup.py
new file mode 100644
index 00000000..9aa9c7b9
--- /dev/null
+++ b/lambdas/report-sender/report_sender/__tests__/test_sender_lookup.py
@@ -0,0 +1,95 @@
+import json
+import pytest
+from unittest.mock import Mock
+from report_sender.sender_lookup import SenderLookup
+from report_sender.errors import InvalidSenderDetailsError
+
+test_sender_id = "test-sender-1"
+
+@pytest.fixture(name='mock_ssm')
+def create_mock_ssm():
+ """Create a mock SSM client for testing"""
+ ssm = Mock()
+ ssm.get_parameter = Mock()
+
+ return ssm
+
+@pytest.fixture(name='mock_config')
+def create_mock_config():
+ """Create a mock config object for testing"""
+ config = Mock()
+ config.ssm_senders_prefix = "/test/senders"
+ return config
+
+class TestSenderLookup:
+ """Test suite for SenderLookup class"""
+
+ def test_get_mesh_mailbox_reports_id_from_sender_success(self, mock_ssm, mock_config):
+ """Successfully retrieves mailbox ID from SSM parameter"""
+ mailbox_id = "MAILBOX001"
+ sender_parameter = {
+ 'Parameter': {
+ 'Name': f"{mock_config.ssm_senders_prefix}/{test_sender_id}",
+ 'Value': f'{{"meshMailboxReportsId": "{mailbox_id}"}}'
+ }
+ }
+ mock_ssm.get_parameter.return_value = sender_parameter
+
+ lookup = SenderLookup(mock_ssm, mock_config)
+ result = lookup.get_mesh_mailbox_reports_id_from_sender(test_sender_id)
+
+ assert result == mailbox_id
+ mock_ssm.get_parameter.assert_called_once_with(
+ Name=f"{mock_config.ssm_senders_prefix}/{test_sender_id}",
+ WithDecryption=True
+ )
+
+ def test_get_mesh_mailbox_reports_id_from_sender_not_found(self, mock_ssm, mock_config):
+ """Raises exception when sender ID is not found in SSM"""
+ sender_id = "unknown_sender"
+ mock_ssm.get_parameter.return_value = None
+
+ lookup = SenderLookup(mock_ssm, mock_config)
+
+ with pytest.raises(Exception) as exc_info:
+ lookup.get_mesh_mailbox_reports_id_from_sender(sender_id)
+
+ assert str(exc_info.value) == f"No sender found in SSM for sender ID {sender_id}"
+ mock_ssm.get_parameter.assert_called_once_with(
+ Name=f"{mock_config.ssm_senders_prefix}/{sender_id}",
+ WithDecryption=True
+ )
+
+ def test_get_mesh_mailbox_reports_id_from_sender_missing_value_field(self, mock_ssm, mock_config):
+ """Raises exception when Value field is missing from SSM parameter"""
+ sender_parameter = {
+ 'Parameter': {
+ 'Name': f"{mock_config.ssm_senders_prefix}/{test_sender_id}"
+ # Missing 'Value' field
+ }
+ }
+ mock_ssm.get_parameter.return_value = sender_parameter
+
+ lookup = SenderLookup(mock_ssm, mock_config)
+
+ with pytest.raises(Exception) as exc_info:
+ lookup.get_mesh_mailbox_reports_id_from_sender(test_sender_id)
+
+ assert "missing a 'Value' field" in str(exc_info.value)
+
+ def test_get_mesh_mailbox_reports_id_from_sender_invalid_json(self, mock_ssm, mock_config):
+ """Raises exception when parameter value is not valid JSON"""
+ sender_parameter = {
+ 'Parameter': {
+ 'Name': f"{mock_config.ssm_senders_prefix}/{test_sender_id}",
+ 'Value': 'invalid json {'
+ }
+ }
+ mock_ssm.get_parameter.return_value = sender_parameter
+
+ lookup = SenderLookup(mock_ssm, mock_config)
+
+ with pytest.raises(InvalidSenderDetailsError) as exc_info:
+ lookup.get_mesh_mailbox_reports_id_from_sender(test_sender_id)
+
+ assert "Failed to parse meshMailboxReportsId from parameter for sender ID test-sender-1" in str(exc_info.value)
diff --git a/lambdas/report-sender/report_sender/config.py b/lambdas/report-sender/report_sender/config.py
new file mode 100644
index 00000000..bc3fc002
--- /dev/null
+++ b/lambdas/report-sender/report_sender/config.py
@@ -0,0 +1,46 @@
+"""
+Module for configuring Report Sender application
+"""
+from dl_utils import BaseMeshConfig, Metric
+
+
+_REQUIRED_ENV_VAR_MAP = {
+ "ssm_mesh_prefix": "SSM_MESH_PREFIX",
+ "ssm_senders_prefix": "SSM_SENDERS_PREFIX",
+ "environment": "ENVIRONMENT",
+ "event_publisher_event_bus_arn": "EVENT_PUBLISHER_EVENT_BUS_ARN",
+ "event_publisher_dlq_url": "EVENT_PUBLISHER_DLQ_URL",
+ "send_metric_name": "REPORT_SENDER_METRIC_NAME",
+ "send_metric_namespace": "REPORT_SENDER_METRIC_NAMESPACE"
+}
+
+
+class Config(BaseMeshConfig):
+ """
+ Represents the configuration of the Send Reports application.
+ Inherits common MESH configuration from BaseMeshConfig.
+ """
+
+ _REQUIRED_ENV_VAR_MAP = _REQUIRED_ENV_VAR_MAP
+
+ def __init__(self, ssm=None):
+ super().__init__(ssm=ssm)
+
+ self.send_metric = None
+ def __enter__(self):
+ super().__enter__()
+
+ # Build send metric
+ self.send_metric = self.build_send_metric()
+
+ return self
+
+ def build_send_metric(self):
+ """
+ Returns a custom metric to record reports sent via MESH
+ """
+ return Metric(
+ name=self.send_metric_name,
+ namespace=self.send_metric_namespace,
+ dimensions={"Environment": self.environment}
+ )
diff --git a/lambdas/report-sender/report_sender/errors.py b/lambdas/report-sender/report_sender/errors.py
new file mode 100644
index 00000000..1e544115
--- /dev/null
+++ b/lambdas/report-sender/report_sender/errors.py
@@ -0,0 +1,13 @@
+"""
+Module representing possible errors within this application
+"""
+
+class InvalidSenderDetailsError(Exception):
+ """
+ Indicates that the sender is missing or the details are invalid
+ """
+
+class ReportNotFoundError(Exception):
+ """
+ Indicates that the report was not found
+ """
diff --git a/lambdas/report-sender/report_sender/handler.py b/lambdas/report-sender/report_sender/handler.py
new file mode 100644
index 00000000..5fa3f412
--- /dev/null
+++ b/lambdas/report-sender/report_sender/handler.py
@@ -0,0 +1,77 @@
+"""lambda handler for send reports application"""
+
+from boto3 import client
+from dl_utils import log, EventPublisher
+from .sender_lookup import SenderLookup
+from .config import Config
+from .report_sender_processor import ReportSenderProcessor
+from .reports_store import ReportsStore
+from .mesh_report_sender import MeshReportsSender
+
+
+def handler(event, context):
+ """
+ Lambda handler for sending reports to Trusts via MESH.
+ Processes SQS events from the report-sender queue.
+ Returns batch item failures for partial batch failure handling.
+ """
+
+ log.info("Received SQS event", record_count=len(event.get('Records', [])))
+ batch_item_failures = []
+ processed = {
+ 'retrieved': 0,
+ 'sent': 0,
+ 'failed': 0
+ }
+
+ try:
+ with Config() as config:
+
+ event_publisher = EventPublisher(
+ event_bus_arn=config.event_publisher_event_bus_arn,
+ dlq_url=config.event_publisher_dlq_url,
+ logger=log
+ )
+
+ reports_store = ReportsStore(config.s3_client)
+
+ mesh_report_sender = MeshReportsSender(config.mesh_client, log)
+
+ processor = ReportSenderProcessor(
+ config=config,
+ log=log,
+ sender_lookup=SenderLookup(client('ssm'), config),
+ mesh_report_sender=mesh_report_sender,
+ reports_store=reports_store,
+ event_publisher=event_publisher,
+ send_metric=config.send_metric)
+
+ # Process each SQS record
+ for record in event.get('Records', []):
+ processed['retrieved'] += 1
+ message_id = record.get('messageId')
+
+ if record.get('eventSource') != 'aws:sqs':
+ log.warn("Skipping non-SQS record", message_id=message_id)
+ continue
+
+ try:
+ processor.process_sqs_message(record)
+ processed['sent'] += 1
+
+ except Exception as exc:
+ processed['failed'] += 1
+ log.error("Failed to process SQS message",
+ message_id=message_id,
+ error=str(exc))
+ batch_item_failures.append({"itemIdentifier": message_id})
+
+ log.info("Processed SQS event",
+ retrieved=processed['retrieved'],
+ sent=processed['sent'],
+ failed=processed['failed'])
+
+ return {"batchItemFailures": batch_item_failures}
+ except Exception as exc:
+ log.exception("Failed to process send reports", error=str(exc))
+ raise exc
diff --git a/lambdas/report-sender/report_sender/mesh_report_sender.py b/lambdas/report-sender/report_sender/mesh_report_sender.py
new file mode 100644
index 00000000..2390b8aa
--- /dev/null
+++ b/lambdas/report-sender/report_sender/mesh_report_sender.py
@@ -0,0 +1,49 @@
+from dl_utils.errors import format_exception
+from mesh_client import MeshClient
+
+MESH_MESSAGE_WORKFLOW_ID = 'NHS_NOTIFY_DIGITAL_LETTERS_DAILY_REPORT'
+
+class MeshReportsSender:
+ """
+ Class responsible for sending reports to MESH mailboxes.
+ """
+ def __init__(self, mesh_client: MeshClient, logger):
+ self.__log = logger
+ self.__mesh_client = mesh_client
+
+ self.__mesh_client.handshake()
+
+ def send_report(self, reporting_mailbox: str, report_bytes: bytes, report_date: str, report_reference: str):
+ """
+ Sends a report to a specified MESH mailbox.
+
+ Args:
+ reporting_mailbox (str): The MESH mailbox ID to send the report to.
+ report_bytes (bytes): The report content in bytes.
+ report_date (str): The date of the report, used in the message subject.
+ report_reference (str): The reference for the report, used in the message subject.
+
+ Raises:
+ Exception: If sending the report fails.
+ """
+ try:
+ self.__mesh_client.send_message(
+ reporting_mailbox,
+ report_bytes,
+ workflow_id=MESH_MESSAGE_WORKFLOW_ID,
+ subject=f'{report_date}',
+ local_id=report_reference,
+ )
+ self.__log.info(
+ "Sent report to MESH mailbox",
+ reporting_mailbox=reporting_mailbox,
+ report_date=report_date
+ )
+ except Exception as e:
+ self.__log.error(
+ f"Failed to send report to MESH mailbox, error:{str(e)}",
+ reporting_mailbox=reporting_mailbox,
+ report_date=report_date,
+ error=format_exception(e)
+ )
+ raise
diff --git a/lambdas/report-sender/report_sender/report_sender_processor.py b/lambdas/report-sender/report_sender/report_sender_processor.py
new file mode 100644
index 00000000..daddf28c
--- /dev/null
+++ b/lambdas/report-sender/report_sender/report_sender_processor.py
@@ -0,0 +1,122 @@
+"""
+Module for processing messages from an SQS queue.
+"""
+
+from datetime import datetime, timezone
+import json
+from uuid import uuid4
+
+from pydantic import ValidationError
+from digital_letters_events import ReportGenerated, ReportSent
+
+class ReportSenderProcessor: # pylint: disable=too-many-instance-attributes
+ """
+ Class that processes messages from the SQS queue and publishes a ReportSent event to the event bus.
+ """
+
+ def __init__(self, **kwargs):
+ self.__log = kwargs['log']
+ self.__sender_lookup = kwargs['sender_lookup']
+ self.__reports_store = kwargs['reports_store']
+ self.__event_publisher = kwargs['event_publisher']
+ self.__send_metric = kwargs['send_metric']
+ self.__mesh_report_sender = kwargs['mesh_report_sender']
+
+ environment = 'development'  # TODO(review): derive from config.environment instead of hardcoding
+ deployment = 'primary'
+ plane = 'data-plane'
+ self.__cloud_event_source = f'/nhs/england/notify/{environment}/{deployment}/{plane}/digitalletters/reporting'
+
+ def _parse_and_validate_event(self, sqs_record) -> ReportGenerated:
+ """Extract report generated data from SQS record"""
+ message_body = json.loads(sqs_record['body'])
+ event_detail = message_body.get('detail', {})
+
+ try:
+ validated_event = ReportGenerated(**event_detail)
+ self.__log.debug("CloudEvent validation passed")
+ return validated_event
+ except ValidationError as e:
+ self.__log.error(
+ "CloudEvent validation failed",
+ validation_errors=str(e),
+ event_detail=event_detail
+ )
+ raise
+
+ def _extract_report_date_from_report_uri(self, report_uri) -> str:
+ ignore_extension_characters = -4 # to skip .csv
+ report_date_start_index = -14 # to extract from the end of the URI the date 2026-02-03.csv
+ report_uri_str = str(report_uri)
+ return report_uri_str[report_date_start_index:ignore_extension_characters]
+
+
+ def process_sqs_message(self, sqs_record):
+ """
+ Processes a single SQS message: validates the event, downloads the report, sends it via MESH and publishes a ReportSent event
+ """
+ self.__log.info('Extract data from SQS record')
+
+ report_generated_event : ReportGenerated = self._parse_and_validate_event(sqs_record)
+ sender_id = report_generated_event.data.senderId
+ report_uri = str(report_generated_event.data.reportUri)
+
+ self.__log.info(f'Fetching sender details for sender ID: {sender_id}')
+ reporting_mailbox = self.__sender_lookup.get_mesh_mailbox_reports_id_from_sender(sender_id)
+
+ self.__log.info(f'Fetching reporting URI : {report_uri} for sender ID: {sender_id}')
+ report_bytes = self.__reports_store.download_report(report_uri)
+ report_date = self._extract_report_date_from_report_uri(report_uri)
+ report_reference = str(uuid4())
+
+ self.__log.info(f'Sending MESH message to the sender: {sender_id} using mailbox: {reporting_mailbox} for date: {report_date} with reference: {report_reference}')
+
+ self.__mesh_report_sender.send_report(
+ reporting_mailbox,
+ report_bytes,
+ report_date,
+ report_reference
+ )
+
+ self.__log.info(f'Publishing ReportEventSent for the sender: {sender_id} using mailbox: {reporting_mailbox} for date: {report_date}')
+ self._publish_report_sent_event(sender_id, reporting_mailbox, report_reference)
+ self.__send_metric.record(1)
+
+ def _publish_report_sent_event(self, sender_id, mesh_mailbox_reports_id, report_reference):
+ """
+ Publishes a ReportSent event
+ """
+ now = datetime.now(timezone.utc).isoformat()
+
+ cloud_event = {
+ 'id': str(uuid4()),
+ 'specversion': '1.0',
+ 'source': self.__cloud_event_source,
+ 'subject': f'customer/{sender_id}',
+ 'type': 'uk.nhs.notify.digital.letters.reporting.report.sent.v1',
+ 'time': now,
+ 'recordedtime': now,
+ 'severitynumber': 2,
+ 'severitytext': 'INFO',
+ 'traceparent': '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01', # Note: covered by CCM-14255
+ 'dataschema': 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-reporting-report-sent-data.schema.json',
+ 'data': {
+ "senderId": sender_id,
+ "meshMailboxReportsId": mesh_mailbox_reports_id,
+ "reportReference": report_reference,
+ },
+ }
+
+ failed_events = self.__event_publisher.send_events([cloud_event], ReportSent)
+
+ if failed_events:
+ error_msg = f"Failed to publish ReportingReportSent event: {failed_events}"
+ self.__log.error(error_msg, failed_count=len(failed_events))
+ raise RuntimeError(error_msg)
+
+ self.__log.info(
+ "Published ReportingReportSent event",
+ sender_id=sender_id,
+ mesh_mailbox_reports_id=mesh_mailbox_reports_id,
+ report_reference=report_reference
+ )
diff --git a/lambdas/report-sender/report_sender/reports_store.py b/lambdas/report-sender/report_sender/reports_store.py
new file mode 100644
index 00000000..bedfa865
--- /dev/null
+++ b/lambdas/report-sender/report_sender/reports_store.py
@@ -0,0 +1,28 @@
+""" Module for fetching reports from S3 """
+
+from urllib.parse import urlparse
+from .errors import ReportNotFoundError
+
+class ReportsStore:
+ """Class for fetching reports from S3"""
+
+ def __init__(self, s3_client):
+ self.__s3_client = s3_client
+
+ def download_report(self, s3_uri):
+ """Download report from S3 given its URI in the format s3://<bucket>/<key>"""
+ # Parse the S3 URI
+ parsed_uri = urlparse(s3_uri)
+ bucket = parsed_uri.netloc
+ key = parsed_uri.path.lstrip('/') # Remove leading slash from the path
+
+ # Download the object
+ s3_response = self.__s3_client.get_object(
+ Bucket=bucket,
+ Key=key
+ )
+
+ if s3_response['ResponseMetadata']['HTTPStatusCode'] != 200:
+ raise ReportNotFoundError(f"Failed to fetch report from S3: {s3_response}")
+
+ return s3_response['Body'].read()
diff --git a/lambdas/report-sender/report_sender/sender_lookup.py b/lambdas/report-sender/report_sender/sender_lookup.py
new file mode 100644
index 00000000..db3453da
--- /dev/null
+++ b/lambdas/report-sender/report_sender/sender_lookup.py
@@ -0,0 +1,39 @@
+import json
+from .errors import InvalidSenderDetailsError
+
+
+class SenderLookup:
+ """
+ Lightweight sender lookup for retrieving a sender's MESH reports mailbox ID from SSM
+ """
+
+ def __init__(self, ssm, config):
+ self.__ssm = ssm
+ self.__config = config
+
+ def get_mesh_mailbox_reports_id_from_sender(self, sender_id) -> str:
+ """
+ Get the MESH reporting mailbox for a given sender ID
+ """
+
+ sender_key = f"{self.__config.ssm_senders_prefix}/{sender_id}"
+
+ sender = self.__ssm.get_parameter(Name=sender_key, WithDecryption=True)
+
+ if not sender:
+ raise InvalidSenderDetailsError(f"No sender found in SSM for sender ID {sender_id}")
+
+ return self.__extract_mesh_mailbox_reports_id(sender, sender_id)
+
+ def __extract_mesh_mailbox_reports_id(self, sender, sender_id) -> str:
+ """
+ Extract just the meshMailboxReportsId from a sender parameter
+ """
+ if "Value" not in sender['Parameter']:
+ raise InvalidSenderDetailsError(f"The SSM value for the sender ID {sender_id} are missing a 'Value' field")
+
+ try:
+ sender_config = json.loads(sender['Parameter']['Value'])
+ return sender_config.get("meshMailboxReportsId")
+ except (ValueError, AttributeError):
+ raise InvalidSenderDetailsError(f"Failed to parse meshMailboxReportsId from parameter for sender ID {sender_id}")
diff --git a/lambdas/report-sender/requirements-dev.txt b/lambdas/report-sender/requirements-dev.txt
new file mode 100644
index 00000000..1f257452
--- /dev/null
+++ b/lambdas/report-sender/requirements-dev.txt
@@ -0,0 +1,6 @@
+-r requirements.txt
+autopep8>=2.0.2
+pylint>=2.17.4
+pytest>=7.0.1
+pytest-cov>=4.0.0
+jake>=3.0.1
diff --git a/lambdas/report-sender/requirements.txt b/lambdas/report-sender/requirements.txt
new file mode 100644
index 00000000..5e7f1345
--- /dev/null
+++ b/lambdas/report-sender/requirements.txt
@@ -0,0 +1,13 @@
+certifi>=2023.07.22
+mesh-client>=3.2.3
+structlog>=21.5.0
+orjson>=3.9.15
+boto3>=1.28.62
+urllib3>=1.26.19,<2.0.0
+idna>=3.7
+requests>=2.32.0
+pyopenssl>=24.2.1
+pydantic>=2.0.0
+-e ../../src/digital-letters-events
+-e ../../utils/py-mock-mesh
+-e ../../utils/py-utils
diff --git a/lambdas/report-sender/setup.py b/lambdas/report-sender/setup.py
new file mode 100644
index 00000000..0635f095
--- /dev/null
+++ b/lambdas/report-sender/setup.py
@@ -0,0 +1,7 @@
+from setuptools import setup, find_packages
+
+setup(
+ name="report-sender",
+ version="0.1.0",
+ packages=find_packages(),
+)
diff --git a/scripts/config/sonar-scanner.properties b/scripts/config/sonar-scanner.properties
index 789b2989..1e219478 100644
--- a/scripts/config/sonar-scanner.properties
+++ b/scripts/config/sonar-scanner.properties
@@ -25,11 +25,13 @@ sonar.coverage.exclusions=\
lambdas/mesh-download/mesh_download/config.py, \
lambdas/mesh-download/mesh_download/errors.py, \
lambdas/mesh-poll/mesh_poll/config.py, \
+ lambdas/report-sender/report_sender/config.py, \
+ lambdas/report-sender/report_sender/errors.py, \
src/asyncapigenerator/example_usage.py, \
src/asyncapigenerator/test_generator.py, \
src/eventcatalogasyncapiimporter/examples.py
# Coverage reports
-sonar.python.coverage.reportPaths=.coverage/coverage.xml,src/asyncapigenerator/coverage.xml,src/cloudeventjekylldocs/coverage.xml,src/eventcatalogasyncapiimporter/coverage.xml,utils/py-utils/coverage.xml,lambdas/mesh-acknowledge/coverage.xml,src/python-schema-generator/coverage.xml,lambdas/mesh-poll/coverage.xml,lambdas/mesh-download/coverage.xml
+sonar.python.coverage.reportPaths=.coverage/coverage.xml,src/asyncapigenerator/coverage.xml,src/cloudeventjekylldocs/coverage.xml,src/eventcatalogasyncapiimporter/coverage.xml,utils/py-utils/coverage.xml,lambdas/mesh-acknowledge/coverage.xml,src/python-schema-generator/coverage.xml,lambdas/mesh-poll/coverage.xml,lambdas/mesh-download/coverage.xml,lambdas/report-sender/coverage.xml
sonar.javascript.lcov.reportPaths=lcov.info,src/cloudevents/coverage/lcov.info
sonar.typescript.lcov.reportPaths=lcov.info,src/cloudevents/coverage/lcov.info
diff --git a/scripts/tests/unit.sh b/scripts/tests/unit.sh
index 90e5109d..1a4a23e6 100755
--- a/scripts/tests/unit.sh
+++ b/scripts/tests/unit.sh
@@ -45,6 +45,11 @@ echo "Setting up and running py-utils tests..."
make -C ./utils/py-utils install-dev
make -C ./utils/py-utils coverage # Run with coverage to generate coverage.xml for SonarCloud
+# Python projects - python-schema-generator
+echo "Setting up and running python-schema-generator tests..."
+make -C ./src/python-schema-generator install-dev
+make -C ./src/python-schema-generator coverage # Run with coverage to generate coverage.xml for SonarCloud
+
# Python Lambda - mesh-acknowledge
echo "Setting up and running mesh-acknowledge tests..."
make -C ./lambdas/mesh-acknowledge install-dev
@@ -60,10 +65,10 @@ echo "Setting up and running mesh-download tests..."
make -C ./lambdas/mesh-download install-dev
make -C ./lambdas/mesh-download coverage # Run with coverage to generate coverage.xml for SonarCloud
-# Python projects - python-schema-generator
-echo "Setting up and running python-schema-generator tests..."
-make -C ./src/python-schema-generator install-dev
-make -C ./src/python-schema-generator coverage # Run with coverage to generate coverage.xml for SonarCloud
+# Python Lambda - report-sender
+echo "Setting up and running report-sender tests..."
+make -C ./lambdas/report-sender install-dev
+make -C ./lambdas/report-sender coverage # Run with coverage to generate coverage.xml for SonarCloud
# merge coverage reports
mkdir -p .reports
diff --git a/src/cloudevents/domains/digital-letters/2025-10-draft/data/digital-letters-reporting-report-sent-data.schema.yaml b/src/cloudevents/domains/digital-letters/2025-10-draft/data/digital-letters-reporting-report-sent-data.schema.yaml
index 981180eb..932cda72 100644
--- a/src/cloudevents/domains/digital-letters/2025-10-draft/data/digital-letters-reporting-report-sent-data.schema.yaml
+++ b/src/cloudevents/domains/digital-letters/2025-10-draft/data/digital-letters-reporting-report-sent-data.schema.yaml
@@ -8,6 +8,9 @@ properties:
$ref: ../defs/requests.schema.yaml#/properties/senderId
meshMailboxReportsId:
$ref: ../defs/mesh.schema.yaml#/properties/meshMailboxReportsId
+ reportReference:
+ $ref: ../defs/mesh.schema.yaml#/properties/reportReference
required:
- senderId
- meshMailboxReportsId
+ - reportReference
diff --git a/src/cloudevents/domains/digital-letters/2025-10-draft/defs/mesh.schema.yaml b/src/cloudevents/domains/digital-letters/2025-10-draft/defs/mesh.schema.yaml
index e7398162..105de6f7 100644
--- a/src/cloudevents/domains/digital-letters/2025-10-draft/defs/mesh.schema.yaml
+++ b/src/cloudevents/domains/digital-letters/2025-10-draft/defs/mesh.schema.yaml
@@ -27,3 +27,9 @@ properties:
description: Date covered by the report
examples:
- "2025-12-03"
+ reportReference:
+ type: string
+ minLength: 1
+ description: Reference for the report
+ examples:
+ - "33ad37e1-ebac-4ff6-b973-261ca1f69e94"
diff --git a/src/cloudevents/domains/digital-letters/2025-10-draft/events/uk.nhs.notify.digital.letters.mesh.report.sent.v1.schema.yaml b/src/cloudevents/domains/digital-letters/2025-10-draft/events/uk.nhs.notify.digital.letters.mesh.report.sent.v1.schema.yaml
deleted file mode 100644
index e31fc0eb..00000000
--- a/src/cloudevents/domains/digital-letters/2025-10-draft/events/uk.nhs.notify.digital.letters.mesh.report.sent.v1.schema.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-$schema: https://json-schema.org/draft/2020-12/schema
-title: ReportSent
-type: object
-allOf:
- - $ref: ../digital-letters-mesh-profile.schema.yaml
-properties:
- type:
- type: string
- const: uk.nhs.notify.digital.letters.mesh.report.sent.v1
- description: Concrete versioned event type string for this event (.vN suffix).
- source:
- type: string
- pattern: ^/nhs/england/notify/(production|staging|development|uat)/(primary|secondary|dev-[0-9]+)/data-plane/digitalletters/mesh
- description: Event source for digital letters.
-
- dataschema:
- type: string
- const: ../data/digital-letter-base-data.schema.yaml
- description: Canonical URI of the event's data schema.
- examples:
- - digital-letter-base-data.schema.yaml
- data:
- $ref: ../data/digital-letter-base-data.schema.yaml
- description: Example payload wrapper containing notify-payload.
diff --git a/tests/playwright/constants/backend-constants.ts b/tests/playwright/constants/backend-constants.ts
index 9c98f58b..cda981d8 100644
--- a/tests/playwright/constants/backend-constants.ts
+++ b/tests/playwright/constants/backend-constants.ts
@@ -30,6 +30,7 @@ export const PRINT_ANALYSER_DLQ_NAME = `${CSI}-print-analyser-dlq`;
export const PRINT_SENDER_DLQ_NAME = `${CSI}-print-sender-dlq`;
export const MOVE_SCANNED_FILES_NAME = `${CSI}-move-scanned-files-queue`;
export const MOVE_SCANNED_FILES_DLQ_NAME = `${CSI}-move-scanned-files-dlq`;
+export const REPORT_SENDER_DLQ_NAME = `${CSI}-report-sender-dlq`;
// Queue Url Prefix
export const SQS_URL_PREFIX = `https://sqs.${REGION}.amazonaws.com/${AWS_ACCOUNT_ID}/`;
@@ -49,6 +50,7 @@ export const PII_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-$
export const FILE_SAFE_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-${ENV}-dl-file-safe`;
export const UNSCANNED_FILES_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-main-acct-digi-unscanned-files`;
export const FILE_QUARANTINE_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-${ENV}-dl-file-quarantine`;
+export const REPORTING_S3_BUCKET_NAME = `nhs-${process.env.AWS_ACCOUNT_ID}-${REGION}-${ENV}-dl-reporting`;
// Files that are scanned by Guardduty are in a bucket prefixed by the environment.
export const PREFIX_DL_FILES = `${CSI}/`;
diff --git a/tests/playwright/digital-letters-component-tests/send-reports-trust.component.spec.ts b/tests/playwright/digital-letters-component-tests/send-reports-trust.component.spec.ts
new file mode 100644
index 00000000..5799b046
--- /dev/null
+++ b/tests/playwright/digital-letters-component-tests/send-reports-trust.component.spec.ts
@@ -0,0 +1,114 @@
+import { expect, test } from '@playwright/test';
+import {
+ ENV,
+ NON_PII_S3_BUCKET_NAME,
+ REPORTING_S3_BUCKET_NAME,
+ REPORT_SENDER_DLQ_NAME,
+} from 'constants/backend-constants';
+import { getLogsFromCloudwatch } from 'helpers/cloudwatch-helpers';
+import eventPublisher from 'helpers/event-bus-helpers';
+import expectToPassEventually from 'helpers/expectations';
+import { downloadFromS3, uploadToS3 } from 'helpers/s3-helpers';
+import { expectMessageContainingString } from 'helpers/sqs-helpers';
+import { v4 as uuidv4 } from 'uuid';
+import reportGenerated from 'digital-letters-events/ReportGenerated.js';
+import { SENDER_ID_SKIPS_NOTIFY } from 'constants/tests-constants';
+
+test.describe('Digital Letters - Send reports to Trust', () => {
+ const senderId = SENDER_ID_SKIPS_NOTIFY;
+ const trustMeshMailboxReportsId = 'test-mesh-reports-1';
+ const messageContent = 'Sample content';
+
+  async function publishReportGeneratedEvent(reportKey: string): Promise<void> {
+ await eventPublisher.sendEvents(
+ [
+ {
+ id: uuidv4(),
+ specversion: '1.0',
+ source:
+ '/nhs/england/notify/development/dev-12345/data-plane/digitalletters/reporting',
+ subject: `report/${uuidv4()}`,
+ type: 'uk.nhs.notify.digital.letters.reporting.report.generated.v1',
+ time: new Date().toISOString(),
+ recordedtime: new Date().toISOString(),
+ datacontenttype: 'application/json',
+ severitynumber: 2,
+ severitytext: 'INFO',
+ traceparent:
+ '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
+ dataschema:
+ 'https://notify.nhs.uk/cloudevents/schemas/digital-letters/2025-10-draft/data/digital-letters-reporting-report-generated-data.schema.json',
+ data: {
+ reportUri: `s3://${REPORTING_S3_BUCKET_NAME}/${reportKey}`,
+ senderId,
+ },
+ },
+ ],
+ reportGenerated,
+ );
+ }
+
+  async function expectReportSentEventAndMeshMessageSent(
+    meshMailboxReportsId: string,
+  ): Promise<void> {
+ await expectToPassEventually(async () => {
+ const eventLogEntry = await getLogsFromCloudwatch(
+ `/aws/vendedlogs/events/event-bus/nhs-${ENV}-dl`,
+ [
+ '$.message_type = "EVENT_RECEIPT"',
+ '$.details.detail_type = "uk.nhs.notify.digital.letters.reporting.report.sent.v1"',
+ `$.details.event_detail = "*\\"meshMailboxReportsId\\":\\"${meshMailboxReportsId}\\"*"`,
+ `$.details.event_detail = "*\\"senderId\\":\\"${senderId}\\"*"`,
+ ],
+ );
+
+ expect(eventLogEntry.length).toBeGreaterThanOrEqual(1);
+
+ const parsedEvents = eventLogEntry.map((entry: any) =>
+ JSON.parse(entry.details.event_detail),
+ );
+
+ for (const event of parsedEvents) {
+ const { reportReference } = event.data;
+ expect(reportReference).toBeDefined();
+ // Mock MESH uses NON_PII_S3_BUCKET_NAME bucket, the object key starts with the local_id (i.e. the report reference).
+ const storedMessage = await downloadFromS3(
+ NON_PII_S3_BUCKET_NAME,
+ `mock-mesh/mock-mailbox/out/${trustMeshMailboxReportsId}/${reportReference}`,
+ );
+
+ expect(storedMessage.body).toContain(messageContent);
+ }
+ }, 120_000);
+ }
+
+ test('should send a ReportSent event following a successful reportGenerated event', async () => {
+ const yesterday = new Date();
+ yesterday.setDate(yesterday.getDate() - 1);
+ const yesterdayString = yesterday.toISOString().split('T')[0];
+ const reportForDate = yesterdayString;
+ const fileName = `${Date.now()}_TEST_${uuidv4().slice(0, 8)}_${reportForDate}.csv`;
+ const reportKey = `${ENV}/${fileName}`;
+
+ await uploadToS3(messageContent, REPORTING_S3_BUCKET_NAME, reportKey);
+ await publishReportGeneratedEvent(reportKey);
+
+ await expectToPassEventually(async () => {
+ await expectReportSentEventAndMeshMessageSent(trustMeshMailboxReportsId);
+ }, 120_000);
+ });
+
+  test('should send message to report-sender DLQ when file does not exist', async () => {
+ test.setTimeout(400_000);
+
+ const missingReportFileName = 'report-does-not-exist.csv';
+
+ await publishReportGeneratedEvent(missingReportFileName);
+
+ await expectMessageContainingString(
+ REPORT_SENDER_DLQ_NAME,
+ missingReportFileName,
+ 300,
+ );
+ });
+});