From 3d2919f7fa53bd0a2f2bc7fe8ef37edec9d241ed Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Fri, 5 Sep 2025 17:06:33 +0100
Subject: [PATCH 01/20] init: redis_sync makefiles

---
 .../redis_sync/.vscode/settings.json.default | 27 ++++++++++++++++
 {redis_sync => lambdas/redis_sync}/Dockerfile | 32 +++++++++++++------
 lambdas/redis_sync/Makefile | 16 ++++++++++
 {redis_sync => lambdas/redis_sync}/README.md | 0
 .../redis_sync}/poetry.lock | 0
 .../redis_sync}/pyproject.toml | 3 +-
 .../redis_sync}/src/__init__.py | 0
 .../redis_sync}/src/clients.py | 0
 .../redis_sync}/src/constants.py | 0
 .../redis_sync}/src/event_read.py | 0
 .../redis_sync}/src/log_decorator.py | 0
 .../redis_sync}/src/record_processor.py | 0
 .../redis_sync}/src/redis_cacher.py | 0
 .../redis_sync}/src/redis_sync.py | 0
 .../redis_sync}/src/s3_event.py | 0
 .../redis_sync}/src/s3_reader.py | 0
 .../redis_sync}/src/transform_configs.py | 0
 .../redis_sync}/src/transform_map.py | 0
 .../redis_sync}/tests/__init__.py | 0
 .../redis_sync}/tests/test_clients.py | 0
 .../tests/test_data/disease_mapping.json | 0
 .../test_data/expected_disease_to_vacc.json | 0
 .../expected_ods_code_to_supplier.json | 0
 .../expected_supplier_permissions.json | 0
 .../test_data/expected_vacc_to_diseases.json | 0
 .../tests/test_data/permissions_config.json | 0
 .../s3-notification-single-filename.json | 0
 .../test_data/test_read_vaccine_mapping.json | 0
 .../redis_sync}/tests/test_event_read.py | 0
 .../redis_sync}/tests/test_handler.py | 0
 .../tests/test_handler_decorator.py | 0
 .../tests/test_record_processor.py | 0
 .../redis_sync}/tests/test_redis_cacher.py | 0
 .../redis_sync}/tests/test_s3_reader.py | 0
 .../tests/test_transform_config.py | 0
 redis_sync/Makefile | 14 --------
 36 files changed, 68 insertions(+), 24 deletions(-)
 create mode 100644 lambdas/redis_sync/.vscode/settings.json.default
 rename {redis_sync => lambdas/redis_sync}/Dockerfile (52%)
 create mode 100644 lambdas/redis_sync/Makefile
 rename {redis_sync => lambdas/redis_sync}/README.md (100%)
 rename {redis_sync => lambdas/redis_sync}/poetry.lock (100%)
 rename {redis_sync => lambdas/redis_sync}/pyproject.toml (89%)
 rename {redis_sync => lambdas/redis_sync}/src/__init__.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/clients.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/constants.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/event_read.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/log_decorator.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/record_processor.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/redis_cacher.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/redis_sync.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/s3_event.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/s3_reader.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/transform_configs.py (100%)
 rename {redis_sync => lambdas/redis_sync}/src/transform_map.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/__init__.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_clients.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_data/disease_mapping.json (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_data/expected_disease_to_vacc.json (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_data/expected_ods_code_to_supplier.json (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_data/expected_supplier_permissions.json (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_data/expected_vacc_to_diseases.json (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_data/permissions_config.json (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_data/s3-notification-single-filename.json (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_data/test_read_vaccine_mapping.json (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_event_read.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_handler.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_handler_decorator.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_record_processor.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_redis_cacher.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_s3_reader.py (100%)
 rename {redis_sync => lambdas/redis_sync}/tests/test_transform_config.py (100%)
 delete mode 100644 redis_sync/Makefile

diff --git a/lambdas/redis_sync/.vscode/settings.json.default b/lambdas/redis_sync/.vscode/settings.json.default
new file mode 100644
index 000000000..f06a81ed7
--- /dev/null
+++ b/lambdas/redis_sync/.vscode/settings.json.default
@@ -0,0 +1,27 @@
+{
+    "python.analysis.extraPaths": [
+        "./src"
+    ],
+    "python.testing.unittestArgs": [
+        "-v",
+        "-s",
+        "./",
+        "-p",
+        "test_*.py"
+    ],
+    "python.testing.pytestEnabled": false,
+    "python.testing.unittestEnabled": true,
+    "pylint.args": [
+        "--init-hook",
+        "import sys; sys.path.append('./src')"
+    ],
+    "[makefile]": {
+        "editor.insertSpaces": false,
+        "editor.detectIndentation": false
+    },
+    "files.trimTrailingWhitespace": true,
+    "[python]": {
+        "files.trimTrailingWhitespace": true
+    },
+    "files.insertFinalNewline": true
+}
diff --git a/redis_sync/Dockerfile b/lambdas/redis_sync/Dockerfile
similarity index 52%
rename from redis_sync/Dockerfile
rename to lambdas/redis_sync/Dockerfile
index 40d155f7a..9e9b5bdf5 100644
--- a/redis_sync/Dockerfile
+++ b/lambdas/redis_sync/Dockerfile
@@ -6,20 +6,34 @@
 RUN mkdir -p /home/appuser && \
     echo 'appuser:x:1001:' >> /etc/group && \
     chown -R 1001:1001 /home/appuser && pip install "poetry~=2.1.4"
-# Install Poetry as root
-COPY poetry.lock pyproject.toml README.md ./
+# Install Poetry dependencies
+# Copy redis_sync Poetry files
+COPY ./redis_sync/poetry.lock ./redis_sync/pyproject.toml ./
+COPY ./shared/src/common ./src/common
+
+RUN echo "Listing /var/task after source code copy:" && ls -R /var/task
+
+# Install redis_sync dependencies
+WORKDIR /var/task
 RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main
-# -----------------------------
-FROM base AS test
-COPY src src
-COPY tests tests
-RUN poetry install --no-interaction --no-ansi --no-root && \
-    pytest --disable-warnings tests
 # -----------------------------
 FROM base AS build
-COPY src .
+
+# Set working directory back to Lambda task root
+WORKDIR /var/task
+
+# Copy shared source code
+COPY ./shared/src/common ./common
+
+# Copy redis_sync source code
+COPY ./redis_sync/src .
+
+# Set correct permissions
 RUN chmod 644 $(find . -type f) && chmod 755 $(find . -type d)
+
 # Build as non-root user
 USER 1001:1001
+
+# Set the Lambda handler
 CMD ["redis_sync.handler"]
diff --git a/lambdas/redis_sync/Makefile b/lambdas/redis_sync/Makefile
new file mode 100644
index 000000000..b0a8ffb26
--- /dev/null
+++ b/lambdas/redis_sync/Makefile
@@ -0,0 +1,16 @@
+TEST_ENV := @PYTHONPATH=src:tests:../shared/src
+
+test:
+	$(TEST_ENV) python -m unittest
+
+coverage-run:
+	$(TEST_ENV) coverage run -m unittest discover -v
+
+coverage-report:
+	$(TEST_ENV) coverage report -m
+
+coverage-html:
+	$(TEST_ENV) coverage html
+
+
+.PHONY: build package
\ No newline at end of file
diff --git a/redis_sync/README.md b/lambdas/redis_sync/README.md
similarity index 100%
rename from redis_sync/README.md
rename to lambdas/redis_sync/README.md
diff --git a/redis_sync/poetry.lock b/lambdas/redis_sync/poetry.lock
similarity index 100%
rename from redis_sync/poetry.lock
rename to lambdas/redis_sync/poetry.lock
diff --git a/redis_sync/pyproject.toml b/lambdas/redis_sync/pyproject.toml
similarity index 89%
rename from redis_sync/pyproject.toml
rename to lambdas/redis_sync/pyproject.toml
index 5ae96774b..c83900200 100644
--- a/redis_sync/pyproject.toml
+++ b/lambdas/redis_sync/pyproject.toml
@@ -13,7 +13,8 @@ description = ""
 authors = ["s.wates "]
 readme = "README.md"
 packages = [
-    {include = "src"}
+    {include = "src"},
+    {include = "common", from = "../shared/src"}
 ]
 
 [tool.poetry.dependencies]
diff --git a/redis_sync/src/__init__.py b/lambdas/redis_sync/src/__init__.py
similarity index 100%
rename from redis_sync/src/__init__.py
rename to lambdas/redis_sync/src/__init__.py
diff --git a/redis_sync/src/clients.py b/lambdas/redis_sync/src/clients.py
similarity index 100%
rename from redis_sync/src/clients.py
rename to lambdas/redis_sync/src/clients.py
diff --git a/redis_sync/src/constants.py b/lambdas/redis_sync/src/constants.py
similarity index 100%
rename from redis_sync/src/constants.py
rename to lambdas/redis_sync/src/constants.py
diff --git a/redis_sync/src/event_read.py b/lambdas/redis_sync/src/event_read.py
similarity index 100%
rename from redis_sync/src/event_read.py
rename to lambdas/redis_sync/src/event_read.py
diff --git a/redis_sync/src/log_decorator.py b/lambdas/redis_sync/src/log_decorator.py
similarity index 100%
rename from redis_sync/src/log_decorator.py
rename to lambdas/redis_sync/src/log_decorator.py
diff --git a/redis_sync/src/record_processor.py b/lambdas/redis_sync/src/record_processor.py
similarity index 100%
rename from redis_sync/src/record_processor.py
rename to lambdas/redis_sync/src/record_processor.py
diff --git a/redis_sync/src/redis_cacher.py b/lambdas/redis_sync/src/redis_cacher.py
similarity index 100%
rename from redis_sync/src/redis_cacher.py
rename to lambdas/redis_sync/src/redis_cacher.py
diff --git a/redis_sync/src/redis_sync.py b/lambdas/redis_sync/src/redis_sync.py
similarity index 100%
rename from redis_sync/src/redis_sync.py
rename to lambdas/redis_sync/src/redis_sync.py
diff --git a/redis_sync/src/s3_event.py b/lambdas/redis_sync/src/s3_event.py
similarity index 100%
rename from redis_sync/src/s3_event.py
rename to lambdas/redis_sync/src/s3_event.py
diff --git a/redis_sync/src/s3_reader.py b/lambdas/redis_sync/src/s3_reader.py
similarity index 100%
rename from redis_sync/src/s3_reader.py
rename to lambdas/redis_sync/src/s3_reader.py
diff --git a/redis_sync/src/transform_configs.py b/lambdas/redis_sync/src/transform_configs.py
similarity index 100%
rename from redis_sync/src/transform_configs.py
rename to lambdas/redis_sync/src/transform_configs.py
diff --git a/redis_sync/src/transform_map.py b/lambdas/redis_sync/src/transform_map.py
similarity index 100%
rename from redis_sync/src/transform_map.py
rename to lambdas/redis_sync/src/transform_map.py
diff --git a/redis_sync/tests/__init__.py b/lambdas/redis_sync/tests/__init__.py
similarity index 100%
rename from redis_sync/tests/__init__.py
rename to lambdas/redis_sync/tests/__init__.py
diff --git a/redis_sync/tests/test_clients.py b/lambdas/redis_sync/tests/test_clients.py
similarity index 100%
rename from redis_sync/tests/test_clients.py
rename to lambdas/redis_sync/tests/test_clients.py
diff --git a/redis_sync/tests/test_data/disease_mapping.json b/lambdas/redis_sync/tests/test_data/disease_mapping.json
similarity index 100%
rename from redis_sync/tests/test_data/disease_mapping.json
rename to lambdas/redis_sync/tests/test_data/disease_mapping.json
diff --git a/redis_sync/tests/test_data/expected_disease_to_vacc.json b/lambdas/redis_sync/tests/test_data/expected_disease_to_vacc.json
similarity index 100%
rename from redis_sync/tests/test_data/expected_disease_to_vacc.json
rename to lambdas/redis_sync/tests/test_data/expected_disease_to_vacc.json
diff --git a/redis_sync/tests/test_data/expected_ods_code_to_supplier.json b/lambdas/redis_sync/tests/test_data/expected_ods_code_to_supplier.json
similarity index 100%
rename from redis_sync/tests/test_data/expected_ods_code_to_supplier.json
rename to lambdas/redis_sync/tests/test_data/expected_ods_code_to_supplier.json
diff --git a/redis_sync/tests/test_data/expected_supplier_permissions.json b/lambdas/redis_sync/tests/test_data/expected_supplier_permissions.json
similarity index 100%
rename from redis_sync/tests/test_data/expected_supplier_permissions.json
rename to lambdas/redis_sync/tests/test_data/expected_supplier_permissions.json
diff --git a/redis_sync/tests/test_data/expected_vacc_to_diseases.json b/lambdas/redis_sync/tests/test_data/expected_vacc_to_diseases.json
similarity index 100%
rename from redis_sync/tests/test_data/expected_vacc_to_diseases.json
rename to lambdas/redis_sync/tests/test_data/expected_vacc_to_diseases.json
diff --git a/redis_sync/tests/test_data/permissions_config.json b/lambdas/redis_sync/tests/test_data/permissions_config.json
similarity index 100%
rename from redis_sync/tests/test_data/permissions_config.json
rename to lambdas/redis_sync/tests/test_data/permissions_config.json
diff --git a/redis_sync/tests/test_data/s3-notification-single-filename.json b/lambdas/redis_sync/tests/test_data/s3-notification-single-filename.json
similarity index 100%
rename from redis_sync/tests/test_data/s3-notification-single-filename.json
rename to lambdas/redis_sync/tests/test_data/s3-notification-single-filename.json
diff --git a/redis_sync/tests/test_data/test_read_vaccine_mapping.json b/lambdas/redis_sync/tests/test_data/test_read_vaccine_mapping.json
similarity index 100%
rename from redis_sync/tests/test_data/test_read_vaccine_mapping.json
rename to lambdas/redis_sync/tests/test_data/test_read_vaccine_mapping.json
diff --git a/redis_sync/tests/test_event_read.py b/lambdas/redis_sync/tests/test_event_read.py
similarity index 100%
rename from redis_sync/tests/test_event_read.py
rename to lambdas/redis_sync/tests/test_event_read.py
diff --git a/redis_sync/tests/test_handler.py b/lambdas/redis_sync/tests/test_handler.py
similarity index 100%
rename from redis_sync/tests/test_handler.py
rename to lambdas/redis_sync/tests/test_handler.py
diff --git a/redis_sync/tests/test_handler_decorator.py b/lambdas/redis_sync/tests/test_handler_decorator.py
similarity index 100%
rename from redis_sync/tests/test_handler_decorator.py
rename to lambdas/redis_sync/tests/test_handler_decorator.py
diff --git a/redis_sync/tests/test_record_processor.py b/lambdas/redis_sync/tests/test_record_processor.py
similarity index 100%
rename from redis_sync/tests/test_record_processor.py
rename to lambdas/redis_sync/tests/test_record_processor.py
diff --git a/redis_sync/tests/test_redis_cacher.py b/lambdas/redis_sync/tests/test_redis_cacher.py
similarity index 100%
rename from redis_sync/tests/test_redis_cacher.py
rename to lambdas/redis_sync/tests/test_redis_cacher.py
diff --git a/redis_sync/tests/test_s3_reader.py b/lambdas/redis_sync/tests/test_s3_reader.py
similarity index 100%
rename from redis_sync/tests/test_s3_reader.py
rename to lambdas/redis_sync/tests/test_s3_reader.py
diff --git a/redis_sync/tests/test_transform_config.py b/lambdas/redis_sync/tests/test_transform_config.py
similarity index 100%
rename from redis_sync/tests/test_transform_config.py
rename to lambdas/redis_sync/tests/test_transform_config.py
diff --git a/redis_sync/Makefile b/redis_sync/Makefile
deleted file mode 100644
index fec3a6e17..000000000
--- a/redis_sync/Makefile
+++ /dev/null
@@ -1,14 +0,0 @@
-
-test:
-	@PYTHONPATH=src:tests python -m unittest
-
-coverage-run:
-	coverage run -m unittest discover -v
-
-coverage-report:
-	coverage report -m
-
-coverage-html:
-	coverage html
-
-.PHONY: build package
\ No newline at end of file

From 83e4be1a7f0df8aac805dc53ac72e7abb13d5f2e Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Fri, 5 Sep 2025 17:52:23 +0100
Subject: [PATCH 02/20] clients.py

---
 lambdas/redis_sync/src/clients.py | 20 -----
 lambdas/redis_sync/src/log_decorator.py | 2 +-
 lambdas/redis_sync/src/record_processor.py | 2 +-
 lambdas/redis_sync/src/redis_cacher.py | 3 +-
 lambdas/redis_sync/src/redis_sync.py | 3 +-
 lambdas/redis_sync/src/s3_reader.py | 3 +-
 lambdas/redis_sync/src/transform_configs.py | 2 +-
 lambdas/redis_sync/src/transform_map.py | 2 +-
 lambdas/redis_sync/tests/test_clients.py | 89 ------------------
 lambdas/shared/src/common/clients.py | 12 ++-
 .../shared/tests/test_common/test_clients.py | 28 +++++-
 11 files changed, 43 insertions(+), 123 deletions(-)
 delete mode 100644 lambdas/redis_sync/src/clients.py
 delete mode 100644 lambdas/redis_sync/tests/test_clients.py

diff --git a/lambdas/redis_sync/src/clients.py b/lambdas/redis_sync/src/clients.py
deleted file mode 100644
index 535f6e013..000000000
--- a/lambdas/redis_sync/src/clients.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import os
-import logging
-import redis
-from boto3 import client as boto3_client
-
-
-logging.basicConfig(level="INFO")
-logger = logging.getLogger()
-logger.setLevel("INFO")
-
-STREAM_NAME = os.getenv("SPLUNK_FIREHOSE_NAME", "immunisation-fhir-api-internal-dev-splunk-firehose")
-CONFIG_BUCKET_NAME = os.getenv("CONFIG_BUCKET_NAME", "variable-not-defined")
-REGION_NAME = os.getenv("AWS_REGION", "eu-west-2")
-REDIS_HOST = os.getenv("REDIS_HOST", "")
-REDIS_PORT = os.getenv("REDIS_PORT", 6379)
-
-s3_client = boto3_client("s3", region_name=REGION_NAME)
-firehose_client = boto3_client("firehose", region_name=REGION_NAME)
-logger.info(f"Connecting to Redis at {REDIS_HOST}:{REDIS_PORT}")
-redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True)
diff --git a/lambdas/redis_sync/src/log_decorator.py b/lambdas/redis_sync/src/log_decorator.py
index e014e41c3..8f28c4d88 100644
--- a/lambdas/redis_sync/src/log_decorator.py
+++ b/lambdas/redis_sync/src/log_decorator.py
@@ -8,7 +8,7 @@
 import time
 from datetime import datetime
 from functools import wraps
-from clients import firehose_client, logger, STREAM_NAME
+from common.clients import firehose_client, logger, STREAM_NAME
 
 
 def send_log_to_firehose(log_data: dict) -> None:
diff --git a/lambdas/redis_sync/src/record_processor.py b/lambdas/redis_sync/src/record_processor.py
index e94456ff9..7430a86bd 100644
--- a/lambdas/redis_sync/src/record_processor.py
+++ b/lambdas/redis_sync/src/record_processor.py
@@ -1,4 +1,4 @@
-from clients import logger
+from common.clients import logger
 from s3_event import S3EventRecord
 from redis_cacher import RedisCacher
 '''
diff --git a/lambdas/redis_sync/src/redis_cacher.py b/lambdas/redis_sync/src/redis_cacher.py
index 88303994d..276652944 100644
--- a/lambdas/redis_sync/src/redis_cacher.py
+++ b/lambdas/redis_sync/src/redis_cacher.py
@@ -1,8 +1,7 @@
 "Upload the content from a config file in S3 to ElastiCache (Redis)"
 import json
-from clients import redis_client
-from clients import logger
+from common.clients import redis_client, logger
 from transform_map import transform_map
 from s3_reader import S3Reader
 
 
diff --git a/lambdas/redis_sync/src/redis_sync.py b/lambdas/redis_sync/src/redis_sync.py
index cf8e41363..17b0f819c 100644
--- a/lambdas/redis_sync/src/redis_sync.py
+++ b/lambdas/redis_sync/src/redis_sync.py
@@ -1,9 +1,8 @@
-from clients import logger
+from common.clients import redis_client, logger
 from s3_event import S3Event
 from record_processor import process_record
 from event_read import read_event
 from log_decorator import logging_decorator
-from clients import redis_client
 '''
 Event Processor
 The Business Logic for the Redis Sync Lambda Function.
diff --git a/lambdas/redis_sync/src/s3_reader.py b/lambdas/redis_sync/src/s3_reader.py
index 5b6e28dbe..2f740956a 100644
--- a/lambdas/redis_sync/src/s3_reader.py
+++ b/lambdas/redis_sync/src/s3_reader.py
@@ -1,5 +1,4 @@
-from clients import s3_client
-from clients import logger
+from common.clients import s3_client, logger
 
 
 class S3Reader:
diff --git a/lambdas/redis_sync/src/transform_configs.py b/lambdas/redis_sync/src/transform_configs.py
index 7f991170c..f7ddee722 100644
--- a/lambdas/redis_sync/src/transform_configs.py
+++ b/lambdas/redis_sync/src/transform_configs.py
@@ -1,4 +1,4 @@
-from clients import logger
+from common.clients import logger
 
 
 def transform_vaccine_map(mapping):
diff --git a/lambdas/redis_sync/src/transform_map.py b/lambdas/redis_sync/src/transform_map.py
index 83938930c..8af72f8ce 100644
--- a/lambdas/redis_sync/src/transform_map.py
+++ b/lambdas/redis_sync/src/transform_map.py
@@ -1,5 +1,5 @@
 from constants import RedisCacheKey
-from clients import logger
+from common.clients import logger
 from transform_configs import transform_vaccine_map, transform_supplier_permissions
 '''
 Transform config file to format required in REDIS cache.
diff --git a/lambdas/redis_sync/tests/test_clients.py b/lambdas/redis_sync/tests/test_clients.py
deleted file mode 100644
index b3db556a7..000000000
--- a/lambdas/redis_sync/tests/test_clients.py
+++ /dev/null
@@ -1,89 +0,0 @@
-import unittest
-from unittest.mock import patch
-import clients as clients
-import importlib
-
-
-class TestClients(unittest.TestCase):
-
-    BUCKET_NAME = "default-bucket"
-    AWS_REGION = "eu-west-2"
-    REDIS_HOST = "mock-redis-host"
-    REDIS_PORT = 6379
-
-    def setUp(self):
-        self.boto3_client_patch = patch("boto3.client")
-        self.mock_boto3_client = self.boto3_client_patch.start()
-        self.logging_patch = patch("logging.getLogger")
-        self.mock_logging = self.logging_patch.start()
-        self.logger_info_patcher = patch("logging.Logger.info")
-        self.mock_logger_info = self.logger_info_patcher.start()
-        self.getenv_patch = patch("os.getenv")
-        self.mock_getenv = self.getenv_patch.start()
-        self.mock_getenv.side_effect = lambda key, default=None: {
-            "CONFIG_BUCKET_NAME": self.BUCKET_NAME,
-            "AWS_REGION": self.AWS_REGION,
-            "REDIS_HOST": self.REDIS_HOST,
-            "REDIS_PORT": self.REDIS_PORT
-        }.get(key, default)
-
-        self.redis_patch = patch("redis.StrictRedis")
-        self.mock_redis = self.redis_patch.start()
-
-        self.mock_redis.return_value = self.mock_redis
-        self.mock_boto3_client.return_value = self.mock_boto3_client
-        self.mock_boto3_client.return_value.send_message = {}
-
-    def tearDown(self):
-        patch.stopall()
-
-    def test_os_environ(self):
-        # Test if environment variables are set correctly
-        importlib.reload(clients)
-        self.assertEqual(clients.CONFIG_BUCKET_NAME, self.BUCKET_NAME)
-        self.assertEqual(clients.REGION_NAME, self.AWS_REGION)
-        self.assertEqual(clients.REDIS_HOST, self.REDIS_HOST)
-        self.assertEqual(clients.REDIS_PORT, self.REDIS_PORT)
-
-    def test_boto3_client(self):
-        ''' Test boto3 client is created with correct parameters '''
-        importlib.reload(clients)
-        self.mock_boto3_client.assert_any_call("s3", region_name=self.AWS_REGION)
-
-    def test_firehose_client(self):
-        ''' Test firehose client is created with correct parameters '''
-        importlib.reload(clients)
-        self.mock_boto3_client.assert_any_call("firehose", region_name=self.AWS_REGION)
-
-    def test_redis_client(self):
-        ''' Test redis client is created with correct parameters '''
-        importlib.reload(clients)
-        self.mock_redis.assert_called_once_with(
-            host=self.REDIS_HOST,
-            port=self.REDIS_PORT,
-            decode_responses=True
-        )
-
-    def test_logging_setup(self):
-        ''' Test logging is set up correctly '''
-        importlib.reload(clients)
-        self.assertTrue(hasattr(clients, 'logger'))
-
-    def test_logging_configuration(self):
-        ''' Test logging configuration '''
-        importlib.reload(clients)
-        clients.logger.setLevel.assert_called_once_with("INFO")
-
-    def test_redis_client_initialization(self):
-        ''' Test redis client initialization '''
-        importlib.reload(clients)
-        self.mock_redis.assert_called_once_with(host=self.REDIS_HOST, port=self.REDIS_PORT, decode_responses=True)
-        self.assertTrue(hasattr(clients, 'redis_client'))
-        self.assertIsInstance(clients.redis_client, self.mock_redis.return_value.__class__)
-
-    def test_logging_initialization(self):
-        ''' Test logging initialization '''
-        importlib.reload(clients)
-        self.mock_logging.assert_called_once_with()
-        self.assertTrue(hasattr(clients, 'logger'))
-        clients.logger.setLevel.assert_any_call("INFO")
diff --git a/lambdas/shared/src/common/clients.py b/lambdas/shared/src/common/clients.py
index 41ad7831f..966eab37b 100644
--- a/lambdas/shared/src/common/clients.py
+++ b/lambdas/shared/src/common/clients.py
@@ -1,7 +1,7 @@
 import os
 import logging
-from boto3 import client as boto3_client
-import boto3
+import redis
+from boto3 import client as boto3_client, resource as boto3_resource
 
 logging.basicConfig(level="INFO")
 logger = logging.getLogger()
@@ -12,9 +12,15 @@
 REGION_NAME = os.getenv("AWS_REGION", "eu-west-2")
 
+REDIS_HOST = os.getenv("REDIS_HOST", "")
+REDIS_PORT = os.getenv("REDIS_PORT", 6379)
+
 s3_client = boto3_client("s3", region_name=REGION_NAME)
 firehose_client = boto3_client("firehose", region_name=REGION_NAME)
 secrets_manager_client = boto3_client("secretsmanager", region_name=REGION_NAME)
-dynamodb_resource = boto3.resource("dynamodb", region_name=REGION_NAME)
+dynamodb_resource = boto3_resource("dynamodb", region_name=REGION_NAME)
 dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
+
+logger.info(f"Connecting to Redis at {REDIS_HOST}:{REDIS_PORT}")
+redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True)
diff --git a/lambdas/shared/tests/test_common/test_clients.py b/lambdas/shared/tests/test_common/test_clients.py
index fd9868b3c..8e5da75fd 100644
--- a/lambdas/shared/tests/test_common/test_clients.py
+++ b/lambdas/shared/tests/test_common/test_clients.py
@@ -8,6 +8,8 @@ class TestClients(unittest.TestCase):
 
     BUCKET_NAME = "default-bucket"
     AWS_REGION = "eu-west-2"
+    REDIS_HOST = "mock-redis-host"
+    REDIS_PORT = 6379
 
     def setUp(self):
         self.boto3_client_patch = patch("boto3.client")
@@ -20,9 +22,15 @@ def setUp(self):
         self.mock_getenv = self.getenv_patch.start()
         self.mock_getenv.side_effect = lambda key, default=None: {
             "CONFIG_BUCKET_NAME": self.BUCKET_NAME,
-            "AWS_REGION": self.AWS_REGION
+            "AWS_REGION": self.AWS_REGION,
+            "REDIS_HOST": self.REDIS_HOST,
+            "REDIS_PORT": self.REDIS_PORT
         }.get(key, default)
 
+        self.redis_patch = patch("redis.StrictRedis")
+        self.mock_redis = self.redis_patch.start()
+
+        self.mock_redis.return_value = self.mock_redis
         self.mock_boto3_client.return_value = self.mock_boto3_client
         self.mock_boto3_client.return_value.send_message = {}
 
@@ -34,6 +42,8 @@ def test_os_environ(self):
         importlib.reload(clients)
         self.assertEqual(clients.CONFIG_BUCKET_NAME, self.BUCKET_NAME)
         self.assertEqual(clients.REGION_NAME, self.AWS_REGION)
+        self.assertEqual(clients.REDIS_HOST, self.REDIS_HOST)
+        self.assertEqual(clients.REDIS_PORT, self.REDIS_PORT)
 
     def test_boto3_client(self):
         ''' Test boto3 client is created with correct parameters '''
@@ -45,6 +55,15 @@ def test_firehose_client(self):
         importlib.reload(clients)
         self.mock_boto3_client.assert_any_call("firehose", region_name=self.AWS_REGION)
 
+    def test_redis_client(self):
+        ''' Test redis client is created with correct parameters '''
+        importlib.reload(clients)
+        self.mock_redis.assert_called_once_with(
+            host=self.REDIS_HOST,
+            port=self.REDIS_PORT,
+            decode_responses=True
+        )
+
     def test_logging_setup(self):
         ''' Test logging is set up correctly '''
         importlib.reload(clients)
@@ -55,6 +74,13 @@ def test_logging_configuration(self):
         importlib.reload(clients)
         clients.logger.setLevel.assert_called_once_with("INFO")
 
+    def test_redis_client_initialization(self):
+        ''' Test redis client initialization '''
+        importlib.reload(clients)
+        self.mock_redis.assert_called_once_with(host=self.REDIS_HOST, port=self.REDIS_PORT, decode_responses=True)
+        self.assertTrue(hasattr(clients, 'redis_client'))
+        self.assertIsInstance(clients.redis_client, self.mock_redis.return_value.__class__)
+
     def test_logging_initialization(self):
         ''' Test logging initialization '''
         importlib.reload(clients)

From e8fd211101c626efe6547b09f0ec89c1f08c6ad4 Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Mon, 8 Sep 2025 10:54:22 +0100
Subject: [PATCH 03/20] log_decorator

---
 lambdas/redis_sync/src/log_decorator.py | 53 -------------------
 lambdas/redis_sync/src/redis_sync.py | 6 +--
 lambdas/redis_sync/tests/test_handler.py | 14 ++---
 .../tests/test_handler_decorator.py | 2 +-
 4 files changed, 11 insertions(+), 64 deletions(-)
 delete mode 100644 lambdas/redis_sync/src/log_decorator.py

diff --git a/lambdas/redis_sync/src/log_decorator.py b/lambdas/redis_sync/src/log_decorator.py
deleted file mode 100644
index 8f28c4d88..000000000
--- a/lambdas/redis_sync/src/log_decorator.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""This module contains the logging decorator for sending the appropriate logs to Cloudwatch and Firehose.
-    The decorator log pattern is shared by filenameprocessor, recordprocessor, ack_backend and redis_sync modules.
-    and therefore could be moved to a common module in the future.
-    TODO: refactor to a common module.
-    TODO: Duplication check has been suppressed in sonar-project.properties. Remove once refactored.
-"""
-import json
-import time
-from datetime import datetime
-from functools import wraps
-from common.clients import firehose_client, logger, STREAM_NAME
-
-
-def send_log_to_firehose(log_data: dict) -> None:
-    """Sends the log_message to Firehose"""
-    try:
-        record = {"Data": json.dumps({"event": log_data}).encode("utf-8")}
-        response = firehose_client.put_record(DeliveryStreamName=STREAM_NAME, Record=record)
-        logger.info("Log sent to Firehose: %s", response)
-    except Exception as error:  # pylint:disable = broad-exception-caught
-        logger.exception("Error sending log to Firehose: %s", error)
-
-
-def generate_and_send_logs(
-    start_time, base_log_data: dict, additional_log_data: dict, is_error_log: bool = False
-) -> None:
-    """Generates log data which includes the base_log_data, additional_log_data, and time taken (calculated using the
-    current time and given start_time) and sends them to Cloudwatch and Firehose."""
-    log_data = {**base_log_data, "time_taken": f"{round(time.time() - start_time, 5)}s", **additional_log_data}
-    log_function = logger.error if is_error_log else logger.info
-    log_function(json.dumps(log_data))
-    send_log_to_firehose(log_data)
-
-
-def logging_decorator(prefix="redis_sync"):
-    def decorator(func):
-        @wraps(func)
-        def wrapper(*args, **kwargs):
-            base_log_data = {
-                "function_name": f"{prefix}_{func.__name__}",
-                "date_time": str(datetime.now())
-            }
-            start_time = time.time()
-            try:
-                result = func(*args, **kwargs)
-                generate_and_send_logs(start_time, base_log_data, additional_log_data=result)
-                return result
-            except Exception as e:
-                additional_log_data = {"statusCode": 500, "error": str(e)}
-                generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)
-                raise
-        return wrapper
-    return decorator
diff --git a/lambdas/redis_sync/src/redis_sync.py b/lambdas/redis_sync/src/redis_sync.py
index 17b0f819c..bb6b30999 100644
--- a/lambdas/redis_sync/src/redis_sync.py
+++ b/lambdas/redis_sync/src/redis_sync.py
@@ -1,15 +1,15 @@
-from common.clients import redis_client, logger
+from common.clients import redis_client, STREAM_NAME, logger
 from s3_event import S3Event
 from record_processor import process_record
 from event_read import read_event
-from log_decorator import logging_decorator
+from common.log_decorator import logging_decorator
 '''
 Event Processor
 The Business Logic for the Redis Sync Lambda Function.
 This module processes S3 events and iterates through each record to process them individually.'''
 
 
-@logging_decorator(prefix="redis_sync")
+@logging_decorator(prefix="redis_sync", stream_name=STREAM_NAME)
 def handler(event, _):
 
     try:
diff --git a/lambdas/redis_sync/tests/test_handler.py b/lambdas/redis_sync/tests/test_handler.py
index c63bb6240..8ae1cb2a7 100644
--- a/lambdas/redis_sync/tests/test_handler.py
+++ b/lambdas/redis_sync/tests/test_handler.py
@@ -40,7 +40,7 @@ def tearDown(self):
         self.logger_exception_patcher.stop()
 
     def test_handler_success(self):
-        with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
+        with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
             importlib.reload(redis_sync)
             mock_event = {'Records': [self.s3_vaccine]}
             self.mock_get_s3_records.return_value = [self.s3_vaccine]
@@ -53,7 +53,7 @@ def test_handler_success(self):
             self.assertEqual(result["file_keys"], ['test-key'])
 
     def test_handler_failure(self):
-        with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
+        with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
             importlib.reload(redis_sync)
 
             mock_event = {'Records': [self.s3_vaccine]}
@@ -66,7 +66,7 @@ def test_handler_failure(self):
             self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'})
 
     def test_handler_no_records(self):
-        with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
+        with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
             importlib.reload(redis_sync)
             mock_event = {'Records': []}
             self.mock_get_s3_records.return_value = []
@@ -74,7 +74,7 @@ def test_handler_no_records(self):
             self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'})
 
     def test_handler_exception(self):
-        with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
+        with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
             importlib.reload(redis_sync)
             mock_event = {'Records': [self.s3_vaccine]}
             self.mock_get_s3_records.return_value = [self.s3_vaccine]
@@ -84,14 +84,14 @@ def test_handler_exception(self):
             self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'})
 
     def test_handler_with_empty_event(self):
-        with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
+        with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
             importlib.reload(redis_sync)
             self.mock_get_s3_records.return_value = []
             result = redis_sync.handler({}, None)
             self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'})
 
     def test_handler_multi_record(self):
-        with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
+        with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
             importlib.reload(redis_sync)
             mock_event = {'Records': [self.s3_vaccine, self.s3_supplier]}
             # If you need S3EventRecord, uncomment the import and use it here
@@ -112,7 +112,7 @@ def test_handler_multi_record(self):
             self.assertEqual(result['file_keys'][1], 'test-key2')
 
     def test_handler_read_event(self):
-        with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
+        with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
             importlib.reload(redis_sync)
             mock_event = {'read': 'myhash'}
             mock_read_event_response = {'field1': 'value1'}
diff --git a/lambdas/redis_sync/tests/test_handler_decorator.py b/lambdas/redis_sync/tests/test_handler_decorator.py
index 5b477fb89..550d78553 100644
--- a/lambdas/redis_sync/tests/test_handler_decorator.py
+++ b/lambdas/redis_sync/tests/test_handler_decorator.py
@@ -37,7 +37,7 @@ def setUp(self):
         self.mock_get_s3_records = self.get_s3_records_patcher.start()
         self.record_processor_patcher = patch("redis_sync.process_record")
         self.mock_record_processor = self.record_processor_patcher.start()
-        self.firehose_patcher = patch("log_decorator.firehose_client")
+        self.firehose_patcher = patch("common.log_decorator.firehose_client")
         self.mock_firehose_client = self.firehose_patcher.start()
 
         self.mock_firehose_client.put_record.return_value = True

From 79cd57ce0fff12074cab4a23588b06a17bc74166 Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Mon, 8 Sep 2025 11:09:17 +0100
Subject: [PATCH 04/20] sonar

---
 sonar-project.properties | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sonar-project.properties b/sonar-project.properties
index d179273a4..01e49930c 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -3,9 +3,9 @@ sonar.projectKey=NHSDigital_immunisation-fhir-api
 sonar.organization=nhsdigital
 sonar.host.url=https://sonarcloud.io
 sonar.python.version=3.11
-sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/temporary_sandbox/**,**/devtools/**,**/proxies/**,**/scripts/**,**/terraform/**,**/tests/**,redis_sync/src/log_decorator.py
+sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/temporary_sandbox/**,**/devtools/**,**/proxies/**,**/scripts/**,**/terraform/**,**/tests/**,lambdas/redis_sync/src/log_decorator.py
 sonar.python.coverage.reportPaths=backend-coverage.xml,delta-coverage.xml,ack-lambda-coverage.xml,filenameprocessor-coverage.xml,recordforwarder-coverage.xml,recordprocessor-coverage.xml,mesh_processor-coverage.xml,redis_sync-coverage.xml,mns_subscription-coverage.xml,id_sync-coverage.xml,shared-coverage.xml,batchprocessorfilter-coverage.xml
-sonar.cpd.exclusions=**/cache.py,**/authentication.py,**/test_cache.py,**/test_authentication.py,**/mns_service.py,**/errors.py,redis_sync/src/log_decorator.py,**/Dockerfile,lambdas/shared/src/common/**,filenameprocessor/src/logging_decorator.py,backend/src/fhir_service.py
+sonar.cpd.exclusions=**/cache.py,**/authentication.py,**/test_cache.py,**/test_authentication.py,**/mns_service.py,**/errors.py,lambdas/redis_sync/src/log_decorator.py,**/Dockerfile,lambdas/shared/src/common/**,filenameprocessor/src/logging_decorator.py,backend/src/fhir_service.py
 sonar.issue.ignore.multicriteria=exclude_snomed_urls,exclude_hl7_urls
 sonar.issue.ignore.multicriteria.exclude_snomed_urls.ruleKey=python:S5332
 sonar.issue.ignore.multicriteria.exclude_snomed_urls.resourceKey=**http://snomed\.info/sct**

From 4b3bb8e3501ca7cf400975f124102a026485c912 Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Mon, 8 Sep 2025 11:22:22 +0100
Subject: [PATCH 05/20] temp: coverage

---
 sonar-project.properties | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sonar-project.properties b/sonar-project.properties
index 01e49930c..6904c2cad 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -3,9 +3,9 @@ sonar.projectKey=NHSDigital_immunisation-fhir-api
 sonar.organization=nhsdigital
 sonar.host.url=https://sonarcloud.io
 sonar.python.version=3.11
-sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/temporary_sandbox/**,**/devtools/**,**/proxies/**,**/scripts/**,**/terraform/**,**/tests/**,lambdas/redis_sync/src/log_decorator.py
+sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/temporary_sandbox/**,**/devtools/**,**/proxies/**,**/scripts/**,**/terraform/**,**/tests/**,lambdas/redis_sync/src/**
 sonar.python.coverage.reportPaths=backend-coverage.xml,delta-coverage.xml,ack-lambda-coverage.xml,filenameprocessor-coverage.xml,recordforwarder-coverage.xml,recordprocessor-coverage.xml,mesh_processor-coverage.xml,redis_sync-coverage.xml,mns_subscription-coverage.xml,id_sync-coverage.xml,shared-coverage.xml,batchprocessorfilter-coverage.xml
-sonar.cpd.exclusions=**/cache.py,**/authentication.py,**/test_cache.py,**/test_authentication.py,**/mns_service.py,**/errors.py,lambdas/redis_sync/src/log_decorator.py,**/Dockerfile,lambdas/shared/src/common/**,filenameprocessor/src/logging_decorator.py,backend/src/fhir_service.py
+sonar.cpd.exclusions=**/cache.py,**/authentication.py,**/test_cache.py,**/test_authentication.py,**/mns_service.py,**/errors.py,**/Dockerfile,lambdas/shared/src/common/**,filenameprocessor/src/logging_decorator.py,backend/src/fhir_service.py
 sonar.issue.ignore.multicriteria=exclude_snomed_urls,exclude_hl7_urls
 sonar.issue.ignore.multicriteria.exclude_snomed_urls.ruleKey=python:S5332
 sonar.issue.ignore.multicriteria.exclude_snomed_urls.resourceKey=**http://snomed\.info/sct**

From d462ae317fd9a3b412d1528c2b3526cebda4f775 Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Mon, 8 Sep 2025 12:12:18 +0100
Subject: [PATCH 06/20] terraform, sonar, github

---
 .github/dependabot.yml | 2 +-
 .github/workflows/sonarcloud.yml | 8 ++++----
 Makefile | 2 +-
 immunisation-fhir-api.code-workspace | 2 +-
 lambdas/.coveragerc | 3 +++
 terraform/redis_sync_lambda.tf | 22 +++++++++++++++------
 6 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index 3b8a0256c..ddd7043d4 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -58,7 +58,7 @@
       - "/filenameprocessor"
      - "/mesh_processor"
       - "/recordprocessor"
-      - "/redis_sync"
+      - "/lambdas/redis_sync"
       - "/lambdas/id_sync"
       - "/lambdas/shared"
       - "/mns_subscription"
diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml
index d56208fca..39827c97f 100644
--- a/.github/workflows/sonarcloud.yml
+++ b/.github/workflows/sonarcloud.yml
@@ -135,15 +135,15 @@
           poetry run coverage xml -o ../mns_subscription-coverage.xml
 
       - name: Run unittest with redis_sync
-        working-directory: redis_sync
+        working-directory: lambdas/redis_sync
         id: redis_sync
         env:
-          PYTHONPATH: ${{ github.workspace }}/redis_sync/src:${{ github.workspace }}/redis_sync/tests
+          PYTHONPATH: ${{ env.LAMBDA_PATH }}/redis_sync/src:${{ env.SHARED_PATH }}/src
         continue-on-error: true
         run: |
           poetry install
-          poetry run coverage run -m unittest discover || echo "redis_sync tests failed" >> ../failed_tests.txt
-          poetry run coverage xml -o ../redis_sync-coverage.xml
+          poetry run coverage run --rcfile=.coveragerc --source=src -m unittest discover || echo "redis_sync tests failed" >> ../../failed_tests.txt
+          poetry run coverage xml -o ../../redis_sync-coverage.xml
 
       - name: Run unittest with shared
         working-directory: lambdas/shared
diff --git a/Makefile b/Makefile
index 0eb3d83c6..7dc1a63b0 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,6 @@
 SHELL=/usr/bin/env bash -euo pipefail
 
-PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS = ack_backend backend batch_processor_filter delta_backend filenameprocessor mesh_processor recordprocessor redis_sync lambdas/id_sync lambdas/shared mns_subscription
+PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS = ack_backend backend batch_processor_filter delta_backend filenameprocessor mesh_processor recordprocessor lambdas/redis_sync lambdas/id_sync lambdas/shared mns_subscription
 PYTHON_PROJECT_DIRS = e2e e2e_batch $(PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS)
 
 #Installs dependencies using poetry.
diff --git a/immunisation-fhir-api.code-workspace b/immunisation-fhir-api.code-workspace
index dabc71a68..b92caa122 100644
--- a/immunisation-fhir-api.code-workspace
+++ b/immunisation-fhir-api.code-workspace
@@ -28,7 +28,7 @@
             "path": "e2e_batch"
         },
         {
-            "path": "redis_sync"
+            "path": "lambdas/redis_sync"
         },
         {
             "path": "mns_subscription"
diff --git a/lambdas/.coveragerc b/lambdas/.coveragerc
index 908bb06bc..0b7b3a55a 100644
--- a/lambdas/.coveragerc
+++ b/lambdas/.coveragerc
@@ -6,3 +6,6 @@ omit =
     id_sync/tests/*
     id_sync/tests/*/*
     id_sync/tests/*/*/*
+    redis_sync/tests/*
+    redis_sync/tests/*/*
+    redis_sync/tests/*/*/*
diff --git a/terraform/redis_sync_lambda.tf b/terraform/redis_sync_lambda.tf
index ec7e34945..2fdc273cf 100644
--- a/terraform/redis_sync_lambda.tf
+++ b/terraform/redis_sync_lambda.tf
@@ -1,8 +1,16 @@
 # Define the directory containing the Docker image and calculate its SHA-256 hash for triggering redeployments
 locals {
-  redis_sync_lambda_dir = abspath("${path.root}/../redis_sync")
-  redis_sync_lambda_files = fileset(local.redis_sync_lambda_dir, "**")
+  shared_dir            = abspath("${path.root}/../shared")
+  redis_sync_lambda_dir = abspath("${path.root}/../redis_sync")
+
+  # Get files from both directories
+  shared_files            = fileset(local.shared_dir, "**")
+  redis_sync_lambda_files = fileset(local.redis_sync_lambda_dir, "**")
+
+  # Calculate SHA for both directories
+  shared_dir_sha            = sha1(join("", [for f in local.shared_files : filesha1("${local.shared_dir}/${f}")]))
   redis_sync_lambda_dir_sha = sha1(join("", [for f in local.redis_sync_lambda_files : filesha1("${local.redis_sync_lambda_dir}/${f}")]))
+  redis_sync_lambda_name = "${local.short_prefix}-redis_sync_lambda"
 }
 
 resource "aws_ecr_repository" "redis_sync_lambda_repository" {
@@ -15,11 +23,11 @@ resource "aws_ecr_repository" "redis_sync_lambda_repository" {
 
 # Module for building and pushing Docker image to ECR
 module "redis_sync_docker_image" {
-  source          = "terraform-aws-modules/lambda/aws//modules/docker-build"
-  version         = "8.0.1"
-
-  create_ecr_repo = false
-  ecr_repo        = aws_ecr_repository.redis_sync_lambda_repository.name
+  source           = "terraform-aws-modules/lambda/aws//modules/docker-build"
+  version          = "8.0.1"
+  docker_file_path = "./redis_sync/Dockerfile"
+  create_ecr_repo  = false
+  ecr_repo         = aws_ecr_repository.redis_sync_lambda_repository.name
   ecr_repo_lifecycle_policy = jsonencode({
     "rules" : [
       {

From 9444b1f85903eda0acb6e67ddb793a1b435b191c Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Mon, 8 Sep 2025 12:29:35 +0100
Subject: [PATCH 07/20] shared.tf

---
 terraform/id_sync_lambda.tf | 5 -----
 terraform/redis_sync_lambda.tf | 5 -----
 terraform/shared.tf | 8 ++++++++
 3 files changed, 8 insertions(+), 10 deletions(-)
 create mode 100644 terraform/shared.tf

diff --git a/terraform/id_sync_lambda.tf b/terraform/id_sync_lambda.tf
index f5fcfbea6..a5b9f51d9 100644
--- a/terraform/id_sync_lambda.tf
+++ b/terraform/id_sync_lambda.tf
@@ -1,14 +1,9 @@
 # Define the directory containing the Docker image and calculate its SHA-256 hash for triggering redeployments
 locals {
-  shared_dir         = abspath("${path.root}/../shared")
   id_sync_lambda_dir = abspath("${path.root}/../id_sync")
 
-  # Get files from both directories
-  shared_files         = fileset(local.shared_dir, "**")
   id_sync_lambda_files = fileset(local.id_sync_lambda_dir, "**")
 
-  # Calculate SHA for both directories
-  shared_dir_sha         = sha1(join("", [for f in local.shared_files : filesha1("${local.shared_dir}/${f}")]))
   id_sync_lambda_dir_sha = sha1(join("", [for f in local.id_sync_lambda_files : filesha1("${local.id_sync_lambda_dir}/${f}")]))
   id_sync_lambda_name = "${local.short_prefix}-id_sync_lambda"
 }
diff --git a/terraform/redis_sync_lambda.tf b/terraform/redis_sync_lambda.tf
index 2fdc273cf..44a9646e9 100644
--- a/terraform/redis_sync_lambda.tf
+++ b/terraform/redis_sync_lambda.tf
@@ -1,14 +1,9 @@
 # Define the directory containing the Docker image and calculate its SHA-256 hash for triggering redeployments
 locals {
-  shared_dir            = abspath("${path.root}/../shared")
   redis_sync_lambda_dir = abspath("${path.root}/../redis_sync")
 
-  # Get files from both directories
-  shared_files            = fileset(local.shared_dir, "**")
   redis_sync_lambda_files = fileset(local.redis_sync_lambda_dir, "**")
 
-  # Calculate SHA for both directories
-  shared_dir_sha            = sha1(join("", [for f in local.shared_files : filesha1("${local.shared_dir}/${f}")]))
   redis_sync_lambda_dir_sha = sha1(join("", [for f in local.redis_sync_lambda_files : filesha1("${local.redis_sync_lambda_dir}/${f}")]))
   redis_sync_lambda_name = "${local.short_prefix}-redis_sync_lambda"
 }
diff --git a/terraform/shared.tf b/terraform/shared.tf
new file mode 100644
index 000000000..fa2c9d694
--- /dev/null
+++ b/terraform/shared.tf
@@ -0,0 +1,8 @@
+# Define locals for shared lambdas
+locals {
+  shared_dir = abspath("${path.root}/../shared")
+
+  shared_files = fileset(local.shared_dir, "**")
+
+  shared_dir_sha = sha1(join("", [for f in local.shared_files : filesha1("${local.shared_dir}/${f}")]))
+}

From a4a36a8f5679138397485f45212caae88e68cd18 Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Mon, 8 Sep 2025 13:04:28 +0100
Subject: [PATCH 08/20] redis_sync_lambda.tf

---
 terraform/redis_sync_lambda.tf | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/terraform/redis_sync_lambda.tf b/terraform/redis_sync_lambda.tf
index 44a9646e9..fe7fac5b0 100644
--- a/terraform/redis_sync_lambda.tf
+++ b/terraform/redis_sync_lambda.tf
@@ -20,7 +20,7 @@ resource "aws_ecr_repository" "redis_sync_lambda_repository" {
 module "redis_sync_docker_image" {
   source           = "terraform-aws-modules/lambda/aws//modules/docker-build"
   version          = "8.0.1"
-  docker_file_path = "./redis_sync/Dockerfile"
+
   create_ecr_repo  = false
   ecr_repo         = aws_ecr_repository.redis_sync_lambda_repository.name
   ecr_repo_lifecycle_policy = jsonencode({

From 233b26f91575a968c85e12911db5edbcd050b0f5 Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Mon, 8 Sep 2025 13:35:48 +0100
Subject: [PATCH 09/20] redis_sync_lambda.tf II

---
 terraform/redis_sync_lambda.tf | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/terraform/redis_sync_lambda.tf b/terraform/redis_sync_lambda.tf
index fe7fac5b0..a915a4b6a 100644
--- a/terraform/redis_sync_lambda.tf
+++ b/terraform/redis_sync_lambda.tf
@@ -20,7 +20,7 @@
 module "redis_sync_docker_image" {
   source           = "terraform-aws-modules/lambda/aws//modules/docker-build"
   version          = "8.0.1"
-
+  docker_file_path = "./redis_sync/Dockerfile"
   create_ecr_repo  = false
   ecr_repo         = aws_ecr_repository.redis_sync_lambda_repository.name
   ecr_repo_lifecycle_policy = jsonencode({
@@ -42,7 +42,7 @@ module "redis_sync_docker_image" {
 
   platform      = "linux/amd64"
   use_image_tag = false
-  source_path   = local.redis_sync_lambda_dir
+  source_path   = abspath("${path.root}/..")
   triggers = {
     dir_sha = local.redis_sync_lambda_dir_sha
   }

From 79afd23efd986040aa06ac46f1f81bc5fa949751 Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Mon, 8 Sep 2025 14:36:34 +0100
Subject: [PATCH 10/20] sonar.exclusions

---
 sonar-project.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sonar-project.properties b/sonar-project.properties
index 6904c2cad..c44de3cb9 100644
--- a/sonar-project.properties
+++ b/sonar-project.properties
@@ -3,7 +3,7 @@ sonar.projectKey=NHSDigital_immunisation-fhir-api
 sonar.organization=nhsdigital
 sonar.host.url=https://sonarcloud.io
 sonar.python.version=3.11
-sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/temporary_sandbox/**,**/devtools/**,**/proxies/**,**/scripts/**,**/terraform/**,**/tests/**,lambdas/redis_sync/src/**
+sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/temporary_sandbox/**,**/devtools/**,**/proxies/**,**/scripts/**,**/terraform/**,**/tests/**
 sonar.python.coverage.reportPaths=backend-coverage.xml,delta-coverage.xml,ack-lambda-coverage.xml,filenameprocessor-coverage.xml,recordforwarder-coverage.xml,recordprocessor-coverage.xml,mesh_processor-coverage.xml,redis_sync-coverage.xml,mns_subscription-coverage.xml,id_sync-coverage.xml,shared-coverage.xml,batchprocessorfilter-coverage.xml
 sonar.cpd.exclusions=**/cache.py,**/authentication.py,**/test_cache.py,**/test_authentication.py,**/mns_service.py,**/errors.py,**/Dockerfile,lambdas/shared/src/common/**,filenameprocessor/src/logging_decorator.py,backend/src/fhir_service.py
 sonar.issue.ignore.multicriteria=exclude_snomed_urls,exclude_hl7_urls
 sonar.issue.ignore.multicriteria.exclude_snomed_urls.ruleKey=python:S5332
 sonar.issue.ignore.multicriteria.exclude_snomed_urls.resourceKey=**http://snomed\.info/sct**

From 3893008f22440930cad0c78ede904dfe5bd63fa7 Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Tue, 16 Sep 2025 17:09:27 +0100
Subject: [PATCH 11/20] smells

---
 lambdas/redis_sync/src/redis_sync.py | 49 +++++++++++++++------------
 1 file changed, 27 insertions(+), 22 deletions(-)

diff --git a/lambdas/redis_sync/src/redis_sync.py b/lambdas/redis_sync/src/redis_sync.py
index bb6b30999..3b38f213a 100644
--- a/lambdas/redis_sync/src/redis_sync.py
+++ b/lambdas/redis_sync/src/redis_sync.py
@@ -9,39 +9,44 @@
record_count = len(s3_event.get_s3_records()) - if record_count == 0: - logger.info("No records found in event") - return {"status": "success", "message": "No records found in event"} + s3_records = S3Event(event).get_s3_records() + if not s3_records: + logger.info(no_records) + return {"status": "success", "message": no_records} else: - error_count = 0 - file_keys = [] - for record in s3_event.get_s3_records(): - record_result = process_record(record) - file_keys.append(record_result["file_key"]) - if record_result["status"] == "error": - error_count += 1 - if error_count > 0: - logger.error("Processed %d records with %d errors", record_count, error_count) - return {"status": "error", "message": f"Processed {record_count} records with {error_count} errors", - "file_keys": file_keys} - else: - logger.info("Successfully processed all %d records", record_count) - return {"status": "success", "message": f"Successfully processed {record_count} records", - "file_keys": file_keys} + return _process_all_records(s3_records) else: - logger.info("No records found in event") - return {"status": "success", "message": "No records found in event"} + logger.info(no_records) + return {"status": "success", "message": no_records} except Exception: logger.exception("Error processing S3 event") From 155204eb946adb9742b1e0b06526ada5470fca87 Mon Sep 17 00:00:00 2001 From: James Wharmby Date: Wed, 17 Sep 2025 16:41:24 +0100 Subject: [PATCH 12/20] s3_event/s3_reader --- lambdas/redis_sync/src/record_processor.py | 4 ++-- lambdas/redis_sync/src/redis_cacher.py | 4 ++-- lambdas/redis_sync/src/redis_sync.py | 6 +++--- lambdas/redis_sync/src/transform_map.py | 2 +- lambdas/redis_sync/tests/test_handler.py | 2 +- lambdas/redis_sync/tests/test_handler_decorator.py | 4 ++-- lambdas/redis_sync/tests/test_record_processor.py | 2 +- lambdas/redis_sync/tests/test_s3_reader.py | 4 ++-- lambdas/{redis_sync/src => shared/src/common}/s3_event.py | 8 +++++--- .../{redis_sync/src => shared/src/common}/s3_reader.py | 0 10 files changed, 19 insertions(+), 17 deletions(-) rename lambdas/{redis_sync/src => shared/src/common}/s3_event.py (80%) rename lambdas/{redis_sync/src => shared/src/common}/s3_reader.py (100%) diff --git a/lambdas/redis_sync/src/record_processor.py b/lambdas/redis_sync/src/record_processor.py index 7430a86bd..40232714b 100644 --- a/lambdas/redis_sync/src/record_processor.py +++ b/lambdas/redis_sync/src/record_processor.py @@ -1,6 +1,6 @@ -from common.clients import logger -from s3_event import S3EventRecord from redis_cacher import RedisCacher +from common.clients import logger +from common.s3_event import S3EventRecord ''' Record Processor This module processes individual S3 records from an event. 
diff --git a/lambdas/redis_sync/src/redis_cacher.py b/lambdas/redis_sync/src/redis_cacher.py
index 276652944..64b7d63c8 100644
--- a/lambdas/redis_sync/src/redis_cacher.py
+++ b/lambdas/redis_sync/src/redis_cacher.py
@@ -1,9 +1,9 @@
 "Upload the content from a config file in S3 to ElastiCache (Redis)"
 import json
 
-from common.clients import redis_client, logger
 from transform_map import transform_map
-from s3_reader import S3Reader
+from common.clients import redis_client, logger
+from common.s3_reader import S3Reader
 
 
 class RedisCacher:
diff --git a/lambdas/redis_sync/src/redis_sync.py b/lambdas/redis_sync/src/redis_sync.py
index 3b38f213a..b9b32bda4 100644
--- a/lambdas/redis_sync/src/redis_sync.py
+++ b/lambdas/redis_sync/src/redis_sync.py
@@ -1,8 +1,8 @@
-from common.clients import redis_client, STREAM_NAME, logger
-from s3_event import S3Event
-from record_processor import process_record
 from event_read import read_event
+from record_processor import process_record
+from common.clients import redis_client, STREAM_NAME, logger
 from common.log_decorator import logging_decorator
+from common.s3_event import S3Event
 '''
 Event Processor
 The Business Logic for the Redis Sync Lambda Function.
diff --git a/lambdas/redis_sync/src/transform_map.py b/lambdas/redis_sync/src/transform_map.py
index 8af72f8ce..e602a2f6f 100644
--- a/lambdas/redis_sync/src/transform_map.py
+++ b/lambdas/redis_sync/src/transform_map.py
@@ -1,6 +1,6 @@
 from constants import RedisCacheKey
-from common.clients import logger
 from transform_configs import transform_vaccine_map, transform_supplier_permissions
+from common.clients import logger
 '''
 Transform config file to format required in REDIS cache.
 '''
diff --git a/lambdas/redis_sync/tests/test_handler.py b/lambdas/redis_sync/tests/test_handler.py
index 8ae1cb2a7..382371684 100644
--- a/lambdas/redis_sync/tests/test_handler.py
+++ b/lambdas/redis_sync/tests/test_handler.py
@@ -27,7 +27,7 @@ def setUp(self):
         self.mock_logger_error = self.logger_error_patcher.start()
         self.logger_exception_patcher = patch("logging.Logger.exception")
         self.mock_logger_exception = self.logger_exception_patcher.start()
-        self.get_s3_records_patcher = patch("s3_event.S3Event.get_s3_records")
+        self.get_s3_records_patcher = patch("common.s3_event.S3Event.get_s3_records")
         self.mock_get_s3_records = self.get_s3_records_patcher.start()
         self.record_processor_patcher = patch("redis_sync.process_record")
         self.mock_record_processor = self.record_processor_patcher.start()
diff --git a/lambdas/redis_sync/tests/test_handler_decorator.py b/lambdas/redis_sync/tests/test_handler_decorator.py
index 550d78553..7775d183e 100644
--- a/lambdas/redis_sync/tests/test_handler_decorator.py
+++ b/lambdas/redis_sync/tests/test_handler_decorator.py
@@ -3,8 +3,8 @@
 import json
 from unittest.mock import patch
 from redis_sync import handler
-from s3_event import S3EventRecord
 from constants import RedisCacheKey
+from common.s3_event import S3EventRecord
 
 
 class TestHandlerDecorator(unittest.TestCase):
@@ -33,7 +33,7 @@ def setUp(self):
         self.mock_logger_error = self.logger_error_patcher.start()
         self.logger_exception_patcher = patch("logging.Logger.exception")
         self.mock_logger_exception = self.logger_exception_patcher.start()
-        self.get_s3_records_patcher = patch("s3_event.S3Event.get_s3_records")
+        self.get_s3_records_patcher = patch("common.s3_event.S3Event.get_s3_records")
         self.mock_get_s3_records = self.get_s3_records_patcher.start()
         self.record_processor_patcher = patch("redis_sync.process_record")
self.mock_record_processor = self.record_processor_patcher.start() diff --git a/lambdas/redis_sync/tests/test_record_processor.py b/lambdas/redis_sync/tests/test_record_processor.py index baec40b96..d1036ff60 100644 --- a/lambdas/redis_sync/tests/test_record_processor.py +++ b/lambdas/redis_sync/tests/test_record_processor.py @@ -2,8 +2,8 @@ import unittest from unittest.mock import patch -from s3_event import S3EventRecord from constants import RedisCacheKey +from common.s3_event import S3EventRecord class TestRecordProcessor(unittest.TestCase): diff --git a/lambdas/redis_sync/tests/test_s3_reader.py b/lambdas/redis_sync/tests/test_s3_reader.py index 4b33e58d4..4be69a2c7 100644 --- a/lambdas/redis_sync/tests/test_s3_reader.py +++ b/lambdas/redis_sync/tests/test_s3_reader.py @@ -1,6 +1,6 @@ import unittest from unittest.mock import patch, MagicMock -from s3_reader import S3Reader +from common.s3_reader import S3Reader class TestS3Reader(unittest.TestCase): @@ -10,7 +10,7 @@ def setUp(self): self.key = "test.json" # Patch s3_client - self.s3_client_patcher = patch("s3_reader.s3_client") + self.s3_client_patcher = patch("common.s3_reader.s3_client") self.mock_s3_client = self.s3_client_patcher.start() self.logger_info_patcher = patch("logging.Logger.info") diff --git a/lambdas/redis_sync/src/s3_event.py b/lambdas/shared/src/common/s3_event.py similarity index 80% rename from lambdas/redis_sync/src/s3_event.py rename to lambdas/shared/src/common/s3_event.py index 164bc2143..956f8c11a 100644 --- a/lambdas/redis_sync/src/s3_event.py +++ b/lambdas/shared/src/common/s3_event.py @@ -1,3 +1,5 @@ +from common.aws_lambda_event import AwsLambdaEvent + class S3EventRecord: """ S3 Event Parsing Utilities @@ -18,10 +20,10 @@ def get_object_key(self): return ret -class S3Event: +class S3Event(AwsLambdaEvent): def __init__(self, event): - self.event = event + super().__init__(event) def get_s3_records(self): # return a list of S3EventRecord objects - stripping out the s3 key - return [S3EventRecord(record['s3']) for record in self.event['Records']] + return [S3EventRecord(record['s3']) for record in self.records] diff --git a/lambdas/redis_sync/src/s3_reader.py b/lambdas/shared/src/common/s3_reader.py similarity index 100% rename from lambdas/redis_sync/src/s3_reader.py rename to lambdas/shared/src/common/s3_reader.py From ec31ce57b9e26c93e7a3de0b607d697bf1f6b6d2 Mon Sep 17 00:00:00 2001 From: James Wharmby Date: Wed, 17 Sep 2025 16:43:53 +0100 Subject: [PATCH 13/20] lint --- lambdas/shared/src/common/s3_event.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lambdas/shared/src/common/s3_event.py b/lambdas/shared/src/common/s3_event.py index 956f8c11a..7dc4b098f 100644 --- a/lambdas/shared/src/common/s3_event.py +++ b/lambdas/shared/src/common/s3_event.py @@ -1,5 +1,6 @@ from common.aws_lambda_event import AwsLambdaEvent + class S3EventRecord: """ S3 Event Parsing Utilities From a3f4c46eb72c9068618dfdbacde9dfe571485d51 Mon Sep 17 00:00:00 2001 From: James Wharmby Date: Wed, 17 Sep 2025 16:48:59 +0100 Subject: [PATCH 14/20] imports --- lambdas/id_sync/src/id_sync.py | 5 ++--- lambdas/id_sync/src/pds_details.py | 6 +++--- lambdas/shared/src/common/authentication.py | 2 +- lambdas/shared/src/common/pds_service.py | 2 +- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/lambdas/id_sync/src/id_sync.py b/lambdas/id_sync/src/id_sync.py index 91855efef..05d355d79 100644 --- a/lambdas/id_sync/src/id_sync.py +++ b/lambdas/id_sync/src/id_sync.py @@ -1,7 +1,6 @@ -from common.clients import logger -from 
common.clients import STREAM_NAME -from common.log_decorator import logging_decorator from common.aws_lambda_event import AwsLambdaEvent +from common.clients import logger, STREAM_NAME +from common.log_decorator import logging_decorator from exceptions.id_sync_exception import IdSyncException from record_processor import process_record ''' diff --git a/lambdas/id_sync/src/pds_details.py b/lambdas/id_sync/src/pds_details.py index e8fecb5a7..81da574e9 100644 --- a/lambdas/id_sync/src/pds_details.py +++ b/lambdas/id_sync/src/pds_details.py @@ -2,11 +2,11 @@ Operations related to PDS (Patient Demographic Service) ''' import tempfile -from common.clients import logger, secrets_manager_client -from common.cache import Cache from os_vars import get_pds_env -from common.pds_service import PdsService from common.authentication import AppRestrictedAuth, Service +from common.cache import Cache +from common.clients import logger, secrets_manager_client +from common.pds_service import PdsService from exceptions.id_sync_exception import IdSyncException pds_env = get_pds_env() diff --git a/lambdas/shared/src/common/authentication.py b/lambdas/shared/src/common/authentication.py index c9d14525f..f4663605d 100644 --- a/lambdas/shared/src/common/authentication.py +++ b/lambdas/shared/src/common/authentication.py @@ -7,8 +7,8 @@ from enum import Enum from .cache import Cache -from common.models.errors import UnhandledResponseError from common.clients import logger +from common.models.errors import UnhandledResponseError class Service(Enum): diff --git a/lambdas/shared/src/common/pds_service.py b/lambdas/shared/src/common/pds_service.py index 859488493..c334bb963 100644 --- a/lambdas/shared/src/common/pds_service.py +++ b/lambdas/shared/src/common/pds_service.py @@ -2,8 +2,8 @@ import uuid from common.authentication import AppRestrictedAuth -from common.models.errors import UnhandledResponseError from common.clients import logger +from common.models.errors import UnhandledResponseError class PdsService: From de50dae9f03bea1d66b5981eb923ba725d2957dc Mon Sep 17 00:00:00 2001 From: James Wharmby Date: Wed, 17 Sep 2025 17:08:16 +0100 Subject: [PATCH 15/20] coveragerc --- lambdas/redis_sync/.coveragerc | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 lambdas/redis_sync/.coveragerc diff --git a/lambdas/redis_sync/.coveragerc b/lambdas/redis_sync/.coveragerc new file mode 100644 index 000000000..f222a4f26 --- /dev/null +++ b/lambdas/redis_sync/.coveragerc @@ -0,0 +1,14 @@ +[report] +omit = + tests/* + tests/*/* + tests/*/*/* + tests/test_*.py + tests/*/test_*.py + tests/*/*/test_*.py + redis_sync/tests/* + redis_sync/tests/*/* + redis_sync/tests/*/*/* + ../shared/tests/* + ../shared/tests/*/* + ../shared/tests/*/*/* From d3156f7448bcd71ef4ee0ba9410e0edcb3eb66ec Mon Sep 17 00:00:00 2001 From: James Wharmby Date: Wed, 17 Sep 2025 17:29:07 +0100 Subject: [PATCH 16/20] test: coverage --- .github/workflows/sonarcloud.yml | 2 +- lambdas/.coveragerc | 3 --- lambdas/redis_sync/.coveragerc | 14 -------------- 3 files changed, 1 insertion(+), 18 deletions(-) delete mode 100644 lambdas/redis_sync/.coveragerc diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml index 39827c97f..b279b012f 100644 --- a/.github/workflows/sonarcloud.yml +++ b/.github/workflows/sonarcloud.yml @@ -142,7 +142,7 @@ jobs: continue-on-error: true run: | poetry install - poetry run coverage run --rcfile=.coveragerc --source=src -m unittest discover || echo "redis_sync tests failed" >> 
../../failed_tests.txt + poetry run coverage run --source=src -m unittest discover || echo "redis_sync tests failed" >> ../../failed_tests.txt poetry run coverage xml -o ../../redis_sync-coverage.xml - name: Run unittest with shared diff --git a/lambdas/.coveragerc b/lambdas/.coveragerc index 0b7b3a55a..908bb06bc 100644 --- a/lambdas/.coveragerc +++ b/lambdas/.coveragerc @@ -6,6 +6,3 @@ omit = id_sync/tests/* id_sync/tests/*/* id_sync/tests/*/*/* - redis_sync/tests/* - redis_sync/tests/*/* - redis_sync/tests/*/*/* diff --git a/lambdas/redis_sync/.coveragerc b/lambdas/redis_sync/.coveragerc deleted file mode 100644 index f222a4f26..000000000 --- a/lambdas/redis_sync/.coveragerc +++ /dev/null @@ -1,14 +0,0 @@ -[report] -omit = - tests/* - tests/*/* - tests/*/*/* - tests/test_*.py - tests/*/test_*.py - tests/*/*/test_*.py - redis_sync/tests/* - redis_sync/tests/*/* - redis_sync/tests/*/*/* - ../shared/tests/* - ../shared/tests/*/* - ../shared/tests/*/*/* From 2006460440f4095777e382f69e14c1614878a504 Mon Sep 17 00:00:00 2001 From: James Wharmby Date: Wed, 17 Sep 2025 17:47:34 +0100 Subject: [PATCH 17/20] move test_s3_reader --- .../tests => shared/tests/test_common}/test_s3_reader.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename lambdas/{redis_sync/tests => shared/tests/test_common}/test_s3_reader.py (100%) diff --git a/lambdas/redis_sync/tests/test_s3_reader.py b/lambdas/shared/tests/test_common/test_s3_reader.py similarity index 100% rename from lambdas/redis_sync/tests/test_s3_reader.py rename to lambdas/shared/tests/test_common/test_s3_reader.py From 3dd03dff2e9c485c74043c7c19d3889a9fe4c767 Mon Sep 17 00:00:00 2001 From: James Wharmby Date: Thu, 18 Sep 2025 13:39:03 +0100 Subject: [PATCH 18/20] test_s3_event --- .../shared/tests/test_common/test_s3_event.py | 95 +++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 lambdas/shared/tests/test_common/test_s3_event.py diff --git a/lambdas/shared/tests/test_common/test_s3_event.py b/lambdas/shared/tests/test_common/test_s3_event.py new file mode 100644 index 000000000..562ef2817 --- /dev/null +++ b/lambdas/shared/tests/test_common/test_s3_event.py @@ -0,0 +1,95 @@ +import unittest +from common.aws_lambda_event import AwsEventType +from common.s3_event import S3Event + + +class TestS3Event(unittest.TestCase): + + def setUp(self): + """Set up test fixtures""" + self.s3_record_dict = { + "eventVersion": "2.1", + "eventSource": "aws:s3", + "awsRegion": "us-west-2", + "eventTime": "1970-01-01T00:00:00.000Z", + "eventName": "ObjectCreated:Put", + "userIdentity": { + "principalId": "my-example-user" + }, + "requestParameters": { + "sourceIPAddress": "172.16.0.1" + }, + "responseElements": { + "x-amz-request-id": "C3D13FE58DE4C810", + "x-amz-id-2": "FMyUVURIY8/IgAtTv8xRjskZQpcIZ9KG4V5Wp6S7S/JRWeUWerMUE5JgHvANOjpD" + }, + "s3": { + "s3SchemaVersion": "1.0", + "configurationId": "my-test-config", + "bucket": { + "name": "my-test-bucket", + "ownerIdentity": { + "principalId": "my-example-id" + }, + "arn": "arn:aws:s3:::my-test-bucket" + }, + "object": { + "key": "my-test-key.csv", + "size": 1024, + "eTag": "d41d8cd98f00b204e9800998ecf8427e", + "versionId": "096fKKXTRTtl3on89fVO.nfljtsv6qko", + "sequencer": "0055AED6DCD90281E5" + } + } + } + + def test_s3_event(self): + """Test initialization with S3 event""" + event = { + 'Records': [self.s3_record_dict], + 'eventSource': 'aws:s3' + } + + s3_event = S3Event(event) + + self.assertEqual(s3_event.event_type, AwsEventType.S3) + self.assertEqual(len(s3_event.records), 1) + + 
s3_records = s3_event.get_s3_records()
+        self.assertEqual(len(s3_records), 1)
+        self.assertEqual(s3_records[0].get_bucket_name(), "my-test-bucket")
+        self.assertEqual(s3_records[0].get_object_key(), "my-test-key.csv")
+
+    def test_s3_event_with_multiple_records(self):
+        """Test initialization with multiple s3 records"""
+        s3_record_2 = {**self.s3_record_dict, 's3': {**self.s3_record_dict['s3'], 'bucket': dict(self.s3_record_dict['s3']['bucket'])}}  # copy the nested dicts: a plain .copy() would share them, so the rename below would also rename the first record's bucket
+        s3_record_2['s3']['bucket']['name'] = 'my-second-test-bucket'
+
+        event = {
+            'Records': [self.s3_record_dict, s3_record_2],
+            'eventSource': 'aws:s3'
+        }
+
+        s3_event = S3Event(event)
+
+        self.assertEqual(s3_event.event_type, AwsEventType.S3)
+        self.assertEqual(len(s3_event.records), 2)
+
+        s3_records = s3_event.get_s3_records()
+        self.assertEqual(len(s3_records), 2)
+        self.assertEqual(s3_records[1].get_bucket_name(), "my-second-test-bucket")
+
+    def test_s3_event_with_no_records(self):
+        """Test initialization with no s3 records"""
+        event = {
+            'Records': [],
+            'eventSource': 'aws:s3'
+        }
+
+        s3_event = S3Event(event)
+
+        self.assertEqual(s3_event.event_type, AwsEventType.S3)
+        self.assertEqual(len(s3_event.records), 0)
+
+        s3_records = s3_event.get_s3_records()
+        self.assertEqual(len(s3_records), 0)

From 1b8bae0ba41aa735fa0d7ea67ab297e62bd991bc Mon Sep 17 00:00:00 2001
From: James Wharmby
Date: Fri, 19 Sep 2025 10:24:11 +0100
Subject: [PATCH 19/20] typo

---
 lambdas/id_sync/src/id_sync.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/lambdas/id_sync/src/id_sync.py b/lambdas/id_sync/src/id_sync.py
index 05d355d79..fe0fbc234 100644
--- a/lambdas/id_sync/src/id_sync.py
+++ b/lambdas/id_sync/src/id_sync.py
@@ -1,3 +1,11 @@
+"""
+- Parses the incoming AWS event into `AwsLambdaEvent` and iterates its `records`.
+- Delegates each record to `process_record` and collects `nhs_number` from each result.
+- If any record has status == "error", raises `IdSyncException` with the aggregated nhs_numbers.
+- Any unexpected error is wrapped in `IdSyncException(message="Error processing id_sync event")`.
+""" + +from typing import Any, Dict from common.aws_lambda_event import AwsLambdaEvent from common.clients import logger, STREAM_NAME from common.log_decorator import logging_decorator From 4adb7205ae13a573d08edfcb028ff96140a7b6bc Mon Sep 17 00:00:00 2001 From: James Wharmby Date: Fri, 19 Sep 2025 13:55:07 +0100 Subject: [PATCH 20/20] fixed redis_client --- lambdas/redis_sync/src/redis_cacher.py | 3 ++- lambdas/redis_sync/src/redis_sync.py | 4 ++-- lambdas/redis_sync/tests/test_redis_cacher.py | 2 +- lambdas/shared/src/common/clients.py | 11 +++++++++-- lambdas/shared/tests/test_common/test_clients.py | 12 +++++------- 5 files changed, 19 insertions(+), 13 deletions(-) diff --git a/lambdas/redis_sync/src/redis_cacher.py b/lambdas/redis_sync/src/redis_cacher.py index 64b7d63c8..c9a40e995 100644 --- a/lambdas/redis_sync/src/redis_cacher.py +++ b/lambdas/redis_sync/src/redis_cacher.py @@ -2,7 +2,7 @@ import json from transform_map import transform_map -from common.clients import redis_client, logger +from common.clients import get_redis_client, logger from common.s3_reader import S3Reader @@ -24,6 +24,7 @@ def upload(bucket_name: str, file_key: str) -> dict: # Transform redis_mappings = transform_map(config_file_content, file_key) + redis_client = get_redis_client() for key, mapping in redis_mappings.items(): safe_mapping = { k: json.dumps(v) if isinstance(v, list) else v diff --git a/lambdas/redis_sync/src/redis_sync.py b/lambdas/redis_sync/src/redis_sync.py index b9b32bda4..7153738e3 100644 --- a/lambdas/redis_sync/src/redis_sync.py +++ b/lambdas/redis_sync/src/redis_sync.py @@ -1,6 +1,6 @@ from event_read import read_event from record_processor import process_record -from common.clients import redis_client, STREAM_NAME, logger +from common.clients import get_redis_client, STREAM_NAME, logger from common.log_decorator import logging_decorator from common.s3_event import S3Event ''' @@ -35,7 +35,7 @@ def handler(event, _): no_records = "No records found in event" # check if the event requires a read, ie {"read": "my-hashmap"} if "read" in event: - return read_event(redis_client, event, logger) + return read_event(get_redis_client(), event, logger) elif "Records" in event: logger.info("Processing S3 event with %d records", len(event.get('Records', []))) s3_records = S3Event(event).get_s3_records() diff --git a/lambdas/redis_sync/tests/test_redis_cacher.py b/lambdas/redis_sync/tests/test_redis_cacher.py index 5792cbc4b..642fa9bd1 100644 --- a/lambdas/redis_sync/tests/test_redis_cacher.py +++ b/lambdas/redis_sync/tests/test_redis_cacher.py @@ -11,7 +11,7 @@ def setUp(self): self.mock_s3_reader = self.s3_reader_patcher.start() self.transform_map_patcher = patch("redis_cacher.transform_map") self.mock_transform_map = self.transform_map_patcher.start() - self.redis_client_patcher = patch("redis_cacher.redis_client") + self.redis_client_patcher = patch("common.clients.redis_client") self.mock_redis_client = self.redis_client_patcher.start() self.logger_info_patcher = patch("logging.Logger.info") self.mock_logger_info = self.logger_info_patcher.start() diff --git a/lambdas/shared/src/common/clients.py b/lambdas/shared/src/common/clients.py index 966eab37b..5e8be5e8c 100644 --- a/lambdas/shared/src/common/clients.py +++ b/lambdas/shared/src/common/clients.py @@ -22,5 +22,12 @@ dynamodb_resource = boto3_resource("dynamodb", region_name=REGION_NAME) dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME) -logger.info(f"Connecting to Redis at {REDIS_HOST}:{REDIS_PORT}") -redis_client = 
redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True) +redis_client = None + + +def get_redis_client(): + global redis_client + if redis_client is None: + logger.info(f"Connecting to Redis at {REDIS_HOST}:{REDIS_PORT}") + redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True) + return redis_client diff --git a/lambdas/shared/tests/test_common/test_clients.py b/lambdas/shared/tests/test_common/test_clients.py index 8e5da75fd..c07770bf9 100644 --- a/lambdas/shared/tests/test_common/test_clients.py +++ b/lambdas/shared/tests/test_common/test_clients.py @@ -56,13 +56,9 @@ def test_firehose_client(self): self.mock_boto3_client.assert_any_call("firehose", region_name=self.AWS_REGION) def test_redis_client(self): - ''' Test redis client is created with correct parameters ''' + ''' Test redis client is not initialized on import ''' importlib.reload(clients) - self.mock_redis.assert_called_once_with( - host=self.REDIS_HOST, - port=self.REDIS_PORT, - decode_responses=True - ) + self.mock_redis.assert_not_called() def test_logging_setup(self): ''' Test logging is set up correctly ''' @@ -75,8 +71,10 @@ def test_logging_configuration(self): clients.logger.setLevel.assert_called_once_with("INFO") def test_redis_client_initialization(self): - ''' Test redis client initialization ''' + ''' Test redis client is initialized exactly once even with multiple invocations''' importlib.reload(clients) + clients.get_redis_client() + clients.get_redis_client() self.mock_redis.assert_called_once_with(host=self.REDIS_HOST, port=self.REDIS_PORT, decode_responses=True) self.assertTrue(hasattr(clients, 'redis_client')) self.assertIsInstance(clients.redis_client, self.mock_redis.return_value.__class__)
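To make the effect of the lazy initialisation above concrete: importing `common.clients` no longer opens a Redis connection, the first `get_redis_client()` call does, and subsequent calls reuse the same client. That is also why the tests patch the module global rather than the constructor. A minimal sketch under those assumptions (the test class and method names here are illustrative, not from the series):

import unittest
from unittest.mock import MagicMock, patch

from common import clients


class TestLazyRedisClient(unittest.TestCase):
    def test_patched_global_short_circuits_connection(self):
        # Pre-setting the global to a mock means the `if redis_client is None`
        # guard inside get_redis_client() never fires, so no real
        # redis.StrictRedis connection is ever attempted.
        with patch("common.clients.redis_client", MagicMock()) as mock_client:
            self.assertIs(clients.get_redis_client(), mock_client)

Deferring the connection like this keeps module import cheap and avoids connection attempts at import time in environments where Redis is unreachable, such as unit tests or cold starts without network access.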