diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 3b8a0256c..ddd7043d4 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -58,7 +58,7 @@ updates: - "/filenameprocessor" - "/mesh_processor" - "/recordprocessor" - - "/redis_sync" + - "/lambdas/redis_sync" - "/lambdas/id_sync" - "/lambdas/shared" - "/mns_subscription" diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml index d56208fca..b279b012f 100644 --- a/.github/workflows/sonarcloud.yml +++ b/.github/workflows/sonarcloud.yml @@ -135,15 +135,15 @@ jobs: poetry run coverage xml -o ../mns_subscription-coverage.xml - name: Run unittest with redis_sync - working-directory: redis_sync + working-directory: lambdas/redis_sync id: redis_sync env: - PYTHONPATH: ${{ github.workspace }}/redis_sync/src:${{ github.workspace }}/redis_sync/tests + PYTHONPATH: ${{ env.LAMBDA_PATH }}/redis_sync/src:${{ env.SHARED_PATH }}/src continue-on-error: true run: | poetry install - poetry run coverage run -m unittest discover || echo "redis_sync tests failed" >> ../failed_tests.txt - poetry run coverage xml -o ../redis_sync-coverage.xml + poetry run coverage run --source=src -m unittest discover || echo "redis_sync tests failed" >> ../../failed_tests.txt + poetry run coverage xml -o ../../redis_sync-coverage.xml - name: Run unittest with shared working-directory: lambdas/shared diff --git a/Makefile b/Makefile index 0eb3d83c6..7dc1a63b0 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ SHELL=/usr/bin/env bash -euo pipefail -PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS = ack_backend backend batch_processor_filter delta_backend filenameprocessor mesh_processor recordprocessor redis_sync lambdas/id_sync lambdas/shared mns_subscription +PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS = ack_backend backend batch_processor_filter delta_backend filenameprocessor mesh_processor recordprocessor lambdas/redis_sync lambdas/id_sync lambdas/shared mns_subscription PYTHON_PROJECT_DIRS = e2e e2e_batch $(PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS) #Installs dependencies using poetry. 
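Note on the relocated redis_sync CI job: the test step now resolves imports from two roots, the lambda's own `src` (top-level modules such as `constants`) and the shared package (`common.*`), which is why PYTHONPATH carries both `${{ env.LAMBDA_PATH }}/redis_sync/src` and `${{ env.SHARED_PATH }}/src`, and why coverage is scoped with `--source=src` so only the lambda's own code feeds the redis_sync report. Below is a minimal sketch of what that PYTHONPATH does; the default values for `LAMBDA_PATH` and `SHARED_PATH` are stand-in assumptions, not values taken from the workflow.

```python
import os
import sys

# Stand-ins for the workflow's LAMBDA_PATH / SHARED_PATH environment variables (assumptions).
LAMBDA_PATH = os.getenv("LAMBDA_PATH", "lambdas")
SHARED_PATH = os.getenv("SHARED_PATH", "lambdas/shared")

# PYTHONPATH is a pathsep-separated list; each entry becomes an import root on sys.path,
# so `import constants` resolves from the lambda's src and `from common.clients import logger`
# resolves from the shared src without any packaging step.
pythonpath = os.pathsep.join([f"{LAMBDA_PATH}/redis_sync/src", f"{SHARED_PATH}/src"])
for entry in reversed(pythonpath.split(os.pathsep)):
    sys.path.insert(0, os.path.abspath(entry))

print(sys.path[:2])  # the two roots unittest discovery searches when importing test targets
```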
diff --git a/immunisation-fhir-api.code-workspace b/immunisation-fhir-api.code-workspace index dabc71a68..b92caa122 100644 --- a/immunisation-fhir-api.code-workspace +++ b/immunisation-fhir-api.code-workspace @@ -28,7 +28,7 @@ "path": "e2e_batch" }, { - "path": "redis_sync" + "path": "lambdas/redis_sync" }, { "path": "mns_subscription" diff --git a/lambdas/id_sync/src/id_sync.py b/lambdas/id_sync/src/id_sync.py index 06c8d577d..c42c333f3 100644 --- a/lambdas/id_sync/src/id_sync.py +++ b/lambdas/id_sync/src/id_sync.py @@ -6,9 +6,9 @@ """ from typing import Any, Dict +from common.aws_lambda_event import AwsLambdaEvent from common.clients import logger, STREAM_NAME from common.log_decorator import logging_decorator -from common.aws_lambda_event import AwsLambdaEvent from exceptions.id_sync_exception import IdSyncException from record_processor import process_record diff --git a/lambdas/id_sync/src/pds_details.py b/lambdas/id_sync/src/pds_details.py index 4729a0aec..e9af7601a 100644 --- a/lambdas/id_sync/src/pds_details.py +++ b/lambdas/id_sync/src/pds_details.py @@ -2,11 +2,11 @@ Operations related to PDS (Patient Demographic Service) ''' import tempfile -from common.clients import logger, secrets_manager_client -from common.cache import Cache from os_vars import get_pds_env -from common.pds_service import PdsService from common.authentication import AppRestrictedAuth, Service +from common.cache import Cache +from common.clients import logger, secrets_manager_client +from common.pds_service import PdsService from exceptions.id_sync_exception import IdSyncException pds_env = get_pds_env() diff --git a/lambdas/redis_sync/.vscode/settings.json.default b/lambdas/redis_sync/.vscode/settings.json.default new file mode 100644 index 000000000..f06a81ed7 --- /dev/null +++ b/lambdas/redis_sync/.vscode/settings.json.default @@ -0,0 +1,27 @@ +{ + "python.analysis.extraPaths": [ + "./src" + ], + "python.testing.unittestArgs": [ + "-v", + "-s", + "./", + "-p", + "test_*.py" + ], + "python.testing.pytestEnabled": false, + "python.testing.unittestEnabled": true, + "pylint.args": [ + "--init-hook", + "import sys; sys.path.append('./src')" + ], + "[makefile]": { + "editor.insertSpaces": false, + "editor.detectIndentation": false + }, + "files.trimTrailingWhitespace": true, + "[python]": { + "files.trimTrailingWhitespace": true + }, + "files.insertFinalNewline": true +} diff --git a/redis_sync/Dockerfile b/lambdas/redis_sync/Dockerfile similarity index 52% rename from redis_sync/Dockerfile rename to lambdas/redis_sync/Dockerfile index 40d155f7a..9e9b5bdf5 100644 --- a/redis_sync/Dockerfile +++ b/lambdas/redis_sync/Dockerfile @@ -6,20 +6,34 @@ RUN mkdir -p /home/appuser && \ echo 'appuser:x:1001:' >> /etc/group && \ chown -R 1001:1001 /home/appuser && pip install "poetry~=2.1.4" -# Install Poetry as root -COPY poetry.lock pyproject.toml README.md ./ +# Install Poetry dependencies +# Copy redis_sync Poetry files +COPY ./redis_sync/poetry.lock ./redis_sync/pyproject.toml ./ +COPY ./shared/src/common ./src/common + +RUN echo "Listing /var/task after source code copy:" && ls -R /var/task + +# Install redis_sync dependencies +WORKDIR /var/task RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main -# ----------------------------- -FROM base AS test -COPY src src -COPY tests tests -RUN poetry install --no-interaction --no-ansi --no-root && \ - pytest --disable-warnings tests # ----------------------------- FROM base AS build -COPY src . 
+ +# Set working directory back to Lambda task root +WORKDIR /var/task + +# Copy shared source code +COPY ./shared/src/common ./common + +# Copy redis_sync source code +COPY ./redis_sync/src . + +# Set correct permissions RUN chmod 644 $(find . -type f) && chmod 755 $(find . -type d) + # Build as non-root user USER 1001:1001 + +# Set the Lambda handler CMD ["redis_sync.handler"] diff --git a/lambdas/redis_sync/Makefile b/lambdas/redis_sync/Makefile new file mode 100644 index 000000000..b0a8ffb26 --- /dev/null +++ b/lambdas/redis_sync/Makefile @@ -0,0 +1,16 @@ +TEST_ENV := @PYTHONPATH=src:tests:../shared/src + +test: + $(TEST_ENV) python -m unittest + +coverage-run: + $(TEST_ENV) coverage run -m unittest discover -v + +coverage-report: + $(TEST_ENV) coverage report -m + +coverage-html: + $(TEST_ENV) coverage html + + +.PHONY: build package \ No newline at end of file diff --git a/redis_sync/README.md b/lambdas/redis_sync/README.md similarity index 100% rename from redis_sync/README.md rename to lambdas/redis_sync/README.md diff --git a/redis_sync/poetry.lock b/lambdas/redis_sync/poetry.lock similarity index 100% rename from redis_sync/poetry.lock rename to lambdas/redis_sync/poetry.lock diff --git a/redis_sync/pyproject.toml b/lambdas/redis_sync/pyproject.toml similarity index 89% rename from redis_sync/pyproject.toml rename to lambdas/redis_sync/pyproject.toml index 168d12d59..b958404e2 100644 --- a/redis_sync/pyproject.toml +++ b/lambdas/redis_sync/pyproject.toml @@ -13,7 +13,8 @@ description = "" authors = ["s.wates "] readme = "README.md" packages = [ - {include = "src"} + {include = "src"}, + {include = "common", from = "../shared/src"} ] [tool.poetry.dependencies] diff --git a/redis_sync/src/__init__.py b/lambdas/redis_sync/src/__init__.py similarity index 100% rename from redis_sync/src/__init__.py rename to lambdas/redis_sync/src/__init__.py diff --git a/redis_sync/src/constants.py b/lambdas/redis_sync/src/constants.py similarity index 100% rename from redis_sync/src/constants.py rename to lambdas/redis_sync/src/constants.py diff --git a/redis_sync/src/event_read.py b/lambdas/redis_sync/src/event_read.py similarity index 100% rename from redis_sync/src/event_read.py rename to lambdas/redis_sync/src/event_read.py diff --git a/redis_sync/src/record_processor.py b/lambdas/redis_sync/src/record_processor.py similarity index 93% rename from redis_sync/src/record_processor.py rename to lambdas/redis_sync/src/record_processor.py index e94456ff9..40232714b 100644 --- a/redis_sync/src/record_processor.py +++ b/lambdas/redis_sync/src/record_processor.py @@ -1,6 +1,6 @@ -from clients import logger -from s3_event import S3EventRecord from redis_cacher import RedisCacher +from common.clients import logger +from common.s3_event import S3EventRecord ''' Record Processor This module processes individual S3 records from an event. 
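The build stage above assembles everything flat under the Lambda task root, so the handler and the shared code become sibling top-level imports. The sketch below is a smoke check written against that assumed layout; the file names are read off the COPY directives and the CMD above, not from a published contract.

```python
from pathlib import Path

# Assumed /var/task layout, read off the COPY directives above:
#   /var/task/redis_sync.py         <- entry point for CMD ["redis_sync.handler"]
#   /var/task/record_processor.py   <- copied from ./redis_sync/src
#   /var/task/common/clients.py     <- copied from ./shared/src/common
# Because /var/task is on sys.path in the Lambda runtime, `from common.clients import logger`
# resolves with "common" as an ordinary package sitting next to the handler module.

def missing_modules(task_root: str = "/var/task") -> list[str]:
    """Return expected files that are absent, as a quick layout check inside the built image."""
    expected = ["redis_sync.py", "record_processor.py", "common/clients.py"]
    root = Path(task_root)
    return [name for name in expected if not (root / name).is_file()]

if __name__ == "__main__":
    print(missing_modules() or "task root layout looks as expected")
```

The local Makefile mirrors the same resolution rules: `PYTHONPATH=src:tests:../shared/src` makes `common.*` importable from the sibling shared package while the lambda's own modules stay top-level.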
diff --git a/redis_sync/src/redis_cacher.py b/lambdas/redis_sync/src/redis_cacher.py similarity index 93% rename from redis_sync/src/redis_cacher.py rename to lambdas/redis_sync/src/redis_cacher.py index 88303994d..c9a40e995 100644 --- a/redis_sync/src/redis_cacher.py +++ b/lambdas/redis_sync/src/redis_cacher.py @@ -1,10 +1,9 @@ "Upload the content from a config file in S3 to ElastiCache (Redis)" import json -from clients import redis_client -from clients import logger from transform_map import transform_map -from s3_reader import S3Reader +from common.clients import get_redis_client, logger +from common.s3_reader import S3Reader class RedisCacher: @@ -25,6 +24,7 @@ def upload(bucket_name: str, file_key: str) -> dict: # Transform redis_mappings = transform_map(config_file_content, file_key) + redis_client = get_redis_client() for key, mapping in redis_mappings.items(): safe_mapping = { k: json.dumps(v) if isinstance(v, list) else v diff --git a/lambdas/redis_sync/src/redis_sync.py b/lambdas/redis_sync/src/redis_sync.py new file mode 100644 index 000000000..7153738e3 --- /dev/null +++ b/lambdas/redis_sync/src/redis_sync.py @@ -0,0 +1,53 @@ +from event_read import read_event +from record_processor import process_record +from common.clients import get_redis_client, STREAM_NAME, logger +from common.log_decorator import logging_decorator +from common.s3_event import S3Event +''' + Event Processor + The Business Logic for the Redis Sync Lambda Function. + This module processes S3 events and iterates through each record to process them individually.''' + + +def _process_all_records(s3_records: list) -> dict: + record_count = len(s3_records) + error_count = 0 + file_keys = [] + for record in s3_records: + record_result = process_record(record) + file_keys.append(record_result["file_key"]) + if record_result["status"] == "error": + error_count += 1 + if error_count > 0: + logger.error("Processed %d records with %d errors", record_count, error_count) + return {"status": "error", "message": f"Processed {record_count} records with {error_count} errors", + "file_keys": file_keys} + else: + logger.info("Successfully processed all %d records", record_count) + return {"status": "success", "message": f"Successfully processed {record_count} records", + "file_keys": file_keys} + + +@logging_decorator(prefix="redis_sync", stream_name=STREAM_NAME) +def handler(event, _): + + try: + no_records = "No records found in event" + # check if the event requires a read, ie {"read": "my-hashmap"} + if "read" in event: + return read_event(get_redis_client(), event, logger) + elif "Records" in event: + logger.info("Processing S3 event with %d records", len(event.get('Records', []))) + s3_records = S3Event(event).get_s3_records() + if not s3_records: + logger.info(no_records) + return {"status": "success", "message": no_records} + else: + return _process_all_records(s3_records) + else: + logger.info(no_records) + return {"status": "success", "message": no_records} + + except Exception: + logger.exception("Error processing S3 event") + return {"status": "error", "message": "Error processing S3 event"} diff --git a/redis_sync/src/transform_configs.py b/lambdas/redis_sync/src/transform_configs.py similarity index 97% rename from redis_sync/src/transform_configs.py rename to lambdas/redis_sync/src/transform_configs.py index 7f991170c..f7ddee722 100644 --- a/redis_sync/src/transform_configs.py +++ b/lambdas/redis_sync/src/transform_configs.py @@ -1,4 +1,4 @@ -from clients import logger +from common.clients import logger def 
transform_vaccine_map(mapping): diff --git a/redis_sync/src/transform_map.py b/lambdas/redis_sync/src/transform_map.py similarity index 95% rename from redis_sync/src/transform_map.py rename to lambdas/redis_sync/src/transform_map.py index 83938930c..e602a2f6f 100644 --- a/redis_sync/src/transform_map.py +++ b/lambdas/redis_sync/src/transform_map.py @@ -1,6 +1,6 @@ from constants import RedisCacheKey -from clients import logger from transform_configs import transform_vaccine_map, transform_supplier_permissions +from common.clients import logger ''' Transform config file to format required in REDIS cache. ''' diff --git a/redis_sync/tests/__init__.py b/lambdas/redis_sync/tests/__init__.py similarity index 100% rename from redis_sync/tests/__init__.py rename to lambdas/redis_sync/tests/__init__.py diff --git a/redis_sync/tests/test_data/disease_mapping.json b/lambdas/redis_sync/tests/test_data/disease_mapping.json similarity index 100% rename from redis_sync/tests/test_data/disease_mapping.json rename to lambdas/redis_sync/tests/test_data/disease_mapping.json diff --git a/redis_sync/tests/test_data/expected_disease_to_vacc.json b/lambdas/redis_sync/tests/test_data/expected_disease_to_vacc.json similarity index 100% rename from redis_sync/tests/test_data/expected_disease_to_vacc.json rename to lambdas/redis_sync/tests/test_data/expected_disease_to_vacc.json diff --git a/redis_sync/tests/test_data/expected_ods_code_to_supplier.json b/lambdas/redis_sync/tests/test_data/expected_ods_code_to_supplier.json similarity index 100% rename from redis_sync/tests/test_data/expected_ods_code_to_supplier.json rename to lambdas/redis_sync/tests/test_data/expected_ods_code_to_supplier.json diff --git a/redis_sync/tests/test_data/expected_supplier_permissions.json b/lambdas/redis_sync/tests/test_data/expected_supplier_permissions.json similarity index 100% rename from redis_sync/tests/test_data/expected_supplier_permissions.json rename to lambdas/redis_sync/tests/test_data/expected_supplier_permissions.json diff --git a/redis_sync/tests/test_data/expected_vacc_to_diseases.json b/lambdas/redis_sync/tests/test_data/expected_vacc_to_diseases.json similarity index 100% rename from redis_sync/tests/test_data/expected_vacc_to_diseases.json rename to lambdas/redis_sync/tests/test_data/expected_vacc_to_diseases.json diff --git a/redis_sync/tests/test_data/permissions_config.json b/lambdas/redis_sync/tests/test_data/permissions_config.json similarity index 100% rename from redis_sync/tests/test_data/permissions_config.json rename to lambdas/redis_sync/tests/test_data/permissions_config.json diff --git a/redis_sync/tests/test_data/s3-notification-single-filename.json b/lambdas/redis_sync/tests/test_data/s3-notification-single-filename.json similarity index 100% rename from redis_sync/tests/test_data/s3-notification-single-filename.json rename to lambdas/redis_sync/tests/test_data/s3-notification-single-filename.json diff --git a/redis_sync/tests/test_data/test_read_vaccine_mapping.json b/lambdas/redis_sync/tests/test_data/test_read_vaccine_mapping.json similarity index 100% rename from redis_sync/tests/test_data/test_read_vaccine_mapping.json rename to lambdas/redis_sync/tests/test_data/test_read_vaccine_mapping.json diff --git a/redis_sync/tests/test_event_read.py b/lambdas/redis_sync/tests/test_event_read.py similarity index 100% rename from redis_sync/tests/test_event_read.py rename to lambdas/redis_sync/tests/test_event_read.py diff --git a/redis_sync/tests/test_handler.py b/lambdas/redis_sync/tests/test_handler.py 
similarity index 86% rename from redis_sync/tests/test_handler.py rename to lambdas/redis_sync/tests/test_handler.py index c63bb6240..382371684 100644 --- a/redis_sync/tests/test_handler.py +++ b/lambdas/redis_sync/tests/test_handler.py @@ -27,7 +27,7 @@ def setUp(self): self.mock_logger_error = self.logger_error_patcher.start() self.logger_exception_patcher = patch("logging.Logger.exception") self.mock_logger_exception = self.logger_exception_patcher.start() - self.get_s3_records_patcher = patch("s3_event.S3Event.get_s3_records") + self.get_s3_records_patcher = patch("common.s3_event.S3Event.get_s3_records") self.mock_get_s3_records = self.get_s3_records_patcher.start() self.record_processor_patcher = patch("redis_sync.process_record") self.mock_record_processor = self.record_processor_patcher.start() @@ -40,7 +40,7 @@ def tearDown(self): self.logger_exception_patcher.stop() def test_handler_success(self): - with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): + with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)): importlib.reload(redis_sync) mock_event = {'Records': [self.s3_vaccine]} self.mock_get_s3_records.return_value = [self.s3_vaccine] @@ -53,7 +53,7 @@ def test_handler_success(self): self.assertEqual(result["file_keys"], ['test-key']) def test_handler_failure(self): - with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): + with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)): importlib.reload(redis_sync) mock_event = {'Records': [self.s3_vaccine]} @@ -66,7 +66,7 @@ def test_handler_failure(self): self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'}) def test_handler_no_records(self): - with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): + with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)): importlib.reload(redis_sync) mock_event = {'Records': []} self.mock_get_s3_records.return_value = [] @@ -74,7 +74,7 @@ def test_handler_no_records(self): self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'}) def test_handler_exception(self): - with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): + with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)): importlib.reload(redis_sync) mock_event = {'Records': [self.s3_vaccine]} self.mock_get_s3_records.return_value = [self.s3_vaccine] @@ -84,14 +84,14 @@ def test_handler_exception(self): self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'}) def test_handler_with_empty_event(self): - with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): + with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)): importlib.reload(redis_sync) self.mock_get_s3_records.return_value = [] result = redis_sync.handler({}, None) self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'}) def test_handler_multi_record(self): - with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): + with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)): importlib.reload(redis_sync) mock_event = {'Records': [self.s3_vaccine, self.s3_supplier]} # If you need S3EventRecord, uncomment the import 
and use it here @@ -112,7 +112,7 @@ def test_handler_multi_record(self): self.assertEqual(result['file_keys'][1], 'test-key2') def test_handler_read_event(self): - with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)): + with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)): importlib.reload(redis_sync) mock_event = {'read': 'myhash'} mock_read_event_response = {'field1': 'value1'} diff --git a/redis_sync/tests/test_handler_decorator.py b/lambdas/redis_sync/tests/test_handler_decorator.py similarity index 97% rename from redis_sync/tests/test_handler_decorator.py rename to lambdas/redis_sync/tests/test_handler_decorator.py index 5b477fb89..7775d183e 100644 --- a/redis_sync/tests/test_handler_decorator.py +++ b/lambdas/redis_sync/tests/test_handler_decorator.py @@ -3,8 +3,8 @@ import json from unittest.mock import patch from redis_sync import handler -from s3_event import S3EventRecord from constants import RedisCacheKey +from common.s3_event import S3EventRecord class TestHandlerDecorator(unittest.TestCase): @@ -33,11 +33,11 @@ def setUp(self): self.mock_logger_error = self.logger_error_patcher.start() self.logger_exception_patcher = patch("logging.Logger.exception") self.mock_logger_exception = self.logger_exception_patcher.start() - self.get_s3_records_patcher = patch("s3_event.S3Event.get_s3_records") + self.get_s3_records_patcher = patch("common.s3_event.S3Event.get_s3_records") self.mock_get_s3_records = self.get_s3_records_patcher.start() self.record_processor_patcher = patch("redis_sync.process_record") self.mock_record_processor = self.record_processor_patcher.start() - self.firehose_patcher = patch("log_decorator.firehose_client") + self.firehose_patcher = patch("common.log_decorator.firehose_client") self.mock_firehose_client = self.firehose_patcher.start() self.mock_firehose_client.put_record.return_value = True diff --git a/redis_sync/tests/test_record_processor.py b/lambdas/redis_sync/tests/test_record_processor.py similarity index 98% rename from redis_sync/tests/test_record_processor.py rename to lambdas/redis_sync/tests/test_record_processor.py index baec40b96..d1036ff60 100644 --- a/redis_sync/tests/test_record_processor.py +++ b/lambdas/redis_sync/tests/test_record_processor.py @@ -2,8 +2,8 @@ import unittest from unittest.mock import patch -from s3_event import S3EventRecord from constants import RedisCacheKey +from common.s3_event import S3EventRecord class TestRecordProcessor(unittest.TestCase): diff --git a/redis_sync/tests/test_redis_cacher.py b/lambdas/redis_sync/tests/test_redis_cacher.py similarity index 97% rename from redis_sync/tests/test_redis_cacher.py rename to lambdas/redis_sync/tests/test_redis_cacher.py index 5792cbc4b..642fa9bd1 100644 --- a/redis_sync/tests/test_redis_cacher.py +++ b/lambdas/redis_sync/tests/test_redis_cacher.py @@ -11,7 +11,7 @@ def setUp(self): self.mock_s3_reader = self.s3_reader_patcher.start() self.transform_map_patcher = patch("redis_cacher.transform_map") self.mock_transform_map = self.transform_map_patcher.start() - self.redis_client_patcher = patch("redis_cacher.redis_client") + self.redis_client_patcher = patch("common.clients.redis_client") self.mock_redis_client = self.redis_client_patcher.start() self.logger_info_patcher = patch("logging.Logger.info") self.mock_logger_info = self.logger_info_patcher.start() diff --git a/redis_sync/tests/test_transform_config.py b/lambdas/redis_sync/tests/test_transform_config.py similarity index 100% rename from 
redis_sync/tests/test_transform_config.py rename to lambdas/redis_sync/tests/test_transform_config.py diff --git a/lambdas/shared/src/common/authentication.py b/lambdas/shared/src/common/authentication.py index c9d14525f..f4663605d 100644 --- a/lambdas/shared/src/common/authentication.py +++ b/lambdas/shared/src/common/authentication.py @@ -7,8 +7,8 @@ from enum import Enum from .cache import Cache -from common.models.errors import UnhandledResponseError from common.clients import logger +from common.models.errors import UnhandledResponseError class Service(Enum): diff --git a/lambdas/shared/src/common/clients.py b/lambdas/shared/src/common/clients.py index 41ad7831f..5e8be5e8c 100644 --- a/lambdas/shared/src/common/clients.py +++ b/lambdas/shared/src/common/clients.py @@ -1,7 +1,7 @@ import os import logging -from boto3 import client as boto3_client -import boto3 +import redis +from boto3 import client as boto3_client, resource as boto3_resource logging.basicConfig(level="INFO") logger = logging.getLogger() @@ -12,9 +12,22 @@ REGION_NAME = os.getenv("AWS_REGION", "eu-west-2") +REDIS_HOST = os.getenv("REDIS_HOST", "") +REDIS_PORT = os.getenv("REDIS_PORT", 6379) + s3_client = boto3_client("s3", region_name=REGION_NAME) firehose_client = boto3_client("firehose", region_name=REGION_NAME) secrets_manager_client = boto3_client("secretsmanager", region_name=REGION_NAME) -dynamodb_resource = boto3.resource("dynamodb", region_name=REGION_NAME) +dynamodb_resource = boto3_resource("dynamodb", region_name=REGION_NAME) dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME) + +redis_client = None + + +def get_redis_client(): + global redis_client + if redis_client is None: + logger.info(f"Connecting to Redis at {REDIS_HOST}:{REDIS_PORT}") + redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True) + return redis_client diff --git a/lambdas/shared/src/common/pds_service.py b/lambdas/shared/src/common/pds_service.py index 859488493..c334bb963 100644 --- a/lambdas/shared/src/common/pds_service.py +++ b/lambdas/shared/src/common/pds_service.py @@ -2,8 +2,8 @@ import uuid from common.authentication import AppRestrictedAuth -from common.models.errors import UnhandledResponseError from common.clients import logger +from common.models.errors import UnhandledResponseError class PdsService: diff --git a/redis_sync/src/s3_event.py b/lambdas/shared/src/common/s3_event.py similarity index 80% rename from redis_sync/src/s3_event.py rename to lambdas/shared/src/common/s3_event.py index 164bc2143..7dc4b098f 100644 --- a/redis_sync/src/s3_event.py +++ b/lambdas/shared/src/common/s3_event.py @@ -1,3 +1,6 @@ +from common.aws_lambda_event import AwsLambdaEvent + + class S3EventRecord: """ S3 Event Parsing Utilities @@ -18,10 +21,10 @@ def get_object_key(self): return ret -class S3Event: +class S3Event(AwsLambdaEvent): def __init__(self, event): - self.event = event + super().__init__(event) def get_s3_records(self): # return a list of S3EventRecord objects - stripping out the s3 key - return [S3EventRecord(record['s3']) for record in self.event['Records']] + return [S3EventRecord(record['s3']) for record in self.records] diff --git a/redis_sync/src/s3_reader.py b/lambdas/shared/src/common/s3_reader.py similarity index 92% rename from redis_sync/src/s3_reader.py rename to lambdas/shared/src/common/s3_reader.py index 5b6e28dbe..2f740956a 100644 --- a/redis_sync/src/s3_reader.py +++ b/lambdas/shared/src/common/s3_reader.py @@ -1,5 +1,4 @@ -from clients import s3_client -from 
clients import logger +from common.clients import s3_client, logger class S3Reader: diff --git a/lambdas/shared/tests/test_common/test_clients.py b/lambdas/shared/tests/test_common/test_clients.py index fd9868b3c..c07770bf9 100644 --- a/lambdas/shared/tests/test_common/test_clients.py +++ b/lambdas/shared/tests/test_common/test_clients.py @@ -8,6 +8,8 @@ class TestClients(unittest.TestCase): BUCKET_NAME = "default-bucket" AWS_REGION = "eu-west-2" + REDIS_HOST = "mock-redis-host" + REDIS_PORT = 6379 def setUp(self): self.boto3_client_patch = patch("boto3.client") @@ -20,9 +22,15 @@ def setUp(self): self.mock_getenv = self.getenv_patch.start() self.mock_getenv.side_effect = lambda key, default=None: { "CONFIG_BUCKET_NAME": self.BUCKET_NAME, - "AWS_REGION": self.AWS_REGION + "AWS_REGION": self.AWS_REGION, + "REDIS_HOST": self.REDIS_HOST, + "REDIS_PORT": self.REDIS_PORT }.get(key, default) + self.redis_patch = patch("redis.StrictRedis") + self.mock_redis = self.redis_patch.start() + + self.mock_redis.return_value = self.mock_redis self.mock_boto3_client.return_value = self.mock_boto3_client self.mock_boto3_client.return_value.send_message = {} @@ -34,6 +42,8 @@ def test_os_environ(self): importlib.reload(clients) self.assertEqual(clients.CONFIG_BUCKET_NAME, self.BUCKET_NAME) self.assertEqual(clients.REGION_NAME, self.AWS_REGION) + self.assertEqual(clients.REDIS_HOST, self.REDIS_HOST) + self.assertEqual(clients.REDIS_PORT, self.REDIS_PORT) def test_boto3_client(self): ''' Test boto3 client is created with correct parameters ''' @@ -45,6 +55,11 @@ def test_firehose_client(self): importlib.reload(clients) self.mock_boto3_client.assert_any_call("firehose", region_name=self.AWS_REGION) + def test_redis_client(self): + ''' Test redis client is not initialized on import ''' + importlib.reload(clients) + self.mock_redis.assert_not_called() + def test_logging_setup(self): ''' Test logging is set up correctly ''' importlib.reload(clients) @@ -55,6 +70,15 @@ def test_logging_configuration(self): importlib.reload(clients) clients.logger.setLevel.assert_called_once_with("INFO") + def test_redis_client_initialization(self): + ''' Test redis client is initialized exactly once even with multiple invocations''' + importlib.reload(clients) + clients.get_redis_client() + clients.get_redis_client() + self.mock_redis.assert_called_once_with(host=self.REDIS_HOST, port=self.REDIS_PORT, decode_responses=True) + self.assertTrue(hasattr(clients, 'redis_client')) + self.assertIsInstance(clients.redis_client, self.mock_redis.return_value.__class__) + def test_logging_initialization(self): ''' Test logging initialization ''' importlib.reload(clients) diff --git a/lambdas/shared/tests/test_common/test_s3_event.py b/lambdas/shared/tests/test_common/test_s3_event.py new file mode 100644 index 000000000..562ef2817 --- /dev/null +++ b/lambdas/shared/tests/test_common/test_s3_event.py @@ -0,0 +1,95 @@ +import unittest +from common.aws_lambda_event import AwsEventType +from common.s3_event import S3Event + + +class TestS3Event(unittest.TestCase): + + def setUp(self): + """Set up test fixtures""" + self.s3_record_dict = { + "eventVersion": "2.1", + "eventSource": "aws:s3", + "awsRegion": "us-west-2", + "eventTime": "1970-01-01T00:00:00.000Z", + "eventName": "ObjectCreated:Put", + "userIdentity": { + "principalId": "my-example-user" + }, + "requestParameters": { + "sourceIPAddress": "172.16.0.1" + }, + "responseElements": { + "x-amz-request-id": "C3D13FE58DE4C810", + "x-amz-id-2": 
"FMyUVURIY8/IgAtTv8xRjskZQpcIZ9KG4V5Wp6S7S/JRWeUWerMUE5JgHvANOjpD" + }, + "s3": { + "s3SchemaVersion": "1.0", + "configurationId": "my-test-config", + "bucket": { + "name": "my-test-bucket", + "ownerIdentity": { + "principalId": "my-example-id" + }, + "arn": "arn:aws:s3:::my-test-bucket" + }, + "object": { + "key": "my-test-key.csv", + "size": 1024, + "eTag": "d41d8cd98f00b204e9800998ecf8427e", + "versionId": "096fKKXTRTtl3on89fVO.nfljtsv6qko", + "sequencer": "0055AED6DCD90281E5" + } + } + } + + def test_s3_event(self): + """Test initialization with S3 event""" + event = { + 'Records': [self.s3_record_dict], + 'eventSource': 'aws:s3' + } + + s3_event = S3Event(event) + + self.assertEqual(s3_event.event_type, AwsEventType.S3) + self.assertEqual(len(s3_event.records), 1) + + s3_records = s3_event.get_s3_records() + self.assertEqual(len(s3_records), 1) + self.assertEqual(s3_records[0].get_bucket_name(), "my-test-bucket") + self.assertEqual(s3_records[0].get_object_key(), "my-test-key.csv") + + def test_s3_event_with_multiple_records(self): + """Test initialization with multiple s3 records""" + s3_record_2 = self.s3_record_dict.copy() + s3_record_2['s3']['bucket']['name'] = 'my-second-test-bucket' + + event = { + 'Records': [self.s3_record_dict, s3_record_2], + 'eventSource': 'aws:s3' + } + + s3_event = S3Event(event) + + self.assertEqual(s3_event.event_type, AwsEventType.S3) + self.assertEqual(len(s3_event.records), 2) + + s3_records = s3_event.get_s3_records() + self.assertEqual(len(s3_records), 2) + self.assertEqual(s3_records[1].get_bucket_name(), "my-second-test-bucket") + + def test_s3_event_with_no_records(self): + """Test initialization with no s3 records""" + event = { + 'Records': [], + 'eventSource': 'aws:s3' + } + + s3_event = S3Event(event) + + self.assertEqual(s3_event.event_type, AwsEventType.S3) + self.assertEqual(len(s3_event.records), 0) + + s3_records = s3_event.get_s3_records() + self.assertEqual(len(s3_records), 0) diff --git a/redis_sync/tests/test_s3_reader.py b/lambdas/shared/tests/test_common/test_s3_reader.py similarity index 93% rename from redis_sync/tests/test_s3_reader.py rename to lambdas/shared/tests/test_common/test_s3_reader.py index 4b33e58d4..4be69a2c7 100644 --- a/redis_sync/tests/test_s3_reader.py +++ b/lambdas/shared/tests/test_common/test_s3_reader.py @@ -1,6 +1,6 @@ import unittest from unittest.mock import patch, MagicMock -from s3_reader import S3Reader +from common.s3_reader import S3Reader class TestS3Reader(unittest.TestCase): @@ -10,7 +10,7 @@ def setUp(self): self.key = "test.json" # Patch s3_client - self.s3_client_patcher = patch("s3_reader.s3_client") + self.s3_client_patcher = patch("common.s3_reader.s3_client") self.mock_s3_client = self.s3_client_patcher.start() self.logger_info_patcher = patch("logging.Logger.info") diff --git a/redis_sync/Makefile b/redis_sync/Makefile deleted file mode 100644 index fec3a6e17..000000000 --- a/redis_sync/Makefile +++ /dev/null @@ -1,14 +0,0 @@ - -test: - @PYTHONPATH=src:tests python -m unittest - -coverage-run: - coverage run -m unittest discover -v - -coverage-report: - coverage report -m - -coverage-html: - coverage html - -.PHONY: build package \ No newline at end of file diff --git a/redis_sync/src/clients.py b/redis_sync/src/clients.py deleted file mode 100644 index 535f6e013..000000000 --- a/redis_sync/src/clients.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -import logging -import redis -from boto3 import client as boto3_client - - -logging.basicConfig(level="INFO") -logger = logging.getLogger() 
-logger.setLevel("INFO") - -STREAM_NAME = os.getenv("SPLUNK_FIREHOSE_NAME", "immunisation-fhir-api-internal-dev-splunk-firehose") -CONFIG_BUCKET_NAME = os.getenv("CONFIG_BUCKET_NAME", "variable-not-defined") -REGION_NAME = os.getenv("AWS_REGION", "eu-west-2") -REDIS_HOST = os.getenv("REDIS_HOST", "") -REDIS_PORT = os.getenv("REDIS_PORT", 6379) - -s3_client = boto3_client("s3", region_name=REGION_NAME) -firehose_client = boto3_client("firehose", region_name=REGION_NAME) -logger.info(f"Connecting to Redis at {REDIS_HOST}:{REDIS_PORT}") -redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True) diff --git a/redis_sync/src/log_decorator.py b/redis_sync/src/log_decorator.py deleted file mode 100644 index e014e41c3..000000000 --- a/redis_sync/src/log_decorator.py +++ /dev/null @@ -1,53 +0,0 @@ -"""This module contains the logging decorator for sending the appropriate logs to Cloudwatch and Firehose. - The decorator log pattern is shared by filenameprocessor, recordprocessor, ack_backend and redis_sync modules. - and therefore could be moved to a common module in the future. - TODO: refactor to a common module. - TODO: Duplication check has been suppressed in sonar-project.properties. Remove once refactored. -""" -import json -import time -from datetime import datetime -from functools import wraps -from clients import firehose_client, logger, STREAM_NAME - - -def send_log_to_firehose(log_data: dict) -> None: - """Sends the log_message to Firehose""" - try: - record = {"Data": json.dumps({"event": log_data}).encode("utf-8")} - response = firehose_client.put_record(DeliveryStreamName=STREAM_NAME, Record=record) - logger.info("Log sent to Firehose: %s", response) - except Exception as error: # pylint:disable = broad-exception-caught - logger.exception("Error sending log to Firehose: %s", error) - - -def generate_and_send_logs( - start_time, base_log_data: dict, additional_log_data: dict, is_error_log: bool = False -) -> None: - """Generates log data which includes the base_log_data, additional_log_data, and time taken (calculated using the - current time and given start_time) and sends them to Cloudwatch and Firehose.""" - log_data = {**base_log_data, "time_taken": f"{round(time.time() - start_time, 5)}s", **additional_log_data} - log_function = logger.error if is_error_log else logger.info - log_function(json.dumps(log_data)) - send_log_to_firehose(log_data) - - -def logging_decorator(prefix="redis_sync"): - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - base_log_data = { - "function_name": f"{prefix}_{func.__name__}", - "date_time": str(datetime.now()) - } - start_time = time.time() - try: - result = func(*args, **kwargs) - generate_and_send_logs(start_time, base_log_data, additional_log_data=result) - return result - except Exception as e: - additional_log_data = {"statusCode": 500, "error": str(e)} - generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True) - raise - return wrapper - return decorator diff --git a/redis_sync/src/redis_sync.py b/redis_sync/src/redis_sync.py deleted file mode 100644 index cf8e41363..000000000 --- a/redis_sync/src/redis_sync.py +++ /dev/null @@ -1,49 +0,0 @@ -from clients import logger -from s3_event import S3Event -from record_processor import process_record -from event_read import read_event -from log_decorator import logging_decorator -from clients import redis_client -''' - Event Processor - The Business Logic for the Redis Sync Lambda Function. 
- This module processes S3 events and iterates through each record to process them individually.''' - - -@logging_decorator(prefix="redis_sync") -def handler(event, _): - - try: - # check if the event requires a read, ie {"read": "my-hashmap"} - if "read" in event: - return read_event(redis_client, event, logger) - elif "Records" in event: - logger.info("Processing S3 event with %d records", len(event.get('Records', []))) - s3_event = S3Event(event) - record_count = len(s3_event.get_s3_records()) - if record_count == 0: - logger.info("No records found in event") - return {"status": "success", "message": "No records found in event"} - else: - error_count = 0 - file_keys = [] - for record in s3_event.get_s3_records(): - record_result = process_record(record) - file_keys.append(record_result["file_key"]) - if record_result["status"] == "error": - error_count += 1 - if error_count > 0: - logger.error("Processed %d records with %d errors", record_count, error_count) - return {"status": "error", "message": f"Processed {record_count} records with {error_count} errors", - "file_keys": file_keys} - else: - logger.info("Successfully processed all %d records", record_count) - return {"status": "success", "message": f"Successfully processed {record_count} records", - "file_keys": file_keys} - else: - logger.info("No records found in event") - return {"status": "success", "message": "No records found in event"} - - except Exception: - logger.exception("Error processing S3 event") - return {"status": "error", "message": "Error processing S3 event"} diff --git a/redis_sync/tests/test_clients.py b/redis_sync/tests/test_clients.py deleted file mode 100644 index b3db556a7..000000000 --- a/redis_sync/tests/test_clients.py +++ /dev/null @@ -1,89 +0,0 @@ -import unittest -from unittest.mock import patch -import clients as clients -import importlib - - -class TestClients(unittest.TestCase): - - BUCKET_NAME = "default-bucket" - AWS_REGION = "eu-west-2" - REDIS_HOST = "mock-redis-host" - REDIS_PORT = 6379 - - def setUp(self): - self.boto3_client_patch = patch("boto3.client") - self.mock_boto3_client = self.boto3_client_patch.start() - self.logging_patch = patch("logging.getLogger") - self.mock_logging = self.logging_patch.start() - self.logger_info_patcher = patch("logging.Logger.info") - self.mock_logger_info = self.logger_info_patcher.start() - self.getenv_patch = patch("os.getenv") - self.mock_getenv = self.getenv_patch.start() - self.mock_getenv.side_effect = lambda key, default=None: { - "CONFIG_BUCKET_NAME": self.BUCKET_NAME, - "AWS_REGION": self.AWS_REGION, - "REDIS_HOST": self.REDIS_HOST, - "REDIS_PORT": self.REDIS_PORT - }.get(key, default) - - self.redis_patch = patch("redis.StrictRedis") - self.mock_redis = self.redis_patch.start() - - self.mock_redis.return_value = self.mock_redis - self.mock_boto3_client.return_value = self.mock_boto3_client - self.mock_boto3_client.return_value.send_message = {} - - def tearDown(self): - patch.stopall() - - def test_os_environ(self): - # Test if environment variables are set correctly - importlib.reload(clients) - self.assertEqual(clients.CONFIG_BUCKET_NAME, self.BUCKET_NAME) - self.assertEqual(clients.REGION_NAME, self.AWS_REGION) - self.assertEqual(clients.REDIS_HOST, self.REDIS_HOST) - self.assertEqual(clients.REDIS_PORT, self.REDIS_PORT) - - def test_boto3_client(self): - ''' Test boto3 client is created with correct parameters ''' - importlib.reload(clients) - self.mock_boto3_client.assert_any_call("s3", region_name=self.AWS_REGION) - - def 
test_firehose_client(self): - ''' Test firehose client is created with correct parameters ''' - importlib.reload(clients) - self.mock_boto3_client.assert_any_call("firehose", region_name=self.AWS_REGION) - - def test_redis_client(self): - ''' Test redis client is created with correct parameters ''' - importlib.reload(clients) - self.mock_redis.assert_called_once_with( - host=self.REDIS_HOST, - port=self.REDIS_PORT, - decode_responses=True - ) - - def test_logging_setup(self): - ''' Test logging is set up correctly ''' - importlib.reload(clients) - self.assertTrue(hasattr(clients, 'logger')) - - def test_logging_configuration(self): - ''' Test logging configuration ''' - importlib.reload(clients) - clients.logger.setLevel.assert_called_once_with("INFO") - - def test_redis_client_initialization(self): - ''' Test redis client initialization ''' - importlib.reload(clients) - self.mock_redis.assert_called_once_with(host=self.REDIS_HOST, port=self.REDIS_PORT, decode_responses=True) - self.assertTrue(hasattr(clients, 'redis_client')) - self.assertIsInstance(clients.redis_client, self.mock_redis.return_value.__class__) - - def test_logging_initialization(self): - ''' Test logging initialization ''' - importlib.reload(clients) - self.mock_logging.assert_called_once_with() - self.assertTrue(hasattr(clients, 'logger')) - clients.logger.setLevel.assert_any_call("INFO") diff --git a/sonar-project.properties b/sonar-project.properties index d179273a4..c44de3cb9 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -3,9 +3,9 @@ sonar.projectKey=NHSDigital_immunisation-fhir-api sonar.organization=nhsdigital sonar.host.url=https://sonarcloud.io sonar.python.version=3.11 -sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/temporary_sandbox/**,**/devtools/**,**/proxies/**,**/scripts/**,**/terraform/**,**/tests/**,redis_sync/src/log_decorator.py +sonar.exclusions=**/e2e/**,**/e2e_batch/**,**/temporary_sandbox/**,**/devtools/**,**/proxies/**,**/scripts/**,**/terraform/**,**/tests/** sonar.python.coverage.reportPaths=backend-coverage.xml,delta-coverage.xml,ack-lambda-coverage.xml,filenameprocessor-coverage.xml,recordforwarder-coverage.xml,recordprocessor-coverage.xml,mesh_processor-coverage.xml,redis_sync-coverage.xml,mns_subscription-coverage.xml,id_sync-coverage.xml,shared-coverage.xml,batchprocessorfilter-coverage.xml -sonar.cpd.exclusions=**/cache.py,**/authentication.py,**/test_cache.py,**/test_authentication.py,**/mns_service.py,**/errors.py,redis_sync/src/log_decorator.py,**/Dockerfile,lambdas/shared/src/common/**,filenameprocessor/src/logging_decorator.py,backend/src/fhir_service.py +sonar.cpd.exclusions=**/cache.py,**/authentication.py,**/test_cache.py,**/test_authentication.py,**/mns_service.py,**/errors.py,**/Dockerfile,lambdas/shared/src/common/**,filenameprocessor/src/logging_decorator.py,backend/src/fhir_service.py sonar.issue.ignore.multicriteria=exclude_snomed_urls,exclude_hl7_urls sonar.issue.ignore.multicriteria.exclude_snomed_urls.ruleKey=python:S5332 sonar.issue.ignore.multicriteria.exclude_snomed_urls.resourceKey=**http://snomed\.info/sct** diff --git a/terraform/id_sync_lambda.tf b/terraform/id_sync_lambda.tf index f5fcfbea6..a5b9f51d9 100644 --- a/terraform/id_sync_lambda.tf +++ b/terraform/id_sync_lambda.tf @@ -1,14 +1,9 @@ # Define the directory containing the Docker image and calculate its SHA-256 hash for triggering redeployments locals { - shared_dir = abspath("${path.root}/../shared") id_sync_lambda_dir = abspath("${path.root}/../id_sync") - # Get files from both 
directories - shared_files = fileset(local.shared_dir, "**") id_sync_lambda_files = fileset(local.id_sync_lambda_dir, "**") - # Calculate SHA for both directories - shared_dir_sha = sha1(join("", [for f in local.shared_files : filesha1("${local.shared_dir}/${f}")])) id_sync_lambda_dir_sha = sha1(join("", [for f in local.id_sync_lambda_files : filesha1("${local.id_sync_lambda_dir}/${f}")])) id_sync_lambda_name = "${local.short_prefix}-id_sync_lambda" } diff --git a/terraform/redis_sync_lambda.tf b/terraform/redis_sync_lambda.tf index ec7e34945..a915a4b6a 100644 --- a/terraform/redis_sync_lambda.tf +++ b/terraform/redis_sync_lambda.tf @@ -1,8 +1,11 @@ # Define the directory containing the Docker image and calculate its SHA-256 hash for triggering redeployments locals { - redis_sync_lambda_dir = abspath("${path.root}/../redis_sync") - redis_sync_lambda_files = fileset(local.redis_sync_lambda_dir, "**") + redis_sync_lambda_dir = abspath("${path.root}/../redis_sync") + + redis_sync_lambda_files = fileset(local.redis_sync_lambda_dir, "**") + redis_sync_lambda_dir_sha = sha1(join("", [for f in local.redis_sync_lambda_files : filesha1("${local.redis_sync_lambda_dir}/${f}")])) + redis_sync_lambda_name = "${local.short_prefix}-redis_sync_lambda" } resource "aws_ecr_repository" "redis_sync_lambda_repository" { @@ -15,11 +18,11 @@ resource "aws_ecr_repository" "redis_sync_lambda_repository" { # Module for building and pushing Docker image to ECR module "redis_sync_docker_image" { - source = "terraform-aws-modules/lambda/aws//modules/docker-build" - version = "8.0.1" - - create_ecr_repo = false - ecr_repo = aws_ecr_repository.redis_sync_lambda_repository.name + source = "terraform-aws-modules/lambda/aws//modules/docker-build" + version = "8.0.1" + docker_file_path = "./redis_sync/Dockerfile" + create_ecr_repo = false + ecr_repo = aws_ecr_repository.redis_sync_lambda_repository.name ecr_repo_lifecycle_policy = jsonencode({ "rules" : [ { @@ -39,7 +42,7 @@ module "redis_sync_docker_image" { platform = "linux/amd64" use_image_tag = false - source_path = local.redis_sync_lambda_dir + source_path = abspath("${path.root}/..") triggers = { dir_sha = local.redis_sync_lambda_dir_sha } diff --git a/terraform/shared.tf b/terraform/shared.tf new file mode 100644 index 000000000..fa2c9d694 --- /dev/null +++ b/terraform/shared.tf @@ -0,0 +1,8 @@ +# Define locals for shared lambdas +locals { + shared_dir = abspath("${path.root}/../shared") + + shared_files = fileset(local.shared_dir, "**") + + shared_dir_sha = sha1(join("", [for f in local.shared_files : filesha1("${local.shared_dir}/${f}")])) +}
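Closing note on the shared Redis client: the eager connection that used to live in redis_sync/src/clients.py (deleted above) is replaced by the lazy `get_redis_client()` added to lambdas/shared/src/common/clients.py earlier in this diff, which is what lets `test_redis_client` assert that importing the module touches no network and `test_redis_client_initialization` assert a single construction across repeated calls. A condensed sketch of that pattern, not a drop-in copy of the shared module:

```python
import os
import redis

# Module-level handle starts empty; nothing connects at import time.
_redis_client = None


def get_redis_client() -> redis.StrictRedis:
    """Create the Redis client on first use and reuse it on every later call."""
    global _redis_client
    if _redis_client is None:
        host = os.getenv("REDIS_HOST", "")
        port = os.getenv("REDIS_PORT", 6379)
        _redis_client = redis.StrictRedis(host=host, port=port, decode_responses=True)
    return _redis_client
```

Deferring construction this way keeps the module import side-effect free, so callers such as `redis_cacher.upload` and the handler's read path pay the connection cost only when they actually need Redis.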