Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ updates:
- "/filenameprocessor"
- "/mesh_processor"
- "/recordprocessor"
- "/redis_sync"
- "/lambdas/redis_sync"
- "/lambdas/id_sync"
- "/lambdas/shared"
- "/mns_subscription"
Expand Down
8 changes: 4 additions & 4 deletions .github/workflows/sonarcloud.yml
Original file line number Diff line number Diff line change
Expand Up @@ -135,15 +135,15 @@ jobs:
poetry run coverage xml -o ../mns_subscription-coverage.xml

- name: Run unittest with redis_sync
working-directory: redis_sync
working-directory: lambdas/redis_sync
id: redis_sync
env:
PYTHONPATH: ${{ github.workspace }}/redis_sync/src:${{ github.workspace }}/redis_sync/tests
PYTHONPATH: ${{ env.LAMBDA_PATH }}/redis_sync/src:${{ env.SHARED_PATH }}/src
continue-on-error: true
run: |
poetry install
poetry run coverage run -m unittest discover || echo "redis_sync tests failed" >> ../failed_tests.txt
poetry run coverage xml -o ../redis_sync-coverage.xml
poetry run coverage run --source=src -m unittest discover || echo "redis_sync tests failed" >> ../../failed_tests.txt
poetry run coverage xml -o ../../redis_sync-coverage.xml

- name: Run unittest with shared
working-directory: lambdas/shared
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
SHELL=/usr/bin/env bash -euo pipefail

PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS = ack_backend backend batch_processor_filter delta_backend filenameprocessor mesh_processor recordprocessor redis_sync lambdas/id_sync lambdas/shared mns_subscription
PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS = ack_backend backend batch_processor_filter delta_backend filenameprocessor mesh_processor recordprocessor lambdas/redis_sync lambdas/id_sync lambdas/shared mns_subscription
PYTHON_PROJECT_DIRS = e2e e2e_batch $(PYTHON_PROJECT_DIRS_WITH_UNIT_TESTS)

#Installs dependencies using poetry.
Expand Down
2 changes: 1 addition & 1 deletion immunisation-fhir-api.code-workspace
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@
"path": "e2e_batch"
},
{
"path": "redis_sync"
"path": "lambdas/redis_sync"
},
{
"path": "mns_subscription"
Expand Down
2 changes: 1 addition & 1 deletion lambdas/id_sync/src/id_sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@
"""

from typing import Any, Dict
from common.aws_lambda_event import AwsLambdaEvent
from common.clients import logger, STREAM_NAME
from common.log_decorator import logging_decorator
from common.aws_lambda_event import AwsLambdaEvent
from exceptions.id_sync_exception import IdSyncException
from record_processor import process_record

Expand Down
6 changes: 3 additions & 3 deletions lambdas/id_sync/src/pds_details.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@
Operations related to PDS (Patient Demographic Service)
'''
import tempfile
from common.clients import logger, secrets_manager_client
from common.cache import Cache
from os_vars import get_pds_env
from common.pds_service import PdsService
from common.authentication import AppRestrictedAuth, Service
from common.cache import Cache
from common.clients import logger, secrets_manager_client
from common.pds_service import PdsService
from exceptions.id_sync_exception import IdSyncException

pds_env = get_pds_env()
Expand Down
27 changes: 27 additions & 0 deletions lambdas/redis_sync/.vscode/settings.json.default
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
{
"python.analysis.extraPaths": [
"./src"
],
"python.testing.unittestArgs": [
"-v",
"-s",
"./",
"-p",
"test_*.py"
],
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": true,
"pylint.args": [
"--init-hook",
"import sys; sys.path.append('./src')"
],
"[makefile]": {
"editor.insertSpaces": false,
"editor.detectIndentation": false
},
"files.trimTrailingWhitespace": true,
"[python]": {
"files.trimTrailingWhitespace": true
},
"files.insertFinalNewline": true
}
32 changes: 23 additions & 9 deletions redis_sync/Dockerfile → lambdas/redis_sync/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -6,20 +6,34 @@ RUN mkdir -p /home/appuser && \
echo 'appuser:x:1001:' >> /etc/group && \
chown -R 1001:1001 /home/appuser && pip install "poetry~=2.1.4"

# Install Poetry as root
COPY poetry.lock pyproject.toml README.md ./
# Install Poetry dependencies
# Copy redis_sync Poetry files
COPY ./redis_sync/poetry.lock ./redis_sync/pyproject.toml ./
COPY ./shared/src/common ./src/common

RUN echo "Listing /var/task after source code copy:" && ls -R /var/task

# Install redis_sync dependencies
WORKDIR /var/task
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main
# -----------------------------
FROM base AS test
COPY src src
COPY tests tests
RUN poetry install --no-interaction --no-ansi --no-root && \
pytest --disable-warnings tests

# -----------------------------
FROM base AS build
COPY src .

# Set working directory back to Lambda task root
WORKDIR /var/task

# Copy shared source code
COPY ./shared/src/common ./common

# Copy redis_sync source code
COPY ./redis_sync/src .

# Set correct permissions
RUN chmod 644 $(find . -type f) && chmod 755 $(find . -type d)

# Build as non-root user
USER 1001:1001

# Set the Lambda handler
CMD ["redis_sync.handler"]
16 changes: 16 additions & 0 deletions lambdas/redis_sync/Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Silence recipe echo (@) and put this lambda's src, tests and the shared
# lambda code on PYTHONPATH so unittest discovery can import everything.
TEST_ENV := @PYTHONPATH=src:tests:../shared/src

test:
	$(TEST_ENV) python -m unittest

coverage-run:
	$(TEST_ENV) coverage run -m unittest discover -v

coverage-report:
	$(TEST_ENV) coverage report -m

coverage-html:
	$(TEST_ENV) coverage html

# These targets produce no files of the same name, so declare them phony
# to make sure make always runs them. (The previous declaration listed
# `build package`, which are not defined in this Makefile.)
.PHONY: test coverage-run coverage-report coverage-html
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@ description = ""
authors = ["s.wates <stephen.wates1@nhs.net>"]
readme = "README.md"
packages = [
{include = "src"}
{include = "src"},
{include = "common", from = "../shared/src"}
]

[tool.poetry.dependencies]
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from clients import logger
from s3_event import S3EventRecord
from redis_cacher import RedisCacher
from common.clients import logger
from common.s3_event import S3EventRecord
'''
Record Processor
This module processes individual S3 records from an event.
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
"Upload the content from a config file in S3 to ElastiCache (Redis)"

import json
from clients import redis_client
from clients import logger
from transform_map import transform_map
from s3_reader import S3Reader
from common.clients import get_redis_client, logger
from common.s3_reader import S3Reader


class RedisCacher:
Expand All @@ -25,6 +24,7 @@ def upload(bucket_name: str, file_key: str) -> dict:
# Transform
redis_mappings = transform_map(config_file_content, file_key)

redis_client = get_redis_client()
for key, mapping in redis_mappings.items():
safe_mapping = {
k: json.dumps(v) if isinstance(v, list) else v
Expand Down
53 changes: 53 additions & 0 deletions lambdas/redis_sync/src/redis_sync.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
'''
Event Processor
The Business Logic for the Redis Sync Lambda Function.
This module processes S3 events and iterates through each record to process them individually.'''
# NOTE: the module docstring must precede the imports (PEP 257); placed
# after them it is an ordinary string expression, not __doc__.
from event_read import read_event
from record_processor import process_record
from common.clients import get_redis_client, STREAM_NAME, logger
from common.log_decorator import logging_decorator
from common.s3_event import S3Event


def _process_all_records(s3_records: list) -> dict:
    """Run every S3 record through process_record and summarise the outcome.

    Returns a dict with a 'status' ("success" or "error"), a human-readable
    'message', and 'file_keys' — one entry per record whether it succeeded
    or not.
    """
    results = [process_record(record) for record in s3_records]
    file_keys = [result["file_key"] for result in results]
    total = len(results)
    failures = sum(1 for result in results if result["status"] == "error")
    if failures:
        logger.error("Processed %d records with %d errors", total, failures)
        return {
            "status": "error",
            "message": f"Processed {total} records with {failures} errors",
            "file_keys": file_keys,
        }
    logger.info("Successfully processed all %d records", total)
    return {
        "status": "success",
        "message": f"Successfully processed {total} records",
        "file_keys": file_keys,
    }


@logging_decorator(prefix="redis_sync", stream_name=STREAM_NAME)
def handler(event, _):
    """Lambda entry point for the Redis sync function.

    Handles two event shapes: an ad-hoc read request ({"read": "my-hashmap"})
    or an S3 notification carrying 'Records'. Anything else is treated as an
    empty event. Any exception is logged and converted into an error response
    rather than being propagated out of the Lambda.
    """
    empty_response = {"status": "success", "message": "No records found in event"}
    try:
        # Ad-hoc read request, e.g. {"read": "my-hashmap"}
        if "read" in event:
            return read_event(get_redis_client(), event, logger)

        if "Records" not in event:
            logger.info("No records found in event")
            return empty_response

        logger.info("Processing S3 event with %d records", len(event.get('Records', [])))
        s3_records = S3Event(event).get_s3_records()
        if not s3_records:
            logger.info("No records found in event")
            return empty_response
        return _process_all_records(s3_records)

    except Exception:
        logger.exception("Error processing S3 event")
        return {"status": "error", "message": "Error processing S3 event"}
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from clients import logger
from common.clients import logger


def transform_vaccine_map(mapping):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from constants import RedisCacheKey
from clients import logger
from transform_configs import transform_vaccine_map, transform_supplier_permissions
from common.clients import logger
'''
Transform config file to format required in REDIS cache.
'''
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ def setUp(self):
self.mock_logger_error = self.logger_error_patcher.start()
self.logger_exception_patcher = patch("logging.Logger.exception")
self.mock_logger_exception = self.logger_exception_patcher.start()
self.get_s3_records_patcher = patch("s3_event.S3Event.get_s3_records")
self.get_s3_records_patcher = patch("common.s3_event.S3Event.get_s3_records")
self.mock_get_s3_records = self.get_s3_records_patcher.start()
self.record_processor_patcher = patch("redis_sync.process_record")
self.mock_record_processor = self.record_processor_patcher.start()
Expand All @@ -40,7 +40,7 @@ def tearDown(self):
self.logger_exception_patcher.stop()

def test_handler_success(self):
with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
importlib.reload(redis_sync)
mock_event = {'Records': [self.s3_vaccine]}
self.mock_get_s3_records.return_value = [self.s3_vaccine]
Expand All @@ -53,7 +53,7 @@ def test_handler_success(self):
self.assertEqual(result["file_keys"], ['test-key'])

def test_handler_failure(self):
with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
importlib.reload(redis_sync)

mock_event = {'Records': [self.s3_vaccine]}
Expand All @@ -66,15 +66,15 @@ def test_handler_failure(self):
self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'})

def test_handler_no_records(self):
with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
importlib.reload(redis_sync)
mock_event = {'Records': []}
self.mock_get_s3_records.return_value = []
result = redis_sync.handler(mock_event, None)
self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'})

def test_handler_exception(self):
with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
importlib.reload(redis_sync)
mock_event = {'Records': [self.s3_vaccine]}
self.mock_get_s3_records.return_value = [self.s3_vaccine]
Expand All @@ -84,14 +84,14 @@ def test_handler_exception(self):
self.assertEqual(result, {'status': 'error', 'message': 'Error processing S3 event'})

def test_handler_with_empty_event(self):
with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
importlib.reload(redis_sync)
self.mock_get_s3_records.return_value = []
result = redis_sync.handler({}, None)
self.assertEqual(result, {'status': 'success', 'message': 'No records found in event'})

def test_handler_multi_record(self):
with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
importlib.reload(redis_sync)
mock_event = {'Records': [self.s3_vaccine, self.s3_supplier]}
# If you need S3EventRecord, uncomment the import and use it here
Expand All @@ -112,7 +112,7 @@ def test_handler_multi_record(self):
self.assertEqual(result['file_keys'][1], 'test-key2')

def test_handler_read_event(self):
with patch("log_decorator.logging_decorator", lambda prefix=None: (lambda f: f)):
with patch("common.log_decorator.logging_decorator", lambda prefix=None, stream_name=None: (lambda f: f)):
importlib.reload(redis_sync)
mock_event = {'read': 'myhash'}
mock_read_event_response = {'field1': 'value1'}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@
import json
from unittest.mock import patch
from redis_sync import handler
from s3_event import S3EventRecord
from constants import RedisCacheKey
from common.s3_event import S3EventRecord


class TestHandlerDecorator(unittest.TestCase):
Expand Down Expand Up @@ -33,11 +33,11 @@ def setUp(self):
self.mock_logger_error = self.logger_error_patcher.start()
self.logger_exception_patcher = patch("logging.Logger.exception")
self.mock_logger_exception = self.logger_exception_patcher.start()
self.get_s3_records_patcher = patch("s3_event.S3Event.get_s3_records")
self.get_s3_records_patcher = patch("common.s3_event.S3Event.get_s3_records")
self.mock_get_s3_records = self.get_s3_records_patcher.start()
self.record_processor_patcher = patch("redis_sync.process_record")
self.mock_record_processor = self.record_processor_patcher.start()
self.firehose_patcher = patch("log_decorator.firehose_client")
self.firehose_patcher = patch("common.log_decorator.firehose_client")
self.mock_firehose_client = self.firehose_patcher.start()
self.mock_firehose_client.put_record.return_value = True

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
import unittest
from unittest.mock import patch

from s3_event import S3EventRecord
from constants import RedisCacheKey
from common.s3_event import S3EventRecord


class TestRecordProcessor(unittest.TestCase):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ def setUp(self):
self.mock_s3_reader = self.s3_reader_patcher.start()
self.transform_map_patcher = patch("redis_cacher.transform_map")
self.mock_transform_map = self.transform_map_patcher.start()
self.redis_client_patcher = patch("redis_cacher.redis_client")
self.redis_client_patcher = patch("common.clients.redis_client")
self.mock_redis_client = self.redis_client_patcher.start()
self.logger_info_patcher = patch("logging.Logger.info")
self.mock_logger_info = self.logger_info_patcher.start()
Expand Down
2 changes: 1 addition & 1 deletion lambdas/shared/src/common/authentication.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
from enum import Enum

from .cache import Cache
from common.models.errors import UnhandledResponseError
from common.clients import logger
from common.models.errors import UnhandledResponseError


class Service(Enum):
Expand Down
Loading
Loading