From 08df6847de016f59d8c8b993a2907d48285edd32 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Thu, 4 Dec 2025 09:14:17 +0000 Subject: [PATCH 01/23] Enhance document review functionality with new validation and status updates --- lambdas/enums/document_review_status.py | 5 + lambdas/enums/lambda_error.py | 27 +++ lambdas/models/document_review.py | 11 +- lambdas/services/document_service.py | 48 +++-- .../document_upload_review_service.py | 169 +++++++++++++++- lambdas/tests/unit/handlers/conftest.py | 1 - lambdas/utils/dynamo_utils.py | 180 +++++++++++++++++- lambdas/utils/lambda_exceptions.py | 5 + 8 files changed, 411 insertions(+), 35 deletions(-) diff --git a/lambdas/enums/document_review_status.py b/lambdas/enums/document_review_status.py index 3fdc5c4519..a9e40c2848 100644 --- a/lambdas/enums/document_review_status.py +++ b/lambdas/enums/document_review_status.py @@ -4,4 +4,9 @@ class DocumentReviewStatus(StrEnum): PENDING_REVIEW = "PENDING_REVIEW" APPROVED = "APPROVED" + APPROVED_PENDING_DOCUMENTS = "APPROVED_PENDING_DOCUMENTS" REJECTED = "REJECTED" + REJECTED_DUPLICATE = "REJECTED_DUPLICATE" + REASSIGNED = "REASSIGNED" + REASSIGNED_PATIENT_UNKNOWN = "REASSIGNED_PATIENT_UNKNOWN" + NEVER_REVIEWED = "NEVER_REVIEWED" \ No newline at end of file diff --git a/lambdas/enums/lambda_error.py b/lambdas/enums/lambda_error.py index 9f3a21b507..3322e279d6 100644 --- a/lambdas/enums/lambda_error.py +++ b/lambdas/enums/lambda_error.py @@ -486,6 +486,33 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: "message": "User is unauthorised to view record", "fhir_coding": UKCoreSpineError.ACCESS_DENIED, } + """ + Errors for document review lambda + """ + DocumentReviewNotFound = { + "err_code": "DRV_4041", + "message": "Document review not found", + "fhir_coding": UKCoreSpineError.RESOURCE_NOT_FOUND, + } + DocumentReviewGeneralError = { + "err_code": "DRV_4002", + "message": "An error occurred while fetching the document review", + "fhir_coding": FhirIssueCoding.EXCEPTION, + } + UpdateDocStatusUnavailable = { + "err_code": "DRV_4003", + "message": "This Document is not available for review update", + "fhir_coding": FhirIssueCoding.FORBIDDEN, + } + DocumentReviewInvalidBody = { + "err_code": "DRV_4004", + "message": "Invalid request body", + } + DocumentReviewInvalidNhsNumber = { + "err_code": "DRV_4005", + "message": "The NHS number provided is invalid", + } + """ Errors for get ods report lambda """ diff --git a/lambdas/models/document_review.py b/lambdas/models/document_review.py index 90cec0cfd1..f265957c4f 100644 --- a/lambdas/models/document_review.py +++ b/lambdas/models/document_review.py @@ -1,11 +1,12 @@ import uuid -from typing import Optional from enums.document_review_status import DocumentReviewStatus from enums.metadata_field_names import DocumentReferenceMetadataFields from enums.snomed_codes import SnomedCodes -from pydantic import BaseModel, ConfigDict, Field -from pydantic.alias_generators import to_pascal +from pydantic import BaseModel, ConfigDict, Field, model_validator +from pydantic.alias_generators import to_camel, to_pascal +from utils.exceptions import InvalidNhsNumberException +from utils.utilities import validate_nhs_number class DocumentReviewFileDetails(BaseModel): @@ -42,8 +43,6 @@ class DocumentUploadReviewReference(BaseModel): upload_date: int files: list[DocumentReviewFileDetails] = Field(min_length=1) nhs_number: str - ttl: int | None = Field( - alias=str(DocumentReferenceMetadataFields.TTL.value), 
default=None - ) + version: int = Field(default=1) document_reference_id: str = Field(default=None) document_snomed_code_type: str = Field(default=SnomedCodes.LLOYD_GEORGE.value.code) diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py index 07dbe3ef8f..449fe7329f 100644 --- a/lambdas/services/document_service.py +++ b/lambdas/services/document_service.py @@ -106,15 +106,17 @@ def fetch_documents_from_table( return documents def get_item( - self, - document_id: str, - table_name: str = None, - model_class: type[BaseModel] = None, + self, + document_id: str, + sort_key: dict = None, + table_name: str = None, + model_class: type[BaseModel] = None, ) -> Optional[BaseModel]: - """Fetch a single document by ID from specified or configured table. + """Fetch a single document by ID from a specified or configured table. Args: document_id: The document ID to retrieve. + sort_key: Optional sort key, defaults to None. table_name: Optional table name, defaults to self.table_name. model_class: Optional model class, defaults to self.model_class. @@ -123,10 +125,12 @@ def get_item( """ table_to_use = table_name or self.table_name model_to_use = model_class or self.model_class - + document_key = {"ID": document_id} + if sort_key: + document_key.update(sort_key) try: response = self.dynamo_service.get_item( - table_name=table_to_use, key={"ID": document_id} + table_name=table_to_use, key=document_key ) if "Item" not in response: @@ -137,7 +141,6 @@ def get_item( return document except ValidationError as e: - logger.error(f"Validation error on document: {response.get('Item')}") logger.error(f"{e}") return None @@ -208,22 +211,35 @@ def delete_document_object(self, bucket: str, key: str): def update_document( self, table_name: str | None = None, - update_key: dict[str, str] | None = None, document: BaseModel = None, update_fields_name: set[str] | None = None, + condition_expression: str | Attr | ConditionBase = None, + expression_attribute_values: dict = None, + key_pair: dict = None, ): """Update document in specified or configured table.""" table_name = table_name or self.table_name - if not update_key: - update_key = {DocumentReferenceMetadataFields.ID.value: document.id} - self.dynamo_service.update_item( - table_name=table_name, - key_pair=update_key, - updated_fields=document.model_dump( + update_kwargs = { + "table_name": table_name, + "updated_fields": document.model_dump( exclude_none=True, by_alias=True, include=update_fields_name ), - ) + } + if key_pair: + update_kwargs["key_pair"] = key_pair + else: + update_kwargs["key_pair"] = { + DocumentReferenceMetadataFields.ID.value: document.id + } + + if condition_expression: + update_kwargs["condition_expression"] = condition_expression + + if expression_attribute_values: + update_kwargs["expression_attribute_values"] = expression_attribute_values + + return self.dynamo_service.update_item(**update_kwargs) def hard_delete_metadata_records( self, table_name: str, document_references: list[BaseModel] diff --git a/lambdas/services/document_upload_review_service.py b/lambdas/services/document_upload_review_service.py index 68375f67fe..077fcaaad9 100644 --- a/lambdas/services/document_upload_review_service.py +++ b/lambdas/services/document_upload_review_service.py @@ -4,20 +4,24 @@ from botocore.exceptions import ClientError from enums.document_review_status import DocumentReviewStatus from enums.dynamo_filter import AttributeOperator -from enums.lambda_error import LambdaError +from enums.metadata_field_names import 
DocumentReferenceMetadataFields +from models.document_reference import S3_PREFIX from models.document_review import DocumentUploadReviewReference from pydantic import ValidationError from services.document_service import DocumentService from utils.audit_logging_setup import LoggingService from utils.dynamo_query_filter_builder import DynamoQueryFilterBuilder -from utils.lambda_exceptions import DocumentReviewException +from utils.dynamo_utils import build_transaction_item +from utils.exceptions import DocumentReviewException logger = LoggingService(__name__) class DocumentUploadReviewService(DocumentService): """Service for handling DocumentUploadReviewReference operations.""" + DEFAULT_QUERY_LIMIT = 50 + def __init__(self): super().__init__() self._table_name = os.environ.get("DOCUMENT_REVIEW_DYNAMODB_NAME") @@ -68,7 +72,7 @@ def query_docs_pending_review_by_custodian_with_limit( except ClientError as e: logger.error(e) - raise DocumentReviewException(500, LambdaError.DocumentReviewDB) + raise DocumentReviewException("Failed to query document reviews") def _validate_review_references( self, items: list[dict] @@ -81,7 +85,9 @@ def _validate_review_references( return review_references except ValidationError as e: logger.error(e) - raise DocumentReviewException(500, LambdaError.DocumentReviewValidation) + raise DocumentReviewException( + "Failed to validate document review references" + ) def update_document_review_custodian( self, @@ -94,15 +100,166 @@ def update_document_review_custodian( for review in patient_documents: logger.info("Updating document review custodian...") - - if review.custodian != updated_ods_code: + if review.review_status != DocumentReviewStatus.PENDING_REVIEW: + pass + elif review.custodian != updated_ods_code: review.custodian = updated_ods_code self.update_document( document=review, + key_pair={"ID": review.id, "Version": review.version}, update_fields_name=review_update_field, ) + def get_document_review_by_id(self, document_id: str, document_version: int): + return self.get_item(document_id, {"Version": document_version}) + + def update_pending_review_status( + self, review_update: DocumentUploadReviewReference, field_names: set[str] + ) -> None: + self.update_review_document_with_status_filter( + review_update, + field_names, + DocumentReviewStatus.PENDING_REVIEW, + ) + + def update_approved_pending_review_status( + self, review_update: DocumentUploadReviewReference, field_names: set[str] + ) -> None: + self.update_review_document_with_status_filter( + review_update, + field_names, + DocumentReviewStatus.APPROVED_PENDING_DOCUMENTS, + ) + + def update_review_document_with_status_filter( + self, + review_update: DocumentUploadReviewReference, + field_names: set[str], + status: DocumentReviewStatus, + ): + condition_expression = ( + Attr(DocumentReferenceMetadataFields.ID.value).exists() + & Attr("NhsNumber").eq(review_update.nhs_number) + & Attr("ReviewStatus").eq(status) + ) + self.update_document_review_for_patient( + review_update, field_names, condition_expression + ) + + def update_document_review_for_patient( + self, + review_update: DocumentUploadReviewReference, + field_names: set[str], + condition_expression, + ): + try: + return self.update_document( + document=review_update, + key_pair={"ID": review_update.id, "Version": review_update.version}, + update_fields_name=field_names, + condition_expression=condition_expression, + ) + except ClientError as e: + error_code = e.response.get("Error", {}).get("Code", "") + + if error_code == 
"ConditionalCheckFailedException": + logger.error( + f"Condition check failed: Document ID {review_update.id}", + {"Result": "Failed to update document review"}, + ) + raise DocumentReviewException( + f"Document ID {review_update.id} does not meet the required conditions for update" + ) + + logger.error( + f"DynamoDB error updating document review: {str(e)}", + {"Result": "Failed to update document review"}, + ) + raise DocumentReviewException(f"Failed to update document review: {str(e)}") + + def update_document_review_with_transaction( + self, new_review_item, existing_review_item + ): + transact_items = [] + try: + new_doc_transaction = build_transaction_item( + table_name=self.table_name, + action="Update", + key={"ID": new_review_item.id, "Version": new_review_item.version}, + update_fields=new_review_item.model_dump( + exclude_none=True, by_alias=True, exclude={"version", "id"} + ), + conditions=[{"field": "ID", "operator": "attribute_not_exists"}], + ) + transact_items.append(new_doc_transaction) + + existing_update_fields = { + "review_status", + "review_date", + "reviewer", + } + existing_doc_transaction = build_transaction_item( + table_name=self.table_name, + action="Update", + key={ + "ID": existing_review_item.id, + "Version": existing_review_item.version, + }, + update_fields=existing_review_item.model_dump( + exclude_none=True, by_alias=True, include=existing_update_fields + ), + conditions=[ + { + "field": "ReviewStatus", + "operator": "=", + "value": DocumentReviewStatus.PENDING_REVIEW, + }, + { + "field": "NhsNumber", + "operator": "=", + "value": existing_review_item.nhs_number, + }, + { + "field": "Custodian", + "operator": "=", + "value": existing_review_item.custodian, + }, + ], + ) + except ValueError as e: + logger.error(f"Failed to build transaction item: {str(e)}") + raise DocumentReviewException(f"Failed to build transaction item: {str(e)}") + transact_items.append(existing_doc_transaction) + + try: + response = self.dynamo_service.transact_write_items(transact_items) + logger.info("Transaction completed successfully") + except ClientError as e: + error_code = e.response.get("Error", {}).get("Code", "") + if error_code == "TransactionCanceledException": + logger.error( + f"Condition check failed: Document ID {existing_review_item.id} ", + {"Result": "Failed to update document review"}, + ) + raise DocumentReviewException(f"Failed to update document review: {str(e)}") + return response + + def delete_document_review_files( + self, document_review: DocumentUploadReviewReference + ): + for file in document_review.files: + location_without_prefix = file.file_location.replace(S3_PREFIX, "") + bucket, file_key = location_without_prefix.split("/", 1) + try: + self.s3_service.delete_object(bucket, file_key) + except ClientError as e: + logger.warning( + f"Unable to delete file {file.file_name} from S3 due to error: {e}" + ) + logger.warning(f"Skipping file deletion for {file.file_name}") + continue + def build_review_query_filter( self, nhs_number: str | None = None, uploader: str | None = None ) -> Attr | ConditionBase: diff --git a/lambdas/tests/unit/handlers/conftest.py b/lambdas/tests/unit/handlers/conftest.py index f5f29ab3dd..0f9482042d 100755 --- a/lambdas/tests/unit/handlers/conftest.py +++ b/lambdas/tests/unit/handlers/conftest.py @@ -148,4 +148,3 @@ def mock_upload_document_iteration_3_enabled(mocker): FeatureFlags.UPLOAD_DOCUMENT_ITERATION_3_ENABLED: True } yield mock_feature_flag - diff --git a/lambdas/utils/dynamo_utils.py b/lambdas/utils/dynamo_utils.py 
index f35b998d76..bfa9acb1b4 100644 --- a/lambdas/utils/dynamo_utils.py +++ b/lambdas/utils/dynamo_utils.py @@ -95,18 +95,20 @@ def create_expression_attribute_values(attribute_field_values: dict) -> dict: return expression_attribute_values -def create_expression_value_placeholder(value: str) -> str: +def create_expression_value_placeholder(value: str, suffix: str = "") -> str: """ Creates a placeholder value for an expression attribute name :param value: Value to change into a placeholder + :param suffix: Optional suffix to add before "_val" (e.g. "_condition") - example usage: + Example usage: placeholder = create_expression_value_placeholder("VirusScanResult") - - result: - ":VirusScanResult_val" + # Result: ":VirusScanResult_val" + placeholder = create_expression_value_placeholder("VirusScanResult", "_condition") + # Result: ":VirusScanResult_condition_val" """ - return f":{inflection.camelize(value, uppercase_first_letter=True)}_val" + camelized = inflection.camelize(value, uppercase_first_letter=True) + return f":{camelized}{suffix}_val" def create_expression_attribute_placeholder(value: str) -> str: @@ -157,6 +159,172 @@ def parse_dynamo_record(dynamodb_record: Dict[str, Any]) -> Dict[str, Any]: return result +def build_mixed_condition_expression( + conditions: list[dict[str, Any]], + join_operator: str = "AND", + suffix: str = "_condition", +) -> tuple[str, dict[str, str], dict[str, Any]]: + """ + Build a condition expression with mixed operators and conditions. + + Args: + conditions: List of condition dictionaries, each with: + - "field": field name + - "operator": comparison operator or "attribute_exists"/"attribute_not_exists" + - "value": value to compare (not needed for existence checks) + Example: [ + {"field": "DocStatus", "operator": "=", "value": "final"}, + {"field": "Deleted", "operator": "attribute_not_exists"} + ] + join_operator: Logical operator to join conditions (default: "AND") + suffix: Suffix to add to value placeholders (default: "_condition") + + Returns: + Tuple of (condition_expression, expression_attribute_names, expression_attribute_values) + """ + condition_expressions = [] + condition_attribute_names = {} + condition_attribute_values = {} + + for condition in conditions: + field_name = condition["field"] + operator = condition["operator"] + field_value = condition.get("value") + + condition_placeholder = create_expression_attribute_placeholder(field_name) + condition_attribute_names[condition_placeholder] = field_name + + if operator in ["attribute_exists", "attribute_not_exists"]: + condition_expressions.append(f"{operator}({condition_placeholder})") + else: + condition_value_placeholder = create_expression_value_placeholder( + field_name, suffix + ) + condition_expressions.append( + f"{condition_placeholder} {operator} {condition_value_placeholder}" + ) + condition_attribute_values[condition_value_placeholder] = field_value + + condition_expression = f" {join_operator} ".join(condition_expressions) + + return condition_expression, condition_attribute_names, condition_attribute_values + + +def build_general_transaction_item( + table_name: str, + action: str, + key: dict | None = None, + item: dict | None = None, + update_expression: str | None = None, + condition_expression: str | None = None, + expression_attribute_names: dict | None = None, + expression_attribute_values: dict | None = None, +) -> dict[str, dict[str, Any]]: + """ + Build a general DynamoDB transaction item for any action type. + All expressions and attributes must be pre-formatted. 
+ + Args: + table_name: The name of the DynamoDB table + action: Transaction action type ('Put', 'Update', 'Delete', 'ConditionCheck') + key: The primary key of the item (required for Update, Delete, ConditionCheck) + item: The complete item to put (required for Put) + update_expression: Pre-formatted update expression (optional for Update) + condition_expression: Pre-formatted condition expression (optional) + expression_attribute_names: Pre-formatted expression attribute names (optional) + expression_attribute_values: Pre-formatted expression attribute values (optional) + + Returns: + A transaction item dict ready for transact_write_items + + Raises: + ValueError: If required parameters are missing for the specified action + """ + action = action.capitalize() + + if action not in ["Put", "Update", "Delete", "Conditioncheck"]: + raise ValueError( + f"Invalid action: {action}. Must be one of: Put, Update, Delete, ConditionCheck" + ) + + transaction_item: dict[str, dict[str, Any]] = {action: {"TableName": table_name}} + + if action == "Put": + if item is None: + raise ValueError("'item' is required for Put action") + transaction_item[action]["Item"] = item + + elif action == "Update": + if key is None: + raise ValueError("'key' is required for Update action") + transaction_item[action]["Key"] = key + if update_expression: + transaction_item[action]["UpdateExpression"] = update_expression + + elif action in ["Delete", "Conditioncheck"]: + if key is None: + raise ValueError(f"'key' is required for {action} action") + transaction_item[action]["Key"] = key + + if condition_expression: + transaction_item[action]["ConditionExpression"] = condition_expression + + if expression_attribute_names: + transaction_item[action][ + "ExpressionAttributeNames" + ] = expression_attribute_names + + if expression_attribute_values: + transaction_item[action][ + "ExpressionAttributeValues" + ] = expression_attribute_values + + return transaction_item + + +def build_transaction_item( + table_name: str, + action: str, + key: dict | None = None, + item: dict | None = None, + update_fields: dict | None = None, + conditions: list[dict] | None = None, + condition_join_operator: str = "AND", +) -> dict: + update_expression = None + condition_expression = None + expression_attribute_names = {} + expression_attribute_values = {} + + if action.lower() == "update" and update_fields: + field_names = list(update_fields.keys()) + update_expression = create_update_expression(field_names) + _, update_attr_names = create_expressions(field_names) + update_attr_values = create_expression_attribute_values(update_fields) + + expression_attribute_names.update(update_attr_names) + expression_attribute_values.update(update_attr_values) + + if conditions: + condition_expr, condition_attr_names, condition_attr_values = ( + build_mixed_condition_expression(conditions, condition_join_operator) + ) + condition_expression = condition_expr + expression_attribute_names.update(condition_attr_names) + expression_attribute_values.update(condition_attr_values) + + return build_general_transaction_item( + table_name=table_name, + action=action, + key=key, + item=item, + update_expression=update_expression, + condition_expression=condition_expression, + expression_attribute_names=expression_attribute_names or None, + expression_attribute_values=expression_attribute_values or None, + ) + + class DocTypeTableRouter: def __init__(self): self._define_tables() diff --git a/lambdas/utils/lambda_exceptions.py b/lambdas/utils/lambda_exceptions.py index 
b53ee4c7fe..4a3c51c8ac 100644 --- a/lambdas/utils/lambda_exceptions.py +++ b/lambdas/utils/lambda_exceptions.py @@ -115,3 +115,8 @@ class DocumentReviewException(LambdaException): class GetDocumentReviewException(LambdaException): pass + + +class UpdateDocumentReviewException(LambdaException): + pass + From c23723904f06773df72500e881a1c97033eebde8 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Thu, 4 Dec 2025 13:21:18 +0000 Subject: [PATCH 02/23] Update document review custodian handling with new status and timestamp logic --- .../document_upload_review_service.py | 39 ++++++++++----- .../test_document_upload_review_service.py | 48 ++++++++++++++++++- lambdas/utils/exceptions.py | 5 ++ 3 files changed, 77 insertions(+), 15 deletions(-) diff --git a/lambdas/services/document_upload_review_service.py b/lambdas/services/document_upload_review_service.py index 077fcaaad9..862bcc11df 100644 --- a/lambdas/services/document_upload_review_service.py +++ b/lambdas/services/document_upload_review_service.py @@ -1,4 +1,5 @@ import os +from datetime import datetime, timezone from boto3.dynamodb.conditions import Attr, ConditionBase from botocore.exceptions import ClientError @@ -94,22 +95,32 @@ def update_document_review_custodian( patient_documents: list[DocumentUploadReviewReference], updated_ods_code: str, ): - review_update_field = {"custodian"} if not patient_documents: return + review_update_field = {"custodian"} for review in patient_documents: - logger.info("Updating document review custodian...") - if review.review_status != DocumentReviewStatus.PENDING_REVIEW: - pass - elif review.custodian != updated_ods_code: - review.custodian = updated_ods_code - - self.update_document( - document=review, - key_pair={"ID": review.id, "Version": review.version}, - update_fields_name=review_update_field, - ) + if review.custodian != updated_ods_code: + logger.info("Updating document review custodian...") + if review.review_status == DocumentReviewStatus.PENDING_REVIEW: + new_document_review = review.model_copy(deep=True) + new_document_review.version = review.version + 1 + new_document_review.custodian = updated_ods_code + + review_date = int(datetime.now(timezone.utc).timestamp()) + review.review_status = DocumentReviewStatus.NEVER_REVIEWED + review.review_date = review_date + review.reviewer = review.custodian + review.custodian = updated_ods_code + self.update_document_review_with_transaction(new_review_item=new_document_review, existing_review_item=review) + else: + review.custodian = updated_ods_code + + self.update_document( + document=review, + key_pair={"ID": review.id, "Version": review.version}, + update_fields_name=review_update_field, + ) def get_document_review_by_id(self, document_id: str, document_version: int): return self.get_item(document_id, {"Version": document_version}) @@ -179,7 +190,7 @@ def update_document_review_for_patient( raise DocumentReviewException(f"Failed to update document review: {str(e)}") def update_document_review_with_transaction( - self, new_review_item, existing_review_item + self, new_review_item, existing_review_item, additional_update_fields=None ): transact_items = [] try: @@ -199,6 +210,8 @@ def update_document_review_with_transaction( "review_date", "reviewer", } + if additional_update_fields: + existing_update_fields.update(additional_update_fields) existing_doc_transaction = build_transaction_item( table_name=self.table_name, action="Update", diff --git 
a/lambdas/tests/unit/services/test_document_upload_review_service.py b/lambdas/tests/unit/services/test_document_upload_review_service.py index 96b1cc1f29..de2d780b2e 100644 --- a/lambdas/tests/unit/services/test_document_upload_review_service.py +++ b/lambdas/tests/unit/services/test_document_upload_review_service.py @@ -2,6 +2,7 @@ import pytest from boto3.dynamodb.conditions import Attr +from freezegun import freeze_time from models.document_review import DocumentUploadReviewReference from services.document_upload_review_service import DocumentUploadReviewService from tests.unit.conftest import ( @@ -37,6 +38,8 @@ def mock_document_review_references(): review = MagicMock(spec=DocumentUploadReviewReference) review.id = f"review-id-{i}" review.nhs_number = TEST_NHS_NUMBER + review.review_status = "APPROVED" + review.version = 1 review.custodian = TEST_ODS_CODE reviews.append(review) return reviews @@ -79,7 +82,7 @@ def test_update_document_review_custodian_updates_all_documents( # Verify update_document was called with the correct parameters for review in mock_document_review_references: mock_update_document.assert_any_call( - document=review, update_fields_name={"custodian"} + document=review, update_fields_name={"custodian"}, key_pair={"ID": review.id, "Version": review.version}, ) @@ -156,15 +159,56 @@ def test_update_document_review_custodian_single_document(mock_service, mocker): single_review = MagicMock(spec=DocumentUploadReviewReference) single_review.id = "single-review-id" single_review.custodian = TEST_ODS_CODE + single_review.version = 1 + single_review.review_status = "APPROVED" mock_service.update_document_review_custodian([single_review], NEW_ODS_CODE) assert single_review.custodian == NEW_ODS_CODE mock_update_document.assert_called_once_with( - document=single_review, update_fields_name={"custodian"} + document=single_review, update_fields_name={"custodian"}, key_pair={"ID": single_review.id, "Version": single_review.version}, ) +@freeze_time("2024-01-15 10:30:00") +def test_update_document_review_custodian_with_pending_review_status( + mock_service, mocker +): + mock_update_document = mocker.patch.object(mock_service, "update_document") + mock_transaction_update = mocker.patch.object( + mock_service, "update_document_review_with_transaction" + ) + + expected_timestamp = 1705314600 + + pending_review = MagicMock(spec=DocumentUploadReviewReference) + pending_review.id = "pending-review-id" + pending_review.custodian = TEST_ODS_CODE + pending_review.version = 1 + pending_review.review_status = "PENDING_REVIEW" + + new_review_copy = MagicMock(spec=DocumentUploadReviewReference) + pending_review.model_copy = MagicMock(return_value=new_review_copy) + + mock_service.update_document_review_custodian([pending_review], NEW_ODS_CODE) + + pending_review.model_copy.assert_called_once_with(deep=True) + + assert new_review_copy.version == 2 + assert new_review_copy.custodian == NEW_ODS_CODE + + assert pending_review.review_status == "NEVER_REVIEWED" + assert pending_review.review_date == expected_timestamp + assert pending_review.reviewer == TEST_ODS_CODE + assert pending_review.custodian == NEW_ODS_CODE + + mock_transaction_update.assert_called_once_with( + new_review_item=new_review_copy, existing_review_item=pending_review + ) + + mock_update_document.assert_not_called() + + def test_build_review_query_filter_creates_filter_from_nhs_number(mock_service): expected = Attr("ReviewStatus").eq("PENDING_REVIEW") & Attr("NhsNumber").eq( TEST_NHS_NUMBER diff --git 
a/lambdas/utils/exceptions.py b/lambdas/utils/exceptions.py index 03a6faf662..2ea43e3e23 100644 --- a/lambdas/utils/exceptions.py +++ b/lambdas/utils/exceptions.py @@ -167,6 +167,11 @@ class FhirDocumentReferenceException(Exception): class TransactionConflictException(Exception): pass + +class DocumentReviewException(Exception): + pass + + class MigrationUnrecoverableException(Exception): def __init__(self, message: str, item_id: str): super().__init__(message) From b32c424f1794914f0f04dae4f666969fd81664b6 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Fri, 5 Dec 2025 10:50:54 +0000 Subject: [PATCH 03/23] Remove unnecessary assertion from document service tests --- lambdas/tests/unit/services/test_document_service.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lambdas/tests/unit/services/test_document_service.py b/lambdas/tests/unit/services/test_document_service.py index a68e35d7f3..13064ec8d2 100644 --- a/lambdas/tests/unit/services/test_document_service.py +++ b/lambdas/tests/unit/services/test_document_service.py @@ -492,7 +492,6 @@ def test_get_item_returns_none( table_name=MOCK_LG_TABLE_NAME, key={"ID": document_id} ) assert result is None - assert expected_log in caplog.text @pytest.mark.parametrize( From 9374c2cdfeccca6f413e7770d8354b45d9f36ec7 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Fri, 5 Dec 2025 10:55:06 +0000 Subject: [PATCH 04/23] Rename 'update_key' to 'key_pair' in document update methods for consistency --- lambdas/services/upload_document_reference_service.py | 2 +- .../unit/services/test_pdm_upload_document_reference_service.py | 2 +- .../unit/services/test_upload_document_reference_service.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lambdas/services/upload_document_reference_service.py b/lambdas/services/upload_document_reference_service.py index 63993afa20..e9f95748d0 100644 --- a/lambdas/services/upload_document_reference_service.py +++ b/lambdas/services/upload_document_reference_service.py @@ -462,7 +462,7 @@ def _update_dynamo_table( self.document_service.update_document( table_name=self.table_name, - update_key=update_key, + key_pair=update_key, document=document, update_fields_name=update_fields, ) diff --git a/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py b/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py index 5b57f808c1..813b2bbdd1 100644 --- a/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py +++ b/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py @@ -436,7 +436,7 @@ def test_update_dynamo_table_clean_scan_result( pdm_service.document_service.update_document.assert_called_once_with( table_name=MOCK_PDM_TABLE_NAME, - update_key={"NhsNumber": "9000000001", "ID": "test-doc-id"}, + key_pair={"NhsNumber": "9000000001", "ID": "test-doc-id"}, document=mock_pdm_document_reference, update_fields_name={ "virus_scanner_result", diff --git a/lambdas/tests/unit/services/test_upload_document_reference_service.py b/lambdas/tests/unit/services/test_upload_document_reference_service.py index 266cba6345..fc8a9c979a 100644 --- a/lambdas/tests/unit/services/test_upload_document_reference_service.py +++ b/lambdas/tests/unit/services/test_upload_document_reference_service.py @@ -400,7 +400,7 @@ def test_update_dynamo_table_clean_scan_result(service, mock_document_reference) service.document_service.update_document.assert_called_once_with( 
table_name=MOCK_LG_TABLE_NAME, document=mock_document_reference, - update_key=None, + key_pair=None, update_fields_name={ "virus_scanner_result", "doc_status", From 22e9c6c7623165af3526df642ea3f62538f353ba Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Fri, 5 Dec 2025 14:45:14 +0000 Subject: [PATCH 05/23] Refactor document review custodian update logic with improved handling and logging --- lambdas/services/document_service.py | 10 +- .../document_upload_review_service.py | 84 ++++++-- .../test_document_upload_review_service.py | 182 +++++++++++------- 3 files changed, 179 insertions(+), 97 deletions(-) diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py index 449fe7329f..d0941c0048 100644 --- a/lambdas/services/document_service.py +++ b/lambdas/services/document_service.py @@ -106,11 +106,11 @@ def fetch_documents_from_table( return documents def get_item( - self, - document_id: str, - sort_key: dict = None, - table_name: str = None, - model_class: type[BaseModel] = None, + self, + document_id: str, + sort_key: dict = None, + table_name: str = None, + model_class: type[BaseModel] = None, ) -> Optional[BaseModel]: """Fetch a single document by ID from a specified or configured table. diff --git a/lambdas/services/document_upload_review_service.py b/lambdas/services/document_upload_review_service.py index 862bcc11df..0bb6095925 100644 --- a/lambdas/services/document_upload_review_service.py +++ b/lambdas/services/document_upload_review_service.py @@ -96,32 +96,78 @@ def update_document_review_custodian( updated_ods_code: str, ): if not patient_documents: + logger.info("No documents to update") return review_update_field = {"custodian"} for review in patient_documents: - if review.custodian != updated_ods_code: - logger.info("Updating document review custodian...") + if review.custodian == updated_ods_code: + logger.info( + f"Custodian {updated_ods_code} already assigned to review ID: {review.id}" + ) + continue + + try: + logger.info( + f"Updating document review custodian for review ID: {review.id}", + { + "current_custodian": review.custodian, + "new_custodian": updated_ods_code, + }, + ) + if review.review_status == DocumentReviewStatus.PENDING_REVIEW: - new_document_review = review.model_copy(deep=True) - new_document_review.version = review.version + 1 - new_document_review.custodian = updated_ods_code - - review_date = int(datetime.now(timezone.utc).timestamp()) - review.review_status = DocumentReviewStatus.NEVER_REVIEWED - review.review_date = review_date - review.reviewer = review.custodian - review.custodian = updated_ods_code - self.update_document_review_with_transaction(new_review_item=new_document_review, existing_review_item=review) + self._handle_pending_review_custodian_update( + review, updated_ods_code, review_update_field + ) else: - review.custodian = updated_ods_code - - self.update_document( - document=review, - key_pair={"ID": review.id, "Version": review.version}, - update_fields_name=review_update_field, + self._handle_standard_custodian_update( + review, updated_ods_code, review_update_field ) + except (ClientError, DocumentReviewException) as e: + logger.error( + f"Failed to update custodian for review ID: {review.id}", + {"error": str(e)}, + ) + continue + + def _handle_pending_review_custodian_update( + self, + review: DocumentUploadReviewReference, + updated_ods_code: str, + review_update_field: set[str], + ) -> None: + new_document_review = review.model_copy(deep=True) + 
new_document_review.version = review.version + 1 + new_document_review.custodian = updated_ods_code + + review_date = int(datetime.now(timezone.utc).timestamp()) + review.review_status = DocumentReviewStatus.NEVER_REVIEWED + review.review_date = review_date + review.reviewer = review.custodian + review.custodian = updated_ods_code + + self.update_document_review_with_transaction( + new_review_item=new_document_review, + existing_review_item=review, + additional_update_fields=review_update_field, + ) + + def _handle_standard_custodian_update( + self, + review: DocumentUploadReviewReference, + updated_ods_code: str, + update_fields: set[str], + ) -> None: + review.custodian = updated_ods_code + + self.update_document( + document=review, + key_pair={"ID": review.id, "Version": review.version}, + update_fields_name=update_fields, + ) + def get_document_review_by_id(self, document_id: str, document_version: int): return self.get_item(document_id, {"Version": document_version}) @@ -236,7 +282,7 @@ def update_document_review_with_transaction( { "field": "Custodian", "operator": "=", - "value": existing_review_item.custodian, + "value": existing_review_item.reviewer, }, ], ) diff --git a/lambdas/tests/unit/services/test_document_upload_review_service.py b/lambdas/tests/unit/services/test_document_upload_review_service.py index de2d780b2e..7a9557b986 100644 --- a/lambdas/tests/unit/services/test_document_upload_review_service.py +++ b/lambdas/tests/unit/services/test_document_upload_review_service.py @@ -2,6 +2,8 @@ import pytest from boto3.dynamodb.conditions import Attr +from botocore.exceptions import ClientError +from enums.document_review_status import DocumentReviewStatus from freezegun import freeze_time from models.document_review import DocumentUploadReviewReference from services.document_upload_review_service import DocumentUploadReviewService @@ -10,6 +12,7 @@ MOCK_DOCUMENT_REVIEW_TABLE, TEST_NHS_NUMBER, ) +from utils.exceptions import DocumentReviewException TEST_ODS_CODE = "Y12345" NEW_ODS_CODE = "Z98765" @@ -47,7 +50,6 @@ def mock_document_review_references(): def test_table_name(mock_service): """Test that table_name property returns correct environment variable.""" - assert mock_service.table_name == MOCK_DOCUMENT_REVIEW_TABLE @@ -58,51 +60,51 @@ def test_model_class(mock_service): def test_s3_bucket(mock_service, monkeypatch): """Test that s3_bucket property returns the correct environment variable.""" - assert mock_service.s3_bucket == MOCK_DOCUMENT_REVIEW_BUCKET def test_update_document_review_custodian_updates_all_documents( mock_service, mock_document_review_references, mocker ): - """Test that update_document_review_custodian updates all documents with different custodian.""" - mock_update_document = mocker.patch.object(mock_service, "update_document") + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) mock_service.update_document_review_custodian( mock_document_review_references, NEW_ODS_CODE ) - # Verify that all documents were updated - assert mock_update_document.call_count == 3 + assert mock_handle_standard.call_count == 3 - # Verify each document's custodian was changed for review in mock_document_review_references: - assert review.custodian == NEW_ODS_CODE - - # Verify update_document was called with the correct parameters - for review in mock_document_review_references: - mock_update_document.assert_any_call( - document=review, update_fields_name={"custodian"}, key_pair={"ID": review.id, "Version": review.version}, - ) 
+ mock_handle_standard.assert_any_call(review, NEW_ODS_CODE, {"custodian"}) def test_update_document_review_custodian_empty_list(mock_service, mocker): - """Test that update_document_review_custodian handles an empty list gracefully.""" - mock_update_document = mocker.patch.object(mock_service, "update_document") + + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) + mock_handle_pending = mocker.patch.object( + mock_service, "_handle_pending_review_custodian_update" + ) mock_service.update_document_review_custodian([], NEW_ODS_CODE) - # Verify that update_document was not called - mock_update_document.assert_not_called() + mock_handle_standard.assert_not_called() + mock_handle_pending.assert_not_called() def test_update_document_review_custodian_no_changes_needed( mock_service, mock_document_review_references, mocker ): - """Test that update_document_review_custodian skips documents that already have the correct custodian.""" - mock_update_document = mocker.patch.object(mock_service, "update_document") + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) + mock_handle_pending = mocker.patch.object( + mock_service, "_handle_pending_review_custodian_update" + ) - # Set all reviews to already have the target custodian for review in mock_document_review_references: review.custodian = NEW_ODS_CODE @@ -110,103 +112,137 @@ def test_update_document_review_custodian_no_changes_needed( mock_document_review_references, NEW_ODS_CODE ) - # Verify that update_document was not called since no changes needed - mock_update_document.assert_not_called() + mock_handle_standard.assert_not_called() + mock_handle_pending.assert_not_called() def test_update_document_review_custodian_mixed_custodians( mock_service, mock_document_review_references, mocker ): - """Test that update_document_review_custodian only updates documents that need updating.""" - mock_update_document = mocker.patch.object(mock_service, "update_document") - - # Set the first review to already have the new custodian + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) mock_document_review_references[0].custodian = NEW_ODS_CODE - # Keep the other two with the old custodian mock_service.update_document_review_custodian( mock_document_review_references, NEW_ODS_CODE ) + assert mock_handle_standard.call_count == 2 - # Verify that update_document was only called twice (for the 2 documents that needed updating) - assert mock_update_document.call_count == 2 - # Verify all documents now have the new custodian - for review in mock_document_review_references: - assert review.custodian == NEW_ODS_CODE - - -def test_update_document_review_custodian_logging( +def test_update_document_review_custodian_continues_on_error( mock_service, mock_document_review_references, mocker ): - """Test that update_document_review_custodian logs appropriately.""" - mocker.patch.object(mock_service, "update_document") - mock_logger = mocker.patch("services.document_upload_review_service.logger") + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) + + mock_handle_standard.side_effect = [ + DocumentReviewException("Test error"), + ClientError( + {"Error": {"Code": "ConditionalCheckFailedException"}}, "UpdateItem" + ), + None, + ] mock_service.update_document_review_custodian( mock_document_review_references, NEW_ODS_CODE ) + assert mock_handle_standard.call_count == 3 - # Verify 
logging was called for each document - assert mock_logger.info.call_count == 3 - mock_logger.info.assert_any_call("Updating document review custodian...") - -def test_update_document_review_custodian_single_document(mock_service, mocker): - """Test update_document_review_custodian with a single document.""" +def test_handle_standard_custodian_update_updates_document(mock_service, mocker): mock_update_document = mocker.patch.object(mock_service, "update_document") - single_review = MagicMock(spec=DocumentUploadReviewReference) - single_review.id = "single-review-id" - single_review.custodian = TEST_ODS_CODE - single_review.version = 1 - single_review.review_status = "APPROVED" + review = DocumentUploadReviewReference.model_construct() + review.id = "test-id" + review.version = 1 + review.custodian = TEST_ODS_CODE + + update_fields = {"custodian"} + + mock_service._handle_standard_custodian_update(review, NEW_ODS_CODE, update_fields) - mock_service.update_document_review_custodian([single_review], NEW_ODS_CODE) + assert review.custodian == NEW_ODS_CODE - assert single_review.custodian == NEW_ODS_CODE mock_update_document.assert_called_once_with( - document=single_review, update_fields_name={"custodian"}, key_pair={"ID": single_review.id, "Version": single_review.version}, + document=review, + key_pair={"ID": review.id, "Version": review.version}, + update_fields_name=update_fields, ) +def test_handle_standard_custodian_update_with_client_error(mock_service, mocker): + mock_update_document = mocker.patch.object(mock_service, "update_document") + mock_update_document.side_effect = ClientError( + {"Error": {"Code": "ConditionalCheckFailedException"}}, "UpdateItem" + ) + + review = DocumentUploadReviewReference.model_construct() + review.id = "test-id" + review.version = 1 + review.custodian = TEST_ODS_CODE + + with pytest.raises(ClientError): + mock_service._handle_standard_custodian_update( + review, NEW_ODS_CODE, {"custodian"} + ) + + @freeze_time("2024-01-15 10:30:00") -def test_update_document_review_custodian_with_pending_review_status( +def test_handle_pending_review_custodian_update_creates_new_version( mock_service, mocker ): - mock_update_document = mocker.patch.object(mock_service, "update_document") mock_transaction_update = mocker.patch.object( mock_service, "update_document_review_with_transaction" ) expected_timestamp = 1705314600 - pending_review = MagicMock(spec=DocumentUploadReviewReference) - pending_review.id = "pending-review-id" - pending_review.custodian = TEST_ODS_CODE - pending_review.version = 1 - pending_review.review_status = "PENDING_REVIEW" + review = DocumentUploadReviewReference.model_construct() + review.id = "pending-review-id" + review.custodian = TEST_ODS_CODE + review.version = 1 + review.review_status = DocumentReviewStatus.PENDING_REVIEW - new_review_copy = MagicMock(spec=DocumentUploadReviewReference) - pending_review.model_copy = MagicMock(return_value=new_review_copy) + new_review_copy = review.model_copy(deep=True) + new_review_copy.version = 2 + new_review_copy.custodian = NEW_ODS_CODE - mock_service.update_document_review_custodian([pending_review], NEW_ODS_CODE) + mock_service._handle_pending_review_custodian_update( + review, NEW_ODS_CODE, {"custodian"} + ) - pending_review.model_copy.assert_called_once_with(deep=True) + assert review.review_status == DocumentReviewStatus.NEVER_REVIEWED + assert review.review_date == expected_timestamp + assert review.reviewer == TEST_ODS_CODE + assert review.custodian == NEW_ODS_CODE - assert 
new_review_copy.version == 2 - assert new_review_copy.custodian == NEW_ODS_CODE + mock_transaction_update.assert_called_once_with( + new_review_item=new_review_copy, + existing_review_item=review, + additional_update_fields={"custodian"}, + ) - assert pending_review.review_status == "NEVER_REVIEWED" - assert pending_review.review_date == expected_timestamp - assert pending_review.reviewer == TEST_ODS_CODE - assert pending_review.custodian == NEW_ODS_CODE - mock_transaction_update.assert_called_once_with( - new_review_item=new_review_copy, existing_review_item=pending_review +def test_handle_pending_review_custodian_update_with_transaction_failure( + mock_service, mocker +): + mock_transaction_update = mocker.patch.object( + mock_service, "update_document_review_with_transaction" ) + mock_transaction_update.side_effect = DocumentReviewException("Transaction failed") - mock_update_document.assert_not_called() + review = MagicMock(spec=DocumentUploadReviewReference) + review.id = "test-id" + review.custodian = TEST_ODS_CODE + review.version = 1 + review.review_status = DocumentReviewStatus.PENDING_REVIEW + + with pytest.raises(DocumentReviewException): + mock_service._handle_pending_review_custodian_update( + review, NEW_ODS_CODE, {"custodian"} + ) def test_build_review_query_filter_creates_filter_from_nhs_number(mock_service): From e4443b5a4caedf3fd64746b5293e01db801ac795 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Mon, 8 Dec 2025 16:28:02 +0000 Subject: [PATCH 06/23] [PRMP-908] Refactor error handling in the document upload review service to use centralised error messages --- lambdas/enums/lambda_error.py | 29 +++++++++++-------- .../document_upload_review_service.py | 18 ++++++------ 2 files changed, 26 insertions(+), 21 deletions(-) diff --git a/lambdas/enums/lambda_error.py b/lambdas/enums/lambda_error.py index 3322e279d6..bcad8af0c7 100644 --- a/lambdas/enums/lambda_error.py +++ b/lambdas/enums/lambda_error.py @@ -1,15 +1,20 @@ -from enum import Enum +from enum import Enum, StrEnum from typing import Optional from enums.fhir.fhir_issue_type import FhirIssueCoding, UKCoreSpineError from utils.error_response import ErrorResponse from utils.request_context import request_context - -class LambdaError(Enum): +class ErrorMessage(StrEnum): MISSING_POST = "Missing POST request body" MISSING_KEY = "An error occurred due to missing key" RETRIEVE_DOCUMENTS = "Unable to retrieve documents for patient" + FAILED_TO_QUERY_DYNAMO = "Failed to query DynamoDB" + FAILED_TO_VALIDATE = "Failed to validate data" + FAILED_TO_UPDATE_DYNAMO = "Failed to update DynamoDB" + FAILED_TO_CREATE_TRANSACTION = "Failed to create transaction" + +class LambdaError(Enum): def create_error_response( self, params: Optional[dict] = None, **kwargs @@ -235,7 +240,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } ManifestMissingBody = { "err_code": "DMS_4002", - "message": MISSING_POST, + "message": ErrorMessage.MISSING_POST, } ManifestFilterDocumentReferences = { "err_code": "DMS_4003", @@ -243,7 +248,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } ManifestMissingJobId = { "err_code": "DMS_4004", - "message": MISSING_KEY, + "message": ErrorMessage.MISSING_KEY, } ManifestMissingJob = { "err_code": "DMS_4005", @@ -267,7 +272,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } StitchNoService = { "err_code": "LGS_5001", - "message": RETRIEVE_DOCUMENTS, + "message": 
ErrorMessage.RETRIEVE_DOCUMENTS, } StitchClient = { "err_code": "LGS_5002", @@ -275,11 +280,11 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } StitchDB = { "err_code": "LGS_5003", - "message": RETRIEVE_DOCUMENTS, + "message": ErrorMessage.RETRIEVE_DOCUMENTS, } StitchValidation = { "err_code": "LGS_5004", - "message": RETRIEVE_DOCUMENTS, + "message": ErrorMessage.RETRIEVE_DOCUMENTS, } StitchCloudFront = { "err_code": "LGS_5005", @@ -320,7 +325,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: """ FeedbackMissingBody = { "err_code": "SFB_4001", - "message": MISSING_POST, + "message": ErrorMessage.MISSING_POST, } FeedbackInvalidBody = { @@ -603,7 +608,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } DocTypeKey = { "err_code": "VDT_4003", - "message": MISSING_KEY, + "message": ErrorMessage.MISSING_KEY, } PatientIdInvalid = { "err_code": "PN_4001", @@ -612,7 +617,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } PatientIdNoKey = { "err_code": "PN_4002", - "message": MISSING_KEY, + "message": ErrorMessage.MISSING_KEY, "fhir_coding": UKCoreSpineError.MISSING_VALUE, } PatientIdMismatch = { @@ -662,7 +667,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: """ DocumentReviewDB = { "err_code": "SDR_5001", - "message": RETRIEVE_DOCUMENTS, + "message": ErrorMessage.RETRIEVE_DOCUMENTS, } DocumentReviewValidation = { diff --git a/lambdas/services/document_upload_review_service.py b/lambdas/services/document_upload_review_service.py index 0bb6095925..c3cf9f1c64 100644 --- a/lambdas/services/document_upload_review_service.py +++ b/lambdas/services/document_upload_review_service.py @@ -5,6 +5,7 @@ from botocore.exceptions import ClientError from enums.document_review_status import DocumentReviewStatus from enums.dynamo_filter import AttributeOperator +from enums.lambda_error import LambdaError, ErrorMessage from enums.metadata_field_names import DocumentReferenceMetadataFields from models.document_reference import S3_PREFIX from models.document_review import DocumentUploadReviewReference @@ -73,7 +74,7 @@ def query_docs_pending_review_by_custodian_with_limit( except ClientError as e: logger.error(e) - raise DocumentReviewException("Failed to query document reviews") + raise DocumentReviewException(ErrorMessage.FAILED_TO_QUERY_DYNAMO) def _validate_review_references( self, items: list[dict] @@ -87,7 +88,7 @@ def _validate_review_references( except ValidationError as e: logger.error(e) raise DocumentReviewException( - "Failed to validate document review references" + ErrorMessage.FAILED_TO_VALIDATE.value ) def update_document_review_custodian( @@ -219,21 +220,19 @@ def update_document_review_for_patient( ) except ClientError as e: error_code = e.response.get("Error", {}).get("Code", "") - + logger.error(e) if error_code == "ConditionalCheckFailedException": logger.error( f"Condition check failed: Document ID {review_update.id}", {"Result": "Failed to update document review"}, ) - raise DocumentReviewException( - f"Document ID {review_update.id} does not meet the required conditions for update" - ) + raise DocumentReviewException(ErrorMessage.FAILED_TO_UPDATE_DYNAMO) logger.error( f"DynamoDB error updating document review: {str(e)}", {"Result": "Failed to update document review"}, ) - raise DocumentReviewException(f"Failed to update document review: {str(e)}") + raise DocumentReviewException(ErrorMessage.FAILED_TO_UPDATE_DYNAMO) def 
update_document_review_with_transaction( self, new_review_item, existing_review_item, additional_update_fields=None @@ -288,20 +287,21 @@ def update_document_review_with_transaction( ) except ValueError as e: logger.error(f"Failed to build transaction item: {str(e)}") - raise DocumentReviewException(f"Failed to build transaction item: {str(e)}") + raise DocumentReviewException(ErrorMessage.FAILED_TO_CREATE_TRANSACTION) transact_items.append(existing_doc_transaction) try: response = self.dynamo_service.transact_write_items(transact_items) logger.info("Transaction completed successfully") except ClientError as e: + logger.error(f"Transaction failed: {str(e)}") error_code = e.response.get("Error", {}).get("Code", "") if error_code == "TransactionCanceledException": logger.error( f"Condition check failed: Document ID {existing_review_item.id} ", {"Result": "Failed to update document review"}, ) - raise DocumentReviewException(f"Failed to update document review: {str(e)}") + raise DocumentReviewException(ErrorMessage.FAILED_TO_UPDATE_DYNAMO) return response def delete_document_review_files( From a3d612f6f5f2507b961e9dcc185a98aed499b276 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Tue, 9 Dec 2025 13:49:40 +0000 Subject: [PATCH 07/23] [PRMP-908] Update document review tests to include reviewer field and improve error handling --- .../test_document_upload_review_service.py | 42 +++++++++---------- 1 file changed, 19 insertions(+), 23 deletions(-) diff --git a/lambdas/tests/unit/services/test_document_upload_review_service.py b/lambdas/tests/unit/services/test_document_upload_review_service.py index 7d94e9a8e0..9fbc4dfc2c 100644 --- a/lambdas/tests/unit/services/test_document_upload_review_service.py +++ b/lambdas/tests/unit/services/test_document_upload_review_service.py @@ -53,6 +53,7 @@ def mock_review_update(): review_update.nhs_number = TEST_NHS_NUMBER review_update.review_status = DocumentReviewStatus.APPROVED review_update.document_reference_id = "test-doc-ref-id" + review_update.reviewer = TEST_ODS_CODE return review_update @@ -139,9 +140,6 @@ def test_update_document_review_custodian_mixed_custodians( assert mock_handle_standard.call_count == 2 - for review in mock_document_review_references: - assert review.custodian == NEW_ODS_CODE - def test_update_document_review_custodian_continues_on_error( mock_service, mock_document_review_references, mocker ): @@ -246,7 +244,7 @@ def test_update_document_review_for_patient_builds_correct_condition_expression( assert condition_expr == expected_condition assert call_args.kwargs["update_fields_name"] == field_names assert call_args.kwargs["document"] == mock_review_update - assert call_args.kwargs["update_key"] == { + assert call_args.kwargs["key_pair"] == { "ID": mock_review_update.id, "Version": mock_review_update.version, } @@ -281,7 +279,7 @@ def test_update_document_review_for_patient_conditional_check_failed( assert condition_expr == expected_condition assert call_args.kwargs["update_fields_name"] == field_names assert call_args.kwargs["document"] == mock_review_update - assert call_args.kwargs["update_key"] == { + assert call_args.kwargs["key_pair"] == { "ID": mock_review_update.id, "Version": mock_review_update.version, } @@ -306,6 +304,8 @@ def test_update_document_review_for_patient_other_client_error( ) + + def test_update_document_review_with_transaction_builds_correct_items( mock_service, mock_review_update, mocker ): @@ -357,7 +357,6 @@ def 
test_update_document_review_with_transaction_builds_correct_items( ] assert second_call.kwargs["conditions"] == expected_conditions - def test_delete_document_review_files_success(mock_service, mocker): mock_delete_object = mocker.patch.object(mock_service.s3_service, "delete_object") @@ -407,13 +406,26 @@ def test_delete_document_review_files_handles_s3_error(mock_service, mocker): def test_update_document_review_with_transaction_transaction_cancelled( mock_service, mock_review_update, mocker ): - """Test handling of TransactionCanceledException.""" client_error = ClientError( {"Error": {"Code": "TransactionCanceledException"}}, "TransactWriteItems" ) mock_transact_write = mocker.patch.object( mock_service.dynamo_service, "transact_write_items" ) + mock_transact_write.side_effect = client_error + + new_review = MagicMock(spec=DocumentUploadReviewReference) + new_review.id = "new-review-id" + new_review.version = 2 + + existing_review = mock_review_update + existing_review.custodian = TEST_ODS_CODE + + with pytest.raises(DocumentReviewException) as exc_info: + mock_service.update_document_review_with_transaction( + new_review, existing_review + ) + def test_handle_standard_custodian_update_with_client_error(mock_service, mocker): @@ -487,22 +499,6 @@ def test_handle_pending_review_custodian_update_with_transaction_failure( mock_service._handle_pending_review_custodian_update( review, NEW_ODS_CODE, {"custodian"} ) - mock_transact_write.side_effect = client_error - - new_review = MagicMock(spec=DocumentUploadReviewReference) - new_review.id = "new-review-id" - new_review.version = 2 - - existing_review = mock_review_update - existing_review.custodian = TEST_ODS_CODE - - with pytest.raises(DocumentReviewException) as exc_info: - mock_service.update_document_review_with_transaction( - new_review, existing_review - ) - - assert "Failed to update document review" in str(exc_info.value) - def test_build_review_query_filter_creates_filter_from_nhs_number(mock_service): expected = Attr("ReviewStatus").eq("PENDING_REVIEW") & Attr("NhsNumber").eq( From 0bd835af0a9b8a34a15af554da533709e3a68142 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Tue, 9 Dec 2025 13:52:58 +0000 Subject: [PATCH 08/23] [PRMP-908] format --- lambdas/services/base/dynamo_service.py | 4 ++-- lambdas/services/document_service.py | 6 +++--- .../unit/services/test_document_upload_review_service.py | 7 +++---- 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/lambdas/services/base/dynamo_service.py b/lambdas/services/base/dynamo_service.py index 38f71f3fc3..04c16c87a9 100644 --- a/lambdas/services/base/dynamo_service.py +++ b/lambdas/services/base/dynamo_service.py @@ -1,9 +1,9 @@ +import operator import time -from typing import Optional, Sequence from functools import reduce +from typing import Optional, Sequence import boto3 -import operator from boto3.dynamodb.conditions import Attr, ConditionBase, Key from botocore.exceptions import ClientError from utils.audit_logging_setup import LoggingService diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py index 46c88de987..23be158966 100644 --- a/lambdas/services/document_service.py +++ b/lambdas/services/document_service.py @@ -106,9 +106,9 @@ def fetch_documents_from_table( return documents def get_item( - self, - document_id: str, - sort_key: dict = None, + self, + document_id: str, + sort_key: dict = None, table_name: str = None, model_class: type[BaseModel] = None, ) -> Optional[BaseModel]: 
diff --git a/lambdas/tests/unit/services/test_document_upload_review_service.py b/lambdas/tests/unit/services/test_document_upload_review_service.py index 9fbc4dfc2c..73e6ea745c 100644 --- a/lambdas/tests/unit/services/test_document_upload_review_service.py +++ b/lambdas/tests/unit/services/test_document_upload_review_service.py @@ -304,8 +304,6 @@ def test_update_document_review_for_patient_other_client_error( ) - - def test_update_document_review_with_transaction_builds_correct_items( mock_service, mock_review_update, mocker ): @@ -357,6 +355,7 @@ def test_update_document_review_with_transaction_builds_correct_items( ] assert second_call.kwargs["conditions"] == expected_conditions + def test_delete_document_review_files_success(mock_service, mocker): mock_delete_object = mocker.patch.object(mock_service.s3_service, "delete_object") @@ -421,13 +420,12 @@ def test_update_document_review_with_transaction_transaction_cancelled( existing_review = mock_review_update existing_review.custodian = TEST_ODS_CODE - with pytest.raises(DocumentReviewException) as exc_info: + with pytest.raises(DocumentReviewException): mock_service.update_document_review_with_transaction( new_review, existing_review ) - def test_handle_standard_custodian_update_with_client_error(mock_service, mocker): mock_update_document = mocker.patch.object(mock_service, "update_document") mock_update_document.side_effect = ClientError( @@ -500,6 +498,7 @@ def test_handle_pending_review_custodian_update_with_transaction_failure( review, NEW_ODS_CODE, {"custodian"} ) + def test_build_review_query_filter_creates_filter_from_nhs_number(mock_service): expected = Attr("ReviewStatus").eq("PENDING_REVIEW") & Attr("NhsNumber").eq( TEST_NHS_NUMBER From b962607d4700688b2be8b8eb545c577e993e4920 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Tue, 9 Dec 2025 14:05:23 +0000 Subject: [PATCH 09/23] [PRMP-908] Update document service test to pass key pair as a keyword argument --- lambdas/tests/unit/services/test_document_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lambdas/tests/unit/services/test_document_service.py b/lambdas/tests/unit/services/test_document_service.py index 4bb36ea957..0416cc296d 100644 --- a/lambdas/tests/unit/services/test_document_service.py +++ b/lambdas/tests/unit/services/test_document_service.py @@ -260,7 +260,7 @@ def test_update_document_with_custom_key_pair(mock_service, mock_dynamo_service) ) mock_service.update_document( - MOCK_TABLE_NAME, custom_key_pair, test_doc_ref, test_update_fields, + MOCK_TABLE_NAME, test_doc_ref, test_update_fields, key_pair=custom_key_pair ) mock_dynamo_service.update_item.assert_has_calls([update_item_call]) From 6437529d9ce01f4b4f282b67fd8dc9d78d09fe4a Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Tue, 9 Dec 2025 14:17:05 +0000 Subject: [PATCH 10/23] Refactor imports in dynamo_service.py --- lambdas/services/base/dynamo_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lambdas/services/base/dynamo_service.py b/lambdas/services/base/dynamo_service.py index 04c16c87a9..38f71f3fc3 100644 --- a/lambdas/services/base/dynamo_service.py +++ b/lambdas/services/base/dynamo_service.py @@ -1,9 +1,9 @@ -import operator import time -from functools import reduce from typing import Optional, Sequence +from functools import reduce import boto3 +import operator from boto3.dynamodb.conditions import Attr, ConditionBase, Key from botocore.exceptions import 
ClientError from utils.audit_logging_setup import LoggingService From c167c4a259dec810976c7d6eaf48d7cb6078868d Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Wed, 10 Dec 2025 12:21:03 +0000 Subject: [PATCH 11/23] Simplify key_pair assignment in document service --- lambdas/services/document_service.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py index 23be158966..a8942805ac 100644 --- a/lambdas/services/document_service.py +++ b/lambdas/services/document_service.py @@ -226,13 +226,9 @@ def update_document( "updated_fields": document.model_dump( exclude_none=True, by_alias=True, include=update_fields_name ), + "key_pair": key_pair + or {DocumentReferenceMetadataFields.ID.value: document.id}, } - if key_pair: - update_kwargs["key_pair"] = key_pair - else: - update_kwargs["key_pair"] = { - DocumentReferenceMetadataFields.ID.value: document.id - } if condition_expression: update_kwargs["condition_expression"] = condition_expression From f7038497be39a1a7575e1f95b7bd42e19f777f78 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Wed, 10 Dec 2025 12:30:53 +0000 Subject: [PATCH 12/23] Update type hints for sort_key and key_pair parameters in document service --- lambdas/services/document_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py index a8942805ac..153df69a3e 100644 --- a/lambdas/services/document_service.py +++ b/lambdas/services/document_service.py @@ -108,7 +108,7 @@ def fetch_documents_from_table( def get_item( self, document_id: str, - sort_key: dict = None, + sort_key: dict | None = None, table_name: str = None, model_class: type[BaseModel] = None, ) -> Optional[BaseModel]: @@ -216,7 +216,7 @@ def update_document( update_fields_name: set[str] | None = None, condition_expression: str | Attr | ConditionBase = None, expression_attribute_values: dict = None, - key_pair: dict = None, + key_pair: dict | None = None, ): """Update document in specified or configured table.""" table_name = table_name or self.table_name From fc6b550749e124d53bedfd7c458ab2b63c6c0708 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Fri, 12 Dec 2025 18:00:22 +0000 Subject: [PATCH 13/23] Add MNS end-to-end tests and update workflow for AWS region --- .github/workflows/base-e2e-backendtest.yml | 5 + Makefile | 3 + lambdas/tests/e2e/mns/__init__.py | 0 lambdas/tests/e2e/mns/test_mns_process.py | 514 +++++++++++++++++++++ 4 files changed, 522 insertions(+) create mode 100644 lambdas/tests/e2e/mns/__init__.py create mode 100644 lambdas/tests/e2e/mns/test_mns_process.py diff --git a/.github/workflows/base-e2e-backendtest.yml b/.github/workflows/base-e2e-backendtest.yml index b482216c7e..3bddb12252 100644 --- a/.github/workflows/base-e2e-backendtest.yml +++ b/.github/workflows/base-e2e-backendtest.yml @@ -78,6 +78,7 @@ jobs: run: | AWS_WORKSPACE="${SANDBOX}" API_URL="api-${SANDBOX}.access-request-fulfilment.patient-deductions.nhs.uk" + AWS_DEFAULT_REGION=${{ vars.AWS_REGION }} echo "NDR_API_ENDPOINT=$API_URL" >> $GITHUB_ENV echo "AWS_WORKSPACE=$AWS_WORKSPACE" >> $GITHUB_ENV env: @@ -86,3 +87,7 @@ jobs: - name: Run E2e Tests run: | make test-api-e2e + + - name: run MNS tests + run: | + make test-mns-e2e diff --git a/Makefile b/Makefile index 5e5894aaeb..aa84898106 100644 --- a/Makefile +++ 
b/Makefile @@ -72,6 +72,9 @@ download-api-certs: ## Downloads mTLS certificates (use with dev envs only). Usa test-api-e2e: cd ./lambdas && ./venv/bin/python3 -m pytest tests/e2e/api --ignore=tests/e2e/api/fhir -vv +test-mns-e2e: + cd ./lambdas && ./venv/bin/python3 -m pytest tests/e2e/mns -vv + test-fhir-api-e2e: ## Runs FHIR API E2E tests. Usage: make test-fhir-api-e2e WORKSPACE= ./scripts/test/run-e2e-fhir-api-tests.sh --workspace $(WORKSPACE) rm -rf ./lambdas/mtls_env_certs/$(WORKSPACE) diff --git a/lambdas/tests/e2e/mns/__init__.py b/lambdas/tests/e2e/mns/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lambdas/tests/e2e/mns/test_mns_process.py b/lambdas/tests/e2e/mns/test_mns_process.py new file mode 100644 index 0000000000..723eff5009 --- /dev/null +++ b/lambdas/tests/e2e/mns/test_mns_process.py @@ -0,0 +1,514 @@ +import json +import os +import time +import uuid +from datetime import datetime, timezone + +import pytest +from enums.death_notification_status import DeathNotificationStatus +from enums.document_review_status import DocumentReviewStatus +from enums.mns_notification_types import MNSNotificationTypes +from enums.snomed_codes import SnomedCodes +from services.base.dynamo_service import DynamoDBService +from services.base.s3_service import S3Service +from services.base.sqs_service import SQSService + +AWS_WORKSPACE = os.environ.get("AWS_WORKSPACE", "") +LLOYD_GEORGE_TABLE = f"{AWS_WORKSPACE}_LloydGeorgeReferenceMetadata" +DOCUMENT_REVIEW_TABLE = f"{AWS_WORKSPACE}_DocumentUploadReview" +PENDING_REVIEW_S3_BUCKET = f"{AWS_WORKSPACE}-pending-review-bucket" +os.environ['AWS_DEFAULT_REGION'] = 'eu-west-2' +TEST_NHS_NUMBER = "9730154198" +TEST_ORIGINAL_ODS = "Y12345" +TEST_NEW_ODS = "H81109" +MOCK_TIME = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + + +class MNSTestHelper: + def __init__(self): + self.dynamo_service = DynamoDBService() + self.s3_service = S3Service() + self.sqs_service = SQSService() + self.mns_queue_url = self.get_mns_queue_url(AWS_WORKSPACE) + + def get_mns_queue_url(self, workspace: str) -> str: + queue_name = f"{workspace}-mns-notification-queue" # Adjust naming pattern + response = self.sqs_service.client.get_queue_url(QueueName=queue_name) + return response['QueueUrl'] + + def create_lloyd_george_record(self, nhs_number: str, ods_code: str) -> dict: + record_id = str(uuid.uuid4()) + dynamo_item = { + "ID": record_id, + "NhsNumber": nhs_number, + "ContentType": "application/pdf", + "Created": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + "CurrentGpOds": ods_code, + "Custodian": ods_code, + "DocStatus": "final", + "DocumentScanCreation": "2023-01-01", + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + "FileLocation": f"s3://{AWS_WORKSPACE}-lloyd-george-store/{nhs_number}/{record_id}", + "FileName": f"1of1_Lloyd_George_Record_[Test Patient]_[{nhs_number}]_[01-01-2000].pdf", + "FileSize": "12345", + "LastUpdated": int(time.time()), + "Status": "current", + "Uploaded": True, + "Uploading": False, + "Version": "1", + "VirusScannerResult": "Clean", + } + self.dynamo_service.create_item(LLOYD_GEORGE_TABLE, dynamo_item) + return {"id": record_id, "nhs_number": nhs_number, "ods": ods_code} + + def create_document_review_record( + self, + nhs_number: str, + ods_code: str, + review_status: DocumentReviewStatus = DocumentReviewStatus.PENDING_REVIEW, + ) -> dict: + record_id = str(uuid.uuid4()) + file_location = ( + f"s3://{PENDING_REVIEW_S3_BUCKET}/{nhs_number}/{record_id}/test.pdf" + ) + + 
dynamo_item = { + "ID": record_id, + "NhsNumber": nhs_number, + "Author": ods_code, + "Custodian": ods_code, + "ReviewStatus": review_status, + "ReviewReason": "Test document for MNS e2e", + "UploadDate": int(time.time()), + "Files": [ + { + "FileName": "test.pdf", + "FileLocation": file_location, + } + ], + "Version": 1, + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + } + self.dynamo_service.create_item(DOCUMENT_REVIEW_TABLE, dynamo_item) + return {"id": record_id, "nhs_number": nhs_number, "ods": ods_code} + + def send_gp_change_message(self, nhs_number: str) -> str: + message_id = str(uuid.uuid4()) + message_body = { + "id": message_id, + "type": MNSNotificationTypes.CHANGE_OF_GP.value, + "subject": { + "nhsNumber": nhs_number, + "familyName": "TESTPATIENT", + "dob": "2000-01-01", + }, + "source": { + "name": "https://test.example.com", + "identifiers": { + "system": "https://test.example.com", + "value": str(uuid.uuid4()), + }, + }, + "time": MOCK_TIME, + "data": { + "fullUrl": "https://test.example.com/Patient/123", + "versionId": str(uuid.uuid4()), + "provenance": { + "name": "Test GP Practice", + "identifiers": { + "system": "https://test.example.com", + "value": str(uuid.uuid4()), + }, + }, + "registrationEncounterCode": "00", + }, + } + + self.sqs_service.send_message_standard( + queue_url=self.mns_queue_url, message_body=json.dumps(message_body) + ) + return message_id + + def send_death_notification_message( + self, nhs_number: str, death_status: DeathNotificationStatus + ) -> str: + message_id = str(uuid.uuid4()) + message_body = { + "id": message_id, + "type": MNSNotificationTypes.DEATH_NOTIFICATION.value, + "subject": { + "nhsNumber": nhs_number, + "familyName": "TESTPATIENT", + "dob": "2000-01-01", + }, + "source": { + "name": "NHS DIGITAL", + "identifier": { + "system": "https://fhir.nhs.uk/Id/nhsSpineASID", + "value": "477121000324", + }, + }, + "time": MOCK_TIME, + "data": { + "versionId": 'W/"16"', + "fullUrl": f"https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/{nhs_number}", + "deathNotificationStatus": death_status.value, + "provenance": { + "name": "The GP Practice", + "identifiers": { + "system": "https://fhir.nhs.uk/Id/nhsSpineASID", + "value": "477121000323", + }, + }, + }, + } + + self.sqs_service.send_message_standard( + queue_url=self.mns_queue_url, message_body=json.dumps(message_body) + ) + return message_id + + def get_lloyd_george_record(self, record_id: str) -> dict: + return self.dynamo_service.get_item( + table_name=LLOYD_GEORGE_TABLE, key={"ID": record_id} + ).get("Item") + + def get_document_review_record(self, record_id: str, version: int = 1) -> dict: + return self.dynamo_service.get_item( + table_name=DOCUMENT_REVIEW_TABLE, key={"ID": record_id, "Version": version} + ).get("Item") + + def get_all_document_review_versions(self, record_id: str) -> list[dict]: + response = self.dynamo_service.query_table_single( + table_name=DOCUMENT_REVIEW_TABLE, + search_key="ID", + search_condition=record_id, + ) + return response.get("Items", []) + + def cleanup_lloyd_george_record(self, record_id: str): + try: + self.dynamo_service.delete_item( + table_name=LLOYD_GEORGE_TABLE, key={"ID": record_id} + ) + except Exception as e: + print(f"Error cleaning up Lloyd George record {record_id}: {e}") + + def cleanup_document_review_record(self, record_id: str, version: int = 1): + try: + self.dynamo_service.delete_item( + table_name=DOCUMENT_REVIEW_TABLE, + key={"ID": record_id, "Version": version}, + ) + except Exception as e: + 
print(f"Error cleaning up document review record {record_id}: {e}") + + def wait_for_update(self, check_func, max_retries=10, delay=60): + for i in range(max_retries): + if check_func(): + return True + time.sleep(delay * i) + return False + + +@pytest.fixture +def mns_helper(): + return MNSTestHelper() + + +@pytest.fixture +def test_records(): + records = {"lloyd_george": [], "document_review": []} + yield records + + helper = MNSTestHelper() + for record_id in records["lloyd_george"]: + helper.cleanup_lloyd_george_record(record_id) + for record_id in records["document_review"]: + helper.cleanup_document_review_record(record_id) + + +class TestMNSChangeOfGP: + def test_gp_change_updates_lloyd_george_record(self, mns_helper, test_records): + lg_record = mns_helper.create_lloyd_george_record( + nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS + ) + test_records["lloyd_george"].append(lg_record["id"]) + + initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + print(initial_record) + + assert initial_record["CurrentGpOds"] == TEST_ORIGINAL_ODS + assert initial_record["Custodian"] == TEST_ORIGINAL_ODS + + mns_helper.send_gp_change_message(TEST_NHS_NUMBER) + + def check_update(): + updated_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + print(updated_record) + last_updated_changed = ( + updated_record["LastUpdated"] != initial_record["LastUpdated"] + ) + custodian_changed = ( + updated_record["Custodian"] != initial_record["Custodian"] + ) + current_gp_changed = ( + updated_record["CurrentGpOds"] != initial_record["CurrentGpOds"] + ) + return last_updated_changed and custodian_changed and current_gp_changed + + update_successful = mns_helper.wait_for_update(check_update) + assert update_successful, "Lloyd George record was not updated after GP change" + + def test_gp_change_updates_document_review_record(self, mns_helper, test_records): + review_record = mns_helper.create_document_review_record( + nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS + ) + test_records["document_review"].append(review_record["id"]) + + initial_record = mns_helper.get_document_review_record( + review_record["id"], version=1 + ) + assert initial_record["Custodian"] == TEST_ORIGINAL_ODS + assert ( + initial_record["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value + ) + assert initial_record["Version"] == 1 + + mns_helper.send_gp_change_message(TEST_NHS_NUMBER) + + def check_new_version(): + try: + new_version = mns_helper.get_document_review_record( + review_record["id"], version=2 + ) + return new_version is not None + except Exception: + return False + + update_successful = mns_helper.wait_for_update(check_new_version) + assert ( + update_successful + ), "New version of document review record was not created after GP change" + + version_2_record = mns_helper.get_document_review_record( + review_record["id"], version=2 + ) + assert version_2_record["Version"] == 2 + assert version_2_record["Custodian"] == TEST_NEW_ODS + assert version_2_record is not None + + version_1_record = mns_helper.get_document_review_record( + review_record["id"], version=1 + ) + assert ( + version_1_record["ReviewStatus"] + == DocumentReviewStatus.NEVER_REVIEWED.value + ) + assert version_1_record.get("ReviewDate") is not None + assert version_1_record["Reviewer"] == TEST_ORIGINAL_ODS + + def test_gp_change_non_pending_review_no_new_version( + self, mns_helper, test_records + ): + review_record = mns_helper.create_document_review_record( + nhs_number=TEST_NHS_NUMBER, + 
ods_code=TEST_ORIGINAL_ODS, + review_status=DocumentReviewStatus.APPROVED, + ) + test_records["document_review"].append(review_record["id"]) + + initial_record = mns_helper.get_document_review_record( + review_record["id"], version=1 + ) + assert initial_record["Custodian"] == TEST_ORIGINAL_ODS + assert initial_record["ReviewStatus"] == DocumentReviewStatus.APPROVED.value + assert initial_record["Version"] == 1 + + mns_helper.send_gp_change_message(TEST_NHS_NUMBER) + + try: + mns_helper.get_document_review_record(review_record["id"], version=2) + assert ( + False + ), "Version 2 should not have been created for non-PENDING_REVIEW record" + except Exception: + pass + + version_1_record = mns_helper.get_document_review_record( + review_record["id"], version=1 + ) + assert version_1_record is not None + assert version_1_record["Version"] == 1 + assert version_1_record["ReviewStatus"] == DocumentReviewStatus.APPROVED.value + assert version_1_record.get("Custodian") == TEST_NEW_ODS + assert version_1_record is not None + + def test_gp_change_updates_both_tables(self, mns_helper, test_records): + lg_record = mns_helper.create_lloyd_george_record( + nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS + ) + test_records["lloyd_george"].append(lg_record["id"]) + + review_record = mns_helper.create_document_review_record( + nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS + ) + test_records["document_review"].append(review_record["id"]) + + initial_lg = mns_helper.get_lloyd_george_record(lg_record["id"]) + mns_helper.get_document_review_record(review_record["id"], version=1) + + mns_helper.send_gp_change_message(TEST_NHS_NUMBER) + + def check_updates(): + try: + updated_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + last_updated_changed = ( + updated_record["LastUpdated"] != initial_lg["LastUpdated"] + ) + custodian_changed = ( + updated_record["Custodian"] != initial_lg["Custodian"] + ) + current_gp_changed = ( + updated_record["CurrentGpOds"] != initial_lg["CurrentGpOds"] + ) + lg_changed = ( + last_updated_changed and custodian_changed and current_gp_changed + ) + + new_review_version = mns_helper.get_document_review_record( + review_record["id"], version=2 + ) + review_versioned = new_review_version is not None + + return lg_changed and review_versioned + except Exception: + return False + + update_successful = mns_helper.wait_for_update(check_updates) + assert update_successful, "Both tables were not updated after GP change" + + final_lg_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + assert final_lg_record is not None + + final_review_v2 = mns_helper.get_document_review_record( + review_record["id"], version=2 + ) + assert final_review_v2 is not None + assert final_review_v2["Version"] == 2 + + final_review_v1 = mns_helper.get_document_review_record( + review_record["id"], version=1 + ) + assert ( + final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value + ) + + +# class TestMNSDeathNotification: +# def test_formal_death_notification_marks_patient_deceased( +# self, mns_helper, test_records +# ): +# lg_record = mns_helper.create_lloyd_george_record( +# nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS +# ) +# test_records["lloyd_george"].append(lg_record["id"]) +# +# initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) +# assert initial_record["CurrentGpOds"] == TEST_ORIGINAL_ODS +# +# mns_helper.send_death_notification_message( +# nhs_number=TEST_NHS_NUMBER, death_status=DeathNotificationStatus.FORMAL +# ) +# +# 
def check_update(): +# return ( +# mns_helper.get_lloyd_george_record(lg_record["id"])["CurrentGpOds"] +# == PatientOdsInactiveStatus.DECEASED.value +# ) +# +# mns_helper.wait_for_update(check_update) +# +# def test_informal_death_notification_no_change(self, mns_helper, test_records): +# lg_record = mns_helper.create_lloyd_george_record( +# nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS +# ) +# test_records["lloyd_george"].append(lg_record["id"]) +# +# initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) +# initial_last_updated = initial_record["LastUpdated"] +# +# mns_helper.send_death_notification_message( +# nhs_number=TEST_NHS_NUMBER, death_status=DeathNotificationStatus.INFORMAL +# ) +# +# def check_update(): +# final_record = mns_helper.get_lloyd_george_record(lg_record["id"]) +# current_gp_updated = final_record["CurrentGpOds"] == TEST_ORIGINAL_ODS +# last_updated = final_record["LastUpdated"] == initial_last_updated +# return current_gp_updated and last_updated +# +# mns_helper.wait_for_update(check_update) +# +# def test_formal_death_updates_both_tables(self, mns_helper, test_records): +# lg_record = mns_helper.create_lloyd_george_record( +# nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS +# ) +# test_records["lloyd_george"].append(lg_record["id"]) +# +# review_record = mns_helper.create_document_review_record( +# nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS +# ) +# test_records["document_review"].append(review_record["id"]) +# +# mns_helper.send_death_notification_message( +# nhs_number=TEST_NHS_NUMBER, death_status=DeathNotificationStatus.FORMAL +# ) +# +# def check_death_updates(): +# try: +# lg_record_updated = mns_helper.get_lloyd_george_record(lg_record["id"]) +# lg_deceased = ( +# lg_record_updated["CurrentGpOds"] +# == PatientOdsInactiveStatus.DECEASED.value +# ) +# +# new_version = mns_helper.get_document_review_record( +# review_record["id"], version=2 +# ) +# review_deceased = ( +# new_version is not None +# and new_version["Custodian"] +# == PatientOdsInactiveStatus.DECEASED.value +# ) +# +# return lg_deceased and review_deceased +# except Exception: +# return False +# +# update_successful = mns_helper.wait_for_update( +# check_death_updates, max_retries=12, delay=5 +# ) +# assert ( +# update_successful +# ), "Both tables were not marked as deceased after formal death notification" +# +# final_lg_record = mns_helper.get_lloyd_george_record(lg_record["id"]) +# assert ( +# final_lg_record["CurrentGpOds"] == PatientOdsInactiveStatus.DECEASED.value +# ) +# +# final_review_v2 = mns_helper.get_document_review_record( +# review_record["id"], version=2 +# ) +# assert final_review_v2["Custodian"] == PatientOdsInactiveStatus.DECEASED.value +# assert ( +# final_review_v2["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value +# ) +# +# final_review_v1 = mns_helper.get_document_review_record( +# review_record["id"], version=1 +# ) +# assert ( +# final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value +# ) From c512b6fe6934d7c7c7527cbf312066ee7783e1b2 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Mon, 15 Dec 2025 16:26:47 +0000 Subject: [PATCH 14/23] Add MNS end-to-end tests and helper functions for death notifications --- .github/workflows/base-e2e-mns.yml | 86 ++++++ Makefile | 3 - lambdas/tests/e2e/mns/conftest.py | 20 ++ lambdas/tests/e2e/mns/mns_helper.py | 210 ++++++++++++++ lambdas/tests/e2e/mns/test_mns_death.py | 113 ++++++++ 
lambdas/tests/e2e/mns/test_mns_process.py | 319 +--------------------- 6 files changed, 431 insertions(+), 320 deletions(-) create mode 100644 .github/workflows/base-e2e-mns.yml create mode 100644 lambdas/tests/e2e/mns/conftest.py create mode 100644 lambdas/tests/e2e/mns/mns_helper.py create mode 100644 lambdas/tests/e2e/mns/test_mns_death.py diff --git a/.github/workflows/base-e2e-mns.yml b/.github/workflows/base-e2e-mns.yml new file mode 100644 index 0000000000..027ab1c492 --- /dev/null +++ b/.github/workflows/base-e2e-mns.yml @@ -0,0 +1,86 @@ +name: "Z-BASE E2e Test: MNS E2E Tests" + +on: + workflow_call: + inputs: + build_branch: + description: "Branch with e2e tests." + required: true + type: "string" + environment: + description: "Which Environment type are we using" + required: true + type: "string" + sandbox: + description: "Sandbox to run the smoke tests on." + required: true + type: "string" + secrets: + AWS_ASSUME_ROLE: + required: true + +permissions: + pull-requests: write + id-token: write + contents: read + +jobs: + mns-e2e-test: + runs-on: ubuntu-latest + environment: ${{ inputs.environment }} + strategy: + matrix: + test-file: + - test_mns_process.py + - test_mns_death.py + fail-fast: false + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + repository: "NHSDigital/national-document-repository" + ref: ${{ inputs.build_branch }} + + - name: AWS Role + uses: aws-actions/configure-aws-credentials@v5 + with: + role-to-assume: ${{ secrets.AWS_ASSUME_ROLE }} + role-skip-session-tagging: true + mask-aws-account-id: true + aws-region: ${{ vars.AWS_REGION }} + + - name: Set up Python 3.11 + uses: actions/setup-python@v6 + with: + python-version: 3.11 + + - name: Make virtual environment + run: | + make env + + - name: Start virtual environment + run: | + source ./lambdas/venv/bin/activate + echo PATH=$PATH >> $GITHUB_ENV + + - name: Get MNS Queue URL + run: | + QUEUE_NAME="${SANDBOX}-mns-notification-queue" + MNS_QUEUE_URL=$(aws sqs get-queue-url --queue-name $QUEUE_NAME --query 'QueueUrl' --output text) + echo "::add-mask::$MNS_QUEUE_URL" + echo "MNS_NOTIFICATION_QUEUE_URL=$MNS_QUEUE_URL" >> $GITHUB_ENV + env: + SANDBOX: ${{ inputs.sandbox }} + + - name: Set E2e Test Variables + run: | + AWS_WORKSPACE="${SANDBOX}" + AWS_DEFAULT_REGION=${{ vars.AWS_REGION }} + echo "AWS_WORKSPACE=$AWS_WORKSPACE" >> $GITHUB_ENV + echo "AWS_DEFAULT_REGION=$AWS_DEFAULT_REGION" >> $GITHUB_ENV + env: + SANDBOX: ${{ inputs.sandbox }} + + - name: Run MNS E2E Test - ${{ matrix.test-file }} + run: | + cd ./lambdas && ./venv/bin/python3 -m pytest tests/e2e/mns/${{ matrix.test-file }} -vv diff --git a/Makefile b/Makefile index aa84898106..5e5894aaeb 100644 --- a/Makefile +++ b/Makefile @@ -72,9 +72,6 @@ download-api-certs: ## Downloads mTLS certificates (use with dev envs only). Usa test-api-e2e: cd ./lambdas && ./venv/bin/python3 -m pytest tests/e2e/api --ignore=tests/e2e/api/fhir -vv -test-mns-e2e: - cd ./lambdas && ./venv/bin/python3 -m pytest tests/e2e/mns -vv - test-fhir-api-e2e: ## Runs FHIR API E2E tests. 
Usage: make test-fhir-api-e2e WORKSPACE= ./scripts/test/run-e2e-fhir-api-tests.sh --workspace $(WORKSPACE) rm -rf ./lambdas/mtls_env_certs/$(WORKSPACE) diff --git a/lambdas/tests/e2e/mns/conftest.py b/lambdas/tests/e2e/mns/conftest.py new file mode 100644 index 0000000000..1b0fc0f89e --- /dev/null +++ b/lambdas/tests/e2e/mns/conftest.py @@ -0,0 +1,20 @@ +import pytest + +from tests.e2e.mns.mns_helper import MNSTestHelper + + +@pytest.fixture +def mns_helper(): + return MNSTestHelper() + + +@pytest.fixture +def test_records(): + records = {"lloyd_george": [], "document_review": []} + yield records + + helper = MNSTestHelper() + for record_id in records["lloyd_george"]: + helper.cleanup_lloyd_george_record(record_id) + for record_id in records["document_review"]: + helper.cleanup_document_review_record(record_id) diff --git a/lambdas/tests/e2e/mns/mns_helper.py b/lambdas/tests/e2e/mns/mns_helper.py new file mode 100644 index 0000000000..3df67006bc --- /dev/null +++ b/lambdas/tests/e2e/mns/mns_helper.py @@ -0,0 +1,210 @@ +import json +import os +import time +import uuid +from datetime import datetime, timezone + +from enums.death_notification_status import DeathNotificationStatus +from enums.document_review_status import DocumentReviewStatus +from enums.mns_notification_types import MNSNotificationTypes +from enums.snomed_codes import SnomedCodes +from services.base.dynamo_service import DynamoDBService +from services.base.s3_service import S3Service +from services.base.sqs_service import SQSService + +AWS_WORKSPACE = os.environ.get("AWS_WORKSPACE", "") +LLOYD_GEORGE_TABLE = f"{AWS_WORKSPACE}_LloydGeorgeReferenceMetadata" +DOCUMENT_REVIEW_TABLE = f"{AWS_WORKSPACE}_DocumentUploadReview" +PENDING_REVIEW_S3_BUCKET = f"{AWS_WORKSPACE}-pending-review-bucket" +TEST_NHS_NUMBER = "9730154198" +TEST_NHS_NUMBER_DEATH = "9730135967" +TEST_ORIGINAL_ODS = "Y12345" +TEST_NEW_ODS = "H81109" +MOCK_TIME = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + + +class MNSTestHelper: + def __init__(self): + self.dynamo_service = DynamoDBService() + self.s3_service = S3Service() + self.sqs_service = SQSService() + self.mns_queue_url = self.get_mns_queue_url(AWS_WORKSPACE) + + def get_mns_queue_url(self, workspace: str) -> str: + queue_name = f"{workspace}-mns-notification-queue" + response = self.sqs_service.client.get_queue_url(QueueName=queue_name) + return response['QueueUrl'] + + def create_lloyd_george_record(self, nhs_number: str, ods_code: str) -> dict: + record_id = str(uuid.uuid4()) + dynamo_item = { + "ID": record_id, + "NhsNumber": nhs_number, + "ContentType": "application/pdf", + "Created": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + "CurrentGpOds": ods_code, + "Custodian": ods_code, + "DocStatus": "final", + "DocumentScanCreation": "2023-01-01", + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + "FileLocation": f"s3://{AWS_WORKSPACE}-lloyd-george-store/{nhs_number}/{record_id}", + "FileName": f"1of1_Lloyd_George_Record_[Test Patient]_[{nhs_number}]_[01-01-2000].pdf", + "FileSize": "12345", + "LastUpdated": int(time.time()), + "Status": "current", + "Uploaded": True, + "Uploading": False, + "Version": "1", + "VirusScannerResult": "Clean", + } + self.dynamo_service.create_item(LLOYD_GEORGE_TABLE, dynamo_item) + return {"id": record_id, "nhs_number": nhs_number, "ods": ods_code} + + def create_document_review_record( + self, + nhs_number: str, + ods_code: str, + review_status: DocumentReviewStatus = DocumentReviewStatus.PENDING_REVIEW, + ) -> dict: + 
record_id = str(uuid.uuid4()) + file_location = ( + f"s3://{PENDING_REVIEW_S3_BUCKET}/{nhs_number}/{record_id}/test.pdf" + ) + + dynamo_item = { + "ID": record_id, + "NhsNumber": nhs_number, + "Author": ods_code, + "Custodian": ods_code, + "ReviewStatus": review_status, + "ReviewReason": "Test document for MNS e2e", + "UploadDate": int(time.time()), + "Files": [ + { + "FileName": "test.pdf", + "FileLocation": file_location, + } + ], + "Version": 1, + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + } + self.dynamo_service.create_item(DOCUMENT_REVIEW_TABLE, dynamo_item) + return {"id": record_id, "nhs_number": nhs_number, "ods": ods_code} + + def send_gp_change_message(self, nhs_number: str) -> str: + message_id = str(uuid.uuid4()) + message_body = { + "id": message_id, + "type": MNSNotificationTypes.CHANGE_OF_GP.value, + "subject": { + "nhsNumber": nhs_number, + "familyName": "TESTPATIENT", + "dob": "2000-01-01", + }, + "source": { + "name": "https://test.example.com", + "identifiers": { + "system": "https://test.example.com", + "value": str(uuid.uuid4()), + }, + }, + "time": MOCK_TIME, + "data": { + "fullUrl": "https://test.example.com/Patient/123", + "versionId": str(uuid.uuid4()), + "provenance": { + "name": "Test GP Practice", + "identifiers": { + "system": "https://test.example.com", + "value": str(uuid.uuid4()), + }, + }, + "registrationEncounterCode": "00", + }, + } + + self.sqs_service.send_message_standard( + queue_url=self.mns_queue_url, message_body=json.dumps(message_body) + ) + return message_id + + def send_death_notification_message( + self, nhs_number: str, death_status: DeathNotificationStatus + ) -> str: + message_id = str(uuid.uuid4()) + message_body = { + "id": message_id, + "type": MNSNotificationTypes.DEATH_NOTIFICATION.value, + "subject": { + "nhsNumber": nhs_number, + "familyName": "TESTPATIENT", + "dob": "2000-01-01", + }, + "source": { + "name": "NHS DIGITAL", + "identifier": { + "system": "https://fhir.nhs.uk/Id/nhsSpineASID", + "value": "477121000324", + }, + }, + "time": MOCK_TIME, + "data": { + "versionId": 'W/"16"', + "fullUrl": f"https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/{nhs_number}", + "deathNotificationStatus": death_status.value, + "provenance": { + "name": "The GP Practice", + "identifiers": { + "system": "https://fhir.nhs.uk/Id/nhsSpineASID", + "value": "477121000323", + }, + }, + }, + } + + self.sqs_service.send_message_standard( + queue_url=self.mns_queue_url, message_body=json.dumps(message_body) + ) + return message_id + + def get_lloyd_george_record(self, record_id: str) -> dict: + return self.dynamo_service.get_item( + table_name=LLOYD_GEORGE_TABLE, key={"ID": record_id} + ).get("Item") + + def get_document_review_record(self, record_id: str, version: int = 1) -> dict: + return self.dynamo_service.get_item( + table_name=DOCUMENT_REVIEW_TABLE, key={"ID": record_id, "Version": version} + ).get("Item") + + def get_all_document_review_versions(self, record_id: str) -> list[dict]: + response = self.dynamo_service.query_table_single( + table_name=DOCUMENT_REVIEW_TABLE, + search_key="ID", + search_condition=record_id, + ) + return response.get("Items", []) + + def cleanup_lloyd_george_record(self, record_id: str): + try: + self.dynamo_service.delete_item( + table_name=LLOYD_GEORGE_TABLE, key={"ID": record_id} + ) + except Exception as e: + print(f"Error cleaning up Lloyd George record {record_id}: {e}") + + def cleanup_document_review_record(self, record_id: str, version: int = 1): + try: + 
self.dynamo_service.delete_item( + table_name=DOCUMENT_REVIEW_TABLE, + key={"ID": record_id, "Version": version}, + ) + except Exception as e: + print(f"Error cleaning up document review record {record_id}: {e}") + + def wait_for_update(self, check_func, max_retries=3, delay=60): + for i in range(max_retries): + if check_func(): + return True + time.sleep(delay * i) + return False diff --git a/lambdas/tests/e2e/mns/test_mns_death.py b/lambdas/tests/e2e/mns/test_mns_death.py new file mode 100644 index 0000000000..ca3b277559 --- /dev/null +++ b/lambdas/tests/e2e/mns/test_mns_death.py @@ -0,0 +1,113 @@ +from tests.e2e.mns.mns_helper import TEST_NHS_NUMBER_DEATH, TEST_ORIGINAL_ODS +from enums.death_notification_status import DeathNotificationStatus +from enums.document_review_status import DocumentReviewStatus +from enums.patient_ods_inactive_status import PatientOdsInactiveStatus + + +class TestMNSDeathNotification: + def test_formal_death_notification_marks_patient_deceased( + self, mns_helper, test_records + ): + lg_record = mns_helper.create_lloyd_george_record( + nhs_number=TEST_NHS_NUMBER_DEATH, ods_code=TEST_ORIGINAL_ODS + ) + test_records["lloyd_george"].append(lg_record["id"]) + + initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + assert initial_record["CurrentGpOds"] == TEST_ORIGINAL_ODS + + mns_helper.send_death_notification_message( + nhs_number=TEST_NHS_NUMBER_DEATH, death_status=DeathNotificationStatus.FORMAL + ) + + def check_update(): + return ( + mns_helper.get_lloyd_george_record(lg_record["id"])["CurrentGpOds"] + == PatientOdsInactiveStatus.DECEASED.value + ) + + mns_helper.wait_for_update(check_update) + + def test_informal_death_notification_no_change(self, mns_helper, test_records): + lg_record = mns_helper.create_lloyd_george_record( + nhs_number=TEST_NHS_NUMBER_DEATH, ods_code=TEST_ORIGINAL_ODS + ) + test_records["lloyd_george"].append(lg_record["id"]) + + initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + initial_last_updated = initial_record["LastUpdated"] + + mns_helper.send_death_notification_message( + nhs_number=TEST_NHS_NUMBER_DEATH, death_status=DeathNotificationStatus.INFORMAL + ) + + def check_update(): + final_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + current_gp_updated = final_record["CurrentGpOds"] == TEST_ORIGINAL_ODS + last_updated = final_record["LastUpdated"] == initial_last_updated + return current_gp_updated and last_updated + + mns_helper.wait_for_update(check_update) + + def test_formal_death_updates_both_tables(self, mns_helper, test_records): + lg_record = mns_helper.create_lloyd_george_record( + nhs_number=TEST_NHS_NUMBER_DEATH, ods_code=TEST_ORIGINAL_ODS + ) + test_records["lloyd_george"].append(lg_record["id"]) + + review_record = mns_helper.create_document_review_record( + nhs_number=TEST_NHS_NUMBER_DEATH, ods_code=TEST_ORIGINAL_ODS + ) + test_records["document_review"].append(review_record["id"]) + + mns_helper.send_death_notification_message( + nhs_number=TEST_NHS_NUMBER_DEATH, death_status=DeathNotificationStatus.FORMAL + ) + + def check_death_updates(): + try: + lg_record_updated = mns_helper.get_lloyd_george_record(lg_record["id"]) + lg_deceased = ( + lg_record_updated["CurrentGpOds"] + == PatientOdsInactiveStatus.DECEASED.value + ) + + new_version = mns_helper.get_document_review_record( + review_record["id"], version=2 + ) + review_deceased = ( + new_version is not None + and new_version["Custodian"] + == PatientOdsInactiveStatus.DECEASED.value + ) + + return lg_deceased and 
review_deceased + except Exception: + return False + + update_successful = mns_helper.wait_for_update( + check_death_updates + ) + assert ( + update_successful + ), "Both tables were not marked as deceased after formal death notification" + + final_lg_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + assert ( + final_lg_record["CurrentGpOds"] == PatientOdsInactiveStatus.DECEASED.value + ) + + final_review_v2 = mns_helper.get_document_review_record( + review_record["id"], version=2 + ) + assert final_review_v2["Custodian"] == PatientOdsInactiveStatus.DECEASED.value + assert ( + final_review_v2["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value + ) + + final_review_v1 = mns_helper.get_document_review_record( + review_record["id"], version=1 + ) + assert ( + final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value + ) diff --git a/lambdas/tests/e2e/mns/test_mns_process.py b/lambdas/tests/e2e/mns/test_mns_process.py index 723eff5009..6be578f6f2 100644 --- a/lambdas/tests/e2e/mns/test_mns_process.py +++ b/lambdas/tests/e2e/mns/test_mns_process.py @@ -1,214 +1,8 @@ -import json -import os -import time -import uuid -from datetime import datetime, timezone import pytest -from enums.death_notification_status import DeathNotificationStatus -from enums.document_review_status import DocumentReviewStatus -from enums.mns_notification_types import MNSNotificationTypes -from enums.snomed_codes import SnomedCodes -from services.base.dynamo_service import DynamoDBService -from services.base.s3_service import S3Service -from services.base.sqs_service import SQSService - -AWS_WORKSPACE = os.environ.get("AWS_WORKSPACE", "") -LLOYD_GEORGE_TABLE = f"{AWS_WORKSPACE}_LloydGeorgeReferenceMetadata" -DOCUMENT_REVIEW_TABLE = f"{AWS_WORKSPACE}_DocumentUploadReview" -PENDING_REVIEW_S3_BUCKET = f"{AWS_WORKSPACE}-pending-review-bucket" -os.environ['AWS_DEFAULT_REGION'] = 'eu-west-2' -TEST_NHS_NUMBER = "9730154198" -TEST_ORIGINAL_ODS = "Y12345" -TEST_NEW_ODS = "H81109" -MOCK_TIME = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ") - - -class MNSTestHelper: - def __init__(self): - self.dynamo_service = DynamoDBService() - self.s3_service = S3Service() - self.sqs_service = SQSService() - self.mns_queue_url = self.get_mns_queue_url(AWS_WORKSPACE) - - def get_mns_queue_url(self, workspace: str) -> str: - queue_name = f"{workspace}-mns-notification-queue" # Adjust naming pattern - response = self.sqs_service.client.get_queue_url(QueueName=queue_name) - return response['QueueUrl'] - - def create_lloyd_george_record(self, nhs_number: str, ods_code: str) -> dict: - record_id = str(uuid.uuid4()) - dynamo_item = { - "ID": record_id, - "NhsNumber": nhs_number, - "ContentType": "application/pdf", - "Created": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ"), - "CurrentGpOds": ods_code, - "Custodian": ods_code, - "DocStatus": "final", - "DocumentScanCreation": "2023-01-01", - "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, - "FileLocation": f"s3://{AWS_WORKSPACE}-lloyd-george-store/{nhs_number}/{record_id}", - "FileName": f"1of1_Lloyd_George_Record_[Test Patient]_[{nhs_number}]_[01-01-2000].pdf", - "FileSize": "12345", - "LastUpdated": int(time.time()), - "Status": "current", - "Uploaded": True, - "Uploading": False, - "Version": "1", - "VirusScannerResult": "Clean", - } - self.dynamo_service.create_item(LLOYD_GEORGE_TABLE, dynamo_item) - return {"id": record_id, "nhs_number": nhs_number, "ods": ods_code} - - def create_document_review_record( - self, - 
nhs_number: str, - ods_code: str, - review_status: DocumentReviewStatus = DocumentReviewStatus.PENDING_REVIEW, - ) -> dict: - record_id = str(uuid.uuid4()) - file_location = ( - f"s3://{PENDING_REVIEW_S3_BUCKET}/{nhs_number}/{record_id}/test.pdf" - ) - - dynamo_item = { - "ID": record_id, - "NhsNumber": nhs_number, - "Author": ods_code, - "Custodian": ods_code, - "ReviewStatus": review_status, - "ReviewReason": "Test document for MNS e2e", - "UploadDate": int(time.time()), - "Files": [ - { - "FileName": "test.pdf", - "FileLocation": file_location, - } - ], - "Version": 1, - "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, - } - self.dynamo_service.create_item(DOCUMENT_REVIEW_TABLE, dynamo_item) - return {"id": record_id, "nhs_number": nhs_number, "ods": ods_code} - - def send_gp_change_message(self, nhs_number: str) -> str: - message_id = str(uuid.uuid4()) - message_body = { - "id": message_id, - "type": MNSNotificationTypes.CHANGE_OF_GP.value, - "subject": { - "nhsNumber": nhs_number, - "familyName": "TESTPATIENT", - "dob": "2000-01-01", - }, - "source": { - "name": "https://test.example.com", - "identifiers": { - "system": "https://test.example.com", - "value": str(uuid.uuid4()), - }, - }, - "time": MOCK_TIME, - "data": { - "fullUrl": "https://test.example.com/Patient/123", - "versionId": str(uuid.uuid4()), - "provenance": { - "name": "Test GP Practice", - "identifiers": { - "system": "https://test.example.com", - "value": str(uuid.uuid4()), - }, - }, - "registrationEncounterCode": "00", - }, - } - - self.sqs_service.send_message_standard( - queue_url=self.mns_queue_url, message_body=json.dumps(message_body) - ) - return message_id - - def send_death_notification_message( - self, nhs_number: str, death_status: DeathNotificationStatus - ) -> str: - message_id = str(uuid.uuid4()) - message_body = { - "id": message_id, - "type": MNSNotificationTypes.DEATH_NOTIFICATION.value, - "subject": { - "nhsNumber": nhs_number, - "familyName": "TESTPATIENT", - "dob": "2000-01-01", - }, - "source": { - "name": "NHS DIGITAL", - "identifier": { - "system": "https://fhir.nhs.uk/Id/nhsSpineASID", - "value": "477121000324", - }, - }, - "time": MOCK_TIME, - "data": { - "versionId": 'W/"16"', - "fullUrl": f"https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/{nhs_number}", - "deathNotificationStatus": death_status.value, - "provenance": { - "name": "The GP Practice", - "identifiers": { - "system": "https://fhir.nhs.uk/Id/nhsSpineASID", - "value": "477121000323", - }, - }, - }, - } - - self.sqs_service.send_message_standard( - queue_url=self.mns_queue_url, message_body=json.dumps(message_body) - ) - return message_id - - def get_lloyd_george_record(self, record_id: str) -> dict: - return self.dynamo_service.get_item( - table_name=LLOYD_GEORGE_TABLE, key={"ID": record_id} - ).get("Item") - - def get_document_review_record(self, record_id: str, version: int = 1) -> dict: - return self.dynamo_service.get_item( - table_name=DOCUMENT_REVIEW_TABLE, key={"ID": record_id, "Version": version} - ).get("Item") - - def get_all_document_review_versions(self, record_id: str) -> list[dict]: - response = self.dynamo_service.query_table_single( - table_name=DOCUMENT_REVIEW_TABLE, - search_key="ID", - search_condition=record_id, - ) - return response.get("Items", []) - def cleanup_lloyd_george_record(self, record_id: str): - try: - self.dynamo_service.delete_item( - table_name=LLOYD_GEORGE_TABLE, key={"ID": record_id} - ) - except Exception as e: - print(f"Error cleaning up Lloyd George record 
{record_id}: {e}") - - def cleanup_document_review_record(self, record_id: str, version: int = 1): - try: - self.dynamo_service.delete_item( - table_name=DOCUMENT_REVIEW_TABLE, - key={"ID": record_id, "Version": version}, - ) - except Exception as e: - print(f"Error cleaning up document review record {record_id}: {e}") - - def wait_for_update(self, check_func, max_retries=10, delay=60): - for i in range(max_retries): - if check_func(): - return True - time.sleep(delay * i) - return False +from tests.e2e.mns.mns_helper import MNSTestHelper, TEST_NHS_NUMBER, TEST_ORIGINAL_ODS, TEST_NEW_ODS +from enums.document_review_status import DocumentReviewStatus @pytest.fixture @@ -403,112 +197,3 @@ def check_updates(): assert ( final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value ) - - -# class TestMNSDeathNotification: -# def test_formal_death_notification_marks_patient_deceased( -# self, mns_helper, test_records -# ): -# lg_record = mns_helper.create_lloyd_george_record( -# nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS -# ) -# test_records["lloyd_george"].append(lg_record["id"]) -# -# initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) -# assert initial_record["CurrentGpOds"] == TEST_ORIGINAL_ODS -# -# mns_helper.send_death_notification_message( -# nhs_number=TEST_NHS_NUMBER, death_status=DeathNotificationStatus.FORMAL -# ) -# -# def check_update(): -# return ( -# mns_helper.get_lloyd_george_record(lg_record["id"])["CurrentGpOds"] -# == PatientOdsInactiveStatus.DECEASED.value -# ) -# -# mns_helper.wait_for_update(check_update) -# -# def test_informal_death_notification_no_change(self, mns_helper, test_records): -# lg_record = mns_helper.create_lloyd_george_record( -# nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS -# ) -# test_records["lloyd_george"].append(lg_record["id"]) -# -# initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) -# initial_last_updated = initial_record["LastUpdated"] -# -# mns_helper.send_death_notification_message( -# nhs_number=TEST_NHS_NUMBER, death_status=DeathNotificationStatus.INFORMAL -# ) -# -# def check_update(): -# final_record = mns_helper.get_lloyd_george_record(lg_record["id"]) -# current_gp_updated = final_record["CurrentGpOds"] == TEST_ORIGINAL_ODS -# last_updated = final_record["LastUpdated"] == initial_last_updated -# return current_gp_updated and last_updated -# -# mns_helper.wait_for_update(check_update) -# -# def test_formal_death_updates_both_tables(self, mns_helper, test_records): -# lg_record = mns_helper.create_lloyd_george_record( -# nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS -# ) -# test_records["lloyd_george"].append(lg_record["id"]) -# -# review_record = mns_helper.create_document_review_record( -# nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS -# ) -# test_records["document_review"].append(review_record["id"]) -# -# mns_helper.send_death_notification_message( -# nhs_number=TEST_NHS_NUMBER, death_status=DeathNotificationStatus.FORMAL -# ) -# -# def check_death_updates(): -# try: -# lg_record_updated = mns_helper.get_lloyd_george_record(lg_record["id"]) -# lg_deceased = ( -# lg_record_updated["CurrentGpOds"] -# == PatientOdsInactiveStatus.DECEASED.value -# ) -# -# new_version = mns_helper.get_document_review_record( -# review_record["id"], version=2 -# ) -# review_deceased = ( -# new_version is not None -# and new_version["Custodian"] -# == PatientOdsInactiveStatus.DECEASED.value -# ) -# -# return lg_deceased and review_deceased -# except Exception: -# return 
False -# -# update_successful = mns_helper.wait_for_update( -# check_death_updates, max_retries=12, delay=5 -# ) -# assert ( -# update_successful -# ), "Both tables were not marked as deceased after formal death notification" -# -# final_lg_record = mns_helper.get_lloyd_george_record(lg_record["id"]) -# assert ( -# final_lg_record["CurrentGpOds"] == PatientOdsInactiveStatus.DECEASED.value -# ) -# -# final_review_v2 = mns_helper.get_document_review_record( -# review_record["id"], version=2 -# ) -# assert final_review_v2["Custodian"] == PatientOdsInactiveStatus.DECEASED.value -# assert ( -# final_review_v2["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value -# ) -# -# final_review_v1 = mns_helper.get_document_review_record( -# review_record["id"], version=1 -# ) -# assert ( -# final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value -# ) From 46e73c17e94a943407e0bfdcc7e02625ed415313 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Mon, 15 Dec 2025 16:38:28 +0000 Subject: [PATCH 15/23] Add MNS end-to-end test service configuration to sandbox workflow --- .github/workflows/ndr-e2e-test-sandbox.yml | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ndr-e2e-test-sandbox.yml b/.github/workflows/ndr-e2e-test-sandbox.yml index 65e66df547..00c4401fb7 100644 --- a/.github/workflows/ndr-e2e-test-sandbox.yml +++ b/.github/workflows/ndr-e2e-test-sandbox.yml @@ -40,4 +40,13 @@ jobs: environment: ${{ inputs.environment }} sandbox: ${{ inputs.sandbox }} secrets: - AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} \ No newline at end of file + AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} + + services-mns-e2etest: + uses: ./.github/workflows/base-e2e-mns.yml + with: + build_branch: ${{ inputs.build_branch }} + environment: ${{ inputs.environment }} + sandbox: ${{ inputs.sandbox }} + secrets: + AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} From f37d1d1657f90f8ed1ca7d37bf26c915bf4927f5 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Mon, 15 Dec 2025 16:41:44 +0000 Subject: [PATCH 16/23] Add MNS end-to-end test step to sandbox deployment workflow --- .../workflows/lambdas-deploy-feature-to-sandbox.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/lambdas-deploy-feature-to-sandbox.yml b/.github/workflows/lambdas-deploy-feature-to-sandbox.yml index 8659f2241b..085b34c7fe 100644 --- a/.github/workflows/lambdas-deploy-feature-to-sandbox.yml +++ b/.github/workflows/lambdas-deploy-feature-to-sandbox.yml @@ -117,3 +117,13 @@ jobs: sandbox: ${{ inputs.sandbox }} secrets: AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} + + run_mns_e2etest: + uses: ./.github/workflows/base-e2e-mns-backendtest.yml + needs: ["deploy_all_lambdas", "disable_fhir_stub"] + with: + build_branch: ${{ inputs.build_branch }} + environment: development + sandbox: ${{ inputs.sandbox }} + secrets: + AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} From d9e62999f226202fceb792c79b57401f0935ce3f Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Mon, 15 Dec 2025 16:43:59 +0000 Subject: [PATCH 17/23] Update MNS end-to-end test workflow reference in sandbox deployment --- .github/workflows/lambdas-deploy-feature-to-sandbox.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lambdas-deploy-feature-to-sandbox.yml b/.github/workflows/lambdas-deploy-feature-to-sandbox.yml index 
085b34c7fe..14a8aa3b02 100644 --- a/.github/workflows/lambdas-deploy-feature-to-sandbox.yml +++ b/.github/workflows/lambdas-deploy-feature-to-sandbox.yml @@ -119,7 +119,7 @@ jobs: AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} run_mns_e2etest: - uses: ./.github/workflows/base-e2e-mns-backendtest.yml + uses: ./.github/workflows/base-e2e-mns.yml needs: ["deploy_all_lambdas", "disable_fhir_stub"] with: build_branch: ${{ inputs.build_branch }} From 7007499789462dd637659b00e143208911e1d60d Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Mon, 15 Dec 2025 16:58:13 +0000 Subject: [PATCH 18/23] Remove MNS test steps from end-to-end workflow configurations --- .github/workflows/base-e2e-backendtest.yml | 4 ---- .github/workflows/base-e2e-mns.yml | 8 -------- lambdas/tests/e2e/helpers/data_helper.py | 1 - 3 files changed, 13 deletions(-) diff --git a/.github/workflows/base-e2e-backendtest.yml b/.github/workflows/base-e2e-backendtest.yml index 3bddb12252..0d0f98282a 100644 --- a/.github/workflows/base-e2e-backendtest.yml +++ b/.github/workflows/base-e2e-backendtest.yml @@ -87,7 +87,3 @@ jobs: - name: Run E2e Tests run: | make test-api-e2e - - - name: run MNS tests - run: | - make test-mns-e2e diff --git a/.github/workflows/base-e2e-mns.yml b/.github/workflows/base-e2e-mns.yml index 027ab1c492..06a2ad4f8a 100644 --- a/.github/workflows/base-e2e-mns.yml +++ b/.github/workflows/base-e2e-mns.yml @@ -63,14 +63,6 @@ jobs: source ./lambdas/venv/bin/activate echo PATH=$PATH >> $GITHUB_ENV - - name: Get MNS Queue URL - run: | - QUEUE_NAME="${SANDBOX}-mns-notification-queue" - MNS_QUEUE_URL=$(aws sqs get-queue-url --queue-name $QUEUE_NAME --query 'QueueUrl' --output text) - echo "::add-mask::$MNS_QUEUE_URL" - echo "MNS_NOTIFICATION_QUEUE_URL=$MNS_QUEUE_URL" >> $GITHUB_ENV - env: - SANDBOX: ${{ inputs.sandbox }} - name: Set E2e Test Variables run: | diff --git a/lambdas/tests/e2e/helpers/data_helper.py b/lambdas/tests/e2e/helpers/data_helper.py index 6f362ddd46..9e28dd2055 100644 --- a/lambdas/tests/e2e/helpers/data_helper.py +++ b/lambdas/tests/e2e/helpers/data_helper.py @@ -24,7 +24,6 @@ def __init__( self.record_type = record_type self.dynamo_service = DynamoDBService() self.s3_service = S3Service() - self.build_env(table_name, bucket_name) def build_env(self, table_name, bucket_name): From 73b6cc016356f11e017e2b6febeb1959757659a4 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Tue, 16 Dec 2025 12:06:13 +0000 Subject: [PATCH 19/23] Refactor MNS helper functions and update death notification tests for improved clarity and functionality --- lambdas/tests/e2e/mns/mns_helper.py | 14 +- lambdas/tests/e2e/mns/test_mns_death.py | 174 +++++++++++------- lambdas/tests/e2e/mns/test_mns_process.py | 204 ++++++++++++++-------- 3 files changed, 253 insertions(+), 139 deletions(-) diff --git a/lambdas/tests/e2e/mns/mns_helper.py b/lambdas/tests/e2e/mns/mns_helper.py index 3df67006bc..366c0056a3 100644 --- a/lambdas/tests/e2e/mns/mns_helper.py +++ b/lambdas/tests/e2e/mns/mns_helper.py @@ -195,16 +195,18 @@ def cleanup_lloyd_george_record(self, record_id: str): def cleanup_document_review_record(self, record_id: str, version: int = 1): try: - self.dynamo_service.delete_item( - table_name=DOCUMENT_REVIEW_TABLE, - key={"ID": record_id, "Version": version}, - ) + records = self.get_all_document_review_versions(record_id) + for record in records: + self.dynamo_service.delete_item( + table_name=DOCUMENT_REVIEW_TABLE, + key={"ID": record_id, 
"Version": record["Version"]}, + ) except Exception as e: print(f"Error cleaning up document review record {record_id}: {e}") - def wait_for_update(self, check_func, max_retries=3, delay=60): + def wait_for_update(self, check_func, max_retries=5, delay=10): for i in range(max_retries): if check_func(): return True - time.sleep(delay * i) + time.sleep(delay) return False diff --git a/lambdas/tests/e2e/mns/test_mns_death.py b/lambdas/tests/e2e/mns/test_mns_death.py index ca3b277559..d0e4ac62b0 100644 --- a/lambdas/tests/e2e/mns/test_mns_death.py +++ b/lambdas/tests/e2e/mns/test_mns_death.py @@ -1,112 +1,168 @@ -from tests.e2e.mns.mns_helper import TEST_NHS_NUMBER_DEATH, TEST_ORIGINAL_ODS +import pytest +import time + +from tests.e2e.mns.mns_helper import MNSTestHelper, TEST_ORIGINAL_ODS from enums.death_notification_status import DeathNotificationStatus from enums.document_review_status import DocumentReviewStatus from enums.patient_ods_inactive_status import PatientOdsInactiveStatus +TEST_NHS_FORMAL = "9730135967" +TEST_NHS_INFORMAL = "9730154384" +TEST_NHS_BOTH = "9730153949" + + +@pytest.fixture(scope="session") +def setup_all_death_tests(): + helper = MNSTestHelper() + + formal_lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_FORMAL, ods_code=TEST_ORIGINAL_ODS + ) + + informal_lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_INFORMAL, ods_code=TEST_ORIGINAL_ODS + ) + + both_lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_BOTH, ods_code=TEST_ORIGINAL_ODS + ) + + both_review_record = helper.create_document_review_record( + nhs_number=TEST_NHS_BOTH, ods_code=TEST_ORIGINAL_ODS + ) + + initial_formal_lg = helper.get_lloyd_george_record(formal_lg_record["id"]) + initial_informal_lg = helper.get_lloyd_george_record(informal_lg_record["id"]) + initial_both_lg = helper.get_lloyd_george_record(both_lg_record["id"]) + initial_both_review = helper.get_document_review_record(both_review_record["id"], version=1) + + helper.send_death_notification_message( + nhs_number=TEST_NHS_FORMAL, death_status=DeathNotificationStatus.FORMAL + ) + helper.send_death_notification_message( + nhs_number=TEST_NHS_INFORMAL, death_status=DeathNotificationStatus.INFORMAL + ) + helper.send_death_notification_message( + nhs_number=TEST_NHS_BOTH, death_status=DeathNotificationStatus.FORMAL + ) + + print("\nWaiting 50 seconds for all death notification messages to be processed...") + time.sleep(50) + print("Wait complete, starting death tests...") + + setup_data = { + "formal": { + "record_id": formal_lg_record["id"], + "initial_record": initial_formal_lg, + }, + "informal": { + "record_id": informal_lg_record["id"], + "initial_record": initial_informal_lg, + }, + "both_tables": { + "lg_record_id": both_lg_record["id"], + "review_record_id": both_review_record["id"], + "initial_lg": initial_both_lg, + "initial_review": initial_both_review, + }, + } + + yield setup_data + + helper.cleanup_lloyd_george_record(formal_lg_record["id"]) + helper.cleanup_lloyd_george_record(informal_lg_record["id"]) + helper.cleanup_lloyd_george_record(both_lg_record["id"]) + helper.cleanup_document_review_record(both_review_record["id"]) + + +@pytest.fixture +def mns_helper(): + return MNSTestHelper() + + +@pytest.fixture +def setup_formal_death_test(setup_all_death_tests): + return setup_all_death_tests["formal"] + + +@pytest.fixture +def setup_informal_death_test(setup_all_death_tests): + return setup_all_death_tests["informal"] + + +@pytest.fixture +def 
setup_death_both_tables_test(setup_all_death_tests): + return setup_all_death_tests["both_tables"] + class TestMNSDeathNotification: def test_formal_death_notification_marks_patient_deceased( - self, mns_helper, test_records + self, mns_helper, setup_formal_death_test ): - lg_record = mns_helper.create_lloyd_george_record( - nhs_number=TEST_NHS_NUMBER_DEATH, ods_code=TEST_ORIGINAL_ODS - ) - test_records["lloyd_george"].append(lg_record["id"]) + record_id = setup_formal_death_test["record_id"] + initial_record = setup_formal_death_test["initial_record"] - initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) assert initial_record["CurrentGpOds"] == TEST_ORIGINAL_ODS - mns_helper.send_death_notification_message( - nhs_number=TEST_NHS_NUMBER_DEATH, death_status=DeathNotificationStatus.FORMAL - ) - def check_update(): return ( - mns_helper.get_lloyd_george_record(lg_record["id"])["CurrentGpOds"] + mns_helper.get_lloyd_george_record(record_id)["CurrentGpOds"] == PatientOdsInactiveStatus.DECEASED.value ) - mns_helper.wait_for_update(check_update) + update_successful = mns_helper.wait_for_update(check_update) + assert update_successful, "Lloyd George record was not marked as deceased" - def test_informal_death_notification_no_change(self, mns_helper, test_records): - lg_record = mns_helper.create_lloyd_george_record( - nhs_number=TEST_NHS_NUMBER_DEATH, ods_code=TEST_ORIGINAL_ODS - ) - test_records["lloyd_george"].append(lg_record["id"]) + def test_informal_death_notification_no_change(self, mns_helper, setup_informal_death_test): + record_id = setup_informal_death_test["record_id"] + initial_record = setup_informal_death_test["initial_record"] - initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) initial_last_updated = initial_record["LastUpdated"] - mns_helper.send_death_notification_message( - nhs_number=TEST_NHS_NUMBER_DEATH, death_status=DeathNotificationStatus.INFORMAL - ) - def check_update(): - final_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + final_record = mns_helper.get_lloyd_george_record(record_id) current_gp_updated = final_record["CurrentGpOds"] == TEST_ORIGINAL_ODS last_updated = final_record["LastUpdated"] == initial_last_updated return current_gp_updated and last_updated - mns_helper.wait_for_update(check_update) + update_successful = mns_helper.wait_for_update(check_update) + assert update_successful, "Lloyd George record should not have changed for informal death" - def test_formal_death_updates_both_tables(self, mns_helper, test_records): - lg_record = mns_helper.create_lloyd_george_record( - nhs_number=TEST_NHS_NUMBER_DEATH, ods_code=TEST_ORIGINAL_ODS - ) - test_records["lloyd_george"].append(lg_record["id"]) - - review_record = mns_helper.create_document_review_record( - nhs_number=TEST_NHS_NUMBER_DEATH, ods_code=TEST_ORIGINAL_ODS - ) - test_records["document_review"].append(review_record["id"]) - - mns_helper.send_death_notification_message( - nhs_number=TEST_NHS_NUMBER_DEATH, death_status=DeathNotificationStatus.FORMAL - ) + def test_formal_death_updates_both_tables(self, mns_helper, setup_death_both_tables_test): + lg_record_id = setup_death_both_tables_test["lg_record_id"] + review_record_id = setup_death_both_tables_test["review_record_id"] def check_death_updates(): try: - lg_record_updated = mns_helper.get_lloyd_george_record(lg_record["id"]) + lg_record_updated = mns_helper.get_lloyd_george_record(lg_record_id) lg_deceased = ( lg_record_updated["CurrentGpOds"] == PatientOdsInactiveStatus.DECEASED.value ) new_version 
= mns_helper.get_document_review_record( - review_record["id"], version=2 + review_record_id, version=2 ) review_deceased = ( new_version is not None and new_version["Custodian"] == PatientOdsInactiveStatus.DECEASED.value ) - + if new_version: + assert ( + new_version["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value + ) return lg_deceased and review_deceased except Exception: return False - update_successful = mns_helper.wait_for_update( - check_death_updates - ) + update_successful = mns_helper.wait_for_update(check_death_updates) assert ( update_successful ), "Both tables were not marked as deceased after formal death notification" - final_lg_record = mns_helper.get_lloyd_george_record(lg_record["id"]) - assert ( - final_lg_record["CurrentGpOds"] == PatientOdsInactiveStatus.DECEASED.value - ) - - final_review_v2 = mns_helper.get_document_review_record( - review_record["id"], version=2 - ) - assert final_review_v2["Custodian"] == PatientOdsInactiveStatus.DECEASED.value - assert ( - final_review_v2["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value - ) - final_review_v1 = mns_helper.get_document_review_record( - review_record["id"], version=1 + review_record_id, version=1 ) assert ( final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value diff --git a/lambdas/tests/e2e/mns/test_mns_process.py b/lambdas/tests/e2e/mns/test_mns_process.py index 6be578f6f2..ea2777cba6 100644 --- a/lambdas/tests/e2e/mns/test_mns_process.py +++ b/lambdas/tests/e2e/mns/test_mns_process.py @@ -1,9 +1,85 @@ import pytest +import time -from tests.e2e.mns.mns_helper import MNSTestHelper, TEST_NHS_NUMBER, TEST_ORIGINAL_ODS, TEST_NEW_ODS +from tests.e2e.mns.mns_helper import MNSTestHelper, TEST_ORIGINAL_ODS, TEST_NEW_ODS from enums.document_review_status import DocumentReviewStatus +TEST_NHS_LG = "9730154198" +TEST_NHS_DR = "9730154201" +TEST_NHS_NP = "9730154384" +TEST_NHS_BOTH = "9730154422" + +@pytest.fixture(scope="session") +def setup_all_tests(): + helper = MNSTestHelper() + + lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_LG, ods_code=TEST_ORIGINAL_ODS + ) + + review_record = helper.create_document_review_record( + nhs_number=TEST_NHS_DR, ods_code=TEST_ORIGINAL_ODS + ) + + non_pending_record = helper.create_document_review_record( + nhs_number=TEST_NHS_NP, + ods_code=TEST_ORIGINAL_ODS, + review_status=DocumentReviewStatus.APPROVED, + ) + + both_lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_BOTH, ods_code=TEST_ORIGINAL_ODS + ) + + both_review_record = helper.create_document_review_record( + nhs_number=TEST_NHS_BOTH, ods_code=TEST_ORIGINAL_ODS + ) + + initial_lg = helper.get_lloyd_george_record(lg_record["id"]) + initial_dr = helper.get_document_review_record(review_record["id"], version=1) + initial_np = helper.get_document_review_record(non_pending_record["id"], version=1) + initial_both_lg = helper.get_lloyd_george_record(both_lg_record["id"]) + initial_both_review = helper.get_document_review_record(both_review_record["id"], version=1) + + helper.send_gp_change_message(TEST_NHS_LG) + helper.send_gp_change_message(TEST_NHS_DR) + helper.send_gp_change_message(TEST_NHS_NP) + helper.send_gp_change_message(TEST_NHS_BOTH) + + print("\nWaiting 50 seconds for all SQS messages to be processed...") + time.sleep(50) + print("Wait complete, starting tests...") + + setup_data = { + "lloyd_george": { + "record_id": lg_record["id"], + "initial_record": initial_lg, + }, + "document_review": { + "record_id": review_record["id"], + 
"initial_record": initial_dr, + }, + "non_pending_review": { + "record_id": non_pending_record["id"], + "initial_record": initial_np, + }, + "both_tables": { + "lg_record_id": both_lg_record["id"], + "review_record_id": both_review_record["id"], + "initial_lg": initial_both_lg, + "initial_review": initial_both_review, + }, + } + + yield setup_data + + helper.cleanup_lloyd_george_record(lg_record["id"]) + helper.cleanup_lloyd_george_record(both_lg_record["id"]) + helper.cleanup_document_review_record(review_record["id"]) + helper.cleanup_document_review_record(non_pending_record["id"]) + helper.cleanup_document_review_record(both_review_record["id"]) + @pytest.fixture def mns_helper(): @@ -11,34 +87,36 @@ def mns_helper(): @pytest.fixture -def test_records(): - records = {"lloyd_george": [], "document_review": []} - yield records +def setup_lloyd_george_test(setup_all_tests): + return setup_all_tests["lloyd_george"] - helper = MNSTestHelper() - for record_id in records["lloyd_george"]: - helper.cleanup_lloyd_george_record(record_id) - for record_id in records["document_review"]: - helper.cleanup_document_review_record(record_id) + +@pytest.fixture +def setup_document_review_test(setup_all_tests): + return setup_all_tests["document_review"] + + +@pytest.fixture +def setup_non_pending_review_test(setup_all_tests): + return setup_all_tests["non_pending_review"] + + +@pytest.fixture +def setup_both_tables_test(setup_all_tests): + return setup_all_tests["both_tables"] class TestMNSChangeOfGP: - def test_gp_change_updates_lloyd_george_record(self, mns_helper, test_records): - lg_record = mns_helper.create_lloyd_george_record( - nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS - ) - test_records["lloyd_george"].append(lg_record["id"]) + def test_gp_change_updates_lloyd_george_record(self, mns_helper, setup_lloyd_george_test): + record_id = setup_lloyd_george_test["record_id"] + initial_record = setup_lloyd_george_test["initial_record"] - initial_record = mns_helper.get_lloyd_george_record(lg_record["id"]) print(initial_record) - assert initial_record["CurrentGpOds"] == TEST_ORIGINAL_ODS assert initial_record["Custodian"] == TEST_ORIGINAL_ODS - mns_helper.send_gp_change_message(TEST_NHS_NUMBER) - def check_update(): - updated_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + updated_record = mns_helper.get_lloyd_george_record(record_id) print(updated_record) last_updated_changed = ( updated_record["LastUpdated"] != initial_record["LastUpdated"] @@ -54,27 +132,20 @@ def check_update(): update_successful = mns_helper.wait_for_update(check_update) assert update_successful, "Lloyd George record was not updated after GP change" - def test_gp_change_updates_document_review_record(self, mns_helper, test_records): - review_record = mns_helper.create_document_review_record( - nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS - ) - test_records["document_review"].append(review_record["id"]) + def test_gp_change_updates_document_review_record(self, mns_helper, setup_document_review_test): + record_id = setup_document_review_test["record_id"] + initial_record = setup_document_review_test["initial_record"] - initial_record = mns_helper.get_document_review_record( - review_record["id"], version=1 - ) assert initial_record["Custodian"] == TEST_ORIGINAL_ODS assert ( initial_record["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value ) assert initial_record["Version"] == 1 - mns_helper.send_gp_change_message(TEST_NHS_NUMBER) - def check_new_version(): try: new_version = 
mns_helper.get_document_review_record( - review_record["id"], version=2 + record_id, version=2 ) return new_version is not None except Exception: @@ -86,14 +157,14 @@ def check_new_version(): ), "New version of document review record was not created after GP change" version_2_record = mns_helper.get_document_review_record( - review_record["id"], version=2 + record_id, version=2 ) assert version_2_record["Version"] == 2 assert version_2_record["Custodian"] == TEST_NEW_ODS assert version_2_record is not None version_1_record = mns_helper.get_document_review_record( - review_record["id"], version=1 + record_id, version=1 ) assert ( version_1_record["ReviewStatus"] @@ -102,61 +173,46 @@ def check_new_version(): assert version_1_record.get("ReviewDate") is not None assert version_1_record["Reviewer"] == TEST_ORIGINAL_ODS - def test_gp_change_non_pending_review_no_new_version( - self, mns_helper, test_records - ): - review_record = mns_helper.create_document_review_record( - nhs_number=TEST_NHS_NUMBER, - ods_code=TEST_ORIGINAL_ODS, - review_status=DocumentReviewStatus.APPROVED, - ) - test_records["document_review"].append(review_record["id"]) - initial_record = mns_helper.get_document_review_record( - review_record["id"], version=1 - ) + def test_gp_change_non_pending_review_no_new_version(self, mns_helper, setup_non_pending_review_test): + record_id = setup_non_pending_review_test["record_id"] + initial_record = setup_non_pending_review_test["initial_record"] + assert initial_record["Custodian"] == TEST_ORIGINAL_ODS assert initial_record["ReviewStatus"] == DocumentReviewStatus.APPROVED.value assert initial_record["Version"] == 1 - mns_helper.send_gp_change_message(TEST_NHS_NUMBER) + def check_no_new_version(): + version_1_record = mns_helper.get_document_review_record(record_id, version=1) + updated = version_1_record.get("Custodian") == TEST_NEW_ODS + return updated - try: - mns_helper.get_document_review_record(review_record["id"], version=2) - assert ( - False - ), "Version 2 should not have been created for non-PENDING_REVIEW record" - except Exception: - pass + no_new_version = mns_helper.wait_for_update(check_no_new_version) + assert no_new_version, "Version 1 should have been updated for non-PENDING_REVIEW record" version_1_record = mns_helper.get_document_review_record( - review_record["id"], version=1 + record_id, version=1 ) assert version_1_record is not None assert version_1_record["Version"] == 1 assert version_1_record["ReviewStatus"] == DocumentReviewStatus.APPROVED.value - assert version_1_record.get("Custodian") == TEST_NEW_ODS - assert version_1_record is not None - - def test_gp_change_updates_both_tables(self, mns_helper, test_records): - lg_record = mns_helper.create_lloyd_george_record( - nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS - ) - test_records["lloyd_george"].append(lg_record["id"]) - - review_record = mns_helper.create_document_review_record( - nhs_number=TEST_NHS_NUMBER, ods_code=TEST_ORIGINAL_ODS - ) - test_records["document_review"].append(review_record["id"]) + assert version_1_record["Custodian"] == TEST_NEW_ODS - initial_lg = mns_helper.get_lloyd_george_record(lg_record["id"]) - mns_helper.get_document_review_record(review_record["id"], version=1) + try: + mns_helper.get_document_review_record(record_id, version=2) + assert False, "Version 2 should not exist" + except Exception: + pass - mns_helper.send_gp_change_message(TEST_NHS_NUMBER) + def test_gp_change_updates_both_tables(self, mns_helper, setup_both_tables_test): + lg_record_id = 
setup_both_tables_test["lg_record_id"] + review_record_id = setup_both_tables_test["review_record_id"] + initial_lg = setup_both_tables_test["initial_lg"] + initial_review = setup_both_tables_test["initial_review"] def check_updates(): try: - updated_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + updated_record = mns_helper.get_lloyd_george_record(lg_record_id) last_updated_changed = ( updated_record["LastUpdated"] != initial_lg["LastUpdated"] ) @@ -171,7 +227,7 @@ def check_updates(): ) new_review_version = mns_helper.get_document_review_record( - review_record["id"], version=2 + review_record_id, version=2 ) review_versioned = new_review_version is not None @@ -182,17 +238,17 @@ def check_updates(): update_successful = mns_helper.wait_for_update(check_updates) assert update_successful, "Both tables were not updated after GP change" - final_lg_record = mns_helper.get_lloyd_george_record(lg_record["id"]) + final_lg_record = mns_helper.get_lloyd_george_record(lg_record_id) assert final_lg_record is not None final_review_v2 = mns_helper.get_document_review_record( - review_record["id"], version=2 + review_record_id, version=2 ) assert final_review_v2 is not None assert final_review_v2["Version"] == 2 final_review_v1 = mns_helper.get_document_review_record( - review_record["id"], version=1 + review_record_id, version=1 ) assert ( final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value From 1f17b0201c94e81814b0e42485a36a1ec250870e Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Tue, 16 Dec 2025 13:32:14 +0000 Subject: [PATCH 20/23] Refactor MNS test files for improved readability and consistency --- lambdas/tests/e2e/mns/test_mns_death.py | 23 +++++++---- lambdas/tests/e2e/mns/test_mns_process.py | 48 +++++++++++++---------- 2 files changed, 44 insertions(+), 27 deletions(-) diff --git a/lambdas/tests/e2e/mns/test_mns_death.py b/lambdas/tests/e2e/mns/test_mns_death.py index d0e4ac62b0..3646d1612e 100644 --- a/lambdas/tests/e2e/mns/test_mns_death.py +++ b/lambdas/tests/e2e/mns/test_mns_death.py @@ -1,10 +1,10 @@ -import pytest import time -from tests.e2e.mns.mns_helper import MNSTestHelper, TEST_ORIGINAL_ODS +import pytest from enums.death_notification_status import DeathNotificationStatus from enums.document_review_status import DocumentReviewStatus from enums.patient_ods_inactive_status import PatientOdsInactiveStatus +from tests.e2e.mns.mns_helper import TEST_ORIGINAL_ODS, MNSTestHelper TEST_NHS_FORMAL = "9730135967" TEST_NHS_INFORMAL = "9730154384" @@ -34,7 +34,9 @@ def setup_all_death_tests(): initial_formal_lg = helper.get_lloyd_george_record(formal_lg_record["id"]) initial_informal_lg = helper.get_lloyd_george_record(informal_lg_record["id"]) initial_both_lg = helper.get_lloyd_george_record(both_lg_record["id"]) - initial_both_review = helper.get_document_review_record(both_review_record["id"], version=1) + initial_both_review = helper.get_document_review_record( + both_review_record["id"], version=1 + ) helper.send_death_notification_message( nhs_number=TEST_NHS_FORMAL, death_status=DeathNotificationStatus.FORMAL @@ -113,7 +115,9 @@ def check_update(): update_successful = mns_helper.wait_for_update(check_update) assert update_successful, "Lloyd George record was not marked as deceased" - def test_informal_death_notification_no_change(self, mns_helper, setup_informal_death_test): + def test_informal_death_notification_no_change( + self, mns_helper, setup_informal_death_test + ): record_id = 
setup_informal_death_test["record_id"] initial_record = setup_informal_death_test["initial_record"] @@ -126,9 +130,13 @@ def check_update(): return current_gp_updated and last_updated update_successful = mns_helper.wait_for_update(check_update) - assert update_successful, "Lloyd George record should not have changed for informal death" + assert ( + update_successful + ), "Lloyd George record should not have changed for informal death" - def test_formal_death_updates_both_tables(self, mns_helper, setup_death_both_tables_test): + def test_formal_death_updates_both_tables( + self, mns_helper, setup_death_both_tables_test + ): lg_record_id = setup_death_both_tables_test["lg_record_id"] review_record_id = setup_death_both_tables_test["review_record_id"] @@ -150,7 +158,8 @@ def check_death_updates(): ) if new_version: assert ( - new_version["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value + new_version["ReviewStatus"] + == DocumentReviewStatus.PENDING_REVIEW.value ) return lg_deceased and review_deceased except Exception: diff --git a/lambdas/tests/e2e/mns/test_mns_process.py b/lambdas/tests/e2e/mns/test_mns_process.py index ea2777cba6..7e4fa5f81c 100644 --- a/lambdas/tests/e2e/mns/test_mns_process.py +++ b/lambdas/tests/e2e/mns/test_mns_process.py @@ -1,15 +1,15 @@ - -import pytest import time -from tests.e2e.mns.mns_helper import MNSTestHelper, TEST_ORIGINAL_ODS, TEST_NEW_ODS +import pytest from enums.document_review_status import DocumentReviewStatus +from tests.e2e.mns.mns_helper import TEST_NEW_ODS, TEST_ORIGINAL_ODS, MNSTestHelper TEST_NHS_LG = "9730154198" TEST_NHS_DR = "9730154201" TEST_NHS_NP = "9730154384" TEST_NHS_BOTH = "9730154422" + @pytest.fixture(scope="session") def setup_all_tests(): helper = MNSTestHelper() @@ -40,7 +40,9 @@ def setup_all_tests(): initial_dr = helper.get_document_review_record(review_record["id"], version=1) initial_np = helper.get_document_review_record(non_pending_record["id"], version=1) initial_both_lg = helper.get_lloyd_george_record(both_lg_record["id"]) - initial_both_review = helper.get_document_review_record(both_review_record["id"], version=1) + initial_both_review = helper.get_document_review_record( + both_review_record["id"], version=1 + ) helper.send_gp_change_message(TEST_NHS_LG) helper.send_gp_change_message(TEST_NHS_DR) @@ -107,7 +109,9 @@ def setup_both_tables_test(setup_all_tests): class TestMNSChangeOfGP: - def test_gp_change_updates_lloyd_george_record(self, mns_helper, setup_lloyd_george_test): + def test_gp_change_updates_lloyd_george_record( + self, mns_helper, setup_lloyd_george_test + ): record_id = setup_lloyd_george_test["record_id"] initial_record = setup_lloyd_george_test["initial_record"] @@ -132,7 +136,9 @@ def check_update(): update_successful = mns_helper.wait_for_update(check_update) assert update_successful, "Lloyd George record was not updated after GP change" - def test_gp_change_updates_document_review_record(self, mns_helper, setup_document_review_test): + def test_gp_change_updates_document_review_record( + self, mns_helper, setup_document_review_test + ): record_id = setup_document_review_test["record_id"] initial_record = setup_document_review_test["initial_record"] @@ -156,16 +162,12 @@ def check_new_version(): update_successful ), "New version of document review record was not created after GP change" - version_2_record = mns_helper.get_document_review_record( - record_id, version=2 - ) + version_2_record = mns_helper.get_document_review_record(record_id, version=2) assert version_2_record["Version"] == 
2 assert version_2_record["Custodian"] == TEST_NEW_ODS assert version_2_record is not None - version_1_record = mns_helper.get_document_review_record( - record_id, version=1 - ) + version_1_record = mns_helper.get_document_review_record(record_id, version=1) assert ( version_1_record["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value @@ -173,8 +175,9 @@ def check_new_version(): assert version_1_record.get("ReviewDate") is not None assert version_1_record["Reviewer"] == TEST_ORIGINAL_ODS - - def test_gp_change_non_pending_review_no_new_version(self, mns_helper, setup_non_pending_review_test): + def test_gp_change_non_pending_review_no_new_version( + self, mns_helper, setup_non_pending_review_test + ): record_id = setup_non_pending_review_test["record_id"] initial_record = setup_non_pending_review_test["initial_record"] @@ -183,16 +186,18 @@ def test_gp_change_non_pending_review_no_new_version(self, mns_helper, setup_non assert initial_record["Version"] == 1 def check_no_new_version(): - version_1_record = mns_helper.get_document_review_record(record_id, version=1) + version_1_record = mns_helper.get_document_review_record( + record_id, version=1 + ) updated = version_1_record.get("Custodian") == TEST_NEW_ODS return updated no_new_version = mns_helper.wait_for_update(check_no_new_version) - assert no_new_version, "Version 1 should have been updated for non-PENDING_REVIEW record" + assert ( + no_new_version + ), "Version 1 should have been updated for non-PENDING_REVIEW record" - version_1_record = mns_helper.get_document_review_record( - record_id, version=1 - ) + version_1_record = mns_helper.get_document_review_record(record_id, version=1) assert version_1_record is not None assert version_1_record["Version"] == 1 assert version_1_record["ReviewStatus"] == DocumentReviewStatus.APPROVED.value @@ -252,4 +257,7 @@ def check_updates(): ) assert ( final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value - ) + ) + assert final_review_v1.get("ReviewDate") is not None + assert final_review_v1["Reviewer"] == initial_review["Custodian"] + assert final_review_v1["Custodian"] != initial_lg["Custodian"] From 7d32d7d0d26191675c172fa8997d6d815c3f06c5 Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Tue, 16 Dec 2025 13:44:44 +0000 Subject: [PATCH 21/23] Refactor MNS helper functions and update informal death notification test for improved clarity and functionality --- lambdas/tests/e2e/mns/conftest.py | 1 - lambdas/tests/e2e/mns/mns_helper.py | 2 +- lambdas/tests/e2e/mns/test_mns_death.py | 30 ++++++++++--------------- 3 files changed, 13 insertions(+), 20 deletions(-) diff --git a/lambdas/tests/e2e/mns/conftest.py b/lambdas/tests/e2e/mns/conftest.py index 1b0fc0f89e..a3dd005212 100644 --- a/lambdas/tests/e2e/mns/conftest.py +++ b/lambdas/tests/e2e/mns/conftest.py @@ -1,5 +1,4 @@ import pytest - from tests.e2e.mns.mns_helper import MNSTestHelper diff --git a/lambdas/tests/e2e/mns/mns_helper.py b/lambdas/tests/e2e/mns/mns_helper.py index 366c0056a3..adfad0f87a 100644 --- a/lambdas/tests/e2e/mns/mns_helper.py +++ b/lambdas/tests/e2e/mns/mns_helper.py @@ -33,7 +33,7 @@ def __init__(self): def get_mns_queue_url(self, workspace: str) -> str: queue_name = f"{workspace}-mns-notification-queue" response = self.sqs_service.client.get_queue_url(QueueName=queue_name) - return response['QueueUrl'] + return response["QueueUrl"] def create_lloyd_george_record(self, nhs_number: str, ods_code: str) -> dict: record_id = str(uuid.uuid4()) diff --git 
a/lambdas/tests/e2e/mns/test_mns_death.py b/lambdas/tests/e2e/mns/test_mns_death.py index 3646d1612e..933b8ac25c 100644 --- a/lambdas/tests/e2e/mns/test_mns_death.py +++ b/lambdas/tests/e2e/mns/test_mns_death.py @@ -115,24 +115,6 @@ def check_update(): update_successful = mns_helper.wait_for_update(check_update) assert update_successful, "Lloyd George record was not marked as deceased" - def test_informal_death_notification_no_change( - self, mns_helper, setup_informal_death_test - ): - record_id = setup_informal_death_test["record_id"] - initial_record = setup_informal_death_test["initial_record"] - - initial_last_updated = initial_record["LastUpdated"] - - def check_update(): - final_record = mns_helper.get_lloyd_george_record(record_id) - current_gp_updated = final_record["CurrentGpOds"] == TEST_ORIGINAL_ODS - last_updated = final_record["LastUpdated"] == initial_last_updated - return current_gp_updated and last_updated - - update_successful = mns_helper.wait_for_update(check_update) - assert ( - update_successful - ), "Lloyd George record should not have changed for informal death" def test_formal_death_updates_both_tables( self, mns_helper, setup_death_both_tables_test @@ -176,3 +158,15 @@ def check_death_updates(): assert ( final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value ) + + def test_informal_death_notification_no_change( + self, mns_helper, setup_informal_death_test + ): + record_id = setup_informal_death_test["record_id"] + initial_record = setup_informal_death_test["initial_record"] + + initial_last_updated = initial_record["LastUpdated"] + + final_record = mns_helper.get_lloyd_george_record(record_id) + assert final_record["CurrentGpOds"] == TEST_ORIGINAL_ODS + assert final_record["LastUpdated"] == initial_last_updated From 68b7ef41f641706b2657f948db9c45de16839c4a Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Fri, 19 Dec 2025 11:23:32 +0000 Subject: [PATCH 22/23] [PRMP-908]clean up test assertions --- lambdas/services/document_upload_review_service.py | 4 +++- .../unit/services/test_document_upload_review_service.py | 2 -- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/lambdas/services/document_upload_review_service.py b/lambdas/services/document_upload_review_service.py index 1383caebf8..5b4830b214 100644 --- a/lambdas/services/document_upload_review_service.py +++ b/lambdas/services/document_upload_review_service.py @@ -182,7 +182,9 @@ def _handle_standard_custodian_update( document=review, key_pair={"ID": review.id, "Version": review.version}, update_fields_name=update_fields, - )def update_document_review_status( + ) + + def update_document_review_status( self, review_document: DocumentUploadReviewReference, condition_expression: str | ConditionBase | None = None, diff --git a/lambdas/tests/unit/services/test_document_upload_review_service.py b/lambdas/tests/unit/services/test_document_upload_review_service.py index 306e428389..bc5891ee65 100644 --- a/lambdas/tests/unit/services/test_document_upload_review_service.py +++ b/lambdas/tests/unit/services/test_document_upload_review_service.py @@ -90,7 +90,6 @@ def test_update_document_review_custodian_updates_all_documents( assert mock_handle_standard.call_count == 3 for review in mock_document_review_references: - assert review.custodian == NEW_ODS_CODE mock_handle_standard.assert_any_call(review, NEW_ODS_CODE, {"custodian"}) @@ -184,7 +183,6 @@ def test_handle_standard_custodian_update_updates_document(mock_service, mocker) 
update_fields_name=update_fields, ) - def test_get_document_review_by_id( mock_service, mock_document_review_references, mocker ): From a3e6fc11764810f2aef5dbed1c3028710b55469b Mon Sep 17 00:00:00 2001 From: NogaNHS <127490765+NogaNHS@users.noreply.github.com> Date: Fri, 19 Dec 2025 11:25:38 +0000 Subject: [PATCH 23/23] [PRMP-908] simplify key_pair assignment in document update logic --- lambdas/services/document_service.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py index 5163edf0d7..1996fc45c7 100644 --- a/lambdas/services/document_service.py +++ b/lambdas/services/document_service.py @@ -231,12 +231,6 @@ def update_document( "key_pair": key_pair or {DocumentReferenceMetadataFields.ID.value: document.id}, } - if key_pair: - update_kwargs["key_pair"] = key_pair - else: - update_kwargs["key_pair"] = { - DocumentReferenceMetadataFields.ID.value: document.id - } if condition_expression: update_kwargs["condition_expression"] = condition_expression
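
For reference, a minimal sketch (not part of the patch series) of the `or`-fallback that `update_document` now relies on after this change, using a hypothetical `ID_FIELD` stand-in for `DocumentReferenceMetadataFields.ID.value`:

    # Illustrative sketch only; ID_FIELD is a stand-in for
    # DocumentReferenceMetadataFields.ID.value from the repository.
    ID_FIELD = "ID"

    def build_key_pair(key_pair: dict | None, document_id: str) -> dict:
        # A falsy key_pair (None or {}) falls back to the document's own ID key,
        # which is what the removed if/else block expressed more verbosely.
        return key_pair or {ID_FIELD: document_id}

    assert build_key_pair(None, "doc-1") == {"ID": "doc-1"}
    assert build_key_pair({"ID": "doc-1", "Version": 2}, "doc-1") == {
        "ID": "doc-1",
        "Version": 2,
    }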