From 16e44c52cf4866eb45714957fda56dcf80aa16a7 Mon Sep 17 00:00:00 2001 From: PedroSoaresNHS Date: Wed, 21 Jan 2026 11:38:58 +0000 Subject: [PATCH 1/6] [PRMP-739] Implement recommended delete lifecycle policies for S3 --- lambdas/services/base/s3_service.py | 185 ++++++++++++++---- .../fhir_document_reference_service_base.py | 18 +- .../services/post_document_review_service.py | 1 + ...st_fhir_document_reference_service_base.py | 65 ++---- .../test_post_document_review_service.py | 181 ++++++++++------- 5 files changed, 284 insertions(+), 166 deletions(-) diff --git a/lambdas/services/base/s3_service.py b/lambdas/services/base/s3_service.py index 3ee42b4e24..dcb6c76039 100644 --- a/lambdas/services/base/s3_service.py +++ b/lambdas/services/base/s3_service.py @@ -2,6 +2,7 @@ from datetime import datetime, timedelta, timezone from io import BytesIO from typing import Any, Mapping +from urllib import parse import boto3 from botocore.client import Config as BotoConfig @@ -18,6 +19,9 @@ class S3Service: EXPIRED_SESSION_WARNING = "Expired session, creating a new role session" S3_PREFIX = "s3://" + DEFAULT_AUTODELETE_TAG_KEY = "autodelete" + DEFAULT_AUTODELETE_TAG_VALUE = "true" + def __new__(cls, *args, **kwargs): if cls._instance is None: cls._instance = super().__new__(cls) @@ -43,17 +47,43 @@ def __init__(self, custom_aws_role=None): self.custom_aws_role, "s3", config=self.config ) + def _refresh_custom_session_if_needed(self) -> None: + if not self.custom_client: + return + if datetime.now(timezone.utc) > self.expiration_time - timedelta(minutes=10): + logger.info(S3Service.EXPIRED_SESSION_WARNING) + self.custom_client, self.expiration_time = self.iam_service.assume_role( + self.custom_aws_role, "s3", config=self.config + ) + + @staticmethod + def build_tagging_query(tags: Mapping[str, str] | None) -> str: + """ + S3 expects Tagging as a URL-encoded querystring, e.g. "autodelete=true&foo=bar" + """ + if not tags: + return "" + return parse.urlencode(dict(tags)) + + @staticmethod + def ensure_autodelete_tag( + tags: Mapping[str, str] | None, + tag_key: str = DEFAULT_AUTODELETE_TAG_KEY, + tag_value: str = DEFAULT_AUTODELETE_TAG_VALUE, + ) -> dict[str, str]: + out = dict(tags or {}) + out.setdefault(tag_key, tag_value) + return out + + # S3 Location should be a minimum of a s3_object_key but can also be a directory location in the form of # {{directory}}/{{s3_object_key}} def create_upload_presigned_url(self, s3_bucket_name: str, s3_object_location: str): + """ + Backwards-compatible wrapper for presigned POST without enforced tags. 
+ """ if self.custom_client: - if datetime.now(timezone.utc) > self.expiration_time - timedelta( - minutes=10 - ): - logger.info(S3Service.EXPIRED_SESSION_WARNING) - self.custom_client, self.expiration_time = self.iam_service.assume_role( - self.custom_aws_role, "s3", config=self.config - ) + self._refresh_custom_session_if_needed() return self.custom_client.generate_presigned_post( s3_bucket_name, s3_object_location, @@ -61,33 +91,76 @@ def create_upload_presigned_url(self, s3_bucket_name: str, s3_object_location: s Conditions=None, ExpiresIn=self.presigned_url_expiry, ) - - def create_put_presigned_url(self, s3_bucket_name: str, file_key: str): - if self.custom_client: - if datetime.now(timezone.utc) > self.expiration_time - timedelta( - minutes=10 - ): - logger.info(S3Service.EXPIRED_SESSION_WARNING) - self.custom_client, self.expiration_time = self.iam_service.assume_role( - self.custom_aws_role, "s3", config=self.config - ) - logger.info("Generating presigned URL") - return self.custom_client.generate_presigned_url( - "put_object", - Params={"Bucket": s3_bucket_name, "Key": file_key}, - ExpiresIn=self.presigned_url_expiry, - ) return None + def create_upload_presigned_post( + self, + s3_bucket_name: str, + s3_object_location: str, + tags: Mapping[str, str] | None = None, + require_autodelete: bool = False, + ): + if not self.custom_client: + return None + + self._refresh_custom_session_if_needed() + + final_tags = ( + self.ensure_autodelete_tag(tags) if require_autodelete else dict(tags or {}) + ) + + fields: dict[str, Any] = {} + conditions: list[Any] = [] + + if final_tags: + tagging = self.build_tagging_query(final_tags) + # For POST policy, tagging uses the "tagging" form field + fields["tagging"] = tagging + conditions.append({"tagging": tagging}) + + return self.custom_client.generate_presigned_post( + s3_bucket_name, + s3_object_location, + Fields=fields or None, + Conditions=conditions or None, + ExpiresIn=self.presigned_url_expiry, + ) + + def create_put_presigned_url( + self, + s3_bucket_name: str, + file_key: str, + tags: Mapping[str, str] | None = None, + require_autodelete: bool = False, + extra_params: Mapping[str, Any] | None = None, + ): + if not self.custom_client: + return None + + self._refresh_custom_session_if_needed() + + final_tags = ( + self.ensure_autodelete_tag(tags) if require_autodelete else dict(tags or {}) + ) + + params: dict[str, Any] = {"Bucket": s3_bucket_name, "Key": file_key} + + if final_tags: + params["Tagging"] = self.build_tagging_query(final_tags) + + if extra_params: + params.update(extra_params) + + logger.info("Generating presigned URL") + return self.custom_client.generate_presigned_url( + "put_object", + Params=params, + ExpiresIn=self.presigned_url_expiry, + ) + def create_download_presigned_url(self, s3_bucket_name: str, file_key: str): if self.custom_client: - if datetime.now(timezone.utc) > self.expiration_time - timedelta( - minutes=10 - ): - logger.info(S3Service.EXPIRED_SESSION_WARNING) - self.custom_client, self.expiration_time = self.iam_service.assume_role( - self.custom_aws_role, "s3", config=self.config - ) + self._refresh_custom_session_if_needed() logger.info("Generating presigned URL") return self.custom_client.generate_presigned_url( "get_object", @@ -143,11 +216,9 @@ def copy_across_bucket( if_none_match, False, ) - else: - raise e - else: - logger.error(f"Copy failed: {e}") - raise e + raise + logger.error(f"Copy failed: {e}") + raise def delete_object( self, s3_bucket_name: str, file_key: str, version_id: str | None = 
None @@ -159,6 +230,34 @@ def delete_object( Bucket=s3_bucket_name, Key=file_key, VersionId=version_id ) + def delete_object_hard(self, s3_bucket_name: str, file_key: str) -> None: + """ + Deletes ALL versions + delete markers for a given key. + """ + try: + paginator = self.client.get_paginator("list_object_versions") + to_delete: list[dict[str, str]] = [] + + for page in paginator.paginate(Bucket=s3_bucket_name, Prefix=file_key): + for v in page.get("Versions", []): + if v.get("Key") == file_key: + to_delete.append({"Key": file_key, "VersionId": v["VersionId"]}) + for m in page.get("DeleteMarkers", []): + if m.get("Key") == file_key: + to_delete.append({"Key": file_key, "VersionId": m["VersionId"]}) + + for i in range(0, len(to_delete), 1000): + chunk = to_delete[i : i + 1000] + if chunk: + self.client.delete_objects( + Bucket=s3_bucket_name, + Delete={"Objects": chunk, "Quiet": True}, + ) + except ClientError as e: + logger.error(f"Hard delete failed for s3://{s3_bucket_name}/{file_key}: {e}") + raise + + def create_object_tag( self, s3_bucket_name: str, file_key: str, tag_key: str, tag_value: str ): @@ -202,7 +301,7 @@ def file_exist_on_s3(self, s3_bucket_name: str, file_key: str) -> bool: ): return False logger.error(str(e), {"Result": "Failed to check if file exists on s3"}) - raise e + raise def list_all_objects(self, bucket_name: str) -> list[dict]: s3_paginator = self.client.get_paginator("list_objects_v2") @@ -236,20 +335,30 @@ def upload_file_obj( s3_bucket_name: str, file_key: str, extra_args: Mapping[str, Any] = None, + require_autodelete: bool = False, + tags: Mapping[str, str] | None = None, ): try: + final_extra_args: dict[str, Any] = dict(extra_args or {}) + + if require_autodelete: + final_tags = self.ensure_autodelete_tag(tags) + final_extra_args["Tagging"] = self.build_tagging_query(final_tags) + elif tags: + final_extra_args["Tagging"] = self.build_tagging_query(tags) + self.client.upload_fileobj( Fileobj=file_obj, Bucket=s3_bucket_name, Key=file_key, - ExtraArgs=extra_args or {}, + ExtraArgs=final_extra_args, ) logger.info(f"Uploaded file object to s3://{s3_bucket_name}/{file_key}") except ClientError as e: logger.error( f"Failed to upload file object to s3://{s3_bucket_name}/{file_key} - {e}" ) - raise e + raise def save_or_create_file(self, source_bucket: str, file_key: str, body: bytes): return self.client.put_object( diff --git a/lambdas/services/fhir_document_reference_service_base.py b/lambdas/services/fhir_document_reference_service_base.py index 1ae935e126..1a65996b63 100644 --- a/lambdas/services/fhir_document_reference_service_base.py +++ b/lambdas/services/fhir_document_reference_service_base.py @@ -52,6 +52,7 @@ def _store_binary_in_s3( file_obj=binary_file, s3_bucket_name=document_reference.s3_bucket_name, file_key=document_reference.s3_upload_key, + require_autodelete=True, ) logger.info( f"Successfully stored binary content in S3: {document_reference.s3_upload_key}" @@ -77,7 +78,9 @@ def _create_s3_presigned_url(self, document_reference: DocumentReference) -> str """Create a pre-signed URL for uploading a file""" try: response = self.s3_service.create_put_presigned_url( - document_reference.s3_bucket_name, document_reference.s3_upload_key + s3_bucket_name=document_reference.s3_bucket_name, + file_key=document_reference.s3_upload_key, + require_autodelete=True, ) logger.info( f"Successfully created pre-signed URL for {document_reference.s3_upload_key}" @@ -138,10 +141,9 @@ def _get_document_reference(self, document_id: str, table) -> DocumentReference: 
if len(documents) > 0: logger.info("Document found for given id") return documents[0] - else: - raise FhirDocumentReferenceException( - f"Did not find any documents for document ID {document_id}" - ) + raise FhirDocumentReferenceException( + f"Did not find any documents for document ID {document_id}" + ) def _determine_document_type(self, fhir_doc: FhirDocumentReference) -> SnomedCode: """Determine the document type based on SNOMED code in the FHIR document""" @@ -190,13 +192,10 @@ def _create_fhir_response( presigned_url: str, ) -> str: """Create a FHIR response document""" - if presigned_url: attachment_url = presigned_url else: - document_retrieve_endpoint = os.getenv( - "DOCUMENT_RETRIEVE_ENDPOINT_APIM", "" - ) + document_retrieve_endpoint = os.getenv("DOCUMENT_RETRIEVE_ENDPOINT_APIM", "") attachment_url = ( document_retrieve_endpoint + "/" @@ -252,6 +251,7 @@ def _handle_document_save( presigned_url = self._create_s3_presigned_url(document_reference) except FhirDocumentReferenceException: raise DocumentRefException(500, LambdaError.InternalServerError) + try: # Save document reference to DynamoDB self._save_document_reference_to_dynamo(dynamo_table, document_reference) diff --git a/lambdas/services/post_document_review_service.py b/lambdas/services/post_document_review_service.py index 859890df29..dd31c8bb0b 100644 --- a/lambdas/services/post_document_review_service.py +++ b/lambdas/services/post_document_review_service.py @@ -154,6 +154,7 @@ def create_review_document_upload_presigned_url( presign_url_response = self.s3_service.create_put_presigned_url( s3_bucket_name=self.staging_bucket, file_key=file_key, + require_autodelete=True, ) presigned_id = f"upload/{upload_id}" deletion_date = datetime.now(timezone.utc) diff --git a/lambdas/tests/unit/services/test_fhir_document_reference_service_base.py b/lambdas/tests/unit/services/test_fhir_document_reference_service_base.py index c91c463c07..e20169425d 100644 --- a/lambdas/tests/unit/services/test_fhir_document_reference_service_base.py +++ b/lambdas/tests/unit/services/test_fhir_document_reference_service_base.py @@ -1,3 +1,4 @@ +import base64 import json import pytest @@ -32,9 +33,8 @@ def mock_service(mocker): mocker.patch("services.fhir_document_reference_service_base.S3Service") mocker.patch("services.fhir_document_reference_service_base.DynamoDBService") - + mocker.patch("services.document_service.DynamoDBService") service = FhirDocumentReferenceServiceBase() - yield service @@ -143,7 +143,6 @@ def test_dynamo_error(mock_service, mocker): def test_save_document_reference_to_dynamo_error(mock_service): """Test _save_document_reference_to_dynamo method with DynamoDB error.""" - mock_service.dynamo_service.create_item.side_effect = ClientError( {"Error": {"Code": "InternalServerError", "Message": "Test error"}}, "CreateItem", @@ -188,10 +187,8 @@ def test_check_nhs_number_with_pds_success(mock_service, mocker): ) mock_service_object.fetch_patient_details.return_value = mock_pds_patient_details - # This should not raise an exception result = mock_service._check_nhs_number_with_pds("9000000009") - # Verify the method was called correctly mock_service_object.fetch_patient_details.assert_called_once_with("9000000009") assert result == mock_pds_patient_details @@ -238,7 +235,7 @@ def test_create_document_reference_with_author(mock_service, mocker): assert result.document_snomed_code_type == "test-code" assert result.custodian == "A12345" assert result.current_gp_ods == "C13579" - assert result.author == "B67890" # Verify author is set + 
assert result.author == "B67890" assert result.version == "2" @@ -277,9 +274,7 @@ def test_create_document_reference_without_custodian(mock_service, mocker): s3_file_key="mock_s3_file_key", ) - assert ( - result.custodian == current_gp_ods - ) # Custodian should default to current_gp_ods + assert result.custodian == current_gp_ods def test_determine_document_type_with_missing_type(mock_service, mocker): @@ -321,7 +316,6 @@ def test_save_document_reference_to_dynamo_success(mock_service): def test_process_fhir_document_reference_with_invalid_base64_data(mock_service): - """Test process_fhir_document_reference with invalid base64 data.""" with pytest.raises(FhirDocumentReferenceException): mock_service._store_binary_in_s3( TEST_DOCUMENT_REFERENCE, b"invalid-base64-data!!!" @@ -331,8 +325,6 @@ def test_process_fhir_document_reference_with_invalid_base64_data(mock_service): def test_determine_document_type_returns_lloyd_george_type( mock_service, valid_fhir_doc_object ): - """Test that determine_document_type returns the lloyd george type for - a lloyd george document""" result = mock_service._determine_document_type(valid_fhir_doc_object) assert result == SnomedCodes.LLOYD_GEORGE.value @@ -341,12 +333,9 @@ def test_determine_document_type_returns_lloyd_george_type( def test_determine_document_type_non_snomed_coding( mock_service, valid_fhir_doc_object: FhirDocumentReference ): - """Test that determine_document_type returns the lloyd george type for - a lloyd george document""" valid_fhir_doc_object.type.coding.append( Coding(system="mocked_system", code="mocked_code", display="mocked_display") ) - valid_fhir_doc_object.type.coding.reverse() result = mock_service._determine_document_type(valid_fhir_doc_object) @@ -357,12 +346,9 @@ def test_determine_document_type_non_snomed_coding( def test_determine_document_type_non_george_lloyd_code( mock_service, valid_fhir_doc_object: FhirDocumentReference ): - """Test that determine_document_type returns the lloyd george type for - a lloyd george document""" valid_fhir_doc_object.type.coding.append( Coding(system=SNOMED_URL, code="mocked_code", display="mocked_display") ) - valid_fhir_doc_object.type.coding.reverse() result = mock_service._determine_document_type(valid_fhir_doc_object) @@ -371,7 +357,6 @@ def test_determine_document_type_non_george_lloyd_code( def test_get_document_reference_no_documents_found(mocker, mock_service): - """Test that get_document_reference raises an error when there are no document results""" mock_service.fetch_documents_from_table = mocker.patch( "services.fhir_document_reference_service_base.DocumentService.fetch_documents_from_table", return_value=[], @@ -382,7 +367,6 @@ def test_get_document_reference_no_documents_found(mocker, mock_service): def test_get_document_reference_returns_document_reference(mocker, mock_service): - """Test that get_document_reference returns the first document reference from the results""" documents = create_test_lloyd_george_doc_store_refs() mock_service.document_service.fetch_documents_from_table = mocker.patch( @@ -396,7 +380,6 @@ def test_get_document_reference_returns_document_reference(mocker, mock_service) def test_create_s3_presigned_url_error(mock_service): - """Test that create_s3_presigned_url raises a FhirDocumentReferenceException on AWS S3 ClientError""" mock_service.s3_service.create_put_presigned_url.side_effect = ClientError( {"Error": {}}, "" ) @@ -407,7 +390,6 @@ def test_create_s3_presigned_url_error(mock_service): def test_create_s3_presigned_url_returns_url(mock_service): 
- """Test that create_s3_presigned_url returns a url""" mock_presigned_url_response = "https://test-bucket.s3.amazonaws.com/" mock_service.s3_service.create_put_presigned_url.return_value = ( mock_presigned_url_response @@ -417,6 +399,11 @@ def test_create_s3_presigned_url_returns_url(mock_service): result = mock_service._create_s3_presigned_url(document) assert result == mock_presigned_url_response + mock_service.s3_service.create_put_presigned_url.assert_called_once_with( + s3_bucket_name=document.s3_bucket_name, + file_key=document.s3_upload_key, + require_autodelete=True, + ) def test_store_binary_in_s3_success(mock_service, mocker): @@ -430,12 +417,12 @@ def test_store_binary_in_s3_success(mock_service, mocker): mock_service.s3_service.upload_file_obj.assert_called_once_with( file_obj=mocker.ANY, s3_bucket_name=TEST_DOCUMENT_REFERENCE.s3_bucket_name, - file_key=TEST_DOCUMENT_REFERENCE.s3_file_key, + file_key=TEST_DOCUMENT_REFERENCE.s3_upload_key, + require_autodelete=True, ) def test_store_binary_in_s3_with_client_error(mock_service): - """Test _store_binary_in_s3 method with S3 ClientError.""" binary_data = b"SGVsbG8gV29ybGQ=" mock_service.s3_service.upload_file_obj.side_effect = ClientError( @@ -453,9 +440,8 @@ def test_store_binary_in_s3_with_client_error(mock_service): def test_store_binary_in_s3_with_large_binary_data(mock_service): - """Test _store_binary_in_s3 method with large binary data.""" - # Create a large binary data (8MB) - binary_data = b"A" * (8 * 1024 * 1024) + raw = b"A" * (6 * 1024 * 1024) # 6MB raw + binary_data = base64.b64encode(raw) mock_service._store_binary_in_s3(TEST_DOCUMENT_REFERENCE, binary_data) @@ -463,35 +449,30 @@ def test_store_binary_in_s3_with_large_binary_data(mock_service): def test_store_binary_in_s3_on_memory_error(mock_service): - """Test that store_binary_in_s3 raises FhirDocumentReferenceException when MemoryError is raised""" mock_service.s3_service.upload_file_obj.side_effect = MemoryError() document = create_test_lloyd_george_doc_store_refs()[0] with pytest.raises(FhirDocumentReferenceException): - mock_service._store_binary_in_s3(document, bytes()) + mock_service._store_binary_in_s3(document, b"SGVsbG8=") def test_store_binary_in_s3_on_oserror(mock_service): - """Test that store_binary_in_s3 raises FhirDocumentReferenceException when OSError is raised""" mock_service.s3_service.upload_file_obj.side_effect = OSError() document = create_test_lloyd_george_doc_store_refs()[0] with pytest.raises(FhirDocumentReferenceException): - mock_service._store_binary_in_s3(document, bytes()) + mock_service._store_binary_in_s3(document, b"SGVsbG8=") def test_store_binary_in_s3_on_ioerror(mock_service): - """Test that store_binary_in_s3 raises FhirDocumentReferenceException when IOError is raised""" mock_service.s3_service.upload_file_obj.side_effect = IOError() document = create_test_lloyd_george_doc_store_refs()[0] with pytest.raises(FhirDocumentReferenceException): - mock_service._store_binary_in_s3(document, bytes()) + mock_service._store_binary_in_s3(document, b"SGVsbG8=") def test_get_dynamo_table_for_patient_data_doc_type(set_env, mock_service): - """Test _get_dynamo_table_for_doc_type method with a non-Lloyd George document type.""" - patient_data_code = SnomedCodes.PATIENT_DATA.value result = mock_service._get_dynamo_table_for_doc_type(patient_data_code) @@ -499,8 +480,6 @@ def test_get_dynamo_table_for_patient_data_doc_type(set_env, mock_service): def test_get_dynamo_table_for_unsupported_doc_type(set_env, mock_service): - """Test 
_get_dynamo_table_for_doc_type method with a non-Lloyd George document type.""" - non_lg_code = SnomedCode(code="non-lg-code", display_name="Non Lloyd George") with pytest.raises(DocumentRefException) as excinfo: @@ -511,7 +490,6 @@ def test_get_dynamo_table_for_unsupported_doc_type(set_env, mock_service): def test_get_dynamo_table_for_lloyd_george_doc_type(set_env, mock_service): - """Test _get_dynamo_table_for_doc_type method with Lloyd George document type.""" lg_code = SnomedCodes.LLOYD_GEORGE.value result = mock_service._get_dynamo_table_for_doc_type(lg_code) @@ -520,8 +498,6 @@ def test_get_dynamo_table_for_lloyd_george_doc_type(set_env, mock_service): def test_create_fhir_response_with_presigned_url(mock_service, mocker): - """Test _create_fhir_response method with a presigned URL.""" - mocker.patch.object( SnomedCodes, "find_by_code", return_value=SnomedCodes.LLOYD_GEORGE.value ) @@ -547,8 +523,6 @@ def test_create_fhir_response_with_presigned_url(mock_service, mocker): def test_create_fhir_response_without_presigned_url(set_env, mock_service, mocker): - """Test _create_fhir_response method without a presigned URL (for binary uploads).""" - mocker.patch.object( SnomedCodes, "find_by_code", return_value=SnomedCodes.LLOYD_GEORGE.value ) @@ -584,8 +558,6 @@ def test_handle_document_save_returns_presigned_url( valid_fhir_doc_object, mock_document_reference, ): - """Test _handle_document_save method returns presigned URL for Lloyd George document.""" - result = mock_service._handle_document_save( mock_document_reference, valid_fhir_doc_object, "test_table" ) @@ -605,6 +577,7 @@ def test_handle_document_save_stores_binary_in_s3( result = mock_service._handle_document_save( mock_document_reference, valid_fhir_doc_object, "test_table" ) + assert result is None mock_service._store_binary_in_s3.assert_called_once() @@ -642,8 +615,6 @@ def test_handle_document_save_create_s3_failure( valid_fhir_doc_object, mock_document_reference, ): - """Test _handle_document_save method raises FhirDocumentReferenceException when S3 presigned URL creation fails.""" - mocker.patch.object( mock_service, "_create_s3_presigned_url", @@ -668,8 +639,6 @@ def test_save_document_reference_to_dynamo_failure( mock_create_s3_presigned_url, mock_document_reference, ): - """Test _handle_document_save method raises FhirDocumentReferenceException when saving to DynamoDB fails.""" - mocker.patch.object( mock_service, "_save_document_reference_to_dynamo", diff --git a/lambdas/tests/unit/services/test_post_document_review_service.py b/lambdas/tests/unit/services/test_post_document_review_service.py index 8576b4a764..37f8817808 100644 --- a/lambdas/tests/unit/services/test_post_document_review_service.py +++ b/lambdas/tests/unit/services/test_post_document_review_service.py @@ -8,7 +8,6 @@ from freezegun import freeze_time from models.document_review import ( DocumentReviewFileDetails, - DocumentReviewUploadEvent, DocumentUploadReviewReference, ) from pydantic import ValidationError @@ -28,37 +27,67 @@ ) from utils.lambda_exceptions import DocumentReviewLambdaException -VALID_EVENT = DocumentReviewUploadEvent( - nhs_number=TEST_NHS_NUMBER, - snomed_code=SnomedCodes.LLOYD_GEORGE.value.code, - documents=["testFile.pdf"], -) +TEST_PRESIGNED_URL_1 = "https://s3.amazonaws.com/presigned1?signature=abc123" +TEST_PRESIGNED_URL_2 = "https://s3.amazonaws.com/presigned2?signature=def456" -MOCK_EVENT_WITH_MULITPLE_FILES = deepcopy(VALID_EVENT) -MOCK_EVENT_WITH_MULITPLE_FILES.documents = ["testFile.pdf", "testFile2.pdf"] +FROZEN_TIMESTAMP 
= 1704110400 # 2024-01-01 12:00:00 UTC -MOCK_DECEASED_PATIENT_DETAILS = deepcopy(EXPECTED_PARSED_PATIENT_BASE_CASE) -MOCK_DECEASED_PATIENT_DETAILS.general_practice_ods = PatientOdsInactiveStatus.DECEASED -MOCK_DECEASED_PATIENT_DETAILS.active = False -MOCK_DECEASED_PATIENT_DETAILS.deceased = True -TEST_PRESIGNED_URL_1 = "https://s3.amazonaws.com/presigned1?signature=abc123" -TEST_PRESIGNED_URL_2 = "https://s3.amazonaws.com/presigned2?signature=def456" +def make_mock_event(*, nhs_number: str, documents: list[str], snomed_code_str: str): + snomed = MagicMock() + snomed.code = snomed_code_str + + event = MagicMock() + event.nhs_number = nhs_number + event.documents = documents + event.snomed_code = snomed + return event + + +@pytest.fixture +def valid_event(): + return make_mock_event( + nhs_number=TEST_NHS_NUMBER, + documents=["testFile.pdf"], + snomed_code_str="16521000000101", + ) + + +@pytest.fixture +def multi_file_event(): + return make_mock_event( + nhs_number=TEST_NHS_NUMBER, + documents=["testFile.pdf", "testFile2.pdf"], + snomed_code_str="16521000000101", + ) + -FROZEN_TIMESTAMP = 1704110400 +@pytest.fixture +def deceased_patient_details(): + pd = deepcopy(EXPECTED_PARSED_PATIENT_BASE_CASE) + pd.general_practice_ods = PatientOdsInactiveStatus.DECEASED + pd.active = False + pd.deceased = True + return pd @pytest.fixture def mock_service(set_env, mocker): + mocker.patch( + "services.post_document_review_service.get_pds_service", + return_value=MagicMock(), + ) mocker.patch("services.post_document_review_service.S3Service") mocker.patch("services.post_document_review_service.DocumentUploadReviewService") service = PostDocumentReviewService() + service.pds_service = MagicMock() + service.s3_service = MagicMock() service.s3_service.presigned_url_expiry = 1800 # 30 minutes - + service.s3_service.S3_PREFIX = "s3://" service.review_document_service = MagicMock() - mocker.patch.object(service, "pds_service") + service.review_document_service.dynamo_service = MagicMock() yield service @@ -71,19 +100,18 @@ def mock_extract_ods(mocker): @freeze_time("2024-01-01 12:00:00") -def test_process_event_happy_path(mock_extract_ods, mock_service, mock_uuid, mocker): +def test_process_event_happy_path(mock_extract_ods, mock_service, mock_uuid, mocker, valid_event): mock_extract_ods.return_value = TEST_CURRENT_GP_ODS mock_service.pds_service.fetch_patient_details.return_value = ( EXPECTED_PARSED_PATIENT_BASE_CASE ) + mocker.patch.object(mock_service, "create_response") mock_create_url = mocker.patch.object( mock_service, "create_review_document_upload_presigned_url" ) mock_create_url.return_value = TEST_PRESIGNED_URL_1 - - mock_service.process_event(VALID_EVENT) - + mock_service.process_event(valid_event) mock_extract_ods.assert_called() mock_service.pds_service.fetch_patient_details.assert_called_with(TEST_NHS_NUMBER) mock_create_url.assert_called_with( @@ -97,111 +125,117 @@ def test_process_event_happy_path(mock_extract_ods, mock_service, mock_uuid, moc @freeze_time("2024-01-01 12:00:00") def test_process_event_multi_file_event( - mock_service, mock_extract_ods, mock_uuid, mocker + mock_service, mock_extract_ods, mocker, multi_file_event ): mock_extract_ods.return_value = TEST_CURRENT_GP_ODS mock_service.pds_service.fetch_patient_details.return_value = ( EXPECTED_PARSED_PATIENT_BASE_CASE ) + + mocker.patch( + "services.post_document_review_service.uuid.uuid4", + side_effect=[TEST_UUID, TEST_UUID, TEST_UUID], + ) + mock_create_url = mocker.patch.object( mock_service, 
"create_review_document_upload_presigned_url" ) - mock_create_url.side_effect = [ - TEST_PRESIGNED_URL_1, - TEST_PRESIGNED_URL_2, - ] + mock_create_url.side_effect = [TEST_PRESIGNED_URL_1, TEST_PRESIGNED_URL_2] expected = { "id": TEST_UUID, "uploadDate": FROZEN_TIMESTAMP, "files": [ - { - "fileName": MOCK_EVENT_WITH_MULITPLE_FILES.documents[0], - "presignedUrl": TEST_PRESIGNED_URL_1, - }, - { - "fileName": MOCK_EVENT_WITH_MULITPLE_FILES.documents[1], - "presignedUrl": TEST_PRESIGNED_URL_2, - }, + {"fileName": multi_file_event.documents[0], "presignedUrl": TEST_PRESIGNED_URL_1}, + {"fileName": multi_file_event.documents[1], "presignedUrl": TEST_PRESIGNED_URL_2}, ], "documentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, "version": 1, } - actual = mock_service.process_event(MOCK_EVENT_WITH_MULITPLE_FILES) + actual = mock_service.process_event(multi_file_event) assert actual == expected def test_process_event_throws_error_failure_to_extract_ods_from_request_context( - mock_extract_ods, mock_service + mock_extract_ods, mock_service, valid_event ): mock_extract_ods.side_effect = OdsErrorException() with pytest.raises(DocumentReviewLambdaException) as e: - mock_service.process_event(VALID_EVENT) + mock_service.process_event(valid_event) assert e.value.status_code == 400 assert e.value.err_code == "UDR_4001" def test_process_event_calls_pds_for_patient_status_with_nhs_number( - mock_service, mock_extract_ods + mock_service, mock_extract_ods, valid_event ): mock_extract_ods.return_value = TEST_CURRENT_GP_ODS mock_service.pds_service.fetch_patient_details.return_value = ( EXPECTED_PARSED_PATIENT_BASE_CASE ) - mock_service.process_event(VALID_EVENT) + mock_service.process_event(valid_event) mock_service.pds_service.fetch_patient_details.assert_called_with(TEST_NHS_NUMBER) -def test_process_event_throws_error_patient_is_DECE(mock_service, mock_extract_ods): - mock_service.pds_service.fetch_patient_details.return_value = ( - MOCK_DECEASED_PATIENT_DETAILS - ) +def test_process_event_throws_error_patient_is_deceased( + mock_service, mock_extract_ods, valid_event, deceased_patient_details +): + mock_extract_ods.return_value = TEST_CURRENT_GP_ODS + mock_service.pds_service.fetch_patient_details.return_value = deceased_patient_details with pytest.raises(DocumentReviewLambdaException) as e: - mock_service.process_event(VALID_EVENT) + mock_service.process_event(valid_event) assert e.value.status_code == 403 assert e.value.err_code == "UDR_4031" -def test_process_event_handles_pds_patient_not_found(mock_service, mock_extract_ods): - mock_service.pds_service.fetch_patient_details.side_effect = ( - PatientNotFoundException() - ) +def test_process_event_handles_pds_patient_not_found(mock_service, mock_extract_ods, valid_event): + mock_extract_ods.return_value = TEST_CURRENT_GP_ODS + mock_service.pds_service.fetch_patient_details.side_effect = PatientNotFoundException() with pytest.raises(DocumentReviewLambdaException) as e: - mock_service.process_event(VALID_EVENT) + mock_service.process_event(valid_event) assert e.value.status_code == 400 assert e.value.err_code == "UDR_4003" -def test_process_event_handles_client_error(mock_service, mock_extract_ods): +def test_process_event_handles_client_error(mock_service, mock_extract_ods, valid_event): + mock_extract_ods.return_value = TEST_CURRENT_GP_ODS + mock_service.pds_service.fetch_patient_details.return_value = ( + EXPECTED_PARSED_PATIENT_BASE_CASE + ) mock_service.review_document_service.create_dynamo_entry.side_effect = ClientError( {"error": "test error 
message"}, "test" ) with pytest.raises(DocumentReviewLambdaException) as e: - mock_service.process_event(VALID_EVENT) + mock_service.process_event(valid_event) assert e.value.status_code == 500 - assert e.value.err_code == "UDR_5002" + assert e.value.err_code == "UDR_5001" + def test_process_event_handles_validation_error_creating_new_dynamo_entry( - mock_service, mock_extract_ods + mock_service, mock_extract_ods, valid_event ): - mock_service.review_document_service.create_dynamo_entry.side_effect = ( - ValidationError("", []) + mock_extract_ods.return_value = TEST_CURRENT_GP_ODS + mock_service.pds_service.fetch_patient_details.return_value = ( + EXPECTED_PARSED_PATIENT_BASE_CASE + ) + mock_service.review_document_service.create_dynamo_entry.side_effect = ValidationError( + "", [] ) with pytest.raises(DocumentReviewLambdaException) as e: - mock_service.process_event(VALID_EVENT) + mock_service.process_event(valid_event) assert e.value.status_code == 500 assert e.value.err_code == "UDR_5002" @@ -209,26 +243,28 @@ def test_process_event_handles_validation_error_creating_new_dynamo_entry( @freeze_time("2024-01-01 12:00:00") def test_create_presigned_urls_for_review_reference_files_creates_presign_writes_to_table( - mock_service, mock_uuid + mock_service, mocker, valid_event ): + mocker.patch( + "services.post_document_review_service.uuid.uuid4", + side_effect=[TEST_UUID, TEST_UUID], + ) document_review_reference = mock_service.create_review_reference_from_event( - VALID_EVENT, TEST_CURRENT_GP_ODS, EXPECTED_PARSED_PATIENT_BASE_CASE + valid_event, TEST_CURRENT_GP_ODS, EXPECTED_PARSED_PATIENT_BASE_CASE ) + mock_service.s3_service.create_put_presigned_url.return_value = TEST_PRESIGNED_URL_1 - mock_service.create_presigned_urls_for_review_reference_files( - document_review_reference - ) + mock_service.create_presigned_urls_for_review_reference_files(document_review_reference) - mock_service.s3_service.create_put_presigned_url.assert_called_once() - mock_service.s3_service.create_put_presigned_url.assert_called_with( + mock_service.s3_service.create_put_presigned_url.assert_called_once_with( s3_bucket_name=MOCK_STAGING_STORE_BUCKET, file_key=f"review/{document_review_reference.id}/{TEST_UUID}", + require_autodelete=True, ) - mock_service.review_document_service.dynamo_service.create_item.assert_called_once() - mock_service.review_document_service.dynamo_service.create_item.assert_called_with( + mock_service.review_document_service.dynamo_service.create_item.assert_called_once_with( table_name=MOCK_EDGE_REFERENCE_TABLE, item={ "ID": f"upload/{TEST_UUID}", @@ -248,13 +284,14 @@ def test_create_review_document_upload_presigned_url_handles_errors(mock_service def test_create_presigned_urls_for_review_reference_files_handles_error( - mock_service, mock_extract_ods, mocker + mock_service, mocker, valid_event ): mocker.patch.object( mock_service, "create_review_document_upload_presigned_url" ).side_effect = DocumentReviewException("Failed to create presigned url") + document_review_reference = mock_service.create_review_reference_from_event( - VALID_EVENT, TEST_CURRENT_GP_ODS, EXPECTED_PARSED_PATIENT_BASE_CASE + valid_event, TEST_CURRENT_GP_ODS, EXPECTED_PARSED_PATIENT_BASE_CASE ) with pytest.raises(DocumentReviewLambdaException) as e: @@ -266,23 +303,25 @@ def test_create_presigned_urls_for_review_reference_files_handles_error( assert e.value.err_code == "UDR_5003" -def test_create_response(mock_service): +@freeze_time("2024-01-01 12:00:00") +def test_create_response(mock_service, valid_event): expected 
= { "id": TEST_UUID, "uploadDate": FROZEN_TIMESTAMP, "files": [ - {"fileName": VALID_EVENT.documents[0], "presignedUrl": TEST_PRESIGNED_URL_1} + {"fileName": valid_event.documents[0], "presignedUrl": TEST_PRESIGNED_URL_1} ], "documentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, "version": 1, } + actual = mock_service.create_response( DocumentUploadReviewReference( id=TEST_UUID, upload_date=FROZEN_TIMESTAMP, files=[ DocumentReviewFileDetails( - file_name=VALID_EVENT.documents[0], + file_name=valid_event.documents[0], presigned_url=TEST_PRESIGNED_URL_1, ) ], @@ -293,4 +332,4 @@ def test_create_response(mock_service): nhs_number=TEST_NHS_NUMBER, ) ) - assert actual == expected \ No newline at end of file + assert actual == expected From 0adf394f3443e9d3213a6274f7e26c24a2c7d411 Mon Sep 17 00:00:00 2001 From: PedroSoaresNHS Date: Thu, 22 Jan 2026 09:54:15 +0000 Subject: [PATCH 2/6] [PRMP-739] testing --- .../tests/e2e/api/test_upload_document_api.py | 52 +++++++++++++------ 1 file changed, 35 insertions(+), 17 deletions(-) diff --git a/lambdas/tests/e2e/api/test_upload_document_api.py b/lambdas/tests/e2e/api/test_upload_document_api.py index 4b929753e2..f6ffe648ca 100644 --- a/lambdas/tests/e2e/api/test_upload_document_api.py +++ b/lambdas/tests/e2e/api/test_upload_document_api.py @@ -96,14 +96,19 @@ def condition(response_json): attachment_url = upload_response["content"][0]["attachment"]["url"] assert ( - f"https://{APIM_ENDPOINT}/national-document-repository/FHIR/R4/DocumentReference/{LLOYD_GEORGE_SNOMED}~" in attachment_url + f"https://{APIM_ENDPOINT}/national-document-repository/FHIR/R4/DocumentReference/{LLOYD_GEORGE_SNOMED}~" + in attachment_url ) base64_data = retrieve_response["content"][0]["attachment"]["data"] assert base64.b64decode(base64_data, validate=True) - assert upload_response == snapshot_json(exclude=paths("id", "date", "content.0.attachment.url")) - assert retrieve_response == snapshot_json(exclude=paths("id", "date", "content.0.attachment.data")) + assert upload_response == snapshot_json( + exclude=paths("id", "date", "content.0.attachment.url") + ) + assert retrieve_response == snapshot_json( + exclude=paths("id", "date", "content.0.attachment.data") + ) def test_create_document_presign(test_data, snapshot_json): @@ -119,13 +124,24 @@ def test_create_document_presign(test_data, snapshot_json): upload_response = retrieve_response.json() lloyd_george_record["id"] = upload_response["id"].split("~")[1] test_data.append(lloyd_george_record) + presign_uri = upload_response["content"][0]["attachment"]["url"] del upload_response["content"][0]["attachment"]["url"] + required_headers = { + "Content-Type": "application/pdf", + "x-amz-tagging": "autodelete=true", + } + sample_pdf_path = os.path.join(os.path.dirname(__file__), "files", "big-dummy.pdf") with open(sample_pdf_path, "rb") as f: - files = {"file": f} - presign_response = requests.put(presign_uri, files=files) + presign_response = requests.put( + presign_uri, + data=f, + headers=required_headers, + ) + + print(presign_response.status_code, presign_response.text) assert presign_response.status_code == 200 retrieve_url = f"https://{API_ENDPOINT}/FhirDocumentReference/{upload_response['id']}" @@ -137,10 +153,12 @@ def condition(response_json): raw_retrieve_response = fetch_with_retry(retrieve_url, condition) retrieve_response = raw_retrieve_response.json() - expected_presign_uri = f"https://{LLOYD_GEORGE_S3_BUCKET}.s3.eu-west-2.amazonaws.com/{lloyd_george_record['nhs_number']}/{lloyd_george_record['id']}" + 
expected_presign_uri = ( + f"https://{LLOYD_GEORGE_S3_BUCKET}.s3.eu-west-2.amazonaws.com/" + f"{lloyd_george_record['nhs_number']}/{lloyd_george_record['id']}" + ) assert expected_presign_uri in retrieve_response["content"][0]["attachment"]["url"] - - assert isinstance(retrieve_response["content"][0]["attachment"]["size"], (int)) + assert isinstance(retrieve_response["content"][0]["attachment"]["size"], int) assert upload_response == snapshot_json(exclude=paths("id", "date")) assert retrieve_response == snapshot_json( @@ -151,11 +169,11 @@ def condition(response_json): def test_create_document_virus(test_data, snapshot_json): lloyd_george_record = {} lloyd_george_record["ods"] = "H81109" - lloyd_george_record["nhs_number"] = "9730154260" - # Attach EICAR data - eicar_string = r"X5O!P%@AP[4\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*" + eicar_string = ( + r"X5O!P%@AP[4\PZX54(P^)7CC)7}$EICAR-STANDARD-ANTIVIRUS-TEST-FILE!$H+H*" + ) lloyd_george_record["data"] = base64.b64encode(eicar_string.encode()).decode() payload = create_upload_payload(lloyd_george_record) @@ -172,15 +190,14 @@ def test_create_document_virus(test_data, snapshot_json): # Poll until processing/scan completes def condition(response_json): logging.info(response_json) - return response_json.get("docStatus") in ( - "cancelled", - "final", - ) + return response_json.get("docStatus") in ("cancelled", "final") raw_retrieve_response = fetch_with_retry(retrieve_url, condition) retrieve_response = raw_retrieve_response.json() - assert upload_response == snapshot_json(exclude=paths("id", "date", "content.0.attachment.url")) + assert upload_response == snapshot_json( + exclude=paths("id", "date", "content.0.attachment.url") + ) assert retrieve_response == snapshot_json(exclude=paths("id", "date")) @@ -192,8 +209,8 @@ def test_create_document_does_not_save_raw(test_data): sample_pdf_path = os.path.join(os.path.dirname(__file__), "files", "dummy.pdf") with open(sample_pdf_path, "rb") as f: lloyd_george_record["data"] = base64.b64encode(f.read()).decode("utf-8") - payload = create_upload_payload(lloyd_george_record) + payload = create_upload_payload(lloyd_george_record) url = f"https://{API_ENDPOINT}/FhirDocumentReference" headers = {"Authorization": "Bearer 123", "X-Api-Key": API_KEY} @@ -202,6 +219,7 @@ def test_create_document_does_not_save_raw(test_data): lloyd_george_record["id"] = json_response.get("id").split("~")[1] test_data.append(lloyd_george_record) + doc_ref = data_helper.retrieve_document_reference(record=lloyd_george_record) assert "Item" in doc_ref assert "RawRequest" not in doc_ref["Item"] From 2019a16e05b26586e2e9988fe5b3b1e8ffa42266 Mon Sep 17 00:00:00 2001 From: PedroSoaresNHS Date: Thu, 22 Jan 2026 10:08:00 +0000 Subject: [PATCH 3/6] [PRMP-739] testing --- .../fhir_document_reference_service_base.py | 2 +- .../services/post_document_review_service.py | 2 +- .../tests/e2e/api/test_upload_document_api.py | 20 +++++++++---------- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/lambdas/services/fhir_document_reference_service_base.py b/lambdas/services/fhir_document_reference_service_base.py index 1a65996b63..7e0fac16ca 100644 --- a/lambdas/services/fhir_document_reference_service_base.py +++ b/lambdas/services/fhir_document_reference_service_base.py @@ -80,7 +80,7 @@ def _create_s3_presigned_url(self, document_reference: DocumentReference) -> str response = self.s3_service.create_put_presigned_url( s3_bucket_name=document_reference.s3_bucket_name, file_key=document_reference.s3_upload_key, - 
require_autodelete=True, + require_autodelete=False, ) logger.info( f"Successfully created pre-signed URL for {document_reference.s3_upload_key}" diff --git a/lambdas/services/post_document_review_service.py b/lambdas/services/post_document_review_service.py index dd31c8bb0b..f221fa7cac 100644 --- a/lambdas/services/post_document_review_service.py +++ b/lambdas/services/post_document_review_service.py @@ -154,7 +154,7 @@ def create_review_document_upload_presigned_url( presign_url_response = self.s3_service.create_put_presigned_url( s3_bucket_name=self.staging_bucket, file_key=file_key, - require_autodelete=True, + require_autodelete=False, ) presigned_id = f"upload/{upload_id}" deletion_date = datetime.now(timezone.utc) diff --git a/lambdas/tests/e2e/api/test_upload_document_api.py b/lambdas/tests/e2e/api/test_upload_document_api.py index f6ffe648ca..d9d90e49e8 100644 --- a/lambdas/tests/e2e/api/test_upload_document_api.py +++ b/lambdas/tests/e2e/api/test_upload_document_api.py @@ -128,21 +128,16 @@ def test_create_document_presign(test_data, snapshot_json): presign_uri = upload_response["content"][0]["attachment"]["url"] del upload_response["content"][0]["attachment"]["url"] - required_headers = { - "Content-Type": "application/pdf", - "x-amz-tagging": "autodelete=true", - } - sample_pdf_path = os.path.join(os.path.dirname(__file__), "files", "big-dummy.pdf") with open(sample_pdf_path, "rb") as f: presign_response = requests.put( presign_uri, data=f, - headers=required_headers, + headers={"Content-Type": "application/pdf"}, + ) + assert presign_response.status_code == 200, ( + f"{presign_response.status_code} {presign_response.text}" ) - - print(presign_response.status_code, presign_response.text) - assert presign_response.status_code == 200 retrieve_url = f"https://{API_ENDPOINT}/FhirDocumentReference/{upload_response['id']}" @@ -162,7 +157,12 @@ def condition(response_json): assert upload_response == snapshot_json(exclude=paths("id", "date")) assert retrieve_response == snapshot_json( - exclude=paths("id", "date", "content.0.attachment.url", "content.0.attachment.size") + exclude=paths( + "id", + "date", + "content.0.attachment.url", + "content.0.attachment.size", + ) ) From c1d89e9952090faa581caea81f9917269277e09d Mon Sep 17 00:00:00 2001 From: PedroSoaresNHS Date: Fri, 23 Jan 2026 09:50:08 +0000 Subject: [PATCH 4/6] [PRMP-739] fixed tests --- .../unit/services/test_fhir_document_reference_service_base.py | 2 +- .../tests/unit/services/test_post_document_review_service.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lambdas/tests/unit/services/test_fhir_document_reference_service_base.py b/lambdas/tests/unit/services/test_fhir_document_reference_service_base.py index e20169425d..c3e2e6aaae 100644 --- a/lambdas/tests/unit/services/test_fhir_document_reference_service_base.py +++ b/lambdas/tests/unit/services/test_fhir_document_reference_service_base.py @@ -402,7 +402,7 @@ def test_create_s3_presigned_url_returns_url(mock_service): mock_service.s3_service.create_put_presigned_url.assert_called_once_with( s3_bucket_name=document.s3_bucket_name, file_key=document.s3_upload_key, - require_autodelete=True, + require_autodelete=False, ) diff --git a/lambdas/tests/unit/services/test_post_document_review_service.py b/lambdas/tests/unit/services/test_post_document_review_service.py index 37f8817808..a4275edf4b 100644 --- a/lambdas/tests/unit/services/test_post_document_review_service.py +++ b/lambdas/tests/unit/services/test_post_document_review_service.py @@ -261,7 
+261,7 @@ def test_create_presigned_urls_for_review_reference_files_creates_presign_writes mock_service.s3_service.create_put_presigned_url.assert_called_once_with( s3_bucket_name=MOCK_STAGING_STORE_BUCKET, file_key=f"review/{document_review_reference.id}/{TEST_UUID}", - require_autodelete=True, + require_autodelete=False, ) mock_service.review_document_service.dynamo_service.create_item.assert_called_once_with( From 4f6ebbe60b9a83b9a1dfbf310ac1ce37a5c8825a Mon Sep 17 00:00:00 2001 From: SWhyteAnswer Date: Tue, 27 Jan 2026 14:42:30 +0000 Subject: [PATCH 5/6] [PRMP-1215] mock login url fix for test --- lambdas/services/mock_login_redirect_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lambdas/services/mock_login_redirect_service.py b/lambdas/services/mock_login_redirect_service.py index 9c7c08fb23..875dd5f53e 100644 --- a/lambdas/services/mock_login_redirect_service.py +++ b/lambdas/services/mock_login_redirect_service.py @@ -25,7 +25,7 @@ def prepare_redirect_response(self, event): state = "".join(random.choices(string.ascii_letters + string.digits, k=30)) self.save_state_in_dynamo_db(state) - if os.getenv("WORKSPACE") == "pre-prod": + if os.getenv("WORKSPACE") in ["pre-prod", "ndr-test"]: clean_url = re.sub(r"^api.", "", host) else: clean_url = re.sub(r"^api-", "", host) From ba915fcf3fbf520b9647db7427826ce0f9dededb Mon Sep 17 00:00:00 2001 From: PedroSoaresNHS Date: Tue, 3 Feb 2026 08:36:17 +0000 Subject: [PATCH 6/6] [PRMP-739] updated tests --- lambdas/tests/unit/services/test_post_document_review_service.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lambdas/tests/unit/services/test_post_document_review_service.py b/lambdas/tests/unit/services/test_post_document_review_service.py index 6e13db23e8..7003dc1b82 100644 --- a/lambdas/tests/unit/services/test_post_document_review_service.py +++ b/lambdas/tests/unit/services/test_post_document_review_service.py @@ -225,6 +225,7 @@ def test_create_presigned_urls_for_review_reference_files_creates_presign_writes mock_service.s3_service.create_put_presigned_url.assert_called_with( s3_bucket_name=MOCK_STAGING_STORE_BUCKET, file_key=f"review/{document_review_reference.id}/{TEST_UUID}", + require_autodelete=False, ) mock_service.review_document_service.dynamo_service.create_item.assert_called_once()
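
Note on the tagging contract the patches above depend on. When "Tagging" is
included in the Params of generate_presigned_url("put_object", ...), boto3
signs it as the x-amz-tagging header, so the uploader must send that exact
header or S3 rejects the PUT with a 403 signature error. That is why the e2e
test in PATCH 2/6 sends "x-amz-tagging": "autodelete=true", and it would also
explain why PATCH 3/6 and 4/6 relax require_autodelete to False on the
presigned-PUT paths, where callers cannot be relied on to set signed headers.
The sketch below shows both halves of the mechanism under stated assumptions:
the bucket name, object key, and one-day expiry are illustrative; the
lifecycle rule itself does not appear in this diff (the PATCH 1/6 subject only
references the "recommended delete lifecycle policies"); and the tag key/value
mirror DEFAULT_AUTODELETE_TAG_KEY / DEFAULT_AUTODELETE_TAG_VALUE from
s3_service.py.

    import boto3
    import requests

    s3 = boto3.client("s3")
    bucket = "example-staging-bucket"  # illustrative, not a real NDR bucket

    # 1) Lifecycle rule that expires anything tagged autodelete=true. On a
    #    versioned bucket this only creates a delete marker, which is
    #    presumably why PATCH 1/6 also adds delete_object_hard() to purge
    #    all versions and delete markers explicitly.
    s3.put_bucket_lifecycle_configuration(
        Bucket=bucket,
        LifecycleConfiguration={
            "Rules": [
                {
                    "ID": "expire-autodelete-objects",
                    "Filter": {"Tag": {"Key": "autodelete", "Value": "true"}},
                    "Status": "Enabled",
                    "Expiration": {"Days": 1},  # illustrative window
                }
            ]
        },
    )

    # 2) Presigned PUT with the tag signed into the URL, matching what
    #    create_put_presigned_url(..., require_autodelete=True) produces.
    url = s3.generate_presigned_url(
        "put_object",
        Params={
            "Bucket": bucket,
            "Key": "uploads/demo.pdf",  # illustrative key
            "Tagging": "autodelete=true",
        },
        ExpiresIn=1800,  # matches presigned_url_expiry in the unit tests
    )
    resp = requests.put(
        url,
        data=b"%PDF-1.4 ...",
        headers={"x-amz-tagging": "autodelete=true"},  # must match signed value
    )
    resp.raise_for_status()

For uploads from untrusted clients, the presigned POST path added in PATCH 1/6
(create_upload_presigned_post) is the stronger enforcement point, because the
{"tagging": ...} policy condition makes S3 reject any form submission whose
tagging field differs. One caveat worth verifying: the POST Object "tagging"
form field is documented as an XML <Tagging> document rather than the
URL-encoded query string that x-amz-tagging accepts, so the
build_tagging_query() output used for the POST Fields/Conditions may need
converting to that XML form before the policy will match real uploads.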