diff --git a/lambdas/enums/infrastructure.py b/lambdas/enums/infrastructure.py index fcfb8dd9fc..c6bc1e9ce5 100644 --- a/lambdas/enums/infrastructure.py +++ b/lambdas/enums/infrastructure.py @@ -18,15 +18,12 @@ def __getattr__(cls, name): class DynamoTables(StrEnum, metaclass=DynamoMeta): LLOYD_GEORGE = "LloydGeorgeReferenceMetadata" - PDM = "PDMDocumentMetadata" CORE = "COREDocumentMetadata" def __str__(self) -> str: workspace = os.getenv("WORKSPACE") if not workspace: - logger.error( - "No workspace environment variable found during table definition." - ) + logger.error("No workspace environment variable found during table definition.") raise DocumentRefException(500, LambdaError.EnvMissing) return f"{workspace}_{self.value}" diff --git a/lambdas/handlers/document_reference_virus_scan_handler.py b/lambdas/handlers/document_reference_virus_scan_handler.py index 782b16a165..b4982bb45e 100644 --- a/lambdas/handlers/document_reference_virus_scan_handler.py +++ b/lambdas/handlers/document_reference_virus_scan_handler.py @@ -15,14 +15,12 @@ @set_request_context_for_logging @ensure_environment_variables( names=[ - "LLOYD_GEORGE_DYNAMODB_NAME", - "PDM_DYNAMODB_NAME", "STAGING_STORE_BUCKET_NAME", "LLOYD_GEORGE_BUCKET_NAME", "PDM_BUCKET_NAME", "VIRUS_SCAN_STUB", "DOCUMENT_REVIEW_DYNAMODB_NAME", - "PENDING_REVIEW_BUCKET_NAME" + "PENDING_REVIEW_BUCKET_NAME", ] ) def lambda_handler(event, context): @@ -42,6 +40,4 @@ def lambda_handler(event, context): service.handle_upload_document_reference_request(object_key, object_size) - return ApiGatewayResponse( - 200, "Virus Scan was successful", "POST" - ).create_api_gateway_response() + return ApiGatewayResponse(200, "Virus Scan was successful", "POST").create_api_gateway_response() diff --git a/lambdas/handlers/get_fhir_document_reference_handler.py b/lambdas/handlers/get_fhir_document_reference_handler.py index d371a0934b..892e7c1f51 100644 --- a/lambdas/handlers/get_fhir_document_reference_handler.py +++ 
b/lambdas/handlers/get_fhir_document_reference_handler.py @@ -27,8 +27,6 @@ "APPCONFIG_APPLICATION", "APPCONFIG_CONFIGURATION", "APPCONFIG_ENVIRONMENT", - "LLOYD_GEORGE_DYNAMODB_NAME", - "PDM_DYNAMODB_NAME", "PRESIGNED_ASSUME_ROLE", "CLOUDFRONT_URL", ] @@ -41,35 +39,21 @@ def lambda_handler(event, context): document_id, snomed_code = extract_document_parameters(event) get_document_service = GetFhirDocumentReferenceService() - document_reference = get_document_service.handle_get_document_reference_request( - snomed_code, document_id - ) + document_reference = get_document_service.handle_get_document_reference_request(snomed_code, document_id) if selected_role_id and bearer_token: - verify_user_authorisation( - bearer_token, selected_role_id, document_reference.nhs_number - ) + verify_user_authorisation(bearer_token, selected_role_id, document_reference.nhs_number) - document_reference_response = ( - get_document_service.create_document_reference_fhir_response( - document_reference - ) - ) + document_reference_response = get_document_service.create_document_reference_fhir_response(document_reference) - logger.info( - f"Successfully retrieved document reference for document_id: {document_id}, snomed_code: {snomed_code}" - ) + logger.info(f"Successfully retrieved document reference for document_id: {document_id}, snomed_code: {snomed_code}") - return ApiGatewayResponse( - status_code=200, body=document_reference_response, methods="GET" - ).create_api_gateway_response() + return ApiGatewayResponse(status_code=200, body=document_reference_response, methods="GET").create_api_gateway_response() except GetFhirDocumentReferenceException as exception: return ApiGatewayResponse( status_code=exception.status_code, - body=exception.error.create_error_response().create_error_fhir_response( - exception.error.value.get("fhir_coding") - ), + body=exception.error.create_error_response().create_error_fhir_response(exception.error.value.get("fhir_coding")), methods="GET", 
).create_api_gateway_response() @@ -81,9 +65,7 @@ def extract_document_parameters(event): if not document_id or not snomed_code: logger.error("Missing document id or snomed code in request path parameters.") - raise GetFhirDocumentReferenceException( - 400, LambdaError.DocumentReferenceMissingParameters - ) + raise GetFhirDocumentReferenceException(400, LambdaError.DocumentReferenceMissingParameters) return document_id, snomed_code @@ -101,19 +83,13 @@ def verify_user_authorisation(bearer_token, selected_role_id, nhs_number): userinfo = oidc_service.fetch_userinfo(bearer_token) org_ods_code = oidc_service.fetch_user_org_code(userinfo, selected_role_id) - smartcard_role_code, _ = oidc_service.fetch_user_role_code( - userinfo, selected_role_id, "R" - ) + smartcard_role_code, _ = oidc_service.fetch_user_role_code(userinfo, selected_role_id, "R") except (OidcApiException, AuthorisationException) as e: logger.error(f"Authorization error: {str(e)}") - raise GetFhirDocumentReferenceException( - 403, LambdaError.DocumentReferenceUnauthorised - ) + raise GetFhirDocumentReferenceException(403, LambdaError.DocumentReferenceUnauthorised) try: - search_patient_service = SearchPatientDetailsService( - smartcard_role_code, org_ods_code - ) + search_patient_service = SearchPatientDetailsService(smartcard_role_code, org_ods_code) search_patient_service.handle_search_patient_request(nhs_number, False) except SearchPatientException as e: raise GetFhirDocumentReferenceException(e.status_code, e.error) diff --git a/lambdas/services/upload_document_reference_service.py b/lambdas/services/upload_document_reference_service.py index 6f196313ab..600f6e7c42 100644 --- a/lambdas/services/upload_document_reference_service.py +++ b/lambdas/services/upload_document_reference_service.py @@ -31,7 +31,7 @@ class UploadDocumentReferenceService: def __init__(self): self.staging_s3_bucket_name = os.environ["STAGING_STORE_BUCKET_NAME"] - self.table_name = os.environ["LLOYD_GEORGE_DYNAMODB_NAME"] + 
self.table_name = "" self.destination_bucket_name = os.environ["LLOYD_GEORGE_BUCKET_NAME"] self.doc_type = SnomedCodes.LLOYD_GEORGE.value self.document_service = DocumentService() @@ -73,10 +73,11 @@ def handle_upload_document_reference_request( return def _get_infrastructure_for_document_key(self, object_parts: list[str]) -> None: - doc_type = None + doc_type = self.doc_type if object_parts[0] != "fhir_upload" or not ( doc_type := SnomedCodes.find_by_code(object_parts[1]) ): + self.table_name = self.table_router.resolve(doc_type or self.doc_type) return try: diff --git a/lambdas/tests/unit/enums/test_infrastructure.py b/lambdas/tests/unit/enums/test_infrastructure.py index 42ef7f9730..18cfb21615 100644 --- a/lambdas/tests/unit/enums/test_infrastructure.py +++ b/lambdas/tests/unit/enums/test_infrastructure.py @@ -9,7 +9,6 @@ ["enum_value", "expected"], [ (DynamoTables.LLOYD_GEORGE, "LloydGeorgeReferenceMetadata"), - (DynamoTables.PDM, "PDMDocumentMetadata"), (DynamoTables.CORE, "COREDocumentMetadata"), ], ) @@ -29,7 +28,7 @@ def test_dynamo_tables_no_workspace(monkeypatch): monkeypatch.delenv("WORKSPACE", raising=False) with pytest.raises(DocumentRefException) as exc: - str(DynamoTables.PDM) + str(DynamoTables.CORE) assert exc.value.status_code == 500 assert exc.value.error == LambdaError.EnvMissing diff --git a/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py b/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py index 2a08c7a9c5..a93eb6597c 100644 --- a/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py +++ b/lambdas/tests/unit/services/test_pdm_upload_document_reference_service.py @@ -507,7 +507,7 @@ def test_process_preliminary_document_reference_exception_during_processing( def test_get_infrastructure_for_document_key_pdm(service): - assert service.table_name == MOCK_LG_TABLE_NAME + assert service.table_name == "" assert service.destination_bucket_name == MOCK_LG_BUCKET
service._get_infrastructure_for_document_key( object_parts=["fhir_upload", SnomedCodes.PATIENT_DATA.value.code, "1234"] @@ -517,7 +517,9 @@ def test_get_infrastructure_for_document_key_pdm(service): def test_get_infrastructure_for_document_key_non_pdm(service): + assert service.table_name == "" infra = service._get_infrastructure_for_document_key(object_parts=["1234", "123"]) + assert service.table_name == str(DynamoTables.LLOYD_GEORGE) assert infra is None @@ -553,19 +555,19 @@ def test_get_infra_invalid_doc_type(monkeypatch, service): [ ( "staging/documents/test-doc-123", - MOCK_LG_TABLE_NAME, + "dev_LloydGeorgeReferenceMetadata", MOCK_LG_BUCKET, SnomedCodes.LLOYD_GEORGE.value, ), ( "folder/subfolder/another-doc", - MOCK_LG_TABLE_NAME, + "dev_LloydGeorgeReferenceMetadata", MOCK_LG_BUCKET, SnomedCodes.LLOYD_GEORGE.value, ), ( "simple-doc", - MOCK_LG_TABLE_NAME, + "dev_LloydGeorgeReferenceMetadata", MOCK_LG_BUCKET, SnomedCodes.LLOYD_GEORGE.value, ), @@ -577,7 +579,7 @@ def test_get_infra_invalid_doc_type(monkeypatch, service): ), ( f"{SnomedCodes.LLOYD_GEORGE.value.code}/staging/test-doc-123", - MOCK_LG_TABLE_NAME, + "dev_LloydGeorgeReferenceMetadata", MOCK_LG_BUCKET, SnomedCodes.LLOYD_GEORGE.value, ), diff --git a/lambdas/tests/unit/services/test_upload_document_reference_service.py b/lambdas/tests/unit/services/test_upload_document_reference_service.py index 12f10401cd..c7581eff03 100644 --- a/lambdas/tests/unit/services/test_upload_document_reference_service.py +++ b/lambdas/tests/unit/services/test_upload_document_reference_service.py @@ -68,44 +68,6 @@ def service(set_env, mock_virus_scan_service): return service -@pytest.fixture -def mock_pdm_document_reference(): - """Create a mock document reference""" - doc_ref = Mock(spec=DocumentReference) - doc_ref.id = "test-doc-id" - doc_ref.nhs_number = "9000000001" - doc_ref.s3_file_key = ( - f"fhir_upload/{SnomedCodes.PATIENT_DATA.value.code}/9000000001/test-doc-id" - ) - doc_ref.s3_bucket_name = 
"test-staging-bucket" - doc_ref.virus_scanner_result = None - doc_ref.file_size = 1234567890 - doc_ref.doc_status = "uploading" - doc_ref._build_s3_location = Mock( - return_value=f"s3://test-staging-bucket/fhir_upload/{SnomedCodes.PATIENT_DATA.value.code}/9000000001/test-doc-id" - ) - return doc_ref - - -@pytest.fixture -def pdm_service(set_env, mock_virus_scan_service): - with patch.multiple( - "services.upload_document_reference_service", - DocumentService=Mock(), - DynamoDBService=Mock(), - S3Service=Mock(), - ): - service = UploadDocumentReferenceService() - service.document_service = Mock() - service.dynamo_service = Mock() - service.virus_scan_service = MockVirusScanService() - service.s3_service = Mock() - service.table_name = MOCK_PDM_TABLE_NAME - service.destination_bucket_name = MOCK_PDM_BUCKET - service.doc_type = SnomedCodes.PATIENT_DATA.value - return service - - def test_handle_upload_document_reference_request_with_empty_object_key(service): """Test handling of an empty object key""" service.handle_upload_document_reference_request("", 122) @@ -167,6 +129,7 @@ def test_handle_upload_document_reference_request_with_exception(service): def test_fetch_preliminary_document_reference_success(service, mock_document_reference): """Test successful document reference fetching""" document_key = "test-doc-id" + service.table_name = "dev_LloydGeorgeReferenceMetadata" service.document_service.fetch_documents_from_table.return_value = [ mock_document_reference ] @@ -175,7 +138,7 @@ def test_fetch_preliminary_document_reference_success(service, mock_document_ref assert result == mock_document_reference service.document_service.fetch_documents_from_table.assert_called_once_with( - table_name=MOCK_LG_TABLE_NAME, + table_name="dev_LloydGeorgeReferenceMetadata", search_condition=document_key, search_key="ID", query_filter=PreliminaryStatus, @@ -400,10 +363,11 @@ def test_delete_file_from_staging_bucket_client_error(service): def 
test_update_dynamo_table_clean_scan_result(service, mock_document_reference): """Test updating DynamoDB table with a clean scan result""" + service.table_name = "dev_LloydGeorgeReferenceMetadata" service._update_dynamo_table(mock_document_reference) service.document_service.update_document.assert_called_once_with( - table_name=MOCK_LG_TABLE_NAME, + table_name="dev_LloydGeorgeReferenceMetadata", document=mock_document_reference, key_pair=None, update_fields_name={ @@ -472,13 +436,13 @@ def test_document_key_extraction_from_object_key_for_lg( # Check first call (preliminary document) first_call = service.document_service.fetch_documents_from_table.call_args_list[0] - assert first_call[1]["table_name"] == MOCK_LG_TABLE_NAME + assert first_call[1]["table_name"] == "dev_LloydGeorgeReferenceMetadata" assert first_call[1]["search_condition"] == expected_document_key assert first_call[1]["search_key"] == "ID" # Check second call (existing final documents) second_call = service.document_service.fetch_documents_from_table.call_args_list[1] - assert second_call[1]["table_name"] == MOCK_LG_TABLE_NAME + assert second_call[1]["table_name"] == "dev_LloydGeorgeReferenceMetadata" assert second_call[1]["index_name"] == "S3FileKeyIndex" assert second_call[1]["search_condition"] == mock_document_reference.s3_file_key assert second_call[1]["search_key"] == "S3FileKey" @@ -500,6 +464,7 @@ def test_finalize_and_supersede_with_transaction_with_existing_finals( existing_final_doc.doc_status = "final" existing_final_doc.version = "1" + service.table_name = "dev_LloydGeorgeReferenceMetadata" service.document_service.fetch_documents_from_table.return_value = [ existing_final_doc ] @@ -511,7 +476,7 @@ def test_finalize_and_supersede_with_transaction_with_existing_finals( # Assert fetch was called with the correct parameters service.document_service.fetch_documents_from_table.assert_called_once_with( - table_name=MOCK_LG_TABLE_NAME, + table_name="dev_LloydGeorgeReferenceMetadata", 
index_name="S3FileKeyIndex", search_condition=new_doc.s3_file_key, search_key="S3FileKey", @@ -523,7 +488,7 @@ def test_finalize_and_supersede_with_transaction_with_existing_finals( # Assert the first call is for the new document with correct fields first_call = service.dynamo_service.build_update_transaction_item.call_args_list[0] - assert first_call[1]["table_name"] == MOCK_LG_TABLE_NAME + assert first_call[1]["table_name"] == "dev_LloydGeorgeReferenceMetadata" assert first_call[1]["document_key"] == {"ID": new_doc.id} update_fields = first_call[1]["update_fields"] assert "S3VersionID" in update_fields @@ -534,7 +499,7 @@ def test_finalize_and_supersede_with_transaction_with_existing_finals( # Assert the second call is for superseding the existing final document second_call = service.dynamo_service.build_update_transaction_item.call_args_list[1] - assert second_call[1]["table_name"] == MOCK_LG_TABLE_NAME + assert second_call[1]["table_name"] == "dev_LloydGeorgeReferenceMetadata" assert second_call[1]["document_key"] == {"ID": existing_final_doc.id} assert second_call[1]["update_fields"] == { "Status": "superseded", @@ -703,8 +668,10 @@ def test_process_preliminary_document_reference_exception_during_processing( def test_get_infrastructure_for_document_key_non_pdm(service): + assert service.table_name == "" infra = service._get_infrastructure_for_document_key(object_parts=["1234", "123"]) assert infra is None + assert service.table_name == "dev_LloydGeorgeReferenceMetadata" def test_get_infra_invalid_doc_type(monkeypatch, service): diff --git a/lambdas/utils/dynamo_utils.py b/lambdas/utils/dynamo_utils.py index 36f7ff0bb4..0f979edb81 100644 --- a/lambdas/utils/dynamo_utils.py +++ b/lambdas/utils/dynamo_utils.py @@ -87,9 +87,7 @@ def create_expression_attribute_values(attribute_field_values: dict) -> dict: """ expression_attribute_values = {} for field_name, field_value in attribute_field_values.items(): - expression_attribute_values[ - 
f"{create_expression_value_placeholder(field_name)}" - ] = field_value + expression_attribute_values[f"{create_expression_value_placeholder(field_name)}"] = field_value return expression_attribute_values @@ -133,14 +131,12 @@ def filter_uploaded_docs_and_recently_uploading_docs(): filter_builder.add_condition("Uploaded", AttributeOperator.EQUAL, True) uploaded_filter_expression = filter_builder.build() - filter_builder.add_condition( - "Uploading", AttributeOperator.EQUAL, True - ).add_condition("LastUpdated", AttributeOperator.GREATER_OR_EQUAL, time_limit) + filter_builder.add_condition("Uploading", AttributeOperator.EQUAL, True).add_condition( + "LastUpdated", AttributeOperator.GREATER_OR_EQUAL, time_limit + ) uploading_filter_expression = filter_builder.build() - return delete_filter_expression & ( - uploaded_filter_expression | uploading_filter_expression - ) + return delete_filter_expression & (uploaded_filter_expression | uploading_filter_expression) def parse_dynamo_record(dynamodb_record: Dict[str, Any]) -> Dict[str, Any]: @@ -196,12 +192,8 @@ def build_mixed_condition_expression( if operator in ["attribute_exists", "attribute_not_exists"]: condition_expressions.append(f"{operator}({condition_placeholder})") else: - condition_value_placeholder = create_expression_value_placeholder( - field_name, suffix - ) - condition_expressions.append( - f"{condition_placeholder} {operator} {condition_value_placeholder}" - ) + condition_value_placeholder = create_expression_value_placeholder(field_name, suffix) + condition_expressions.append(f"{condition_placeholder} {operator} {condition_value_placeholder}") condition_attribute_values[condition_value_placeholder] = field_value condition_expression = f" {join_operator} ".join(condition_expressions) @@ -242,9 +234,7 @@ def build_general_transaction_item( action = action.capitalize() if action not in ["Put", "Update", "Delete", "Conditioncheck"]: - raise ValueError( - f"Invalid action: {action}. 
Must be one of: Put, Update, Delete, ConditionCheck" - ) + raise ValueError(f"Invalid action: {action}. Must be one of: Put, Update, Delete, ConditionCheck") transaction_item: dict[str, dict[str, Any]] = {action: {"TableName": table_name}} @@ -269,14 +259,10 @@ def build_general_transaction_item( transaction_item[action]["ConditionExpression"] = condition_expression if expression_attribute_names: - transaction_item[action][ - "ExpressionAttributeNames" - ] = expression_attribute_names + transaction_item[action]["ExpressionAttributeNames"] = expression_attribute_names if expression_attribute_values: - transaction_item[action][ - "ExpressionAttributeValues" - ] = expression_attribute_values + transaction_item[action]["ExpressionAttributeValues"] = expression_attribute_values return transaction_item @@ -305,8 +291,8 @@ def build_transaction_item( expression_attribute_values.update(update_attr_values) if conditions: - condition_expr, condition_attr_names, condition_attr_values = ( - build_mixed_condition_expression(conditions, condition_join_operator) + condition_expr, condition_attr_names, condition_attr_values = build_mixed_condition_expression( + conditions, condition_join_operator ) condition_expression = condition_expr expression_attribute_names.update(condition_attr_names) @@ -336,7 +322,6 @@ def __init__(self): def _define_tables(self): self.lg_dynamo_table = DynamoTables.LLOYD_GEORGE - self.pdm_dynamo_table = DynamoTables.PDM self.core_dynamo_table = DynamoTables.CORE def resolve(self, doc_type: SnomedCode) -> str: @@ -344,7 +329,5 @@ def resolve(self, doc_type: SnomedCode) -> str: table = self.mapping[doc_type.code] return str(table) except KeyError: - logger.error( - f"SNOMED code {doc_type.code} - {doc_type.display_name} is not supported" - ) + logger.error(f"SNOMED code {doc_type.code} - {doc_type.display_name} is not supported") raise InvalidDocTypeException(400, LambdaError.DocTypeDB)