5 changes: 1 addition & 4 deletions lambdas/enums/infrastructure.py
@@ -18,15 +18,12 @@ def __getattr__(cls, name):

class DynamoTables(StrEnum, metaclass=DynamoMeta):
LLOYD_GEORGE = "LloydGeorgeReferenceMetadata"
PDM = "PDMDocumentMetadata"
CORE = "COREDocumentMetadata"

def __str__(self) -> str:
workspace = os.getenv("WORKSPACE")
if not workspace:
logger.error(
"No workspace environment variable found during table definition."
)
logger.error("No workspace environment variable found during table definition.")
raise DocumentRefException(500, LambdaError.EnvMissing)
return f"{workspace}_{self.value}"

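For reference, a minimal sketch of the workspace-prefixed table-name pattern this enum relies on, simplified from the module above (the DynamoMeta metaclass is omitted and a plain RuntimeError stands in for DocumentRefException); it illustrates why the updated tests expect str(DynamoTables.LLOYD_GEORGE) to resolve to "dev_LloydGeorgeReferenceMetadata" when WORKSPACE=dev.

```python
import os
from enum import Enum


class DynamoTables(str, Enum):
    LLOYD_GEORGE = "LloydGeorgeReferenceMetadata"
    CORE = "COREDocumentMetadata"

    def __str__(self) -> str:
        workspace = os.getenv("WORKSPACE")
        if not workspace:
            # The real enum raises DocumentRefException(500, LambdaError.EnvMissing) here.
            raise RuntimeError("No workspace environment variable found during table definition.")
        return f"{workspace}_{self.value}"


os.environ["WORKSPACE"] = "dev"
assert str(DynamoTables.LLOYD_GEORGE) == "dev_LloydGeorgeReferenceMetadata"
```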
8 changes: 2 additions & 6 deletions lambdas/handlers/document_reference_virus_scan_handler.py
@@ -15,14 +15,12 @@
@set_request_context_for_logging
@ensure_environment_variables(
names=[
"LLOYD_GEORGE_DYNAMODB_NAME",
"PDM_DYNAMODB_NAME",
"STAGING_STORE_BUCKET_NAME",
"LLOYD_GEORGE_BUCKET_NAME",
"PDM_BUCKET_NAME",
"VIRUS_SCAN_STUB",
"DOCUMENT_REVIEW_DYNAMODB_NAME",
"PENDING_REVIEW_BUCKET_NAME"
"PENDING_REVIEW_BUCKET_NAME",
]
)
def lambda_handler(event, context):
@@ -42,6 +40,4 @@ def lambda_handler(event, context):

service.handle_upload_document_reference_request(object_key, object_size)

return ApiGatewayResponse(
200, "Virus Scan was successful", "POST"
).create_api_gateway_response()
return ApiGatewayResponse(200, "Virus Scan was successful", "POST").create_api_gateway_response()
44 changes: 10 additions & 34 deletions lambdas/handlers/get_fhir_document_reference_handler.py
@@ -27,8 +27,6 @@
"APPCONFIG_APPLICATION",
"APPCONFIG_CONFIGURATION",
"APPCONFIG_ENVIRONMENT",
"LLOYD_GEORGE_DYNAMODB_NAME",
"PDM_DYNAMODB_NAME",
"PRESIGNED_ASSUME_ROLE",
"CLOUDFRONT_URL",
]
@@ -41,35 +39,21 @@ def lambda_handler(event, context):
document_id, snomed_code = extract_document_parameters(event)

get_document_service = GetFhirDocumentReferenceService()
document_reference = get_document_service.handle_get_document_reference_request(
snomed_code, document_id
)
document_reference = get_document_service.handle_get_document_reference_request(snomed_code, document_id)

if selected_role_id and bearer_token:
verify_user_authorisation(
bearer_token, selected_role_id, document_reference.nhs_number
)
verify_user_authorisation(bearer_token, selected_role_id, document_reference.nhs_number)

document_reference_response = (
get_document_service.create_document_reference_fhir_response(
document_reference
)
)
document_reference_response = get_document_service.create_document_reference_fhir_response(document_reference)

logger.info(
f"Successfully retrieved document reference for document_id: {document_id}, snomed_code: {snomed_code}"
)
logger.info(f"Successfully retrieved document reference for document_id: {document_id}, snomed_code: {snomed_code}")

return ApiGatewayResponse(
status_code=200, body=document_reference_response, methods="GET"
).create_api_gateway_response()
return ApiGatewayResponse(status_code=200, body=document_reference_response, methods="GET").create_api_gateway_response()

except GetFhirDocumentReferenceException as exception:
return ApiGatewayResponse(
status_code=exception.status_code,
body=exception.error.create_error_response().create_error_fhir_response(
exception.error.value.get("fhir_coding")
),
body=exception.error.create_error_response().create_error_fhir_response(exception.error.value.get("fhir_coding")),
methods="GET",
).create_api_gateway_response()

@@ -81,9 +65,7 @@ def extract_document_parameters(event):

if not document_id or not snomed_code:
logger.error("Missing document id or snomed code in request path parameters.")
raise GetFhirDocumentReferenceException(
400, LambdaError.DocumentReferenceMissingParameters
)
raise GetFhirDocumentReferenceException(400, LambdaError.DocumentReferenceMissingParameters)

return document_id, snomed_code

@@ -101,19 +83,13 @@ def verify_user_authorisation(bearer_token, selected_role_id, nhs_number):
userinfo = oidc_service.fetch_userinfo(bearer_token)

org_ods_code = oidc_service.fetch_user_org_code(userinfo, selected_role_id)
smartcard_role_code, _ = oidc_service.fetch_user_role_code(
userinfo, selected_role_id, "R"
)
smartcard_role_code, _ = oidc_service.fetch_user_role_code(userinfo, selected_role_id, "R")
except (OidcApiException, AuthorisationException) as e:
logger.error(f"Authorization error: {str(e)}")
raise GetFhirDocumentReferenceException(
403, LambdaError.DocumentReferenceUnauthorised
)
raise GetFhirDocumentReferenceException(403, LambdaError.DocumentReferenceUnauthorised)

try:
search_patient_service = SearchPatientDetailsService(
smartcard_role_code, org_ods_code
)
search_patient_service = SearchPatientDetailsService(smartcard_role_code, org_ods_code)
search_patient_service.handle_search_patient_request(nhs_number, False)
except SearchPatientException as e:
raise GetFhirDocumentReferenceException(e.status_code, e.error)
5 changes: 3 additions & 2 deletions lambdas/services/upload_document_reference_service.py
@@ -31,7 +31,7 @@
class UploadDocumentReferenceService:
def __init__(self):
self.staging_s3_bucket_name = os.environ["STAGING_STORE_BUCKET_NAME"]
self.table_name = os.environ["LLOYD_GEORGE_DYNAMODB_NAME"]
self.table_name = ""
self.destination_bucket_name = os.environ["LLOYD_GEORGE_BUCKET_NAME"]
self.doc_type = SnomedCodes.LLOYD_GEORGE.value
self.document_service = DocumentService()
@@ -73,10 +73,11 @@ def handle_upload_document_reference_request(
return

def _get_infrastructure_for_document_key(self, object_parts: list[str]) -> None:
doc_type = None
doc_type = self.doc_type
if object_parts[0] != "fhir_upload" or not (
doc_type := SnomedCodes.find_by_code(object_parts[1])
):
self.table_name = self.table_router.resolve(doc_type)
return

try:
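As a reading aid, a hedged sketch of the routing branch added to _get_infrastructure_for_document_key: keys that do not start with "fhir_upload" fall back to resolving the default (Lloyd George) table via the router, while recognised fhir_upload keys continue into the PDM handling path not shown in this hunk. SnomedCodes.find_by_code and table_router.resolve are replaced by illustrative callables here; their real signatures are not part of this diff.

```python
from typing import Callable, Optional


def route_table_for_key(
    object_parts: list[str],
    default_doc_type: str,
    find_by_code: Callable[[str], Optional[str]],
    resolve: Callable[[Optional[str]], str],
) -> Optional[str]:
    """Return a resolved table name for the fallback branch, or None when the
    key is a recognised fhir_upload and should follow the PDM path instead."""
    doc_type = default_doc_type
    if object_parts[0] != "fhir_upload" or not (
        doc_type := find_by_code(object_parts[1])
    ):
        return resolve(doc_type)
    return None


# A plain staging key takes the fallback branch and resolves the default table.
table = route_table_for_key(
    ["staging", "documents", "test-doc-123"],
    default_doc_type="LLOYD_GEORGE",
    find_by_code=lambda code: None,
    resolve=lambda doc_type: "dev_LloydGeorgeReferenceMetadata",
)
assert table == "dev_LloydGeorgeReferenceMetadata"
```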
3 changes: 1 addition & 2 deletions lambdas/tests/unit/enums/test_infrastructure.py
@@ -9,7 +9,6 @@
["enum_value", "expected"],
[
(DynamoTables.LLOYD_GEORGE, "LloydGeorgeReferenceMetadata"),
(DynamoTables.PDM, "PDMDocumentMetadata"),
(DynamoTables.CORE, "COREDocumentMetadata"),
],
)
@@ -29,7 +28,7 @@ def test_dynamo_tables_no_workspace(monkeypatch):
monkeypatch.delenv("WORKSPACE", raising=False)

with pytest.raises(DocumentRefException) as exc:
str(DynamoTables.PDM)
str(DynamoTables.CORE)

assert exc.value.status_code == 500
assert exc.value.error == LambdaError.EnvMissing
Changes to an additional test file (path not shown in this view)
@@ -507,7 +507,7 @@ def test_process_preliminary_document_reference_exception_during_processing(


def test_get_infrastructure_for_document_key_pdm(service):
assert service.table_name == MOCK_LG_TABLE_NAME
assert service.table_name == ""
assert service.destination_bucket_name == MOCK_LG_BUCKET
service._get_infrastructure_for_document_key(
object_parts=["fhir_upload", SnomedCodes.PATIENT_DATA.value.code, "1234"]
@@ -517,7 +517,9 @@ def test_get_infrastructure_for_document_key_pdm(service):


def test_get_infrastructure_for_document_key_non_pdm(service):
assert service.table_name == ""
infra = service._get_infrastructure_for_document_key(object_parts=["1234", "123"])
assert service.table_name == str(DynamoTables.LLOYD_GEORGE)
assert infra is None


@@ -553,19 +555,19 @@ def test_get_infra_invalid_doc_type(monkeypatch, service):
[
(
"staging/documents/test-doc-123",
MOCK_LG_TABLE_NAME,
"dev_LloydGeorgeReferenceMetadata",
MOCK_LG_BUCKET,
SnomedCodes.LLOYD_GEORGE.value,
),
(
"folder/subfolder/another-doc",
MOCK_LG_TABLE_NAME,
"dev_LloydGeorgeReferenceMetadata",
MOCK_LG_BUCKET,
SnomedCodes.LLOYD_GEORGE.value,
),
(
"simple-doc",
MOCK_LG_TABLE_NAME,
"dev_LloydGeorgeReferenceMetadata",
MOCK_LG_BUCKET,
SnomedCodes.LLOYD_GEORGE.value,
),
@@ -577,7 +579,7 @@ def test_get_infra_invalid_doc_type(monkeypatch, service):
),
(
f"{SnomedCodes.LLOYD_GEORGE.value.code}/staging/test-doc-123",
MOCK_LG_TABLE_NAME,
"dev_LloydGeorgeReferenceMetadata",
MOCK_LG_BUCKET,
SnomedCodes.LLOYD_GEORGE.value,
),
Changes to an additional test file (path not shown in this view)
@@ -68,44 +68,6 @@ def service(set_env, mock_virus_scan_service):
return service


@pytest.fixture
def mock_pdm_document_reference():
"""Create a mock document reference"""
doc_ref = Mock(spec=DocumentReference)
doc_ref.id = "test-doc-id"
doc_ref.nhs_number = "9000000001"
doc_ref.s3_file_key = (
f"fhir_upload/{SnomedCodes.PATIENT_DATA.value.code}/9000000001/test-doc-id"
)
doc_ref.s3_bucket_name = "test-staging-bucket"
doc_ref.virus_scanner_result = None
doc_ref.file_size = 1234567890
doc_ref.doc_status = "uploading"
doc_ref._build_s3_location = Mock(
return_value=f"s3://test-staging-bucket/fhir_upload/{SnomedCodes.PATIENT_DATA.value.code}/9000000001/test-doc-id"
)
return doc_ref


@pytest.fixture
def pdm_service(set_env, mock_virus_scan_service):
with patch.multiple(
"services.upload_document_reference_service",
DocumentService=Mock(),
DynamoDBService=Mock(),
S3Service=Mock(),
):
service = UploadDocumentReferenceService()
service.document_service = Mock()
service.dynamo_service = Mock()
service.virus_scan_service = MockVirusScanService()
service.s3_service = Mock()
service.table_name = MOCK_PDM_TABLE_NAME
service.destination_bucket_name = MOCK_PDM_BUCKET
service.doc_type = SnomedCodes.PATIENT_DATA.value
return service


def test_handle_upload_document_reference_request_with_empty_object_key(service):
"""Test handling of an empty object key"""
service.handle_upload_document_reference_request("", 122)
@@ -167,6 +129,7 @@ def test_handle_upload_document_reference_request_with_exception(service):
def test_fetch_preliminary_document_reference_success(service, mock_document_reference):
"""Test successful document reference fetching"""
document_key = "test-doc-id"
service.table_name = "dev_LloydGeorgeReferenceMetadata"
service.document_service.fetch_documents_from_table.return_value = [
mock_document_reference
]
@@ -175,7 +138,7 @@ def test_fetch_preliminary_document_reference_success(service, mock_document_reference):

assert result == mock_document_reference
service.document_service.fetch_documents_from_table.assert_called_once_with(
table_name=MOCK_LG_TABLE_NAME,
table_name="dev_LloydGeorgeReferenceMetadata",
search_condition=document_key,
search_key="ID",
query_filter=PreliminaryStatus,
@@ -400,10 +363,11 @@ def test_delete_file_from_staging_bucket_client_error(service):

def test_update_dynamo_table_clean_scan_result(service, mock_document_reference):
"""Test updating DynamoDB table with a clean scan result"""
service.table_name = "dev_LloydGeorgeReferenceMetadata"
service._update_dynamo_table(mock_document_reference)

service.document_service.update_document.assert_called_once_with(
table_name=MOCK_LG_TABLE_NAME,
table_name="dev_LloydGeorgeReferenceMetadata",
document=mock_document_reference,
key_pair=None,
update_fields_name={
@@ -472,13 +436,13 @@ def test_document_key_extraction_from_object_key_for_lg(

# Check first call (preliminary document)
first_call = service.document_service.fetch_documents_from_table.call_args_list[0]
assert first_call[1]["table_name"] == MOCK_LG_TABLE_NAME
assert first_call[1]["table_name"] == "dev_LloydGeorgeReferenceMetadata"
assert first_call[1]["search_condition"] == expected_document_key
assert first_call[1]["search_key"] == "ID"

# Check second call (existing final documents)
second_call = service.document_service.fetch_documents_from_table.call_args_list[1]
assert second_call[1]["table_name"] == MOCK_LG_TABLE_NAME
assert second_call[1]["table_name"] == "dev_LloydGeorgeReferenceMetadata"
assert second_call[1]["index_name"] == "S3FileKeyIndex"
assert second_call[1]["search_condition"] == mock_document_reference.s3_file_key
assert second_call[1]["search_key"] == "S3FileKey"
@@ -500,6 +464,7 @@ def test_finalize_and_supersede_with_transaction_with_existing_finals(
existing_final_doc.doc_status = "final"
existing_final_doc.version = "1"

service.table_name = "dev_LloydGeorgeReferenceMetadata"
service.document_service.fetch_documents_from_table.return_value = [
existing_final_doc
]
@@ -511,7 +476,7 @@

# Assert fetch was called with the correct parameters
service.document_service.fetch_documents_from_table.assert_called_once_with(
table_name=MOCK_LG_TABLE_NAME,
table_name="dev_LloydGeorgeReferenceMetadata",
index_name="S3FileKeyIndex",
search_condition=new_doc.s3_file_key,
search_key="S3FileKey",
@@ -523,7 +488,7 @@

# Assert the first call is for the new document with correct fields
first_call = service.dynamo_service.build_update_transaction_item.call_args_list[0]
assert first_call[1]["table_name"] == MOCK_LG_TABLE_NAME
assert first_call[1]["table_name"] == "dev_LloydGeorgeReferenceMetadata"
assert first_call[1]["document_key"] == {"ID": new_doc.id}
update_fields = first_call[1]["update_fields"]
assert "S3VersionID" in update_fields
@@ -534,7 +499,7 @@

# Assert the second call is for superseding the existing final document
second_call = service.dynamo_service.build_update_transaction_item.call_args_list[1]
assert second_call[1]["table_name"] == MOCK_LG_TABLE_NAME
assert second_call[1]["table_name"] == "dev_LloydGeorgeReferenceMetadata"
assert second_call[1]["document_key"] == {"ID": existing_final_doc.id}
assert second_call[1]["update_fields"] == {
"Status": "superseded",
@@ -703,8 +668,10 @@ def test_process_preliminary_document_reference_exception_during_processing(


def test_get_infrastructure_for_document_key_non_pdm(service):
assert service.table_name == ""
infra = service._get_infrastructure_for_document_key(object_parts=["1234", "123"])
assert infra is None
assert service.table_name == "dev_LloydGeorgeReferenceMetadata"


def test_get_infra_invalid_doc_type(monkeypatch, service):