diff --git a/.github/workflows/base-e2e-backendtest.yml b/.github/workflows/base-e2e-backendtest.yml index b482216c7e..0d0f98282a 100644 --- a/.github/workflows/base-e2e-backendtest.yml +++ b/.github/workflows/base-e2e-backendtest.yml @@ -78,6 +78,7 @@ jobs: run: | AWS_WORKSPACE="${SANDBOX}" API_URL="api-${SANDBOX}.access-request-fulfilment.patient-deductions.nhs.uk" + AWS_DEFAULT_REGION=${{ vars.AWS_REGION }} echo "NDR_API_ENDPOINT=$API_URL" >> $GITHUB_ENV echo "AWS_WORKSPACE=$AWS_WORKSPACE" >> $GITHUB_ENV env: diff --git a/.github/workflows/base-e2e-mns.yml b/.github/workflows/base-e2e-mns.yml new file mode 100644 index 0000000000..06a2ad4f8a --- /dev/null +++ b/.github/workflows/base-e2e-mns.yml @@ -0,0 +1,78 @@ +name: "Z-BASE E2e Test: MNS E2E Tests" + +on: + workflow_call: + inputs: + build_branch: + description: "Branch with e2e tests." + required: true + type: "string" + environment: + description: "Which Environment type are we using" + required: true + type: "string" + sandbox: + description: "Sandbox to run the smoke tests on." + required: true + type: "string" + secrets: + AWS_ASSUME_ROLE: + required: true + +permissions: + pull-requests: write + id-token: write + contents: read + +jobs: + mns-e2e-test: + runs-on: ubuntu-latest + environment: ${{ inputs.environment }} + strategy: + matrix: + test-file: + - test_mns_process.py + - test_mns_death.py + fail-fast: false + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + repository: "NHSDigital/national-document-repository" + ref: ${{ inputs.build_branch }} + + - name: AWS Role + uses: aws-actions/configure-aws-credentials@v5 + with: + role-to-assume: ${{ secrets.AWS_ASSUME_ROLE }} + role-skip-session-tagging: true + mask-aws-account-id: true + aws-region: ${{ vars.AWS_REGION }} + + - name: Set up Python 3.11 + uses: actions/setup-python@v6 + with: + python-version: 3.11 + + - name: Make virtual environment + run: | + make env + + - name: Start virtual environment + run: | + source ./lambdas/venv/bin/activate + echo PATH=$PATH >> $GITHUB_ENV + + + - name: Set E2e Test Variables + run: | + AWS_WORKSPACE="${SANDBOX}" + AWS_DEFAULT_REGION=${{ vars.AWS_REGION }} + echo "AWS_WORKSPACE=$AWS_WORKSPACE" >> $GITHUB_ENV + echo "AWS_DEFAULT_REGION=$AWS_DEFAULT_REGION" >> $GITHUB_ENV + env: + SANDBOX: ${{ inputs.sandbox }} + + - name: Run MNS E2E Test - ${{ matrix.test-file }} + run: | + cd ./lambdas && ./venv/bin/python3 -m pytest tests/e2e/mns/${{ matrix.test-file }} -vv diff --git a/.github/workflows/lambdas-deploy-feature-to-sandbox.yml b/.github/workflows/lambdas-deploy-feature-to-sandbox.yml index 8659f2241b..14a8aa3b02 100644 --- a/.github/workflows/lambdas-deploy-feature-to-sandbox.yml +++ b/.github/workflows/lambdas-deploy-feature-to-sandbox.yml @@ -117,3 +117,13 @@ jobs: sandbox: ${{ inputs.sandbox }} secrets: AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} + + run_mns_e2etest: + uses: ./.github/workflows/base-e2e-mns.yml + needs: ["deploy_all_lambdas", "disable_fhir_stub"] + with: + build_branch: ${{ inputs.build_branch }} + environment: development + sandbox: ${{ inputs.sandbox }} + secrets: + AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} diff --git a/.github/workflows/ndr-e2e-test-sandbox.yml b/.github/workflows/ndr-e2e-test-sandbox.yml index 7f05df261d..82927590fd 100644 --- a/.github/workflows/ndr-e2e-test-sandbox.yml +++ b/.github/workflows/ndr-e2e-test-sandbox.yml @@ -40,4 +40,13 @@ jobs: environment: ${{ inputs.environment }} sandbox: ${{ inputs.sandbox }} secrets: - AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE 
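
Note: the new base-e2e-mns.yml exports AWS_WORKSPACE and AWS_DEFAULT_REGION into GITHUB_ENV before running pytest, and the e2e helpers further down resolve sandbox-scoped resources from those variables. A minimal sketch of the consumption side; the table and queue names mirror mns_helper.py in this diff, and the region fallback is only an assumption:

    # Sketch: how the MNS e2e code is expected to pick up the variables the
    # workflow writes to GITHUB_ENV. Resource names mirror mns_helper.py;
    # the eu-west-2 fallback is illustrative, not taken from this diff.
    import os

    AWS_WORKSPACE = os.environ.get("AWS_WORKSPACE", "")
    AWS_REGION = os.environ.get("AWS_DEFAULT_REGION", "eu-west-2")

    LLOYD_GEORGE_TABLE = f"{AWS_WORKSPACE}_LloydGeorgeReferenceMetadata"
    MNS_QUEUE_NAME = f"{AWS_WORKSPACE}-mns-notification-queue"
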
}} \ No newline at end of file + AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} + + services-mns-e2etest: + uses: ./.github/workflows/base-e2e-mns.yml + with: + build_branch: ${{ inputs.build_branch }} + environment: ${{ inputs.environment }} + sandbox: ${{ inputs.sandbox }} + secrets: + AWS_ASSUME_ROLE: ${{ secrets.AWS_ASSUME_ROLE }} diff --git a/lambdas/enums/lambda_error.py b/lambdas/enums/lambda_error.py index a14ccc1550..947ad83861 100644 --- a/lambdas/enums/lambda_error.py +++ b/lambdas/enums/lambda_error.py @@ -1,15 +1,20 @@ -from enum import Enum +from enum import Enum, StrEnum from typing import Optional from enums.fhir.fhir_issue_type import FhirIssueCoding, UKCoreSpineError from utils.error_response import ErrorResponse from utils.request_context import request_context - -class LambdaError(Enum): +class ErrorMessage(StrEnum): MISSING_POST = "Missing POST request body" MISSING_KEY = "An error occurred due to missing key" RETRIEVE_DOCUMENTS = "Unable to retrieve documents for patient" + FAILED_TO_QUERY_DYNAMO = "Failed to query DynamoDB" + FAILED_TO_VALIDATE = "Failed to validate data" + FAILED_TO_UPDATE_DYNAMO = "Failed to update DynamoDB" + FAILED_TO_CREATE_TRANSACTION = "Failed to create transaction" + +class LambdaError(Enum): def create_error_response( self, params: Optional[dict] = None, **kwargs @@ -235,7 +240,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } ManifestMissingBody = { "err_code": "DMS_4002", - "message": MISSING_POST, + "message": ErrorMessage.MISSING_POST, } ManifestFilterDocumentReferences = { "err_code": "DMS_4003", @@ -243,7 +248,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } ManifestMissingJobId = { "err_code": "DMS_4004", - "message": MISSING_KEY, + "message": ErrorMessage.MISSING_KEY, } ManifestMissingJob = { "err_code": "DMS_4005", @@ -267,7 +272,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } StitchNoService = { "err_code": "LGS_5001", - "message": RETRIEVE_DOCUMENTS, + "message": ErrorMessage.RETRIEVE_DOCUMENTS, } StitchClient = { "err_code": "LGS_5002", @@ -275,11 +280,11 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } StitchDB = { "err_code": "LGS_5003", - "message": RETRIEVE_DOCUMENTS, + "message": ErrorMessage.RETRIEVE_DOCUMENTS, } StitchValidation = { "err_code": "LGS_5004", - "message": RETRIEVE_DOCUMENTS, + "message": ErrorMessage.RETRIEVE_DOCUMENTS, } StitchCloudFront = { "err_code": "LGS_5005", @@ -320,7 +325,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: """ FeedbackMissingBody = { "err_code": "SFB_4001", - "message": MISSING_POST, + "message": ErrorMessage.MISSING_POST, } FeedbackInvalidBody = { @@ -603,7 +608,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } DocTypeKey = { "err_code": "VDT_4003", - "message": MISSING_KEY, + "message": ErrorMessage.MISSING_KEY, } PatientIdInvalid = { "err_code": "PN_4001", @@ -612,7 +617,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: } PatientIdNoKey = { "err_code": "PN_4002", - "message": MISSING_KEY, + "message": ErrorMessage.MISSING_KEY, "fhir_coding": UKCoreSpineError.MISSING_VALUE, } PatientIdMismatch = { @@ -662,7 +667,7 @@ def create_error_body(self, params: Optional[dict] = None, **kwargs) -> str: """ DocumentReviewDB = { "err_code": "UDR_5001", - "message": RETRIEVE_DOCUMENTS, + "message": ErrorMessage.RETRIEVE_DOCUMENTS, } DocumentReviewValidation = { 
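
Note: lambda_error.py now keeps the shared messages in a StrEnum (available from Python 3.11, which matches the interpreter pinned in the new workflow), so members can be dropped straight into the error dictionaries where the old module-level constants sat. A small illustration of the behaviour relied on here:

    # StrEnum members are real str instances, so equality checks, f-strings
    # and JSON-serialised error bodies all see the raw message text.
    from enum import StrEnum

    class ErrorMessage(StrEnum):
        MISSING_POST = "Missing POST request body"
        FAILED_TO_QUERY_DYNAMO = "Failed to query DynamoDB"

    assert ErrorMessage.MISSING_POST == "Missing POST request body"
    assert isinstance(ErrorMessage.FAILED_TO_QUERY_DYNAMO, str)
    assert f"{ErrorMessage.MISSING_POST}" == "Missing POST request body"
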
diff --git a/lambdas/services/document_service.py b/lambdas/services/document_service.py index daa701ac15..1996fc45c7 100644 --- a/lambdas/services/document_service.py +++ b/lambdas/services/document_service.py @@ -109,7 +109,7 @@ def fetch_documents_from_table( def get_item( self, document_id: str, - sort_key: dict = None, + sort_key: dict | None = None, table_name: str = None, model_class: type[BaseModel] = None, ) -> Optional[BaseModel]: @@ -218,7 +218,7 @@ def update_document( update_fields_name: set[str] | None = None, condition_expression: str | Attr | ConditionBase = None, expression_attribute_values: dict = None, - key_pair: dict | None = None + key_pair: dict | None = None, ): """Update document in specified or configured table.""" table_name = table_name or self.table_name @@ -228,13 +228,9 @@ def update_document( "updated_fields": document.model_dump( exclude_none=True, by_alias=True, include=update_fields_name ), + "key_pair": key_pair + or {DocumentReferenceMetadataFields.ID.value: document.id}, } - if key_pair: - update_kwargs["key_pair"] = key_pair - else: - update_kwargs["key_pair"] = { - DocumentReferenceMetadataFields.ID.value: document.id - } if condition_expression: update_kwargs["condition_expression"] = condition_expression diff --git a/lambdas/services/document_upload_review_service.py b/lambdas/services/document_upload_review_service.py index da0e2c0b9a..5b4830b214 100644 --- a/lambdas/services/document_upload_review_service.py +++ b/lambdas/services/document_upload_review_service.py @@ -1,9 +1,11 @@ import os +from datetime import datetime, timezone from boto3.dynamodb.conditions import Attr, ConditionBase from botocore.exceptions import ClientError from enums.document_review_status import DocumentReviewStatus from enums.dynamo_filter import AttributeOperator +from enums.lambda_error import LambdaError, ErrorMessage from enums.metadata_field_names import DocumentReferenceMetadataFields from models.document_reference import S3_PREFIX from models.document_review import DocumentUploadReviewReference @@ -73,7 +75,7 @@ def query_docs_pending_review_by_custodian_with_limit( except ClientError as e: logger.error(e) - raise DocumentReviewException("Error querying document review references") + raise DocumentReviewException(ErrorMessage.FAILED_TO_QUERY_DYNAMO) def _validate_review_references( self, items: list[dict] @@ -86,7 +88,9 @@ def _validate_review_references( return review_references except ValidationError as e: logger.error(e) - raise DocumentReviewException("Error validating document review references") + raise DocumentReviewException( + ErrorMessage.FAILED_TO_VALIDATE.value + ) def get_document( self, document_id: str, version: int | None @@ -107,21 +111,78 @@ def update_document_review_custodian( patient_documents: list[DocumentUploadReviewReference], updated_ods_code: str, ): - review_update_field = {"custodian"} if not patient_documents: + logger.info("No documents to update") return + review_update_field = {"custodian"} for review in patient_documents: - logger.info("Updating document review custodian...") + if review.custodian == updated_ods_code: + logger.info( + f"Custodian {updated_ods_code} already assigned to review ID: {review.id}" + ) + continue - if review.custodian != updated_ods_code: - review.custodian = updated_ods_code + try: + logger.info( + f"Updating document review custodian for review ID: {review.id}", + { + "current_custodian": review.custodian, + "new_custodian": updated_ods_code, + }, + ) + + if review.review_status == 
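
Note: update_document() in document_service.py now derives the key with a single "key_pair or {...}" expression instead of the if/else. A minimal sketch of the idiom, using an illustrative helper name; the only behavioural nuance is that any falsy value, including an empty dict, falls back to the document ID key:

    # Defaulting idiom now used for update_kwargs["key_pair"].
    def resolve_key_pair(key_pair: dict | None, document_id: str) -> dict:
        return key_pair or {"ID": document_id}

    assert resolve_key_pair(None, "doc-1") == {"ID": "doc-1"}
    assert resolve_key_pair({"ID": "doc-2", "Version": 3}, "doc-1") == {"ID": "doc-2", "Version": 3}
    assert resolve_key_pair({}, "doc-1") == {"ID": "doc-1"}  # empty dict also defaults
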
DocumentReviewStatus.PENDING_REVIEW: + self._handle_pending_review_custodian_update( + review, updated_ods_code, review_update_field + ) + else: + self._handle_standard_custodian_update( + review, updated_ods_code, review_update_field + ) - self.update_document( - document=review, - key_pair={"ID": review.id, "Version": review.version}, - update_fields_name=review_update_field, + except (ClientError, DocumentReviewException) as e: + logger.error( + f"Failed to update custodian for review ID: {review.id}", + {"error": str(e)}, ) + continue + + def _handle_pending_review_custodian_update( + self, + review: DocumentUploadReviewReference, + updated_ods_code: str, + review_update_field: set[str], + ) -> None: + new_document_review = review.model_copy(deep=True) + new_document_review.version = review.version + 1 + new_document_review.custodian = updated_ods_code + + review_date = int(datetime.now(timezone.utc).timestamp()) + review.review_status = DocumentReviewStatus.NEVER_REVIEWED + review.review_date = review_date + review.reviewer = review.custodian + review.custodian = updated_ods_code + + self.update_document_review_with_transaction( + new_review_item=new_document_review, + existing_review_item=review, + additional_update_fields=review_update_field, + ) + + def _handle_standard_custodian_update( + self, + review: DocumentUploadReviewReference, + updated_ods_code: str, + update_fields: set[str], + ) -> None: + review.custodian = updated_ods_code + + self.update_document( + document=review, + key_pair={"ID": review.id, "Version": review.version}, + update_fields_name=update_fields, + ) def update_document_review_status( self, @@ -193,24 +254,22 @@ def update_document_review_for_patient( ) except ClientError as e: error_code = e.response.get("Error", {}).get("Code", "") - + logger.error(e) if error_code == "ConditionalCheckFailedException": logger.error( f"Condition check failed: Document ID {review_update.id}", {"Result": "Failed to update document review"}, ) - raise DocumentReviewException( - f"Document ID {review_update.id} does not meet the required conditions for update" - ) + raise DocumentReviewException(ErrorMessage.FAILED_TO_UPDATE_DYNAMO) logger.error( f"DynamoDB error updating document review: {str(e)}", {"Result": "Failed to update document review"}, ) - raise DocumentReviewException(f"Failed to update document review: {str(e)}") + raise DocumentReviewException(ErrorMessage.FAILED_TO_UPDATE_DYNAMO) def update_document_review_with_transaction( - self, new_review_item, existing_review_item + self, new_review_item, existing_review_item, additional_update_fields=None ): transact_items = [] try: @@ -230,6 +289,8 @@ def update_document_review_with_transaction( "review_date", "reviewer", } + if additional_update_fields: + existing_update_fields.update(additional_update_fields) existing_doc_transaction = build_transaction_item( table_name=self.table_name, action="Update", @@ -254,26 +315,27 @@ def update_document_review_with_transaction( { "field": "Custodian", "operator": "=", - "value": existing_review_item.custodian, + "value": existing_review_item.reviewer, }, ], ) except ValueError as e: logger.error(f"Failed to build transaction item: {str(e)}") - raise DocumentReviewException(f"Failed to build transaction item: {str(e)}") + raise DocumentReviewException(ErrorMessage.FAILED_TO_CREATE_TRANSACTION) transact_items.append(existing_doc_transaction) try: response = self.dynamo_service.transact_write_items(transact_items) logger.info("Transaction completed successfully") except 
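
Note: for reviews still in PENDING_REVIEW the custodian change is applied as a two-item DynamoDB transaction: put a deep copy with Version + 1 and the new custodian, and conditionally close off the existing version as NEVER_REVIEWED. A rough sketch of that shape in raw boto3; the repository routes this through build_transaction_item and DynamoDBService.transact_write_items, so the literal calls and the status string below are assumptions:

    import boto3

    dynamodb = boto3.client("dynamodb")

    def transact_custodian_update(table: str, old_id: str, old_version: int,
                                  new_item: dict, new_custodian: str, old_custodian: str) -> None:
        # new_item must already be in DynamoDB attribute-value format.
        dynamodb.transact_write_items(
            TransactItems=[
                # 1) Put the copied review with Version + 1 and the new custodian.
                {"Put": {"TableName": table, "Item": new_item}},
                # 2) Close off the existing version; the condition makes a
                #    concurrent custodian change cancel the whole transaction.
                {
                    "Update": {
                        "TableName": table,
                        "Key": {"ID": {"S": old_id}, "Version": {"N": str(old_version)}},
                        "UpdateExpression": "SET ReviewStatus = :s, Custodian = :c",
                        "ConditionExpression": "Custodian = :old",
                        "ExpressionAttributeValues": {
                            ":s": {"S": "NEVER_REVIEWED"},  # illustrative status literal
                            ":c": {"S": new_custodian},
                            ":old": {"S": old_custodian},
                        },
                    }
                },
            ]
        )
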
ClientError as e: + logger.error(f"Transaction failed: {str(e)}") error_code = e.response.get("Error", {}).get("Code", "") if error_code == "TransactionCanceledException": logger.error( f"Condition check failed: Document ID {existing_review_item.id} ", {"Result": "Failed to update document review"}, ) - raise DocumentReviewException(f"Failed to update document review: {str(e)}") + raise DocumentReviewException(ErrorMessage.FAILED_TO_UPDATE_DYNAMO) return response def delete_document_review_files( diff --git a/lambdas/tests/e2e/helpers/data_helper.py b/lambdas/tests/e2e/helpers/data_helper.py index b7b3f69d55..258c050009 100644 --- a/lambdas/tests/e2e/helpers/data_helper.py +++ b/lambdas/tests/e2e/helpers/data_helper.py @@ -25,7 +25,6 @@ def __init__( self.dynamo_service = DynamoDBService() self.s3_service = S3Service() self.apim_url = None - self.build_env(table_name, bucket_name) def build_env(self, table_name, bucket_name): diff --git a/lambdas/tests/e2e/mns/__init__.py b/lambdas/tests/e2e/mns/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lambdas/tests/e2e/mns/conftest.py b/lambdas/tests/e2e/mns/conftest.py new file mode 100644 index 0000000000..a3dd005212 --- /dev/null +++ b/lambdas/tests/e2e/mns/conftest.py @@ -0,0 +1,19 @@ +import pytest +from tests.e2e.mns.mns_helper import MNSTestHelper + + +@pytest.fixture +def mns_helper(): + return MNSTestHelper() + + +@pytest.fixture +def test_records(): + records = {"lloyd_george": [], "document_review": []} + yield records + + helper = MNSTestHelper() + for record_id in records["lloyd_george"]: + helper.cleanup_lloyd_george_record(record_id) + for record_id in records["document_review"]: + helper.cleanup_document_review_record(record_id) diff --git a/lambdas/tests/e2e/mns/mns_helper.py b/lambdas/tests/e2e/mns/mns_helper.py new file mode 100644 index 0000000000..adfad0f87a --- /dev/null +++ b/lambdas/tests/e2e/mns/mns_helper.py @@ -0,0 +1,212 @@ +import json +import os +import time +import uuid +from datetime import datetime, timezone + +from enums.death_notification_status import DeathNotificationStatus +from enums.document_review_status import DocumentReviewStatus +from enums.mns_notification_types import MNSNotificationTypes +from enums.snomed_codes import SnomedCodes +from services.base.dynamo_service import DynamoDBService +from services.base.s3_service import S3Service +from services.base.sqs_service import SQSService + +AWS_WORKSPACE = os.environ.get("AWS_WORKSPACE", "") +LLOYD_GEORGE_TABLE = f"{AWS_WORKSPACE}_LloydGeorgeReferenceMetadata" +DOCUMENT_REVIEW_TABLE = f"{AWS_WORKSPACE}_DocumentUploadReview" +PENDING_REVIEW_S3_BUCKET = f"{AWS_WORKSPACE}-pending-review-bucket" +TEST_NHS_NUMBER = "9730154198" +TEST_NHS_NUMBER_DEATH = "9730135967" +TEST_ORIGINAL_ODS = "Y12345" +TEST_NEW_ODS = "H81109" +MOCK_TIME = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + + +class MNSTestHelper: + def __init__(self): + self.dynamo_service = DynamoDBService() + self.s3_service = S3Service() + self.sqs_service = SQSService() + self.mns_queue_url = self.get_mns_queue_url(AWS_WORKSPACE) + + def get_mns_queue_url(self, workspace: str) -> str: + queue_name = f"{workspace}-mns-notification-queue" + response = self.sqs_service.client.get_queue_url(QueueName=queue_name) + return response["QueueUrl"] + + def create_lloyd_george_record(self, nhs_number: str, ods_code: str) -> dict: + record_id = str(uuid.uuid4()) + dynamo_item = { + "ID": record_id, + "NhsNumber": nhs_number, + "ContentType": "application/pdf", + "Created": 
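
Note: the helper resolves the workspace-scoped queue once in __init__ via SQSService's underlying client. Stripped down to plain boto3, the lookup is just:

    # Resolve "<workspace>-mns-notification-queue" to a URL once, then reuse
    # it for every send; region and credentials come from the workflow env.
    import boto3

    def get_mns_queue_url(workspace: str) -> str:
        sqs = boto3.client("sqs")
        return sqs.get_queue_url(QueueName=f"{workspace}-mns-notification-queue")["QueueUrl"]
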
datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ"), + "CurrentGpOds": ods_code, + "Custodian": ods_code, + "DocStatus": "final", + "DocumentScanCreation": "2023-01-01", + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + "FileLocation": f"s3://{AWS_WORKSPACE}-lloyd-george-store/{nhs_number}/{record_id}", + "FileName": f"1of1_Lloyd_George_Record_[Test Patient]_[{nhs_number}]_[01-01-2000].pdf", + "FileSize": "12345", + "LastUpdated": int(time.time()), + "Status": "current", + "Uploaded": True, + "Uploading": False, + "Version": "1", + "VirusScannerResult": "Clean", + } + self.dynamo_service.create_item(LLOYD_GEORGE_TABLE, dynamo_item) + return {"id": record_id, "nhs_number": nhs_number, "ods": ods_code} + + def create_document_review_record( + self, + nhs_number: str, + ods_code: str, + review_status: DocumentReviewStatus = DocumentReviewStatus.PENDING_REVIEW, + ) -> dict: + record_id = str(uuid.uuid4()) + file_location = ( + f"s3://{PENDING_REVIEW_S3_BUCKET}/{nhs_number}/{record_id}/test.pdf" + ) + + dynamo_item = { + "ID": record_id, + "NhsNumber": nhs_number, + "Author": ods_code, + "Custodian": ods_code, + "ReviewStatus": review_status, + "ReviewReason": "Test document for MNS e2e", + "UploadDate": int(time.time()), + "Files": [ + { + "FileName": "test.pdf", + "FileLocation": file_location, + } + ], + "Version": 1, + "DocumentSnomedCodeType": SnomedCodes.LLOYD_GEORGE.value.code, + } + self.dynamo_service.create_item(DOCUMENT_REVIEW_TABLE, dynamo_item) + return {"id": record_id, "nhs_number": nhs_number, "ods": ods_code} + + def send_gp_change_message(self, nhs_number: str) -> str: + message_id = str(uuid.uuid4()) + message_body = { + "id": message_id, + "type": MNSNotificationTypes.CHANGE_OF_GP.value, + "subject": { + "nhsNumber": nhs_number, + "familyName": "TESTPATIENT", + "dob": "2000-01-01", + }, + "source": { + "name": "https://test.example.com", + "identifiers": { + "system": "https://test.example.com", + "value": str(uuid.uuid4()), + }, + }, + "time": MOCK_TIME, + "data": { + "fullUrl": "https://test.example.com/Patient/123", + "versionId": str(uuid.uuid4()), + "provenance": { + "name": "Test GP Practice", + "identifiers": { + "system": "https://test.example.com", + "value": str(uuid.uuid4()), + }, + }, + "registrationEncounterCode": "00", + }, + } + + self.sqs_service.send_message_standard( + queue_url=self.mns_queue_url, message_body=json.dumps(message_body) + ) + return message_id + + def send_death_notification_message( + self, nhs_number: str, death_status: DeathNotificationStatus + ) -> str: + message_id = str(uuid.uuid4()) + message_body = { + "id": message_id, + "type": MNSNotificationTypes.DEATH_NOTIFICATION.value, + "subject": { + "nhsNumber": nhs_number, + "familyName": "TESTPATIENT", + "dob": "2000-01-01", + }, + "source": { + "name": "NHS DIGITAL", + "identifier": { + "system": "https://fhir.nhs.uk/Id/nhsSpineASID", + "value": "477121000324", + }, + }, + "time": MOCK_TIME, + "data": { + "versionId": 'W/"16"', + "fullUrl": f"https://int.api.service.nhs.uk/personal-demographics/FHIR/R4/Patient/{nhs_number}", + "deathNotificationStatus": death_status.value, + "provenance": { + "name": "The GP Practice", + "identifiers": { + "system": "https://fhir.nhs.uk/Id/nhsSpineASID", + "value": "477121000323", + }, + }, + }, + } + + self.sqs_service.send_message_standard( + queue_url=self.mns_queue_url, message_body=json.dumps(message_body) + ) + return message_id + + def get_lloyd_george_record(self, record_id: str) -> dict: + return 
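
Note: both senders here build a full MNS-style event and push it onto the notification queue as a JSON string. A trimmed change-of-GP example with plain boto3; the type string is an assumed value for MNSNotificationTypes.CHANGE_OF_GP and the remaining fields are dummies:

    import json
    import uuid

    import boto3

    def send_minimal_gp_change(queue_url: str, nhs_number: str) -> str:
        message_id = str(uuid.uuid4())
        body = {
            "id": message_id,
            "type": "pds-change-of-gp-1",  # assumed enum value, not taken from this diff
            "subject": {"nhsNumber": nhs_number, "familyName": "TESTPATIENT", "dob": "2000-01-01"},
            "data": {"registrationEncounterCode": "00"},
        }
        boto3.client("sqs").send_message(QueueUrl=queue_url, MessageBody=json.dumps(body))
        return message_id
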
self.dynamo_service.get_item( + table_name=LLOYD_GEORGE_TABLE, key={"ID": record_id} + ).get("Item") + + def get_document_review_record(self, record_id: str, version: int = 1) -> dict: + return self.dynamo_service.get_item( + table_name=DOCUMENT_REVIEW_TABLE, key={"ID": record_id, "Version": version} + ).get("Item") + + def get_all_document_review_versions(self, record_id: str) -> list[dict]: + response = self.dynamo_service.query_table_single( + table_name=DOCUMENT_REVIEW_TABLE, + search_key="ID", + search_condition=record_id, + ) + return response.get("Items", []) + + def cleanup_lloyd_george_record(self, record_id: str): + try: + self.dynamo_service.delete_item( + table_name=LLOYD_GEORGE_TABLE, key={"ID": record_id} + ) + except Exception as e: + print(f"Error cleaning up Lloyd George record {record_id}: {e}") + + def cleanup_document_review_record(self, record_id: str, version: int = 1): + try: + records = self.get_all_document_review_versions(record_id) + for record in records: + self.dynamo_service.delete_item( + table_name=DOCUMENT_REVIEW_TABLE, + key={"ID": record_id, "Version": record["Version"]}, + ) + except Exception as e: + print(f"Error cleaning up document review record {record_id}: {e}") + + def wait_for_update(self, check_func, max_retries=5, delay=10): + for i in range(max_retries): + if check_func(): + return True + time.sleep(delay) + return False diff --git a/lambdas/tests/e2e/mns/test_mns_death.py b/lambdas/tests/e2e/mns/test_mns_death.py new file mode 100644 index 0000000000..933b8ac25c --- /dev/null +++ b/lambdas/tests/e2e/mns/test_mns_death.py @@ -0,0 +1,172 @@ +import time + +import pytest +from enums.death_notification_status import DeathNotificationStatus +from enums.document_review_status import DocumentReviewStatus +from enums.patient_ods_inactive_status import PatientOdsInactiveStatus +from tests.e2e.mns.mns_helper import TEST_ORIGINAL_ODS, MNSTestHelper + +TEST_NHS_FORMAL = "9730135967" +TEST_NHS_INFORMAL = "9730154384" +TEST_NHS_BOTH = "9730153949" + + +@pytest.fixture(scope="session") +def setup_all_death_tests(): + helper = MNSTestHelper() + + formal_lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_FORMAL, ods_code=TEST_ORIGINAL_ODS + ) + + informal_lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_INFORMAL, ods_code=TEST_ORIGINAL_ODS + ) + + both_lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_BOTH, ods_code=TEST_ORIGINAL_ODS + ) + + both_review_record = helper.create_document_review_record( + nhs_number=TEST_NHS_BOTH, ods_code=TEST_ORIGINAL_ODS + ) + + initial_formal_lg = helper.get_lloyd_george_record(formal_lg_record["id"]) + initial_informal_lg = helper.get_lloyd_george_record(informal_lg_record["id"]) + initial_both_lg = helper.get_lloyd_george_record(both_lg_record["id"]) + initial_both_review = helper.get_document_review_record( + both_review_record["id"], version=1 + ) + + helper.send_death_notification_message( + nhs_number=TEST_NHS_FORMAL, death_status=DeathNotificationStatus.FORMAL + ) + helper.send_death_notification_message( + nhs_number=TEST_NHS_INFORMAL, death_status=DeathNotificationStatus.INFORMAL + ) + helper.send_death_notification_message( + nhs_number=TEST_NHS_BOTH, death_status=DeathNotificationStatus.FORMAL + ) + + print("\nWaiting 50 seconds for all death notification messages to be processed...") + time.sleep(50) + print("Wait complete, starting death tests...") + + setup_data = { + "formal": { + "record_id": formal_lg_record["id"], + "initial_record": initial_formal_lg, 
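
Note: wait_for_update() is the polling primitive the tests lean on instead of a single fixed sleep per assertion. Typical usage, reusing the NHS number and ODS codes from the module constants:

    from tests.e2e.mns.mns_helper import MNSTestHelper

    helper = MNSTestHelper()
    record = helper.create_lloyd_george_record("9730154198", "Y12345")

    def custodian_updated() -> bool:
        # Re-read the record on each attempt; True once MNS processing has landed.
        item = helper.get_lloyd_george_record(record["id"])
        return item is not None and item["Custodian"] == "H81109"

    assert helper.wait_for_update(custodian_updated, max_retries=5, delay=10)
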
+ }, + "informal": { + "record_id": informal_lg_record["id"], + "initial_record": initial_informal_lg, + }, + "both_tables": { + "lg_record_id": both_lg_record["id"], + "review_record_id": both_review_record["id"], + "initial_lg": initial_both_lg, + "initial_review": initial_both_review, + }, + } + + yield setup_data + + helper.cleanup_lloyd_george_record(formal_lg_record["id"]) + helper.cleanup_lloyd_george_record(informal_lg_record["id"]) + helper.cleanup_lloyd_george_record(both_lg_record["id"]) + helper.cleanup_document_review_record(both_review_record["id"]) + + +@pytest.fixture +def mns_helper(): + return MNSTestHelper() + + +@pytest.fixture +def setup_formal_death_test(setup_all_death_tests): + return setup_all_death_tests["formal"] + + +@pytest.fixture +def setup_informal_death_test(setup_all_death_tests): + return setup_all_death_tests["informal"] + + +@pytest.fixture +def setup_death_both_tables_test(setup_all_death_tests): + return setup_all_death_tests["both_tables"] + + +class TestMNSDeathNotification: + def test_formal_death_notification_marks_patient_deceased( + self, mns_helper, setup_formal_death_test + ): + record_id = setup_formal_death_test["record_id"] + initial_record = setup_formal_death_test["initial_record"] + + assert initial_record["CurrentGpOds"] == TEST_ORIGINAL_ODS + + def check_update(): + return ( + mns_helper.get_lloyd_george_record(record_id)["CurrentGpOds"] + == PatientOdsInactiveStatus.DECEASED.value + ) + + update_successful = mns_helper.wait_for_update(check_update) + assert update_successful, "Lloyd George record was not marked as deceased" + + + def test_formal_death_updates_both_tables( + self, mns_helper, setup_death_both_tables_test + ): + lg_record_id = setup_death_both_tables_test["lg_record_id"] + review_record_id = setup_death_both_tables_test["review_record_id"] + + def check_death_updates(): + try: + lg_record_updated = mns_helper.get_lloyd_george_record(lg_record_id) + lg_deceased = ( + lg_record_updated["CurrentGpOds"] + == PatientOdsInactiveStatus.DECEASED.value + ) + + new_version = mns_helper.get_document_review_record( + review_record_id, version=2 + ) + review_deceased = ( + new_version is not None + and new_version["Custodian"] + == PatientOdsInactiveStatus.DECEASED.value + ) + if new_version: + assert ( + new_version["ReviewStatus"] + == DocumentReviewStatus.PENDING_REVIEW.value + ) + return lg_deceased and review_deceased + except Exception: + return False + + update_successful = mns_helper.wait_for_update(check_death_updates) + assert ( + update_successful + ), "Both tables were not marked as deceased after formal death notification" + + final_review_v1 = mns_helper.get_document_review_record( + review_record_id, version=1 + ) + assert ( + final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value + ) + + def test_informal_death_notification_no_change( + self, mns_helper, setup_informal_death_test + ): + record_id = setup_informal_death_test["record_id"] + initial_record = setup_informal_death_test["initial_record"] + + initial_last_updated = initial_record["LastUpdated"] + + final_record = mns_helper.get_lloyd_george_record(record_id) + assert final_record["CurrentGpOds"] == TEST_ORIGINAL_ODS + assert final_record["LastUpdated"] == initial_last_updated diff --git a/lambdas/tests/e2e/mns/test_mns_process.py b/lambdas/tests/e2e/mns/test_mns_process.py new file mode 100644 index 0000000000..7e4fa5f81c --- /dev/null +++ b/lambdas/tests/e2e/mns/test_mns_process.py @@ -0,0 +1,263 @@ +import time + +import pytest +from 
enums.document_review_status import DocumentReviewStatus +from tests.e2e.mns.mns_helper import TEST_NEW_ODS, TEST_ORIGINAL_ODS, MNSTestHelper + +TEST_NHS_LG = "9730154198" +TEST_NHS_DR = "9730154201" +TEST_NHS_NP = "9730154384" +TEST_NHS_BOTH = "9730154422" + + +@pytest.fixture(scope="session") +def setup_all_tests(): + helper = MNSTestHelper() + + lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_LG, ods_code=TEST_ORIGINAL_ODS + ) + + review_record = helper.create_document_review_record( + nhs_number=TEST_NHS_DR, ods_code=TEST_ORIGINAL_ODS + ) + + non_pending_record = helper.create_document_review_record( + nhs_number=TEST_NHS_NP, + ods_code=TEST_ORIGINAL_ODS, + review_status=DocumentReviewStatus.APPROVED, + ) + + both_lg_record = helper.create_lloyd_george_record( + nhs_number=TEST_NHS_BOTH, ods_code=TEST_ORIGINAL_ODS + ) + + both_review_record = helper.create_document_review_record( + nhs_number=TEST_NHS_BOTH, ods_code=TEST_ORIGINAL_ODS + ) + + initial_lg = helper.get_lloyd_george_record(lg_record["id"]) + initial_dr = helper.get_document_review_record(review_record["id"], version=1) + initial_np = helper.get_document_review_record(non_pending_record["id"], version=1) + initial_both_lg = helper.get_lloyd_george_record(both_lg_record["id"]) + initial_both_review = helper.get_document_review_record( + both_review_record["id"], version=1 + ) + + helper.send_gp_change_message(TEST_NHS_LG) + helper.send_gp_change_message(TEST_NHS_DR) + helper.send_gp_change_message(TEST_NHS_NP) + helper.send_gp_change_message(TEST_NHS_BOTH) + + print("\nWaiting 50 seconds for all SQS messages to be processed...") + time.sleep(50) + print("Wait complete, starting tests...") + + setup_data = { + "lloyd_george": { + "record_id": lg_record["id"], + "initial_record": initial_lg, + }, + "document_review": { + "record_id": review_record["id"], + "initial_record": initial_dr, + }, + "non_pending_review": { + "record_id": non_pending_record["id"], + "initial_record": initial_np, + }, + "both_tables": { + "lg_record_id": both_lg_record["id"], + "review_record_id": both_review_record["id"], + "initial_lg": initial_both_lg, + "initial_review": initial_both_review, + }, + } + + yield setup_data + + helper.cleanup_lloyd_george_record(lg_record["id"]) + helper.cleanup_lloyd_george_record(both_lg_record["id"]) + helper.cleanup_document_review_record(review_record["id"]) + helper.cleanup_document_review_record(non_pending_record["id"]) + helper.cleanup_document_review_record(both_review_record["id"]) + + +@pytest.fixture +def mns_helper(): + return MNSTestHelper() + + +@pytest.fixture +def setup_lloyd_george_test(setup_all_tests): + return setup_all_tests["lloyd_george"] + + +@pytest.fixture +def setup_document_review_test(setup_all_tests): + return setup_all_tests["document_review"] + + +@pytest.fixture +def setup_non_pending_review_test(setup_all_tests): + return setup_all_tests["non_pending_review"] + + +@pytest.fixture +def setup_both_tables_test(setup_all_tests): + return setup_all_tests["both_tables"] + + +class TestMNSChangeOfGP: + def test_gp_change_updates_lloyd_george_record( + self, mns_helper, setup_lloyd_george_test + ): + record_id = setup_lloyd_george_test["record_id"] + initial_record = setup_lloyd_george_test["initial_record"] + + print(initial_record) + assert initial_record["CurrentGpOds"] == TEST_ORIGINAL_ODS + assert initial_record["Custodian"] == TEST_ORIGINAL_ODS + + def check_update(): + updated_record = mns_helper.get_lloyd_george_record(record_id) + print(updated_record) + 
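
Note: both e2e modules follow the same session-scoped pattern: create the records, fire every SQS message up front, wait once for the queue consumer, yield the IDs to the tests, then clean up after the session. Condensed to a single record, the sketch looks like this:

    import time

    import pytest
    from tests.e2e.mns.mns_helper import MNSTestHelper, TEST_ORIGINAL_ODS

    @pytest.fixture(scope="session")
    def gp_change_setup():
        helper = MNSTestHelper()
        record = helper.create_lloyd_george_record("9730154198", TEST_ORIGINAL_ODS)
        helper.send_gp_change_message("9730154198")
        time.sleep(50)  # give the MNS queue consumer time to process the batch
        yield {"record_id": record["id"]}
        helper.cleanup_lloyd_george_record(record["id"])
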
last_updated_changed = ( + updated_record["LastUpdated"] != initial_record["LastUpdated"] + ) + custodian_changed = ( + updated_record["Custodian"] != initial_record["Custodian"] + ) + current_gp_changed = ( + updated_record["CurrentGpOds"] != initial_record["CurrentGpOds"] + ) + return last_updated_changed and custodian_changed and current_gp_changed + + update_successful = mns_helper.wait_for_update(check_update) + assert update_successful, "Lloyd George record was not updated after GP change" + + def test_gp_change_updates_document_review_record( + self, mns_helper, setup_document_review_test + ): + record_id = setup_document_review_test["record_id"] + initial_record = setup_document_review_test["initial_record"] + + assert initial_record["Custodian"] == TEST_ORIGINAL_ODS + assert ( + initial_record["ReviewStatus"] == DocumentReviewStatus.PENDING_REVIEW.value + ) + assert initial_record["Version"] == 1 + + def check_new_version(): + try: + new_version = mns_helper.get_document_review_record( + record_id, version=2 + ) + return new_version is not None + except Exception: + return False + + update_successful = mns_helper.wait_for_update(check_new_version) + assert ( + update_successful + ), "New version of document review record was not created after GP change" + + version_2_record = mns_helper.get_document_review_record(record_id, version=2) + assert version_2_record["Version"] == 2 + assert version_2_record["Custodian"] == TEST_NEW_ODS + assert version_2_record is not None + + version_1_record = mns_helper.get_document_review_record(record_id, version=1) + assert ( + version_1_record["ReviewStatus"] + == DocumentReviewStatus.NEVER_REVIEWED.value + ) + assert version_1_record.get("ReviewDate") is not None + assert version_1_record["Reviewer"] == TEST_ORIGINAL_ODS + + def test_gp_change_non_pending_review_no_new_version( + self, mns_helper, setup_non_pending_review_test + ): + record_id = setup_non_pending_review_test["record_id"] + initial_record = setup_non_pending_review_test["initial_record"] + + assert initial_record["Custodian"] == TEST_ORIGINAL_ODS + assert initial_record["ReviewStatus"] == DocumentReviewStatus.APPROVED.value + assert initial_record["Version"] == 1 + + def check_no_new_version(): + version_1_record = mns_helper.get_document_review_record( + record_id, version=1 + ) + updated = version_1_record.get("Custodian") == TEST_NEW_ODS + return updated + + no_new_version = mns_helper.wait_for_update(check_no_new_version) + assert ( + no_new_version + ), "Version 1 should have been updated for non-PENDING_REVIEW record" + + version_1_record = mns_helper.get_document_review_record(record_id, version=1) + assert version_1_record is not None + assert version_1_record["Version"] == 1 + assert version_1_record["ReviewStatus"] == DocumentReviewStatus.APPROVED.value + assert version_1_record["Custodian"] == TEST_NEW_ODS + + try: + mns_helper.get_document_review_record(record_id, version=2) + assert False, "Version 2 should not exist" + except Exception: + pass + + def test_gp_change_updates_both_tables(self, mns_helper, setup_both_tables_test): + lg_record_id = setup_both_tables_test["lg_record_id"] + review_record_id = setup_both_tables_test["review_record_id"] + initial_lg = setup_both_tables_test["initial_lg"] + initial_review = setup_both_tables_test["initial_review"] + + def check_updates(): + try: + updated_record = mns_helper.get_lloyd_george_record(lg_record_id) + last_updated_changed = ( + updated_record["LastUpdated"] != initial_lg["LastUpdated"] + ) + custodian_changed = 
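
Note: the versioning assertions rely on Version being the sort key of the review table, so every version of a review shares one ID partition. The helper wraps this in get_all_document_review_versions; against raw boto3 the equivalent query would be:

    import boto3
    from boto3.dynamodb.conditions import Key

    def all_review_versions(table_name: str, review_id: str) -> list[dict]:
        table = boto3.resource("dynamodb").Table(table_name)
        response = table.query(KeyConditionExpression=Key("ID").eq(review_id))
        return response.get("Items", [])
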
( + updated_record["Custodian"] != initial_lg["Custodian"] + ) + current_gp_changed = ( + updated_record["CurrentGpOds"] != initial_lg["CurrentGpOds"] + ) + lg_changed = ( + last_updated_changed and custodian_changed and current_gp_changed + ) + + new_review_version = mns_helper.get_document_review_record( + review_record_id, version=2 + ) + review_versioned = new_review_version is not None + + return lg_changed and review_versioned + except Exception: + return False + + update_successful = mns_helper.wait_for_update(check_updates) + assert update_successful, "Both tables were not updated after GP change" + + final_lg_record = mns_helper.get_lloyd_george_record(lg_record_id) + assert final_lg_record is not None + + final_review_v2 = mns_helper.get_document_review_record( + review_record_id, version=2 + ) + assert final_review_v2 is not None + assert final_review_v2["Version"] == 2 + + final_review_v1 = mns_helper.get_document_review_record( + review_record_id, version=1 + ) + assert ( + final_review_v1["ReviewStatus"] == DocumentReviewStatus.NEVER_REVIEWED.value + ) + assert final_review_v1.get("ReviewDate") is not None + assert final_review_v1["Reviewer"] == initial_review["Custodian"] + assert final_review_v1["Custodian"] != initial_lg["Custodian"] diff --git a/lambdas/tests/unit/services/test_document_upload_review_service.py b/lambdas/tests/unit/services/test_document_upload_review_service.py index 9cd488f957..bc5891ee65 100644 --- a/lambdas/tests/unit/services/test_document_upload_review_service.py +++ b/lambdas/tests/unit/services/test_document_upload_review_service.py @@ -45,6 +45,7 @@ def mock_document_review_references(): review.id = f"review-id-{i}" review.version = i review.nhs_number = TEST_NHS_NUMBER + review.review_status = "APPROVED" review.custodian = TEST_ODS_CODE reviews.append(review) return reviews @@ -59,6 +60,7 @@ def mock_review_update(): review_update.nhs_number = TEST_NHS_NUMBER review_update.review_status = DocumentReviewStatus.APPROVED review_update.document_reference_id = "test-doc-ref-id" + review_update.reviewer = TEST_ODS_CODE return review_update @@ -77,37 +79,43 @@ def test_s3_bucket(mock_service, monkeypatch): def test_update_document_review_custodian_updates_all_documents( mock_service, mock_document_review_references, mocker ): - mock_update_document = mocker.patch.object(mock_service, "update_document") + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) mock_service.update_document_review_custodian( mock_document_review_references, NEW_ODS_CODE ) - assert mock_update_document.call_count == 3 - - for review in mock_document_review_references: - assert review.custodian == NEW_ODS_CODE + assert mock_handle_standard.call_count == 3 for review in mock_document_review_references: - mock_update_document.assert_any_call( - document=review, - update_fields_name={"custodian"}, - key_pair={"ID": review.id, "Version": review.version}, - ) + mock_handle_standard.assert_any_call(review, NEW_ODS_CODE, {"custodian"}) def test_update_document_review_custodian_empty_list(mock_service, mocker): - mock_update_document = mocker.patch.object(mock_service, "update_document") + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) + mock_handle_pending = mocker.patch.object( + mock_service, "_handle_pending_review_custodian_update" + ) mock_service.update_document_review_custodian([], NEW_ODS_CODE) - mock_update_document.assert_not_called() + 
mock_handle_standard.assert_not_called() + mock_handle_pending.assert_not_called() def test_update_document_review_custodian_no_changes_needed( mock_service, mock_document_review_references, mocker ): - mock_update_document = mocker.patch.object(mock_service, "update_document") + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) + mock_handle_pending = mocker.patch.object( + mock_service, "_handle_pending_review_custodian_update" + ) for review in mock_document_review_references: review.custodian = NEW_ODS_CODE @@ -116,59 +124,65 @@ def test_update_document_review_custodian_no_changes_needed( mock_document_review_references, NEW_ODS_CODE ) - mock_update_document.assert_not_called() + mock_handle_standard.assert_not_called() + mock_handle_pending.assert_not_called() def test_update_document_review_custodian_mixed_custodians( mock_service, mock_document_review_references, mocker ): - mock_update_document = mocker.patch.object(mock_service, "update_document") - + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) mock_document_review_references[0].custodian = NEW_ODS_CODE mock_service.update_document_review_custodian( mock_document_review_references, NEW_ODS_CODE ) + assert mock_handle_standard.call_count == 2 - assert mock_update_document.call_count == 2 - for review in mock_document_review_references: - assert review.custodian == NEW_ODS_CODE - - -def test_update_document_review_custodian_logging( +def test_update_document_review_custodian_continues_on_error( mock_service, mock_document_review_references, mocker ): - """Test that update_document_review_custodian logs appropriately.""" - mocker.patch.object(mock_service, "update_document") - mock_logger = mocker.patch("services.document_upload_review_service.logger") + mock_handle_standard = mocker.patch.object( + mock_service, "_handle_standard_custodian_update" + ) + + mock_handle_standard.side_effect = [ + DocumentReviewException("Test error"), + ClientError( + {"Error": {"Code": "ConditionalCheckFailedException"}}, "UpdateItem" + ), + None, + ] mock_service.update_document_review_custodian( mock_document_review_references, NEW_ODS_CODE ) + assert mock_handle_standard.call_count == 3 - assert mock_logger.info.call_count == 3 - mock_logger.info.assert_any_call("Updating document review custodian...") - -def test_update_document_review_custodian_single_document(mock_service, mocker): +def test_handle_standard_custodian_update_updates_document(mock_service, mocker): mock_update_document = mocker.patch.object(mock_service, "update_document") - single_review = MagicMock(spec=DocumentUploadReviewReference) - single_review.id = "single-review-id" - single_review.version = 1 - single_review.custodian = TEST_ODS_CODE + review = DocumentUploadReviewReference.model_construct() + review.id = "test-id" + review.version = 1 + review.custodian = TEST_ODS_CODE + + update_fields = {"custodian"} - mock_service.update_document_review_custodian([single_review], NEW_ODS_CODE) + mock_service._handle_standard_custodian_update(review, NEW_ODS_CODE, update_fields) + + assert review.custodian == NEW_ODS_CODE - assert single_review.custodian == NEW_ODS_CODE mock_update_document.assert_called_once_with( - document=single_review, - update_fields_name={"custodian"}, - key_pair={"ID": single_review.id, "Version": single_review.version}, + document=review, + key_pair={"ID": review.id, "Version": review.version}, + update_fields_name=update_fields, ) - def 
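
Note: test_update_document_review_custodian_continues_on_error drives the per-review try/except by giving the mocked handler a list side_effect, so consecutive calls raise, raise, then succeed. The mechanism in isolation (RuntimeError stands in for DocumentReviewException to keep the sketch self-contained):

    from unittest.mock import MagicMock

    from botocore.exceptions import ClientError

    handler = MagicMock()
    handler.side_effect = [
        RuntimeError("first review fails"),
        ClientError({"Error": {"Code": "ConditionalCheckFailedException"}}, "UpdateItem"),
        None,  # third review succeeds
    ]

    for review in ("r1", "r2", "r3"):
        try:
            handler(review)
        except (RuntimeError, ClientError):
            continue

    assert handler.call_count == 3
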
test_get_document_review_by_id( mock_service, mock_document_review_references, mocker ): @@ -392,7 +406,6 @@ def test_delete_document_review_files_handles_s3_error(mock_service, mocker): def test_update_document_review_with_transaction_transaction_cancelled( mock_service, mock_review_update, mocker ): - """Test handling of TransactionCanceledException.""" client_error = ClientError( {"Error": {"Code": "TransactionCanceledException"}}, "TransactWriteItems" ) @@ -408,12 +421,83 @@ def test_update_document_review_with_transaction_transaction_cancelled( existing_review = mock_review_update existing_review.custodian = TEST_ODS_CODE - with pytest.raises(DocumentReviewException) as exc_info: + with pytest.raises(DocumentReviewException): mock_service.update_document_review_with_transaction( new_review, existing_review ) - assert "Failed to update document review" in str(exc_info.value) + +def test_handle_standard_custodian_update_with_client_error(mock_service, mocker): + mock_update_document = mocker.patch.object(mock_service, "update_document") + mock_update_document.side_effect = ClientError( + {"Error": {"Code": "ConditionalCheckFailedException"}}, "UpdateItem" + ) + + review = DocumentUploadReviewReference.model_construct() + review.id = "test-id" + review.version = 1 + review.custodian = TEST_ODS_CODE + + with pytest.raises(ClientError): + mock_service._handle_standard_custodian_update( + review, NEW_ODS_CODE, {"custodian"} + ) + + +@freeze_time("2024-01-15 10:30:00") +def test_handle_pending_review_custodian_update_creates_new_version( + mock_service, mocker +): + mock_transaction_update = mocker.patch.object( + mock_service, "update_document_review_with_transaction" + ) + + expected_timestamp = 1705314600 + + review = DocumentUploadReviewReference.model_construct() + review.id = "pending-review-id" + review.custodian = TEST_ODS_CODE + review.version = 1 + review.review_status = DocumentReviewStatus.PENDING_REVIEW + + new_review_copy = review.model_copy(deep=True) + new_review_copy.version = 2 + new_review_copy.custodian = NEW_ODS_CODE + + mock_service._handle_pending_review_custodian_update( + review, NEW_ODS_CODE, {"custodian"} + ) + + assert review.review_status == DocumentReviewStatus.NEVER_REVIEWED + assert review.review_date == expected_timestamp + assert review.reviewer == TEST_ODS_CODE + assert review.custodian == NEW_ODS_CODE + + mock_transaction_update.assert_called_once_with( + new_review_item=new_review_copy, + existing_review_item=review, + additional_update_fields={"custodian"}, + ) + + +def test_handle_pending_review_custodian_update_with_transaction_failure( + mock_service, mocker +): + mock_transaction_update = mocker.patch.object( + mock_service, "update_document_review_with_transaction" + ) + mock_transaction_update.side_effect = DocumentReviewException("Transaction failed") + + review = MagicMock(spec=DocumentUploadReviewReference) + review.id = "test-id" + review.custodian = TEST_ODS_CODE + review.version = 1 + review.review_status = DocumentReviewStatus.PENDING_REVIEW + + with pytest.raises(DocumentReviewException): + mock_service._handle_pending_review_custodian_update( + review, NEW_ODS_CODE, {"custodian"} + ) def test_build_review_dynamo_filter_creates_filter_from_nhs_number(mock_service): diff --git a/lambdas/utils/exceptions.py b/lambdas/utils/exceptions.py index f748086a3c..75fba33261 100644 --- a/lambdas/utils/exceptions.py +++ b/lambdas/utils/exceptions.py @@ -172,6 +172,30 @@ class TransactionConflictException(Exception): pass +class 
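
Note: the frozen-clock test works because the service stamps review_date with integer UTC epoch seconds, so the expected value is fully determined by the freeze_time string: 2024-01-15T10:30:00Z is 1705314600. The arithmetic in isolation:

    from datetime import datetime, timezone

    from freezegun import freeze_time

    @freeze_time("2024-01-15 10:30:00")
    def review_date_now() -> int:
        # Mirrors the review_date stamp in _handle_pending_review_custodian_update.
        return int(datetime.now(timezone.utc).timestamp())

    assert review_date_now() == 1705314600
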
ReviewProcessVerifyingException(Exception): + pass + + +class ReviewProcessMovingException(Exception): + pass + + +class ReviewProcessDeleteException(Exception): + pass + + +class ReviewProcessCreateRecordException(Exception): + pass + + +class SSHKeyManagementException(Exception): + pass + + +class CorruptedFileException(Exception): + pass + + class MigrationUnrecoverableException(Exception): def __init__(self, message: str, item_id: str): super().__init__(message) @@ -190,27 +214,3 @@ def __init__(self, message: str, segment_id: str): def to_dict(self): return {"segmentId": self.segment_id, "message": self.message} - - -class SSHKeyManagementException(Exception): - pass - - -class ReviewProcessVerifyingException(Exception): - pass - - -class ReviewProcessMovingException(Exception): - pass - - -class ReviewProcessDeleteException(Exception): - pass - - -class ReviewProcessCreateRecordException(Exception): - pass - - -class CorruptedFileException(Exception): - pass