diff --git a/lambdas/handlers/im_alerting_handler.py b/lambdas/handlers/im_alerting_handler.py
index c49e6419d3..0178f60745 100644
--- a/lambdas/handlers/im_alerting_handler.py
+++ b/lambdas/handlers/im_alerting_handler.py
@@ -1,4 +1,5 @@
 import json
+import os
 
 from services.im_alerting_service import IMAlertingService
 from utils.audit_logging_setup import LoggingService
@@ -21,6 +22,7 @@
         "SLACK_CHANNEL_ID",
         "SLACK_BOT_TOKEN",
         "WORKSPACE",
+        "VIRUS_SCANNER_TOPIC_ARN",
     ]
 )
 def lambda_handler(event, context):
@@ -32,4 +34,15 @@ def lambda_handler(event, context):
         logger.info(f"Processing message: {message}")
 
         message_service = IMAlertingService(message)
+
+        if is_virus_scanner_topic(sns_message["Sns"]):
+            message_service.handle_virus_scanner_alert()
+            return
+
         message_service.handle_alarm_alert()
+
+
+def is_virus_scanner_topic(message):
+
+    topic_arn = message.get("TopicArn", "")
+    return topic_arn == os.environ["VIRUS_SCANNER_TOPIC_ARN"]
diff --git a/lambdas/models/fhir/R4/fhir_document_reference.py b/lambdas/models/fhir/R4/fhir_document_reference.py
index da091b5d0b..bccc4dd79e 100644
--- a/lambdas/models/fhir/R4/fhir_document_reference.py
+++ b/lambdas/models/fhir/R4/fhir_document_reference.py
@@ -13,7 +13,6 @@
 )
 from pydantic import BaseModel, Field
 from utils.exceptions import FhirDocumentReferenceException
-
 from utils.ods_utils import PCSE_ODS_CODE
 
 # Constants
diff --git a/lambdas/models/templates/slack_alert_blocks.json b/lambdas/models/templates/slack_alert_blocks.json
index bfed9f8745..76592d3327 100644
--- a/lambdas/models/templates/slack_alert_blocks.json
+++ b/lambdas/models/templates/slack_alert_blocks.json
@@ -27,7 +27,7 @@
     "type": "section",
     "text": {
       "type": "mrkdwn",
-      "text": "*Info:*\n <{{ action_url }}>"
+      "text": "{% if is_initial_message %}*Info:*\n <{{ action_url }}>{% endif %}"
     }
   }
 ]
\ No newline at end of file
diff --git a/lambdas/models/templates/virus_scanner_alert_slack_blocks.json b/lambdas/models/templates/virus_scanner_alert_slack_blocks.json
new file mode 100644
index 0000000000..e539bce16a
--- /dev/null
+++ b/lambdas/models/templates/virus_scanner_alert_slack_blocks.json
@@ -0,0 +1,33 @@
+[
+  {
+    "type": "header",
+    "text": {
+      "type": "plain_text",
+      "text": "{{ topic }}: {{ scan_result }} {{ severity }}"
+    }
+  },
+  {
+    "type": "divider"
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "plain_text",
+      "text": "Scan date and time: {{ scan_date }}"
+    }
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "plain_text",
+      "text": "Scan ID: {{ scan_id }}"
+    }
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "mrkdwn",
+      "text": "*Info:*\n <{{ action_url }}>"
+    }
+  }
+]
\ No newline at end of file
diff --git a/lambdas/services/im_alerting_service.py b/lambdas/services/im_alerting_service.py
index c807da574a..4555c58146 100644
--- a/lambdas/services/im_alerting_service.py
+++ b/lambdas/services/im_alerting_service.py
@@ -194,6 +194,19 @@ def handle_ok_action_trigger(self, tags: dict, alarm_entry: AlarmEntry):
             f"Alarm entry for {alarm_entry.alarm_name_metric} has been updated since reaching OK state"
         )
 
+    def handle_virus_scanner_alert(self):
+
+        slack_blocks = {
+            "blocks": self.compose_virus_scanner_slack_blocks(),
+            "channel": os.environ["SLACK_CHANNEL_ID"],
+        }
+
+        requests.post(
+            url=self.SLACK_POST_CHAT_API,
+            headers=self.slack_headers,
+            data=json.dumps(slack_blocks),
+        )
+
     """ We want to wait for a set time (ALARM_OK_WAIT_SECONDS) to allow the alarm's
     OK state to stabilise before updating the teams & slack alerts to display OK.
     This will prevent a situation where an alarm temporarily reaches an OK
@@ -383,6 +396,10 @@ def extract_alarm_names_from_arns(self, arn_list: list) -> list:
                 alarm_names.append(match.group(1))
         return alarm_names
 
+    def extract_topic_name_from_arn(self, arn: str) -> str:
+        components = arn.split(":")
+        return components[-1]
+
     def add_ttl_to_alarm_entry(self, alarm_entry: AlarmEntry):
         alarm_entry.time_to_exist = int(
             (
@@ -471,7 +488,9 @@ def compose_teams_message(self, alarm_entry: AlarmEntry):
     def send_initial_slack_alert(self, alarm_entry: AlarmEntry):
         slack_message = {
             "channel": alarm_entry.channel_id,
-            "blocks": self.compose_slack_message_blocks(alarm_entry),
+            "blocks": self.compose_slack_message_blocks(
+                alarm_entry=alarm_entry, is_initial_message=True
+            ),
         }
 
         try:
@@ -505,7 +524,9 @@ def send_slack_response(self, alarm_entry: AlarmEntry):
         slack_message = {
             "channel": alarm_entry.channel_id,
             "thread_ts": alarm_entry.slack_timestamp,
-            "blocks": self.compose_slack_message_blocks(alarm_entry),
+            "blocks": self.compose_slack_message_blocks(
+                alarm_entry, is_initial_message=False
+            ),
         }
 
         try:
@@ -529,7 +550,9 @@ def update_original_slack_message(self, alarm_entry: AlarmEntry):
         slack_message = {
             "channel": alarm_entry.channel_id,
             "ts": alarm_entry.slack_timestamp,
-            "blocks": self.compose_slack_message_blocks(alarm_entry),
+            "blocks": self.compose_slack_message_blocks(
+                alarm_entry=alarm_entry, is_initial_message=True
+            ),
         }
 
         requests.post(
@@ -547,7 +570,11 @@
                 f"Unexpected error updating original Slack message for alarm {alarm_entry.alarm_name_metric}: {e}"
             )
 
-    def compose_slack_message_blocks(self, alarm_entry: AlarmEntry):
+    def compose_slack_message_blocks(
+        self,
+        alarm_entry: AlarmEntry,
+        is_initial_message: bool,
+    ):
         with open(f"{os.getcwd()}/models/templates/slack_alert_blocks.json", "r") as f:
             template_content = f.read()
 
@@ -560,6 +587,37 @@ def compose_slack_message_blocks(self, alarm_entry: AlarmEntry):
             "action_url": self.create_action_url(
                 self.confluence_base_url, alarm_entry.alarm_name_metric
             ),
+            "is_initial_message": is_initial_message,
+        }
+
+        rendered_json = template.render(context)
+        return json.loads(rendered_json)
+
+    def compose_virus_scanner_slack_blocks(self):
+        with open(
+            f"{os.getcwd()}/models/templates/virus_scanner_alert_slack_blocks.json", "r"
+        ) as f:
+            template_content = f.read()
+
+        template = Template(template_content)
+
+        topic = self.extract_topic_name_from_arn(self.message["TopicArn"])
+        result = self.message["Message"].get("result", "")
+
+        timestamp = self.create_alarm_timestamp(
+            self.message["Message"].get("dateScanned", "")
+        )
+        scan_date = self.format_time_string(timestamp)
+
+        context = {
+            "topic": topic,
+            "scan_result": result,
+            "scan_date": scan_date,
+            "severity": f":{AlarmSeverity.HIGH.additional_value}:",
+            "scan_id": self.message["Message"].get("id", ""),
+            "action_url": self.create_action_url(
+                self.confluence_base_url, f"{topic} {result}"
+            ),
         }
 
         rendered_json = template.render(context)
diff --git a/lambdas/tests/unit/conftest.py b/lambdas/tests/unit/conftest.py
index 7eaf123bd2..9abfd3ec95 100644
--- a/lambdas/tests/unit/conftest.py
+++ b/lambdas/tests/unit/conftest.py
@@ -225,6 +225,7 @@ def set_env(monkeypatch):
     monkeypatch.setenv("SLACK_BOT_TOKEN", MOCK_SLACK_BOT_TOKEN)
     monkeypatch.setenv("SLACK_CHANNEL_ID", MOCK_ALERTING_SLACK_CHANNEL_ID)
     monkeypatch.setenv("ITOC_TESTING_ODS_CODES", MOCK_ITOC_ODS_CODES)
+    monkeypatch.setenv("VIRUS_SCANNER_TOPIC_ARN", "virus_scanner_topic_arn")
     monkeypatch.setenv("STAGING_STORE_BUCKET_NAME", MOCK_STAGING_STORE_BUCKET)
     monkeypatch.setenv("METADATA_SQS_QUEUE_URL", MOCK_LG_METADATA_SQS_QUEUE)
 
diff --git a/lambdas/tests/unit/handlers/test_im_alerting_handler.py b/lambdas/tests/unit/handlers/test_im_alerting_handler.py
new file mode 100644
index 0000000000..dd16bca396
--- /dev/null
+++ b/lambdas/tests/unit/handlers/test_im_alerting_handler.py
@@ -0,0 +1,64 @@
+import json
+
+import pytest
+from handlers.im_alerting_handler import is_virus_scanner_topic, lambda_handler
+from tests.unit.helpers.data.alerting.mock_sns_alerts import (
+    MOCK_LAMBDA_ALERT_MESSAGE,
+    MOCK_VIRUS_SCANNER_ALERT_SNS_MESSAGE,
+)
+
+
+@pytest.fixture
+def mock_service_with_alarm_alert(mocker):
+    mocked_class = mocker.patch("handlers.im_alerting_handler.IMAlertingService")
+    mocked_instance = mocked_class.return_value
+    mocker.patch.object(mocked_instance, "dynamo_service")
+    mocked_class.return_value.message = MOCK_LAMBDA_ALERT_MESSAGE
+    return mocked_instance
+
+
+@pytest.fixture
+def mock_service_with_virus_scanner_alert(mocker):
+    mocked_class = mocker.patch("handlers.im_alerting_handler.IMAlertingService")
+    mocked_instance = mocked_class.return_value
+    mocker.patch.object(mocked_instance, "dynamo_service")
+    mocked_class.return_value.message = MOCK_VIRUS_SCANNER_ALERT_SNS_MESSAGE
+    return mocked_instance
+
+
+def test_handler_calls_handle_alarm_message_lambda_triggered_by_alarm_message(
+    mock_service_with_alarm_alert, context, set_env
+):
+
+    event = {"Records": [{"Sns": {"Message": json.dumps(MOCK_LAMBDA_ALERT_MESSAGE)}}]}
+    lambda_handler(event, context)
+
+    mock_service_with_alarm_alert.handle_virus_scanner_alert.assert_not_called()
+    mock_service_with_alarm_alert.handle_alarm_alert.assert_called()
+
+
+def test_handler_calls_handle_virus_scanner_alert_lambda_triggered_by_virus_scanner_sns(
+    mock_service_with_virus_scanner_alert, context, set_env
+):
+
+    event = {
+        "Records": [
+            {
+                "Sns": {
+                    "TopicArn": MOCK_VIRUS_SCANNER_ALERT_SNS_MESSAGE["TopicArn"],
+                    "Message": json.dumps(
+                        MOCK_VIRUS_SCANNER_ALERT_SNS_MESSAGE["Message"]
+                    ),
+                }
+            }
+        ]
+    }
+    lambda_handler(event, context)
+    mock_service_with_virus_scanner_alert.handle_virus_scanner_alert.assert_called()
+    mock_service_with_virus_scanner_alert.handle_alarm_alert.assert_not_called()
+
+
+def test_is_virus_scanner_topic(set_env):
+
+    assert is_virus_scanner_topic(MOCK_VIRUS_SCANNER_ALERT_SNS_MESSAGE)
+    assert not is_virus_scanner_topic(MOCK_LAMBDA_ALERT_MESSAGE)
diff --git a/lambdas/tests/unit/helpers/data/mock_slack_alert.json b/lambdas/tests/unit/helpers/data/alerting/mock_slack_initial_alert.json
similarity index 100%
rename from lambdas/tests/unit/helpers/data/mock_slack_alert.json
rename to lambdas/tests/unit/helpers/data/alerting/mock_slack_initial_alert.json
diff --git a/lambdas/tests/unit/helpers/data/alerting/mock_slack_reply.json b/lambdas/tests/unit/helpers/data/alerting/mock_slack_reply.json
new file mode 100644
index 0000000000..78b1a01da1
--- /dev/null
+++ b/lambdas/tests/unit/helpers/data/alerting/mock_slack_reply.json
@@ -0,0 +1,33 @@
+[
+  {
+    "type": "header",
+    "text": {
+      "type": "plain_text",
+      "text": "dev-test_bulk_upload_metadata_queue ApproximateAgeOfOldestMessage Alert: :red_circle:"
+    }
+  },
+  {
+    "type": "divider"
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "plain_text",
+      "text": "History: :red_circle:"
+    }
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "plain_text",
+      "text": "Last state change: 15:10:41 17-04-2025 UTC"
+    }
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "mrkdwn",
+      "text": ""
+    }
+  }
+]
\ No newline at end of file
diff --git a/lambdas/tests/unit/helpers/data/alerting/mock_sns_alerts.py b/lambdas/tests/unit/helpers/data/alerting/mock_sns_alerts.py
new file mode 100644
index 0000000000..f68117bcb5
--- /dev/null
+++ b/lambdas/tests/unit/helpers/data/alerting/mock_sns_alerts.py
@@ -0,0 +1,66 @@
+from tests.unit.conftest import MOCK_LG_METADATA_SQS_QUEUE, TEST_UUID
+
+ALERT_TIME = "2025-04-17T15:10:41.433+0000"
+
+QUEUE_ALERT_MESSAGE = {
+    "AlarmName": "dev_lg_bulk_main_oldest_message_alarm_6d",
+    "AlarmDescription": f"Alarm when a message in queue dev-{MOCK_LG_METADATA_SQS_QUEUE} is older than 6 days.",
+    "NewStateValue": "ALARM",
+    "StateChangeTime": ALERT_TIME,
+    "OldStateValue": "OK",
+    "Trigger": {
+        "MetricName": "ApproximateAgeOfOldestMessage",
+        "Namespace": "AWS/SQS",
+        "StatisticType": "Statistic",
+        "Statistic": "Maximum",
+        "Unit": None,
+        "Dimensions": [
+            {
+                "QueueName": f"dev-{MOCK_LG_METADATA_SQS_QUEUE}",
+            }
+        ],
+    },
+}
+
+MOCK_LAMBDA_ALERT_MESSAGE = {
+    "AlarmName": "dev-alarm_search_patient_details_handler_error",
+    "AlarmDescription": "Triggers when an error has occurred in dev_SearchPatientDetailsLambda.",
+    "AlarmConfigurationUpdatedTimestamp": "2025-04-17T15:08:51.604+0000",
+    "NewStateValue": "ALARM",
+    "StateChangeTime": ALERT_TIME,
+    "OldStateValue": "OK",
+    "Trigger": {
+        "MetricName": "Errors",
+        "Namespace": "AWS/Lambda",
+        "StatisticType": "Statistic",
+        "Statistic": "SUM",
+        "Unit": None,
+        "Dimensions": [
+            {
+                "value": "dev_SearchPatientDetailsLambda",
+                "name": "FunctionName",
+            }
+        ],
+    },
+}
+
+MOCK_LAMBDA_ALARM_SNS_ALERT = {
+    "EventSource": "aws:sns",
+    "EventVersion": "1.0",
+    "EventSubscriptionArn": "arn:aws:sns:region:xxxxxx:dev-sns-search_patient_details_alarms-topicxxxxx:xxxxxx",
+    "Sns": {
+        "Type": "Notification",
+        "MessageId": "xxxxxx",
+        "TopicArn": "arn:aws:sns:region:xxxxxx:dev-sns-search_patient_details_alarms-topicxxxxx",
+        "Subject": 'ALARM: "dev-alarm_search_patient_details_handler_error"',
+        "Message": MOCK_LAMBDA_ALERT_MESSAGE,
+    },
+}
+
+MOCK_VIRUS_SCANNER_ALERT_SNS_MESSAGE = {
+    "Type": "Notification",
+    "MessageId": "xxxxxx",
+    "TopicArn": "virus_scanner_topic_arn",
+    "Subject": "",
+    "Message": {"id": TEST_UUID, "dateScanned": ALERT_TIME, "result": "Error"},
+}
diff --git a/lambdas/tests/unit/helpers/data/mock_teams_alert.json b/lambdas/tests/unit/helpers/data/alerting/mock_teams_alert.json
similarity index 100%
rename from lambdas/tests/unit/helpers/data/mock_teams_alert.json
rename to lambdas/tests/unit/helpers/data/alerting/mock_teams_alert.json
diff --git a/lambdas/tests/unit/helpers/data/alerting/mock_virus_scanner_alert.json b/lambdas/tests/unit/helpers/data/alerting/mock_virus_scanner_alert.json
new file mode 100644
index 0000000000..71076f8bad
--- /dev/null
+++ b/lambdas/tests/unit/helpers/data/alerting/mock_virus_scanner_alert.json
@@ -0,0 +1,33 @@
+[
+  {
+    "type": "header",
+    "text": {
+      "type": "plain_text",
+      "text": "virus_scanner_topic_arn: Error :red_circle:"
+    }
+  },
+  {
+    "type": "divider"
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "plain_text",
+      "text": "Scan date and time: 15:10:41 17-04-2025 UTC"
+    }
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "plain_text",
+      "text": "Scan ID: 1234-4567-8912-HSDF-TEST"
+    }
+  },
+  {
+    "type": "section",
+    "text": {
+      "type": "mrkdwn",
+      "text": "*Info:*\n "
+    }
+  }
+]
\ No newline at end of file
diff --git a/lambdas/tests/unit/services/test_im_alerting.py b/lambdas/tests/unit/services/test_im_alerting.py
index c63e60f555..0609ef6a66 100644
--- a/lambdas/tests/unit/services/test_im_alerting.py
+++ b/lambdas/tests/unit/services/test_im_alerting.py
@@ -14,6 +14,12 @@
     MOCK_ALERTING_SLACK_CHANNEL_ID,
     MOCK_CONFLUENCE_URL,
     MOCK_LG_METADATA_SQS_QUEUE,
+    MOCK_SLACK_BOT_TOKEN,
+)
+from tests.unit.helpers.data.alerting.mock_sns_alerts import (
+    MOCK_LAMBDA_ALERT_MESSAGE,
+    MOCK_VIRUS_SCANNER_ALERT_SNS_MESSAGE,
+    QUEUE_ALERT_MESSAGE,
 )
 
 ALERT_TIME = "2025-04-17T15:10:41.433+0000"
@@ -28,26 +34,6 @@
 BASE_URL = MOCK_CONFLUENCE_URL
 ALERT_TIMESTAMP = int(datetime.fromisoformat(ALERT_TIME).timestamp())
 
-QUEUE_ALERT_MESSAGE = {
-    "AlarmName": "dev_lg_bulk_main_oldest_message_alarm_6d",
-    "AlarmDescription": f"Alarm when a message in queue dev-{MOCK_LG_METADATA_SQS_QUEUE} is older than 6 days.",
-    "NewStateValue": "ALARM",
-    "StateChangeTime": ALERT_TIME,
-    "OldStateValue": "OK",
-    "Trigger": {
-        "MetricName": "ApproximateAgeOfOldestMessage",
-        "Namespace": "AWS/SQS",
-        "StatisticType": "Statistic",
-        "Statistic": "Maximum",
-        "Unit": None,
-        "Dimensions": [
-            {
-                "QueueName": f"dev-{MOCK_LG_METADATA_SQS_QUEUE}",
-            }
-        ],
-    },
-}
-
 QUEUE_ALERT_TAGS = {
     "alarm_group": f"dev-{MOCK_LG_METADATA_SQS_QUEUE}",
     "alarm_metric": "ApproximateAgeOfOldestMessage",
@@ -55,29 +41,6 @@
 }
 
 
-LAMBDA_ALERT_MESSAGE = {
-    "AlarmName": "dev-alarm_search_patient_details_handler_error",
-    "AlarmDescription": "Triggers when an error has occurred in dev_SearchPatientDetailsLambda.",
-    "AlarmConfigurationUpdatedTimestamp": "2025-04-17T15:08:51.604+0000",
-    "NewStateValue": "ALARM",
-    "StateChangeTime": ALERT_TIME,
-    "OldStateValue": "OK",
-    "Trigger": {
-        "MetricName": "Errors",
-        "Namespace": "AWS/Lambda",
-        "StatisticType": "Statistic",
-        "Statistic": "SUM",
-        "Unit": None,
-        "Dimensions": [
-            {
-                "value": "dev_SearchPatientDetailsLambda",
-                "name": "FunctionName",
-            }
-        ],
-    },
-}
-
-
 def read_json(filename: str) -> str:
     filepath = os.path.join(os.path.dirname(__file__), filename)
     with open(filepath, "r") as file:
@@ -125,6 +88,14 @@ def existing_alarm_alerting_service(alerting_service, mocker):
     yield alerting_service
 
 
+@pytest.fixture
+def virus_scanner_alerting_service(mocker, set_env):
+    service = IMAlertingService(MOCK_VIRUS_SCANNER_ALERT_SNS_MESSAGE)
+    mocker.patch.object(service, "dynamo_service")
+    mocker.patch.object(service, "send_initial_slack_alert")
+    yield service
+
+
 ALARM_METRIC_NAME = (
     f'{QUEUE_ALERT_MESSAGE["Trigger"]["Dimensions"][0]["QueueName"]}'
    f' {QUEUE_ALERT_MESSAGE["Trigger"]["MetricName"]}'
@@ -143,7 +114,7 @@ def existing_alarm_entry():
 
 
 @freeze_time(ALERT_TIME)
-def test_handle_new_alert_happy_path(alerting_service):
+def test_handle_new_alarm_alert_happy_path(alerting_service):
     alerting_service.get_all_alarm_tags.return_value = QUEUE_ALERT_TAGS
     alerting_service.get_alarm_history.return_value = []
 
@@ -202,7 +173,7 @@ def test_handle_existing_alarm_entry_happy_path(alerting_service, existing_alarm
 
 
 @freeze_time(ALERT_TIME)
-def test_handle_ok_action_happy_path(ok_alerting_service, existing_alarm_entry):
+def test_handle_alarm_ok_action_happy_path(ok_alerting_service, existing_alarm_entry):
     ok_alerting_service.all_alarm_state_ok.return_value = True
     ok_alerting_service.is_last_updated.return_value = True
     ok_alerting_service.get_all_alarm_tags.return_value = QUEUE_ALERT_TAGS
@@ -245,7 +216,7 @@ def test_handle_ok_action_happy_path(ok_alerting_service, existing_alarm_entry):
 
 
 @freeze_time(ALERT_TIME)
-def test_handle_ok_action_not_all_alarms_ok(
+def test_handle_alarm_ok_action_not_all_alarms_ok(
     mocker, ok_alerting_service, existing_alarm_entry
 ):
     ok_alerting_service.all_alarm_state_ok.return_value = False
@@ -266,7 +237,7 @@ def test_handle_ok_action_not_all_alarms_ok(
 
 
 @freeze_time(ALERT_TIME)
-def test_handle_ok_action_not_last_updated(
+def test_handle_alarm_ok_action_not_last_updated(
     mocker, ok_alerting_service, existing_alarm_entry
 ):
     ok_alerting_service.all_alarm_state_ok.return_value = True
@@ -335,7 +306,7 @@ def test_handle_existing_alarm_history_no_active_alarm_new_episode_created(
     existing_alarm_alerting_service.handle_new_alarm_episode.assert_called()
 
 
-def test_handle_existing_alarm_history_ok_action_trigger_alert_ignored(
+def test_handle_existing_alarm_history_alarm_ok_action_trigger_alert_ignored(
     existing_alarm_alerting_service, existing_alarm_entry
 ):
     alarm_history = [existing_alarm_entry]
@@ -414,8 +385,8 @@ def test_create_action_url_with_lambda_alert(alerting_service):
         "https://confluence.example.com#:~:text=SearchPatientDetailsLambda%20Errors"
     )
     alarm_metric_name = (
-        f'{LAMBDA_ALERT_MESSAGE["Trigger"]["Dimensions"][0]["value"]}'
-        f' {LAMBDA_ALERT_MESSAGE["Trigger"]["MetricName"]}'
+        f'{MOCK_LAMBDA_ALERT_MESSAGE["Trigger"]["Dimensions"][0]["value"]}'
+        f' {MOCK_LAMBDA_ALERT_MESSAGE["Trigger"]["MetricName"]}'
     )
 
     actual = alerting_service.create_action_url(BASE_URL, alarm_metric_name)
@@ -467,13 +438,30 @@ def test_compose_teams_message(alerting_service):
         channel_id=MOCK_ALERTING_SLACK_CHANNEL_ID,
         history=[AlarmSeverity.HIGH],
     )
-    expected = read_json("../helpers/data/mock_teams_alert.json")
+    expected = read_json("../helpers/data/alerting/mock_teams_alert.json")
     actual = json.loads(alerting_service.compose_teams_message(alarm_entry))
 
     assert actual == expected
 
 
-def test_compose_slack_message_blocks(alerting_service):
+def test_compose_slack_message_blocks_initial_message(alerting_service):
+    alarm_entry = AlarmEntry(
+        alarm_name_metric=ALARM_METRIC_NAME,
+        time_created=ALERT_TIMESTAMP,
+        last_updated=ALERT_TIMESTAMP,
+        channel_id=MOCK_ALERTING_SLACK_CHANNEL_ID,
+        history=[AlarmSeverity.HIGH],
+    )
+    expected = read_json("../helpers/data/alerting/mock_slack_initial_alert.json")
+
+    actual = alerting_service.compose_slack_message_blocks(
+        alarm_entry=alarm_entry, is_initial_message=True
+    )
+
+    assert actual == expected
+
+
+def test_compose_slack_message_blocks_message_reply(alerting_service):
     alarm_entry = AlarmEntry(
         alarm_name_metric=ALARM_METRIC_NAME,
         time_created=ALERT_TIMESTAMP,
@@ -481,9 +469,12 @@
         channel_id=MOCK_ALERTING_SLACK_CHANNEL_ID,
         history=[AlarmSeverity.HIGH],
     )
-    expected = read_json("../helpers/data/mock_slack_alert.json")
-    actual = alerting_service.compose_slack_message_blocks(alarm_entry)
+    expected = read_json("../helpers/data/alerting/mock_slack_reply.json")
+
+    actual = alerting_service.compose_slack_message_blocks(
+        alarm_entry=alarm_entry, is_initial_message=False
+    )
 
     assert actual == expected
 
 
@@ -522,6 +513,15 @@ def test_extract_alarm_names_from_arns(alerting_service):
     assert actual == expected
 
 
+def test_extract_topic_name_from_arns(alerting_service):
+    arn = "arn:aws:sns:region:xxxxxx:dev-sns-search_patient_details_alarms-topicxxxxx"
+
+    expected = "dev-sns-search_patient_details_alarms-topicxxxxx"
+    actual = alerting_service.extract_topic_name_from_arn(arn)
+
+    assert actual == expected
+
+
 @freeze_time(ALERT_TIME)
 def test_is_last_updated(alerting_service, existing_alarm_entry):
     alerting_service.dynamo_service.get_item.return_value = {
@@ -608,3 +608,38 @@ def test_is_episode_expired_TTL_past_returns_false(alerting_service):
     )
 
     assert alerting_service.is_episode_expired(alarm_entry) is False
+
+
+def test_compose_virus_scanner_slack_blocks(virus_scanner_alerting_service, set_env):
+
+    expected_blocks = read_json(
+        "../helpers/data/alerting/mock_virus_scanner_alert.json"
+    )
+
+    actual_blocks = virus_scanner_alerting_service.compose_virus_scanner_slack_blocks()
+
+    assert actual_blocks == expected_blocks
+
+
+def test_handle_virus_scanner_alert(virus_scanner_alerting_service, mocker):
+    mock_post = mocker.patch("lambdas.services.im_alerting_service.requests.post")
+
+    expected_blocks = read_json(
+        "../helpers/data/alerting/mock_virus_scanner_alert.json"
+    )
+
+    expected_slack_message = {
+        "blocks": expected_blocks,
+        "channel": MOCK_ALERTING_SLACK_CHANNEL_ID,
+    }
+
+    virus_scanner_alerting_service.handle_virus_scanner_alert()
+
+    mock_post.assert_called_with(
+        url="https://slack.com/api/chat.postMessage",
+        headers={
+            "Content-type": "application/json; charset=utf-8",
+            "Authorization": f"Bearer {MOCK_SLACK_BOT_TOKEN}",
+        },
+        data=json.dumps(expected_slack_message),
+    )