2024-09-23 15:01:59 -03:00
|
|
|
import hashlib
|
2023-09-19 09:41:47 +01:00
|
|
|
from unittest.mock import call, patch
|
2023-08-28 14:13:01 +02:00
|
|
|
|
2022-06-03 08:09:47 -06:00
|
|
|
import pytest
|
|
|
|
|
|
2024-03-27 13:37:01 +01:00
|
|
|
from apps.alerts.constants import ActionSource, AlertGroupState
|
2022-06-03 08:09:47 -06:00
|
|
|
from apps.alerts.incident_appearance.renderers.phone_call_renderer import AlertGroupPhoneCallRenderer
|
2024-03-20 10:54:27 +00:00
|
|
|
from apps.alerts.models import Alert, AlertGroup, AlertGroupLogRecord
|
2023-10-05 14:32:40 +01:00
|
|
|
from apps.alerts.tasks import wipe
|
2024-01-31 15:54:50 -07:00
|
|
|
from apps.alerts.tasks.delete_alert_group import (
|
|
|
|
|
delete_alert_group,
|
|
|
|
|
finish_delete_alert_group,
|
|
|
|
|
send_alert_group_signal_for_delete,
|
|
|
|
|
)
|
2023-09-19 09:41:47 +01:00
|
|
|
from apps.slack.client import SlackClient
|
|
|
|
|
from apps.slack.errors import SlackAPIMessageNotFoundError, SlackAPIRatelimitError
|
|
|
|
|
from apps.slack.tests.conftest import build_slack_response
|
2022-06-03 08:09:47 -06:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_render_for_phone_call(
    make_organization_with_slack_team_identity,
    make_alert_receive_channel,
    make_alert_group,
    make_alert,
    make_slack_channel,
    make_slack_message,
):
    """Phone-call rendering should include the integration name, alert title and alert count."""
    organization, slack_team_identity = make_organization_with_slack_team_identity()
    integration = make_alert_receive_channel(organization)
    alert_group = make_alert_group(integration)
    slack_channel = make_slack_channel(slack_team_identity)
    make_slack_message(slack_channel, alert_group=alert_group)

    # Alertmanager-style payload; "alertname" is what ends up as the rendered title.
    payload = {
        "status": "firing",
        "labels": {
            "alertname": "TestAlert",
            "region": "eu-1",
        },
        "annotations": {},
        "startsAt": "2018-12-25T15:47:47.377363608Z",
        "endsAt": "0001-01-01T00:00:00Z",
        "generatorURL": "",
    }
    make_alert(alert_group, raw_request_data=payload)

    expected_verbose_name = (
        f"to check an Alert Group from Grafana OnCall. "
        f"Alert via {integration.verbal_name} - Grafana Legacy Alerting with title TestAlert triggered 1 times"
    )

    assert expected_verbose_name in AlertGroupPhoneCallRenderer(alert_group).render()
|
|
|
|
|
|
|
|
|
|
|
2023-10-05 14:32:40 +01:00
|
|
|
@pytest.mark.django_db
def test_wipe(
    make_organization_and_user,
    make_alert_receive_channel,
    make_alert_group,
    make_alert,
):
    """Wiping an alert group records who wiped it and when, and clears the alert payloads."""
    organization, user = make_organization_and_user()
    channel = make_alert_receive_channel(organization)
    group = make_alert_group(channel)
    alert = make_alert(group, raw_request_data={"test": 42})

    wipe(group.pk, user.pk)

    # Reload both rows so we observe what the task persisted.
    for obj in (group, alert):
        obj.refresh_from_db()

    assert group.wiped_at is not None
    assert group.wiped_by == user
    # The original alert payload must have been erased.
    assert alert.raw_request_data == {}
|
|
|
|
|
|
|
|
|
|
|
2023-09-19 09:41:47 +01:00
|
|
|
@patch.object(SlackClient, "reactions_remove")
@patch.object(SlackClient, "chat_delete")
@pytest.mark.django_db
def test_delete(
    mock_chat_delete,
    mock_reactions_remove,
    make_organization_with_slack_team_identity,
    make_user,
    make_alert_receive_channel,
    make_alert_group,
    make_alert,
    make_slack_channel,
    make_slack_message,
    make_resolution_note_slack_message,
    django_capture_on_commit_callbacks,
):
    """Full alert-group deletion flow: the task chain runs, related rows are removed,
    and the expected Slack cleanup API calls (message deletes / reaction removal) are made.
    """
    organization, slack_team_identity = make_organization_with_slack_team_identity()
    slack_channel1 = make_slack_channel(slack_team_identity)
    slack_channel2 = make_slack_channel(slack_team_identity)
    user = make_user(organization=organization)

    alert_receive_channel = make_alert_receive_channel(organization)

    alert_group = make_alert_group(alert_receive_channel)
    make_alert(alert_group, raw_request_data={})

    # Create Slack messages: the alert group message itself plus two resolution notes,
    # one posted by the bot (will be deleted) and one added via a "memo" reaction
    # (the reaction will be removed instead).
    slack_message = make_slack_message(slack_channel1, alert_group=alert_group)
    resolution_note_1 = make_resolution_note_slack_message(
        alert_group=alert_group,
        user=user,
        added_by_user=user,
        posted_by_bot=True,
        slack_channel=slack_channel1,
        ts="test1_ts",
    )
    resolution_note_2 = make_resolution_note_slack_message(
        alert_group=alert_group,
        user=user,
        added_by_user=user,
        added_to_resolution_note=True,
        slack_channel=slack_channel2,
        ts="test2_ts",
    )

    # Sanity-check the fixture setup before deleting.
    assert alert_group.alerts.count() == 1
    assert alert_group.slack_messages.count() == 1
    assert alert_group.resolution_note_slack_messages.count() == 2

    # Step 1: delete_alert_group schedules send_alert_group_signal_for_delete on commit.
    with patch(
        "apps.alerts.tasks.delete_alert_group.send_alert_group_signal_for_delete.delay", return_value=None
    ) as mock_send_alert_group_signal:
        with django_capture_on_commit_callbacks(execute=True):
            delete_alert_group(alert_group.pk, user.pk)
        assert mock_send_alert_group_signal.call_count == 1

    # Step 2: the signal task in turn schedules finish_delete_alert_group.
    with patch(
        "apps.alerts.tasks.delete_alert_group.finish_delete_alert_group.apply_async", return_value=None
    ) as mock_finish_delete_alert_group:
        send_alert_group_signal_for_delete(*mock_send_alert_group_signal.call_args.args)
        assert mock_finish_delete_alert_group.call_count == 1

    # Step 3: run the final task manually to actually remove the rows.
    finish_delete_alert_group(alert_group.pk)

    # All related objects must be gone...
    assert not alert_group.alerts.exists()
    assert not alert_group.slack_messages.exists()
    assert not alert_group.resolution_note_slack_messages.exists()

    # ...and the alert group itself no longer exists.
    with pytest.raises(AlertGroup.DoesNotExist):
        alert_group.refresh_from_db()

    # Check that appropriate Slack API calls are made:
    # bot-posted messages are deleted, reaction-added notes have the memo reaction removed.
    assert mock_chat_delete.call_count == 2
    assert mock_chat_delete.call_args_list[0] == call(
        channel=resolution_note_1.slack_channel.slack_id, ts=resolution_note_1.ts
    )
    assert mock_chat_delete.call_args_list[1] == call(channel=slack_message.channel.slack_id, ts=slack_message.slack_id)
    mock_reactions_remove.assert_called_once_with(
        channel=resolution_note_2.slack_channel.slack_id, name="memo", timestamp=resolution_note_2.ts
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("api_method", ["reactions_remove", "chat_delete"])
@patch.object(send_alert_group_signal_for_delete, "apply_async")
@pytest.mark.django_db
def test_delete_slack_ratelimit(
    mock_send_alert_group_signal_for_delete,
    api_method,
    make_organization_with_slack_team_identity,
    make_user,
    make_alert_receive_channel,
    make_alert_group,
    make_alert,
    make_slack_channel,
    make_slack_message,
    make_resolution_note_slack_message,
    django_capture_on_commit_callbacks,
):
    """When Slack rate-limits the cleanup call, the signal task must reschedule itself
    with the Retry-After countdown instead of proceeding to finish_delete_alert_group.
    Parametrized over both Slack API methods the cleanup uses.
    """
    organization, slack_team_identity = make_organization_with_slack_team_identity()
    slack_channel1 = make_slack_channel(slack_team_identity)
    slack_channel2 = make_slack_channel(slack_team_identity)

    user = make_user(organization=organization)

    alert_receive_channel = make_alert_receive_channel(organization)

    alert_group = make_alert_group(alert_receive_channel)
    make_alert(alert_group, raw_request_data={})

    # Create Slack messages (same setup as test_delete: one bot-posted note,
    # one reaction-added note, plus the alert group message).
    make_slack_message(slack_channel1, alert_group=alert_group)
    make_resolution_note_slack_message(
        alert_group=alert_group,
        user=user,
        added_by_user=user,
        posted_by_bot=True,
        slack_channel=slack_channel1,
        ts="test1_ts",
    )
    make_resolution_note_slack_message(
        alert_group=alert_group,
        user=user,
        added_by_user=user,
        added_to_resolution_note=True,
        slack_channel=slack_channel2,
        ts="test2_ts",
    )

    # Kick off deletion and capture the args the signal task would be called with.
    with patch(
        "apps.alerts.tasks.delete_alert_group.send_alert_group_signal_for_delete.delay", return_value=None
    ) as mock_send_alert_group_signal:
        with django_capture_on_commit_callbacks(execute=True):
            delete_alert_group(alert_group.pk, user.pk)
        assert mock_send_alert_group_signal.call_count == 1

    # Run the signal task while the chosen Slack API method raises a 429
    # with Retry-After: 42.
    with patch(
        "apps.alerts.tasks.delete_alert_group.finish_delete_alert_group.apply_async", return_value=None
    ) as mock_finish_delete_alert_group:
        with patch.object(
            SlackClient,
            api_method,
            side_effect=SlackAPIRatelimitError(
                response=build_slack_response({"ok": False, "error": "ratelimited"}, headers={"Retry-After": 42})
            ),
        ):
            send_alert_group_signal_for_delete(*mock_send_alert_group_signal.call_args.args)

    # The final deletion step must NOT have been scheduled.
    assert mock_finish_delete_alert_group.call_count == 0

    # Check task is retried gracefully: rescheduled with the server-provided countdown.
    mock_send_alert_group_signal_for_delete.assert_called_once_with(
        mock_send_alert_group_signal.call_args.args, countdown=42
    )
|
2023-09-19 09:41:47 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("api_method", ["reactions_remove", "chat_delete"])
@patch.object(delete_alert_group, "apply_async")
@pytest.mark.django_db
def test_delete_slack_api_error_other_than_ratelimit(
    mock_delete_alert_group,
    api_method,
    make_organization_with_slack_team_identity,
    make_user,
    make_alert_receive_channel,
    make_alert_group,
    make_alert,
    make_slack_channel,
    make_slack_message,
    make_resolution_note_slack_message,
):
    """Non-ratelimit Slack errors (e.g. message_not_found) during deletion cleanup
    must be swallowed: no exception propagates and the task is not retried.
    """
    organization, slack_team_identity = make_organization_with_slack_team_identity()
    slack_channel1 = make_slack_channel(slack_team_identity)
    slack_channel2 = make_slack_channel(slack_team_identity)
    user = make_user(organization=organization)

    alert_receive_channel = make_alert_receive_channel(organization)

    alert_group = make_alert_group(alert_receive_channel)
    make_alert(alert_group, raw_request_data={})

    # Create Slack messages (bot-posted note + reaction-added note + group message).
    make_slack_message(slack_channel1, alert_group=alert_group)
    make_resolution_note_slack_message(
        alert_group=alert_group,
        user=user,
        added_by_user=user,
        posted_by_bot=True,
        slack_channel=slack_channel1,
        ts="test1_ts",
    )
    make_resolution_note_slack_message(
        alert_group=alert_group,
        user=user,
        added_by_user=user,
        added_to_resolution_note=True,
        slack_channel=slack_channel2,
        ts="test2_ts",
    )

    # Make the chosen Slack API method fail with a non-retryable error.
    with patch.object(
        SlackClient,
        api_method,
        side_effect=SlackAPIMessageNotFoundError(
            response=build_slack_response({"ok": False, "error": "message_not_found"})
        ),
    ):
        delete_alert_group(alert_group.pk, user.pk)  # check no exception is raised

    # Check task is not retried
    mock_delete_alert_group.assert_not_called()
|
|
|
|
|
|
2023-06-19 12:43:46 +08:00
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_alerts_count_gt(
    make_organization,
    make_alert_receive_channel,
    make_alert_group,
    make_alert,
):
    """alerts_count_gt(n) is a strict greater-than check on the number of alerts."""
    org = make_organization()
    group = make_alert_group(make_alert_receive_channel(org))

    # With no alerts at all, nothing is greater than 1.
    assert group.alerts_count_gt(1) is False

    for _ in range(2):
        make_alert(group, raw_request_data={})

    # Exactly two alerts now: strictly greater than 1, but not than 2 or 3.
    assert group.alerts_count_gt(1) is True
    assert group.alerts_count_gt(2) is False
    assert group.alerts_count_gt(3) is False
|
2023-08-28 14:13:01 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("apps.alerts.models.AlertGroup.start_unsilence_task", return_value=None)
@pytest.mark.django_db
def test_silence_by_user_for_period(
    mocked_start_unsilence_task,
    make_organization_and_user,
    make_alert_receive_channel,
    make_alert_group,
):
    """Silencing for a fixed period writes a TYPE_SILENCE log record, marks the group
    silenced, shifts the escalation snapshot's next_step_eta by the delay, and
    schedules the unsilence task.
    """
    organization, user = make_organization_and_user()
    alert_receive_channel = make_alert_receive_channel(organization)

    alert_group = make_alert_group(alert_receive_channel)

    raw_next_step_eta = "2023-08-28T09:27:26.627047Z"
    silence_delay = 120 * 60  # two hours, in seconds
    # Expected ETA after silencing: original + 2h + 10s
    # (the extra 10s presumably being START_ESCALATION_DELAY — defined elsewhere).
    updated_raw_next_step_eta = "2023-08-28T11:27:36.627047Z"  # silence_delay + START_ESCALATION_DELAY

    alert_group.raw_escalation_snapshot = alert_group.build_raw_escalation_snapshot()
    alert_group.raw_escalation_snapshot["next_step_eta"] = raw_next_step_eta

    # No silence log record exists yet.
    assert not alert_group.log_records.filter(
        type=AlertGroupLogRecord.TYPE_SILENCE,
        author=user,
    ).exists()

    alert_group.silence_by_user_or_backsync(user, silence_delay=silence_delay)

    # Silencing must have produced a log record attributed to the user.
    assert alert_group.log_records.filter(
        type=AlertGroupLogRecord.TYPE_SILENCE,
        author=user,
    ).exists()

    alert_group.refresh_from_db()

    assert alert_group.silenced
    # The escalation ETA was pushed forward by the silence period.
    assert alert_group.raw_escalation_snapshot["next_step_eta"] == updated_raw_next_step_eta
    # A timed silence schedules the automatic unsilence task.
    assert mocked_start_unsilence_task.called
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("apps.alerts.models.AlertGroup.start_unsilence_task", return_value=None)
@pytest.mark.django_db
def test_silence_by_user_forever(
    mocked_start_unsilence_task,
    make_organization_and_user,
    make_alert_receive_channel,
    make_alert_group,
):
    """silence_delay=None means 'silence forever': a log record is written and the
    group is silenced, but no unsilence task is scheduled and the escalation ETA
    stays untouched.
    """
    organization, user = make_organization_and_user()
    group = make_alert_group(make_alert_receive_channel(organization))

    next_step_eta = "2023-08-28T09:27:26.627047Z"
    group.raw_escalation_snapshot = group.build_raw_escalation_snapshot()
    group.raw_escalation_snapshot["next_step_eta"] = next_step_eta

    # Lazy queryset: re-evaluated at each .exists() call below.
    silence_logs = group.log_records.filter(type=AlertGroupLogRecord.TYPE_SILENCE, author=user)
    assert not silence_logs.exists()

    group.silence_by_user_or_backsync(user, silence_delay=None)

    assert silence_logs.exists()

    group.refresh_from_db()
    assert group.silenced
    # Forever-silence neither shifts the escalation ETA nor schedules an unsilence task.
    assert group.raw_escalation_snapshot["next_step_eta"] == next_step_eta
    assert not mocked_start_unsilence_task.called
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("apps.alerts.models.AlertGroup.start_unsilence_task", return_value=None)
@pytest.mark.django_db
def test_bulk_silence_for_period(
    mocked_start_unsilence_task,
    make_organization_and_user,
    make_alert_receive_channel,
    make_alert_group,
):
    """AlertGroup.bulk_silence with a period behaves like the single-group silence:
    log record written, group silenced, next_step_eta shifted, unsilence task started.
    """
    organization, user = make_organization_and_user()
    alert_receive_channel = make_alert_receive_channel(organization)

    alert_group = make_alert_group(alert_receive_channel)

    raw_next_step_eta = "2023-08-28T09:27:26.627047Z"
    silence_delay = 120 * 60  # two hours, in seconds
    # Expected ETA after silencing: original + 2h + 10s
    # (the extra 10s presumably being START_ESCALATION_DELAY — defined elsewhere).
    updated_raw_next_step_eta = "2023-08-28T11:27:36.627047Z"  # silence_delay + START_ESCALATION_DELAY

    alert_group.raw_escalation_snapshot = alert_group.build_raw_escalation_snapshot()
    alert_group.raw_escalation_snapshot["next_step_eta"] = raw_next_step_eta
    # bulk_silence operates on a queryset, so the snapshot must be persisted first.
    alert_group.save()

    alert_groups = AlertGroup.objects.filter(pk__in=[alert_group.id])

    # No silence log record exists yet.
    assert not alert_group.log_records.filter(
        type=AlertGroupLogRecord.TYPE_SILENCE,
        author=user,
    ).exists()

    AlertGroup.bulk_silence(user, alert_groups, silence_delay=silence_delay)

    # Silencing must have produced a log record attributed to the user.
    assert alert_group.log_records.filter(
        type=AlertGroupLogRecord.TYPE_SILENCE,
        author=user,
    ).exists()

    alert_group.refresh_from_db()

    assert alert_group.silenced
    # The escalation ETA was pushed forward by the silence period.
    assert alert_group.raw_escalation_snapshot["next_step_eta"] == updated_raw_next_step_eta
    # A timed silence schedules the automatic unsilence task.
    assert mocked_start_unsilence_task.called
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("apps.alerts.models.AlertGroup.start_unsilence_task", return_value=None)
@pytest.mark.django_db
def test_bulk_silence_forever(
    mocked_start_unsilence_task,
    make_organization_and_user,
    make_alert_receive_channel,
    make_alert_group,
):
    """bulk_silence with silence_delay=0 silences forever: a log record is written and
    the group is silenced, but no unsilence task is scheduled and the escalation ETA
    stays untouched.
    """
    organization, user = make_organization_and_user()
    group = make_alert_group(make_alert_receive_channel(organization))

    next_step_eta = "2023-08-28T09:27:26.627047Z"
    group.raw_escalation_snapshot = group.build_raw_escalation_snapshot()
    group.raw_escalation_snapshot["next_step_eta"] = next_step_eta
    # bulk_silence operates on a queryset, so persist the snapshot first.
    group.save()

    queryset = AlertGroup.objects.filter(pk__in=[group.id])

    # Lazy queryset: re-evaluated at each .exists() call below.
    silence_logs = group.log_records.filter(type=AlertGroupLogRecord.TYPE_SILENCE, author=user)
    assert not silence_logs.exists()

    AlertGroup.bulk_silence(user, queryset, silence_delay=0)

    assert silence_logs.exists()

    group.refresh_from_db()
    assert group.silenced
    # Forever-silence neither shifts the escalation ETA nor schedules an unsilence task.
    assert group.raw_escalation_snapshot["next_step_eta"] == next_step_eta
    assert not mocked_start_unsilence_task.called
|
2023-10-05 09:46:48 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize("action_source", ActionSource)
@pytest.mark.django_db
def test_alert_group_log_record_action_source(
    make_organization_and_user,
    make_alert_receive_channel,
    make_alert_group,
    action_source,
):
    """Test that action source is saved in alert group log record.

    Runs every AlertGroup state transition (silence/unsilence, ack/unack,
    resolve/unresolve, and — for non-backsync sources — attach/unattach) and
    checks the resulting log record carries both the expected type and the
    parametrized action source.
    """
    organization, user = make_organization_and_user()
    alert_receive_channel = make_alert_receive_channel(organization)

    alert_group = make_alert_group(alert_receive_channel)
    root_alert_group = make_alert_group(alert_receive_channel)

    # Backsync actions are attributed to a source integration, not a user.
    if action_source == ActionSource.BACKSYNC:
        base_kwargs = {
            "source_channel": alert_receive_channel,
        }
    else:
        base_kwargs = {
            "user": user,
        }

    # Silence alert group
    alert_group.silence_by_user_or_backsync(**base_kwargs, silence_delay=42, action_source=action_source)
    log_record = alert_group.log_records.last()
    assert (log_record.type, log_record.action_source) == (AlertGroupLogRecord.TYPE_SILENCE, action_source)

    # Unsilence alert group
    alert_group.un_silence_by_user_or_backsync(**base_kwargs, action_source=action_source)
    log_record = alert_group.log_records.last()
    assert (log_record.type, log_record.action_source) == (AlertGroupLogRecord.TYPE_UN_SILENCE, action_source)

    # Acknowledge alert group
    alert_group.acknowledge_by_user_or_backsync(**base_kwargs, action_source=action_source)
    log_record = alert_group.log_records.last()
    assert (log_record.type, log_record.action_source) == (AlertGroupLogRecord.TYPE_ACK, action_source)

    # Unacknowledge alert group
    alert_group.un_acknowledge_by_user_or_backsync(**base_kwargs, action_source=action_source)
    log_record = alert_group.log_records.last()
    assert (log_record.type, log_record.action_source) == (AlertGroupLogRecord.TYPE_UN_ACK, action_source)

    # Resolve alert group
    alert_group.resolve_by_user_or_backsync(**base_kwargs, action_source=action_source)
    log_record = alert_group.log_records.last()
    assert (log_record.type, log_record.action_source) == (AlertGroupLogRecord.TYPE_RESOLVED, action_source)

    # Unresolve alert group
    alert_group.un_resolve_by_user_or_backsync(**base_kwargs, action_source=action_source)
    log_record = alert_group.log_records.last()
    assert (log_record.type, log_record.action_source) == (AlertGroupLogRecord.TYPE_UN_RESOLVED, action_source)

    # Attach/unattach are user-only operations — there is no backsync variant.
    if action_source != ActionSource.BACKSYNC:
        # Attach alert group
        alert_group.attach_by_user(user, root_alert_group, action_source=action_source)
        log_record = alert_group.log_records.last()
        assert (log_record.type, log_record.action_source) == (AlertGroupLogRecord.TYPE_ATTACHED, action_source)

        # Unattach alert group
        alert_group.un_attach_by_user(user, action_source=action_source)
        log_record = alert_group.log_records.last()
        assert (log_record.type, log_record.action_source) == (AlertGroupLogRecord.TYPE_UNATTACHED, action_source)
|
Add responders improvements (#3128)
# What this PR does
https://www.loom.com/share/c5e10b5ec51343d0954c6f41cfd6a5fb
## Summary of backend changes
- Add `AlertReceiveChannel.get_orgs_direct_paging_integrations` method
and `AlertReceiveChannel.is_contactable` property. These are needed to
be able to (optionally) filter down teams, in the `GET /teams` internal
API endpoint
([here](https://github.com/grafana/oncall/pull/3128/files#diff-a4bd76e557f7e11dafb28a52c1034c075028c693b3c12d702d53c07fc6f24c05R55-R63)),
to just teams that have a "contactable" Direct Paging integration
- `engine/apps/alerts/paging.py`
- update these functions to support new UX. In short `direct_paging` no
longer takes a list of `ScheduleNotifications` or an `EscalationChain`
object
- add `user_is_oncall` helper function
- add `_construct_title` helper function. In short if no `title` is
provided, which is the case for Direct Pages originating from OnCall
(either UI or Slack), then the format is `f"{from_user.username} is
paging <team.name (if team is specified> <comma separated list of
user.usernames> to join escalation"`
- `engine/apps/api/serializers/team.py` - add
`number_of_users_currently_oncall` attribute to response schema
([code](https://github.com/grafana/oncall/pull/3128/files#diff-26af48f796c9e987a76447586dd0f92349783d6ea6a0b6039a2f0f28bd58c2ebR45-R52))
- `engine/apps/api/serializers/user.py` - add `is_currently_oncall`
attribute to response schema
([code](https://github.com/grafana/oncall/pull/3128/files#diff-6744b5544ebb120437af98a996da5ad7d48ee1139a6112c7e3904010ab98f232R157-R162))
- `engine/apps/api/views/team.py` - add support for two new optional
query params `only_include_notifiable_teams` and `include_no_team`
([code](https://github.com/grafana/oncall/pull/3128/files#diff-a4bd76e557f7e11dafb28a52c1034c075028c693b3c12d702d53c07fc6f24c05R55-R70))
- `engine/apps/api/views/user.py`
- in the `GET /users` internal API endpoint, when specifying the
`search` query param now also search on `teams__name`
([code](https://github.com/grafana/oncall/pull/3128/files#diff-30309629484ad28e6fe09816e1bd226226d652ea977b6f3b6775976c729bf4b5R223);
this is a new UX requirement)
- add support for a new optional query param, `is_currently_oncall`, to
allow filtering users based on.. whether they are currently on call or
not
([code](https://github.com/grafana/oncall/pull/3128/files#diff-30309629484ad28e6fe09816e1bd226226d652ea977b6f3b6775976c729bf4b5R272-R282))
- remove `check_availability` endpoint (no longer used with new UX; also
removed references in frontend code)
- `engine/apps/slack/scenarios/paging.py` and
`engine/apps/slack/scenarios/manage_responders.py` - update Slack
workflows to support new UX. Schedules are no longer a concept here.
When creating a new alert group via `/escalate` the user either
specifies a team and/or user(s) (they must specify at least one of the
two and validation is done here to check this). When adding responders
to an existing alert group it's simply a list of users that they can
add, no more schedules.
- add `Organization.slack_is_configured` and
`Organization.telegram_is_configured` properties. These are needed to
support [this new functionality
](https://github.com/grafana/oncall/pull/3128/files#diff-9d96504027309f2bd1e95352bac1433b09b60eb4fafb611b52a6c15ed16cbc48R271-R272)
in the `AlertReceiveChannel` model.
## Summary of frontend changes
- Refactor/rename `EscalationVariants` component to `AddResponders` +
remove `grafana-plugin/src/containers/UserWarningModal` (no longer
needed with new UX)
- Remove `grafana-plugin/src/models/user.ts` as it seemed to be a
duplicate of `grafana-plugin/src/models/user/user.types.ts`
Related to https://github.com/grafana/incident/issues/4278
- Closes #3115
- Closes #3116
- Closes #3117
- Closes #3118
- Closes #3177
## TODO
- [x] make frontend changes
- [x] update Slack backend functionality
- [x] update public documentation
- [x] add/update e2e tests
## Post-deploy To-dos
- [ ] update dev/ops/production Slack bots to update `/escalate` command
description (should now say "Direct page a team or user(s)")
## Checklist
- [x] Unit, integration, and e2e (if applicable) tests updated
- [x] Documentation added (or `pr:no public docs` PR label added if not
required)
- [x] `CHANGELOG.md` updated (or `pr:no changelog` PR label added if not
required)
2023-10-27 12:12:07 -04:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_alert_group_get_paged_users(
    make_organization_and_user,
    make_user_for_organization,
    make_alert_receive_channel,
    make_alert_group,
):
    """get_paged_users must reflect the chronological page/unpage log history:
    a user appears at most once, only if their most recent event is a page,
    and the 'important' flag from the page event is surfaced.
    """
    organization, user = make_organization_and_user()
    other_user = make_user_for_organization(organization)
    alert_receive_channel = make_alert_receive_channel(organization)

    def _make_log_record(alert_group, user, log_type, important=False):
        # Minimal stand-in for the log record direct paging produces;
        # step_specific_info mirrors what get_paged_users reads.
        alert_group.log_records.create(
            type=log_type,
            author=user,
            reason="paged user",
            step_specific_info={
                "user": user.public_primary_key,
                "important": important,
            },
        )

    # user was paged - also check that important is persisted/available
    alert_group = make_alert_group(alert_receive_channel)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)
    _make_log_record(alert_group, other_user, AlertGroupLogRecord.TYPE_DIRECT_PAGING, True)

    # Map pk -> important for easy assertions.
    paged_users = {u["pk"]: u["important"] for u in alert_group.get_paged_users()}

    assert user.public_primary_key in paged_users
    assert paged_users[user.public_primary_key] is False

    assert other_user.public_primary_key in paged_users
    assert paged_users[other_user.public_primary_key] is True

    # user was paged and then unpaged
    alert_group = make_alert_group(alert_receive_channel)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_UNPAGE_USER)

    _make_log_record(alert_group, other_user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)

    # Only other_user remains paged.
    assert alert_group.get_paged_users()[0]["pk"] == other_user.public_primary_key

    # user was paged, unpaged, and then paged again - they should only show up once
    alert_group = make_alert_group(alert_receive_channel)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_UNPAGE_USER)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)

    paged_users = alert_group.get_paged_users()
    assert len(paged_users) == 1
    assert alert_group.get_paged_users()[0]["pk"] == user.public_primary_key

    # user was paged and then paged again - they should only show up once
    alert_group = make_alert_group(alert_receive_channel)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)

    paged_users = alert_group.get_paged_users()
    assert len(paged_users) == 1
    assert alert_group.get_paged_users()[0]["pk"] == user.public_primary_key

    # user was paged and then paged again, then unpaged - they should not show up
    alert_group = make_alert_group(alert_receive_channel)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_UNPAGE_USER)

    paged_users = alert_group.get_paged_users()
    assert len(paged_users) == 0

    # adding extra unpage events should not break things
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_UNPAGE_USER)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_UNPAGE_USER)
    _make_log_record(alert_group, user, AlertGroupLogRecord.TYPE_DIRECT_PAGING)

    paged_users = alert_group.get_paged_users()
    assert len(paged_users) == 1
    assert alert_group.get_paged_users()[0]["pk"] == user.public_primary_key
|
|
|
|
|
|
2023-11-07 14:44:23 +01:00
|
|
|
|
|
|
|
|
@patch("apps.alerts.models.AlertGroup.start_unsilence_task", return_value=None)
|
|
|
|
|
@pytest.mark.django_db
|
|
|
|
|
def test_filter_active_alert_groups(
|
|
|
|
|
mocked_start_unsilence_task,
|
|
|
|
|
make_organization_and_user,
|
|
|
|
|
make_alert_receive_channel,
|
|
|
|
|
make_alert_group,
|
|
|
|
|
):
|
|
|
|
|
organization, user = make_organization_and_user()
|
|
|
|
|
alert_receive_channel = make_alert_receive_channel(organization)
|
|
|
|
|
|
|
|
|
|
# alert groups with active escalation
|
|
|
|
|
alert_group_active = make_alert_group(alert_receive_channel)
|
|
|
|
|
alert_group_active_silenced = make_alert_group(alert_receive_channel)
|
2024-03-27 13:37:01 +01:00
|
|
|
alert_group_active_silenced.silence_by_user_or_backsync(user, silence_delay=1800) # silence by period
|
2023-11-07 14:44:23 +01:00
|
|
|
# alert groups with inactive escalation
|
|
|
|
|
alert_group_1 = make_alert_group(alert_receive_channel)
|
2024-03-27 13:37:01 +01:00
|
|
|
alert_group_1.acknowledge_by_user_or_backsync(user)
|
2023-11-07 14:44:23 +01:00
|
|
|
alert_group_2 = make_alert_group(alert_receive_channel)
|
2024-03-27 13:37:01 +01:00
|
|
|
alert_group_2.resolve_by_user_or_backsync(user)
|
2023-11-07 14:44:23 +01:00
|
|
|
alert_group_3 = make_alert_group(alert_receive_channel)
|
|
|
|
|
alert_group_3.attach_by_user(user, alert_group_active)
|
|
|
|
|
alert_group_4 = make_alert_group(alert_receive_channel)
|
2024-03-27 13:37:01 +01:00
|
|
|
alert_group_4.silence_by_user_or_backsync(user, silence_delay=None) # silence forever
|
2023-11-07 14:44:23 +01:00
|
|
|
|
|
|
|
|
active_alert_groups = AlertGroup.objects.filter_active()
|
|
|
|
|
assert active_alert_groups.count() == 2
|
|
|
|
|
assert alert_group_active in active_alert_groups
|
|
|
|
|
assert alert_group_active_silenced in active_alert_groups
|
2023-12-11 12:16:00 -05:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("apps.alerts.models.AlertGroup.hard_delete")
|
|
|
|
|
@patch("apps.alerts.models.AlertGroup.un_attach_by_delete")
|
|
|
|
|
@patch("apps.alerts.models.AlertGroup.stop_escalation")
|
2024-01-31 15:54:50 -07:00
|
|
|
@patch("apps.alerts.tasks.delete_alert_group.alert_group_action_triggered_signal")
|
2023-12-11 12:16:00 -05:00
|
|
|
@pytest.mark.django_db
|
|
|
|
|
def test_delete_by_user(
|
|
|
|
|
mock_alert_group_action_triggered_signal,
|
|
|
|
|
_mock_stop_escalation,
|
|
|
|
|
_mock_un_attach_by_delete,
|
|
|
|
|
_mock_hard_delete,
|
|
|
|
|
make_organization_and_user,
|
|
|
|
|
make_alert_receive_channel,
|
|
|
|
|
make_alert_group,
|
2024-01-31 15:54:50 -07:00
|
|
|
django_capture_on_commit_callbacks,
|
2023-12-11 12:16:00 -05:00
|
|
|
):
|
|
|
|
|
organization, user = make_organization_and_user()
|
|
|
|
|
alert_receive_channel = make_alert_receive_channel(organization)
|
|
|
|
|
|
|
|
|
|
alert_group = make_alert_group(alert_receive_channel)
|
|
|
|
|
|
|
|
|
|
# make a few dependent alert groups
|
|
|
|
|
dependent_alert_groups = [make_alert_group(alert_receive_channel, root_alert_group=alert_group) for _ in range(3)]
|
|
|
|
|
|
|
|
|
|
assert alert_group.log_records.filter(type=AlertGroupLogRecord.TYPE_DELETED).count() == 0
|
|
|
|
|
|
2024-01-31 15:54:50 -07:00
|
|
|
with patch(
|
|
|
|
|
"apps.alerts.tasks.delete_alert_group.send_alert_group_signal_for_delete.delay", return_value=None
|
|
|
|
|
) as mock_send_alert_group_signal:
|
|
|
|
|
with django_capture_on_commit_callbacks(execute=True):
|
|
|
|
|
delete_alert_group(alert_group.pk, user.pk)
|
2023-12-11 12:16:00 -05:00
|
|
|
|
2024-01-31 15:54:50 -07:00
|
|
|
assert mock_send_alert_group_signal.call_count == 1
|
2023-12-11 12:16:00 -05:00
|
|
|
assert alert_group.log_records.filter(type=AlertGroupLogRecord.TYPE_DELETED).count() == 1
|
|
|
|
|
deleted_log_record = alert_group.log_records.get(type=AlertGroupLogRecord.TYPE_DELETED)
|
|
|
|
|
alert_group.stop_escalation.assert_called_once_with()
|
|
|
|
|
|
2024-01-31 15:54:50 -07:00
|
|
|
with patch(
|
|
|
|
|
"apps.alerts.tasks.delete_alert_group.finish_delete_alert_group.apply_async", return_value=None
|
|
|
|
|
) as mock_finish_delete_alert_group:
|
|
|
|
|
send_alert_group_signal_for_delete(*mock_send_alert_group_signal.call_args.args)
|
|
|
|
|
assert mock_finish_delete_alert_group.call_count == 1
|
|
|
|
|
|
2023-12-11 12:16:00 -05:00
|
|
|
mock_alert_group_action_triggered_signal.send.assert_called_once_with(
|
2024-01-31 15:54:50 -07:00
|
|
|
sender=send_alert_group_signal_for_delete,
|
2023-12-11 12:16:00 -05:00
|
|
|
log_record=deleted_log_record.pk,
|
|
|
|
|
force_sync=True,
|
|
|
|
|
)
|
|
|
|
|
|
2024-01-31 15:54:50 -07:00
|
|
|
finish_delete_alert_group(alert_group.pk)
|
|
|
|
|
|
2023-12-11 12:16:00 -05:00
|
|
|
alert_group.hard_delete.assert_called_once_with()
|
|
|
|
|
|
|
|
|
|
for dependent_alert_group in dependent_alert_groups:
|
|
|
|
|
dependent_alert_group.un_attach_by_delete.assert_called_with()
|
2024-03-20 10:54:27 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_integration_config_on_alert_group_created(make_organization, make_alert_receive_channel):
    """The integration config's on_alert_group_created hook fires once per group, not per alert."""
    organization = make_organization()
    # Template groups every incoming alert into one and the same alert group.
    alert_receive_channel = make_alert_receive_channel(organization, grouping_id_template="group_to_one_group")

    # The hook may not exist on every config class, hence create=True.
    with patch.object(
        alert_receive_channel.config, "on_alert_group_created", create=True
    ) as mock_hook:
        alert = None
        for _ in (0, 1):
            alert = Alert.create(
                title="the title",
                message="the message",
                alert_receive_channel=alert_receive_channel,
                raw_request_data={},
                integration_unique_data={},
                image_url=None,
                link_to_upstream_details=None,
            )

    # Both alerts landed in the same group, and the hook ran only on group creation.
    assert alert.group.alerts.count() == 2
    mock_hook.assert_called_once_with(alert.group)
|
2024-03-27 13:37:01 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch.object(AlertGroup, "start_escalation_if_needed")
@pytest.mark.django_db
@pytest.mark.parametrize(
    "new_state,log_type,to_firing_log_type",
    [
        (AlertGroupState.ACKNOWLEDGED, AlertGroupLogRecord.TYPE_ACK, AlertGroupLogRecord.TYPE_UN_ACK),
        (AlertGroupState.RESOLVED, AlertGroupLogRecord.TYPE_RESOLVED, AlertGroupLogRecord.TYPE_UN_RESOLVED),
        (AlertGroupState.SILENCED, AlertGroupLogRecord.TYPE_SILENCE, AlertGroupLogRecord.TYPE_UN_SILENCE),
    ],
)
def test_update_state_by_backsync(
    mock_start_escalation_if_needed,
    new_state,
    log_type,
    to_firing_log_type,
    make_organization,
    make_alert_receive_channel,
    make_alert_group,
):
    """Backsync state transitions log the source integration as the actor, with no user author."""
    organization = make_organization()
    source_channel = make_alert_receive_channel(organization)
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel)

    def _check_last_log(expected_type):
        # Backsync log records carry no author, only the originating integration's name.
        record = alert_group.log_records.last()
        assert record.action_source == ActionSource.BACKSYNC
        assert record.author is None
        assert record.step_specific_info == {"source_integration_name": source_channel.verbal_name}
        assert record.type == expected_type

    assert alert_group.state == AlertGroupState.FIRING

    # Transition: firing -> new_state.
    alert_group.update_state_by_backsync(new_state, source_channel=source_channel)
    alert_group.refresh_from_db()
    assert alert_group.state == new_state
    _check_last_log(log_type)

    # Transition: new_state -> firing again.
    alert_group.update_state_by_backsync(AlertGroupState.FIRING, source_channel=source_channel)
    alert_group.refresh_from_db()
    assert alert_group.state == AlertGroupState.FIRING
    _check_last_log(to_firing_log_type)

    # Returning to firing restarts escalation exactly once.
    mock_start_escalation_if_needed.assert_called_once()
|
2024-09-23 15:01:59 -03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_alert_group_created_if_resolve_condition_but_auto_resolving_disabled(
    make_organization,
    make_alert_receive_channel,
    make_alert_group,
):
    """A resolved group is not reused for grouping when source-based resolving is disabled."""
    organization = make_organization()
    grouping_distinction = "abcdef"
    # Resolve condition always evaluates to True, but auto-resolving is switched off.
    alert_receive_channel = make_alert_receive_channel(
        organization,
        grouping_id_template=grouping_distinction,
        resolve_condition_template="True",
        allow_source_based_resolving=False,
    )
    # A pre-existing resolved group whose distinction matches the incoming alert.
    resolved_alert_group = make_alert_group(
        alert_receive_channel,
        resolved=True,
        distinction=hashlib.md5(grouping_distinction.encode()).hexdigest(),
    )

    # An alert for the same integration arrives.
    incoming = Alert.create(
        title="the title",
        message="the message",
        alert_receive_channel=alert_receive_channel,
        raw_request_data={},
        integration_unique_data={},
        image_url=None,
        link_to_upstream_details=None,
    )

    # A fresh group is started instead of attaching to the resolved one.
    assert incoming.group != resolved_alert_group
|
2024-12-02 10:51:13 -05:00
|
|
|
|
|
|
|
|
|
|
|
|
|
class TestAlertGroupSlackChannelID:
    """Resolution order of AlertGroup.slack_channel_id: slack_message first, then channel_filter, else None."""

    @pytest.mark.django_db
    def test_slack_channel_id_with_slack_message(
        self,
        make_organization_with_slack_team_identity,
        make_alert_receive_channel,
        make_slack_channel,
        make_slack_message,
        make_alert_group,
    ):
        """When a slack_message exists, its channel's slack_id wins."""
        organization, slack_team_identity = make_organization_with_slack_team_identity()
        integration = make_alert_receive_channel(organization)
        group = make_alert_group(integration)
        channel = make_slack_channel(slack_team_identity)
        message = make_slack_message(channel, alert_group=group)

        # The id comes from the message's channel.
        assert group.slack_channel_id == message.channel.slack_id

    @pytest.mark.django_db
    def test_slack_channel_id_with_channel_filter(
        self,
        make_organization_with_slack_team_identity,
        make_alert_receive_channel,
        make_channel_filter,
        make_slack_channel,
        make_alert_group,
    ):
        """Without a slack_message, the channel filter's Slack channel (or org default) is used."""
        organization, slack_team_identity = make_organization_with_slack_team_identity()
        integration = make_alert_receive_channel(organization)
        channel = make_slack_channel(slack_team_identity)
        route = make_channel_filter(integration, slack_channel=channel)
        group = make_alert_group(integration, channel_filter=route)

        # The id comes from the route's Slack channel.
        assert group.slack_channel_id == channel.slack_id

    @pytest.mark.django_db
    def test_slack_channel_id_no_slack_message_no_channel_filter(
        self,
        make_organization_with_slack_team_identity,
        make_alert_receive_channel,
        make_alert_group,
    ):
        """With neither a slack_message nor a channel_filter, the property is None."""
        organization, _ = make_organization_with_slack_team_identity()
        integration = make_alert_receive_channel(organization)
        group = make_alert_group(integration, channel_filter=None)

        # Nothing to resolve the channel from.
        assert group.slack_channel_id is None
|