2023-03-13 18:19:22 -03:00
|
|
|
import json
|
2024-04-27 03:20:08 +05:30
|
|
|
from datetime import timedelta
|
2023-03-13 18:19:22 -03:00
|
|
|
from unittest.mock import call, patch
|
|
|
|
|
|
2024-02-02 16:08:04 -05:00
|
|
|
import httpretty
|
2023-03-13 18:19:22 -03:00
|
|
|
import pytest
|
2024-01-08 19:13:15 -05:00
|
|
|
import requests
|
2023-03-14 14:21:46 -03:00
|
|
|
from django.utils import timezone
|
2023-03-13 18:19:22 -03:00
|
|
|
|
2024-03-20 10:54:27 +00:00
|
|
|
from apps.alerts.models import AlertGroupExternalID, AlertGroupLogRecord, EscalationPolicy
|
2023-04-06 14:52:23 -03:00
|
|
|
from apps.base.models import UserNotificationPolicyLogRecord
|
2023-03-14 14:21:46 -03:00
|
|
|
from apps.public_api.serializers import IncidentSerializer
|
2023-03-13 18:19:22 -03:00
|
|
|
from apps.webhooks.models import Webhook
|
|
|
|
|
from apps.webhooks.tasks import execute_webhook, send_webhook_event
|
2023-04-18 13:03:33 -06:00
|
|
|
from apps.webhooks.tasks.trigger_webhook import NOT_FROM_SELECTED_INTEGRATION
|
2023-04-26 15:55:08 -06:00
|
|
|
from settings.base import WEBHOOK_RESPONSE_LIMIT
|
2023-03-13 18:19:22 -03:00
|
|
|
|
# Expected `timeout=` value (seconds) passed to requests.post by execute_webhook;
# asserted against the mocked post calls below.
TIMEOUT = 4
|
|
|
|
|
|
2023-03-13 18:19:22 -03:00
|
|
|
|
|
|
|
|
class MockResponse:
|
2023-05-09 07:55:05 -06:00
|
|
|
def __init__(self, status_code=200, content=None):
|
2023-03-13 18:19:22 -03:00
|
|
|
self.status_code = status_code
|
2023-05-09 07:55:05 -06:00
|
|
|
if content:
|
|
|
|
|
self.content = content
|
|
|
|
|
else:
|
|
|
|
|
self.content = {"response": self.status_code}
|
2023-03-13 18:19:22 -03:00
|
|
|
|
|
|
|
|
def json(self):
|
2023-05-09 07:55:05 -06:00
|
|
|
return self.content
|
2023-03-13 18:19:22 -03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_send_webhook_event_filters(
    make_organization, make_team, make_alert_receive_channel, make_alert_group, make_custom_webhook
):
    """send_webhook_event only schedules webhooks matching trigger type, org and backsync flag."""
    organization = make_organization()
    other_organization = make_organization()
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel)
    # TRIGGER_STATUS_CHANGE is excluded here; it is covered by the status-change test below
    trigger_types = [t for t, _ in Webhook.TRIGGER_TYPES if t != Webhook.TRIGGER_STATUS_CHANGE]

    # one webhook per trigger type; all but ACKNOWLEDGE are flagged as coming
    # from a connected integration (relevant for the backsync checks below)
    webhooks = {}
    for trigger_type in trigger_types:
        webhooks[trigger_type] = make_custom_webhook(
            organization=organization,
            trigger_type=trigger_type,
            team=make_team(organization),
            is_from_connected_integration=(trigger_type != Webhook.TRIGGER_ACKNOWLEDGE),
        )

    # each event schedules exactly the webhook registered for its trigger type
    for trigger_type in trigger_types:
        with patch("apps.webhooks.tasks.trigger_webhook.execute_webhook.apply_async") as mock_execute:
            send_webhook_event(trigger_type, alert_group.pk, organization_id=organization.pk)
        assert mock_execute.call_args == call(
            (webhooks[trigger_type].pk, alert_group.pk, None, None), kwargs={"trigger_type": trigger_type}
        )

    # backsync event exclude connected integration webhooks
    for trigger_type in trigger_types:
        with patch("apps.webhooks.tasks.trigger_webhook.execute_webhook.apply_async") as mock_execute:
            send_webhook_event(trigger_type, alert_group.pk, organization_id=organization.pk, is_backsync=True)
        if trigger_type == Webhook.TRIGGER_ACKNOWLEDGE:
            assert mock_execute.call_args == call(
                (webhooks[trigger_type].pk, alert_group.pk, None, None), kwargs={"trigger_type": trigger_type}
            )
        else:
            # except for the acknowledge webhook (not connected integration set), the webhook is not triggered
            mock_execute.assert_not_called()

    # other org: events are scoped to the organization they were sent for
    other_org_webhook = make_custom_webhook(
        organization=other_organization, trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED
    )

    alert_receive_channel = make_alert_receive_channel(other_organization)
    alert_group = make_alert_group(alert_receive_channel)
    with patch("apps.webhooks.tasks.trigger_webhook.execute_webhook.apply_async") as mock_execute:
        send_webhook_event(Webhook.TRIGGER_ALERT_GROUP_CREATED, alert_group.pk, organization_id=other_organization.pk)
    assert mock_execute.call_args == call(
        (other_org_webhook.pk, alert_group.pk, None, None), kwargs={"trigger_type": Webhook.TRIGGER_ALERT_GROUP_CREATED}
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_send_webhook_event_status_change(
    make_organization, make_team, make_alert_receive_channel, make_alert_group, make_custom_webhook
):
    """Status-change triggers also schedule the TRIGGER_STATUS_CHANGE webhook, keeping the original type."""
    organization = make_organization()
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel)

    # register one webhook for every available trigger type
    webhooks = {}
    for trigger_type, _ in Webhook.TRIGGER_TYPES:
        webhooks[trigger_type] = make_custom_webhook(
            organization=organization, trigger_type=trigger_type, team=make_team(organization)
        )

    for trigger_type in Webhook.STATUS_CHANGE_TRIGGERS:
        with patch("apps.webhooks.tasks.trigger_webhook.execute_webhook.apply_async") as mock_execute:
            send_webhook_event(trigger_type, alert_group.pk, organization_id=organization.pk)
        # execute is called for the trigger type itself and the status change trigger too (with the original type passed)
        assert mock_execute.call_count == 2
        mock_execute.assert_any_call(
            (webhooks[trigger_type].pk, alert_group.pk, None, None), kwargs={"trigger_type": trigger_type}
        )
        status_change_trigger_type = Webhook.TRIGGER_STATUS_CHANGE
        # the status-change webhook is scheduled with the *original* trigger type in kwargs
        mock_execute.assert_any_call(
            (webhooks[status_change_trigger_type].pk, alert_group.pk, None, None), kwargs={"trigger_type": trigger_type}
        )
|
2023-03-13 18:19:22 -03:00
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_execute_webhook_disabled(
    make_organization, make_team, make_alert_receive_channel, make_alert_group, make_custom_webhook
):
    """Disabled webhooks are skipped: of two matching webhooks only the enabled one is scheduled."""
    organization = make_organization()
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel)
    # enabled webhook (default) and a disabled one for the same trigger type
    make_custom_webhook(organization=organization, trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED)
    make_custom_webhook(
        organization=organization, trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED, is_webhook_enabled=False
    )

    with patch("apps.webhooks.tasks.trigger_webhook.execute_webhook.apply_async") as mock_execute:
        send_webhook_event(Webhook.TRIGGER_ALERT_GROUP_CREATED, alert_group.pk, organization_id=organization.pk)
    # only the enabled webhook produces a task
    mock_execute.assert_called_once()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_execute_webhook_integration_filter_not_matching(
    make_organization, make_team, make_alert_receive_channel, make_alert_group, make_custom_webhook, caplog
):
    """A webhook filtered to a different integration does not fire and logs the reason."""
    organization = make_organization()
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel)
    other_alert_receive_channel = make_alert_receive_channel(organization)
    webhook = make_custom_webhook(
        organization=organization,
        trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED,
    )
    # restrict the webhook to an integration the alert group does NOT belong to
    webhook.filtered_integrations.add(other_alert_receive_channel)

    with patch("apps.webhooks.models.webhook.requests") as mock_requests:
        execute_webhook(webhook.pk, alert_group.pk, None, None)

    assert not mock_requests.post.called
    # no response is created for the webhook
    assert webhook.responses.count() == 0
    # check log should exist
    assert f"Webhook {webhook.pk} was not triggered: {NOT_FROM_SELECTED_INTEGRATION}" in caplog.text
|
2023-04-18 13:03:33 -06:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_execute_webhook_integration_filter_matching(
    make_organization, make_team, make_alert_receive_channel, make_alert_group, make_custom_webhook, caplog
):
    """A matching integration filter lets the webhook proceed to trigger-template evaluation."""
    organization = make_organization()
    alert_receive_channel = make_alert_receive_channel(organization, public_primary_key="test-integration-1")
    alert_group = make_alert_group(alert_receive_channel)
    webhook = make_custom_webhook(
        organization=organization,
        trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED,
        # Check we get past integration filter but exit early to keep test simple
        trigger_template="False",
    )
    # filter includes the alert group's own integration, so the filter passes
    webhook.filtered_integrations.add(alert_receive_channel)

    with patch("apps.webhooks.models.webhook.requests") as mock_requests:
        execute_webhook(webhook.pk, alert_group.pk, None, None)

    assert not mock_requests.post.called
    # no response is created for the webhook
    assert webhook.responses.count() == 0
    # check log should exist: the "False" trigger template is the stated reason, not the filter
    assert f"Webhook {webhook.pk} was not triggered: False" in caplog.text
|
2023-04-18 13:03:33 -06:00
|
|
|
|
|
|
|
|
|
# Fixed public primary key assigned to the alert group in test_execute_webhook_ok,
# so templated URLs/payloads can be asserted against a known value.
ALERT_GROUP_PUBLIC_PRIMARY_KEY = "IXJ47FKMYYJ5U"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@httpretty.activate(verbose=True, allow_net_connect=False)
@pytest.mark.parametrize(
    "data,expected_request_data,request_post_kwargs",
    [
        # JSON body: templated data is parsed and sent via requests' json= kwarg
        (
            '{"value": "{{ alert_group_id }}"}',
            json.dumps({"value": ALERT_GROUP_PUBLIC_PRIMARY_KEY}),
            {"json": {"value": ALERT_GROUP_PUBLIC_PRIMARY_KEY}},
        ),
        # test that non-latin characters are properly encoded
        (
            "😊",
            "b'\\xf0\\x9f\\x98\\x8a'",
            {"data": "😊".encode("utf-8")},
        ),
    ],
)
@pytest.mark.django_db
def test_execute_webhook_ok(
    make_organization,
    make_user_for_organization,
    make_alert_receive_channel,
    make_alert_group,
    make_custom_webhook,
    data,
    expected_request_data,
    request_post_kwargs,
):
    """Happy path: execute_webhook renders templates, POSTs, and records response + log record."""
    organization = make_organization()
    user = make_user_for_organization(organization)
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(
        alert_receive_channel,
        acknowledged_at=timezone.now(),
        acknowledged=True,
        acknowledged_by=user.pk,
        public_primary_key=ALERT_GROUP_PUBLIC_PRIMARY_KEY,
    )
    webhook = make_custom_webhook(
        organization=organization,
        url="https://example.com/{{ alert_group_id }}/",
        http_method="POST",
        trigger_type=Webhook.TRIGGER_ACKNOWLEDGE,
        # template evaluates to True for this alert group's integration
        trigger_template="{{{{ alert_group.integration_id == '{}' }}}}".format(
            alert_receive_channel.public_primary_key
        ),
        headers='{"some-header": "{{ alert_group_id }}"}',
        data=data,
        forward_all=False,
    )

    # register the templated URL with httpretty so the real requests call is intercepted
    templated_url = f"https://example.com/{alert_group.public_primary_key}/"
    mock_response = httpretty.Response(json.dumps({"response": 200}))
    httpretty.register_uri(httpretty.POST, templated_url, responses=[mock_response])

    # gethostbyname is patched so the outgoing-URL safety check passes
    with patch("apps.webhooks.utils.socket.gethostbyname", return_value="8.8.8.8"):
        # wraps=requests: calls go through to the real library (and httpretty) while being recorded
        with patch("apps.webhooks.models.webhook.requests", wraps=requests) as mock_requests:
            execute_webhook(webhook.pk, alert_group.pk, user.pk, None)

    mock_requests.post.assert_called_once_with(
        templated_url,
        timeout=TIMEOUT,
        headers={"some-header": alert_group.public_primary_key},
        **request_post_kwargs,
    )

    # assert the request was made to the webhook as we expected
    last_request = httpretty.last_request()
    assert last_request.method == "POST"
    assert last_request.url == templated_url
    assert last_request.headers["some-header"] == alert_group.public_primary_key

    # check logs (webhook response record)
    log = webhook.responses.all()[0]
    assert log.status_code == 200
    assert log.content == json.dumps({"response": 200})
    assert log.request_data == expected_request_data
    assert log.request_headers == json.dumps({"some-header": alert_group.public_primary_key})
    assert log.url == templated_url
    # check log record on the alert group
    log_record = alert_group.log_records.last()
    assert log_record.type == AlertGroupLogRecord.TYPE_CUSTOM_WEBHOOK_TRIGGERED
    expected_info = {
        "trigger": "acknowledge",
        "webhook_id": webhook.public_primary_key,
        "webhook_name": webhook.name,
    }
    assert log_record.step_specific_info == expected_info
    # not triggered via escalation, so no policy is attached
    assert log_record.escalation_policy is None
    assert log_record.escalation_policy_step is None
    assert log_record.rendered_log_line_action() == f"outgoing webhook `{webhook.name}` triggered by acknowledge"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_execute_webhook_via_escalation_ok(
    make_organization,
    make_user_for_organization,
    make_alert_receive_channel,
    make_alert_group,
    make_custom_webhook,
    make_escalation_chain,
    make_escalation_policy,
):
    """Webhook triggered through an escalation step attaches the escalation policy to the log record."""
    organization = make_organization()
    user = make_user_for_organization(organization)
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(
        alert_receive_channel, acknowledged_at=timezone.now(), acknowledged=True, acknowledged_by=user.pk
    )
    webhook = make_custom_webhook(
        organization=organization,
        url="https://something/{{ alert_group_id }}/",
        http_method="POST",
        trigger_type=Webhook.TRIGGER_ESCALATION_STEP,
        # template evaluates to True for this alert group's integration
        trigger_template="{{{{ alert_group.integration_id == '{}' }}}}".format(
            alert_receive_channel.public_primary_key
        ),
        headers='{"some-header": "{{ alert_group_id }}"}',
        data='{"value": "{{ alert_group_id }}"}',
        forward_all=False,
    )
    # escalation policy pointing at the webhook; its pk is passed to execute_webhook below
    escalation_chain = make_escalation_chain(organization)
    escalation_policy = make_escalation_policy(
        escalation_chain=escalation_chain,
        escalation_policy_step=EscalationPolicy.STEP_TRIGGER_CUSTOM_WEBHOOK,
        custom_webhook=webhook,
    )

    mock_response = MockResponse()
    # patch DNS resolution so the outgoing-URL safety check passes
    with patch("apps.webhooks.utils.socket.gethostbyname") as mock_gethostbyname:
        mock_gethostbyname.return_value = "8.8.8.8"
        with patch("apps.webhooks.models.webhook.requests") as mock_requests:
            mock_requests.post.return_value = mock_response
            execute_webhook(webhook.pk, alert_group.pk, user.pk, escalation_policy.pk)

    assert mock_requests.post.called
    # check log record
    log_record = alert_group.log_records.last()
    assert log_record.type == AlertGroupLogRecord.TYPE_CUSTOM_WEBHOOK_TRIGGERED
    expected_info = {
        "trigger": "escalation",
        "webhook_id": webhook.public_primary_key,
        "webhook_name": webhook.name,
    }
    assert log_record.step_specific_info == expected_info
    # the escalation policy and its step are recorded on the log record
    assert log_record.escalation_policy == escalation_policy
    assert log_record.escalation_policy_step == EscalationPolicy.STEP_TRIGGER_CUSTOM_WEBHOOK
    assert log_record.rendered_log_line_action() == f"outgoing webhook `{webhook.name}` triggered by escalation"
|
2023-03-21 10:43:37 -03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
@pytest.mark.parametrize(
    "webhook_trigger_type",
    [Webhook.TRIGGER_ACKNOWLEDGE, Webhook.TRIGGER_STATUS_CHANGE],
)
def test_execute_webhook_ok_forward_all(
    make_organization,
    make_user_for_organization,
    make_alert_receive_channel,
    make_alert_group,
    make_user_notification_policy_log_record,
    make_custom_webhook,
    webhook_trigger_type,
):
    """forward_all=True sends the full event payload; both direct and status-change webhooks get it."""
    organization = make_organization()
    user = make_user_for_organization(organization)
    notified_user = make_user_for_organization(organization)
    other_user = make_user_for_organization(organization)
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(
        alert_receive_channel,
        acknowledged_at=timezone.now(),
        acknowledged=True,
        acknowledged_by=user.pk,
        acknowledged_by_user=user,
    )
    # several successful notifications for the same user — payload must de-duplicate to one entry
    for _ in range(3):
        make_user_notification_policy_log_record(
            author=notified_user,
            alert_group=alert_group,
            type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_SUCCESS,
        )
    # failed notification: this user must NOT appear in notified_users
    make_user_notification_policy_log_record(
        author=other_user,
        alert_group=alert_group,
        type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,
    )
    webhook = make_custom_webhook(
        organization=organization,
        url="https://something/{{ alert_group_id }}/",
        http_method="POST",
        trigger_type=webhook_trigger_type,
        forward_all=True,
    )

    mock_response = MockResponse()
    # patch DNS resolution so the outgoing-URL safety check passes
    with patch("apps.webhooks.utils.socket.gethostbyname") as mock_gethostbyname:
        mock_gethostbyname.return_value = "8.8.8.8"
        with patch("apps.webhooks.models.webhook.requests") as mock_requests:
            mock_requests.post.return_value = mock_response
            # trigger_type kwarg carries the original event type (acknowledge),
            # regardless of which webhook trigger type is being parametrized
            execute_webhook(webhook.pk, alert_group.pk, user.pk, None, trigger_type=Webhook.TRIGGER_ACKNOWLEDGE)

    assert mock_requests.post.called
    # the full forwarded payload expected with forward_all=True
    expected_data = {
        "event": {
            "type": "acknowledge",
            "time": alert_group.acknowledged_at.isoformat(),
        },
        "user": {
            "id": user.public_primary_key,
            "username": user.username,
            "email": user.email,
        },
        "integration": {
            "id": alert_receive_channel.public_primary_key,
            "type": alert_receive_channel.integration,
            "name": alert_receive_channel.short_name,
            "team": None,
            "labels": {},
        },
        "notified_users": [
            {
                "id": notified_user.public_primary_key,
                "username": notified_user.username,
                "email": notified_user.email,
            }
        ],
        "alert_group": {**IncidentSerializer(alert_group).data, "labels": {}},
        "alert_group_id": alert_group.public_primary_key,
        "alert_payload": "",
        "users_to_be_notified": [],
        "webhook": {
            "id": webhook.public_primary_key,
            "name": webhook.name,
            "labels": {},
        },
        "alert_group_acknowledged_by": {
            "id": user.public_primary_key,
            "username": user.username,
            "email": user.email,
        },
        "alert_group_resolved_by": None,
    }
    expected_call = call(
        "https://something/{}/".format(alert_group.public_primary_key),
        timeout=TIMEOUT,
        headers={},
        json=expected_data,
    )
    assert mock_requests.post.call_args == expected_call
    # check logs (webhook response record)
    log = webhook.responses.all()[0]
    # the response log records the original event trigger type, not the webhook's own
    assert log.trigger_type == Webhook.TRIGGER_ACKNOWLEDGE
    assert log.status_code == 200
    assert log.content == json.dumps(mock_response.json())
    assert json.loads(log.request_data) == expected_data
    assert log.url == "https://something/{}/".format(alert_group.public_primary_key)
|
2023-03-13 18:19:22 -03:00
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_execute_webhook_ok_forward_all_resolved(
    make_organization,
    make_user_for_organization,
    make_alert_receive_channel,
    make_alert_group,
    make_user_notification_policy_log_record,
    make_custom_webhook,
):
    """forward_all payload for a resolved alert group fills both acknowledged_by and resolved_by."""
    organization = make_organization()
    user = make_user_for_organization(organization)
    notified_user = make_user_for_organization(organization)
    other_user = make_user_for_organization(organization)
    alert_receive_channel = make_alert_receive_channel(organization)
    # alert group is both acknowledged and (later) resolved by the same user
    alert_group = make_alert_group(
        alert_receive_channel,
        acknowledged_at=timezone.now(),
        acknowledged=True,
        acknowledged_by=user.pk,
        acknowledged_by_user=user,
        resolved=True,
        resolved_at=timezone.now() + timedelta(hours=2),
        resolved_by=user.pk,
        resolved_by_user=user,
    )
    # several successful notifications for the same user — payload must de-duplicate to one entry
    for _ in range(3):
        make_user_notification_policy_log_record(
            author=notified_user,
            alert_group=alert_group,
            type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_SUCCESS,
        )
    # failed notification: this user must NOT appear in notified_users
    make_user_notification_policy_log_record(
        author=other_user,
        alert_group=alert_group,
        type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,
    )
    webhook = make_custom_webhook(
        organization=organization,
        url="https://something/{{ alert_group_id }}/",
        http_method="POST",
        trigger_type=Webhook.TRIGGER_RESOLVE,
        forward_all=True,
    )

    mock_response = MockResponse()
    # patch DNS resolution so the outgoing-URL safety check passes
    with patch("apps.webhooks.utils.socket.gethostbyname") as mock_gethostbyname:
        mock_gethostbyname.return_value = "8.8.8.8"
        with patch("apps.webhooks.models.webhook.requests") as mock_requests:
            mock_requests.post.return_value = mock_response
            execute_webhook(webhook.pk, alert_group.pk, user.pk, None, trigger_type=Webhook.TRIGGER_RESOLVE)

    assert mock_requests.post.called
    # the full forwarded payload expected with forward_all=True for a resolve event
    expected_data = {
        "event": {
            "type": "resolve",
            "time": alert_group.resolved_at.isoformat(),
        },
        "user": {
            "id": user.public_primary_key,
            "username": user.username,
            "email": user.email,
        },
        "integration": {
            "id": alert_receive_channel.public_primary_key,
            "type": alert_receive_channel.integration,
            "name": alert_receive_channel.short_name,
            "team": None,
            "labels": {},
        },
        "notified_users": [
            {
                "id": notified_user.public_primary_key,
                "username": notified_user.username,
                "email": notified_user.email,
            }
        ],
        "alert_group": {**IncidentSerializer(alert_group).data, "labels": {}},
        "alert_group_id": alert_group.public_primary_key,
        "alert_payload": "",
        "users_to_be_notified": [],
        "webhook": {
            "id": webhook.public_primary_key,
            "name": webhook.name,
            "labels": {},
        },
        "alert_group_acknowledged_by": {
            "id": user.public_primary_key,
            "username": user.username,
            "email": user.email,
        },
        "alert_group_resolved_by": {
            "id": user.public_primary_key,
            "username": user.username,
            "email": user.email,
        },
    }
    expected_call = call(
        "https://something/{}/".format(alert_group.public_primary_key),
        timeout=TIMEOUT,
        headers={},
        json=expected_data,
    )
    assert mock_requests.post.call_args == expected_call
    # check logs (webhook response record)
    log = webhook.responses.all()[0]
    assert log.trigger_type == Webhook.TRIGGER_RESOLVE
    assert log.status_code == 200
    assert log.content == json.dumps(mock_response.json())
    assert json.loads(log.request_data) == expected_data
    assert log.url == "https://something/{}/".format(alert_group.public_primary_key)
|
|
|
|
|
|
|
|
|
|
|
2023-03-21 10:43:37 -03:00
|
|
|
@pytest.mark.django_db
def test_execute_webhook_using_responses_data(
    make_organization,
    make_user_for_organization,
    make_alert_receive_channel,
    make_alert_group,
    make_custom_webhook,
    make_webhook_response,
):
    """Webhook URL/data templates can reference earlier webhook responses via the `responses` context.

    The triggered webhook's URL uses ``responses["response-1"].id`` and its data uses
    ``responses["response-2"].status``; both must be rendered from the stored response
    content of previously-executed webhooks for the same alert group.
    """
    organization = make_organization()
    user = make_user_for_organization(organization)
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(
        alert_receive_channel, acknowledged_at=timezone.now(), acknowledged=True, acknowledged_by=user.pk
    )
    # webhook under test: templates pull values out of prior responses by webhook public key
    webhook = make_custom_webhook(
        organization=organization,
        url='https://something/{{ responses["response-1"].id }}/',
        http_method="POST",
        trigger_type=Webhook.TRIGGER_RESOLVE,
        data='{"value": "{{ responses["response-2"].status }}"}',
        forward_all=False,
    )

    # add previous webhook responses for the related alert group
    make_webhook_response(
        alert_group=alert_group,
        webhook=make_custom_webhook(
            organization=organization,
            public_primary_key="response-1",
        ),
        trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED,
        status_code=200,
        content=json.dumps({"id": "third-party-id"}),
    )
    make_webhook_response(
        alert_group=alert_group,
        webhook=make_custom_webhook(
            organization=organization,
            public_primary_key="response-2",
        ),
        trigger_type=Webhook.TRIGGER_ACKNOWLEDGE,
        status_code=200,
        content=json.dumps({"id": "third-party-id", "status": "updated"}),
    )
    # webhook wasn't executed because of some error, there is no content or status_code;
    # such responses must not break template rendering for the other keys
    make_webhook_response(
        alert_group=alert_group,
        webhook=make_custom_webhook(
            organization=organization,
            public_primary_key="response-3",
        ),
        trigger_type=Webhook.TRIGGER_SILENCE,
        content=None,
        status_code=None,
    )

    mock_response = MockResponse()
    # stub DNS resolution so the URL passes the webhook host validation
    with patch("apps.webhooks.utils.socket.gethostbyname") as mock_gethostbyname:
        mock_gethostbyname.return_value = "8.8.8.8"
        with patch("apps.webhooks.models.webhook.requests") as mock_requests:
            mock_requests.post.return_value = mock_response
            execute_webhook(webhook.pk, alert_group.pk, user.pk, None)

    assert mock_requests.post.called
    # "updated" comes from response-2's stored content; "third-party-id" from response-1's
    expected_data = {"value": "updated"}
    expected_call = call(
        "https://something/third-party-id/",
        timeout=TIMEOUT,
        headers={},
        json=expected_data,
    )
    assert mock_requests.post.call_args == expected_call
    # check logs: the execution is recorded with the rendered URL and request data
    log = webhook.responses.all()[0]
    assert log.status_code == 200
    assert log.content == json.dumps(mock_response.json())
    assert json.loads(log.request_data) == expected_data
    assert log.url == "https://something/third-party-id/"
|
|
|
|
|
|
|
|
|
|
|
2023-03-13 18:19:22 -03:00
|
|
|
@pytest.mark.django_db
def test_execute_webhook_trigger_false(
    make_organization, make_alert_receive_channel, make_alert_group, make_custom_webhook, caplog
):
    """A webhook whose trigger template renders falsy is skipped: no request, no response record."""
    org = make_organization()
    channel = make_alert_receive_channel(org)
    group = make_alert_group(channel, acknowledged_at=timezone.now(), acknowledged=True)
    # the trigger template compares against an integration id that won't match,
    # so the template evaluates to False and the webhook must not fire
    hook = make_custom_webhook(
        organization=org,
        url="https://something/{{ alert_id }}/",
        http_method="POST",
        trigger_type=Webhook.TRIGGER_ACKNOWLEDGE,
        trigger_template="{{ integration_id == 'the-integration' }}",
    )

    with patch("apps.webhooks.models.webhook.requests") as mock_requests:
        execute_webhook(hook.pk, group.pk, None, None)

    # no HTTP request was issued
    assert not mock_requests.post.called
    # no response is created for the webhook
    assert hook.responses.count() == 0
    # the skip reason is logged
    assert f"Webhook {hook.pk} was not triggered: False" in caplog.text
|
2023-03-13 18:19:22 -03:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
@pytest.mark.parametrize(
    "field_name,value,log_field_name,expected_error",
    [
        (
            "url",
            "https://myserver/{{ }}/triggered",
            "url",
            "URL - Template Error: Expected an expression, got 'end of print statement'",
        ),
        (
            "trigger_template",
            "{{ }}",
            "request_trigger",
            "Trigger - Template Error: Expected an expression, got 'end of print statement'",
        ),
        ("headers", '"{{foo|invalid}}"', "request_headers", "Headers - Template Error: No filter named 'invalid'."),
        (
            "data",
            "{{ }}",
            "request_data",
            "Data - Template Error: Expected an expression, got 'end of print statement'",
        ),
    ],
)
def test_execute_webhook_errors(
    make_organization,
    make_alert_receive_channel,
    make_alert_group,
    make_custom_webhook,
    field_name,
    value,
    log_field_name,
    expected_error,
):
    """An invalid Jinja template in any webhook field aborts execution and records the template error.

    For each parametrized field (url / trigger_template / headers / data) the broken
    template must: prevent the HTTP request, store the error message on the matching
    response-log field, and produce an alert group log record with the error reason.
    """
    organization = make_organization()
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel, resolved_at=timezone.now(), resolved=True)

    # inject the broken template into the field under test; ensure a valid URL otherwise
    extra_kwargs = {field_name: value}
    if "url" not in extra_kwargs:
        extra_kwargs["url"] = "https://something.cool/"
    webhook = make_custom_webhook(
        organization=organization,
        http_method="POST",
        trigger_type=Webhook.TRIGGER_RESOLVE,
        forward_all=False,
        **extra_kwargs,
    )

    with patch("apps.webhooks.utils.socket.gethostbyname") as mock_gethostbyname:
        # make it a valid URL when resolving name
        mock_gethostbyname.return_value = "8.8.8.8"
        with patch("apps.webhooks.models.webhook.requests") as mock_requests:
            execute_webhook(webhook.pk, alert_group.pk, None, None)

    # template error -> request never sent
    assert not mock_requests.post.called
    log = webhook.responses.all()[0]
    assert log.status_code is None
    assert log.content is None
    # the error message lands on the field corresponding to the broken template
    error = getattr(log, log_field_name)
    assert error == expected_error
    # check log record
    log_record = alert_group.log_records.last()
    assert log_record.type == AlertGroupLogRecord.ERROR_ESCALATION_TRIGGER_CUSTOM_WEBHOOK_ERROR
    expected_info = {
        "trigger": "resolve",
        "webhook_id": webhook.public_primary_key,
        "webhook_name": webhook.name,
    }
    assert log_record.step_specific_info == expected_info
    assert log_record.reason == expected_error
    assert (
        log_record.rendered_log_line_action() == f"skipped resolve outgoing webhook `{webhook.name}`: {expected_error}"
    )
|
2023-04-26 15:55:08 -06:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.django_db
def test_response_content_limit(
    make_organization, make_user_for_organization, make_alert_receive_channel, make_alert_group, make_custom_webhook
):
    """Oversized response bodies are not stored verbatim; the log records a size-limit message instead."""
    org = make_organization()
    user = make_user_for_organization(org)
    channel = make_alert_receive_channel(org)
    group = make_alert_group(channel, acknowledged_at=timezone.now(), acknowledged=True, acknowledged_by=user.pk)
    hook = make_custom_webhook(
        organization=org,
        url="https://test/",
        http_method="POST",
        trigger_type=Webhook.TRIGGER_ACKNOWLEDGE,
        forward_all=False,
    )

    # response body larger than WEBHOOK_RESPONSE_LIMIT
    content_length = 100000
    big_response = MockResponse(content="A" * content_length)
    with patch("apps.webhooks.utils.socket.gethostbyname", return_value="8.8.8.8"):
        with patch("apps.webhooks.models.webhook.requests") as mock_requests:
            mock_requests.post.return_value = big_response
            execute_webhook(hook.pk, group.pk, user.pk, None)

    assert mock_requests.post.called
    assert mock_requests.post.call_args == call(
        "https://test/",
        timeout=TIMEOUT,
        headers={},
    )
    # the stored log keeps the status but replaces the oversized content with a notice
    log = hook.responses.all()[0]
    assert log.status_code == 200
    assert log.content == f"Response content {content_length} exceeds {WEBHOOK_RESPONSE_LIMIT} character limit"
    assert log.url == "https://test/"
|
2024-01-08 19:13:15 -05:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("apps.webhooks.tasks.trigger_webhook.execute_webhook", wraps=execute_webhook)
@patch("apps.webhooks.models.webhook.requests")
@patch("apps.webhooks.utils.socket.gethostbyname", return_value="8.8.8.8")
@pytest.mark.django_db
@pytest.mark.parametrize("exception", [requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout])
def test_manually_retried_exceptions(
    _mock_gethostbyname,
    mock_requests,
    spy_execute_webhook,
    make_organization,
    make_user_for_organization,
    make_alert_receive_channel,
    make_alert_group,
    make_custom_webhook,
    exception,
):
    """Connect/read timeouts trigger a manual re-enqueue of execute_webhook, capped at 3 attempts.

    First call: the task schedules itself again via ``apply_async`` with
    ``manual_retry_num`` incremented and a 10s countdown. At ``manual_retry_num=3``
    the task gives up silently — it neither raises nor schedules another retry.
    """
    # every outgoing POST raises the parametrized timeout exception
    mock_requests.post.side_effect = exception("foo bar")

    organization = make_organization()
    user = make_user_for_organization(organization)
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(
        alert_receive_channel, acknowledged_at=timezone.now(), acknowledged=True, acknowledged_by=user.pk
    )
    webhook = make_custom_webhook(
        organization=organization,
        url="https://test/",
        http_method="POST",
        trigger_type=Webhook.TRIGGER_ACKNOWLEDGE,
        forward_all=False,
    )

    execute_webhook_args = webhook.pk, alert_group.pk, user.pk, None

    # should retry
    execute_webhook(*execute_webhook_args)

    mock_requests.post.assert_called_once_with("https://test/", timeout=TIMEOUT, headers={})
    spy_execute_webhook.apply_async.assert_called_once_with(
        execute_webhook_args, kwargs={"trigger_type": None, "manual_retry_num": 1}, countdown=10
    )

    mock_requests.reset_mock()
    spy_execute_webhook.reset_mock()

    # should stop retrying after 3 attempts without raising issue
    try:
        execute_webhook(*execute_webhook_args, manual_retry_num=3)
    except Exception as exc:
        # fail with context so the unexpected exception is visible in the test report
        # (a bare pytest.fail() would hide what was actually raised)
        pytest.fail(f"execute_webhook should not raise after exhausting manual retries, got: {exc!r}")

    mock_requests.post.assert_called_once_with("https://test/", timeout=TIMEOUT, headers={})
    spy_execute_webhook.apply_async.assert_not_called()
|
2024-03-20 10:54:27 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
@patch("apps.webhooks.models.webhook.requests.post", return_value=MockResponse())
@patch("apps.webhooks.utils.socket.gethostbyname", return_value="8.8.8.8")
@pytest.mark.django_db
def test_execute_webhook_integration_config(
    # NOTE: patch decorators inject mocks bottom-up: first arg is the gethostbyname
    # mock (unused), second is the requests.post mock
    _,
    mock_requests_post,
    make_organization,
    make_user_for_organization,
    make_alert_receive_channel,
    make_alert_receive_channel_connection,
    make_alert_group,
    make_user_notification_policy_log_record,
    make_custom_webhook,
):
    """Webhooks from a connected integration use that integration's config hooks.

    Verifies that for a webhook marked ``is_from_connected_integration``:
    the ``external_id`` template variable resolves from AlertGroupExternalID,
    ``additional_webhook_data`` from the source channel's config is merged into the
    payload, and ``on_webhook_response_created`` is invoked with the response log.
    """
    organization = make_organization()
    user = make_user_for_organization(organization)

    # create connected integrations
    source_alert_receive_channel = make_alert_receive_channel(
        organization, additional_settings={"must_be": "non_empty"}
    )  # TODO: revisit this
    alert_receive_channel = make_alert_receive_channel(organization)
    make_alert_receive_channel_connection(source_alert_receive_channel, alert_receive_channel)

    alert_group = make_alert_group(alert_receive_channel)
    webhook = make_custom_webhook(
        organization=organization,
        url="https://something/{{ external_id }}",
        http_method="POST",
        trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED,
        forward_all=True,
        is_from_connected_integration=True,
    )
    webhook.filtered_integrations.set([source_alert_receive_channel, alert_receive_channel])

    # create external ID entry
    AlertGroupExternalID.objects.create(
        source_alert_receive_channel=source_alert_receive_channel, alert_group=alert_group, value="test123"
    )

    # patch the integration-config hooks (create=True since the attributes may not
    # exist on the config class by default)
    with patch.object(
        source_alert_receive_channel.config,
        "additional_webhook_data",
        create=True,
        return_value={"additional_field": "additional_value"},
    ) as mock_additional_webhook_data:
        with patch.object(
            source_alert_receive_channel.config, "on_webhook_response_created", create=True
        ) as mock_on_webhook_response_created:
            execute_webhook(webhook.pk, alert_group.pk, user.pk, None, trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED)

    assert mock_requests_post.called

    # check external ID
    assert mock_requests_post.call_args[0][0] == "https://something/test123"
    assert mock_requests_post.call_args[1]["json"]["external_id"] == "test123"

    # check additional webhook data
    assert mock_requests_post.call_args[1]["json"]["additional_field"] == "additional_value"
    mock_additional_webhook_data.assert_called_once_with(source_alert_receive_channel)

    # check on_webhook_response_created is called
    mock_on_webhook_response_created.assert_called_once_with(webhook.responses.all()[0], source_alert_receive_channel)
|