Merge pull request #5376 from grafana/dev

dev -> main
This commit is contained in:
Joey Orlando 2024-12-18 17:17:46 -05:00 committed by GitHub
commit d2859b5f41
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
14 changed files with 536 additions and 190 deletions

View file

@ -301,16 +301,21 @@ class AlertReceiveChannel(IntegrationOptionsMixin, MaintainableObject):
rate_limited_in_slack_at = models.DateTimeField(null=True, default=None)
rate_limit_message_task_id = models.CharField(max_length=100, null=True, default=None)
AlertGroupCustomLabelsDB = list[tuple[str, str | None, str | None]] | None
alert_group_labels_custom: AlertGroupCustomLabelsDB = models.JSONField(null=True, default=None)
DynamicLabelsEntryDB = tuple[str, str | None, str | None]
DynamicLabelsConfigDB = list[DynamicLabelsEntryDB] | None
alert_group_labels_custom: DynamicLabelsConfigDB = models.JSONField(null=True, default=None)
"""
Stores "custom labels" for alert group labels. Custom labels can be either "plain" or "templated".
For plain labels, the format is: [<LABEL_KEY_ID>, <LABEL_VALUE_ID>, None]
For templated labels, the format is: [<LABEL_KEY_ID>, None, <JINJA2_TEMPLATE>]
alert_group_labels_custom stores config of dynamic labels. It's stored as a list of tuples.
Format of tuple is: [<LABEL_KEY_ID>, None, <JINJA2_TEMPLATE>].
The second element is deprecated, so it's always None. It was used for static labels.
# TODO: refactor to use just regular DB fields for dynamic label config.
"""
alert_group_labels_template: str | None = models.TextField(null=True, default=None)
"""Stores a Jinja2 template for "advanced label templating" for alert group labels."""
"""
alert_group_labels_template is a Jinja2 template for "multi-label extraction template".
It extracts multiple labels from incoming alert payload.
"""
additional_settings: dict | None = models.JSONField(null=True, default=None)

View file

@ -33,6 +33,7 @@ def _additional_settings_serializer_from_type(integration_type: str) -> serializ
return cls
# TODO: refactor these types as we no longer support storing static labels in this field.
# AlertGroupCustomLabelValue represents custom alert group label value for API requests
# It handles two types of label value:
# 1. Just Label Value from a label repo for a static label
@ -206,7 +207,7 @@ class IntegrationAlertGroupLabelsSerializer(serializers.Serializer):
@staticmethod
def _custom_labels_to_internal_value(
custom_labels: AlertGroupCustomLabelsAPI,
) -> AlertReceiveChannel.AlertGroupCustomLabelsDB:
) -> AlertReceiveChannel.DynamicLabelsConfigDB:
"""Convert custom labels from API representation to the schema used by the JSONField on the model."""
return [
@ -216,7 +217,7 @@ class IntegrationAlertGroupLabelsSerializer(serializers.Serializer):
@staticmethod
def _custom_labels_to_representation(
custom_labels: AlertReceiveChannel.AlertGroupCustomLabelsDB,
custom_labels: AlertReceiveChannel.DynamicLabelsConfigDB,
) -> AlertGroupCustomLabelsAPI:
"""
Inverse of the _custom_labels_to_internal_value method above.

View file

@ -1486,14 +1486,14 @@ def test_alert_receive_channel_contact_points_wrong_integration(
def test_integration_filter_by_labels(
make_organization_and_user_with_plugin_token,
make_alert_receive_channel,
make_integration_label_association,
make_static_label_config,
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
alert_receive_channel_1 = make_alert_receive_channel(organization)
alert_receive_channel_2 = make_alert_receive_channel(organization)
associated_label_1 = make_integration_label_association(organization, alert_receive_channel_1)
associated_label_2 = make_integration_label_association(organization, alert_receive_channel_1)
associated_label_1 = make_static_label_config(organization, alert_receive_channel_1)
associated_label_2 = make_static_label_config(organization, alert_receive_channel_1)
alert_receive_channel_2.labels.create(
key=associated_label_1.key, value=associated_label_1.value, organization=organization
)
@ -1659,7 +1659,7 @@ def test_alert_group_labels_get(
make_organization_and_user_with_plugin_token,
make_alert_receive_channel,
make_label_key_and_value,
make_integration_label_association,
make_static_label_config,
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
@ -1674,7 +1674,7 @@ def test_alert_group_labels_get(
assert response.status_code == status.HTTP_200_OK
assert response.json()["alert_group_labels"] == {"inheritable": {}, "custom": [], "template": None}
label = make_integration_label_association(organization, alert_receive_channel)
label = make_static_label_config(organization, alert_receive_channel)
template = "{{ payload.labels | tojson }}"
alert_receive_channel.alert_group_labels_template = template
@ -1707,14 +1707,14 @@ def test_alert_group_labels_get(
def test_alert_group_labels_put(
make_organization_and_user_with_plugin_token,
make_alert_receive_channel,
make_integration_label_association,
make_static_label_config,
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
alert_receive_channel = make_alert_receive_channel(organization)
label_1 = make_integration_label_association(organization, alert_receive_channel)
label_2 = make_integration_label_association(organization, alert_receive_channel)
label_3 = make_integration_label_association(organization, alert_receive_channel)
label_1 = make_static_label_config(organization, alert_receive_channel)
label_2 = make_static_label_config(organization, alert_receive_channel)
label_3 = make_static_label_config(organization, alert_receive_channel)
custom = [
# plain label

View file

@ -7,6 +7,8 @@ from anymail.exceptions import AnymailAPIError, AnymailInvalidAddress, AnymailWe
from anymail.inbound import AnymailInboundMessage
from anymail.signals import AnymailInboundEvent
from anymail.webhooks import amazon_ses, mailgun, mailjet, mandrill, postal, postmark, sendgrid, sparkpost
from bs4 import BeautifulSoup
from django.conf import settings
from django.http import HttpResponse, HttpResponseNotAllowed
from django.utils import timezone
from rest_framework import status
@ -25,6 +27,15 @@ class AmazonSESValidatedInboundWebhookView(amazon_ses.AmazonSESInboundWebhookVie
# disable "Your Anymail webhooks are insecure and open to anyone on the web." warning
warn_if_no_basic_auth = False
def __init__(self):
super().__init__(
session_params={
"aws_access_key_id": settings.INBOUND_EMAIL_AWS_ACCESS_KEY_ID,
"aws_secret_access_key": settings.INBOUND_EMAIL_AWS_SECRET_ACCESS_KEY,
"region_name": settings.INBOUND_EMAIL_AWS_REGION,
},
)
def validate_request(self, request):
"""Add SNS message validation to Amazon SES inbound webhook view, which is not implemented in Anymail."""
if not validate_amazon_sns_message(self._parse_sns_message(request)):
@ -74,11 +85,10 @@ class InboundEmailWebhookView(AlertChannelDefiningMixin, APIView):
if request.method.lower() == "head":
return HttpResponse(status=status.HTTP_200_OK)
integration_token = self.get_integration_token_from_request(request)
if integration_token is None:
if self.integration_token is None:
return HttpResponse(status=status.HTTP_400_BAD_REQUEST)
request.inbound_email_integration_token = integration_token # used in RequestTimeLoggingMiddleware
return super().dispatch(request, alert_channel_key=integration_token)
request.inbound_email_integration_token = self.integration_token # used in RequestTimeLoggingMiddleware
return super().dispatch(request, alert_channel_key=self.integration_token)
def post(self, request):
payload = self.get_alert_payload_from_email_message(self.message)
@ -94,7 +104,8 @@ class InboundEmailWebhookView(AlertChannelDefiningMixin, APIView):
)
return Response("OK", status=status.HTTP_200_OK)
def get_integration_token_from_request(self, request) -> Optional[str]:
@cached_property
def integration_token(self) -> Optional[str]:
if not self.message:
return None
# First try envelope_recipient field.
@ -151,7 +162,8 @@ class InboundEmailWebhookView(AlertChannelDefiningMixin, APIView):
logger.error("Failed to parse inbound email message")
return None
def check_inbound_email_settings_set(self):
@staticmethod
def check_inbound_email_settings_set():
"""
Guard method to check if INBOUND_EMAIL settings are present.
Returns InternalServerError if not.
@ -167,16 +179,105 @@ class InboundEmailWebhookView(AlertChannelDefiningMixin, APIView):
logger.error("InboundEmailWebhookView: INBOUND_EMAIL_DOMAIN env variable must be set.")
return HttpResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR)
def get_alert_payload_from_email_message(self, email: AnymailInboundMessage) -> EmailAlertPayload:
subject = email.subject or ""
subject = subject.strip()
message = email.text or ""
message = message.strip()
sender = self.get_sender_from_email_message(email)
@classmethod
def get_alert_payload_from_email_message(cls, email: AnymailInboundMessage) -> EmailAlertPayload:
if email.text:
message = email.text.strip()
elif email.html:
message = cls.html_to_plaintext(email.html)
else:
message = ""
return {"subject": subject, "message": message, "sender": sender}
return {
"subject": email.subject.strip() if email.subject else "",
"message": message,
"sender": cls.get_sender_from_email_message(email),
}
def get_sender_from_email_message(self, email: AnymailInboundMessage) -> str:
@staticmethod
def html_to_plaintext(html: str) -> str:
"""
Converts HTML to plain text. Renders links as "text (href)" and removes any empty lines.
Converting HTML to plaintext is a non-trivial task, so this method may not work perfectly for all cases.
"""
soup = BeautifulSoup(html, "html.parser")
# Browsers typically render these elements on their own line.
# There is no single official HTML5 list for this, so we go with HTML tags that render as
# display: block, display: list-item, display: table, display: table-row by default according to the HTML standard:
# https://html.spec.whatwg.org/multipage/rendering.html
newline_tags = [
"address",
"article",
"aside",
"blockquote",
"body",
"center",
"dd",
"details",
"dialog",
"dir",
"div",
"dl",
"dt",
"fieldset",
"figcaption",
"figure",
"footer",
"form",
"h1",
"h2",
"h3",
"h4",
"h5",
"h6",
"header",
"hgroup",
"hr",
"html",
"legend",
"li",
"listing",
"main",
"menu",
"nav",
"ol",
"p",
"plaintext",
"pre",
"search",
"section",
"summary",
"table",
"tr",
"ul",
"xmp",
]
# Insert a newline after each block-level element
for tag in soup.find_all(newline_tags):
tag.insert_before("\n")
tag.insert_after("\n")
# <br> tags are also typically rendered as newlines
for br in soup.find_all("br"):
br.replace_with("\n")
# example: "<a href="https://example.com">example</a>" -> "example (https://example.com)"
for a in soup.find_all("a"):
if href := a.get("href"):
a.append(f" ({href})")
for li in soup.find_all("li"):
li.insert_before("* ")
for hr in soup.find_all("hr"):
hr.replace_with("-" * 32)
# remove empty lines
return "\n".join(line.strip() for line in soup.get_text().splitlines() if line.strip())
@staticmethod
def get_sender_from_email_message(email: AnymailInboundMessage) -> str:
try:
if isinstance(email.from_email, list):
sender = email.from_email[0].addr_spec

View file

@ -6,6 +6,7 @@ from base64 import b64encode
from textwrap import dedent
from unittest.mock import ANY, Mock, patch
import httpretty
import pytest
from anymail.inbound import AnymailInboundMessage
from cryptography import x509
@ -54,13 +55,14 @@ SUBJECT = "Test email"
MESSAGE = "This is a test email message body."
def _sns_inbound_email_payload_and_headers(sender_email, to_email, subject, message):
def _sns_inbound_email_setup(sender_email, to_email, subject, message, content_type="text/plain", s3=False):
content = (
f"From: Sender Name <{sender_email}>\n"
f"To: {to_email}\n"
f"Subject: {subject}\n"
"Date: Tue, 5 Nov 2024 16:05:39 +0000\n"
"Message-ID: <example-message-id@mail.example.com>\n\n"
"Message-ID: <example-message-id@mail.example.com>\n"
f"Content-Type: {content_type}\n\n"
f"{message}\r\n"
)
@ -130,7 +132,7 @@ def _sns_inbound_email_payload_and_headers(sender_email, to_email, subject, mess
{"name": "To", "value": to_email},
{
"name": "Content-Type",
"value": 'multipart/alternative; boundary="00000000000036b9f706262c9312"',
"value": f"{content_type}",
},
],
"commonHeaders": {
@ -152,12 +154,12 @@ def _sns_inbound_email_payload_and_headers(sender_email, to_email, subject, mess
"dkimVerdict": {"status": "PASS"},
"dmarcVerdict": {"status": "PASS"},
"action": {
"type": "SNS",
"type": "S3" if s3 else "SNS",
"topicArn": "arn:aws:sns:us-east-2:123456789012:test",
"encoding": "BASE64",
**({"bucketName": "test-s3-bucket", "objectKey": "test-object-key"} if s3 else {"encoding": "BASE64"}),
},
},
"content": b64encode(content.encode()).decode(),
**({} if s3 else {"content": b64encode(content.encode()).decode()}),
}
payload = {
@ -189,7 +191,7 @@ def _sns_inbound_email_payload_and_headers(sender_email, to_email, subject, mess
"X-Amz-Sns-Message-Type": "Notification",
"X-Amz-Sns-Message-Id": "example-message-id-1234",
}
return payload, headers
return payload, headers, content
def _mailgun_inbound_email_payload(sender_email, to_email, subject, message):
@ -444,7 +446,7 @@ def test_amazon_ses_pass(create_alert_mock, settings, make_organization, make_al
token="test-token",
)
sns_payload, sns_headers = _sns_inbound_email_payload_and_headers(
sns_payload, sns_headers, _ = _sns_inbound_email_setup(
sender_email=SENDER_EMAIL,
to_email=TO_EMAIL,
subject=SUBJECT,
@ -476,16 +478,17 @@ def test_amazon_ses_pass(create_alert_mock, settings, make_organization, make_al
)
@patch("requests.get", return_value=Mock(content=CERTIFICATE))
@patch.object(create_alert, "delay")
@httpretty.activate(verbose=True, allow_net_connect=True)
@pytest.mark.django_db
def test_amazon_ses_validated_pass(
mock_create_alert, mock_requests_get, settings, make_organization, make_alert_receive_channel
):
def test_amazon_ses_validated_s3_pass(mock_create_alert, settings, make_organization, make_alert_receive_channel):
settings.INBOUND_EMAIL_ESP = "amazon_ses_validated,mailgun"
settings.INBOUND_EMAIL_DOMAIN = "inbound.example.com"
settings.INBOUND_EMAIL_WEBHOOK_SECRET = "secret"
settings.INBOUND_EMAIL_AMAZON_SNS_TOPIC_ARN = AMAZON_SNS_TOPIC_ARN
settings.INBOUND_EMAIL_AWS_ACCESS_KEY_ID = "test-access-key-id"
settings.INBOUND_EMAIL_AWS_SECRET_ACCESS_KEY = "test-secret-access-key"
settings.INBOUND_EMAIL_AWS_REGION = "us-east-2"
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(
@ -494,11 +497,24 @@ def test_amazon_ses_validated_pass(
token="test-token",
)
sns_payload, sns_headers = _sns_inbound_email_payload_and_headers(
sns_payload, sns_headers, content = _sns_inbound_email_setup(
sender_email=SENDER_EMAIL,
to_email=TO_EMAIL,
subject=SUBJECT,
message=MESSAGE,
s3=True,
)
httpretty.register_uri(httpretty.GET, SIGNING_CERT_URL, body=CERTIFICATE)
httpretty.register_uri(
httpretty.HEAD,
"https://test-s3-bucket.s3.us-east-2.amazonaws.com/test-object-key",
responses=[httpretty.Response(body="")],
)
httpretty.register_uri(
httpretty.GET,
"https://test-s3-bucket.s3.us-east-2.amazonaws.com/test-object-key",
responses=[httpretty.Response(body=content)],
)
client = APIClient()
@ -525,6 +541,100 @@ def test_amazon_ses_validated_pass(
received_at=ANY,
)
assert len(httpretty.latest_requests()) == 3
assert (httpretty.latest_requests()[0].method, httpretty.latest_requests()[0].path) == (
"GET",
"/SimpleNotificationService-example.pem",
)
assert (httpretty.latest_requests()[1].method, httpretty.latest_requests()[1].path) == ("HEAD", "/test-object-key")
assert (httpretty.latest_requests()[2].method, httpretty.latest_requests()[2].path) == ("GET", "/test-object-key")
@patch("requests.get", return_value=Mock(content=CERTIFICATE))
@patch.object(create_alert, "delay")
@pytest.mark.django_db
def test_amazon_ses_validated_pass_html(
mock_create_alert, mock_requests_get, settings, make_organization, make_alert_receive_channel
):
settings.INBOUND_EMAIL_ESP = "amazon_ses_validated,mailgun"
settings.INBOUND_EMAIL_DOMAIN = "inbound.example.com"
settings.INBOUND_EMAIL_WEBHOOK_SECRET = "secret"
settings.INBOUND_EMAIL_AMAZON_SNS_TOPIC_ARN = AMAZON_SNS_TOPIC_ARN
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(
organization,
integration=AlertReceiveChannel.INTEGRATION_INBOUND_EMAIL,
token="test-token",
)
html_message = """\
<html>
<title>title</title>
<body>
<div>
<h1>h1</h1>
<br><br><br>
<p>p<b>b</b><i>i</i> <span>span</span></p> <p>new line</p> <hr>
<a href="https://example.com">link</a>
<ul>
<li>li1</li>
<li>li2</li>
</ul>
<table>
<tr>
<td>td1</td>
<td>td2</td>
</tr>
</table>
</div>
</body>
</html>
"""
plaintext_message = (
"title\n"
"h1\n"
"pbi span\n"
"new line\n"
"--------------------------------\n"
"link (https://example.com)\n"
"* li1\n"
"* li2\n"
"td1\n"
"td2"
)
sns_payload, sns_headers, _ = _sns_inbound_email_setup(
sender_email=SENDER_EMAIL,
to_email=TO_EMAIL,
subject=SUBJECT,
message=html_message,
content_type="text/html",
)
client = APIClient()
response = client.post(
reverse("integrations:inbound_email_webhook"),
data=sns_payload,
headers=sns_headers,
format="json",
)
assert response.status_code == status.HTTP_200_OK
mock_create_alert.assert_called_once_with(
title=SUBJECT,
message=plaintext_message,
alert_receive_channel_pk=alert_receive_channel.pk,
image_url=None,
link_to_upstream_details=None,
integration_unique_data=None,
raw_request_data={
"subject": SUBJECT,
"message": plaintext_message,
"sender": SENDER_EMAIL,
},
received_at=ANY,
)
mock_requests_get.assert_called_once_with(SIGNING_CERT_URL, timeout=5)
@ -546,7 +656,7 @@ def test_amazon_ses_validated_fail_wrong_sns_topic_arn(
token="test-token",
)
sns_payload, sns_headers = _sns_inbound_email_payload_and_headers(
sns_payload, sns_headers, _ = _sns_inbound_email_setup(
sender_email=SENDER_EMAIL,
to_email=TO_EMAIL,
subject=SUBJECT,
@ -584,7 +694,7 @@ def test_amazon_ses_validated_fail_wrong_signature(
token="test-token",
)
sns_payload, sns_headers = _sns_inbound_email_payload_and_headers(
sns_payload, sns_headers, _ = _sns_inbound_email_setup(
sender_email=SENDER_EMAIL,
to_email=TO_EMAIL,
subject=SUBJECT,
@ -622,7 +732,7 @@ def test_amazon_ses_validated_fail_cant_download_certificate(
token="test-token",
)
sns_payload, sns_headers = _sns_inbound_email_payload_and_headers(
sns_payload, sns_headers, _ = _sns_inbound_email_setup(
sender_email=SENDER_EMAIL,
to_email=TO_EMAIL,
subject=SUBJECT,
@ -656,7 +766,7 @@ def test_amazon_ses_validated_caches_certificate(
token="test-token",
)
sns_payload, sns_headers = _sns_inbound_email_payload_and_headers(
sns_payload, sns_headers, _ = _sns_inbound_email_setup(
sender_email=SENDER_EMAIL,
to_email=TO_EMAIL,
subject=SUBJECT,

View file

@ -10,10 +10,8 @@ from common.jinja_templater.apply_jinja_template import JinjaTemplateError, Jinj
if typing.TYPE_CHECKING:
from apps.alerts.models import Alert, AlertGroup, AlertReceiveChannel
logger = logging.getLogger(__name__)
# What can be used as a label key/value coming out from the template
LABEL_VALUE_TYPES = (str, int, float, bool)
@ -27,16 +25,14 @@ def gather_labels_from_alert_receive_channel_and_raw_request_data(
if not is_labels_feature_enabled(alert_receive_channel.organization):
return None
# inherit labels from the integration
# apply static labels by inheriting labels from the integration
labels = {
label.key.name: label.value.name for label in alert_receive_channel.labels.all().select_related("key", "value")
}
# apply custom labels
labels.update(_custom_labels(alert_receive_channel, raw_request_data))
labels.update(_apply_dynamic_labels(alert_receive_channel, raw_request_data))
# apply template labels
labels.update(_template_labels(alert_receive_channel, raw_request_data))
labels.update(_apply_multi_label_extraction_template(alert_receive_channel, raw_request_data))
return labels
@ -75,10 +71,10 @@ def assign_labels(
AlertGroupAssociatedLabel.objects.bulk_create(alert_group_labels)
def _custom_labels(
def _apply_dynamic_labels(
alert_receive_channel: "AlertReceiveChannel", raw_request_data: "Alert.RawRequestData"
) -> types.AlertLabels:
from apps.labels.models import MAX_VALUE_NAME_LENGTH, LabelKeyCache, LabelValueCache
from apps.labels.models import LabelKeyCache
if alert_receive_channel.alert_group_labels_custom is None:
return {}
@ -91,91 +87,79 @@ def _custom_labels(
).only("id", "name")
}
# fetch up-to-date label value names
label_value_names = {
v.id: v.name
for v in LabelValueCache.objects.filter(
id__in=[label[1] for label in alert_receive_channel.alert_group_labels_custom if label[1]]
).only("id", "name")
}
rendered_labels = {}
result_labels = {}
for label in alert_receive_channel.alert_group_labels_custom:
key_id, value_id, template = label
label = _apply_dynamic_label_entry(label, label_key_names, raw_request_data)
if label:
key, value = label
result_labels[key] = value
if key_id in label_key_names:
key = label_key_names[key_id]
else:
logger.warning("Label key cache not found. %s", key_id)
continue
if value_id:
if value_id in label_value_names:
rendered_labels[key] = label_value_names[value_id]
else:
logger.warning("Label value cache not found. %s", value_id)
continue
else:
try:
rendered_labels[key] = apply_jinja_template(template, raw_request_data)
except (JinjaTemplateError, JinjaTemplateWarning) as e:
logger.warning("Failed to apply template. %s", e.fallback_message)
continue
labels = {}
for key in rendered_labels:
value = rendered_labels[key]
# check value length
if len(value) == 0:
logger.warning("Template result value is empty. %s", value)
continue
if len(value) > MAX_VALUE_NAME_LENGTH:
logger.warning("Template result value is too long. %s", value)
continue
labels[key] = value
return labels
return result_labels
def _template_labels(
def _apply_dynamic_label_entry(
label: "AlertReceiveChannel.DynamicLabelsEntryDB", keys: dict, payload: "Alert.RawRequestData"
) -> typing.Optional[tuple[str, str]]:
key_id, value_id, template = label
key, value = "", ""
# check if key exists
if key_id in keys:
key = keys[key_id]
else:
logger.warning("Label key cache not found. %s", key_id)
return None
if value_id:
# if value_id is present - it's a static k-v pair. Deprecated.
logger.warning(
"value_id is present in dynamic label entry. It's deprecated & should not be there. %s", value_id
)
elif template:
# otherwise, it's a key-template pair, applying template
try:
value = apply_jinja_template(template, payload)
except (JinjaTemplateError, JinjaTemplateWarning) as e:
logger.warning("Failed to apply template. %s", e.fallback_message)
return None
if not _validate_templated_value(value):
return None
else:
logger.warning("Label value is neither a value_id, nor a template. %s", key)
return key, value
def _apply_multi_label_extraction_template(
alert_receive_channel: "AlertReceiveChannel", raw_request_data: "Alert.RawRequestData"
) -> types.AlertLabels:
from apps.labels.models import MAX_KEY_NAME_LENGTH, MAX_VALUE_NAME_LENGTH
from apps.labels.models import MAX_KEY_NAME_LENGTH
if not alert_receive_channel.alert_group_labels_template:
return {}
# render template - output will be a string.
# It's expected that it will be a JSON string, to be parsed into a dict.
try:
rendered = apply_jinja_template(alert_receive_channel.alert_group_labels_template, raw_request_data)
rendered_labels = apply_jinja_template(alert_receive_channel.alert_group_labels_template, raw_request_data)
except (JinjaTemplateError, JinjaTemplateWarning) as e:
logger.warning("Failed to apply template. %s", e.fallback_message)
return {}
# unmarshal rendered_labels JSON string to dict
try:
rendered_labels = json.loads(rendered)
labels_dict = json.loads(rendered_labels)
except (TypeError, json.JSONDecodeError):
logger.warning("Failed to parse template result. %s", rendered)
# it's expected, if user misconfigured the template
logger.warning("Failed to parse template result. %s", rendered_labels)
return {}
if not isinstance(rendered_labels, dict):
logger.warning("Template result is not a dict. %s", rendered_labels)
if not isinstance(labels_dict, dict):
logger.warning("Template result is not a dict. %s", labels_dict)
return {}
labels = {}
for key in rendered_labels:
value = rendered_labels[key]
# check value type
if not isinstance(value, LABEL_VALUE_TYPES):
logger.warning("Template result value has invalid type. %s", value)
continue
# convert value to string
value = str(value)
# validate dict of labels, drop invalid keys & values, convert all values to strings
result_labels = {}
for key in labels_dict:
# check key length
if len(key) == 0:
logger.warning("Template result key is empty. %s", key)
@ -185,15 +169,36 @@ def _template_labels(
logger.warning("Template result key is too long. %s", key)
continue
# check value length
if len(value) == 0:
logger.warning("Template result value is empty. %s", value)
# Checks specific to multi-label extraction template, because we're receiving value from a JSON:
# 1. check type
# 2. convert back to string
if not isinstance(labels_dict[key], LABEL_VALUE_TYPES):
logger.warning("Templated value has invalid type. %s", labels_dict[key])
continue
value = str(labels_dict[key])
# apply common value checks
if not _validate_templated_value(value):
continue
if len(value) > MAX_VALUE_NAME_LENGTH:
logger.warning("Template result value is too long. %s", value)
continue
result_labels[key] = labels_dict[key]
labels[key] = value
return result_labels
return labels
def _validate_templated_value(value: str) -> bool:
from apps.labels.models import MAX_VALUE_NAME_LENGTH
# check value length
if len(value) == 0:
logger.warning("Templated value value is empty. %s", value)
return False
if len(value) > MAX_VALUE_NAME_LENGTH:
logger.warning("Templated value is too long. %s", value)
return False
if value.lower().strip() == "none":
logger.warning("Templated value is None. %s", value)
return False
return True

View file

@ -3,7 +3,7 @@ from unittest import mock
import pytest
from apps.alerts.models import Alert
from apps.labels.models import MAX_KEY_NAME_LENGTH, MAX_VALUE_NAME_LENGTH
from apps.labels.models import MAX_KEY_NAME_LENGTH, MAX_VALUE_NAME_LENGTH, LabelKeyCache, LabelValueCache
TOO_LONG_KEY_NAME = "k" * (MAX_KEY_NAME_LENGTH + 1)
TOO_LONG_VALUE_NAME = "v" * (MAX_VALUE_NAME_LENGTH + 1)
@ -12,11 +12,11 @@ TOO_LONG_VALUE_NAME = "v" * (MAX_VALUE_NAME_LENGTH + 1)
@mock.patch("apps.labels.alert_group_labels.is_labels_feature_enabled", return_value=False)
@pytest.mark.django_db
def test_assign_labels_feature_flag_disabled(
_, make_organization, make_alert_receive_channel, make_integration_label_association
_, make_organization, make_alert_receive_channel, make_static_label_config
):
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(organization)
make_integration_label_association(organization, alert_receive_channel)
make_static_label_config(organization, alert_receive_channel)
alert = Alert.create(
title="the title",
@ -32,36 +32,20 @@ def test_assign_labels_feature_flag_disabled(
@pytest.mark.django_db
def test_assign_labels(
def test_multi_label_extraction_template(
make_organization,
make_alert_receive_channel,
make_label_key_and_value,
make_label_key,
make_integration_label_association,
make_static_label_config,
):
organization = make_organization()
# create label repo labels
label_key, label_value = make_label_key_and_value(organization, key_name="a", value_name="b")
label_key_1 = make_label_key(organization=organization, key_name="c")
label_key_2 = make_label_key(organization=organization)
label_key_3 = make_label_key(organization=organization)
label_key_4 = make_label_key(organization=organization)
# create alert receive channel with all 3 types of labels
alert_receive_channel = make_alert_receive_channel(
organization,
alert_group_labels_custom=[
[label_key.id, label_value.id, None], # plain label
["nonexistent", label_value.id, None], # plain label with nonexistent key ID
[label_key_2.id, "nonexistent", None], # plain label with nonexistent value ID
[label_key_1.id, None, "{{ payload.c }}"], # templated label
[label_key_3.id, None, TOO_LONG_VALUE_NAME], # templated label too long
[label_key_4.id, None, "{{ payload.nonexistent }}"], # templated label with nonexistent key
],
alert_group_labels_template="{{ payload.advanced_template | tojson }}",
)
make_integration_label_association(organization, alert_receive_channel, key_name="e", value_name="f")
# create alert group
alert = Alert.create(
@ -69,9 +53,9 @@ def test_assign_labels(
message="the message",
alert_receive_channel=alert_receive_channel,
raw_request_data={
"c": "d",
"advanced_template": {
"g": 123,
"cluster_id": 123,
"severity": "critical",
"too_long": TOO_LONG_VALUE_NAME,
TOO_LONG_KEY_NAME: "too_long",
"invalid_type": {"test": "test"},
@ -85,22 +69,87 @@ def test_assign_labels(
# check alert group labels are assigned correctly, in the lexicographical order
assert [(label.key_name, label.value_name) for label in alert.group.labels.all()] == [
("a", "b"),
("c", "d"),
("e", "f"),
("g", "123"),
("cluster_id", "123"),
("severity", "critical"),
]
@pytest.mark.django_db
def test_assign_labels_custom_labels_none(
def test_assign_dynamic_labels(
make_organization,
make_alert_receive_channel,
make_integration_label_association,
make_label_key_and_value,
make_label_key,
make_label_value,
):
organization = make_organization()
# create label repo labels
label_key_severity = make_label_key(organization=organization, key_name="severity")
label_key_service = make_label_key(organization=organization, key_name="service")
# add values for severity key
_ = make_label_value(label_key_severity, value_name="critical")
# set-up some keys to test invalid templates
label_key_cluster = make_label_key(organization=organization, key_name="cluster")
label_key_region = make_label_key(organization=organization, key_name="region")
label_key_team = make_label_key(organization=organization, key_name="team")
# create alert receive channel with all 3 types of labels
alert_receive_channel = make_alert_receive_channel(
organization,
alert_group_labels_custom=[
# valid templated label, parsed value present in label repo. Expected to be attached to group,
[label_key_severity.id, None, "{{ payload.severity }}"],
# valid templated label, parsed value NOT present in label repo. Expected to be attached anyway.
[label_key_service.id, None, "{{ payload.service }}"],
# templated label too long. Expected to be ignored
[label_key_cluster.id, None, TOO_LONG_VALUE_NAME],
# templated label with jinja template pointing to nonexistent attribute in alert payload. Expected to be ignored
[label_key_region.id, None, "{{ payload.nonexistent }}"],
# templated label explicitly set to None. Expected to be ignored
[label_key_team.id, None, "{{ payload.nonexistent or None }}"],
# templated label with nonexistent key ID. Expected to be ignored
["nonexistent", None, "{{ payload.severity }}"],
],
)
# create alert group
alert = Alert.create(
title="the title",
message="the message",
alert_receive_channel=alert_receive_channel,
raw_request_data={
"severity": "critical",
"service": "oncall",
"extra": "hi",
},
integration_unique_data={},
image_url=None,
link_to_upstream_details=None,
)
assert [(label.key_name, label.value_name) for label in alert.group.labels.all()] == [
("service", "oncall"),
("severity", "critical"),
]
@pytest.mark.django_db
def test_assign_static_labels(
make_organization,
make_alert_receive_channel,
make_static_label_config,
):
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(organization, alert_group_labels_custom=None)
make_integration_label_association(organization, alert_receive_channel, key_name="a", value_name="b")
# Configure a static label - expected to be attached to group.
make_static_label_config(organization, alert_receive_channel, key_name="severity", value_name="critical")
# Configure a static label & delete key and value caches. Expected to be ignored.
make_static_label_config(organization, alert_receive_channel, key_name="service", value_name="oncall")
key = LabelKeyCache.objects.get(name="service")
LabelValueCache.objects.filter(name="oncall", key=key).delete()
key.delete()
alert = Alert.create(
title="the title",
@ -112,22 +161,22 @@ def test_assign_labels_custom_labels_none(
link_to_upstream_details=None,
)
assert [(label.key_name, label.value_name) for label in alert.group.labels.all()] == [("a", "b")]
assert [(label.key_name, label.value_name) for label in alert.group.labels.all()] == [("severity", "critical")]
@pytest.mark.django_db
def test_assign_labels_too_many(
make_organization, make_alert_receive_channel, make_integration_label_association, make_label_key_and_value
make_organization, make_alert_receive_channel, make_static_label_config, make_label_key_and_value
):
organization = make_organization()
label_key, label_value = make_label_key_and_value(organization, key_name="a", value_name="test")
alert_receive_channel = make_alert_receive_channel(
organization,
alert_group_labels_custom=[[label_key.id, label_value.id, None]],
alert_group_labels_template='{{ {"b": payload.b} | tojson }}',
)
make_integration_label_association(organization, alert_receive_channel, key_name="c", value_name="test")
make_static_label_config(organization, alert_receive_channel, key_name="a", value_name="test")
make_static_label_config(organization, alert_receive_channel, key_name="c", value_name="test")
with mock.patch("apps.labels.alert_group_labels.MAX_LABELS_PER_ALERT_GROUP", 2):
alert = Alert.create(

View file

@ -92,12 +92,12 @@ def test_label_associate_existing_label(make_label_key_and_value, make_organizat
@pytest.mark.django_db
def test_label_update_association_by_removing_label(
make_integration_label_association, make_organization, make_alert_receive_channel
make_static_label_config, make_organization, make_alert_receive_channel
):
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(organization)
label_association_1 = make_integration_label_association(organization, alert_receive_channel)
label_association_2 = make_integration_label_association(organization, alert_receive_channel)
label_association_1 = make_static_label_config(organization, alert_receive_channel)
label_association_2 = make_static_label_config(organization, alert_receive_channel)
labels_data = [
{
"key": {"id": label_association_1.key_id, "name": label_association_1.key.name, "prescribed": False},

View file

@ -89,11 +89,11 @@ def test_update_labels_cache(make_organization, make_label_key_and_value, make_l
@pytest.mark.django_db
def test_update_instances_labels_cache_recently_synced(
make_organization, make_alert_receive_channel, make_integration_label_association
make_organization, make_alert_receive_channel, make_static_label_config
):
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(organization)
label_association = make_integration_label_association(organization, alert_receive_channel)
label_association = make_static_label_config(organization, alert_receive_channel)
assert not label_association.key.is_outdated
assert not label_association.value.is_outdated
@ -109,11 +109,11 @@ def test_update_instances_labels_cache_recently_synced(
@pytest.mark.django_db
def test_update_instances_labels_cache_outdated(
make_organization, make_alert_receive_channel, make_integration_label_association
make_organization, make_alert_receive_channel, make_static_label_config
):
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(organization)
label_association = make_integration_label_association(organization, alert_receive_channel)
label_association = make_static_label_config(organization, alert_receive_channel)
outdated_last_synced = timezone.now() - timezone.timedelta(minutes=LABEL_OUTDATED_TIMEOUT_MINUTES + 1)
LabelKeyCache.objects.filter(id=label_association.key_id).update(last_synced=outdated_last_synced)
@ -140,12 +140,10 @@ def test_update_instances_labels_cache_outdated(
@pytest.mark.django_db
def test_update_instances_labels_cache_error(
make_organization, make_alert_receive_channel, make_integration_label_association
):
def test_update_instances_labels_cache_error(make_organization, make_alert_receive_channel, make_static_label_config):
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(organization)
label_association = make_integration_label_association(organization, alert_receive_channel)
label_association = make_static_label_config(organization, alert_receive_channel)
outdated_last_synced = timezone.now() - timezone.timedelta(minutes=LABEL_OUTDATED_TIMEOUT_MINUTES + 1)
LabelKeyCache.objects.filter(id=label_association.key_id).update(last_synced=outdated_last_synced)

View file

@ -1088,7 +1088,7 @@ def make_label_key_and_value(make_label_key, make_label_value):
@pytest.fixture
def make_integration_label_association(make_label_key_and_value):
def make_static_label_config(make_label_key_and_value):
def _make_integration_label_association(
organization, alert_receive_channel, key_id=None, key_name=None, value_id=None, value_name=None, **kwargs
):

View file

@ -868,6 +868,9 @@ INBOUND_EMAIL_ESP = os.getenv("INBOUND_EMAIL_ESP")
INBOUND_EMAIL_DOMAIN = os.getenv("INBOUND_EMAIL_DOMAIN")
INBOUND_EMAIL_WEBHOOK_SECRET = os.getenv("INBOUND_EMAIL_WEBHOOK_SECRET")
INBOUND_EMAIL_AMAZON_SNS_TOPIC_ARN = os.getenv("INBOUND_EMAIL_AMAZON_SNS_TOPIC_ARN")
INBOUND_EMAIL_AWS_ACCESS_KEY_ID = os.getenv("INBOUND_EMAIL_AWS_ACCESS_KEY_ID")
INBOUND_EMAIL_AWS_SECRET_ACCESS_KEY = os.getenv("INBOUND_EMAIL_AWS_SECRET_ACCESS_KEY")
INBOUND_EMAIL_AWS_REGION = os.getenv("INBOUND_EMAIL_AWS_REGION")
INSTALLED_ONCALL_INTEGRATIONS = [
# Featured

View file

@ -150,6 +150,68 @@ oncall-migrator
Consider modifying [alert templates](https://grafana.com/docs/oncall/latest/alert-behavior/alert-templates/) of the created
webhook integrations to adjust them for incoming payloads.
### Migrate your PagerDuty data while ignoring Grafana users
This scenario may be relevant in cases where you are unable to import your list of Grafana users, but would like to
experiment with Grafana OnCall, using your existing PagerDuty setup as a starting point.
If this is relevant to you, you can migrate as such:
```bash
# Step 1: run a plan of what will be migrated, ignoring users for now
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="plan" \
-e ONCALL_API_URL="<ONCALL_API_URL>" \
-e ONCALL_API_TOKEN="<ONCALL_API_TOKEN>" \
-e PAGERDUTY_API_TOKEN="<PAGERDUTY_API_TOKEN>" \
-e MIGRATE_USERS="false" \
oncall-migrator
# Step 2. Actually migrate your PagerDuty data, again ignoring users
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="migrate" \
-e ONCALL_API_URL="<ONCALL_API_URL>" \
-e ONCALL_API_TOKEN="<ONCALL_API_TOKEN>" \
-e PAGERDUTY_API_TOKEN="<PAGERDUTY_API_TOKEN>" \
-e MIGRATE_USERS="false" \
oncall-migrator
# Step 3. Optional; import your users from PagerDuty into your Grafana stack using our provided script
# For more information on our script, see "Migrating Users" section below for some more information on
# how users are migrated.
#
# Alternatively this can be done with other Grafana IAM methods.
# See Grafana's "Plan your IAM integration strategy" docs for more information on this.
# https://grafana.com/docs/grafana/latest/setup-grafana/configure-security/planning-iam-strategy/
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e GRAFANA_URL="<GRAFANA_API_URL>" \
-e GRAFANA_USERNAME="<GRAFANA_USERNAME>" \
-e GRAFANA_PASSWORD="<GRAFANA_PASSWORD>" \
-e PAGERDUTY_API_TOKEN="<PAGERDUTY_API_TOKEN>" \
oncall-migrator python /app/add_users_to_grafana.py
# Step 4: When ready, run a plan of what will be migrated, including users this time
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="plan" \
-e ONCALL_API_URL="<ONCALL_API_URL>" \
-e ONCALL_API_TOKEN="<ONCALL_API_TOKEN>" \
-e PAGERDUTY_API_TOKEN="<PAGERDUTY_API_TOKEN>" \
oncall-migrator
# Step 5: And finally, when ready, actually migrate your PagerDuty data, again including users
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e MODE="migrate" \
-e ONCALL_API_URL="<ONCALL_API_URL>" \
-e ONCALL_API_TOKEN="<ONCALL_API_TOKEN>" \
-e PAGERDUTY_API_TOKEN="<PAGERDUTY_API_TOKEN>" \
oncall-migrator
```
### Configuration
Configuration is done via environment variables passed to the docker container.
@ -165,6 +227,7 @@ Configuration is done via environment variables passed to the docker container.
| `UNSUPPORTED_INTEGRATION_TO_WEBHOOKS` | When set to `true`, integrations with unsupported type will be migrated to Grafana OnCall integrations with type "webhook". When set to `false`, integrations with unsupported type won't be migrated. | Boolean | `false` |
| `EXPERIMENTAL_MIGRATE_EVENT_RULES` | Migrate global event rulesets to Grafana OnCall integrations. | Boolean | `false` |
| `EXPERIMENTAL_MIGRATE_EVENT_RULES_LONG_NAMES` | Include service & integrations names from PD in migrated integrations (only effective when `EXPERIMENTAL_MIGRATE_EVENT_RULES` is `true`). | Boolean | `false` |
| `MIGRATE_USERS` | If `false`, will allow you to import all objects, while ignoring user references in schedules and escalation policies. In addition, if `false`, will also skip importing User notification rules. This may be helpful in cases where you are unable to import your list of Grafana users, but would like to experiment with OnCall using your existing PagerDuty setup as a starting point. | Boolean | `true` |
### Resources
@ -340,9 +403,9 @@ Grafana users via the Grafana HTTP API.
```bash
docker run --rm \
-e MIGRATING_FROM="pagerduty" \
-e GRAFANA_URL="http://localhost:3000" \
-e GRAFANA_USERNAME="admin" \
-e GRAFANA_PASSWORD="admin" \
-e GRAFANA_URL="<GRAFANA_API_URL>" \
-e GRAFANA_USERNAME="<GRAFANA_USERNAME>" \
-e GRAFANA_PASSWORD="<GRAFANA_PASSWORD>" \
-e PAGERDUTY_API_TOKEN="<PAGERDUTY_API_TOKEN>" \
oncall-migrator python /app/add_users_to_grafana.py
```
@ -352,9 +415,9 @@ oncall-migrator python /app/add_users_to_grafana.py
```bash
docker run --rm \
-e MIGRATING_FROM="splunk" \
-e GRAFANA_URL="http://localhost:3000" \
-e GRAFANA_USERNAME="admin" \
-e GRAFANA_PASSWORD="admin" \
-e GRAFANA_URL="<GRAFANA_API_URL>" \
-e GRAFANA_USERNAME="<GRAFANA_USERNAME>" \
-e GRAFANA_PASSWORD="<GRAFANA_PASSWORD>" \
-e SPLUNK_API_ID="<SPLUNK_API_ID>" \
-e SPLUNK_API_KEY="<SPLUNK_API_KEY>" \
oncall-migrator python /app/add_users_to_grafana.py

View file

@ -36,3 +36,5 @@ EXPERIMENTAL_MIGRATE_EVENT_RULES_LONG_NAMES = (
UNSUPPORTED_INTEGRATION_TO_WEBHOOKS = (
os.getenv("UNSUPPORTED_INTEGRATION_TO_WEBHOOKS", "false").lower() == "true"
)
MIGRATE_USERS = os.getenv("MIGRATE_USERS", "true").lower() == "true"

View file

@ -7,6 +7,7 @@ from lib.common.resources.users import match_user
from lib.oncall.api_client import OnCallAPIClient
from lib.pagerduty.config import (
EXPERIMENTAL_MIGRATE_EVENT_RULES,
MIGRATE_USERS,
MODE,
MODE_PLAN,
PAGERDUTY_API_TOKEN,
@ -46,8 +47,12 @@ def migrate() -> None:
session = APISession(PAGERDUTY_API_TOKEN)
session.timeout = 20
print("▶ Fetching users...")
users = session.list_all("users", params={"include[]": "notification_rules"})
if MIGRATE_USERS:
print("▶ Fetching users...")
users = session.list_all("users", params={"include[]": "notification_rules"})
else:
print("▶ Skipping user migration as MIGRATE_USERS is false...")
users = []
oncall_users = OnCallAPIClient.list_users_with_notification_rules()
@ -97,8 +102,9 @@ def migrate() -> None:
rules = session.list_all(f"rulesets/{ruleset['id']}/rules")
ruleset["rules"] = rules
for user in users:
match_user(user, oncall_users)
if MIGRATE_USERS:
for user in users:
match_user(user, oncall_users)
user_id_map = {
u["id"]: u["oncall_user"]["id"] if u["oncall_user"] else None for u in users
@ -138,11 +144,14 @@ def migrate() -> None:
return
print("▶ Migrating user notification rules...")
for user in users:
if user["oncall_user"]:
migrate_notification_rules(user)
print(TAB + format_user(user))
if MIGRATE_USERS:
print("▶ Migrating user notification rules...")
for user in users:
if user["oncall_user"]:
migrate_notification_rules(user)
print(TAB + format_user(user))
else:
print("▶ Skipping migrating user notification rules as MIGRATE_USERS is false...")
print("▶ Migrating schedules...")
for schedule in schedules: