This commit is contained in:
Joey Orlando 2024-11-06 08:58:12 -05:00 committed by GitHub
commit ffb32d735a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
10 changed files with 205 additions and 172 deletions

View file

@@ -9,49 +9,28 @@ logger = logging.getLogger(__name__)
def populate_slack_channel(apps, schema_editor):
ChannelFilter = apps.get_model("alerts", "ChannelFilter")
SlackChannel = apps.get_model("slack", "SlackChannel")
AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel")
Organization = apps.get_model("user_management", "Organization")
logger.info("Starting migration to populate slack_channel field.")
queryset = ChannelFilter.objects.filter(
_slack_channel_id__isnull=False,
alert_receive_channel__organization__slack_team_identity__isnull=False,
)
total_channel_filters = queryset.count()
updated_channel_filters = 0
missing_channel_filters = 0
channel_filters_to_update = []
sql = f"""
UPDATE {ChannelFilter._meta.db_table} AS cf
JOIN {AlertReceiveChannel._meta.db_table} AS arc ON arc.id = cf.alert_receive_channel_id
JOIN {Organization._meta.db_table} AS org ON org.id = arc.organization_id
JOIN {SlackChannel._meta.db_table} AS sc ON sc.slack_id = cf._slack_channel_id
AND sc.slack_team_identity_id = org.slack_team_identity_id
SET cf.slack_channel_id = sc.id
WHERE cf._slack_channel_id IS NOT NULL
AND org.slack_team_identity_id IS NOT NULL;
"""
logger.info(f"Total channel filters to process: {total_channel_filters}")
with schema_editor.connection.cursor() as cursor:
cursor.execute(sql)
updated_rows = cursor.rowcount # Number of rows updated
for channel_filter in queryset:
slack_id = channel_filter._slack_channel_id
slack_team_identity = channel_filter.alert_receive_channel.organization.slack_team_identity
try:
slack_channel = SlackChannel.objects.get(slack_id=slack_id, slack_team_identity=slack_team_identity)
channel_filter.slack_channel = slack_channel
channel_filters_to_update.append(channel_filter)
updated_channel_filters += 1
logger.info(
f"ChannelFilter {channel_filter.id} updated with SlackChannel {slack_channel.id} "
f"(slack_id: {slack_id})."
)
except SlackChannel.DoesNotExist:
missing_channel_filters += 1
logger.warning(
f"SlackChannel with slack_id {slack_id} and slack_team_identity {slack_team_identity} "
f"does not exist for ChannelFilter {channel_filter.id}."
)
if channel_filters_to_update:
ChannelFilter.objects.bulk_update(channel_filters_to_update, ["slack_channel"])
logger.info(f"Bulk updated {len(channel_filters_to_update)} ChannelFilters with their Slack channel.")
logger.info(
f"Finished migration. Total channel filters processed: {total_channel_filters}. "
f"Channel filters updated: {updated_channel_filters}. Missing SlackChannels: {missing_channel_filters}."
)
logger.info(f"Bulk updated {updated_rows} ChannelFilters with their Slack channel.")
logger.info("Finished migration to populate slack_channel field.")
class Migration(migrations.Migration):

View file

@@ -10,53 +10,30 @@ logger = logging.getLogger(__name__)
def populate_slack_channel(apps, schema_editor):
ResolutionNoteSlackMessage = apps.get_model("alerts", "ResolutionNoteSlackMessage")
SlackChannel = apps.get_model("slack", "SlackChannel")
AlertGroup = apps.get_model("alerts", "AlertGroup")
AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel")
Organization = apps.get_model("user_management", "Organization")
logger.info("Starting migration to populate slack_channel field.")
queryset = ResolutionNoteSlackMessage.objects.filter(
_slack_channel_id__isnull=False,
alert_group__channel__organization__slack_team_identity__isnull=False,
)
total_resolution_notes = queryset.count()
updated_resolution_notes = 0
missing_resolution_notes = 0
resolution_notes_to_update = []
sql = f"""
UPDATE {ResolutionNoteSlackMessage._meta.db_table} AS rsm
JOIN {AlertGroup._meta.db_table} AS ag ON ag.id = rsm.alert_group_id
JOIN {AlertReceiveChannel._meta.db_table} AS arc ON arc.id = ag.channel_id
JOIN {Organization._meta.db_table} AS org ON org.id = arc.organization_id
JOIN {SlackChannel._meta.db_table} AS sc ON sc.slack_id = rsm._slack_channel_id
AND sc.slack_team_identity_id = org.slack_team_identity_id
SET rsm.slack_channel_id = sc.id
WHERE rsm._slack_channel_id IS NOT NULL
AND org.slack_team_identity_id IS NOT NULL;
"""
logger.info(f"Total resolution note slack messages to process: {total_resolution_notes}")
for resolution_note in queryset:
slack_id = resolution_note._slack_channel_id
slack_team_identity = resolution_note.alert_group.channel.organization.slack_team_identity
try:
slack_channel = SlackChannel.objects.get(slack_id=slack_id, slack_team_identity=slack_team_identity)
resolution_note.slack_channel = slack_channel
resolution_notes_to_update.append(resolution_note)
updated_resolution_notes += 1
logger.info(
f"ResolutionNoteSlackMessage {resolution_note.id} updated with SlackChannel {slack_channel.id} "
f"(slack_id: {slack_id})."
)
except SlackChannel.DoesNotExist:
missing_resolution_notes += 1
logger.warning(
f"SlackChannel with slack_id {slack_id} and slack_team_identity {slack_team_identity} "
f"does not exist for ResolutionNoteSlackMessage {resolution_note.id}."
)
if resolution_notes_to_update:
ResolutionNoteSlackMessage.objects.bulk_update(resolution_notes_to_update, ["slack_channel"])
logger.info(
f"Bulk updated {len(resolution_notes_to_update)} ResolutionNoteSlackMessage with their Slack channel."
)
logger.info(
f"Finished migration. Total resolution note slack messages processed: {total_resolution_notes}. "
f"Resolution note slack messages updated: {updated_resolution_notes}. "
f"Missing SlackChannels: {missing_resolution_notes}."
)
with schema_editor.connection.cursor() as cursor:
cursor.execute(sql)
updated_rows = cursor.rowcount # Number of rows updated
logger.info(f"Bulk updated {updated_rows} ResolutionNoteSlackMessage records with their Slack channel.")
logger.info("Finished migration to populate slack_channel field.")
class Migration(migrations.Migration):

View file

@@ -26,6 +26,7 @@ class OrganizationSerializer(EagerLoadingMixin, serializers.ModelSerializer):
rbac_enabled = serializers.BooleanField(read_only=True, source="is_rbac_permissions_enabled")
grafana_incident_enabled = serializers.BooleanField(read_only=True, source="is_grafana_incident_enabled")
grafana_irm_enabled = serializers.BooleanField(read_only=True, source="is_grafana_irm_enabled")
SELECT_RELATED = ["slack_team_identity", "slack_channel"]
@@ -39,6 +40,7 @@ class OrganizationSerializer(EagerLoadingMixin, serializers.ModelSerializer):
"slack_channel",
"rbac_enabled",
"grafana_incident_enabled",
"grafana_irm_enabled",
"direct_paging_prefer_important_policy",
]
read_only_fields = [
@@ -46,6 +48,7 @@ class OrganizationSerializer(EagerLoadingMixin, serializers.ModelSerializer):
"slack_team_identity",
"rbac_enabled",
"grafana_incident_enabled",
"grafana_irm_enabled",
]

View file

@@ -36,7 +36,10 @@ def test_get_organization(
client = APIClient()
url = reverse("api-internal:api-organization")
expected_result = {
response = client.get(url, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
assert response.json() == {
"pk": organization.public_primary_key,
"name": organization.org_title,
"stack_slug": organization.stack_slug,
@@ -44,14 +47,12 @@ def test_get_organization(
"slack_channel": None,
"rbac_enabled": organization.is_rbac_permissions_enabled,
"grafana_incident_enabled": organization.is_grafana_incident_enabled,
"grafana_irm_enabled": organization.is_grafana_irm_enabled,
"direct_paging_prefer_important_policy": organization.direct_paging_prefer_important_policy,
"is_resolution_note_required": False,
"env_status": mock_env_status,
"banner": mock_banner,
}
response = client.get(url, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
assert response.json() == expected_result
@pytest.mark.django_db
@@ -70,6 +71,30 @@ def test_get_organization_rbac_enabled(make_organization_and_user_with_plugin_to
assert response.json()["rbac_enabled"] == organization.is_rbac_permissions_enabled
# NOTE: we need to patch the following because when is_grafana_irm_enabled is True, it alters how
# API authz works. For the purpose of this test, we don't care about testing that behaviour (it's already tested),
# just want to test the serializer essentially.
@patch("apps.api.permissions.user_is_authorized", return_value=True)
@pytest.mark.django_db
@pytest.mark.parametrize("is_grafana_irm_enabled", [True, False])
def test_get_organization_grafana_irm_enabled(
_mock_user_is_authorized,
make_organization_and_user_with_plugin_token,
make_user_auth_headers,
is_grafana_irm_enabled,
):
organization, user, token = make_organization_and_user_with_plugin_token()
organization.is_grafana_irm_enabled = is_grafana_irm_enabled
organization.save()
client = APIClient()
url = reverse("api-internal:api-organization")
response = client.get(url, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
assert response.json()["grafana_irm_enabled"] is is_grafana_irm_enabled
@pytest.mark.django_db
def test_update_organization_settings(make_organization_and_user_with_plugin_token, make_user_auth_headers):
organization, user, token = make_organization_and_user_with_plugin_token()

View file

@@ -9,47 +9,26 @@ logger = logging.getLogger(__name__)
def populate_slack_channel(apps, schema_editor):
OnCallSchedule = apps.get_model("schedules", "OnCallSchedule")
SlackChannel = apps.get_model("slack", "SlackChannel")
Organization = apps.get_model("user_management", "Organization")
logger.info("Starting migration to populate slack_channel field.")
queryset = OnCallSchedule.objects.filter(channel__isnull=False, organization__slack_team_identity__isnull=False)
total_schedules = queryset.count()
updated_schedules = 0
missing_channels = 0
schedules_to_update = []
sql = f"""
UPDATE {OnCallSchedule._meta.db_table} AS ocs
JOIN {Organization._meta.db_table} AS org ON org.id = ocs.organization_id
JOIN {SlackChannel._meta.db_table} AS sc ON sc.slack_id = ocs.channel
AND sc.slack_team_identity_id = org.slack_team_identity_id
SET ocs.slack_channel_id = sc.id
WHERE ocs.channel IS NOT NULL
AND org.slack_team_identity_id IS NOT NULL;
"""
logger.info(f"Total schedules to process: {total_schedules}")
for schedule in queryset:
slack_id = schedule.channel
slack_team_identity = schedule.organization.slack_team_identity
try:
slack_channel = SlackChannel.objects.get(slack_id=slack_id, slack_team_identity=slack_team_identity)
schedule.slack_channel = slack_channel
schedules_to_update.append(schedule)
updated_schedules += 1
logger.info(
f"Schedule {schedule.id} updated with SlackChannel {slack_channel.id} (slack_id: {slack_id})."
)
except SlackChannel.DoesNotExist:
missing_channels += 1
logger.warning(
f"SlackChannel with slack_id {slack_id} and slack_team_identity {slack_team_identity} "
f"does not exist for Schedule {schedule.id}."
)
if schedules_to_update:
OnCallSchedule.objects.bulk_update(schedules_to_update, ["slack_channel"])
logger.info(f"Bulk updated {len(schedules_to_update)} OnCallSchedules with their Slack channel.")
logger.info(
f"Finished migration. Total schedules processed: {total_schedules}. "
f"Schedules updated: {updated_schedules}. Missing SlackChannels: {missing_channels}."
)
with schema_editor.connection.cursor() as cursor:
cursor.execute(sql)
updated_rows = cursor.rowcount # Number of rows updated
logger.info(f"Bulk updated {updated_rows} OnCallSchedules with their Slack channel.")
logger.info("Finished migration to populate slack_channel field.")
class Migration(migrations.Migration):

View file

@@ -13,43 +13,21 @@ def populate_default_slack_channel(apps, schema_editor):
logger.info("Starting migration to populate default_slack_channel field.")
queryset = Organization.objects.filter(general_log_channel_id__isnull=False, slack_team_identity__isnull=False)
total_orgs = queryset.count()
updated_orgs = 0
missing_channels = 0
organizations_to_update = []
sql = f"""
UPDATE {Organization._meta.db_table} AS org
JOIN {SlackChannel._meta.db_table} AS sc ON sc.slack_id = org.general_log_channel_id
AND sc.slack_team_identity_id = org.slack_team_identity_id
SET org.default_slack_channel_id = sc.id
WHERE org.general_log_channel_id IS NOT NULL
AND org.slack_team_identity_id IS NOT NULL;
"""
logger.info(f"Total organizations to process: {total_orgs}")
with schema_editor.connection.cursor() as cursor:
cursor.execute(sql)
updated_rows = cursor.rowcount # Number of rows updated
for org in queryset:
slack_id = org.general_log_channel_id
slack_team_identity = org.slack_team_identity
try:
slack_channel = SlackChannel.objects.get(slack_id=slack_id, slack_team_identity=slack_team_identity)
org.default_slack_channel = slack_channel
organizations_to_update.append(org)
updated_orgs += 1
logger.info(
f"Organization {org.id} updated with SlackChannel {slack_channel.id} (slack_id: {slack_id})."
)
except SlackChannel.DoesNotExist:
missing_channels += 1
logger.warning(
f"SlackChannel with slack_id {slack_id} and slack_team_identity {slack_team_identity} "
f"does not exist for Organization {org.id}."
)
if organizations_to_update:
Organization.objects.bulk_update(organizations_to_update, ["default_slack_channel"])
logger.info(f"Bulk updated {len(organizations_to_update)} organizations with their default Slack channel.")
logger.info(
f"Finished migration. Total organizations processed: {total_orgs}. "
f"Organizations updated: {updated_orgs}. Missing SlackChannels: {missing_channels}."
)
logger.info(f"Bulk updated {updated_rows} organizations with their default Slack channel.")
logger.info("Finished migration to populate default_slack_channel field.")
class Migration(migrations.Migration):

View file

@@ -21,4 +21,4 @@ def is_insight_logs_enabled(organization: "Organization") -> bool:
f"ONCALL_BACKEND_REGION={settings.ONCALL_BACKEND_REGION} "
f"cluster_slug={organization.cluster_slug}"
)
return not settings.IS_OPEN_SOURCE and settings.ONCALL_BACKEND_REGION == organization.cluster_slug
return not settings.IS_OPEN_SOURCE

View file

@@ -172,8 +172,9 @@ Configuration is done via environment variables passed to the docker container.
The tool is capable of migrating user notification rules from PagerDuty to Grafana OnCall.
Notification rules from the `"When a high-urgency incident is assigned to me..."` section in PagerDuty settings are
taken into account and will be migrated to default notification rules in Grafana OnCall for each user. Note that delays
between notification rules may be slightly different in Grafana OnCall, see [Limitations](#limitations) for more info.
taken into account and will be migrated to both default and important notification rules in Grafana OnCall
for each user. Note that delays between notification rules may be slightly different in Grafana OnCall,
see [Limitations](#limitations) for more info.
When running the migration, existing notification rules in Grafana OnCall will be deleted for every affected user.

View file

@@ -27,21 +27,25 @@ def migrate_notification_rules(user: dict) -> None:
rule for rule in user["notification_rules"] if rule["urgency"] == "high"
]
oncall_rules = transform_notification_rules(
notification_rules, user["oncall_user"]["id"]
)
for important in (False, True):
oncall_rules = transform_notification_rules(
notification_rules, user["oncall_user"]["id"], important
)
for rule in oncall_rules:
OnCallAPIClient.create("personal_notification_rules", rule)
for rule in oncall_rules:
OnCallAPIClient.create("personal_notification_rules", rule)
if oncall_rules:
# delete old notification rules if any new rules were created
for rule in user["oncall_user"]["notification_rules"]:
OnCallAPIClient.delete("personal_notification_rules/{}".format(rule["id"]))
if oncall_rules:
# delete old notification rules if any new rules were created
for rule in user["oncall_user"]["notification_rules"]:
if rule["important"] == important:
OnCallAPIClient.delete(
"personal_notification_rules/{}".format(rule["id"])
)
def transform_notification_rules(
notification_rules: list[dict], user_id: str
notification_rules: list[dict], user_id: str, important: bool
) -> list[dict]:
"""
Transform PagerDuty user notification rules to Grafana OnCall personal notification rules.
@@ -58,7 +62,9 @@ def transform_notification_rules(
previous_delay = notification_rules[idx - 1]["start_delay_in_minutes"]
delay -= previous_delay
oncall_notification_rules += transform_notification_rule(rule, delay, user_id)
oncall_notification_rules += transform_notification_rule(
rule, delay, user_id, important
)
oncall_notification_rules = remove_duplicate_rules_between_waits(
oncall_notification_rules
@@ -68,12 +74,12 @@
def transform_notification_rule(
notification_rule: dict, delay: int, user_id: str
notification_rule: dict, delay: int, user_id: str, important: bool
) -> list[dict]:
contact_method_type = notification_rule["contact_method"]["type"]
oncall_type = PAGERDUTY_TO_ONCALL_CONTACT_METHOD_MAP[contact_method_type]
notify_rule = {"user_id": user_id, "type": oncall_type, "important": False}
notify_rule = {"user_id": user_id, "type": oncall_type, "important": important}
if not delay:
return [notify_rule]
@@ -82,6 +88,6 @@ def transform_notification_rule(
"user_id": user_id,
"type": "wait",
"duration": transform_wait_delay(delay),
"important": "False",
"important": important,
}
return [wait_rule, notify_rule]

View file

@@ -0,0 +1,85 @@
from unittest.mock import call, patch
from lib.oncall.api_client import OnCallAPIClient
from lib.pagerduty.resources.notification_rules import migrate_notification_rules
@patch.object(OnCallAPIClient, "delete")
@patch.object(OnCallAPIClient, "create")
def test_migrate_notification_rules(api_client_create_mock, api_client_delete_mock):
migrate_notification_rules(
{
"notification_rules": [
{
"contact_method": {"type": "sms_contact_method"},
"start_delay_in_minutes": 0,
"urgency": "high",
},
{
"contact_method": {"type": "push_notification_contact_method"},
"start_delay_in_minutes": 5,
"urgency": "high",
},
],
"oncall_user": {
"id": "EXISTING_USER_ID",
"notification_rules": [
{"id": "EXISTING_RULE_ID_1", "important": False},
{"id": "EXISTING_RULE_ID_2", "important": True},
],
},
}
)
assert api_client_create_mock.call_args_list == [
call(
"personal_notification_rules",
{
"user_id": "EXISTING_USER_ID",
"type": "notify_by_sms",
"important": False,
},
),
call(
"personal_notification_rules",
{
"user_id": "EXISTING_USER_ID",
"type": "wait",
"duration": 300,
"important": False,
},
),
call(
"personal_notification_rules",
{
"user_id": "EXISTING_USER_ID",
"type": "notify_by_mobile_app",
"important": False,
},
),
call(
"personal_notification_rules",
{"user_id": "EXISTING_USER_ID", "type": "notify_by_sms", "important": True},
),
call(
"personal_notification_rules",
{
"user_id": "EXISTING_USER_ID",
"type": "wait",
"duration": 300,
"important": True,
},
),
call(
"personal_notification_rules",
{
"user_id": "EXISTING_USER_ID",
"type": "notify_by_mobile_app",
"important": True,
},
),
]
assert api_client_delete_mock.call_args_list == [
call("personal_notification_rules/EXISTING_RULE_ID_1"),
call("personal_notification_rules/EXISTING_RULE_ID_2"),
]