chore: convert two slack channel ID char fields to foreign keys (#5224)

# What this PR does

Similar to https://github.com/grafana/oncall/pull/5199

Converts the following char fields to foreign key relationships on the
`SlackChannel` table:
- `ResolutionNoteSlackMessage.channel_id` ->
`ResolutionNoteSlackMessage.slack_channel`
- `ChannelFilter.slack_channel_id` -> `ChannelFilter.slack_channel`

## Checklist

- [x] Unit, integration, and e2e (if applicable) tests updated
- [x] Documentation added (or `pr:no public docs` PR label added if not
required)
- [x] Added the relevant release notes label (see labels prefixed w/
`release:`). These labels dictate how your PR will
    show up in the autogenerated release notes.
This commit is contained in:
Joey Orlando 2024-11-04 13:34:06 -05:00 committed by GitHub
parent 07d289b0a3
commit deb6a45588
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
38 changed files with 1121 additions and 532 deletions

View file

@ -20,10 +20,13 @@ from apps.alerts.tasks import (
)
from apps.alerts.utils import is_declare_incident_step_enabled
from apps.schedules.ical_utils import list_users_to_notify_from_ical
from apps.user_management.models import User
if typing.TYPE_CHECKING:
from apps.alerts.models.alert_group import AlertGroup
from apps.schedules.models import OnCallSchedule
from apps.slack.models import SlackUserGroup
from apps.user_management.models import Team, User
from apps.webhooks.models import Webhook
class EscalationPolicySnapshot:
@ -57,24 +60,24 @@ class EscalationPolicySnapshot:
def __init__(
self,
id,
order,
step,
wait_delay,
notify_to_users_queue,
last_notified_user,
from_time,
to_time,
num_alerts_in_window,
num_minutes_in_window,
custom_webhook,
notify_schedule,
notify_to_group,
escalation_counter,
passed_last_time,
pause_escalation,
notify_to_team_members=None,
severity=None,
id: int,
order: int,
step: int,
wait_delay: typing.Optional[datetime.timedelta],
notify_to_users_queue: typing.Optional[typing.Sequence["User"]],
last_notified_user: typing.Optional["User"],
from_time: typing.Optional[datetime.time],
to_time: typing.Optional[datetime.time],
num_alerts_in_window: typing.Optional[int],
num_minutes_in_window: typing.Optional[int],
custom_webhook: typing.Optional["Webhook"],
notify_schedule: typing.Optional["OnCallSchedule"],
notify_to_group: typing.Optional["SlackUserGroup"],
escalation_counter: int,
passed_last_time: typing.Optional[datetime.datetime],
pause_escalation: bool,
notify_to_team_members: typing.Optional["Team"] = None,
severity: typing.Optional[str] = None,
):
self.id = id
self.order = order
@ -107,11 +110,11 @@ class EscalationPolicySnapshot:
return EscalationPolicy.objects.filter(pk=self.id).first()
@property
def sorted_users_queue(self) -> typing.List[User]:
def sorted_users_queue(self) -> typing.List["User"]:
return sorted(self.notify_to_users_queue, key=lambda user: (user.username or "", user.pk))
@property
def next_user_in_sorted_queue(self) -> User:
def next_user_in_sorted_queue(self) -> "User":
users_queue = self.sorted_users_queue
try:
last_user_index = users_queue.index(self.last_notified_user)
@ -120,7 +123,7 @@ class EscalationPolicySnapshot:
next_user = users_queue[(last_user_index + 1) % len(users_queue)]
return next_user
def execute(self, alert_group: "AlertGroup", reason) -> StepExecutionResultData:
def execute(self, alert_group: "AlertGroup", reason: str) -> StepExecutionResultData:
action_map: typing.Dict[typing.Optional[int], EscalationPolicySnapshot.StepExecutionFunc] = {
EscalationPolicy.STEP_WAIT: self._escalation_step_wait,
EscalationPolicy.STEP_FINAL_NOTIFYALL: self._escalation_step_notify_all,

View file

@ -10,26 +10,69 @@ from apps.schedules.ical_utils import list_users_to_notify_from_ical
if typing.TYPE_CHECKING:
from django.db.models.manager import RelatedManager
from apps.alerts.escalation_snapshot.snapshot_classes import EscalationPolicySnapshot, EscalationSnapshot
from apps.alerts.models import AlertGroup, AlertGroupLogRecord, ResolutionNote
from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord
from apps.slack.models import SlackTeamIdentity
from apps.user_management.models import User
class PlanLines(typing.TypedDict):
plan_lines: typing.List[str]
class NotificationPlanLines(PlanLines):
user_id: int
plan_lines: typing.List[str]
"""
rendered notification policy line
"""
is_the_first_notification_step: bool
EscalationPlan = typing.Dict[timezone.timedelta, typing.List[PlanLines]]
"""
`dict` with `timedelta` as a key and values containing a `list` of escalation plan lines
"""
NotificationPlan = typing.Dict[timezone.timedelta, typing.List[NotificationPlanLines]]
"""
`dict` with `timedelta` as a key and values containing a `list` of notification plan lines
"""
MixedEscalationNotificationPlan = typing.Dict[
timezone.timedelta, typing.List[typing.Union[PlanLines, NotificationPlanLines]]
]
"""
NOTE: this seems janky, maybe there's a better way for us to do this than jam two different object types into the same
list? Opportunity for future refactoring...
"""
FlattendEscalationPlan = typing.Dict[timezone.timedelta, typing.List[str]]
LogRecords = typing.List[typing.Union["AlertGroupLogRecord", "ResolutionNote", "UserNotificationPolicyLogRecord"]]
UsersToNotify = typing.Iterable["User"]
"""
`typing.Iterable` ([docs](https://docs.python.org/3/library/collections.abc.html#collections.abc.Iterable)):
ABC for classes that provide the __iter__() method
We just need this because some methods for fetching users return `QuerySet`s, others simply assign a list. In the end
we simply iterate over the users.
"""
class IncidentLogBuilder:
def __init__(self, alert_group: "AlertGroup"):
self.alert_group = alert_group
def get_log_records_list(
self, with_resolution_notes: bool = False
) -> typing.List[typing.Union["AlertGroupLogRecord", "ResolutionNote", "UserNotificationPolicyLogRecord"]]:
def get_log_records_list(self, with_resolution_notes: bool = False) -> LogRecords:
"""
Generates list of `AlertGroupLogRecord` and `UserNotificationPolicyLogRecord` logs.
Generates list of `LogRecords`. `ResolutionNote`s are optionally included if `with_resolution_notes` is `True`.
"""
all_log_records: LogRecords = list()
`ResolutionNote`s are optionally included if `with_resolution_notes` is `True`.
"""
all_log_records: typing.List[
typing.Union["AlertGroupLogRecord", "ResolutionNote", "UserNotificationPolicyLogRecord"]
] = list()
# get logs from AlertGroupLogRecord
alert_group_log_records = self._get_log_records_for_after_resolve_report()
all_log_records.extend(alert_group_log_records)
@ -41,9 +84,9 @@ class IncidentLogBuilder:
if with_resolution_notes:
resolution_notes = self._get_resolution_notes()
all_log_records.extend(resolution_notes)
# sort logs by date
all_log_records_sorted = sorted(all_log_records, key=lambda log: log.created_at)
return all_log_records_sorted
return sorted(all_log_records, key=lambda log: log.created_at)
def _get_log_records_for_after_resolve_report(self) -> "RelatedManager['AlertGroupLogRecord']":
from apps.alerts.models import AlertGroupLogRecord, EscalationPolicy
@ -120,25 +163,22 @@ class IncidentLogBuilder:
"created_at"
)
def get_incident_escalation_plan(self, for_slack=False):
"""
Generates dict with escalation plan with timedelta as keys and list with plan lines as values
:param for_slack: (bool) add user slack id to plan line or not
:return:
"""
incident_escalation_plan = dict()
incident_escalation_plan = self._add_invitation_plan(incident_escalation_plan, for_slack=for_slack)
if not self.alert_group.acknowledged and not self.alert_group.is_silenced_forever:
incident_escalation_plan = self._add_escalation_plan(incident_escalation_plan, for_slack=for_slack)
final_incident_escalation_plan = self._finalize_escalation_plan_dict(incident_escalation_plan)
return final_incident_escalation_plan
def get_escalation_plan(self, for_slack: bool = False) -> FlattendEscalationPlan:
escalation_plan: EscalationPlan = dict()
escalation_plan = self._add_invitation_plan(escalation_plan, for_slack=for_slack)
def _add_escalation_plan(self, escalation_plan_dict, for_slack=False):
if not self.alert_group.acknowledged and not self.alert_group.is_silenced_forever:
escalation_plan = self._add_escalation_plan(escalation_plan, for_slack=for_slack)
return self._finalize_escalation_plan(escalation_plan)
def _add_escalation_plan(
self,
escalation_plan: MixedEscalationNotificationPlan,
for_slack: bool = False,
) -> MixedEscalationNotificationPlan:
"""
Returns plan for future escalations
:param escalation_plan_dict:
:param for_slack:
:return: {timedelta: [{"user_id": user.pk, "plan_lines": [#rendered escalation policy line, ]}, ..., ...], ...}
"""
esc_timedelta = timezone.timedelta(seconds=0) # timedelta for next escalation step
now = timezone.now()
@ -147,7 +187,7 @@ class IncidentLogBuilder:
# We cannot generate escalation plan in this case
escalation_snapshot = self.alert_group.escalation_snapshot
if not self.alert_group.has_escalation_policies_snapshots:
return escalation_plan_dict
return escalation_plan
if self.alert_group.silenced_until:
timedelta = self.alert_group.silenced_until - now
@ -160,18 +200,22 @@ class IncidentLogBuilder:
stop_escalation_log_pk = stop_escalation_log.pk if stop_escalation_log else 0
# render escalation plan from escalation_snapshot
escalation_plan_dict = self._render_escalation_plan_from_escalation_snapshot(
escalation_plan_dict,
return self._render_escalation_plan_from_escalation_snapshot(
escalation_plan,
stop_escalation_log_pk,
esc_timedelta,
escalation_snapshot,
for_slack,
)
return escalation_plan_dict
def _render_escalation_plan_from_escalation_snapshot(
self, escalation_plan_dict, stop_escalation_log_pk, esc_timedelta, escalation_snapshot, for_slack=False
):
self,
escalation_plan: EscalationPlan,
stop_escalation_log_pk: int,
esc_timedelta: timezone.timedelta,
escalation_snapshot: "EscalationSnapshot",
for_slack=False,
) -> MixedEscalationNotificationPlan:
from apps.alerts.models import EscalationPolicy
now = timezone.now()
@ -203,9 +247,11 @@ class IncidentLogBuilder:
if len(escalation_policies_snapshots) > 0 and not escalation_eta:
future_step_timedelta = None
for escalation_policy_snapshot in escalation_policies_snapshots:
step_timedelta = esc_timedelta
future_step = escalation_policy_snapshot.order >= escalation_policy_order # step not passed yet
if future_step and escalation_policy_snapshot.step == EscalationPolicy.STEP_WAIT:
wait_delay = escalation_policy_snapshot.wait_delay or EscalationPolicy.DEFAULT_WAIT_DELAY
esc_timedelta += wait_delay # increase timedelta for next steps
@ -215,6 +261,7 @@ class IncidentLogBuilder:
future_step_timedelta = esc_timedelta - last_log_timedelta
elif not future_step:
passed_last_time = escalation_policy_snapshot.passed_last_time
if passed_last_time is not None:
step_timedelta = esc_timedelta - (now - passed_last_time)
else:
@ -224,8 +271,8 @@ class IncidentLogBuilder:
# stop plan generation if there is resolve step in escalation plan
if future_step and escalation_policy_snapshot.step == EscalationPolicy.STEP_FINAL_RESOLVE:
escalation_plan_dict = IncidentLogBuilder._remove_future_plan(esc_timedelta, escalation_plan_dict)
escalation_step_plan_dict = self._render_escalation_step_plan_from_escalation_policy_snapshot(
escalation_plan = IncidentLogBuilder._remove_future_plan(esc_timedelta, escalation_plan)
escalation_step_plan = self._render_escalation_step_plan_from_escalation_policy_snapshot(
escalation_policy_snapshot,
escalation_snapshot,
for_slack=for_slack,
@ -234,45 +281,48 @@ class IncidentLogBuilder:
)
step_timedelta += timezone.timedelta(seconds=5) # make this step the last in plan
for timedelta, plan in escalation_step_plan_dict.items():
for timedelta, plan in escalation_step_plan.items():
timedelta += step_timedelta
escalation_plan_dict.setdefault(timedelta, []).extend(plan)
escalation_plan.setdefault(timedelta, []).extend(plan)
break
# render escalation and notification plan lines for step
escalation_step_plan_dict = self._render_escalation_step_plan_from_escalation_policy_snapshot(
escalation_step_plan = self._render_escalation_step_plan_from_escalation_policy_snapshot(
escalation_policy_snapshot,
escalation_snapshot,
for_slack=for_slack,
future_step=future_step,
esc_timedelta=step_timedelta,
)
escalation_plan_dict = self._correct_users_notification_plan(
escalation_plan_dict, escalation_step_plan_dict, step_timedelta
# NOTE: should probably refactor this? `_correct_users_notification_plan` expects a `NotificationPlan`
# as its second arg, `escalation_step_plan` is of type
escalation_plan = self._correct_users_notification_plan(
escalation_plan, escalation_step_plan, step_timedelta
)
return escalation_plan_dict
return escalation_plan
@staticmethod
def _remove_future_plan(timedelta_to_remove, plan_dict):
def _remove_future_plan(timedelta_to_remove: timezone.timedelta, escalation_plan: EscalationPlan) -> EscalationPlan:
"""
Removes plan with higher timedelta (for events, that will start later, than selected time
(timedelta_to_remove)).
:param timedelta_to_remove:
:param plan_dict:
:return: new plan dict
Removes plan with higher timedelta (for events, that will start later, than selected time (timedelta_to_remove))
"""
new_plan_dict = dict()
for timedelta in sorted(plan_dict):
if timedelta <= timedelta_to_remove:
new_plan_dict[timedelta] = plan_dict[timedelta]
return new_plan_dict
new_plan: EscalationPlan = dict()
def _add_invitation_plan(self, escalation_plan_dict, for_slack=False):
for timedelta in sorted(escalation_plan):
if timedelta <= timedelta_to_remove:
new_plan[timedelta] = escalation_plan[timedelta]
return new_plan
def _add_invitation_plan(
self,
escalation_plan: EscalationPlan,
for_slack: bool = False,
) -> MixedEscalationNotificationPlan:
"""
Adds notification plan for invitation
:param escalation_plan_dict:
:param for_slack:
:return: {timedelta: [{"user_id": user.pk, "plan_lines": [#rendered escalation policy line, ]}, ..., ...], ...}
"""
from apps.alerts.models import Invitation
@ -280,6 +330,7 @@ class IncidentLogBuilder:
for invitation in self.alert_group.invitations.filter(is_active=True):
invitation_timedelta = timezone.timedelta()
current_attempt = invitation.attempt - 1
# generate notification plan for each attempt
for attempt in range(current_attempt, Invitation.ATTEMPTS_LIMIT + 1):
notification_plan = self._get_notification_plan_for_user(
@ -287,99 +338,107 @@ class IncidentLogBuilder:
for_slack=for_slack,
future_step=attempt >= invitation.attempt,
)
escalation_plan_dict = self._correct_users_notification_plan(
escalation_plan_dict, notification_plan, invitation_timedelta
escalation_plan = self._correct_users_notification_plan(
escalation_plan, notification_plan, invitation_timedelta
)
started_timedelta = now - invitation.created_at
invitation_timedelta += Invitation.get_delay_by_attempt(attempt) - started_timedelta
return escalation_plan_dict
def _correct_users_notification_plan(self, escalation_plan_dict, notification_plan_dict, esc_time):
return escalation_plan
def _correct_users_notification_plan(
self,
escalation_plan: EscalationPlan,
notification_plan: NotificationPlan,
esc_time: timezone.timedelta,
) -> MixedEscalationNotificationPlan:
"""
Check if escalation_plan_dict has user notification events with higher timedelta
than timedelta of current step. If it has, remove future notification events for users that
repeatedly notified by current escalation step from current escalation_plan_dict
because their notification chain will start from the beginning.
Check if `escalation_plan` has user notification events with higher `timedelta` than `timedelta` of
current step.
:param escalation_plan_dict:
:param notification_plan_dict:
:param esc_time:
:return:
If it has, remove future notification events for users that repeatedly notified by current escalation step from
current `escalation_plan` because their notification chain will start from the beginning.
"""
future_step_timedelta = None
later_events_exist = False
for timedelta in escalation_plan_dict:
for timedelta in escalation_plan:
if timedelta > esc_time:
later_events_exist = True
break
if later_events_exist:
earliest_events = notification_plan_dict.get(timezone.timedelta(), [])
notification_plans_to_remove = []
for event_dict in earliest_events: # [{"user_id": user.pk, "plan_lines": []}, {"plan_lines": []}]
user_id = event_dict.get("user_id")
if user_id:
notification_plans_to_remove.append(user_id)
new_escalation_policies_dict = {}
for timedelta in sorted(escalation_plan_dict):
if later_events_exist:
earliest_events = notification_plan.get(timezone.timedelta(), [])
notification_plans_to_remove: list[int] = []
for event_dict in earliest_events:
if user_id := event_dict.get("user_id"):
notification_plans_to_remove.append(user_id)
new_escalation_plan: EscalationPlan = {}
for timedelta in sorted(escalation_plan):
# do not add step from escalation plan if its timedelta < 0
if timedelta < timezone.timedelta():
continue
events_list = list()
for event_dict in escalation_plan_dict[timedelta]:
if event_dict.get("is_the_first_notification_step"):
events_list: list[PlanLines] = list()
for plan in escalation_plan[timedelta]:
if plan.get("is_the_first_notification_step"):
if (
future_step_timedelta is None
and timedelta > esc_time
and event_dict.get("user_id") in notification_plans_to_remove
and plan.get("user_id") in notification_plans_to_remove
):
future_step_timedelta = timedelta
if (
timedelta < esc_time
or event_dict.get("user_id") not in notification_plans_to_remove
or plan.get("user_id") not in notification_plans_to_remove
or future_step_timedelta is not None
):
events_list.append(event_dict)
events_list.append(plan)
if len(events_list) > 0:
new_escalation_policies_dict.setdefault(timedelta, []).extend(events_list)
new_escalation_plan.setdefault(timedelta, []).extend(events_list)
escalation_plan_dict = new_escalation_policies_dict
escalation_plan = new_escalation_plan
for timedelta, plan in notification_plan_dict.items():
for timedelta, plan in notification_plan.items():
timedelta = esc_time + timedelta
if future_step_timedelta is None or future_step_timedelta > timedelta:
escalation_plan_dict.setdefault(timedelta, []).extend(plan)
escalation_plan.setdefault(timedelta, []).extend(plan)
return escalation_plan_dict
return escalation_plan
def _finalize_escalation_plan_dict(self, escalation_dict):
def _finalize_escalation_plan(self, escalation_plan: MixedEscalationNotificationPlan) -> FlattendEscalationPlan:
"""
It changes escalation dict structure
from {timedelta: [{"user_id": user.pk, "plan_lines": []}, {"plan_lines": []}]}
to {timedelta: [all plan lines for this timedelta]}
:param escalation_dict:
:return:
It transforms `escalation_plan` from `MixedEscalationNotificationPlan` to `FlattendEscalationPlan`
"""
final_escalation_dict = dict()
for timedelta in escalation_dict:
plan_lines_list = list()
for event_dict in escalation_dict[timedelta]:
plan_lines_list.extend(event_dict["plan_lines"])
if len(plan_lines_list) > 0:
final_escalation_plan: FlattendEscalationPlan = dict()
for timedelta in escalation_plan:
flattened_plan_lines: list[str] = list()
for plan_lines in escalation_plan[timedelta]:
flattened_plan_lines.extend(plan_lines["plan_lines"])
if len(flattened_plan_lines) > 0:
timedelta = timedelta if timedelta > timezone.timedelta() else timezone.timedelta()
final_escalation_dict.setdefault(timedelta, []).extend(plan_lines_list)
return final_escalation_dict
final_escalation_plan.setdefault(timedelta, []).extend(flattened_plan_lines)
return final_escalation_plan
def _render_escalation_step_plan_from_escalation_policy_snapshot(
self,
escalation_policy_snapshot,
escalation_snapshot,
for_slack=False,
future_step=False,
esc_timedelta=None,
):
escalation_policy_snapshot: "EscalationPolicySnapshot",
escalation_snapshot: "EscalationSnapshot",
for_slack: bool = False,
future_step: bool = False,
esc_timedelta: typing.Optional[timezone.timedelta] = None,
) -> EscalationPlan:
"""
Renders escalation and notification policies plan dict.
@ -388,112 +447,128 @@ class IncidentLogBuilder:
:param for_slack: (bool) add or not user slack id to user notification plan line
:param future_step: (bool) step not passed yet
:param esc_timedelta: timedelta of escalation step
:return: dict with timedelta as a key and list with escalation and notification plan lines as a value
"""
from apps.alerts.models import EscalationPolicy
escalation_plan_dict = {}
escalation_plan: EscalationPlan = {}
timedelta = timezone.timedelta()
if escalation_policy_snapshot.step in [
EscalationPolicy.STEP_NOTIFY_MULTIPLE_USERS,
EscalationPolicy.STEP_NOTIFY_MULTIPLE_USERS_IMPORTANT,
EscalationPolicy.STEP_NOTIFY_USERS_QUEUE,
]:
users_to_notify = escalation_policy_snapshot.sorted_users_queue
users_to_notify: UsersToNotify = escalation_policy_snapshot.sorted_users_queue
if future_step:
if users_to_notify:
plan_line = f'escalation step "{escalation_policy_snapshot.step_display}"'
if escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_USERS_QUEUE:
try:
last_user_index = users_to_notify.index(escalation_policy_snapshot.last_notified_user)
except ValueError:
last_user_index = -1
user_to_notify = users_to_notify[(last_user_index + 1) % len(users_to_notify)]
users_to_notify = [user_to_notify]
else:
plan_line = (
f'escalation step "{escalation_policy_snapshot.step_display}" with no recipients. ' f"Skipping"
)
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
escalation_plan.setdefault(timedelta, []).append({"plan_lines": [plan_line]})
elif escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_USERS_QUEUE:
last_notified_user = escalation_policy_snapshot.last_notified_user
users_to_notify = [last_notified_user] if last_notified_user else []
for user_to_notify in users_to_notify:
notification_plan_dict = self._get_notification_plan_for_user(
notification_plan = self._get_notification_plan_for_user(
user_to_notify,
important=escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_MULTIPLE_USERS_IMPORTANT,
for_slack=for_slack,
future_step=future_step,
)
# notification_plan_dict structure - {timedelta: [{"user_id": user.pk, "plan_lines": []}]
for timedelta, notification_plan in notification_plan_dict.items():
escalation_plan_dict.setdefault(timedelta, []).extend(notification_plan)
for timedelta, plan in notification_plan.items():
escalation_plan.setdefault(timedelta, []).extend(plan)
elif escalation_policy_snapshot.step == EscalationPolicy.STEP_FINAL_NOTIFYALL:
channel_id = escalation_snapshot.slack_channel_id
users_to_notify = []
final_notify_all_users_to_notify: UsersToNotify = []
if future_step:
if self.alert_group.is_presented_in_slack and channel_id:
plan_line = f'escalation step "{escalation_policy_snapshot.step_display}"'
slack_team_identity = self.alert_group.slack_message.slack_team_identity
users_to_notify = slack_team_identity.get_users_from_slack_conversation_for_organization(
channel_id=channel_id,
organization=self.alert_group.channel.organization,
# "technically" `slack_message` could be `None` here, but because of the conditional check above
# it's safe to cast this to `SlackTeamIdentity`
slack_team_identity: SlackTeamIdentity = self.alert_group.slack_message.slack_team_identity
final_notify_all_users_to_notify = (
slack_team_identity.get_users_from_slack_conversation_for_organization(
channel_id=channel_id,
organization=self.alert_group.channel.organization,
)
)
else:
plan_line = (
f'escalation step "{escalation_policy_snapshot.step_display}" is slack specific. ' f"Skipping"
)
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
else:
users_to_notify = escalation_policy_snapshot.notify_to_users_queue
for user_to_notify in users_to_notify:
notification_plan_dict = self._get_notification_plan_for_user(
escalation_plan.setdefault(timedelta, []).append({"plan_lines": [plan_line]})
else:
final_notify_all_users_to_notify = escalation_policy_snapshot.notify_to_users_queue
for user_to_notify in final_notify_all_users_to_notify:
notification_plan = self._get_notification_plan_for_user(
user_to_notify,
important=escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_IMPORTANT,
for_slack=for_slack,
future_step=future_step,
)
# notification_plan_dict structure - {timedelta: [{"user_id": user.pk, "plan_lines": []}]
for timedelta, notification_plan in notification_plan_dict.items():
escalation_plan_dict.setdefault(timedelta, []).extend(notification_plan)
for timedelta, plan in notification_plan.items():
escalation_plan.setdefault(timedelta, []).extend(plan)
elif escalation_policy_snapshot.step == EscalationPolicy.STEP_FINAL_RESOLVE:
if future_step:
plan_line = "resolve automatically"
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
escalation_plan.setdefault(timedelta, []).append({"plan_lines": ["resolve automatically"]})
elif escalation_policy_snapshot.step == EscalationPolicy.STEP_REPEAT_ESCALATION_N_TIMES:
if future_step:
escalation_counter = escalation_policy_snapshot.escalation_counter
repeat_times = EscalationPolicy.MAX_TIMES_REPEAT - escalation_counter
if repeat_times > 0:
plan_line = f"repeat escalation from the beginning ({repeat_times} times)"
else:
plan_line = 'skip step "Repeat Escalation"'
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
escalation_plan.setdefault(timedelta, []).append({"plan_lines": [plan_line]})
elif escalation_policy_snapshot.step in [
EscalationPolicy.STEP_NOTIFY_GROUP,
EscalationPolicy.STEP_NOTIFY_GROUP_IMPORTANT,
]:
users_to_notify = []
notify_group_users_to_notify: UsersToNotify = []
if future_step:
if self.alert_group.is_presented_in_slack:
user_group = escalation_policy_snapshot.notify_to_group
if user_group is not None:
users_to_notify = user_group.get_users_from_members_for_organization(
notify_group_users_to_notify = user_group.get_users_from_members_for_organization(
self.alert_group.channel.organization
)
user_group_handle = user_group.handle
important_text = ""
if escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_GROUP_IMPORTANT:
important_text = " (Important)"
plan_line = f'escalation step "Notify @{user_group_handle} User Group{important_text}"'
else:
plan_line = (
@ -504,58 +579,65 @@ class IncidentLogBuilder:
plan_line = (
f'escalation step "{escalation_policy_snapshot.step_display}" is slack specific. Skipping'
)
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
else:
users_to_notify = escalation_policy_snapshot.notify_to_users_queue
for user_to_notify in users_to_notify:
notification_plan_dict = self._get_notification_plan_for_user(
escalation_plan.setdefault(timedelta, []).append({"plan_lines": [plan_line]})
else:
notify_group_users_to_notify = escalation_policy_snapshot.notify_to_users_queue
for user_to_notify in notify_group_users_to_notify:
notification_plan = self._get_notification_plan_for_user(
user_to_notify,
important=escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_GROUP_IMPORTANT,
for_slack=for_slack,
future_step=future_step,
)
for timedelta, notification_plan in notification_plan_dict.items():
escalation_plan_dict.setdefault(timedelta, []).extend(notification_plan)
for timedelta, plan in notification_plan.items():
escalation_plan.setdefault(timedelta, []).extend(plan)
elif escalation_policy_snapshot.step in [
EscalationPolicy.STEP_NOTIFY_SCHEDULE,
EscalationPolicy.STEP_NOTIFY_SCHEDULE_IMPORTANT,
]:
schedule = escalation_policy_snapshot.notify_schedule
users_oncall = []
users_oncall: UsersToNotify = []
if future_step:
if schedule is not None:
step_datetime = timezone.now() + esc_timedelta
users_oncall = list_users_to_notify_from_ical(schedule, step_datetime)
important_text = ""
if escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_SCHEDULE_IMPORTANT:
important_text = " (Important)"
plan_line = f"escalation step \"Notify on-call from Schedule '{schedule.name}'{important_text}\""
if users_oncall is None:
plan_line += ", but iCal import was failed. Skipping"
elif len(users_oncall) == 0:
plan_line += ", but there are no users to notify for this schedule slot. Skipping"
else:
plan_line = (
f'escalation step "{escalation_policy_snapshot.step_display}", but schedule is '
f"unspecified. Skipping"
)
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
escalation_plan.setdefault(timedelta, []).append({"plan_lines": [plan_line]})
else:
users_oncall = escalation_policy_snapshot.notify_to_users_queue
for user_to_notify in users_oncall:
notification_plan_dict = self._get_notification_plan_for_user(
notification_plan = self._get_notification_plan_for_user(
user_to_notify,
for_slack=for_slack,
important=escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_SCHEDULE_IMPORTANT,
future_step=future_step,
)
# notification_plan_dict structure - {timedelta: [{"user_id": user.pk, "plan_lines": []}]
for timedelta, notification_plan in notification_plan_dict.items():
escalation_plan_dict.setdefault(timedelta, []).extend(notification_plan)
for timedelta, plan in notification_plan.items():
escalation_plan.setdefault(timedelta, []).extend(plan)
elif escalation_policy_snapshot.step == EscalationPolicy.STEP_TRIGGER_CUSTOM_WEBHOOK:
if future_step:
custom_webhook = escalation_policy_snapshot.custom_webhook
@ -563,32 +645,34 @@ class IncidentLogBuilder:
plan_line = f'trigger outgoing webhook "{custom_webhook.name}"'
else:
plan_line = f'escalation step "{escalation_policy_snapshot.step_display}" but outgoing webhook is unspecified. Skipping'
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
escalation_plan.setdefault(timedelta, []).append({"plan_lines": [plan_line]})
elif escalation_policy_snapshot.step == EscalationPolicy.STEP_NOTIFY_IF_TIME:
if future_step:
if escalation_policy_snapshot.from_time is not None and escalation_policy_snapshot.to_time is not None:
plan_line = 'escalation step "Continue escalation if time"'
else:
plan_line = 'escalation step "Continue escalation if time", but time is not configured. Skipping'
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
escalation_plan.setdefault(timedelta, []).append({"plan_lines": [plan_line]})
elif escalation_policy_snapshot.step is None:
if future_step:
plan_line = "escalation step is unspecified. Skipping"
plan = {"plan_lines": [plan_line]}
escalation_plan_dict.setdefault(timedelta, []).append(plan)
return escalation_plan_dict
escalation_plan.setdefault(timedelta, []).append(
{"plan_lines": ["escalation step is unspecified. Skipping"]}
)
return escalation_plan
def _render_user_notification_line(
self, user_to_notify: "User", notification_policy: "UserNotificationPolicy", for_slack=False
):
self,
user_to_notify: "User",
notification_policy: "UserNotificationPolicy",
for_slack: bool = False,
) -> str:
"""
Renders user notification plan line
:param user_to_notify:
:param notification_policy:
:param for_slack: (bool) add or not user slack id to user notification plan line
:return: plan line
"""
from apps.base.models import UserNotificationPolicy
@ -616,15 +700,14 @@ class IncidentLogBuilder:
return result
def _get_notification_plan_for_user(
self, user_to_notify: "User", future_step=False, important=False, for_slack=False
):
self,
user_to_notify: "User",
future_step: bool = False,
important: bool = False,
for_slack: bool = False,
) -> NotificationPlan:
"""
Renders user notification plan
:param user_to_notify:
:param future_step:
:param important:
:param for_slack: (bool) add or not user slack id to user notification plan line
:return: {timedelta: [{"user_id": user.pk, "plan_lines": [#rendered notification policy line, ]}, ...], ...}
"""
from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord
@ -632,12 +715,15 @@ class IncidentLogBuilder:
is_the_first_notification_step = future_step # escalation starts with this step or not
# generate starter dict for notification plan
plan_lines_dict = {
"user_id": user_to_notify.pk,
"plan_lines": [],
"is_the_first_notification_step": is_the_first_notification_step,
notification_plan: NotificationPlan = {
timedelta: [
{
"user_id": user_to_notify.pk,
"plan_lines": [],
"is_the_first_notification_step": is_the_first_notification_step,
},
],
}
notification_plan_dict = {timedelta: [plan_lines_dict]}
last_user_log = None
@ -650,10 +736,12 @@ class IncidentLogBuilder:
)
.distinct()
)
# get lists of notification policies with scheduled but not triggered bundled notifications
# and of all notification policies with bundled notifications
notification_policy_ids_in_scheduled_bundle: typing.Set[int] = set()
notification_policy_ids_in_bundle: typing.Set[int] = set()
for notification_policy_in_bundle in notification_policies_in_bundle:
if notification_policy_in_bundle["bundle_uuid"] is None:
notification_policy_ids_in_scheduled_bundle.add(notification_policy_in_bundle["notification_policy"])
@ -683,6 +771,7 @@ class IncidentLogBuilder:
if last_user_log.notification_step is not None
else last_user_log.notification_policy.step
)
# get order of the next notification step
if notification_step == UserNotificationPolicy.Step.WAIT:
# do not exclude wait step, because we need it to count timedelta
@ -701,10 +790,11 @@ class IncidentLogBuilder:
notification_policy.order >= notification_policy_order
and notification_policy.id not in notification_policy_ids_in_bundle
)
if notification_policy.step == UserNotificationPolicy.Step.WAIT:
wait_delay = notification_policy.wait_delay
if wait_delay is not None:
if (wait_delay := notification_policy.wait_delay) is not None:
timedelta += wait_delay # increase timedelta for next steps
elif future_notification or is_scheduled_bundled_notification:
notification_timedelta = (
timedelta + timezone.timedelta(seconds=BUNDLED_NOTIFICATION_DELAY_SECONDS)
@ -714,10 +804,12 @@ class IncidentLogBuilder:
plan_line = self._render_user_notification_line(
user_to_notify, notification_policy, for_slack=for_slack
)
# add plan_line to user plan_lines list
if not notification_plan_dict.get(notification_timedelta):
if not notification_plan.get(notification_timedelta):
plan = {"user_id": user_to_notify.pk, "plan_lines": [plan_line]}
notification_plan_dict.setdefault(notification_timedelta, []).append(plan)
notification_plan.setdefault(notification_timedelta, []).append(plan)
else:
notification_plan_dict[notification_timedelta][0]["plan_lines"].append(plan_line)
return notification_plan_dict
notification_plan[notification_timedelta][0]["plan_lines"].append(plan_line)
return notification_plan

View file

@ -0,0 +1,36 @@
# Generated by Django 4.2.16 on 2024-11-01 11:20
from django.db import migrations, models
import django.db.models.deletion
import django_migration_linter as linter
class Migration(migrations.Migration):
    """Schema migration: begin converting char-based Slack channel IDs to FKs.

    Renames ``slack_channel_id`` to ``_slack_channel_id`` on both
    ``ChannelFilter`` and ``ResolutionNoteSlackMessage`` (the old column is kept
    temporarily so follow-up data migrations can read it), and adds the new
    nullable ``slack_channel`` foreign key to ``slack.SlackChannel`` on each
    model. ``SET_NULL`` keeps rows alive if the referenced channel is deleted.
    """

    dependencies = [
        ('slack', '0005_slackteamidentity__unified_slack_app_installed'),
        ('alerts', '0061_alter_alertgroup_resolved_by_alert'),
    ]

    operations = [
        # django-migration-linter would flag the rename as backward-incompatible;
        # it is intentional and coordinated with the data migrations that follow.
        linter.IgnoreMigration(),
        migrations.RenameField(
            model_name='channelfilter',
            old_name='slack_channel_id',
            new_name='_slack_channel_id',
        ),
        migrations.RenameField(
            model_name='resolutionnoteslackmessage',
            old_name='slack_channel_id',
            new_name='_slack_channel_id',
        ),
        migrations.AddField(
            model_name='channelfilter',
            name='slack_channel',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='slack.slackchannel'),
        ),
        migrations.AddField(
            model_name='resolutionnoteslackmessage',
            name='slack_channel',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='slack.slackchannel'),
        ),
    ]

View file

@ -0,0 +1,68 @@
# Generated by Django 4.2.16 on 2024-11-01 10:58
import logging
from django.db import migrations
import django_migration_linter as linter
logger = logging.getLogger(__name__)
def populate_slack_channel(apps, schema_editor):
    """Backfill ``ChannelFilter.slack_channel`` from the legacy ``_slack_channel_id``.

    For every channel filter that still carries a Slack channel ID string and
    whose organization has a Slack workspace connected, resolve the matching
    ``SlackChannel`` row and point the new foreign key at it. Rows whose
    ``SlackChannel`` cannot be found are logged and left untouched.

    :param apps: historical app registry supplied by ``RunPython``
    :param schema_editor: unused, required by the ``RunPython`` signature
    """
    ChannelFilter = apps.get_model("alerts", "ChannelFilter")
    SlackChannel = apps.get_model("slack", "SlackChannel")

    logger.info("Starting migration to populate slack_channel field.")

    # select_related avoids one extra query per row when walking
    # channel_filter -> alert_receive_channel -> organization -> slack_team_identity below
    queryset = ChannelFilter.objects.filter(
        _slack_channel_id__isnull=False,
        alert_receive_channel__organization__slack_team_identity__isnull=False,
    ).select_related("alert_receive_channel__organization__slack_team_identity")

    total_channel_filters = queryset.count()
    updated_channel_filters = 0
    missing_channel_filters = 0
    channel_filters_to_update = []

    logger.info(f"Total channel filters to process: {total_channel_filters}")

    for channel_filter in queryset:
        slack_id = channel_filter._slack_channel_id
        slack_team_identity = channel_filter.alert_receive_channel.organization.slack_team_identity

        try:
            slack_channel = SlackChannel.objects.get(slack_id=slack_id, slack_team_identity=slack_team_identity)
            channel_filter.slack_channel = slack_channel
            channel_filters_to_update.append(channel_filter)
            updated_channel_filters += 1
            logger.info(
                f"ChannelFilter {channel_filter.id} updated with SlackChannel {slack_channel.id} "
                f"(slack_id: {slack_id})."
            )
        except SlackChannel.DoesNotExist:
            # Channel was deleted (or never synced) in Slack; keep the legacy
            # char column so nothing is lost, just skip the FK.
            missing_channel_filters += 1
            logger.warning(
                f"SlackChannel with slack_id {slack_id} and slack_team_identity {slack_team_identity} "
                f"does not exist for ChannelFilter {channel_filter.id}."
            )

    if channel_filters_to_update:
        # batch_size keeps the UPDATE statements bounded on large installations
        ChannelFilter.objects.bulk_update(channel_filters_to_update, ["slack_channel"], batch_size=1000)
        logger.info(f"Bulk updated {len(channel_filters_to_update)} ChannelFilters with their Slack channel.")

    logger.info(
        f"Finished migration. Total channel filters processed: {total_channel_filters}. "
        f"Channel filters updated: {updated_channel_filters}. Missing SlackChannels: {missing_channel_filters}."
    )
class Migration(migrations.Migration):
    """Data migration: populate ``ChannelFilter.slack_channel`` from the legacy column."""

    dependencies = [
        ('alerts', '0062_rename_slack_channel_id_channelfilter__slack_channel_id_and_more'),
    ]

    operations = [
        # Forward pass only sets the new FK; the legacy _slack_channel_id value
        # is never deleted, so the reverse operation can safely be a no-op.
        linter.IgnoreMigration(),
        migrations.RunPython(populate_slack_channel, migrations.RunPython.noop),
    ]

View file

@ -0,0 +1,73 @@
# Generated by Django 4.2.16 on 2024-11-01 10:58
import logging
from django.db import migrations
import django_migration_linter as linter
logger = logging.getLogger(__name__)
def populate_slack_channel(apps, schema_editor):
    """Backfill ``ResolutionNoteSlackMessage.slack_channel`` from ``_slack_channel_id``.

    For every resolution note Slack message that still carries a Slack channel
    ID string and whose organization has a Slack workspace connected, resolve
    the matching ``SlackChannel`` row and point the new foreign key at it. Rows
    whose ``SlackChannel`` cannot be found are logged and left untouched.

    :param apps: historical app registry supplied by ``RunPython``
    :param schema_editor: unused, required by the ``RunPython`` signature
    """
    ResolutionNoteSlackMessage = apps.get_model("alerts", "ResolutionNoteSlackMessage")
    SlackChannel = apps.get_model("slack", "SlackChannel")

    logger.info("Starting migration to populate slack_channel field.")

    # select_related avoids one extra query per row when walking
    # resolution_note -> alert_group -> channel -> organization -> slack_team_identity below
    queryset = ResolutionNoteSlackMessage.objects.filter(
        _slack_channel_id__isnull=False,
        alert_group__channel__organization__slack_team_identity__isnull=False,
    ).select_related("alert_group__channel__organization__slack_team_identity")

    total_resolution_notes = queryset.count()
    updated_resolution_notes = 0
    missing_resolution_notes = 0
    resolution_notes_to_update = []

    logger.info(f"Total resolution note slack messages to process: {total_resolution_notes}")

    for resolution_note in queryset:
        slack_id = resolution_note._slack_channel_id
        slack_team_identity = resolution_note.alert_group.channel.organization.slack_team_identity

        try:
            slack_channel = SlackChannel.objects.get(slack_id=slack_id, slack_team_identity=slack_team_identity)
            resolution_note.slack_channel = slack_channel
            resolution_notes_to_update.append(resolution_note)
            updated_resolution_notes += 1
            logger.info(
                f"ResolutionNoteSlackMessage {resolution_note.id} updated with SlackChannel {slack_channel.id} "
                f"(slack_id: {slack_id})."
            )
        except SlackChannel.DoesNotExist:
            # Channel was deleted (or never synced) in Slack; keep the legacy
            # char column so nothing is lost, just skip the FK.
            missing_resolution_notes += 1
            logger.warning(
                f"SlackChannel with slack_id {slack_id} and slack_team_identity {slack_team_identity} "
                f"does not exist for ResolutionNoteSlackMessage {resolution_note.id}."
            )

    if resolution_notes_to_update:
        # batch_size keeps the UPDATE statements bounded on large installations
        ResolutionNoteSlackMessage.objects.bulk_update(resolution_notes_to_update, ["slack_channel"], batch_size=1000)
        logger.info(
            f"Bulk updated {len(resolution_notes_to_update)} ResolutionNoteSlackMessage with their Slack channel."
        )

    logger.info(
        f"Finished migration. Total resolution note slack messages processed: {total_resolution_notes}. "
        f"Resolution note slack messages updated: {updated_resolution_notes}. "
        f"Missing SlackChannels: {missing_resolution_notes}."
    )
class Migration(migrations.Migration):
    """Data migration: populate ``ResolutionNoteSlackMessage.slack_channel`` from the legacy column."""

    dependencies = [
        ('alerts', '0063_migrate_channelfilter_slack_channel_id'),
    ]

    operations = [
        # Forward pass only sets the new FK; the legacy _slack_channel_id value
        # is never deleted, so the reverse operation can safely be a no-op.
        linter.IgnoreMigration(),
        migrations.RunPython(populate_slack_channel, migrations.RunPython.noop),
    ]

View file

@ -43,6 +43,7 @@ if typing.TYPE_CHECKING:
AlertGroupLogRecord,
AlertReceiveChannel,
BundledNotification,
Invitation,
RelatedIncident,
ResolutionNote,
ResolutionNoteSlackMessage,
@ -193,11 +194,13 @@ class AlertGroup(AlertGroupSlackRenderingMixin, EscalationSnapshotMixin, models.
acknowledged_by_user: typing.Optional["User"]
alerts: "RelatedManager['Alert']"
bundled_notifications: "RelatedManager['BundledNotification']"
related_incidents: "RelatedManager['RelatedIncident']"
dependent_alert_groups: "RelatedManager['AlertGroup']"
channel: "AlertReceiveChannel"
dependent_alert_groups: "RelatedManager['AlertGroup']"
invitations: "RelatedManager['Invitation']"
labels: "RelatedManager['AlertGroupAssociatedLabel']"
log_records: "RelatedManager['AlertGroupLogRecord']"
personal_log_records: "RelatedManager['UserNotificationPolicyLogRecord']"
related_incidents: "RelatedManager['RelatedIncident']"
resolution_notes: "RelatedManager['ResolutionNote']"
resolution_note_slack_messages: "RelatedManager['ResolutionNoteSlackMessage']"
resolved_by_user: typing.Optional["User"]
@ -205,7 +208,6 @@ class AlertGroup(AlertGroupSlackRenderingMixin, EscalationSnapshotMixin, models.
silenced_by_user: typing.Optional["User"]
slack_messages: "RelatedManager['SlackMessage']"
users: "RelatedManager['User']"
labels: "RelatedManager['AlertGroupAssociatedLabel']"
objects: models.Manager["AlertGroup"] = AlertGroupQuerySet.as_manager()
@ -1980,13 +1982,10 @@ class AlertGroup(AlertGroupSlackRenderingMixin, EscalationSnapshotMixin, models.
def slack_channel_id(self) -> str | None:
if not self.channel.organization.slack_team_identity:
return None
if self.slack_message:
elif self.slack_message:
return self.slack_message.channel_id
if self.channel_filter:
return self.channel_filter.slack_channel_id_or_general_log_id
elif self.channel_filter:
return self.channel_filter.slack_channel_id_or_org_default_id
return None
@property

View file

@ -621,7 +621,7 @@ class AlertReceiveChannel(IntegrationOptionsMixin, MaintainableObject):
# TODO: this method should be refactored.
# It's binded to slack and sending maintenance notification only there.
channel_ids = list(
self.channel_filters.filter(slack_channel_id__isnull=False, notify_in_slack=False).values_list(
self.channel_filters.filter(slack_channel__isnull=False, notify_in_slack=False).values_list(
"slack_channel_id", flat=True
)
)

View file

@ -21,6 +21,7 @@ if typing.TYPE_CHECKING:
from apps.alerts.models import Alert, AlertGroup, AlertReceiveChannel
from apps.labels.types import AlertLabels, LabelPair
from apps.slack.models import SlackChannel
logger = logging.getLogger(__name__)
@ -47,6 +48,7 @@ class ChannelFilter(OrderedModel):
alert_groups: "RelatedManager['AlertGroup']"
alert_receive_channel: "AlertReceiveChannel"
filtering_labels: typing.Optional[list["LabelPair"]]
slack_channel: typing.Optional["SlackChannel"]
order_with_respect_to = ["alert_receive_channel_id", "is_default"]
@ -68,15 +70,15 @@ class ChannelFilter(OrderedModel):
notify_in_slack = models.BooleanField(null=True, default=True)
notify_in_telegram = models.BooleanField(null=True, default=False)
slack_channel_id = models.CharField(max_length=100, null=True, default=None)
# TODO: migrate slack_channel_id to slack_channel
# slack_channel = models.ForeignKey(
# 'slack.SlackChannel',
# null=True,
# default=None,
# on_delete=models.SET_NULL,
# related_name='+',
# )
# TODO: remove _slack_channel_id in future release
_slack_channel_id = models.CharField(max_length=100, null=True, default=None)
slack_channel = models.ForeignKey(
"slack.SlackChannel",
null=True,
default=None,
on_delete=models.SET_NULL,
related_name="+",
)
telegram_channel = models.ForeignKey(
"telegram.TelegramToOrganizationConnector",
@ -170,15 +172,18 @@ class ChannelFilter(OrderedModel):
return False
@property
def slack_channel_id_or_general_log_id(self):
def slack_channel_slack_id(self) -> typing.Optional[str]:
return self.slack_channel.slack_id if self.slack_channel else None
@property
def slack_channel_id_or_org_default_id(self):
organization = self.alert_receive_channel.organization
slack_team_identity = organization.slack_team_identity
if slack_team_identity is None:
if organization.slack_team_identity is None:
return None
if self.slack_channel_id is None:
elif self.slack_channel_slack_id is None:
return organization.default_slack_channel_slack_id
else:
return self.slack_channel_id
return self.slack_channel_slack_id
@property
def str_for_clients(self):
@ -212,17 +217,10 @@ class ChannelFilter(OrderedModel):
"slack_notification_enabled": self.notify_in_slack,
"telegram_notification_enabled": self.notify_in_telegram,
}
if self.slack_channel_id:
if self.slack_channel_id:
from apps.slack.models import SlackChannel
sti = self.alert_receive_channel.organization.slack_team_identity
slack_channel = SlackChannel.objects.filter(
slack_team_identity=sti, slack_id=self.slack_channel_id
).first()
if slack_channel is not None:
# Case when slack channel was deleted, but channel filter still has it's id
result["slack_channel"] = slack_channel.name
if self.slack_channel:
result["slack_channel"] = self.slack_channel.name
# TODO: use names instead of pks for telegram and other notifications backends.
# It's needed to rework messaging backends for that
if self.telegram_channel:

View file

@ -1,11 +1,17 @@
import datetime
import logging
import typing
from functools import partial
from django.db import models, transaction
from apps.alerts import tasks
if typing.TYPE_CHECKING:
from apps.alerts.models import AlertGroup
from apps.user_management.models import User
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
@ -15,6 +21,10 @@ class Invitation(models.Model):
It's an invitation of a user to join working on Alert Group
"""
alert_group: "AlertGroup"
author: typing.Optional["User"]
invitee: typing.Optional["User"]
ATTEMPTS_LIMIT = 10
time_deltas_by_attempts = [
@ -45,18 +55,18 @@ class Invitation(models.Model):
attempt = models.IntegerField(default=0)
@property
def attempts_left(self):
def attempts_left(self) -> int:
return Invitation.ATTEMPTS_LIMIT - self.attempt
@staticmethod
def get_delay_by_attempt(attempt):
def get_delay_by_attempt(attempt: int) -> datetime.timedelta:
countdown = Invitation.time_deltas_by_attempts[-1]
if attempt < len(Invitation.time_deltas_by_attempts):
countdown = Invitation.time_deltas_by_attempts[attempt]
return countdown
@staticmethod
def invite_user(invitee_user, alert_group, user):
def invite_user(invitee_user: "User", alert_group: "AlertGroup", user: "User") -> None:
from apps.alerts.models import AlertGroupLogRecord
# RFCT - why atomic? without select for update?
@ -97,7 +107,7 @@ class Invitation(models.Model):
transaction.on_commit(partial(tasks.invite_user_to_join_incident.delay, invitation.pk))
@staticmethod
def stop_invitation(invitation_pk, user):
def stop_invitation(invitation_pk: int, user: "User") -> None:
from apps.alerts.models import AlertGroupLogRecord
with transaction.atomic():

View file

@ -13,6 +13,7 @@ from common.utils import clean_markup
if typing.TYPE_CHECKING:
from apps.alerts.models import AlertGroup
from apps.slack.models import SlackChannel
def generate_public_primary_key_for_alert_group_postmortem():
@ -54,6 +55,7 @@ class ResolutionNoteSlackMessageQueryset(models.QuerySet):
class ResolutionNoteSlackMessage(models.Model):
alert_group: "AlertGroup"
resolution_note: typing.Optional["ResolutionNote"]
slack_channel: typing.Optional["SlackChannel"]
alert_group = models.ForeignKey(
"alerts.AlertGroup",
@ -74,15 +76,15 @@ class ResolutionNoteSlackMessage(models.Model):
)
text = models.TextField(max_length=3000, default=None, null=True)
slack_channel_id = models.CharField(max_length=100, null=True, default=None)
# TODO: migrate slack_channel_id to slack_channel
# slack_channel = models.ForeignKey(
# 'slack.SlackChannel',
# null=True,
# default=None,
# on_delete=models.SET_NULL,
# related_name='+',
# )
# TODO: remove _slack_channel_id in future release
_slack_channel_id = models.CharField(max_length=100, null=True, default=None)
slack_channel = models.ForeignKey(
"slack.SlackChannel",
null=True,
default=None,
on_delete=models.SET_NULL,
related_name="+",
)
ts = models.CharField(max_length=100, null=True, default=None)
thread_ts = models.CharField(max_length=100, null=True, default=None)
@ -98,6 +100,10 @@ class ResolutionNoteSlackMessage(models.Model):
models.Index(fields=["ts", "thread_ts", "slack_channel_id"]),
]
@property
def slack_channel_slack_id(self) -> typing.Optional[str]:
return self.slack_channel.slack_id if self.slack_channel else None
def get_resolution_note(self) -> typing.Optional["ResolutionNote"]:
try:
return self.resolution_note

View file

@ -1,4 +1,5 @@
import logging
import typing
from django.conf import settings
@ -11,6 +12,9 @@ from common.incident_api.client import (
IncidentAPIException,
)
if typing.TYPE_CHECKING:
from apps.alerts.models import AlertGroup, EscalationPolicy
logger = logging.getLogger(__name__)
ATTACHMENT_CAPTION = "OnCall Alert Group"
@ -19,7 +23,13 @@ MAX_RETRIES = 1 if settings.DEBUG else 10
MAX_ATTACHED_ALERT_GROUPS_PER_INCIDENT = 5
def _attach_alert_group_to_incident(alert_group, incident_id, incident_title, escalation_policy, attached=False):
def _attach_alert_group_to_incident(
alert_group: "AlertGroup",
incident_id: str,
incident_title: str,
escalation_policy: "EscalationPolicy",
attached: bool = False,
) -> None:
from apps.alerts.models import AlertGroupLogRecord, EscalationPolicy, RelatedIncident
declared_incident, _ = RelatedIncident.objects.get_or_create(
@ -41,7 +51,9 @@ def _attach_alert_group_to_incident(alert_group, incident_id, incident_title, es
)
def _create_error_log_record(alert_group, escalation_policy, reason=""):
def _create_error_log_record(
alert_group: "AlertGroup", escalation_policy: "EscalationPolicy", reason: str = ""
) -> None:
from apps.alerts.models import AlertGroupLogRecord, EscalationPolicy
AlertGroupLogRecord.objects.create(
@ -55,7 +67,7 @@ def _create_error_log_record(alert_group, escalation_policy, reason=""):
@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), retry_backoff=True, max_retries=MAX_RETRIES)
def declare_incident(alert_group_pk, escalation_policy_pk, severity=None):
def declare_incident(alert_group_pk: int, escalation_policy_pk: int, severity: typing.Optional[str] = None) -> None:
from apps.alerts.models import AlertGroup, EscalationPolicy, RelatedIncident
alert_group = AlertGroup.objects.get(pk=alert_group_pk)

View file

@ -1,3 +1,5 @@
import typing
from django.conf import settings
from apps.slack.scenarios import scenario_step
@ -11,7 +13,7 @@ from .task_logger import task_logger
@shared_dedicated_queue_retry_task(
autoretry_for=(Exception,), retry_backoff=True, max_retries=1 if settings.DEBUG else None
)
def notify_group_task(alert_group_pk, escalation_policy_snapshot_order=None):
def notify_group_task(alert_group_pk: int, escalation_policy_snapshot_order: typing.Optional[int] = None):
from apps.alerts.models import AlertGroup, AlertGroupLogRecord, EscalationPolicy
from apps.base.models import UserNotificationPolicy

View file

@ -88,12 +88,15 @@ def test_delete(
make_alert_receive_channel,
make_alert_group,
make_alert,
make_slack_channel,
make_slack_message,
make_resolution_note_slack_message,
django_capture_on_commit_callbacks,
):
"""test alert group deleting"""
organization, slack_team_identity = make_organization_with_slack_team_identity()
slack_channel1 = make_slack_channel(slack_team_identity)
slack_channel2 = make_slack_channel(slack_team_identity)
user = make_user(organization=organization)
alert_receive_channel = make_alert_receive_channel(organization)
@ -108,7 +111,7 @@ def test_delete(
user=user,
added_by_user=user,
posted_by_bot=True,
slack_channel_id="test1_channel_id",
slack_channel=slack_channel1,
ts="test1_ts",
)
resolution_note_2 = make_resolution_note_slack_message(
@ -116,7 +119,7 @@ def test_delete(
user=user,
added_by_user=user,
added_to_resolution_note=True,
slack_channel_id="test2_channel_id",
slack_channel=slack_channel2,
ts="test2_ts",
)
@ -168,11 +171,15 @@ def test_delete_slack_ratelimit(
make_alert_receive_channel,
make_alert_group,
make_alert,
make_slack_channel,
make_slack_message,
make_resolution_note_slack_message,
django_capture_on_commit_callbacks,
):
organization, slack_team_identity = make_organization_with_slack_team_identity()
slack_channel1 = make_slack_channel(slack_team_identity)
slack_channel2 = make_slack_channel(slack_team_identity)
user = make_user(organization=organization)
alert_receive_channel = make_alert_receive_channel(organization)
@ -187,7 +194,7 @@ def test_delete_slack_ratelimit(
user=user,
added_by_user=user,
posted_by_bot=True,
slack_channel_id="test1_channel_id",
slack_channel=slack_channel1,
ts="test1_ts",
)
make_resolution_note_slack_message(
@ -195,7 +202,7 @@ def test_delete_slack_ratelimit(
user=user,
added_by_user=user,
added_to_resolution_note=True,
slack_channel_id="test2_channel_id",
slack_channel=slack_channel2,
ts="test2_ts",
)
@ -237,10 +244,13 @@ def test_delete_slack_api_error_other_than_ratelimit(
make_alert_receive_channel,
make_alert_group,
make_alert,
make_slack_channel,
make_slack_message,
make_resolution_note_slack_message,
):
organization, slack_team_identity = make_organization_with_slack_team_identity()
slack_channel1 = make_slack_channel(slack_team_identity)
slack_channel2 = make_slack_channel(slack_team_identity)
user = make_user(organization=organization)
alert_receive_channel = make_alert_receive_channel(organization)
@ -255,7 +265,7 @@ def test_delete_slack_api_error_other_than_ratelimit(
user=user,
added_by_user=user,
posted_by_bot=True,
slack_channel_id="test1_channel_id",
slack_channel=slack_channel1,
ts="test1_ts",
)
make_resolution_note_slack_message(
@ -263,7 +273,7 @@ def test_delete_slack_api_error_other_than_ratelimit(
user=user,
added_by_user=user,
added_to_resolution_note=True,
slack_channel_id="test2_channel_id",
slack_channel=slack_channel2,
ts="test2_ts",
)

View file

@ -36,7 +36,7 @@ def test_escalation_plan_messaging_backends(
alert_group.save()
log_builder = IncidentLogBuilder(alert_group=alert_group)
plan = log_builder.get_incident_escalation_plan()
plan = log_builder.get_escalation_plan()
assert list(plan.values()) == [["send test only backend message to {}".format(user.username)]]
@ -156,5 +156,5 @@ def test_escalation_plan_custom_webhooks(
alert_group.save()
log_builder = IncidentLogBuilder(alert_group=alert_group)
plan = log_builder.get_incident_escalation_plan()
plan = log_builder.get_escalation_plan()
assert list(plan.values()) == [[f'trigger outgoing webhook "{custom_webhook.name}"']]

View file

@ -11,6 +11,7 @@ from apps.base.models.user_notification_policy import UserNotificationPolicy
def test_notify_all(
make_organization,
make_slack_team_identity,
make_slack_channel,
make_user,
make_user_notification_policy,
make_escalation_chain,
@ -21,6 +22,7 @@ def test_notify_all(
):
organization = make_organization()
slack_team_identity = make_slack_team_identity()
slack_channel = make_slack_channel(slack_team_identity)
organization.slack_team_identity = slack_team_identity
organization.save()
@ -37,7 +39,7 @@ def test_notify_all(
alert_receive_channel,
escalation_chain=escalation_chain,
notify_in_slack=True,
slack_channel_id="slack-channel-id",
slack_channel=slack_channel,
)
# note this is the only escalation step, with order=1
notify_all = make_escalation_policy(

View file

@ -16,12 +16,14 @@ def test_notify_group(
make_escalation_chain,
make_escalation_policy,
make_channel_filter,
make_slack_channel,
make_slack_user_group,
make_alert_receive_channel,
make_alert_group,
):
organization = make_organization()
slack_team_identity = make_slack_team_identity()
slack_channel = make_slack_channel(slack_team_identity)
organization.slack_team_identity = slack_team_identity
organization.save()
@ -40,7 +42,7 @@ def test_notify_group(
alert_receive_channel,
escalation_chain=escalation_chain,
notify_in_slack=True,
slack_channel_id="slack-channel-id",
slack_channel=slack_channel,
)
usergroup = make_slack_user_group(slack_team_identity)
# note this is the only escalation step, with order=1

View file

@ -1,12 +1,14 @@
import typing
from rest_framework import serializers
from apps.alerts.models import AlertReceiveChannel, ChannelFilter, EscalationChain
from apps.api.serializers.labels import LabelPairSerializer
from apps.api.serializers.slack_channel import SlackChannelSerializer
from apps.base.messaging import get_messaging_backend_from_id
from apps.telegram.models import TelegramToOrganizationConnector
from common.api_helpers.custom_fields import OrganizationFilteredPrimaryKeyRelatedField
from common.api_helpers.custom_fields import (
OrganizationFilteredPrimaryKeyRelatedField,
SlackChannelsFilteredByOrganizationSlackWorkspaceField,
)
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.mixins import EagerLoadingMixin
from common.api_helpers.utils import valid_jinja_template_for_serializer_method_field
@ -14,12 +16,6 @@ from common.jinja_templater.apply_jinja_template import JinjaTemplateError
from common.utils import is_regex_valid
class SlackChannelDetails(typing.TypedDict):
display_name: str
slack_id: str
id: str
class ChannelFilterSerializer(EagerLoadingMixin, serializers.ModelSerializer):
class TelegramChannelDetailsSerializer(serializers.Serializer):
display_name = serializers.CharField(source="channel_name")
@ -33,7 +29,10 @@ class ChannelFilterSerializer(EagerLoadingMixin, serializers.ModelSerializer):
allow_null=True,
required=False,
)
slack_channel = serializers.SerializerMethodField()
slack_channel = SlackChannelSerializer(read_only=True, allow_null=True)
# TODO: we probably don't need both telegram_channel and telegram_channel_details, research which one isn't needed
# and get rid of it
# Duplicated telegram channel and telegram_channel_details field for backwards compatibility for old integration page
telegram_channel = OrganizationFilteredPrimaryKeyRelatedField(
queryset=TelegramToOrganizationConnector.objects, filter_field="organization", allow_null=True, required=False
@ -41,11 +40,12 @@ class ChannelFilterSerializer(EagerLoadingMixin, serializers.ModelSerializer):
telegram_channel_details = TelegramChannelDetailsSerializer(
source="telegram_channel", read_only=True, allow_null=True
)
filtering_term_as_jinja2 = serializers.SerializerMethodField()
filtering_term = serializers.CharField(required=False, allow_null=True, allow_blank=True)
filtering_labels = LabelPairSerializer(many=True, required=False)
SELECT_RELATED = ["escalation_chain", "alert_receive_channel"]
SELECT_RELATED = ["escalation_chain", "alert_receive_channel", "slack_channel"]
class Meta:
model = ChannelFilter
@ -96,30 +96,13 @@ class ChannelFilterSerializer(EagerLoadingMixin, serializers.ModelSerializer):
raise serializers.ValidationError(["Filtering labels field is required"])
else:
raise serializers.ValidationError(["Expression type is incorrect"])
if "slack_channel_id" in data:
slack_channel = data.pop("slack_channel_id", None)
data["slack_channel"] = slack_channel
return data
def get_slack_channel(self, obj) -> SlackChannelDetails | None:
if obj.slack_channel_id is None:
return None
# display_name and id appears via annotate in ChannelFilterView.get_queryset()
return {
"display_name": obj.slack_channel_name,
"slack_id": obj.slack_channel_id,
"id": obj.slack_channel_pk,
}
def validate_slack_channel(self, slack_channel_id):
from apps.slack.models import SlackChannel
if slack_channel_id is not None:
slack_channel_id = slack_channel_id.upper()
organization = self.context["request"].auth.organization
try:
organization.slack_team_identity.get_cached_channels().get(slack_id=slack_channel_id)
except SlackChannel.DoesNotExist:
raise serializers.ValidationError(["Slack channel does not exist"])
return slack_channel_id
def validate_notification_backends(self, notification_backends):
# NOTE: updates the whole field, handling dict updates per backend
if notification_backends is not None:
@ -160,8 +143,11 @@ class ChannelFilterSerializer(EagerLoadingMixin, serializers.ModelSerializer):
class ChannelFilterCreateSerializer(ChannelFilterSerializer):
alert_receive_channel = OrganizationFilteredPrimaryKeyRelatedField(queryset=AlertReceiveChannel.objects)
slack_channel = serializers.CharField(allow_null=True, required=False, source="slack_channel_id")
slack_channel_id = SlackChannelsFilteredByOrganizationSlackWorkspaceField(
allow_null=True,
required=False,
write_only=True,
)
class Meta:
model = ChannelFilter
@ -170,6 +156,7 @@ class ChannelFilterCreateSerializer(ChannelFilterSerializer):
"alert_receive_channel",
"escalation_chain",
"slack_channel",
"slack_channel_id",
"created_at",
"filtering_labels",
"filtering_term",
@ -182,27 +169,6 @@ class ChannelFilterCreateSerializer(ChannelFilterSerializer):
]
read_only_fields = ["created_at", "is_default"]
def _get_slack_channel(self, obj) -> SlackChannelDetails | None:
if obj.slack_channel_id is None:
return None
slack_team_identity = self.context["request"].auth.organization.slack_team_identity
if slack_team_identity is None:
return None
slack_channel = slack_team_identity.get_cached_channels(slack_id=obj.slack_channel_id).first()
if slack_channel is None:
return None
return {
"display_name": slack_channel.name,
"slack_id": slack_channel.slack_id,
"id": slack_channel.public_primary_key,
}
def to_representation(self, obj):
"""add correct slack channel data to result after instance creation/update"""
result = super().to_representation(obj)
result["slack_channel"] = self._get_slack_channel(obj)
return result
def create(self, validated_data):
instance = super().create(validated_data)
instance.to_index(0) # the new route should be the first one
@ -222,15 +188,3 @@ class ChannelFilterUpdateSerializer(ChannelFilterCreateSerializer):
raise BadRequest(detail="Filtering term of default channel filter cannot be changed")
return super().update(instance, validated_data)
class ChannelFilterUpdateResponseSerializer(ChannelFilterUpdateSerializer):
"""
This serializer is used in OpenAPI schema to show proper response structure,
as `slack_channel` field expects string on create/update and returns dict on response
"""
slack_channel = serializers.SerializerMethodField()
def get_slack_channel(self, obj) -> SlackChannelDetails | None:
return self._get_slack_channel(obj)

View file

@ -685,3 +685,64 @@ def test_channel_filter_long_filtering_term(
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert "Expression is too long" in response.json()["non_field_errors"][0]
@pytest.mark.django_db
def test_channel_filter_with_slack_channel_crud(
    make_organization,
    make_user_for_organization,
    make_token_for_organization,
    make_slack_team_identity,
    make_slack_channel,
    make_alert_receive_channel,
    make_user_auth_headers,
):
    """Round-trip a channel filter's Slack channel through the internal API: create, update, clear."""
    slack_team_identity = make_slack_team_identity()
    first_channel = make_slack_channel(slack_team_identity)
    second_channel = make_slack_channel(slack_team_identity)

    organization = make_organization(slack_team_identity=slack_team_identity)
    user = make_user_for_organization(organization, role=LegacyAccessControlRole.ADMIN)
    _, token = make_token_for_organization(organization)
    alert_receive_channel = make_alert_receive_channel(organization)

    client = APIClient()
    auth_headers = make_user_auth_headers(user, token)

    def expected_payload(channel):
        # expected serialized representation of a Slack channel in the response
        return {
            "id": channel.public_primary_key,
            "display_name": channel.name,
            "slack_id": channel.slack_id,
        }

    # create a channel filter pointing at the first channel
    response = client.post(
        reverse("api-internal:channel_filter-list"),
        data={
            "alert_receive_channel": alert_receive_channel.public_primary_key,
            "slack_channel_id": first_channel.slack_id,
        },
        format="json",
        **auth_headers,
    )
    assert response.status_code == status.HTTP_201_CREATED
    created_channel_filter = response.json()
    assert created_channel_filter["slack_channel"] == expected_payload(first_channel)

    # switch the route to the second channel
    url = reverse("api-internal:channel_filter-detail", kwargs={"pk": created_channel_filter["id"]})
    response = client.patch(url, data={"slack_channel_id": second_channel.slack_id}, format="json", **auth_headers)
    assert response.status_code == status.HTTP_200_OK
    assert response.json()["slack_channel"] == expected_payload(second_channel)

    # detach the slack channel entirely
    response = client.patch(url, data={"slack_channel_id": None}, format="json", **auth_headers)
    assert response.status_code == status.HTTP_200_OK
    assert response.json()["slack_channel"] is None

View file

@ -240,6 +240,7 @@ def test_organization_get_channel_verification_code_invalid(
@pytest.mark.django_db
def test_get_organization_slack_config_checks(
make_organization_and_user_with_plugin_token,
make_slack_channel,
make_slack_team_identity,
make_alert_receive_channel,
make_channel_filter,
@ -283,7 +284,7 @@ def test_get_organization_slack_config_checks(
assert response.json() == expected_result
# connect integration to Slack (set a channel)
channel_filter.slack_channel_id = "C123456"
channel_filter.slack_channel = make_slack_channel(slack_team_identity)
channel_filter.save()
response = client.get(url, format="json", **make_user_auth_headers(user, token))

View file

@ -1,6 +1,5 @@
from django.db.models import OuterRef, Subquery
from django_filters import rest_framework as filters
from drf_spectacular.utils import extend_schema, extend_schema_view
from drf_spectacular.utils import extend_schema
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
@ -11,11 +10,9 @@ from apps.api.permissions import RBACPermission
from apps.api.serializers.channel_filter import (
ChannelFilterCreateSerializer,
ChannelFilterSerializer,
ChannelFilterUpdateResponseSerializer,
ChannelFilterUpdateSerializer,
)
from apps.auth_token.auth import PluginAuthentication
from apps.slack.models import SlackChannel
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.filters import ModelFieldFilterMixin, MultipleChoiceCharFilter, get_integration_queryset
from common.api_helpers.mixins import (
@ -36,14 +33,6 @@ class ChannelFilterFilter(ModelFieldFilterMixin, filters.FilterSet):
)
@extend_schema_view(
list=extend_schema(responses=ChannelFilterSerializer),
create=extend_schema(request=ChannelFilterCreateSerializer, responses=ChannelFilterUpdateResponseSerializer),
update=extend_schema(request=ChannelFilterUpdateSerializer, responses=ChannelFilterUpdateResponseSerializer),
partial_update=extend_schema(
request=ChannelFilterUpdateSerializer, responses=ChannelFilterUpdateResponseSerializer
),
)
class ChannelFilterView(
TeamFilteringMixin,
PublicPrimaryKeyMixin[ChannelFilter],
@ -82,22 +71,15 @@ class ChannelFilterView(
TEAM_LOOKUP = "alert_receive_channel__team"
def get_queryset(self, ignore_filtering_by_available_teams=False):
slack_channels_subq = SlackChannel.objects.filter(
slack_id=OuterRef("slack_channel_id"),
slack_team_identity=self.request.auth.organization.slack_team_identity,
).order_by("pk")
queryset = ChannelFilter.objects.filter(
alert_receive_channel__organization=self.request.auth.organization,
alert_receive_channel__deleted_at=None,
).annotate(
slack_channel_name=Subquery(slack_channels_subq.values("name")[:1]),
slack_channel_pk=Subquery(slack_channels_subq.values("public_primary_key")[:1]),
)
if not ignore_filtering_by_available_teams:
queryset = queryset.filter(*self.available_teams_lookup_args).distinct()
queryset = self.serializer_class.setup_eager_loading(queryset)
return queryset
return self.serializer_class.setup_eager_loading(queryset)
def destroy(self, request, *args, **kwargs):
instance = self.get_object()

View file

@ -2,7 +2,10 @@ from rest_framework import fields, serializers
from apps.alerts.models import AlertReceiveChannel, ChannelFilter, EscalationChain
from apps.base.messaging import get_messaging_backend_from_id, get_messaging_backends
from common.api_helpers.custom_fields import OrganizationFilteredPrimaryKeyRelatedField
from common.api_helpers.custom_fields import (
OrganizationFilteredPrimaryKeyRelatedField,
SlackChannelsFilteredByOrganizationSlackWorkspaceField,
)
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.mixins import EagerLoadingMixin
from common.api_helpers.utils import valid_jinja_template_for_serializer_method_field
@ -11,6 +14,11 @@ from common.ordered_model.serializer import OrderedModelSerializer
from common.utils import is_regex_valid
class SlackSerializer(serializers.Serializer):
    """Nested ``slack`` payload accepted on route create/update."""

    # Slack channel ID; the field validates it against the organization's
    # Slack workspace channels.
    channel_id = SlackChannelsFilteredByOrganizationSlackWorkspaceField(required=False, allow_null=True)
    # Whether Slack notifications are enabled for this route.
    enabled = serializers.BooleanField(required=False, allow_null=True)
class BaseChannelFilterSerializer(OrderedModelSerializer):
"""Base Channel Filter serializer with validation methods"""
@ -25,13 +33,19 @@ class BaseChannelFilterSerializer(OrderedModelSerializer):
self._declared_fields[field] = serializers.DictField(required=False)
self.Meta.fields.append(field)
def to_representation(self, instance):
result = super().to_representation(instance)
result["slack"] = {"channel_id": instance.slack_channel_id, "enabled": bool(instance.notify_in_slack)}
result["telegram"] = {
"id": instance.telegram_channel.public_primary_key if instance.telegram_channel else None,
"enabled": bool(instance.notify_in_telegram),
def to_representation(self, instance: ChannelFilter):
result = {
**super().to_representation(instance),
"slack": {
"channel_id": instance.slack_channel_slack_id,
"enabled": bool(instance.notify_in_slack),
},
"telegram": {
"id": instance.telegram_channel.public_primary_key if instance.telegram_channel else None,
"enabled": bool(instance.notify_in_telegram),
},
}
# add representation for other messaging backends
for backend_id, backend in get_messaging_backends():
if backend is None:
@ -45,18 +59,16 @@ class BaseChannelFilterSerializer(OrderedModelSerializer):
result[field] = {"id": channel_id, "enabled": notification_enabled}
return result
def _correct_validated_data(self, validated_data):
def _correct_validated_data(self, validated_data: dict) -> dict:
organization = self.context["request"].auth.organization
slack_field = validated_data.pop("slack", {})
if slack_field:
if slack_field := validated_data.pop("slack", {}):
if "channel_id" in slack_field:
validated_data["slack_channel_id"] = self._validate_slack_channel_id(slack_field.get("channel_id"))
validated_data["slack_channel"] = slack_field["channel_id"]
if "enabled" in slack_field:
validated_data["notify_in_slack"] = bool(slack_field.get("enabled"))
validated_data["notify_in_slack"] = slack_field["enabled"]
telegram_field = validated_data.pop("telegram", {})
if telegram_field:
if telegram_field := validated_data.pop("telegram", {}):
if "id" in telegram_field:
validated_data["telegram_channel"] = self._validate_telegram_channel(telegram_field.get("id"))
if "enabled" in telegram_field:
@ -78,23 +90,9 @@ class BaseChannelFilterSerializer(OrderedModelSerializer):
notification_backends[backend_id] = notification_backend
if notification_backends:
validated_data["notification_backends"] = notification_backends
return validated_data
def _validate_slack_channel_id(self, slack_channel_id):
    """Normalize and validate a Slack channel ID for the requesting org.

    ``None`` passes through unchanged (it clears the channel). Otherwise the
    ID is upper-cased and must exist in the organization's cached channels;
    a ``BadRequest`` is raised when Slack is not connected or the channel is
    unknown.
    """
    from apps.slack.models import SlackChannel

    if slack_channel_id is None:
        return None

    slack_channel_id = slack_channel_id.upper()
    slack_team_identity = self.context["request"].auth.organization.slack_team_identity
    if not slack_team_identity:
        raise BadRequest(detail="Slack isn't connected to this workspace")
    try:
        slack_team_identity.get_cached_channels().get(slack_id=slack_channel_id)
    except SlackChannel.DoesNotExist:
        raise BadRequest(detail="Slack channel does not exist")
    return slack_channel_id
def _validate_telegram_channel(self, telegram_channel_id):
from apps.telegram.models import TelegramToOrganizationConnector
@ -132,7 +130,7 @@ class RoutingTypeField(fields.CharField):
class ChannelFilterSerializer(EagerLoadingMixin, BaseChannelFilterSerializer):
id = serializers.CharField(read_only=True, source="public_primary_key")
slack = serializers.DictField(required=False)
slack = SlackSerializer(required=False)
telegram = serializers.DictField(required=False)
routing_type = RoutingTypeField(allow_null=False, required=False, source="filtering_term_type")
routing_regex = serializers.CharField(allow_null=False, required=True, source="filtering_term")
@ -147,7 +145,7 @@ class ChannelFilterSerializer(EagerLoadingMixin, BaseChannelFilterSerializer):
is_the_last_route = serializers.BooleanField(read_only=True, source="is_default")
SELECT_RELATED = ["alert_receive_channel", "escalation_chain"]
SELECT_RELATED = ["alert_receive_channel", "escalation_chain", "slack_channel"]
class Meta:
model = ChannelFilter
@ -214,7 +212,7 @@ class ChannelFilterUpdateSerializer(ChannelFilterSerializer):
class DefaultChannelFilterSerializer(BaseChannelFilterSerializer):
id = serializers.CharField(read_only=True, source="public_primary_key")
slack = serializers.DictField(required=False)
slack = SlackSerializer(required=False)
telegram = serializers.DictField(required=False)
escalation_chain_id = OrganizationFilteredPrimaryKeyRelatedField(
queryset=EscalationChain.objects,

View file

@ -12,28 +12,36 @@ TEST_MESSAGING_BACKEND_ID = "TESTBACKENDID"
@pytest.fixture()
def route_public_api_setup(
make_organization_and_user_with_token,
make_organization,
make_user_for_organization,
make_public_api_token,
make_alert_receive_channel,
make_escalation_chain,
make_channel_filter,
make_slack_team_identity,
make_slack_channel,
):
organization, user, token = make_organization_and_user_with_token()
slack_team_identity = make_slack_team_identity()
slack_channel = make_slack_channel(slack_team_identity)
organization = make_organization(slack_team_identity=slack_team_identity)
user = make_user_for_organization(organization)
_, token = make_public_api_token(user, organization)
alert_receive_channel = make_alert_receive_channel(organization)
escalation_chain = make_escalation_chain(organization)
channel_filter = make_channel_filter(
alert_receive_channel,
is_default=True,
slack_channel_id="TEST_SLACK_ID",
slack_channel=slack_channel,
escalation_chain=escalation_chain,
)
return organization, user, token, alert_receive_channel, escalation_chain, channel_filter
return organization, token, alert_receive_channel, escalation_chain, channel_filter
@pytest.mark.django_db
def test_get_route(
route_public_api_setup,
):
_, _, token, alert_receive_channel, escalation_chain, channel_filter = route_public_api_setup
def test_get_route(route_public_api_setup):
_, token, alert_receive_channel, escalation_chain, channel_filter = route_public_api_setup
client = APIClient()
@ -48,7 +56,7 @@ def test_get_route(
"routing_regex": channel_filter.filtering_term,
"position": channel_filter.order,
"is_the_last_route": channel_filter.is_default,
"slack": {"channel_id": channel_filter.slack_channel_id, "enabled": True},
"slack": {"channel_id": channel_filter.slack_channel_slack_id, "enabled": True},
"telegram": {"id": None, "enabled": False},
TEST_MESSAGING_BACKEND_FIELD: {"id": None, "enabled": False},
}
@ -58,10 +66,8 @@ def test_get_route(
@pytest.mark.django_db
def test_get_routes_list(
route_public_api_setup,
):
_, _, token, alert_receive_channel, escalation_chain, channel_filter = route_public_api_setup
def test_get_routes_list(route_public_api_setup):
_, token, alert_receive_channel, escalation_chain, channel_filter = route_public_api_setup
client = APIClient()
@ -81,7 +87,7 @@ def test_get_routes_list(
"routing_regex": channel_filter.filtering_term,
"position": channel_filter.order,
"is_the_last_route": channel_filter.is_default,
"slack": {"channel_id": channel_filter.slack_channel_id, "enabled": True},
"slack": {"channel_id": channel_filter.slack_channel_slack_id, "enabled": True},
"telegram": {"id": None, "enabled": False},
TEST_MESSAGING_BACKEND_FIELD: {"id": None, "enabled": False},
}
@ -96,10 +102,8 @@ def test_get_routes_list(
@pytest.mark.django_db
def test_get_routes_filter_by_integration_id(
route_public_api_setup,
):
_, _, token, alert_receive_channel, escalation_chain, channel_filter = route_public_api_setup
def test_get_routes_filter_by_integration_id(route_public_api_setup):
_, token, alert_receive_channel, escalation_chain, channel_filter = route_public_api_setup
client = APIClient()
@ -121,7 +125,7 @@ def test_get_routes_filter_by_integration_id(
"routing_regex": channel_filter.filtering_term,
"position": channel_filter.order,
"is_the_last_route": channel_filter.is_default,
"slack": {"channel_id": channel_filter.slack_channel_id, "enabled": True},
"slack": {"channel_id": channel_filter.slack_channel_slack_id, "enabled": True},
"telegram": {"id": None, "enabled": False},
TEST_MESSAGING_BACKEND_FIELD: {"id": None, "enabled": False},
}
@ -136,10 +140,8 @@ def test_get_routes_filter_by_integration_id(
@pytest.mark.django_db
def test_create_route(
route_public_api_setup,
):
_, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
def test_create_route(route_public_api_setup):
_, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
client = APIClient()
@ -170,7 +172,7 @@ def test_create_route(
@pytest.mark.django_db
def test_create_route_without_escalation_chain(route_public_api_setup):
_, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
_, token, alert_receive_channel, _, _ = route_public_api_setup
client = APIClient()
url = reverse("api-public:routes-list")
@ -203,7 +205,7 @@ def test_create_route_without_escalation_chain(route_public_api_setup):
def test_invalid_route_data(
route_public_api_setup,
):
_, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
_, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
client = APIClient()
@ -219,11 +221,8 @@ def test_invalid_route_data(
@pytest.mark.django_db
def test_update_route(
route_public_api_setup,
make_channel_filter,
):
_, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
def test_update_route(route_public_api_setup, make_channel_filter):
_, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
new_channel_filter = make_channel_filter(
alert_receive_channel,
is_default=False,
@ -251,7 +250,7 @@ def test_update_route(
"routing_regex": data_to_update["routing_regex"],
"position": new_channel_filter.order,
"is_the_last_route": new_channel_filter.is_default,
"slack": {"channel_id": new_channel_filter.slack_channel_id, "enabled": True},
"slack": {"channel_id": new_channel_filter.slack_channel_slack_id, "enabled": True},
"telegram": {"id": None, "enabled": False},
TEST_MESSAGING_BACKEND_FIELD: {"id": None, "enabled": False},
}
@ -261,11 +260,8 @@ def test_update_route(
@pytest.mark.django_db
def test_delete_route(
route_public_api_setup,
make_channel_filter,
):
_, _, token, alert_receive_channel, _, _ = route_public_api_setup
def test_delete_route(route_public_api_setup, make_channel_filter):
_, token, alert_receive_channel, _, _ = route_public_api_setup
new_channel_filter = make_channel_filter(
alert_receive_channel,
is_default=False,
@ -285,47 +281,110 @@ def test_delete_route(
@pytest.mark.django_db
def test_create_route_slack_error(
route_public_api_setup,
make_slack_team_identity,
make_slack_channel,
make_organization,
make_user_for_organization,
make_public_api_token,
make_alert_receive_channel,
):
_, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
_, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
client = APIClient()
url = reverse("api-public:routes-list")
data_for_create = {
"integration_id": alert_receive_channel.public_primary_key,
"routing_regex": "testreg",
"escalation_chain_id": escalation_chain.public_primary_key,
"slack": {"channel_id": "TEST_SLACK_ID"},
}
response = client.post(url, format="json", HTTP_AUTHORIZATION=token, data=data_for_create)
# Channel does not exist
response = client.post(
url,
format="json",
HTTP_AUTHORIZATION=token,
data={
"integration_id": alert_receive_channel.public_primary_key,
"routing_regex": "testreg",
"escalation_chain_id": escalation_chain.public_primary_key,
"slack": {"channel_id": "TEST_SLACK_ID"},
},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.data["detail"] == "Slack isn't connected to this workspace"
assert response.json()["slack"]["channel_id"][0] == "Slack channel does not exist"
# org doesn't have slack workspace connected
slack_team_identity = make_slack_team_identity()
slack_channel = make_slack_channel(slack_team_identity)
organization = make_organization()
user = make_user_for_organization(organization)
_, token = make_public_api_token(user, organization)
alert_receive_channel = make_alert_receive_channel(organization)
assert organization.slack_team_identity is None
response = client.post(
url,
format="json",
HTTP_AUTHORIZATION=token,
data={
"integration_id": alert_receive_channel.public_primary_key,
"slack": {"channel_id": slack_channel.slack_id},
},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json()["detail"] == "Slack isn't connected to this workspace"
@pytest.mark.django_db
def test_update_route_slack_error(
route_public_api_setup,
make_channel_filter,
make_slack_team_identity,
make_slack_channel,
make_organization,
make_user_for_organization,
make_public_api_token,
make_alert_receive_channel,
):
_, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
new_channel_filter = make_channel_filter(
alert_receive_channel,
is_default=False,
filtering_term="testreg",
)
client = APIClient()
url = reverse("api-public:routes-detail", kwargs={"pk": new_channel_filter.public_primary_key})
data_to_update = {
"slack": {"channel_id": "TEST_SLACK_ID"},
}
# Channel does not exist
_, token, alert_receive_channel, _, _ = route_public_api_setup
new_channel_filter = make_channel_filter(alert_receive_channel, is_default=False, filtering_term="testreg")
response = client.put(url, format="json", HTTP_AUTHORIZATION=token, data=data_to_update)
url = reverse("api-public:routes-detail", kwargs={"pk": new_channel_filter.public_primary_key})
response = client.put(
url,
format="json",
HTTP_AUTHORIZATION=token,
data={
"slack": {"channel_id": "TEST_SLACK_ID"},
},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.data["detail"] == "Slack isn't connected to this workspace"
assert response.json()["slack"]["channel_id"][0] == "Slack channel does not exist"
# org doesn't have slack workspace connected
slack_team_identity = make_slack_team_identity()
slack_channel = make_slack_channel(slack_team_identity)
organization = make_organization()
user = make_user_for_organization(organization)
_, token = make_public_api_token(user, organization)
alert_receive_channel = make_alert_receive_channel(organization)
new_channel_filter = make_channel_filter(alert_receive_channel, is_default=False, filtering_term="testreg")
assert organization.slack_team_identity is None
url = reverse("api-public:routes-detail", kwargs={"pk": new_channel_filter.public_primary_key})
response = client.put(
url,
format="json",
HTTP_AUTHORIZATION=token,
data={
"slack": {"channel_id": slack_channel.slack_id},
},
)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert response.json()["detail"] == "Slack isn't connected to this workspace"
@pytest.mark.django_db
@ -334,7 +393,7 @@ def test_create_route_with_messaging_backend(
make_slack_team_identity,
make_slack_channel,
):
organization, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
organization, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
slack_team_identity = make_slack_team_identity()
organization.slack_team_identity = slack_team_identity
organization.save(update_fields=["slack_team_identity"])
@ -381,7 +440,7 @@ def test_update_route_with_messaging_backend(
make_slack_team_identity,
make_slack_channel,
):
organization, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
organization, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
slack_team_identity = make_slack_team_identity()
organization.slack_team_identity = slack_team_identity
organization.save(update_fields=["slack_team_identity"])
@ -407,7 +466,7 @@ def test_update_route_with_messaging_backend(
}
# check if route data is different
assert new_channel_filter.slack_channel_id != slack_channel.slack_id
assert new_channel_filter.slack_channel_slack_id != slack_channel.slack_id
assert new_channel_filter.notify_in_slack != data_to_update["slack"]["enabled"]
assert new_channel_filter.notify_in_telegram != data_to_update["telegram"]["enabled"]
assert new_channel_filter.notification_backends is None
@ -433,7 +492,7 @@ def test_update_route_with_messaging_backend(
new_channel_filter.refresh_from_db()
# check that the route data was changed correctly
assert new_channel_filter.slack_channel_id == slack_channel.slack_id
assert new_channel_filter.slack_channel_slack_id == slack_channel.slack_id
assert new_channel_filter.notify_in_slack == data_to_update["slack"]["enabled"]
assert new_channel_filter.notify_in_telegram == data_to_update["telegram"]["enabled"]
assert new_channel_filter.notification_backends == {
@ -467,7 +526,7 @@ def test_update_route_with_messaging_backend(
new_channel_filter.refresh_from_db()
# check that the route data was changed correctly
assert new_channel_filter.slack_channel_id == data_to_update["slack"]["channel_id"]
assert new_channel_filter.slack_channel_slack_id == data_to_update["slack"]["channel_id"]
assert new_channel_filter.notify_in_slack == data_to_update["slack"]["enabled"]
assert new_channel_filter.notify_in_telegram == data_to_update["telegram"]["enabled"]
assert new_channel_filter.notification_backends == {TestOnlyBackend.backend_id: {"channel": None, "enabled": True}}
@ -504,11 +563,8 @@ def test_update_route_with_manual_ordering(
@pytest.mark.django_db
def test_routes_long_filtering_term(
route_public_api_setup,
make_channel_filter,
):
organization, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
def test_routes_long_filtering_term(route_public_api_setup, make_channel_filter):
_, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup
client = APIClient()
long_filtering_term = "a" * (ChannelFilter.FILTERING_TERM_MAX_LENGTH + 1)

View file

@ -345,7 +345,7 @@ def list_users_to_notify_from_ical(
schedule: "OnCallSchedule",
events_datetime: typing.Optional[datetime.datetime] = None,
from_cached_final: bool = False,
) -> typing.Sequence["User"]:
) -> typing.List["User"]:
"""
Retrieve on-call users for the current time
"""
@ -363,7 +363,7 @@ def list_users_to_notify_from_ical_for_period(
start_datetime: datetime.datetime,
end_datetime: datetime.datetime,
from_cached_final: bool = False,
) -> typing.Sequence["User"]:
) -> typing.List["User"]:
if from_cached_final and schedule.cached_ical_final_schedule:
events = schedule.filter_events(start_datetime, end_datetime, from_cached_final=True)
else:
@ -606,7 +606,7 @@ def get_missing_users_from_ical_event(event, organization: "Organization"):
return [u for u in all_usernames if u != "" and u not in found_usernames and u.lower() not in found_emails]
def get_users_from_ical_event(event, organization: "Organization") -> typing.Sequence["User"]:
def get_users_from_ical_event(event, organization: "Organization") -> typing.List["User"]:
usernames_from_ical, _ = get_usernames_from_ical_event(event)
users = []
if len(usernames_from_ical) != 0:

View file

@ -43,3 +43,7 @@ class SlackChannel(models.Model):
class Meta:
unique_together = ("slack_id", "slack_team_identity")
def __str__(self):
    """Human-readable representation: channel name followed by its Slack ID."""
    # NOTE(review): the original had a spurious @classmethod decorator on
    # __str__; str(instance) would then bind the class (not the instance) as
    # `self`, breaking the attribute lookups. __str__ must be an instance method.
    return f"{self.name}: {self.slack_id}"

View file

@ -29,6 +29,8 @@ class SlackMessage(models.Model):
id = models.CharField(primary_key=True, default=uuid.uuid4, editable=False, max_length=36)
slack_id = models.CharField(max_length=100)
# TODO: convert this to a foreign key field to SlackChannel
channel_id = models.CharField(max_length=100, null=True, default=None)
organization = models.ForeignKey(

View file

@ -68,7 +68,7 @@ class AlertShootingStep(scenario_step.ScenarioStep):
if num_updated_rows == 1:
try:
channel_id = (
alert.group.channel_filter.slack_channel_id_or_general_log_id
alert.group.channel_filter.slack_channel_id_or_org_default_id
if alert.group.channel_filter
# if channel filter is deleted mid escalation, use default Slack channel
else alert.group.channel.organization.default_slack_channel_slack_id

View file

@ -71,7 +71,7 @@ class AddToResolutionNoteStep(scenario_step.ScenarioStep):
predefined_org: typing.Optional["Organization"] = None,
) -> None:
from apps.alerts.models import ResolutionNote, ResolutionNoteSlackMessage
from apps.slack.models import SlackMessage, SlackUserIdentity
from apps.slack.models import SlackChannel, SlackMessage, SlackUserIdentity
try:
channel_id = payload["channel"]["id"]
@ -156,12 +156,17 @@ class AddToResolutionNoteStep(scenario_step.ScenarioStep):
except ResolutionNoteSlackMessage.DoesNotExist:
text = payload["message"]["text"]
text = text.replace("```", "")
slack_channel = SlackChannel.objects.get(
slack_id=channel_id, slack_team_identity=slack_team_identity
)
slack_message = SlackMessage.objects.get(
slack_id=thread_ts,
_slack_team_identity=slack_team_identity,
channel_id=channel_id,
)
alert_group = slack_message.alert_group
try:
author_slack_user_identity = SlackUserIdentity.objects.get(
slack_id=payload["message"]["user"], slack_team_identity=slack_team_identity
@ -174,12 +179,13 @@ class AddToResolutionNoteStep(scenario_step.ScenarioStep):
)
self.open_warning_window(payload, warning_text)
return
resolution_note_slack_message = ResolutionNoteSlackMessage(
alert_group=alert_group,
user=author_user,
added_by_user=self.user,
text=text,
slack_channel_id=channel_id,
slack_channel=slack_channel,
thread_ts=thread_ts,
ts=message_ts,
permalink=permalink,
@ -188,6 +194,7 @@ class AddToResolutionNoteStep(scenario_step.ScenarioStep):
resolution_note_slack_message.added_to_resolution_note = True
resolution_note_slack_message.save()
resolution_note = resolution_note_slack_message.get_resolution_note()
if resolution_note is None:
ResolutionNote(
alert_group=alert_group,
@ -197,6 +204,7 @@ class AddToResolutionNoteStep(scenario_step.ScenarioStep):
).save()
else:
resolution_note.recreate()
try:
self._slack_client.reactions_add(
channel=channel_id,
@ -225,14 +233,14 @@ class UpdateResolutionNoteStep(scenario_step.ScenarioStep):
)
def remove_resolution_note_slack_message(self, resolution_note: "ResolutionNote") -> None:
resolution_note_slack_message = resolution_note.resolution_note_slack_message
if resolution_note_slack_message is not None:
if (resolution_note_slack_message := resolution_note.resolution_note_slack_message) is not None:
resolution_note_slack_message.added_to_resolution_note = False
resolution_note_slack_message.save(update_fields=["added_to_resolution_note"])
if resolution_note_slack_message.posted_by_bot:
try:
self._slack_client.chat_delete(
channel=resolution_note_slack_message.slack_channel_id,
channel=resolution_note_slack_message.slack_channel_slack_id,
ts=resolution_note_slack_message.ts,
)
except RESOLUTION_NOTE_EXCEPTIONS:
@ -242,17 +250,23 @@ class UpdateResolutionNoteStep(scenario_step.ScenarioStep):
def post_or_update_resolution_note_in_thread(self, resolution_note: "ResolutionNote") -> None:
from apps.alerts.models import ResolutionNoteSlackMessage
from apps.slack.models import SlackChannel
resolution_note_slack_message = resolution_note.resolution_note_slack_message
alert_group = resolution_note.alert_group
alert_group_slack_message = alert_group.slack_message
slack_channel_id = alert_group_slack_message.channel_id
blocks = self.get_resolution_note_blocks(resolution_note)
slack_channel = SlackChannel.objects.get(
slack_id=slack_channel_id, slack_team_identity=self.slack_team_identity
)
if resolution_note_slack_message is None:
resolution_note_text = Truncator(resolution_note.text)
try:
result = self._slack_client.chat_postMessage(
channel=alert_group_slack_message.channel_id,
channel=slack_channel_id,
thread_ts=alert_group_slack_message.slack_id,
text=resolution_note_text.chars(BLOCK_SECTION_TEXT_MAX_SIZE),
blocks=blocks,
@ -261,17 +275,14 @@ class UpdateResolutionNoteStep(scenario_step.ScenarioStep):
pass
else:
message_ts = result["message"]["ts"]
result_permalink = self._slack_client.chat_getPermalink(
channel=alert_group_slack_message.channel_id,
message_ts=message_ts,
)
result_permalink = self._slack_client.chat_getPermalink(channel=slack_channel_id, message_ts=message_ts)
resolution_note_slack_message = ResolutionNoteSlackMessage(
alert_group=alert_group,
user=resolution_note.author,
added_by_user=resolution_note.author,
text=resolution_note.text,
slack_channel_id=alert_group_slack_message.channel_id,
slack_channel=slack_channel,
thread_ts=result["ts"],
ts=message_ts,
permalink=result_permalink["permalink"],
@ -305,7 +316,7 @@ class UpdateResolutionNoteStep(scenario_step.ScenarioStep):
def add_resolution_note_reaction(self, slack_thread_message: "ResolutionNoteSlackMessage"):
try:
self._slack_client.reactions_add(
channel=slack_thread_message.slack_channel_id,
channel=slack_thread_message.slack_channel_slack_id,
name="memo",
timestamp=slack_thread_message.ts,
)
@ -315,7 +326,7 @@ class UpdateResolutionNoteStep(scenario_step.ScenarioStep):
def remove_resolution_note_reaction(self, slack_thread_message: "ResolutionNoteSlackMessage") -> None:
try:
self._slack_client.reactions_remove(
channel=slack_thread_message.slack_channel_id,
channel=slack_thread_message.slack_channel_slack_id,
name="memo",
timestamp=slack_thread_message.ts,
)

View file

@ -57,8 +57,6 @@ class SlackChannelDeletedEventStep(scenario_step.ScenarioStep):
slack_id=slack_id,
slack_team_identity=slack_team_identity,
).delete()
# even if the channel is deleted, run the task to clean up possible leftovers
clean_slack_channel_leftovers.apply_async((slack_team_identity.id, slack_id))
class SlackChannelArchivedEventStep(scenario_step.ScenarioStep):

View file

@ -44,7 +44,7 @@ class SlackChannelMessageEventStep(scenario_step.ScenarioStep):
self, slack_user_identity: "SlackUserIdentity", payload: EventPayload
) -> None:
from apps.alerts.models import ResolutionNoteSlackMessage
from apps.slack.models import SlackMessage
from apps.slack.models import SlackChannel, SlackMessage
if slack_user_identity is None:
logger.warning(
@ -53,7 +53,7 @@ class SlackChannelMessageEventStep(scenario_step.ScenarioStep):
)
return
channel = payload["event"]["channel"]
channel_id = payload["event"]["channel"]
thread_ts = payload["event"].get("thread_ts") or payload["event"]["message"]["thread_ts"]
# sometimes we get messages with empty text, probably because it's an image or attachment
event_text = payload["event"].get("text")
@ -68,18 +68,23 @@ class SlackChannelMessageEventStep(scenario_step.ScenarioStep):
try:
slack_message = SlackMessage.objects.get(
slack_id=thread_ts,
channel_id=channel,
channel_id=channel_id,
_slack_team_identity=self.slack_team_identity,
)
except SlackMessage.DoesNotExist:
return
try:
slack_channel = SlackChannel.objects.get(slack_id=channel_id, slack_team_identity=self.slack_team_identity)
except SlackChannel.DoesNotExist:
return
if not slack_message.alert_group:
# SlackMessage instances without alert_group set (e.g., SSR Slack messages)
return
try:
result = self._slack_client.chat_getPermalink(channel=channel, message_ts=message_ts)
result = self._slack_client.chat_getPermalink(channel=channel_id, message_ts=message_ts)
except RESOLUTION_NOTE_EXCEPTIONS:
return
@ -89,7 +94,7 @@ class SlackChannelMessageEventStep(scenario_step.ScenarioStep):
if len(text) > 2900:
self._slack_client.chat_postEphemeral(
channel=channel,
channel=channel_id,
user=slack_user_identity.slack_id,
text=":warning: Unable to show the <{}|message> in Resolution Note: the message is too long ({}). "
"Max length - 2900 symbols.".format(permalink, len(text)),
@ -104,7 +109,7 @@ class SlackChannelMessageEventStep(scenario_step.ScenarioStep):
"user": self.user,
"added_by_user": self.user,
"text": text,
"slack_channel_id": channel,
"slack_channel": slack_channel,
"permalink": permalink,
},
)
@ -132,7 +137,7 @@ class SlackChannelMessageEventStep(scenario_step.ScenarioStep):
slack_thread_message = ResolutionNoteSlackMessage.objects.get(
ts=message_ts,
thread_ts=thread_ts,
slack_channel_id=channel_id,
slack_channel__slack_id=channel_id,
)
except ResolutionNoteSlackMessage.DoesNotExist:
pass

View file

@ -31,7 +31,7 @@ class AlertGroupLogSlackRenderer:
log_builder = IncidentLogBuilder(alert_group)
result = ""
if not (alert_group.resolved or alert_group.wiped_at or alert_group.root_alert_group):
escalation_policies_plan = log_builder.get_incident_escalation_plan(for_slack=True)
escalation_policies_plan = log_builder.get_escalation_plan(for_slack=True)
if escalation_policies_plan:
result += "\n:arrow_down: :arrow_down: :arrow_down: Plan:\n\n"
# humanize time, create plan text

View file

@ -302,16 +302,16 @@ def post_slack_rate_limit_message(integration_id):
f"Active: {integration.rate_limit_message_task_id}"
)
return
default_route = integration.channel_filters.get(is_default=True)
slack_channel = default_route.slack_channel_id_or_general_log_id
if slack_channel:
if (slack_channel_id := default_route.slack_channel_id_or_org_default_id) is not None:
text = (
f"Delivering and updating alert groups of integration {integration.verbal_name} in Slack is "
f"temporarily stopped due to rate limit. You could find new alert groups at "
f"<{integration.new_incidents_web_link}|web page "
'"Alert Groups">'
)
post_message_to_channel(integration.organization, slack_channel, text)
post_message_to_channel(integration.organization, slack_channel_id, text)
@shared_dedicated_queue_retry_task(
@ -548,18 +548,17 @@ def clean_slack_integration_leftovers(organization_id, *args, **kwargs):
from apps.schedules.models import OnCallSchedule
logger.info(f"Cleaning up for organization {organization_id}")
ChannelFilter.objects.filter(alert_receive_channel__organization_id=organization_id).update(slack_channel_id=None)
ChannelFilter.objects.filter(alert_receive_channel__organization_id=organization_id).update(slack_channel=None)
OnCallSchedule.objects.filter(organization_id=organization_id).update(channel=None, user_group=None)
@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), retry_backoff=True, max_retries=10)
def clean_slack_channel_leftovers(slack_team_identity_id, slack_channel_id):
"""
TODO: once we add/migrate to ChannelFilter.slack_channel, this will mean that we no longer need this task
and it can be safely removed (foreign key relationships to a slack channel that is deleted in the db will
automatically be set to None due to on_delete=models.SET_NULL)
This task removes binding to slack channel after a channel is archived in Slack.
This task removes the binding to a Slack channel after the channel is archived or deleted in Slack.
**NOTE**: this is only needed for Slack Channel archive. If a channel is deleted, we simply remove references
to that channel via `on_delete=models.SET_NULL`.
"""
from apps.alerts.models import ChannelFilter
from apps.slack.models import SlackTeamIdentity

View file

@ -32,7 +32,7 @@ class SlackUserGroupFactory(factory.DjangoModelFactory):
class SlackChannelFactory(factory.DjangoModelFactory):
slack_id = UniqueFaker("sentence", nb_words=3)
slack_id = factory.Sequence(lambda n: f"TEST_SLACK_ID_{n}")
name = factory.Faker("word")
class Meta:

View file

@ -40,29 +40,37 @@ def test_reset_slack_integration_permissions(
@pytest.mark.django_db
def test_clean_slack_integration_leftovers(
make_organization_with_slack_team_identity,
make_slack_team_identity,
make_slack_channel,
make_organization,
make_alert_receive_channel,
make_channel_filter,
make_slack_user_group,
make_schedule,
):
organization, slack_team_identity = make_organization_with_slack_team_identity()
slack_team_identity = make_slack_team_identity()
slack_channel = make_slack_channel(slack_team_identity)
organization = make_organization(slack_team_identity=slack_team_identity, default_slack_channel=slack_channel)
# create channel filter with Slack channel
alert_receive_channel = make_alert_receive_channel(organization)
channel_filter = make_channel_filter(alert_receive_channel, slack_channel_id="test")
channel_filter = make_channel_filter(alert_receive_channel, slack_channel=slack_channel)
# create schedule with Slack channel and user group
user_group = make_slack_user_group(slack_team_identity)
schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb, channel="test", user_group=user_group)
assert channel_filter.slack_channel is not None
assert schedule.channel is not None
assert schedule.user_group is not None
# clean Slack integration leftovers
clean_slack_integration_leftovers(organization.pk)
channel_filter.refresh_from_db()
schedule.refresh_from_db()
# check that references to Slack objects are removed
assert channel_filter.slack_channel_id is None
assert channel_filter.slack_channel is None
assert schedule.channel is None
assert schedule.user_group is None
@ -100,3 +108,33 @@ def test_unpopulate_slack_user_identities(
# check that Slack specific info is reset for organization
assert organization.slack_team_identity is None
assert organization.default_slack_channel_slack_id is None
@pytest.mark.django_db
def test_delete_slack_channel_and_cascade_deletes(
    make_slack_team_identity,
    make_slack_channel,
    make_organization,
    make_alert_receive_channel,
    make_channel_filter,
    # make_schedule,
):
    """Deleting a SlackChannel row must null out foreign keys that point at it.

    ChannelFilter.slack_channel references SlackChannel with on_delete=SET_NULL,
    so removing the channel should leave the channel filter intact but with
    slack_channel set to None (no cascade delete of the filter itself).
    """
    # TODO: add the schedule related bits once https://github.com/grafana/oncall/pull/5199 is merged
    slack_team_identity = make_slack_team_identity()
    slack_channel = make_slack_channel(slack_team_identity)
    organization = make_organization(slack_team_identity=slack_team_identity, default_slack_channel=slack_channel)

    alert_receive_channel = make_alert_receive_channel(organization)
    channel_filter = make_channel_filter(alert_receive_channel, slack_channel=slack_channel)
    # schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)

    # sanity check: the FK is wired up before deletion
    assert channel_filter.slack_channel == slack_channel
    # assert schedule.slack_channel == slack_channel

    slack_channel.delete()

    channel_filter.refresh_from_db()
    # schedule.refresh_from_db()

    # SET_NULL: the referencing row survives, the reference is cleared
    assert channel_filter.slack_channel is None
    # assert schedule.slack_channel is None

View file

@ -145,17 +145,23 @@ def test_post_or_update_resolution_note_in_thread_truncate_message_text(
make_alert_receive_channel,
make_alert_group,
make_slack_message,
make_slack_channel,
make_resolution_note,
):
UpdateResolutionNoteStep = ScenarioStep.get_step("resolution_note", "UpdateResolutionNoteStep")
organization, user, slack_team_identity, _ = make_organization_and_user_with_slack_identities()
step = UpdateResolutionNoteStep(slack_team_identity)
alert_receive_channel = make_alert_receive_channel(organization)
alert_group = make_alert_group(alert_receive_channel)
make_slack_message(alert_group=alert_group, channel_id="RANDOM_CHANNEL_ID", slack_id="RANDOM_MESSAGE_ID")
slack_channel = make_slack_channel(slack_team_identity)
slack_channel_id = slack_channel.slack_id
make_slack_message(alert_group=alert_group, channel_id=slack_channel_id)
resolution_note = make_resolution_note(alert_group=alert_group, author=user, message_text="a" * 3000)
UpdateResolutionNoteStep = ScenarioStep.get_step("resolution_note", "UpdateResolutionNoteStep")
step = UpdateResolutionNoteStep(slack_team_identity)
with patch("apps.slack.client.SlackClient.api_call") as mock_slack_api_call:
mock_slack_api_call.return_value = {
"ts": "timestamp",
@ -176,16 +182,19 @@ def test_post_or_update_resolution_note_in_thread_update_truncate_message_text(
make_alert_receive_channel,
make_alert_group,
make_slack_message,
make_slack_channel,
make_resolution_note,
make_resolution_note_slack_message,
):
UpdateResolutionNoteStep = ScenarioStep.get_step("resolution_note", "UpdateResolutionNoteStep")
organization, user, slack_team_identity, _ = make_organization_and_user_with_slack_identities()
step = UpdateResolutionNoteStep(slack_team_identity)
alert_receive_channel = make_alert_receive_channel(organization)
alert_group = make_alert_group(alert_receive_channel)
make_slack_message(alert_group=alert_group, channel_id="RANDOM_CHANNEL_ID", slack_id="RANDOM_MESSAGE_ID")
slack_channel = make_slack_channel(slack_team_identity)
slack_channel_id = slack_channel.slack_id
make_slack_message(alert_group=alert_group, channel_id=slack_channel_id)
resolution_note = make_resolution_note(alert_group=alert_group, author=user, message_text="a" * 3000)
make_resolution_note_slack_message(
alert_group=alert_group,
@ -197,6 +206,9 @@ def test_post_or_update_resolution_note_in_thread_update_truncate_message_text(
text=resolution_note.text,
)
UpdateResolutionNoteStep = ScenarioStep.get_step("resolution_note", "UpdateResolutionNoteStep")
step = UpdateResolutionNoteStep(slack_team_identity)
with patch("apps.slack.client.SlackClient.api_call") as mock_slack_api_call:
mock_slack_api_call.return_value = {
"ts": "timestamp",
@ -345,16 +357,21 @@ def test_add_to_resolution_note(
make_alert_group,
make_alert,
make_slack_message,
make_slack_channel,
settings,
):
organization, user, slack_team_identity, slack_user_identity = make_organization_and_user_with_slack_identities()
alert_receive_channel = make_alert_receive_channel(organization)
alert_group = make_alert_group(alert_receive_channel)
make_alert(alert_group=alert_group, raw_request_data={})
slack_message = make_slack_message(alert_group=alert_group)
slack_channel = make_slack_channel(slack_team_identity)
slack_channel_id = slack_channel.slack_id
slack_message = make_slack_message(alert_group=alert_group, channel_id=slack_channel_id)
payload = {
"channel": {"id": slack_message.channel_id},
"channel": {"id": slack_channel_id},
"message_ts": "random_ts",
"message": {
"type": "message",

View file

@ -219,6 +219,7 @@ class TestSlackChannelMessageEventStep:
make_alert_receive_channel,
make_alert_group,
make_slack_message,
make_slack_channel,
) -> None:
(
organization,
@ -229,11 +230,13 @@ class TestSlackChannelMessageEventStep:
integration = make_alert_receive_channel(organization)
alert_group = make_alert_group(integration)
channel = "potato"
ts = 88945.4849
thread_ts = 16789.123
make_slack_message(alert_group, slack_id=thread_ts, channel_id=channel)
slack_channel = make_slack_channel(slack_team_identity)
slack_channel_id = slack_channel.slack_id
make_slack_message(alert_group, slack_id=thread_ts, channel_id=slack_channel_id)
mock_permalink = "http://example.com"
@ -243,7 +246,7 @@ class TestSlackChannelMessageEventStep:
payload = {
"event": {
"channel": channel,
"channel": slack_channel_id,
"ts": ts,
"thread_ts": thread_ts,
"text": "h" * 2901,
@ -272,6 +275,7 @@ class TestSlackChannelMessageEventStep:
make_alert_receive_channel,
make_alert_group,
make_slack_message,
make_slack_channel,
) -> None:
(
organization,
@ -282,11 +286,13 @@ class TestSlackChannelMessageEventStep:
integration = make_alert_receive_channel(organization)
alert_group = make_alert_group(integration)
channel = "potato"
ts = 88945.4849
thread_ts = 16789.123
make_slack_message(alert_group, slack_id=thread_ts, channel_id=channel)
slack_channel = make_slack_channel(slack_team_identity)
slack_channel_id = slack_channel.slack_id
make_slack_message(alert_group, slack_id=thread_ts, channel_id=slack_channel_id)
step = SlackChannelMessageEventStep(slack_team_identity, organization, user)
step._slack_client = Mock()
@ -296,7 +302,7 @@ class TestSlackChannelMessageEventStep:
payload = {
"event": {
"channel": channel,
"channel": slack_channel_id,
"ts": ts,
"thread_ts": thread_ts,
"text": "h" * 2901,
@ -318,6 +324,7 @@ class TestSlackChannelMessageEventStep:
make_alert_receive_channel,
make_alert_group,
make_slack_message,
make_slack_channel,
make_resolution_note_slack_message,
resolution_note_slack_message_already_exists,
) -> None:
@ -332,12 +339,13 @@ class TestSlackChannelMessageEventStep:
original_text = "original text"
new_text = "new text"
channel = "potato"
ts = 88945.4849
thread_ts = 16789.123
make_slack_message(alert_group, slack_id=thread_ts, channel_id=channel)
slack_channel = make_slack_channel(slack_team_identity)
slack_channel_id = slack_channel.slack_id
make_slack_message(alert_group, slack_id=thread_ts, channel_id=slack_channel_id)
resolution_note_slack_message = None
if resolution_note_slack_message_already_exists:
@ -353,7 +361,7 @@ class TestSlackChannelMessageEventStep:
payload = {
"event": {
"channel": channel,
"channel": slack_channel_id,
"ts": ts,
"thread_ts": thread_ts,
"text": new_text,
@ -433,23 +441,25 @@ class TestSlackChannelMessageEventStep:
make_alert_receive_channel,
make_alert_group,
make_resolution_note_slack_message,
make_slack_channel,
) -> None:
channel_id = "potato"
ts = 88945.4849
thread_ts = 16789.123
(
organization,
user,
slack_team_identity,
slack_user_identity,
) = make_organization_and_user_with_slack_identities()
slack_channel = make_slack_channel(slack_team_identity, slack_id=channel_id)
integration = make_alert_receive_channel(organization)
alert_group = make_alert_group(integration)
channel = "potato"
ts = 88945.4849
thread_ts = 16789.123
payload = {
"event": {
"channel": channel,
"channel": channel_id,
"previous_message": {
"ts": ts,
"thread_ts": thread_ts,
@ -458,7 +468,7 @@ class TestSlackChannelMessageEventStep:
}
make_resolution_note_slack_message(
alert_group, user, user, ts=ts, thread_ts=thread_ts, slack_channel_id=channel
alert_group, user, user, ts=ts, thread_ts=thread_ts, slack_channel=slack_channel
)
step = SlackChannelMessageEventStep(slack_team_identity, organization, user)
@ -471,7 +481,7 @@ class TestSlackChannelMessageEventStep:
ResolutionNoteSlackMessage.objects.filter(
ts=ts,
thread_ts=thread_ts,
slack_channel_id=channel,
slack_channel=slack_channel,
).count()
== 0
)

View file

@ -140,7 +140,10 @@ class User(models.Model):
user_schedule_export_token: "RelatedManager['UserScheduleExportAuthToken']"
wiped_alert_groups: "RelatedManager['AlertGroup']"
objects = UserManager.from_queryset(UserQuerySet)()
# mypy/django-stubs support isn't 100% there for this.. however, manually typing this (to what it actually is)
# works for now. See this issue for more details
# https://github.com/typeddjango/django-stubs/issues/353#issuecomment-1095656633
objects: UserQuerySet = UserManager.from_queryset(UserQuerySet)()
class Meta:
# For some reason there are cases when Grafana user gets deleted,

View file

@ -1,3 +1,4 @@
import typing
from datetime import timedelta
from django.core.exceptions import ObjectDoesNotExist
@ -11,6 +12,9 @@ from apps.user_management.models import User
from common.api_helpers.exceptions import BadRequest
from common.timezones import raise_exception_if_not_valid_timezone
if typing.TYPE_CHECKING:
from apps.slack.models import SlackChannel
@extend_schema_field(serializers.CharField)
class OrganizationFilteredPrimaryKeyRelatedField(RelatedField):
@ -118,6 +122,30 @@ class UsersFilteredByOrganizationField(serializers.Field):
return users
class SlackChannelsFilteredByOrganizationSlackWorkspaceField(serializers.RelatedField):
    """Related field that resolves a Slack channel ID (case-insensitive) to a
    ``SlackChannel`` belonging to the requesting user's organization's Slack workspace.

    Serializes a ``SlackChannel`` as its public primary key. Raises ``BadRequest``
    when the organization has no Slack workspace connected.
    """

    def get_queryset(self):
        # Without a request in the serializer context there is nothing to filter by.
        request = self.context.get("request", None)
        if not request:
            return None

        slack_team_identity = request.user.organization.slack_team_identity
        if slack_team_identity is None:
            raise BadRequest(detail="Slack isn't connected to this workspace")
        return slack_team_identity.cached_channels.all()

    def to_internal_value(self, slack_id: str):
        try:
            # NOTE: fetch the queryset first so a missing Slack connection surfaces
            # as BadRequest rather than being swallowed by the except clauses below.
            workspace_channels = self.get_queryset()
            return workspace_channels.get(slack_id=slack_id.upper())
        except ObjectDoesNotExist:
            raise ValidationError("Slack channel does not exist")
        except (TypeError, ValueError, AttributeError):
            # non-string input (no .upper()), or queryset is None (no request in context)
            raise ValidationError("Invalid Slack channel")

    def to_representation(self, obj: "SlackChannel") -> str:
        # Expose the stable public identifier, never the internal PK.
        return obj.public_primary_key
class IntegrationFilteredByOrganizationField(serializers.RelatedField):
def get_queryset(self):
request = self.context.get("request", None)

View file

@ -6,6 +6,7 @@ import pytz
from rest_framework import serializers
import common.api_helpers.custom_fields as cf
from common.api_helpers.exceptions import BadRequest
class TestTimeZoneField:
@ -100,3 +101,111 @@ class TestTimeZoneAwareDatetimeField:
else:
with pytest.raises(serializers.ValidationError):
serializer.is_valid(raise_exception=True)
class TestSlackChannelsFilteredByOrganizationSlackWorkspaceField:
class MockRequest:
def __init__(self, user) -> None:
self.user = user
class MySerializer(serializers.Serializer):
slack_channel_id = cf.SlackChannelsFilteredByOrganizationSlackWorkspaceField()
@pytest.mark.django_db
def test_org_does_not_have_slack_connected(
self,
make_organization,
make_user_for_organization,
):
organization = make_organization()
user = make_user_for_organization(organization)
serializer = self.MySerializer(
data={"slack_channel_id": "abcd"},
context={"request": self.MockRequest(user)},
)
with pytest.raises(BadRequest) as excinfo:
serializer.is_valid(raise_exception=True)
assert excinfo.value.detail == "Slack isn't connected to this workspace"
assert excinfo.value.status_code == 400
@pytest.mark.django_db
def test_org_channel_doesnt_belong_to_org(
self,
make_organization,
make_user_for_organization,
make_slack_team_identity,
make_slack_channel,
):
slack_channel1_id = "FOO"
slack_channel2_id = "BAR"
slack_team_identity1 = make_slack_team_identity()
make_slack_channel(slack_team_identity1, slack_id=slack_channel1_id)
slack_team_identity2 = make_slack_team_identity()
make_slack_channel(slack_team_identity2, slack_id=slack_channel2_id)
organization = make_organization(slack_team_identity=slack_team_identity1)
user = make_user_for_organization(organization)
serializer = self.MySerializer(
data={"slack_channel_id": slack_channel2_id},
context={"request": self.MockRequest(user)},
)
with pytest.raises(serializers.ValidationError) as excinfo:
serializer.is_valid(raise_exception=True)
assert excinfo.value.detail == {"slack_channel_id": ["Slack channel does not exist"]}
@pytest.mark.django_db
def test_invalid_slack_channel(
self,
make_organization,
make_user_for_organization,
make_slack_team_identity,
make_slack_channel,
):
slack_channel_id = "FOO"
slack_team_identity = make_slack_team_identity()
make_slack_channel(slack_team_identity, slack_id=slack_channel_id)
organization = make_organization(slack_team_identity=slack_team_identity)
user = make_user_for_organization(organization)
serializer = self.MySerializer(
data={"slack_channel_id": 1},
context={"request": self.MockRequest(user)},
)
with pytest.raises(serializers.ValidationError) as excinfo:
serializer.is_valid(raise_exception=True)
assert excinfo.value.detail == {"slack_channel_id": ["Invalid Slack channel"]}
@pytest.mark.django_db
def test_valid(
self,
make_organization,
make_user_for_organization,
make_slack_team_identity,
make_slack_channel,
):
slack_channel_id = "FOO"
slack_team_identity = make_slack_team_identity()
slack_channel = make_slack_channel(slack_team_identity, slack_id=slack_channel_id)
organization = make_organization(slack_team_identity=slack_team_identity)
user = make_user_for_organization(organization)
context = {"request": self.MockRequest(user)}
serializer = self.MySerializer(data={"slack_channel_id": slack_channel_id}, context=context)
serializer.is_valid(raise_exception=True)
assert serializer.validated_data["slack_channel_id"] == slack_channel
# case insensitive
serializer = self.MySerializer(data={"slack_channel_id": slack_channel_id.lower()}, context=context)
serializer.is_valid(raise_exception=True)
assert serializer.validated_data["slack_channel_id"] == slack_channel