Merge pull request #5128 from grafana/dev

v1.10.2
This commit is contained in:
Michael Derynck 2024-10-04 11:47:33 -06:00 committed by GitHub
commit 305801a888
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
8 changed files with 832 additions and 42 deletions

View file

@ -227,14 +227,50 @@ class AlertGroupLogRecord(models.Model):
STEP_SPECIFIC_INFO_KEYS = ["schedule_name", "custom_button_name", "usergroup_handle", "source_integration_name"]
def _make_log_line_link(self, url, title, html=False, for_slack=False, substitute_with_tag=False):
if html and url:
return f"<a href='{url}'>{title}</a>"
elif for_slack and url:
return f"<{url}|{title}>"
elif substitute_with_tag:
return f"{{{{{substitute_with_tag}}}}}"
else:
return title
def render_log_line_json(self):
time = humanize.naturaldelta(self.alert_group.started_at - self.created_at)
created_at = DateTimeField().to_representation(self.created_at)
organization = self.alert_group.channel.organization
author = self.author.short(organization) if self.author is not None else None
escalation_chain = self.alert_group.channel_filter.escalation_chain if self.alert_group.channel_filter else None
step_info = self.get_step_specific_info()
escalation_chain_data = (
{
"pk": escalation_chain.public_primary_key,
"title": escalation_chain.name,
}
if escalation_chain
else None
)
schedule = (
{
"pk": self.escalation_policy.notify_schedule.public_primary_key,
"title": self.escalation_policy.notify_schedule.name,
}
if self.escalation_policy and self.escalation_policy.notify_schedule
else None
)
webhook = (
{
"pk": step_info["webhook_id"],
"title": step_info.get("webhook_name", "webhook"),
}
if step_info and "webhook_id" in step_info
else None
)
sf = SlackFormatter(organization)
action = sf.format(self.rendered_log_line_action(substitute_author_with_tag=True))
action = sf.format(self.rendered_log_line_action(substitute_with_tag=True))
action = clean_markup(action)
result = {
@ -244,6 +280,9 @@ class AlertGroupLogRecord(models.Model):
"type": self.type,
"created_at": created_at,
"author": author,
"escalation_chain": escalation_chain_data,
"schedule": schedule,
"webhook": webhook,
}
return result
@ -258,7 +297,7 @@ class AlertGroupLogRecord(models.Model):
result += self.rendered_log_line_action(for_slack=for_slack, html=html)
return result
def rendered_log_line_action(self, for_slack=False, html=False, substitute_author_with_tag=False):
def rendered_log_line_action(self, for_slack=False, html=False, substitute_with_tag=False):
from apps.alerts.models import EscalationPolicy
result = ""
@ -276,7 +315,7 @@ class AlertGroupLogRecord(models.Model):
elif self.action_source == ActionSource.BACKSYNC:
author_name = "source integration " + step_specific_info.get("source_integration_name", "")
elif self.author:
if substitute_author_with_tag:
if substitute_with_tag:
author_name = "{{author}}"
elif for_slack:
author_name = self.author.get_username_with_slack_verbal()
@ -303,7 +342,9 @@ class AlertGroupLogRecord(models.Model):
result += f'alert group assigned to route "{channel_filter.str_for_clients}"'
if escalation_chain is not None:
result += f' with escalation chain "{escalation_chain.name}"'
tag = "escalation_chain" if substitute_with_tag else False
escalation_chain_text = self._make_log_line_link(None, escalation_chain.name, html, for_slack, tag)
result += f' with escalation chain "{escalation_chain_text}"'
else:
result += " with no escalation chain, skipping escalation"
else:
@ -379,7 +420,9 @@ class AlertGroupLogRecord(models.Model):
important_text = ""
if escalation_policy_step == EscalationPolicy.STEP_NOTIFY_SCHEDULE_IMPORTANT:
important_text = " (Important)"
result += f'triggered step "Notify on-call from Schedule {schedule_name}{important_text}"'
tag = "schedule" if substitute_with_tag else False
schedule_text = self._make_log_line_link(None, schedule_name, html, for_slack, tag)
result += f'triggered step "Notify on-call from Schedule {schedule_text}{important_text}"'
elif escalation_policy_step == EscalationPolicy.STEP_REPEAT_ESCALATION_N_TIMES:
result += "escalation started from the beginning"
else:
@ -485,7 +528,10 @@ class AlertGroupLogRecord(models.Model):
trigger = f"{author_name}"
else:
trigger = trigger or "escalation chain"
result += f"outgoing webhook `{webhook_name}` triggered by {trigger}"
tag = "webhook" if substitute_with_tag else False
webhook_text = self._make_log_line_link(None, webhook_name, html, for_slack, tag)
result += f"outgoing webhook `{webhook_text}` triggered by {trigger}"
elif self.type == AlertGroupLogRecord.TYPE_FAILED_ATTACHMENT:
if self.alert_group.slack_message is not None:
result += (

View file

@ -2,7 +2,8 @@ from unittest.mock import patch
import pytest
from apps.alerts.models import AlertGroupLogRecord
from apps.alerts.models import AlertGroupLogRecord, EscalationPolicy
from apps.schedules.models import OnCallScheduleWeb
@pytest.mark.django_db
@ -37,3 +38,138 @@ def test_trigger_update_signal(
with patch("apps.alerts.tasks.send_update_log_report_signal") as mock_update_log_signal:
alert_group.log_records.create(type=log_type)
mock_update_log_signal.apply_async.assert_called_once()
@pytest.mark.django_db
@pytest.mark.parametrize(
    "for_slack, html, substitute_with_tag, expected",
    [
        (True, False, False, 'with escalation chain "Escalation name"'),
        (False, True, False, 'with escalation chain "Escalation name"'),
        # Include the closing quote so the assertion pins the full rendered fragment,
        # consistent with the other two cases.
        (False, False, True, 'with escalation chain "{{escalation_chain}}"'),
    ],
)
def test_log_record_escalation_chain_link(
    make_organization_with_slack_team_identity,
    make_alert_receive_channel,
    make_escalation_chain,
    make_channel_filter,
    make_alert_group,
    for_slack,
    html,
    substitute_with_tag,
    expected,
):
    """Route-assigned log lines should reference the escalation chain (plain,
    HTML/Slack, or as a ``{{escalation_chain}}`` tag), and
    ``render_log_line_json`` should expose it as ``{"pk", "title"}``.
    """
    organization, _ = make_organization_with_slack_team_identity()
    alert_receive_channel = make_alert_receive_channel(organization)
    escalation_chain = make_escalation_chain(organization, name="Escalation name")
    channel_filter = make_channel_filter(alert_receive_channel, escalation_chain=escalation_chain)
    alert_group = make_alert_group(alert_receive_channel, channel_filter=channel_filter)
    alert_group.raw_escalation_snapshot = alert_group.build_raw_escalation_snapshot()

    log = alert_group.log_records.create(
        type=AlertGroupLogRecord.TYPE_ROUTE_ASSIGNED,
    )

    log_line = log.rendered_log_line_action(for_slack=for_slack, html=html, substitute_with_tag=substitute_with_tag)
    assert expected in log_line

    log_data = log.render_log_line_json()
    escalation_chain_data = log_data.get("escalation_chain")
    assert escalation_chain_data == {"pk": escalation_chain.public_primary_key, "title": escalation_chain.name}
@pytest.mark.django_db
@pytest.mark.parametrize(
    "for_slack, html, substitute_with_tag, expected",
    [
        (True, False, False, "Notify on-call from Schedule 'Schedule name'"),
        (False, True, False, "Notify on-call from Schedule 'Schedule name'"),
        (False, False, True, "Notify on-call from Schedule {{schedule}}"),
    ],
)
def test_log_record_schedule_link(
    make_organization_with_slack_team_identity,
    make_alert_receive_channel,
    make_channel_filter,
    make_alert_group,
    make_schedule,
    make_escalation_chain,
    make_escalation_policy,
    for_slack,
    html,
    substitute_with_tag,
    expected,
):
    """Escalation-triggered log lines should reference the notify schedule
    (plain text or as a ``{{schedule}}`` tag), and ``render_log_line_json``
    should expose the schedule as ``{"pk", "title"}``.
    """
    organization, _ = make_organization_with_slack_team_identity()
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel)
    schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb, name="Schedule name")
    escalation_chain = make_escalation_chain(organization, name="Escalation name")
    channel_filter = make_channel_filter(alert_receive_channel, escalation_chain=escalation_chain)
    escalation_policy = make_escalation_policy(
        escalation_chain=channel_filter.escalation_chain,
        escalation_policy_step=EscalationPolicy.STEP_NOTIFY_SCHEDULE,
        notify_schedule=schedule,
    )
    # schedule_name in step_specific_info is what the rendered log line displays.
    log = alert_group.log_records.create(
        type=AlertGroupLogRecord.TYPE_ESCALATION_TRIGGERED,
        step_specific_info={"schedule_name": schedule.name},
        escalation_policy=escalation_policy,
    )
    log_line = log.rendered_log_line_action(for_slack=for_slack, html=html, substitute_with_tag=substitute_with_tag)
    assert expected in log_line
    log_data = log.render_log_line_json()
    schedule_data = log_data.get("schedule")
    assert schedule_data == {"pk": schedule.public_primary_key, "title": schedule.name}
@pytest.mark.django_db
@pytest.mark.parametrize(
    "for_slack, html, substitute_with_tag, expected",
    [
        (True, False, False, "outgoing webhook `Webhook name`"),
        (False, True, False, "outgoing webhook `Webhook name`"),
        (False, False, True, "outgoing webhook `{{webhook}}`"),
    ],
)
def test_log_record_webhook_link(
    make_organization_with_slack_team_identity,
    make_alert_receive_channel,
    make_channel_filter,
    make_alert_group,
    make_custom_webhook,
    make_escalation_chain,
    make_escalation_policy,
    for_slack,
    html,
    substitute_with_tag,
    expected,
):
    """Webhook-triggered log lines should reference the outgoing webhook
    (plain text or as a ``{{webhook}}`` tag), and ``render_log_line_json``
    should expose the webhook as ``{"pk", "title"}``.
    """
    organization, _ = make_organization_with_slack_team_identity()
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel)
    webhook = make_custom_webhook(organization, name="Webhook name")
    escalation_chain = make_escalation_chain(organization, name="Escalation name")
    channel_filter = make_channel_filter(alert_receive_channel, escalation_chain=escalation_chain)
    escalation_policy = make_escalation_policy(
        escalation_chain=channel_filter.escalation_chain,
        escalation_policy_step=EscalationPolicy.STEP_TRIGGER_CUSTOM_WEBHOOK,
        custom_webhook=webhook,
    )
    # webhook_id / webhook_name in step_specific_info drive both the rendered
    # log line and the "webhook" entry of render_log_line_json.
    log = alert_group.log_records.create(
        type=AlertGroupLogRecord.TYPE_CUSTOM_WEBHOOK_TRIGGERED,
        step_specific_info={"webhook_id": webhook.public_primary_key, "webhook_name": webhook.name},
        escalation_policy=escalation_policy,
    )
    log_line = log.rendered_log_line_action(for_slack=for_slack, html=html, substitute_with_tag=substitute_with_tag)
    assert expected in log_line
    log_data = log.render_log_line_json()
    webhook_data = log_data.get("webhook")
    assert webhook_data == {"pk": webhook.public_primary_key, "title": webhook.name}

View file

@ -106,8 +106,14 @@ def test_get_resolution_note(
assert response.data == result
@patch("apps.alerts.tasks.send_update_resolution_note_signal.send_update_resolution_note_signal.apply_async")
@pytest.mark.django_db
def test_create_resolution_note(make_organization_and_user_with_token, make_alert_receive_channel, make_alert_group):
def test_create_resolution_note(
mock_send_update_resolution_note_signal,
make_organization_and_user_with_token,
make_alert_receive_channel,
make_alert_group,
):
organization, user, token = make_organization_and_user_with_token()
client = APIClient()
@ -137,6 +143,8 @@ def test_create_resolution_note(make_organization_and_user_with_token, make_aler
assert response.status_code == status.HTTP_201_CREATED
assert response.data == result
mock_send_update_resolution_note_signal.assert_called_once()
@pytest.mark.django_db
def test_create_resolution_note_invalid_text(
@ -163,8 +171,10 @@ def test_create_resolution_note_invalid_text(
assert response.data["text"][0] == "This field may not be blank."
@patch("apps.alerts.tasks.send_update_resolution_note_signal.send_update_resolution_note_signal.apply_async")
@pytest.mark.django_db
def test_update_resolution_note(
mock_send_update_resolution_note_signal,
make_organization_and_user_with_token,
make_alert_receive_channel,
make_alert_group,
@ -206,6 +216,39 @@ def test_update_resolution_note(
assert resolution_note.text == result["text"]
assert response.data == result
mock_send_update_resolution_note_signal.assert_called_once()
@patch("apps.alerts.tasks.send_update_resolution_note_signal.send_update_resolution_note_signal.apply_async")
@pytest.mark.django_db
def test_update_resolution_note_same_text(
    mock_send_update_resolution_note_signal,
    make_organization_and_user_with_token,
    make_alert_receive_channel,
    make_alert_group,
    make_resolution_note,
):
    """PUT with unchanged text should succeed but must NOT fire the
    resolution-note update signal (no-op updates don't notify listeners).
    """
    organization, user, token = make_organization_and_user_with_token()
    client = APIClient()
    alert_receive_channel = make_alert_receive_channel(organization)
    alert_group = make_alert_group(alert_receive_channel)
    resolution_note = make_resolution_note(
        alert_group=alert_group,
        source=ResolutionNote.Source.WEB,
        author=user,
    )
    url = reverse("api-public:resolution_notes-detail", kwargs={"pk": resolution_note.public_primary_key})
    # Send the note's current text back unchanged.
    response = client.put(
        url, data={"text": resolution_note.message_text}, format="json", HTTP_AUTHORIZATION=f"{token}"
    )
    assert response.status_code == status.HTTP_200_OK
    # update signal shouldn't be sent when text doesn't change
    mock_send_update_resolution_note_signal.assert_not_called()
@pytest.mark.django_db
def test_update_resolution_note_invalid_source(
@ -242,8 +285,10 @@ def test_update_resolution_note_invalid_source(
assert response.data["detail"] == "Cannot update message with this source type"
@patch("apps.alerts.tasks.send_update_resolution_note_signal.send_update_resolution_note_signal.apply_async")
@pytest.mark.django_db
def test_delete_resolution_note(
mock_send_update_resolution_note_signal,
make_organization_and_user_with_token,
make_alert_receive_channel,
make_alert_group,
@ -272,6 +317,7 @@ def test_delete_resolution_note(
resolution_note.refresh_from_db()
assert resolution_note.deleted_at is not None
mock_send_update_resolution_note_signal.assert_called_once()
response = client.get(url, format="json", HTTP_AUTHORIZATION=f"{token}")

View file

@ -56,20 +56,16 @@ class ResolutionNoteView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelView
except ResolutionNote.DoesNotExist:
raise NotFound
def dispatch(self, request, *args, **kwargs):
result = super().dispatch(request, *args, **kwargs)
def perform_create(self, serializer):
    """Save the new resolution note, then notify listeners.

    Fires send_update_resolution_note_signal asynchronously so dependent
    state (e.g. the alert group's log) is refreshed after creation.
    """
    super().perform_create(serializer)
    send_update_resolution_note_signal.apply_async((serializer.instance.alert_group.pk, serializer.instance.pk))
# send signal to update alert group and resolution_note
method = request.method.lower()
if method in ["post", "put", "patch", "delete"]:
instance_id = self.kwargs.get("pk") or result.data.get("id")
if instance_id:
instance = ResolutionNote.objects_with_deleted.filter(public_primary_key=instance_id).first()
if instance is not None:
send_update_resolution_note_signal.apply_async(
kwargs={
"alert_group_pk": instance.alert_group.pk,
"resolution_note_pk": instance.pk,
}
)
return result
def perform_update(self, serializer):
    """Save the updated resolution note; signal only if the text changed.

    The comparison happens before super().perform_update(), while
    serializer.instance still holds the pre-update message_text.
    """
    is_text_updated = serializer.instance.message_text != serializer.validated_data["message_text"]
    super().perform_update(serializer)
    if is_text_updated:
        send_update_resolution_note_signal.apply_async((serializer.instance.alert_group.pk, serializer.instance.pk))
def perform_destroy(self, instance):
    """Delete the resolution note, then notify listeners of the removal."""
    super().perform_destroy(instance)
    send_update_resolution_note_signal.apply_async((instance.alert_group.pk, instance.pk))

View file

@ -323,38 +323,53 @@ class CustomOnCallShift(models.Model):
# we may need to iterate several times over users until we get a seen combination
# use the group index as reference since user groups could repeat in the queue
cycle_user_groups = itertools.cycle(range(len(users_queue)))
orig_start = last_start = start
previous_day = None
all_rotations_checked = False
# we need to go through each individual day
day_by_day_rrule = copy.deepcopy(self.event_ical_rules)
day_by_day_rrule["interval"] = 1
for user_group_id in cycle_user_groups:
for i in range(self.interval):
if not start: # means that rotation ends before next event starts
all_rotations_checked = True
break
last_start = start
day = CustomOnCallShift.ICAL_WEEKDAY_MAP[start.weekday()]
# double-check day is valid (when until is set, we may get unexpected days)
if day in self.by_day:
if not start:
# means that rotation ended before next event starts
# keep iterating to track missing combinations and get the right week_interval
if previous_day is None:
day = self.by_day[0]
else:
previous_index = self.by_day.index(previous_day)
day = self.by_day[(previous_index + 1) % len(self.by_day)]
if (user_group_id, day, i) in combinations:
all_rotations_checked = True
break
starting_dates.append(start)
combinations.append((user_group_id, day, i))
# get next event date following the original rule
event_ical = self.generate_ical(start, 1, None, 1, time_zone, custom_rrule=day_by_day_rrule)
start = self.get_rotation_date(event_ical, get_next_date=True, interval=1)
previous_day = day
else:
day = CustomOnCallShift.ICAL_WEEKDAY_MAP[start.weekday()]
# double-check day is valid (when until is set, we may get unexpected days)
if day in self.by_day:
if (user_group_id, day, i) in combinations:
all_rotations_checked = True
break
starting_dates.append(start)
combinations.append((user_group_id, day, i))
previous_day = day
# get next event date following the original rule
event_ical = self.generate_ical(start, 1, None, 1, time_zone, custom_rrule=day_by_day_rrule)
start = self.get_rotation_date(event_ical, get_next_date=True, interval=1)
if all_rotations_checked:
break
week_interval = 1
if orig_start and last_start:
# number of weeks used to cover all combinations
week_interval = ((last_start - orig_start).days // 7) or 1
# interval is given by the number of weeks required to cover all combinations
week_interval = (len(combinations) // len(self.by_day)) or 1
counter = 1
for (user_group_id, day, _), start in zip(combinations, starting_dates):
for (user_group_id, day, _), start in itertools.zip_longest(combinations, starting_dates, fillvalue=None):
if not start:
# means that rotation ended before next event starts, no more events to generate
break
users = users_queue[user_group_id]
for user_counter, user in enumerate(users, start=1):
# setup weekly events, for each user group/day combinations,

View file

@ -0,0 +1,474 @@
from typing import List, Tuple

from apps.schedules.models import CustomOnCallShift

"""
Simplified shift definition to test that shifts contain the correct users.
This test is only checking the start date of the shift is landing on the correct day.
If you need a test for precise hours that should be a different test so that these cases
can stay easy to define.
Define custom shift test case parameters:
users_per_group - Recurrence groups, generates users for each group based on number, Users are named A-Z
shift_start - Date to start shift
day_mask - Which days are included, None to include all
total_days - Length of shift (end = start + total_days)
frequency - Recurrence frequency
interval - Recurrence interval
expected_result - Dict where each day displays the users that should be scheduled
"""

# Tuple order matches the parametrize signature in test_ical_shift_generation:
# (users_per_group, shift_start, day_mask, total_days, frequency, interval, expected_result)
CUSTOM_SHIFT_TEST_CASES: List[Tuple] = [
    (
        # 0 - Original Test Case
        [1, 1, 1],
        "2024-08-01",
        ["FR"],
        21,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-02": "A",
            "2024-08-09": "B",
            "2024-08-16": "C",
        },
    ),
    (
        # 1 - Weekdays Start Monday Daily
        [1, 1, 1, 1, 1],
        "2024-08-05",
        ["MO", "TU", "WE", "TH", "FR"],
        14,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-05": "A",
            "2024-08-06": "B",
            "2024-08-07": "C",
            "2024-08-08": "D",
            "2024-08-09": "E",
            "2024-08-12": "A",
            "2024-08-13": "B",
            "2024-08-14": "C",
            "2024-08-15": "D",
            "2024-08-16": "E",
        },
    ),
    (
        # 2 - Weekdays Start Monday Daily Interval 2
        [1, 1, 1, 1],
        "2024-08-05",
        ["MO", "TU", "WE", "TH", "FR"],
        14,
        CustomOnCallShift.FREQUENCY_DAILY,
        2,
        {
            "2024-08-05": "A",
            "2024-08-06": "A",
            "2024-08-07": "B",
            "2024-08-08": "B",
            "2024-08-09": "C",
            "2024-08-12": "C",
            "2024-08-13": "D",
            "2024-08-14": "D",
            "2024-08-15": "A",
            "2024-08-16": "A",
        },
    ),
    (
        # 3 - Weekdays Start Monday Weekly
        [1, 1, 1, 1, 1],
        "2024-08-05",
        ["MO", "TU", "WE", "TH", "FR"],
        14,
        CustomOnCallShift.FREQUENCY_WEEKLY,
        1,
        {
            "2024-08-05": "A",
            "2024-08-06": "A",
            "2024-08-07": "A",
            "2024-08-08": "A",
            "2024-08-09": "A",
            "2024-08-12": "B",
            "2024-08-13": "B",
            "2024-08-14": "B",
            "2024-08-15": "B",
            "2024-08-16": "B",
        },
    ),
    (
        # 4 - Weekdays Start Monday Monthly
        [1, 1, 1, 1, 1],
        "2024-08-26",
        ["MO", "TU", "WE", "TH", "FR"],
        14,
        CustomOnCallShift.FREQUENCY_MONTHLY,
        1,
        {
            "2024-08-26": "A",
            "2024-08-27": "A",
            "2024-08-28": "A",
            "2024-08-29": "A",
            "2024-08-30": "A",
            "2024-09-02": "B",
            "2024-09-03": "B",
            "2024-09-04": "B",
            "2024-09-05": "B",
            "2024-09-06": "B",
        },
    ),
    (
        # 5 - Weekdays Start Thursday Daily
        [1, 1, 1, 1, 1],
        "2024-08-08",
        ["MO", "TU", "WE", "TH", "FR"],
        14,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-08": "A",
            "2024-08-09": "B",
            "2024-08-12": "C",
            "2024-08-13": "D",
            "2024-08-14": "E",
            "2024-08-15": "A",
            "2024-08-16": "B",
            "2024-08-19": "C",
            "2024-08-20": "D",
            "2024-08-21": "E",
        },
    ),
    (
        # 6 - Weekdays Start Thursday Weekly
        [1, 1, 1, 1, 1],
        "2024-08-08",
        ["MO", "TU", "WE", "TH", "FR"],
        14,
        CustomOnCallShift.FREQUENCY_WEEKLY,
        1,
        {
            "2024-08-08": "A",
            "2024-08-09": "A",
            "2024-08-12": "B",
            "2024-08-13": "B",
            "2024-08-14": "B",
            "2024-08-15": "B",
            "2024-08-16": "B",
            "2024-08-19": "C",
            "2024-08-20": "C",
            "2024-08-21": "C",
        },
    ),
    (
        # 7 - Weekdays Start Thursday Monthly
        [1, 1, 1, 1, 1],
        "2024-08-29",
        ["MO", "TU", "WE", "TH", "FR"],
        14,
        CustomOnCallShift.FREQUENCY_MONTHLY,
        1,
        {
            "2024-08-29": "A",
            "2024-08-30": "A",
            "2024-09-02": "B",
            "2024-09-03": "B",
            "2024-09-04": "B",
            "2024-09-05": "B",
            "2024-09-06": "B",
            "2024-09-09": "B",
            "2024-09-10": "B",
            "2024-09-11": "B",
        },
    ),
    (
        # 8 - All Days uneven groups
        [2, 1],
        "2024-08-14",
        None,
        9,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-14": "AB",
            "2024-08-15": "C",
            "2024-08-16": "AB",
            "2024-08-17": "C",
            "2024-08-18": "AB",
            "2024-08-19": "C",
            "2024-08-20": "AB",
            "2024-08-21": "C",
            "2024-08-22": "AB",
        },
    ),
    (
        # 9 - Weekends Start Saturday
        [1, 1, 1],
        "2024-08-03",
        ["SA", "SU"],
        15,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-03": "A",
            "2024-08-04": "B",
            "2024-08-10": "C",
            "2024-08-11": "A",
            "2024-08-17": "B",
        },
    ),
    (
        # 10 - Weekends Start Saturday Users > Shifts
        [1, 1, 1, 1, 1],
        "2024-08-03",
        ["SA", "SU"],
        14,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-03": "A",
            "2024-08-04": "B",
            "2024-08-10": "C",
            "2024-08-11": "D",
        },
    ),
    (
        # 11 - Weekends Start Saturday Users > Shifts
        # NOTE(review): exact duplicate of case 10 — consider removing or varying it.
        [1, 1, 1, 1, 1],
        "2024-08-03",
        ["SA", "SU"],
        14,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-03": "A",
            "2024-08-04": "B",
            "2024-08-10": "C",
            "2024-08-11": "D",
        },
    ),
    (
        # 12 - Weekends Start Thursday
        [1],
        "2024-08-01",
        ["SA", "SU"],
        5,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-03": "A",
            "2024-08-04": "A",
        },
    ),
    (
        # 13 - Weekends Start Thursday
        [1, 1, 1],
        "2024-08-01",
        ["SA", "SU"],
        17,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-03": "A",
            "2024-08-04": "B",
            "2024-08-10": "C",
            "2024-08-11": "A",
            "2024-08-17": "B",
        },
    ),
    (
        # 14 - Weekends Start Thursday Users > Shifts
        [1, 1, 1, 1, 1, 1],
        "2024-08-01",
        ["SA", "SU"],
        17,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-08-03": "A",
            "2024-08-04": "B",
            "2024-08-10": "C",
            "2024-08-11": "D",
            "2024-08-17": "E",
        },
    ),
    (
        # 15 - Weekends Start Thursday Interval 2
        [1, 1, 1],
        "2024-08-01",
        ["SA", "SU"],
        17,
        CustomOnCallShift.FREQUENCY_DAILY,
        2,
        {
            "2024-08-03": "A",
            "2024-08-04": "A",
            "2024-08-10": "B",
            "2024-08-11": "B",
            "2024-08-17": "C",
        },
    ),
    (
        # 16 - Weekends Start Saturday Weekly
        # NOTE(review): shift_start 2024-08-01 is a Thursday (first event lands
        # Saturday); label may be misleading — confirm intended name.
        [1, 1, 1],
        "2024-08-01",
        ["SA", "SU"],
        18,
        CustomOnCallShift.FREQUENCY_WEEKLY,
        1,
        {
            "2024-08-03": "A",
            "2024-08-04": "A",
            "2024-08-10": "B",
            "2024-08-11": "B",
            "2024-08-17": "C",
            "2024-08-18": "C",
        },
    ),
    (
        # 17 - Weekends Start Saturday Weekly Interval 2
        # NOTE(review): shift_start 2024-08-01 is a Thursday — see case 16.
        [1, 1, 1],
        "2024-08-01",
        ["SA", "SU"],
        18,
        CustomOnCallShift.FREQUENCY_WEEKLY,
        2,
        {
            "2024-08-03": "A",
            "2024-08-04": "A",
            "2024-08-17": "B",
            "2024-08-18": "B",
        },
    ),
    (
        # 18 - Weekends Start Thursday Monthly
        [1, 1, 1],
        "2024-08-29",
        ["SA", "SU"],
        18,
        CustomOnCallShift.FREQUENCY_MONTHLY,
        1,
        {
            "2024-08-31": "A",
            "2024-09-01": "B",
            "2024-09-07": "B",
            "2024-09-08": "B",
            "2024-09-14": "B",
            "2024-09-15": "B",
        },
    ),
    (
        # 19 - No days (==All)
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        "2024-09-30",
        [],
        18,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-09-30": "A",
            "2024-10-01": "B",
            "2024-10-02": "C",
            "2024-10-03": "D",
            "2024-10-04": "E",
            "2024-10-05": "F",
            "2024-10-06": "G",
            "2024-10-07": "H",
            "2024-10-08": "I",
            "2024-10-09": "J",
            "2024-10-10": "A",
            "2024-10-11": "B",
            "2024-10-12": "C",
            "2024-10-13": "D",
            "2024-10-14": "E",
            "2024-10-15": "F",
            "2024-10-16": "G",
            "2024-10-17": "H",
        },
    ),
    (
        # 20 - All days
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        "2024-09-30",
        ["MO", "TU", "WE", "TH", "FR", "SA", "SU"],
        18,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {
            "2024-09-30": "A",
            "2024-10-01": "B",
            "2024-10-02": "C",
            "2024-10-03": "D",
            "2024-10-04": "E",
            "2024-10-05": "F",
            "2024-10-06": "G",
            "2024-10-07": "H",
            "2024-10-08": "I",
            "2024-10-09": "J",
            "2024-10-10": "A",
            "2024-10-11": "B",
            "2024-10-12": "C",
            "2024-10-13": "D",
            "2024-10-14": "E",
            "2024-10-15": "F",
            "2024-10-16": "G",
            "2024-10-17": "H",
        },
    ),
    (
        # 21 - No days (==All) 7 Interval
        # Note: This test verifies expected behavior not the correct result,
        # it *should* be equivalent to #22, but it is not using the daily_by_day logic
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        "2024-09-30",
        [],
        18,
        CustomOnCallShift.FREQUENCY_DAILY,
        7,
        {
            "2024-09-30": "A",
            "2024-10-07": "B",
            "2024-10-14": "C",
            "2024-10-15": "D",
            "2024-10-16": "E",
            "2024-10-17": "F",
        },
    ),
    (
        # 22 - All days 7 Interval
        [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        "2024-09-30",
        ["MO", "TU", "WE", "TH", "FR", "SA", "SU"],
        18,
        CustomOnCallShift.FREQUENCY_DAILY,
        7,
        {
            "2024-09-30": "A",
            "2024-10-01": "A",
            "2024-10-02": "A",
            "2024-10-03": "A",
            "2024-10-04": "A",
            "2024-10-05": "A",
            "2024-10-06": "A",
            "2024-10-07": "B",
            "2024-10-08": "B",
            "2024-10-09": "B",
            "2024-10-10": "B",
            "2024-10-11": "B",
            "2024-10-12": "B",
            "2024-10-13": "B",
            "2024-10-14": "C",
            "2024-10-15": "C",
            "2024-10-16": "C",
            "2024-10-17": "C",
        },
    ),
    (
        # 23 - Single User 1 day
        [1],
        "2024-08-01",
        ["MO"],
        14,
        CustomOnCallShift.FREQUENCY_DAILY,
        1,
        {"2024-08-05": "A", "2024-08-12": "A"},
    ),
]

View file

@ -1,12 +1,17 @@
import datetime
from calendar import monthrange
from collections import defaultdict
from unittest.mock import patch
from zoneinfo import ZoneInfo
import icalendar
import pytest
from django.utils import timezone
from recurring_ical_events import UnfoldableCalendar
from apps.schedules.ical_utils import list_users_to_notify_from_ical
from apps.schedules.models import CustomOnCallShift, OnCallSchedule, OnCallScheduleCalendar, OnCallScheduleWeb
from apps.schedules.tests.custom_shift_test_cases import CUSTOM_SHIFT_TEST_CASES
@pytest.mark.django_db
@ -1826,3 +1831,72 @@ def test_refresh_schedule(make_organization_and_user, make_schedule, make_on_cal
assert mock_refresh_final.apply_async.called
assert schedule.cached_ical_file_primary is not None
assert schedule.cached_ical_file_overrides is not None
@pytest.mark.parametrize(
    "users_per_group, shift_start, day_mask, total_days, frequency, interval, expected_result", CUSTOM_SHIFT_TEST_CASES
)
@pytest.mark.django_db
def test_ical_shift_generation(
    make_organization,
    make_user_for_organization,
    make_schedule,
    make_on_call_shift,
    users_per_group,
    shift_start,
    day_mask,
    total_days,
    frequency,
    interval,
    expected_result,
):
    """Generate a rolling-users shift from a CUSTOM_SHIFT_TEST_CASES tuple and
    verify who is scheduled on each day of the unfolded iCal calendar.

    Only the date each event lands on is checked, not the hours (see the note
    in custom_shift_test_cases).
    """
    organization = make_organization()
    schedule = make_schedule(
        organization,
        schedule_class=OnCallScheduleWeb,
        name="test_web_schedule",
    )
    total_users = sum(users_per_group)
    # chr(1 + 64) == "A": usernames are the single letters A, B, C, ...
    users = [make_user_for_organization(organization, username=chr(i + 64)) for i in range(1, total_users + 1)]
    start = datetime.datetime.strptime(shift_start, "%Y-%m-%d").replace(tzinfo=ZoneInfo("UTC"))
    data = {
        "start": start,
        "rotation_start": start,
        "until": start + timezone.timedelta(days=total_days),
        "duration": timezone.timedelta(hours=12),
        "frequency": frequency,
        "by_day": day_mask,
        "schedule": schedule,
        "interval": interval,
        "priority_level": 1,
        "week_start": CustomOnCallShift.MONDAY,
    }
    on_call_shift = make_on_call_shift(
        organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
    )
    # Split the flat user list into rotation groups sized by users_per_group.
    rolling_users = []
    start_user = 0
    for count in users_per_group:
        end = start_user + count
        rolling_users.append(users[start_user:end])
        start_user = end
    on_call_shift.add_rolling_users(rolling_users)
    query_start = start
    query_end = data["until"]
    calendar = icalendar.Calendar.from_ical(schedule._ical_file_primary)
    events = UnfoldableCalendar(calendar).between(query_start, query_end)
    # Collect one string of user letters per day; the event SUMMARY is assumed
    # to end with the single-letter username.
    day_events = defaultdict(str)
    for event in events:
        event_start = event["DTSTART"].dt
        event_summary = event["SUMMARY"].strip()[-1]
        event_date = event_start.date().strftime("%Y-%m-%d")
        day_events[event_date] += event_summary
    # Sort letters within each day so comparison is order-independent.
    for k, v in day_events.items():
        day_events[k] = "".join(sorted(v))
    assert day_events == expected_result

View file

@ -6,6 +6,7 @@ import uuid
from django.db import models
from apps.slack.client import SlackClient
from apps.slack.constants import BLOCK_SECTION_TEXT_MAX_SIZE
from apps.slack.errors import (
SlackAPIChannelArchivedError,
SlackAPIError,
@ -127,6 +128,8 @@ class SlackMessage(models.Model):
else:
text = "{}\nInviting {} to look at the alert group.".format(alert_group.long_verbose_name, user_verbal)
text = text[:BLOCK_SECTION_TEXT_MAX_SIZE]
blocks = [
{
"type": "section",