commit
23b882dfc9
33 changed files with 971 additions and 72 deletions
14
CHANGELOG.md
14
CHANGELOG.md
|
|
@ -5,6 +5,16 @@ All notable changes to this project will be documented in this file.
|
|||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## v1.2.13 (2023-04-18)
|
||||
|
||||
### Changed
|
||||
|
||||
- Rework ical schedule export to include final events; also improve changing shifts sync
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fix issue when creating web overrides for TF schedules using a non-UTC timezone
|
||||
|
||||
## v1.2.12 (2023-04-18)
|
||||
|
||||
### Changed
|
||||
|
|
@ -34,6 +44,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||
|
||||
## v1.2.10 (2023-04-13)
|
||||
|
||||
### Added
|
||||
|
||||
- Added mine filter to schedules listing
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed a bug in GForm's RemoteSelect where the value for Dropdown could not change
|
||||
|
|
|
|||
|
|
@ -27,7 +27,6 @@ ICAL_URL = "https://calendar.google.com/calendar/ical/amixr.io_37gttuakhrtr75ano
|
|||
@pytest.fixture()
|
||||
def schedule_internal_api_setup(
|
||||
make_organization_and_user_with_plugin_token,
|
||||
make_user_auth_headers,
|
||||
make_slack_channel,
|
||||
make_schedule,
|
||||
):
|
||||
|
|
@ -377,6 +376,50 @@ def test_get_list_schedules_by_used(
|
|||
assert set(schedule_names) == set(expected_schedule_names)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize(
|
||||
"query_param, expected_schedule_names",
|
||||
[
|
||||
("?mine=true", ["test_web_schedule"]),
|
||||
("?mine=false", ["test_calendar_schedule", "test_ical_schedule", "test_web_schedule"]),
|
||||
("?mine=null", ["test_calendar_schedule", "test_ical_schedule", "test_web_schedule"]),
|
||||
("", ["test_calendar_schedule", "test_ical_schedule", "test_web_schedule"]),
|
||||
],
|
||||
)
|
||||
def test_get_list_schedules_by_mine(
|
||||
schedule_internal_api_setup,
|
||||
make_user_auth_headers,
|
||||
make_on_call_shift,
|
||||
query_param,
|
||||
expected_schedule_names,
|
||||
):
|
||||
user, token, calendar_schedule, ical_schedule, web_schedule, slack_channel = schedule_internal_api_setup
|
||||
client = APIClient()
|
||||
|
||||
today = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
# setup user shift in web schedule
|
||||
override_data = {
|
||||
"start": today + timezone.timedelta(hours=22),
|
||||
"rotation_start": today + timezone.timedelta(hours=22),
|
||||
"duration": timezone.timedelta(hours=1),
|
||||
"schedule": web_schedule,
|
||||
}
|
||||
override = make_on_call_shift(
|
||||
organization=user.organization, shift_type=CustomOnCallShift.TYPE_OVERRIDE, **override_data
|
||||
)
|
||||
override.add_rolling_users([[user]])
|
||||
web_schedule.refresh_ical_file()
|
||||
|
||||
url = reverse("api-internal:schedule-list") + query_param
|
||||
response = client.get(url, format="json", **make_user_auth_headers(user, token))
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert response.json()["count"] == len(expected_schedule_names)
|
||||
|
||||
schedule_names = [schedule["name"] for schedule in response.json()["results"]]
|
||||
assert set(schedule_names) == set(expected_schedule_names)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_get_list_schedules_pagination_respects_search(
|
||||
schedule_internal_api_setup,
|
||||
|
|
|
|||
|
|
@ -1716,19 +1716,20 @@ def test_upcoming_shifts_oncall(
|
|||
response = client.get(url, format="json", **make_user_auth_headers(admin, token))
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
returned_data = response.data
|
||||
assert returned_data[schedule.public_primary_key]["schedule"] == schedule.name
|
||||
assert returned_data[schedule.public_primary_key]["is_oncall"]
|
||||
assert returned_data[schedule.public_primary_key]["current_shift"]["start"] == on_call_shift.start
|
||||
returned_data = response.data[0]
|
||||
assert returned_data["schedule_id"] == schedule.public_primary_key
|
||||
assert returned_data["schedule_name"] == schedule.name
|
||||
assert returned_data["is_oncall"]
|
||||
assert returned_data["current_shift"]["start"] == on_call_shift.start
|
||||
next_shift_start = on_call_shift.start + timezone.timedelta(days=1)
|
||||
assert returned_data[schedule.public_primary_key]["next_shift"]["start"] == next_shift_start
|
||||
assert returned_data["next_shift"]["start"] == next_shift_start
|
||||
|
||||
# empty response for other user
|
||||
url = reverse("api-internal:user-upcoming-shifts", kwargs={"pk": other_user.public_primary_key})
|
||||
response = client.get(url, format="json", **make_user_auth_headers(admin, token))
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
assert response.data == {}
|
||||
assert response.data == []
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
|
@ -1748,11 +1749,11 @@ def test_upcoming_shifts_override(
|
|||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
)
|
||||
today = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
tomorrow = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) + timezone.timedelta(days=1)
|
||||
|
||||
override_data = {
|
||||
"start": today + timezone.timedelta(hours=22),
|
||||
"rotation_start": today + timezone.timedelta(hours=22),
|
||||
"start": tomorrow + timezone.timedelta(hours=22),
|
||||
"rotation_start": tomorrow + timezone.timedelta(hours=22),
|
||||
"duration": timezone.timedelta(hours=1),
|
||||
"schedule": schedule,
|
||||
}
|
||||
|
|
@ -1768,11 +1769,12 @@ def test_upcoming_shifts_override(
|
|||
response = client.get(url, format="json", **make_user_auth_headers(admin, token))
|
||||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
returned_data = response.data
|
||||
assert returned_data[schedule.public_primary_key]["schedule"] == schedule.name
|
||||
assert returned_data[schedule.public_primary_key]["is_oncall"] is False
|
||||
assert returned_data[schedule.public_primary_key]["current_shift"] is None
|
||||
assert returned_data[schedule.public_primary_key]["next_shift"]["start"] == override.start
|
||||
returned_data = response.data[0]
|
||||
assert returned_data["schedule_id"] == schedule.public_primary_key
|
||||
assert returned_data["schedule_name"] == schedule.name
|
||||
assert returned_data["is_oncall"] is False
|
||||
assert returned_data["current_shift"] is None
|
||||
assert returned_data["next_shift"]["start"] == override.start
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
|
@ -1789,7 +1791,8 @@ def test_upcoming_shifts_multiple_schedules(
|
|||
_, token = make_token_for_organization(organization)
|
||||
|
||||
schedules = []
|
||||
for i in range(3):
|
||||
# create schedules in a reversed order to check the output is sorted later
|
||||
for i in range(2, -1, -1):
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
|
|
@ -1822,16 +1825,14 @@ def test_upcoming_shifts_multiple_schedules(
|
|||
|
||||
assert response.status_code == status.HTTP_200_OK
|
||||
returned_data = response.data
|
||||
for i, schedule in enumerate(schedules):
|
||||
assert returned_data[schedule.public_primary_key]["schedule"] == schedule.name
|
||||
for i, schedule in enumerate(reversed(schedules)):
|
||||
assert returned_data[i]["schedule_name"] == schedule.name
|
||||
expected_start = today + timezone.timedelta(hours=start_h) + timezone.timedelta(days=i)
|
||||
if i == 0:
|
||||
assert returned_data[schedule.public_primary_key]["is_oncall"]
|
||||
assert returned_data[schedule.public_primary_key]["current_shift"]["start"] == expected_start
|
||||
assert returned_data[schedule.public_primary_key]["next_shift"][
|
||||
"start"
|
||||
] == expected_start + timezone.timedelta(days=1)
|
||||
assert returned_data[i]["is_oncall"]
|
||||
assert returned_data[i]["current_shift"]["start"] == expected_start
|
||||
assert returned_data[i]["next_shift"]["start"] == expected_start + timezone.timedelta(days=1)
|
||||
else:
|
||||
assert returned_data[schedule.public_primary_key]["is_oncall"] is False
|
||||
assert returned_data[schedule.public_primary_key]["current_shift"] is None
|
||||
assert returned_data[schedule.public_primary_key]["next_shift"]["start"] == expected_start
|
||||
assert returned_data[i]["is_oncall"] is False
|
||||
assert returned_data[i]["current_shift"] is None
|
||||
assert returned_data[i]["next_shift"]["start"] == expected_start
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ from rest_framework.views import APIView
|
|||
|
||||
from apps.auth_token.auth import PluginAuthentication
|
||||
from apps.base.utils import live_settings
|
||||
from apps.webhooks.utils import is_webhooks_enabled_for_organization
|
||||
|
||||
FEATURE_SLACK = "slack"
|
||||
FEATURE_TELEGRAM = "telegram"
|
||||
|
|
@ -12,6 +13,7 @@ FEATURE_LIVE_SETTINGS = "live_settings"
|
|||
FEATURE_GRAFANA_CLOUD_NOTIFICATIONS = "grafana_cloud_notifications"
|
||||
FEATURE_GRAFANA_CLOUD_CONNECTION = "grafana_cloud_connection"
|
||||
FEATURE_WEB_SCHEDULES = "web_schedules"
|
||||
FEATURE_WEBHOOKS2 = "webhooks2"
|
||||
|
||||
|
||||
class FeaturesAPIView(APIView):
|
||||
|
|
@ -58,4 +60,7 @@ class FeaturesAPIView(APIView):
|
|||
if request.auth.organization.pk in enabled_web_schedules_orgs.json_value["org_ids"]:
|
||||
enabled_features.append(FEATURE_WEB_SCHEDULES)
|
||||
|
||||
if is_webhooks_enabled_for_organization(request.auth.organization.pk):
|
||||
enabled_features.append(FEATURE_WEBHOOKS2)
|
||||
|
||||
return enabled_features
|
||||
|
|
|
|||
|
|
@ -162,12 +162,14 @@ class ScheduleView(
|
|||
def get_queryset(self, ignore_filtering_by_available_teams=False):
|
||||
is_short_request = self.request.query_params.get("short", "false") == "true"
|
||||
filter_by_type = self.request.query_params.get("type")
|
||||
mine = BooleanField(allow_null=True).to_internal_value(data=self.request.query_params.get("mine"))
|
||||
used = BooleanField(allow_null=True).to_internal_value(data=self.request.query_params.get("used"))
|
||||
organization = self.request.auth.organization
|
||||
queryset = OnCallSchedule.objects.filter(organization=organization).defer(
|
||||
# avoid requesting large text fields which are not used when listing schedules
|
||||
"prev_ical_file_primary",
|
||||
"prev_ical_file_overrides",
|
||||
"cached_ical_final_schedule",
|
||||
)
|
||||
if not ignore_filtering_by_available_teams:
|
||||
queryset = queryset.filter(*self.available_teams_lookup_args).distinct()
|
||||
|
|
@ -178,6 +180,9 @@ class ScheduleView(
|
|||
queryset = queryset.filter().instance_of(SCHEDULE_TYPE_TO_CLASS[filter_by_type])
|
||||
if used is not None:
|
||||
queryset = queryset.filter(escalation_policies__isnull=not used).distinct()
|
||||
if mine:
|
||||
user = self.request.user
|
||||
queryset = queryset.related_to_user(user)
|
||||
|
||||
queryset = queryset.order_by("pk")
|
||||
return queryset
|
||||
|
|
@ -475,6 +480,12 @@ class ScheduleView(
|
|||
"href": api_root + "teams/",
|
||||
"global": True,
|
||||
},
|
||||
{
|
||||
"name": "mine",
|
||||
"type": "boolean",
|
||||
"display_name": "Mine",
|
||||
"default": "true",
|
||||
},
|
||||
{
|
||||
"name": "used",
|
||||
"type": "boolean",
|
||||
|
|
|
|||
|
|
@ -475,16 +475,26 @@ class UserView(
|
|||
schedules = OnCallSchedule.objects.related_to_user(user)
|
||||
|
||||
# check upcoming shifts
|
||||
upcoming = {}
|
||||
upcoming = []
|
||||
for schedule in schedules:
|
||||
current_shift, upcoming_shift = schedule.upcoming_shift_for_user(user, days=days)
|
||||
if current_shift or upcoming_shift:
|
||||
upcoming[schedule.public_primary_key] = {
|
||||
"schedule": schedule.name,
|
||||
"is_oncall": current_shift is not None,
|
||||
"current_shift": current_shift,
|
||||
"next_shift": upcoming_shift,
|
||||
}
|
||||
upcoming.append(
|
||||
{
|
||||
"schedule_id": schedule.public_primary_key,
|
||||
"schedule_name": schedule.name,
|
||||
"is_oncall": current_shift is not None,
|
||||
"current_shift": current_shift,
|
||||
"next_shift": upcoming_shift,
|
||||
}
|
||||
)
|
||||
|
||||
# sort entries by start timestamp
|
||||
def sorting_key(entry):
|
||||
shift = entry["current_shift"] if entry["current_shift"] else entry["next_shift"]
|
||||
return shift["start"]
|
||||
|
||||
upcoming.sort(key=sorting_key)
|
||||
|
||||
return Response(upcoming, status=status.HTTP_200_OK)
|
||||
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ from rest_framework import status
|
|||
from rest_framework.test import APIClient
|
||||
|
||||
from apps.auth_token.models import ScheduleExportAuthToken, UserScheduleExportAuthToken
|
||||
from apps.schedules.constants import ICAL_COMPONENT_VEVENT, ICAL_SUMMARY
|
||||
from apps.schedules.models import OnCallScheduleICal
|
||||
|
||||
ICAL_DATA = """
|
||||
|
|
@ -48,9 +49,13 @@ END:VCALENDAR
|
|||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_export_calendar(make_organization_and_user_with_token, make_schedule):
|
||||
def test_export_calendar(make_organization_and_user_with_token, make_user_for_organization, make_schedule):
|
||||
|
||||
organization, user, _ = make_organization_and_user_with_token()
|
||||
usernames = {"amixr", "justin.hunthrop@grafana.com"}
|
||||
# setup users for shifts
|
||||
for u in usernames:
|
||||
make_user_for_organization(organization, username=u)
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
|
|
@ -75,7 +80,11 @@ def test_export_calendar(make_organization_and_user_with_token, make_schedule):
|
|||
cal = Calendar.from_ical(response.data)
|
||||
|
||||
assert type(cal) == Calendar
|
||||
assert len(cal.subcomponents) == 2
|
||||
# check there are events
|
||||
assert len(cal.subcomponents) > 0
|
||||
for component in cal.walk():
|
||||
if component.name == ICAL_COMPONENT_VEVENT:
|
||||
assert component[ICAL_SUMMARY] in usernames
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
|
|
|||
|
|
@ -38,7 +38,12 @@ class OnCallScheduleChannelView(RateLimitHeadersMixin, UpdateSerializerMixin, Mo
|
|||
def get_queryset(self):
|
||||
name = self.request.query_params.get("name", None)
|
||||
|
||||
queryset = OnCallSchedule.objects.filter(organization=self.request.auth.organization)
|
||||
queryset = OnCallSchedule.objects.filter(organization=self.request.auth.organization).defer(
|
||||
# avoid requesting large text fields which are not used when listing schedules
|
||||
"prev_ical_file_primary",
|
||||
"prev_ical_file_overrides",
|
||||
"cached_ical_final_schedule",
|
||||
)
|
||||
|
||||
if name is not None:
|
||||
queryset = queryset.filter(name=name)
|
||||
|
|
|
|||
|
|
@ -9,6 +9,13 @@ ICAL_ATTENDEE = "ATTENDEE"
|
|||
ICAL_UID = "UID"
|
||||
ICAL_RRULE = "RRULE"
|
||||
ICAL_UNTIL = "UNTIL"
|
||||
ICAL_LAST_MODIFIED = "LAST-MODIFIED"
|
||||
ICAL_STATUS = "STATUS"
|
||||
ICAL_STATUS_CANCELLED = "CANCELLED"
|
||||
ICAL_COMPONENT_VEVENT = "VEVENT"
|
||||
RE_PRIORITY = re.compile(r"^\[L(\d+)\]")
|
||||
RE_EVENT_UID_V1 = re.compile(r"amixr-([\w\d-]+)-U(\d+)-E(\d+)-S(\d+)")
|
||||
RE_EVENT_UID_V2 = re.compile(r"oncall-([\w\d-]+)-PK([\w\d]+)-U(\d+)-E(\d+)-S(\d+)")
|
||||
|
||||
EXPORT_WINDOW_DAYS_AFTER = 180
|
||||
EXPORT_WINDOW_DAYS_BEFORE = 15
|
||||
|
|
|
|||
|
|
@ -601,6 +601,7 @@ def create_base_icalendar(name: str) -> Calendar:
|
|||
cal.add("calscale", "GREGORIAN")
|
||||
cal.add("x-wr-calname", name)
|
||||
cal.add("x-wr-timezone", "UTC")
|
||||
cal.add("version", "2.0")
|
||||
cal.add("prodid", "//Grafana Labs//Grafana On-Call//")
|
||||
|
||||
return cal
|
||||
|
|
@ -614,7 +615,7 @@ def get_events_from_calendars(ical_obj: Calendar, calendars: tuple) -> None:
|
|||
ical_obj.add_component(component)
|
||||
|
||||
|
||||
def get_user_events_from_calendars(ical_obj: Calendar, calendars: tuple, user: User) -> None:
|
||||
def get_user_events_from_calendars(ical_obj: Calendar, calendars: tuple, user: User, name: str = None) -> None:
|
||||
for calendar in calendars:
|
||||
if calendar:
|
||||
for component in calendar.walk():
|
||||
|
|
@ -622,14 +623,41 @@ def get_user_events_from_calendars(ical_obj: Calendar, calendars: tuple, user: U
|
|||
event_user = get_usernames_from_ical_event(component)
|
||||
event_user_value = event_user[0][0]
|
||||
if event_user_value == user.username or event_user_value.lower() == user.email.lower():
|
||||
if name:
|
||||
component["SUMMARY"] = "{}: {}".format(name, component["SUMMARY"])
|
||||
ical_obj.add_component(component)
|
||||
|
||||
|
||||
def _is_final_export_enabled(schedule: OnCallSchedule) -> bool:
|
||||
DynamicSetting = apps.get_model("base", "DynamicSetting")
|
||||
enabled_final_export = DynamicSetting.objects.get_or_create(
|
||||
name="enabled_final_schedule_export",
|
||||
defaults={
|
||||
"json_value": {
|
||||
"schedule_ids": [],
|
||||
}
|
||||
},
|
||||
)[0]
|
||||
return schedule.public_primary_key in enabled_final_export.json_value["schedule_ids"]
|
||||
|
||||
|
||||
def _get_ical_data_final_schedule(schedule: OnCallSchedule) -> str:
|
||||
ical_data = schedule.cached_ical_final_schedule
|
||||
if ical_data is None:
|
||||
schedule.refresh_ical_final_schedule()
|
||||
ical_data = schedule.cached_ical_final_schedule
|
||||
return ical_data
|
||||
|
||||
|
||||
def ical_export_from_schedule(schedule: OnCallSchedule) -> bytes:
|
||||
calendars = schedule.get_icalendars()
|
||||
ical_obj = create_base_icalendar(schedule.name)
|
||||
get_events_from_calendars(ical_obj, calendars)
|
||||
return ical_obj.to_ical()
|
||||
if _is_final_export_enabled(schedule):
|
||||
ical_data = _get_ical_data_final_schedule(schedule)
|
||||
return ical_data.encode()
|
||||
else:
|
||||
calendars = schedule.get_icalendars()
|
||||
ical_obj = create_base_icalendar(schedule.name)
|
||||
get_events_from_calendars(ical_obj, calendars)
|
||||
return ical_obj.to_ical()
|
||||
|
||||
|
||||
def user_ical_export(user: User, schedules: list[OnCallSchedule]) -> bytes:
|
||||
|
|
@ -637,8 +665,14 @@ def user_ical_export(user: User, schedules: list[OnCallSchedule]) -> bytes:
|
|||
ical_obj = create_base_icalendar(schedule_name)
|
||||
|
||||
for schedule in schedules:
|
||||
calendars = schedule.get_icalendars()
|
||||
get_user_events_from_calendars(ical_obj, calendars, user)
|
||||
if _is_final_export_enabled(schedule):
|
||||
name = schedule.name
|
||||
ical_data = _get_ical_data_final_schedule(schedule)
|
||||
calendars = [Calendar.from_ical(ical_data)]
|
||||
else:
|
||||
name = None
|
||||
calendars = schedule.get_icalendars()
|
||||
get_user_events_from_calendars(ical_obj, calendars, user, name=name)
|
||||
|
||||
return ical_obj.to_ical()
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.18 on 2023-04-11 19:03
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('schedules', '0010_fix_polymorphic_delete_related'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='oncallschedule',
|
||||
name='cached_ical_final_schedule',
|
||||
field=models.TextField(default=None, null=True),
|
||||
),
|
||||
]
|
||||
|
|
@ -21,6 +21,7 @@ from recurring_ical_events import UnfoldableCalendar
|
|||
|
||||
from apps.schedules.tasks import (
|
||||
drop_cached_ical_task,
|
||||
refresh_ical_final_schedule,
|
||||
schedule_notify_about_empty_shifts_in_schedule,
|
||||
schedule_notify_about_gaps_in_schedule,
|
||||
)
|
||||
|
|
@ -670,6 +671,7 @@ class CustomOnCallShift(models.Model):
|
|||
drop_cached_ical_task.apply_async((schedule.pk,))
|
||||
schedule_notify_about_empty_shifts_in_schedule.apply_async((schedule.pk,))
|
||||
schedule_notify_about_gaps_in_schedule.apply_async((schedule.pk,))
|
||||
refresh_ical_final_schedule.apply_async((schedule.pk,))
|
||||
|
||||
@cached_property
|
||||
def last_updated_shift(self):
|
||||
|
|
|
|||
|
|
@ -19,7 +19,21 @@ from polymorphic.managers import PolymorphicManager
|
|||
from polymorphic.models import PolymorphicModel
|
||||
from polymorphic.query import PolymorphicQuerySet
|
||||
|
||||
from apps.schedules.constants import (
|
||||
EXPORT_WINDOW_DAYS_AFTER,
|
||||
EXPORT_WINDOW_DAYS_BEFORE,
|
||||
ICAL_COMPONENT_VEVENT,
|
||||
ICAL_DATETIME_END,
|
||||
ICAL_DATETIME_STAMP,
|
||||
ICAL_DATETIME_START,
|
||||
ICAL_LAST_MODIFIED,
|
||||
ICAL_STATUS,
|
||||
ICAL_STATUS_CANCELLED,
|
||||
ICAL_SUMMARY,
|
||||
ICAL_UID,
|
||||
)
|
||||
from apps.schedules.ical_utils import (
|
||||
create_base_icalendar,
|
||||
fetch_ical_file_or_get_error,
|
||||
get_oncall_users_for_multiple_schedules,
|
||||
list_of_empty_shifts_in_schedule,
|
||||
|
|
@ -74,11 +88,12 @@ class OnCallScheduleQuerySet(PolymorphicQuerySet):
|
|||
return get_oncall_users_for_multiple_schedules(self, events_datetime)
|
||||
|
||||
def related_to_user(self, user):
|
||||
username_regex = r"SUMMARY:(\[L[0-9]+\] )?{}".format(user.username)
|
||||
return self.filter(
|
||||
Q(cached_ical_file_primary__contains=user.username)
|
||||
Q(cached_ical_file_primary__regex=username_regex)
|
||||
| Q(cached_ical_file_primary__contains=user.email)
|
||||
| Q(cached_ical_file_overrides__contains=user.username)
|
||||
| Q(cached_ical_file_overrides__contains=user.username),
|
||||
| Q(cached_ical_file_overrides__regex=username_regex)
|
||||
| Q(cached_ical_file_overrides__contains=user.email),
|
||||
organization=user.organization,
|
||||
)
|
||||
|
||||
|
|
@ -106,6 +121,8 @@ class OnCallSchedule(PolymorphicModel):
|
|||
cached_ical_file_overrides = models.TextField(null=True, default=None)
|
||||
prev_ical_file_overrides = models.TextField(null=True, default=None)
|
||||
|
||||
cached_ical_final_schedule = models.TextField(null=True, default=None)
|
||||
|
||||
organization = models.ForeignKey(
|
||||
"user_management.Organization", on_delete=NON_POLYMORPHIC_CASCADE, related_name="oncall_schedules"
|
||||
)
|
||||
|
|
@ -294,6 +311,63 @@ class OnCallSchedule(PolymorphicModel):
|
|||
events = self._resolve_schedule(events)
|
||||
return events
|
||||
|
||||
def refresh_ical_final_schedule(self):
|
||||
# TODO: check flag?
|
||||
tz = "UTC"
|
||||
now = timezone.now()
|
||||
# window to consider: from now, -15 days + 6 months
|
||||
delta = EXPORT_WINDOW_DAYS_BEFORE
|
||||
starting_datetime = now - timezone.timedelta(days=delta)
|
||||
starting_date = starting_datetime.date()
|
||||
days = EXPORT_WINDOW_DAYS_AFTER + delta
|
||||
|
||||
# setup calendar with final schedule shift events
|
||||
calendar = create_base_icalendar(self.name)
|
||||
events = self.final_events(tz, starting_date, days)
|
||||
updated_ids = set()
|
||||
for e in events:
|
||||
for u in e["users"]:
|
||||
event = icalendar.Event()
|
||||
event.add(ICAL_SUMMARY, u["display_name"])
|
||||
event.add(ICAL_DATETIME_START, e["start"])
|
||||
event.add(ICAL_DATETIME_END, e["end"])
|
||||
event.add(ICAL_DATETIME_STAMP, now)
|
||||
event.add(ICAL_LAST_MODIFIED, now)
|
||||
event_uid = "{}-{}-{}".format(e["shift"]["pk"], e["start"].strftime("%Y%m%d%H%S"), u["pk"])
|
||||
event[ICAL_UID] = event_uid
|
||||
calendar.add_component(event)
|
||||
updated_ids.add(event_uid)
|
||||
|
||||
# check previously cached final schedule for potentially cancelled events
|
||||
if self.cached_ical_final_schedule:
|
||||
previous = icalendar.Calendar.from_ical(self.cached_ical_final_schedule)
|
||||
for component in previous.walk():
|
||||
if component.name == ICAL_COMPONENT_VEVENT and component[ICAL_UID] not in updated_ids:
|
||||
# check if event was ended or cancelled, update ical
|
||||
dtend = component.get(ICAL_DATETIME_END)
|
||||
if dtend and dtend.dt < starting_datetime:
|
||||
# event ended before window start
|
||||
continue
|
||||
is_cancelled = component.get(ICAL_STATUS)
|
||||
last_modified = component.get(ICAL_LAST_MODIFIED)
|
||||
if is_cancelled and last_modified and last_modified.dt < starting_datetime:
|
||||
# drop already ended events older than the window we consider
|
||||
continue
|
||||
elif is_cancelled and not last_modified:
|
||||
# set last_modified if it was missing (e.g. from previous export ical implementation)
|
||||
component[ICAL_LAST_MODIFIED] = icalendar.vDatetime(now).to_ical()
|
||||
elif not is_cancelled:
|
||||
# set the event as cancelled
|
||||
component[ICAL_DATETIME_END] = component[ICAL_DATETIME_START]
|
||||
component[ICAL_STATUS] = ICAL_STATUS_CANCELLED
|
||||
component[ICAL_LAST_MODIFIED] = icalendar.vDatetime(now).to_ical()
|
||||
# include just cancelled events as well as those that were cancelled during the time window
|
||||
calendar.add_component(component)
|
||||
|
||||
ical_data = calendar.to_ical().decode()
|
||||
self.cached_ical_final_schedule = ical_data
|
||||
self.save(update_fields=["cached_ical_final_schedule"])
|
||||
|
||||
def upcoming_shift_for_user(self, user, days=7):
|
||||
user_tz = user.timezone or "UTC"
|
||||
now = timezone.now()
|
||||
|
|
@ -598,7 +672,7 @@ class OnCallSchedule(PolymorphicModel):
|
|||
ical = ical_file.replace(end_line, "").strip()
|
||||
ical = f"{ical}\r\n"
|
||||
for event in itertools.chain(qs.all(), extra_shifts):
|
||||
ical += event.convert_to_ical(self.time_zone, allow_empty_users=allow_empty_users)
|
||||
ical += event.convert_to_ical(allow_empty_users=allow_empty_users)
|
||||
ical += f"{end_line}\r\n"
|
||||
return ical
|
||||
|
||||
|
|
|
|||
|
|
@ -13,4 +13,9 @@ from .notify_about_gaps_in_schedule import ( # noqa: F401
|
|||
start_check_gaps_in_schedule,
|
||||
start_notify_about_gaps_in_schedule,
|
||||
)
|
||||
from .refresh_ical_files import refresh_ical_file, start_refresh_ical_files # noqa: F401
|
||||
from .refresh_ical_files import ( # noqa: F401
|
||||
refresh_ical_file,
|
||||
refresh_ical_final_schedule,
|
||||
start_refresh_ical_files,
|
||||
start_refresh_ical_final_schedules,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -24,6 +24,17 @@ def start_refresh_ical_files():
|
|||
start_update_slack_user_group_for_schedules.apply_async(countdown=30)
|
||||
|
||||
|
||||
@shared_dedicated_queue_retry_task()
|
||||
def start_refresh_ical_final_schedules():
|
||||
OnCallSchedule = apps.get_model("schedules", "OnCallSchedule")
|
||||
|
||||
task_logger.info("Start refresh ical final schedules")
|
||||
|
||||
schedules = OnCallSchedule.objects.all()
|
||||
for schedule in schedules:
|
||||
refresh_ical_final_schedule.apply_async((schedule.pk,))
|
||||
|
||||
|
||||
@shared_dedicated_queue_retry_task()
|
||||
def refresh_ical_file(schedule_pk):
|
||||
OnCallSchedule = apps.get_model("schedules", "OnCallSchedule")
|
||||
|
|
@ -74,3 +85,17 @@ def refresh_ical_file(schedule_pk):
|
|||
if run_task:
|
||||
notify_about_empty_shifts_in_schedule.apply_async((schedule_pk,))
|
||||
notify_about_gaps_in_schedule.apply_async((schedule_pk,))
|
||||
|
||||
|
||||
@shared_dedicated_queue_retry_task()
|
||||
def refresh_ical_final_schedule(schedule_pk):
|
||||
OnCallSchedule = apps.get_model("schedules", "OnCallSchedule")
|
||||
task_logger.info(f"Refresh ical final schedule {schedule_pk}")
|
||||
|
||||
try:
|
||||
schedule = OnCallSchedule.objects.get(pk=schedule_pk)
|
||||
except OnCallSchedule.DoesNotExist:
|
||||
task_logger.info(f"Tried to refresh final schedule for non-existing schedule {schedule_pk}")
|
||||
return
|
||||
|
||||
schedule.refresh_ical_final_schedule()
|
||||
|
|
|
|||
|
|
@ -1,11 +1,22 @@
|
|||
import datetime
|
||||
import textwrap
|
||||
from unittest.mock import patch
|
||||
|
||||
import icalendar
|
||||
import pytest
|
||||
import pytz
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.api.permissions import LegacyAccessControlRole
|
||||
from apps.schedules.constants import (
|
||||
ICAL_COMPONENT_VEVENT,
|
||||
ICAL_DATETIME_END,
|
||||
ICAL_DATETIME_START,
|
||||
ICAL_LAST_MODIFIED,
|
||||
ICAL_STATUS,
|
||||
ICAL_STATUS_CANCELLED,
|
||||
ICAL_SUMMARY,
|
||||
)
|
||||
from apps.schedules.ical_utils import memoized_users_in_ical
|
||||
from apps.schedules.models import (
|
||||
CustomOnCallShift,
|
||||
|
|
@ -1027,8 +1038,11 @@ def test_api_schedule_use_overrides_from_url(make_organization, make_schedule, g
|
|||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_api_schedule_use_overrides_from_db(make_organization, make_schedule, make_on_call_shift):
|
||||
def test_api_schedule_use_overrides_from_db(
|
||||
make_organization, make_user_for_organization, make_schedule, make_on_call_shift
|
||||
):
|
||||
organization = make_organization()
|
||||
user_1 = make_user_for_organization(organization)
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleCalendar,
|
||||
|
|
@ -1046,6 +1060,39 @@ def test_api_schedule_use_overrides_from_db(make_organization, make_schedule, ma
|
|||
source=CustomOnCallShift.SOURCE_WEB,
|
||||
schedule=schedule,
|
||||
)
|
||||
override.add_rolling_users([[user_1]])
|
||||
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
ical_event = override.convert_to_ical()
|
||||
assert ical_event in schedule.cached_ical_file_overrides
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_api_schedule_overrides_from_db_use_own_tz(
|
||||
make_organization, make_user_for_organization, make_schedule, make_on_call_shift
|
||||
):
|
||||
organization = make_organization()
|
||||
user_1 = make_user_for_organization(organization)
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleCalendar,
|
||||
ical_url_overrides=None,
|
||||
enable_web_overrides=True,
|
||||
time_zone="Etc/GMT-2",
|
||||
)
|
||||
now = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
override = make_on_call_shift(
|
||||
organization=organization,
|
||||
shift_type=CustomOnCallShift.TYPE_OVERRIDE,
|
||||
priority_level=1,
|
||||
start=now,
|
||||
rotation_start=now,
|
||||
duration=timezone.timedelta(hours=12),
|
||||
source=CustomOnCallShift.SOURCE_WEB,
|
||||
schedule=schedule,
|
||||
)
|
||||
override.add_rolling_users([[user_1]])
|
||||
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
|
|
@ -1199,11 +1246,76 @@ def test_user_related_schedules(
|
|||
override.add_rolling_users([[admin]])
|
||||
schedule2.refresh_ical_file()
|
||||
|
||||
# schedule2
|
||||
# schedule3
|
||||
make_schedule(organization, schedule_class=OnCallScheduleWeb)
|
||||
|
||||
schedules = OnCallSchedule.objects.related_to_user(admin)
|
||||
assert list(schedules) == [schedule1, schedule2]
|
||||
assert set(schedules) == {schedule1, schedule2}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_related_schedules_only_username(
|
||||
make_organization,
|
||||
make_user_for_organization,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization = make_organization()
|
||||
# oncall is used as keyword in the ical calendar definition,
|
||||
# shouldn't be associated to the user
|
||||
user = make_user_for_organization(organization, username="oncall")
|
||||
other_user = make_user_for_organization(organization, username="other")
|
||||
|
||||
today = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
schedule1 = make_schedule(organization, schedule_class=OnCallScheduleWeb)
|
||||
shifts = (
|
||||
# user, priority, start time (h), duration (seconds)
|
||||
(user, 1, 0, (24 * 60 * 60) - 1), # r1-1: 0-23:59:59
|
||||
)
|
||||
for user, priority, start_h, duration in shifts:
|
||||
data = {
|
||||
"start": today + timezone.timedelta(hours=start_h),
|
||||
"rotation_start": today + timezone.timedelta(hours=start_h),
|
||||
"duration": timezone.timedelta(seconds=duration),
|
||||
"priority_level": priority,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule1,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user]])
|
||||
schedule1.refresh_ical_file()
|
||||
|
||||
schedule2 = make_schedule(organization, schedule_class=OnCallScheduleWeb)
|
||||
override_data = {
|
||||
"start": today + timezone.timedelta(hours=22),
|
||||
"rotation_start": today + timezone.timedelta(hours=22),
|
||||
"duration": timezone.timedelta(hours=1),
|
||||
"schedule": schedule2,
|
||||
}
|
||||
override = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_OVERRIDE, **override_data
|
||||
)
|
||||
override.add_rolling_users([[user]])
|
||||
schedule2.refresh_ical_file()
|
||||
|
||||
# schedule3
|
||||
schedule3 = make_schedule(organization, schedule_class=OnCallScheduleWeb)
|
||||
override_data = {
|
||||
"start": today + timezone.timedelta(hours=22),
|
||||
"rotation_start": today + timezone.timedelta(hours=22),
|
||||
"duration": timezone.timedelta(hours=1),
|
||||
"schedule": schedule3,
|
||||
}
|
||||
override = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_OVERRIDE, **override_data
|
||||
)
|
||||
override.add_rolling_users([[other_user]])
|
||||
schedule3.refresh_ical_file()
|
||||
|
||||
schedules = OnCallSchedule.objects.related_to_user(user)
|
||||
assert set(schedules) == {schedule1, schedule2}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
|
@ -1246,3 +1358,248 @@ def test_upcoming_shift_for_user(
|
|||
current_shift, upcoming_shift = schedule.upcoming_shift_for_user(other_user)
|
||||
assert current_shift is None
|
||||
assert upcoming_shift is None
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_refresh_ical_final_schedule_ok(
|
||||
make_organization,
|
||||
make_user_for_organization,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization = make_organization()
|
||||
u1 = make_user_for_organization(organization)
|
||||
u2 = make_user_for_organization(organization)
|
||||
|
||||
today = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)
|
||||
shifts = (
|
||||
# user, priority, start time (h), duration (seconds)
|
||||
(u1, 1, 0, (12 * 60 * 60) - 1), # r1-1: 0-11:59:59
|
||||
(u2, 1, 12, (12 * 60 * 60) - 1), # r1-1: 12-23:59:59
|
||||
)
|
||||
for user, priority, start_h, duration in shifts:
|
||||
data = {
|
||||
"start": today + timezone.timedelta(hours=start_h),
|
||||
"rotation_start": today + timezone.timedelta(hours=start_h),
|
||||
"duration": timezone.timedelta(seconds=duration),
|
||||
"priority_level": priority,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user]])
|
||||
|
||||
override_data = {
|
||||
"start": today + timezone.timedelta(hours=22),
|
||||
"rotation_start": today + timezone.timedelta(hours=22),
|
||||
"duration": timezone.timedelta(hours=1),
|
||||
"schedule": schedule,
|
||||
}
|
||||
override = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_OVERRIDE, **override_data
|
||||
)
|
||||
override.add_rolling_users([[u1]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
expected_events = {
|
||||
# user, start, end
|
||||
(u1.username, today, today + timezone.timedelta(seconds=(12 * 60 * 60) - 1)),
|
||||
(u2.username, today + timezone.timedelta(hours=12), today + timezone.timedelta(hours=22)),
|
||||
(u1.username, today + timezone.timedelta(hours=22), today + timezone.timedelta(hours=23)),
|
||||
(u2.username, today + timezone.timedelta(hours=23), today + timezone.timedelta(seconds=(24 * 60 * 60) - 1)),
|
||||
}
|
||||
|
||||
for i in range(2):
|
||||
# running multiple times keeps the same events in place
|
||||
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_AFTER", 1):
|
||||
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_BEFORE", 0):
|
||||
schedule.refresh_ical_final_schedule()
|
||||
|
||||
assert schedule.cached_ical_final_schedule
|
||||
calendar = icalendar.Calendar.from_ical(schedule.cached_ical_final_schedule)
|
||||
for component in calendar.walk():
|
||||
if component.name == ICAL_COMPONENT_VEVENT:
|
||||
event = (component[ICAL_SUMMARY], component[ICAL_DATETIME_START].dt, component[ICAL_DATETIME_END].dt)
|
||||
assert event in expected_events
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_refresh_ical_final_schedule_cancel_deleted_events(
|
||||
make_organization,
|
||||
make_user_for_organization,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization = make_organization()
|
||||
u1 = make_user_for_organization(organization)
|
||||
u2 = make_user_for_organization(organization)
|
||||
|
||||
tomorrow = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) + timezone.timedelta(days=1)
|
||||
schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)
|
||||
shifts = (
|
||||
# user, priority, start time (h), duration (seconds)
|
||||
(u1, 1, 0, (24 * 60 * 60) - 1), # r1-1: 0-23:59:59
|
||||
)
|
||||
for user, priority, start_h, duration in shifts:
|
||||
data = {
|
||||
"start": tomorrow + timezone.timedelta(hours=start_h),
|
||||
"rotation_start": tomorrow + timezone.timedelta(hours=start_h),
|
||||
"duration": timezone.timedelta(seconds=duration),
|
||||
"priority_level": priority,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user]])
|
||||
|
||||
override_data = {
|
||||
"start": tomorrow + timezone.timedelta(hours=22),
|
||||
"rotation_start": tomorrow + timezone.timedelta(hours=22),
|
||||
"duration": timezone.timedelta(hours=1),
|
||||
"schedule": schedule,
|
||||
}
|
||||
override = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_OVERRIDE, **override_data
|
||||
)
|
||||
override.add_rolling_users([[u2]])
|
||||
|
||||
# refresh ical files
|
||||
schedule.refresh_ical_file()
|
||||
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_AFTER", 1):
|
||||
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_BEFORE", 0):
|
||||
schedule.refresh_ical_final_schedule()
|
||||
|
||||
# delete override, re-check the final refresh
|
||||
override.delete()
|
||||
|
||||
# reload instance to avoid cached properties issue
|
||||
schedule = OnCallScheduleWeb.objects.get(id=schedule.id)
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_AFTER", 1):
|
||||
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_BEFORE", 0):
|
||||
schedule.refresh_ical_final_schedule()
|
||||
|
||||
# check for deleted override
|
||||
calendar = icalendar.Calendar.from_ical(schedule.cached_ical_final_schedule)
|
||||
for component in calendar.walk():
|
||||
if component.name == ICAL_COMPONENT_VEVENT and component[ICAL_SUMMARY] == u2.username:
|
||||
# check event is cancelled
|
||||
assert component[ICAL_DATETIME_START].dt == component[ICAL_DATETIME_END].dt
|
||||
assert component[ICAL_LAST_MODIFIED]
|
||||
assert component[ICAL_STATUS] == ICAL_STATUS_CANCELLED
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_refresh_ical_final_schedule_cancelled_not_updated(
|
||||
make_organization,
|
||||
make_user_for_organization,
|
||||
make_schedule,
|
||||
):
|
||||
organization = make_organization()
|
||||
u1 = make_user_for_organization(organization)
|
||||
u2 = make_user_for_organization(organization)
|
||||
last_week = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0) - timezone.timedelta(days=7)
|
||||
last_week_timestamp = last_week.strftime("%Y%m%dT%H%M%S")
|
||||
cached_ical_final_schedule = textwrap.dedent(
|
||||
"""
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID://Grafana Labs//Grafana On-Call//
|
||||
CALSCALE:GREGORIAN
|
||||
X-WR-CALNAME:Cup cut.
|
||||
X-WR-TIMEZONE:UTC
|
||||
BEGIN:VEVENT
|
||||
SUMMARY:{}
|
||||
DTSTART;VALUE=DATE-TIME:20220414T000000Z
|
||||
DTEND;VALUE=DATE-TIME:20220414T000000Z
|
||||
DTSTAMP;VALUE=DATE-TIME:20220414T190951Z
|
||||
UID:O231U3VXVIYRX-202304140000-U5FWIHEASEWS2
|
||||
LAST-MODIFIED;VALUE=DATE-TIME:20220414T190951Z
|
||||
STATUS:CANCELLED
|
||||
END:VEVENT
|
||||
BEGIN:VEVENT
|
||||
SUMMARY:{}
|
||||
DTSTART;VALUE=DATE-TIME:{}Z
|
||||
DTEND;VALUE=DATE-TIME:{}Z
|
||||
DTSTAMP;VALUE=DATE-TIME:20230414T190951Z
|
||||
UID:OBPQ1TI99E4DG-202304141200-U2G6RZQM3S3I9
|
||||
LAST-MODIFIED;VALUE=DATE-TIME:{}Z
|
||||
STATUS:CANCELLED
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
""".format(
|
||||
u1.username, u2.username, last_week_timestamp, last_week_timestamp, last_week_timestamp
|
||||
)
|
||||
)
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
cached_ical_final_schedule=cached_ical_final_schedule,
|
||||
)
|
||||
|
||||
schedule.refresh_ical_final_schedule()
|
||||
|
||||
# check old event is dropped, recent one is kept unchanged
|
||||
event_count = 0
|
||||
calendar = icalendar.Calendar.from_ical(schedule.cached_ical_final_schedule)
|
||||
for component in calendar.walk():
|
||||
if component.name == ICAL_COMPONENT_VEVENT:
|
||||
event_count += 1
|
||||
if component[ICAL_SUMMARY] == u2.username:
|
||||
# check event is unchanged
|
||||
assert component[ICAL_DATETIME_START].dt == last_week
|
||||
assert component[ICAL_DATETIME_END].dt == last_week
|
||||
assert component[ICAL_LAST_MODIFIED].dt == last_week
|
||||
assert component[ICAL_STATUS] == ICAL_STATUS_CANCELLED
|
||||
assert event_count == 1
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_refresh_ical_final_schedule_event_in_the_past(
|
||||
make_organization,
|
||||
make_user_for_organization,
|
||||
make_schedule,
|
||||
):
|
||||
organization = make_organization()
|
||||
u1 = make_user_for_organization(organization)
|
||||
cached_ical_final_schedule = textwrap.dedent(
|
||||
"""
|
||||
BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID://Grafana Labs//Grafana On-Call//
|
||||
CALSCALE:GREGORIAN
|
||||
X-WR-CALNAME:Cup cut.
|
||||
X-WR-TIMEZONE:UTC
|
||||
BEGIN:VEVENT
|
||||
SUMMARY:{}
|
||||
DTSTART;VALUE=DATE-TIME:20220414T000000Z
|
||||
DTEND;VALUE=DATE-TIME:20220414T000000Z
|
||||
DTSTAMP;VALUE=DATE-TIME:20220414T190951Z
|
||||
UID:O231U3VXVIYRX-202304140000-U5FWIHEASEWS2
|
||||
LAST-MODIFIED;VALUE=DATE-TIME:20220414T190951Z
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
""".format(
|
||||
u1.username
|
||||
)
|
||||
)
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
cached_ical_final_schedule=cached_ical_final_schedule,
|
||||
)
|
||||
|
||||
schedule.refresh_ical_final_schedule()
|
||||
|
||||
# check old event is dropped, recent one is kept unchanged
|
||||
calendar = icalendar.Calendar.from_ical(schedule.cached_ical_final_schedule)
|
||||
events = [component for component in calendar.walk() if component.name == ICAL_COMPONENT_VEVENT]
|
||||
assert len(events) == 0
|
||||
|
|
|
|||
23
engine/apps/webhooks/migrations/0004_auto_20230418_0109.py
Normal file
23
engine/apps/webhooks/migrations/0004_auto_20230418_0109.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# Generated by Django 3.2.18 on 2023-04-18 01:09
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('webhooks', '0003_auto_20230412_0006'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='webhook',
|
||||
name='trigger_type',
|
||||
field=models.IntegerField(choices=[(0, 'Escalation step'), (1, 'Firing'), (2, 'Acknowledged'), (3, 'Resolved'), (4, 'Silenced'), (5, 'Unsilenced'), (6, 'Unresolved'), (7, 'Unacknowledged')], default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='webhookresponse',
|
||||
name='trigger_type',
|
||||
field=models.IntegerField(choices=[(0, 'Escalation step'), (1, 'Firing'), (2, 'Acknowledged'), (3, 'Resolved'), (4, 'Silenced'), (5, 'Unsilenced'), (6, 'Unresolved'), (7, 'Unacknowledged')]),
|
||||
),
|
||||
]
|
||||
|
|
@ -63,7 +63,8 @@ class Webhook(models.Model):
|
|||
TRIGGER_SILENCE,
|
||||
TRIGGER_UNSILENCE,
|
||||
TRIGGER_UNRESOLVE,
|
||||
) = range(7)
|
||||
TRIGGER_UNACKNOWLEDGE,
|
||||
) = range(8)
|
||||
|
||||
# Must be the same order as previous
|
||||
TRIGGER_TYPES = (
|
||||
|
|
@ -74,6 +75,7 @@ class Webhook(models.Model):
|
|||
(TRIGGER_SILENCE, "Silenced"),
|
||||
(TRIGGER_UNSILENCE, "Unsilenced"),
|
||||
(TRIGGER_UNRESOLVE, "Unresolved"),
|
||||
(TRIGGER_UNACKNOWLEDGE, "Unacknowledged"),
|
||||
)
|
||||
|
||||
public_primary_key = models.CharField(
|
||||
|
|
|
|||
|
|
@ -21,6 +21,7 @@ ACTION_TO_TRIGGER_TYPE = {
|
|||
AlertGroupLogRecord.TYPE_SILENCE: Webhook.TRIGGER_SILENCE,
|
||||
AlertGroupLogRecord.TYPE_UN_SILENCE: Webhook.TRIGGER_UNSILENCE,
|
||||
AlertGroupLogRecord.TYPE_UN_RESOLVED: Webhook.TRIGGER_UNRESOLVE,
|
||||
AlertGroupLogRecord.TYPE_UN_ACK: Webhook.TRIGGER_UNACKNOWLEDGE,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -20,6 +20,8 @@ from apps.webhooks.utils import (
|
|||
)
|
||||
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
|
||||
|
||||
NOT_FROM_SELECTED_INTEGRATION = "Alert group was not from a selected integration"
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
|
|
@ -32,6 +34,7 @@ TRIGGER_TYPE_TO_LABEL = {
|
|||
Webhook.TRIGGER_UNSILENCE: "unsilence",
|
||||
Webhook.TRIGGER_UNRESOLVE: "unresolve",
|
||||
Webhook.TRIGGER_ESCALATION_STEP: "escalation",
|
||||
Webhook.TRIGGER_UNACKNOWLEDGE: "unacknowledge",
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -102,7 +105,7 @@ def make_request(webhook, alert_group, data):
|
|||
exception = error = None
|
||||
try:
|
||||
if not webhook.check_integration_filter(alert_group):
|
||||
status["request_trigger"] = f"Alert group was not from a selected integration"
|
||||
status["request_trigger"] = NOT_FROM_SELECTED_INTEGRATION
|
||||
return status, None, None
|
||||
|
||||
triggered, status["request_trigger"] = webhook.check_trigger(data)
|
||||
|
|
|
|||
|
|
@ -67,6 +67,7 @@ def test_alert_group_created_does_not_exist(make_organization, make_custom_webho
|
|||
(AlertGroupLogRecord.TYPE_SILENCE, Webhook.TRIGGER_SILENCE),
|
||||
(AlertGroupLogRecord.TYPE_UN_SILENCE, Webhook.TRIGGER_UNSILENCE),
|
||||
(AlertGroupLogRecord.TYPE_UN_RESOLVED, Webhook.TRIGGER_UNRESOLVE),
|
||||
(AlertGroupLogRecord.TYPE_UN_ACK, Webhook.TRIGGER_UNACKNOWLEDGE),
|
||||
],
|
||||
)
|
||||
def test_alert_group_status_change(
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ from apps.base.models import UserNotificationPolicyLogRecord
|
|||
from apps.public_api.serializers import IncidentSerializer
|
||||
from apps.webhooks.models import Webhook
|
||||
from apps.webhooks.tasks import execute_webhook, send_webhook_event
|
||||
from apps.webhooks.tasks.trigger_webhook import NOT_FROM_SELECTED_INTEGRATION
|
||||
|
||||
|
||||
class MockResponse:
|
||||
|
|
@ -63,6 +64,72 @@ def test_send_webhook_event_filters(
|
|||
assert mock_execute.call_args == call((other_org_webhook.pk, alert_group.pk, None, None))
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_execute_webhook_disabled(
|
||||
make_organization, make_team, make_alert_receive_channel, make_alert_group, make_custom_webhook
|
||||
):
|
||||
organization = make_organization()
|
||||
alert_receive_channel = make_alert_receive_channel(organization)
|
||||
alert_group = make_alert_group(alert_receive_channel)
|
||||
make_custom_webhook(organization=organization, trigger_type=Webhook.TRIGGER_FIRING)
|
||||
make_custom_webhook(organization=organization, trigger_type=Webhook.TRIGGER_FIRING, is_webhook_enabled=False)
|
||||
|
||||
with patch("apps.webhooks.tasks.trigger_webhook.execute_webhook.apply_async") as mock_execute:
|
||||
send_webhook_event(Webhook.TRIGGER_FIRING, alert_group.pk, organization_id=organization.pk)
|
||||
mock_execute.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_execute_webhook_integration_filter_not_matching(
|
||||
make_organization, make_team, make_alert_receive_channel, make_alert_group, make_custom_webhook
|
||||
):
|
||||
organization = make_organization()
|
||||
alert_receive_channel = make_alert_receive_channel(organization)
|
||||
alert_group = make_alert_group(alert_receive_channel)
|
||||
webhook = make_custom_webhook(
|
||||
organization=organization, trigger_type=Webhook.TRIGGER_FIRING, integration_filter=["does-not-match"]
|
||||
)
|
||||
|
||||
with patch("apps.webhooks.models.webhook.requests") as mock_requests:
|
||||
execute_webhook(webhook.pk, alert_group.pk, None, None)
|
||||
|
||||
assert not mock_requests.post.called
|
||||
# check log should exist but have no status code
|
||||
assert (
|
||||
webhook.responses.count() == 1
|
||||
and webhook.responses.first().status_code is None
|
||||
and webhook.responses.first().request_trigger == NOT_FROM_SELECTED_INTEGRATION
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_execute_webhook_integration_filter_matching(
|
||||
make_organization, make_team, make_alert_receive_channel, make_alert_group, make_custom_webhook
|
||||
):
|
||||
organization = make_organization()
|
||||
alert_receive_channel = make_alert_receive_channel(organization, public_primary_key="test-integration-1")
|
||||
alert_group = make_alert_group(alert_receive_channel)
|
||||
webhook = make_custom_webhook(
|
||||
organization=organization,
|
||||
trigger_type=Webhook.TRIGGER_FIRING,
|
||||
integration_filter=["test-integration-1"],
|
||||
# Check we get past integration filter but exit early to keep test simple
|
||||
trigger_template="False",
|
||||
)
|
||||
|
||||
with patch("apps.webhooks.models.webhook.requests") as mock_requests:
|
||||
execute_webhook(webhook.pk, alert_group.pk, None, None)
|
||||
|
||||
assert not mock_requests.post.called
|
||||
# check log should exist but have no status code
|
||||
assert (
|
||||
webhook.responses.count() == 1
|
||||
and webhook.responses.first().status_code is None
|
||||
# Matches evaluated trigger_template
|
||||
and webhook.responses.first().request_trigger == "False"
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_execute_webhook_ok(
|
||||
make_organization, make_user_for_organization, make_alert_receive_channel, make_alert_group, make_custom_webhook
|
||||
|
|
|
|||
|
|
@ -414,6 +414,11 @@ CELERY_BEAT_SCHEDULE = {
|
|||
"schedule": getenv_integer("ALERT_GROUP_ESCALATION_AUDITOR_CELERY_TASK_HEARTBEAT_INTERVAL", 13 * 60),
|
||||
"args": (),
|
||||
},
|
||||
"start_refresh_ical_final_schedules": {
|
||||
"task": "apps.schedules.tasks.refresh_ical_files.start_refresh_ical_final_schedules",
|
||||
"schedule": crontab(minute=15, hour=0),
|
||||
"args": (),
|
||||
},
|
||||
"start_refresh_ical_files": {
|
||||
"task": "apps.schedules.tasks.refresh_ical_files.start_refresh_ical_files",
|
||||
"schedule": 10 * 60,
|
||||
|
|
|
|||
|
|
@ -24,6 +24,14 @@ const SchedulesFilters = (props: SchedulesFiltersProps) => {
|
|||
},
|
||||
[value]
|
||||
);
|
||||
|
||||
const handleMineChange = useCallback(
|
||||
(mine) => {
|
||||
onChange({ ...value, mine });
|
||||
},
|
||||
[value]
|
||||
);
|
||||
|
||||
const handleStatusChange = useCallback(
|
||||
(used) => {
|
||||
onChange({ ...value, used });
|
||||
|
|
@ -53,6 +61,23 @@ const SchedulesFilters = (props: SchedulesFiltersProps) => {
|
|||
</Field>
|
||||
</div>
|
||||
<div className={cx('right')}>
|
||||
<Field label="Mine">
|
||||
<RadioButtonGroup
|
||||
options={[
|
||||
{ label: 'All', value: undefined },
|
||||
{
|
||||
label: 'Mine',
|
||||
value: true,
|
||||
},
|
||||
{
|
||||
label: 'Not mine',
|
||||
value: false,
|
||||
},
|
||||
]}
|
||||
value={value.mine}
|
||||
onChange={handleMineChange}
|
||||
/>
|
||||
</Field>
|
||||
<Field label="Status">
|
||||
<RadioButtonGroup
|
||||
options={[
|
||||
|
|
|
|||
|
|
@ -4,4 +4,5 @@ export interface SchedulesFiltersType {
|
|||
searchTerm: string;
|
||||
type: ScheduleType;
|
||||
used: boolean | undefined;
|
||||
mine: boolean | undefined;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,8 +1,9 @@
|
|||
import React from 'react';
|
||||
|
||||
import { Badge, HorizontalGroup, Tooltip, VerticalGroup } from '@grafana/ui';
|
||||
import { Badge, HorizontalGroup, IconButton, Tooltip, VerticalGroup } from '@grafana/ui';
|
||||
import cn from 'classnames/bind';
|
||||
import { observer } from 'mobx-react';
|
||||
import CopyToClipboard from 'react-copy-to-clipboard';
|
||||
import Emoji from 'react-emoji-render';
|
||||
|
||||
import IntegrationLogo from 'components/IntegrationLogo/IntegrationLogo';
|
||||
|
|
@ -11,6 +12,7 @@ import Text from 'components/Text/Text';
|
|||
import TeamName from 'containers/TeamName/TeamName';
|
||||
import { HeartGreenIcon, HeartRedIcon } from 'icons';
|
||||
import { AlertReceiveChannel } from 'models/alert_receive_channel/alert_receive_channel.types';
|
||||
import { AppFeature } from 'state/features';
|
||||
import { useStore } from 'state/useStore';
|
||||
|
||||
import styles from './AlertReceiveChannelCard.module.scss';
|
||||
|
|
@ -63,6 +65,22 @@ const AlertReceiveChannelCard = observer((props: AlertReceiveChannelCardProps) =
|
|||
<Text type="primary" size="medium">
|
||||
<Emoji className={cx('title')} text={alertReceiveChannel.verbal_name} />
|
||||
</Text>
|
||||
{store.hasFeature(AppFeature.Webhooks2) && (
|
||||
<CopyToClipboard text={alertReceiveChannel.id}>
|
||||
<IconButton
|
||||
variant="primary"
|
||||
tooltip={
|
||||
<div>
|
||||
ID {alertReceiveChannel.id}
|
||||
<br />
|
||||
(click to copy ID to clipboard)
|
||||
</div>
|
||||
}
|
||||
tooltipPlacement="top"
|
||||
name="info-circle"
|
||||
/>
|
||||
</CopyToClipboard>
|
||||
)}
|
||||
{alertReceiveChannelCounter && (
|
||||
<PluginLink
|
||||
query={{ page: 'alert-groups', integration: alertReceiveChannel.id }}
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@ import {
|
|||
} from '@grafana/ui';
|
||||
import cn from 'classnames/bind';
|
||||
import { observer } from 'mobx-react';
|
||||
import CopyToClipboard from 'react-copy-to-clipboard';
|
||||
import Emoji from 'react-emoji-render';
|
||||
|
||||
import Collapse from 'components/Collapse/Collapse';
|
||||
|
|
@ -39,6 +40,7 @@ import { ChannelFilter } from 'models/channel_filter/channel_filter.types';
|
|||
import { EscalationChain } from 'models/escalation_chain/escalation_chain.types';
|
||||
import { EscalationPolicyOption } from 'models/escalation_policy/escalation_policy.types';
|
||||
import { MaintenanceType } from 'models/maintenance/maintenance.types';
|
||||
import { AppFeature } from 'state/features';
|
||||
import { WithStoreProps } from 'state/types';
|
||||
import { withMobXProviderContext } from 'state/withStore';
|
||||
import { openNotification } from 'utils';
|
||||
|
|
@ -683,6 +685,22 @@ class AlertRules extends React.Component<AlertRulesProps, AlertRulesState> {
|
|||
/>
|
||||
</WithPermissionControlTooltip>
|
||||
)}
|
||||
{store.hasFeature(AppFeature.Webhooks2) && (
|
||||
<CopyToClipboard text={channelFilter.id}>
|
||||
<IconButton
|
||||
variant="primary"
|
||||
tooltip={
|
||||
<div>
|
||||
ID {channelFilter.id}
|
||||
<br />
|
||||
(click to copy ID to clipboard)
|
||||
</div>
|
||||
}
|
||||
tooltipPlacement="top"
|
||||
name="info-circle"
|
||||
/>
|
||||
</CopyToClipboard>
|
||||
)}
|
||||
<WithPermissionControlTooltip userAction={UserActions.IntegrationsTest}>
|
||||
<Button variant="secondary" size="sm" onClick={this.getSendDemoAlertToParticularRoute(channelFilterId)}>
|
||||
Send demo alert
|
||||
|
|
|
|||
|
|
@ -14,6 +14,7 @@ export const WebhookTriggerType = {
|
|||
Silenced: new KeyValuePair('4', 'Silenced'),
|
||||
Unsilenced: new KeyValuePair('5', 'Unsilenced'),
|
||||
Unresolved: new KeyValuePair('6', 'Unresolved'),
|
||||
Unacknowledged: new KeyValuePair('7', 'Unacknowledged'),
|
||||
};
|
||||
|
||||
export const form: { name: string; fields: FormItem[] } = {
|
||||
|
|
@ -78,6 +79,10 @@ export const form: { name: string; fields: FormItem[] } = {
|
|||
value: WebhookTriggerType.Unresolved.key,
|
||||
label: WebhookTriggerType.Unresolved.value,
|
||||
},
|
||||
{
|
||||
value: WebhookTriggerType.Unacknowledged.key,
|
||||
label: WebhookTriggerType.Unacknowledged.value,
|
||||
},
|
||||
],
|
||||
},
|
||||
validation: { required: true },
|
||||
|
|
|
|||
|
|
@ -121,7 +121,7 @@ export class ScheduleStore extends BaseStore {
|
|||
|
||||
@action
|
||||
async updateItems(
|
||||
f: SchedulesFiltersType | string = { searchTerm: '', type: undefined, used: undefined },
|
||||
f: SchedulesFiltersType | string = { searchTerm: '', type: undefined, used: undefined, mine: undefined },
|
||||
page = 1,
|
||||
shouldUpdateFn: () => boolean = undefined
|
||||
) {
|
||||
|
|
|
|||
|
|
@ -1,10 +1,11 @@
|
|||
import React from 'react';
|
||||
|
||||
import { Button, HorizontalGroup, Icon, VerticalGroup } from '@grafana/ui';
|
||||
import { Button, HorizontalGroup, Icon, IconButton, VerticalGroup } from '@grafana/ui';
|
||||
import cn from 'classnames/bind';
|
||||
import { observer } from 'mobx-react';
|
||||
import moment from 'moment-timezone';
|
||||
import LegacyNavHeading from 'navbar/LegacyNavHeading';
|
||||
import CopyToClipboard from 'react-copy-to-clipboard';
|
||||
import { RouteComponentProps, withRouter } from 'react-router-dom';
|
||||
|
||||
import GTable from 'components/GTable/GTable';
|
||||
|
|
@ -25,6 +26,7 @@ import { ActionDTO } from 'models/action';
|
|||
import { FiltersValues } from 'models/filters/filters.types';
|
||||
import { OutgoingWebhook } from 'models/outgoing_webhook/outgoing_webhook.types';
|
||||
import { OutgoingWebhook2 } from 'models/outgoing_webhook_2/outgoing_webhook_2.types';
|
||||
import { AppFeature } from 'state/features';
|
||||
import { PageProps, WithStoreProps } from 'state/types';
|
||||
import { withMobXProviderContext } from 'state/withStore';
|
||||
import { isUserActionAllowed, UserActions } from 'utils/authorization';
|
||||
|
|
@ -146,7 +148,7 @@ class OutgoingWebhooks2 extends React.Component<OutgoingWebhooks2Props, Outgoing
|
|||
},
|
||||
];
|
||||
|
||||
return (
|
||||
return store.hasFeature(AppFeature.Webhooks2) ? (
|
||||
<PageErrorHandlingWrapper
|
||||
errorData={errorData}
|
||||
objectName="outgoing webhook 2"
|
||||
|
|
@ -209,6 +211,8 @@ class OutgoingWebhooks2 extends React.Component<OutgoingWebhooks2Props, Outgoing
|
|||
</>
|
||||
)}
|
||||
</PageErrorHandlingWrapper>
|
||||
) : (
|
||||
<Text>Outgoing webhooks 2 functionality is not enabled.</Text>
|
||||
);
|
||||
}
|
||||
|
||||
|
|
@ -245,6 +249,20 @@ class OutgoingWebhooks2 extends React.Component<OutgoingWebhooks2Props, Outgoing
|
|||
renderActionButtons = (record: ActionDTO) => {
|
||||
return (
|
||||
<HorizontalGroup justify="flex-end">
|
||||
<CopyToClipboard text={record.id}>
|
||||
<IconButton
|
||||
variant="primary"
|
||||
tooltip={
|
||||
<div>
|
||||
ID {record.id}
|
||||
<br />
|
||||
(click to copy ID to clipboard)
|
||||
</div>
|
||||
}
|
||||
tooltipPlacement="top"
|
||||
name="info-circle"
|
||||
/>
|
||||
</CopyToClipboard>
|
||||
<WithPermissionControlTooltip key={'status_action'} userAction={UserActions.OutgoingWebhooksRead}>
|
||||
<Button onClick={() => this.onStatusClick(record.id)} fill="text">
|
||||
Status
|
||||
|
|
|
|||
|
|
@ -59,7 +59,7 @@ class SchedulesPage extends React.Component<SchedulesPageProps, SchedulesPageSta
|
|||
|
||||
this.state = {
|
||||
startMoment: getStartOfWeek(store.currentTimezone),
|
||||
filters: { searchTerm: '', type: undefined, used: undefined },
|
||||
filters: { searchTerm: '', type: undefined, used: undefined, mine: undefined },
|
||||
showNewScheduleSelector: false,
|
||||
expandedRowKeys: [],
|
||||
scheduleIdToEdit: undefined,
|
||||
|
|
|
|||
|
|
@ -5,4 +5,5 @@ export enum AppFeature {
|
|||
CloudNotifications = 'grafana_cloud_notifications',
|
||||
CloudConnection = 'grafana_cloud_connection',
|
||||
WebSchedules = 'web_schedules',
|
||||
Webhooks2 = 'webhooks2',
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,26 +1,29 @@
|
|||
# PagerDuty to Grafana OnCall migrator tool
|
||||
|
||||
This tool helps to migrate PagerDuty configuration to Grafana OnCall.
|
||||
This tool helps to migrate your PagerDuty configuration to Grafana OnCall.
|
||||
|
||||
## Overview
|
||||
|
||||
Resources that can be migrated using this tool:
|
||||
|
||||
- User notification rules
|
||||
- Escalation policies
|
||||
- On-call schedules
|
||||
- Integrations (services)
|
||||
- Escalation policies
|
||||
- Services (integrations)
|
||||
- Event rules (experimental, only works with global event rulesets)
|
||||
|
||||
## Limitations
|
||||
|
||||
- Not all integration types are supported
|
||||
- Migrated on-call schedules in Grafana OnCall will use ICalendar files from PagerDuty
|
||||
- Delays between migrated notification/escalation rules could be slightly different from original.
|
||||
E.g. if you have a 4-minute delay between rules in PagerDuty, the resulting delay in Grafana OnCall will be 5 minutes
|
||||
- Manual changes to PD configuration may be required to migrate some resources
|
||||
|
||||
## Prerequisites
|
||||
|
||||
1. Make sure you have `docker` installed
|
||||
2. Build the docker image: `docker build -t pd-oncall-migrator .`
|
||||
3. Obtain a PagerDuty API user token: <https://support.pagerduty.com/docs/api-access-keys#generate-a-user-token-rest-api-key>
|
||||
3. Obtain a PagerDuty API **user token**: <https://support.pagerduty.com/docs/api-access-keys#generate-a-user-token-rest-api-key>
|
||||
4. Obtain a Grafana OnCall API token and API URL on the "Settings" page of your Grafana OnCall instance
|
||||
|
||||
## Migration plan
|
||||
|
|
@ -37,12 +40,7 @@ pd-oncall-migrator
|
|||
```
|
||||
|
||||
Please read the generated report carefully since depending on the content of the report, some PagerDuty resources
|
||||
could not be migrated and some existing Grafana OnCall resources could be deleted.
|
||||
|
||||
Note that users are matched by email, so if there are users in the report with "no Grafana OnCall user found with
|
||||
this email" error, it's possible to fix it by adding these users to your Grafana organization.
|
||||
If there is a large number of unmatched users, please also [see the script](scripts/README.md) that can automatically
|
||||
create missing Grafana users via Grafana HTTP API.
|
||||
could be not migrated and some existing Grafana OnCall resources could be deleted.
|
||||
|
||||
### Example migration plan
|
||||
|
||||
|
|
@ -83,6 +81,10 @@ docker run --rm \
|
|||
pd-oncall-migrator
|
||||
```
|
||||
|
||||
When performing a migration, only resources that are marked with ✅ or ⚠️ on the plan stage will be migrated.
|
||||
The migrator is designed to be idempotent, so it's safe to run it multiple times. On every migration run, the tool will
|
||||
check if the resource already exists in Grafana OnCall and will delete it before creating a new one.
|
||||
|
||||
### Migrate unsupported integration types
|
||||
|
||||
It's possible to migrate unsupported integration types to [Grafana OnCall incoming webhooks](https://grafana.com/docs/oncall/latest/integrations/available-integrations/configure-webhook/).
|
||||
|
|
@ -101,9 +103,98 @@ pd-oncall-migrator
|
|||
Consider modifying [alert templates](https://grafana.com/docs/oncall/latest/alert-behavior/alert-templates/) of the created
|
||||
webhook integrations to adjust them for incoming payloads.
|
||||
|
||||
### After migration
|
||||
## Configuration
|
||||
|
||||
Configuration is done via environment variables passed to the docker container.
|
||||
|
||||
<!-- markdownlint-disable MD013 -->
|
||||
|
||||
| Name | Description | Type | Default |
|
||||
|-----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------|---------|
|
||||
| `PAGERDUTY_API_TOKEN` | PagerDuty API **user token**. To create a token, refer to [PagerDuty docs](<https://support.pagerduty.com/docs/api-access-keys#generate-a-user-token-rest-api-key>). | String | N/A |
|
||||
| `ONCALL_API_URL` | Grafana OnCall API URL. This can be found on the "Settings" page of your Grafana OnCall instance. | String | N/A |
|
||||
| `ONCALL_API_TOKEN` | Grafana OnCall API Token. To create a token, navigate to the "Settings" page of your Grafana OnCall instance. | String | N/A |
|
||||
| `MODE` | Migration mode (plan vs actual migration). | String (choices: `plan`, `migrate`) | `plan` |
|
||||
| `SCHEDULE_MIGRATION_MODE` | Determines how on-call schedules are migrated. | String (choices: `ical`, `web`) | `ical` |
|
||||
| `UNSUPPORTED_INTEGRATION_TO_WEBHOOKS` | When set to `true`, integrations with unsupported type will be migrated to Grafana OnCall integrations with type "webhook". When set to `false`, integrations with unsupported type won't be migrated. | Boolean | `false` |
|
||||
| `EXPERIMENTAL_MIGRATE_EVENT_RULES` | Migrate global event rulesets to Grafana OnCall integrations. | Boolean | `false` |
|
||||
| `EXPERIMENTAL_MIGRATE_EVENT_RULES_LONG_NAMES` | Include service & integrations names from PD in migrated integrations (only effective when `EXPERIMENTAL_MIGRATE_EVENT_RULES` is `true`). | Boolean | `false` |
|
||||
|
||||
<!-- markdownlint-enable MD013 -->
|
||||
|
||||
## Resources
|
||||
|
||||
### User notification rules
|
||||
|
||||
The tool is capable of migrating user notification rules from PagerDuty to Grafana OnCall.
|
||||
Notification rules from the `"When a high-urgency incident is assigned to me..."` section in PagerDuty settings are
|
||||
taken into account and will be migrated to default notification rules in Grafana OnCall for each user. Note that delays
|
||||
between notification rules may be slightly different in Grafana OnCall, see [Limitations](#limitations) for more info.
|
||||
|
||||
When running the migration, existing notification rules in Grafana OnCall will be deleted for every affected user.
|
||||
|
||||
Note that users are matched by email, so if there are users in the report with "no Grafana OnCall user found with
|
||||
this email" error, it's possible to fix it by adding these users to your Grafana organization.
|
||||
If there is a large number of unmatched users, please also [see the script](scripts/README.md) that can automatically
|
||||
create missing Grafana users via Grafana HTTP API.
|
||||
|
||||
### On-call schedules
|
||||
|
||||
The tool is capable of migrating on-call schedules from PagerDuty to Grafana OnCall.
|
||||
There are two ways to migrate on-call schedules:
|
||||
|
||||
- Migrate on-call shifts as if they were created in Grafana OnCall web UI. Due to scheduling differences between
|
||||
PagerDuty and Grafana OnCall, it's sometimes impossible to automatically migrate on-call shifts without manual changes
|
||||
in PD. Pass `SCHEDULE_MIGRATION_MODE=web` to the tool to enable this mode.
|
||||
- Using ICalendar file URLs from PagerDuty. This way it's always possible to migrate schedules without any manual
|
||||
changes in PD, but the resulting schedules in Grafana OnCall will be read-only. Pass `SCHEDULE_MIGRATION_MODE=ical` to the tool
|
||||
to enable this mode.
|
||||
|
||||
On-call schedules will be migrated to new Grafana OnCall schedules with the same name as in PD. Any existing schedules
|
||||
with the same name will be deleted before migration. Any on-call schedules that reference unmatched users won't be
|
||||
migrated.
|
||||
|
||||
When running the plan with `SCHEDULE_MIGRATION_MODE=web`, there could be a number of errors regarding on-call schedules.
|
||||
These errors are expected and are caused by the fact that the tool can't always automatically migrate on-call shifts
|
||||
due to differences in scheduling systems in PD and Grafana OnCall. To fix these errors, you need to manually change
|
||||
on-call shifts in PD and re-run the migration.
|
||||
|
||||
### Escalation policies
|
||||
|
||||
The tool is capable of migrating escalation policies from PagerDuty to Grafana OnCall.
|
||||
Every escalation policy will be migrated to a new Grafana OnCall escalation chain with the same name.
|
||||
|
||||
Any existing escalation chains with the same name will be deleted before migration. Any escalation policies that reference
|
||||
unmatched users or schedules that cannot be migrated won't be migrated either.
|
||||
|
||||
Note that delays between escalation steps may be slightly different in Grafana OnCall,
|
||||
see [Limitations](#limitations) for more info.
|
||||
|
||||
### Services (integrations)
|
||||
|
||||
The tool is capable of migrating services (integrations) from PagerDuty to Grafana OnCall.
|
||||
For every service in PD, the tool will migrate all integrations to Grafana OnCall integrations.
|
||||
|
||||
Any services that reference escalation policies that cannot be migrated won't be migrated either.
|
||||
Any integrations with unsupported type won't be migrated unless `UNSUPPORTED_INTEGRATION_TO_WEBHOOKS` is set to `true`.
|
||||
|
||||
### Event rules (global event rulesets)
|
||||
|
||||
The tool is capable of migrating global event rulesets from PagerDuty to Grafana OnCall integrations. This feature is
|
||||
experimental and disabled by default. To enable it, set `EXPERIMENTAL_MIGRATE_EVENT_RULES` to `true`.
|
||||
|
||||
For every ruleset in PD, the tool will create a webhook integration in Grafana OnCall. The tool will create
|
||||
a route for every rule in ruleset, converting conditions in PD to Jinja2 routes in Grafana OnCall. The tool will also
|
||||
select appropriate escalation chains for each route based on service referenced in the rule.
|
||||
|
||||
If you want to include service & integration names in the names of migrated integrations, set
|
||||
`EXPERIMENTAL_MIGRATE_EVENT_RULES_LONG_NAMES` to `true` (note that this only applies when
|
||||
`EXPERIMENTAL_MIGRATE_EVENT_RULES` is `true`). This can make searching for integrations easier,
|
||||
but it can also make the names of integrations too long.
|
||||
|
||||
## After migration
|
||||
|
||||
- Connect integrations (press the "How to connect" button on the integration page)
|
||||
- Make sure users connect their phone numbers, Slack accounts, etc. in their user settings
|
||||
- At some point you would probably want to recreate schedules using Google Calendar or Terraform to be able to modify
|
||||
migrated on-call schedules in Grafana OnCall
|
||||
- When using `SCHEDULE_MIGRATION_MODE=ical`, at some point you would probably want to recreate schedules using
|
||||
Google Calendar or Terraform to be able to modify migrated on-call schedules in Grafana OnCall
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue