PD migrator: migrate overrides (#1454)
# What this PR does

Allows the PD migrator to migrate overrides (the current implementation only migrates rotation layers). Also tweaks the public API so that created overrides are consistent with the web UI.

## Checklist

- [x] Tests updated
Parent: 19f1491e3a
Commit: c20229fefd

5 changed files with 218 additions and 2 deletions
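As context for the diffs below, creating a web override through the public API looks roughly like the sketch that follows. This is a minimal illustration using `requests`: the host, token, and user ID are placeholders, and the `/api/v1/on_call_shifts/` path is an assumption inferred from `reverse("api-public:on_call_shifts-list")` in the tests.

```python
import requests

# Assumptions: the public API lives under /api/v1/, authorization is the raw
# API token (as in the test's HTTP_AUTHORIZATION header), and the payload
# mirrors the one built in test_create_web_override below.
response = requests.post(
    "https://oncall.example.com/api/v1/on_call_shifts/",
    headers={"Authorization": "<public-api-token>"},
    json={
        "team_id": None,
        "name": "test web override",
        "type": "override",
        "source": 0,  # 0 is an alias for "web"
        "start": "2023-03-02T11:00:00",
        "duration": 3600,
        "users": ["<user-public-primary-key>"],
        "time_zone": "UTC",
    },
)
print(response.status_code, response.json())
```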
Public API serializer (CustomOnCallShiftSerializer):

@@ -337,6 +337,19 @@ class CustomOnCallShiftSerializer(EagerLoadingMixin, serializers.ModelSerializer
                 validated_data[field] = None
         if validated_data.get("start") is not None:
             validated_data["start"] = validated_data["start"].replace(tzinfo=None)
+
+        # Populate the "rolling_users" field using the "users" field for web overrides.
+        # This emulates the behavior of the web UI, which creates overrides populating the rolling_users field.
+        # Also set the "priority_level" to 99 and "rotation_start" to "start" so it's consistent with the web UI.
+        # See apps.api.serializers.on_call_shifts.OnCallShiftSerializer for more info.
+        if (
+            event_type == CustomOnCallShift.TYPE_OVERRIDE
+            and validated_data.get("source") == CustomOnCallShift.SOURCE_WEB
+        ):
+            validated_data["rolling_users"] = [{str(u.pk): u.public_primary_key} for u in validated_data["users"]]
+            validated_data["priority_level"] = 99
+            validated_data["rotation_start"] = validated_data["start"]
+
         return validated_data

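A minimal standalone sketch of the `rolling_users` shape this new branch produces (`FakeUser` and the example IDs are made up for illustration):

```python
from dataclasses import dataclass


@dataclass
class FakeUser:
    pk: int
    public_primary_key: str


users = [FakeUser(pk=42, public_primary_key="UABCDEF123456")]

# Same comprehension as in the serializer: one {str(pk): public_primary_key}
# dict per user, which is the rolling_users shape the web UI writes.
rolling_users = [{str(u.pk): u.public_primary_key} for u in users]
print(rolling_users)  # [{'42': 'UABCDEF123456'}]
```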
Public API on-call shift tests:

@@ -375,3 +375,45 @@ def test_delete_on_call_shift(make_organization_and_user_with_token, make_on_call_shift):
     with pytest.raises(CustomOnCallShift.DoesNotExist):
         on_call_shift.refresh_from_db()
+
+
+@pytest.mark.django_db
+def test_create_web_override(make_organization_and_user_with_token, make_on_call_shift):
+    _, user, token = make_organization_and_user_with_token()
+    client = APIClient()
+
+    url = reverse("api-public:on_call_shifts-list")
+
+    start = datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0)
+    data = {
+        "team_id": None,
+        "name": "test web override",
+        "type": "override",
+        "source": 0,
+        "start": start.strftime("%Y-%m-%dT%H:%M:%S"),
+        "duration": 3600,
+        "users": [user.public_primary_key],
+        "time_zone": "UTC",
+    }
+    response = client.post(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
+
+    shift = CustomOnCallShift.objects.get(name="test web override")
+    expected_response = {
+        "id": shift.public_primary_key,
+        "team_id": None,
+        "name": "test web override",
+        "type": "override",
+        "start": start.strftime("%Y-%m-%dT%H:%M:%S"),
+        "rotation_start": start.strftime("%Y-%m-%dT%H:%M:%S"),
+        "duration": 3600,
+        "users": [user.public_primary_key],
+        "time_zone": "UTC",
+    }
+
+    assert response.status_code == status.HTTP_201_CREATED
+    assert response.json() == expected_response
+
+    assert shift.rolling_users == [{str(user.pk): user.public_primary_key}]
+    assert shift.priority_level == 99
+    assert shift.start == start
+    assert shift.rotation_start == start

Migrator entrypoint (main):

@@ -1,3 +1,5 @@
+import datetime
+
 from pdpyras import APISession
 
 from migrator import oncall_api_client

@@ -49,9 +51,24 @@ def main() -> None:
     ]
 
     print("▶ Fetching schedules...")
+    # Fetch schedules from PagerDuty
     schedules = session.list_all(
         "schedules", params={"include[]": "schedule_layers", "time_zone": "UTC"}
     )
+
+    # Fetch overrides from PagerDuty
+    since = datetime.datetime.now(datetime.timezone.utc)
+    until = since + datetime.timedelta(
+        days=365
+    )  # fetch overrides up to 1 year from now
+    for schedule in schedules:
+        response = session.jget(
+            f"schedules/{schedule['id']}/overrides",
+            params={"since": since.isoformat(), "until": until.isoformat()},
+        )
+        schedule["overrides"] = response["overrides"]
+
+    # Fetch schedules from OnCall
     oncall_schedules = oncall_api_client.list_all("schedules")
 
     print("▶ Fetching escalation policies...")

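The loop above relies on only a small slice of the PagerDuty overrides response. The sketch below shows the assumed shape, inferred from how the code consumes it (`response["overrides"]`, then `start`, `end`, and `user.id` per entry in `Override.from_dict` below); the IDs and timestamps are invented placeholders.

```python
# Assumed response shape for GET schedules/{id}/overrides; values are
# placeholders for illustration only.
response = {
    "overrides": [
        {
            "start": "2023-03-02T11:00:00+00:00",
            "end": "2023-03-02T12:00:00+00:00",
            "user": {"id": "PUSER123"},
        }
    ]
}

# Iterate the same fields the migrator reads.
for override in response["overrides"]:
    print(override["user"]["id"], override["start"], override["end"])
```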
Migrator schedule model (Schedule gains overrides; new Override dataclass after Restriction):

@@ -98,6 +98,7 @@ class Schedule:
     name: str
     time_zone: str
     layers: list["Layer"]
+    overrides: list["Override"]
 
     @classmethod
     def from_dict(cls, schedule: dict) -> "Schedule":

@@ -116,10 +117,15 @@ class Schedule:
             layers.append(layer)
 
+        overrides = []
+        for override in schedule["overrides"]:
+            overrides.append(Override.from_dict(override))
+
         return cls(
             name=schedule["name"],
             time_zone=schedule["time_zone"],
             layers=layers,
+            overrides=overrides,
         )
 
     def to_oncall_schedule(

@@ -139,7 +145,7 @@ class Schedule:
             ]
             if deactivated_user_ids:
                 errors.append(
-                    f"{layer.name}: User IDs {deactivated_user_ids} not found. The users probably have been deactivated in PagerDuty."
+                    f"{layer.name}: Users with IDs {deactivated_user_ids} not found. The users probably have been deactivated in PagerDuty."
                 )
                 continue

@@ -158,6 +164,15 @@ class Schedule:
             errors.append(error_text)
 
+        for override in self.overrides:
+            if override.user_id not in user_id_map:
+                errors.append(
+                    f"Override: User with ID '{override.user_id}' not found. The user probably has been deactivated in PagerDuty."
+                )
+                continue
+
+            shifts.append(override.to_oncall_shift(user_id_map))
+
         if errors:
             return None, errors

@@ -572,3 +587,39 @@ class Restriction:
         # there should always be a restriction
         raise ValueError("No restriction found for given datetime")
+
+
+@dataclass
+class Override:
+    start: datetime.datetime
+    end: datetime.datetime
+    user_id: str
+
+    @classmethod
+    def from_dict(cls, override: dict) -> "Override":
+        # convert start and end to datetime objects in UTC
+        start = datetime.datetime.fromisoformat(override["start"]).astimezone(
+            datetime.timezone.utc
+        )
+        end = datetime.datetime.fromisoformat(override["end"]).astimezone(
+            datetime.timezone.utc
+        )
+
+        return cls(start=start, end=end, user_id=override["user"]["id"])
+
+    def to_oncall_shift(self, user_id_map: dict[str, str]) -> dict:
+        start = _dt_to_oncall_datetime(self.start)
+        duration = int((self.end - self.start).total_seconds())
+        user_id = user_id_map[self.user_id]
+
+        return {
+            "name": uuid4().hex,
+            "team_id": None,
+            "type": "override",
+            "time_zone": "UTC",
+            "start": start,
+            "duration": duration,
+            "rotation_start": start,
+            "users": [user_id],
+            "source": 0,  # 0 is an alias for "web"
+        }

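A minimal standalone illustration of the UTC normalization done in `Override.from_dict`, and why the four overrides in `test_overrides` below collapse into identical shifts: the three offset-aware timestamps denote the same instant, so they all normalize to `2023-03-02 11:00:00+00:00`. (A naive timestamp, as in the first test override, is interpreted as local time by `astimezone()`, so it only lands on 11:00 UTC when the process runs in UTC.)

```python
import datetime

# Each offset-aware string below is the same instant expressed in a
# different UTC offset; astimezone(utc) maps them all to one datetime.
for raw in (
    "2023-03-02T11:00:00+00:00",
    "2023-03-02T12:00:00+01:00",
    "2023-03-02T10:00:00-01:00",
):
    dt = datetime.datetime.fromisoformat(raw).astimezone(datetime.timezone.utc)
    print(dt.isoformat())  # 2023-03-02T11:00:00+00:00 each time
```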
Migrator schedule tests (each pd_schedule fixture gains an empty "overrides" list, the layer error message assertion is updated, and two override tests are added):

@@ -179,6 +179,7 @@ def test_deactivated_users():
     pd_schedule = {
         "name": "No restrictions",
         "time_zone": "Europe/London",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 1",

@@ -199,7 +200,7 @@ def test_deactivated_users():
         user_id_map
     )
     assert errors == [
-        "Layer 1: User IDs ['USER_ID_DEACTIVATED'] not found. The users probably have been deactivated in PagerDuty."
+        "Layer 1: Users with IDs ['USER_ID_DEACTIVATED'] not found. The users probably have been deactivated in PagerDuty."
     ]

@@ -207,6 +208,7 @@ def test_no_restrictions():
     pd_schedule = {
         "name": "No restrictions",
         "time_zone": "Europe/London",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 5",

@@ -355,6 +357,7 @@ def test_daily_with_daily_restrictions():
     pd_schedule = {
         "name": "Daily with daily restrictions",
         "time_zone": "Europe/London",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 10",

@@ -941,6 +944,7 @@ def test_weekly_with_daily_restrictions():
     pd_schedule = {
         "name": "Weekly with daily restrictions",
         "time_zone": "Europe/London",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 6",

@@ -1287,6 +1291,7 @@ def test_daily_with_weekly_restrictions():
     pd_schedule = {
         "name": "Daily with weekly restrictions",
         "time_zone": "Europe/London",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 4",

@@ -1477,6 +1482,7 @@ def test_weekly_with_weekly_restrictions():
     pd_schedule = {
         "name": "Weekly (weekly)",
         "time_zone": "Europe/London",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 6",

@@ -1835,6 +1841,7 @@ def test_errors():
     pd_schedule = {
         "name": "Errors",
         "time_zone": "Europe/London",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 11",

@@ -2059,6 +2066,7 @@ def test_time_zone():
     pd_schedule = {
         "name": "Time zone",
         "time_zone": "Europe/Paris",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 1",

@@ -2115,6 +2123,7 @@ def test_removed_layers():
     pd_schedule = {
         "name": "Removed layer",
         "time_zone": "Europe/Paris",
+        "overrides": [],
         "schedule_layers": [
             {
                 "name": "Layer 1",

@@ -2149,3 +2158,87 @@ def test_removed_layers():
     for shift in oncall_schedule["shifts"]:
         shift.pop("name")
     assert oncall_schedule == expected
+
+
+def test_overrides():
+    pd_schedule = {
+        "name": "Overrides",
+        "time_zone": "Europe/London",
+        "overrides": [
+            {
+                "start": "2023-03-02T11:00:00",
+                "end": "2023-03-02T12:00:00",
+                "user": {"id": "USER_ID_1"},
+            },
+            {
+                "start": "2023-03-02T11:00:00+00:00",
+                "end": "2023-03-02T12:00:00+00:00",
+                "user": {"id": "USER_ID_1"},
+            },
+            {
+                "start": "2023-03-02T12:00:00+01:00",
+                "end": "2023-03-02T13:00:00+01:00",
+                "user": {"id": "USER_ID_1"},
+            },
+            {
+                "start": "2023-03-02T10:00:00-01:00",
+                "end": "2023-03-02T11:00:00-01:00",
+                "user": {"id": "USER_ID_1"},
+            },
+        ],
+        "schedule_layers": [],
+    }
+
+    expected = {
+        "name": "Overrides",
+        "shifts": [
+            {
+                "team_id": None,
+                "duration": 3600,
+                "users": ["USER_ID_1"],
+                "rotation_start": "2023-03-02T11:00:00",
+                "start": "2023-03-02T11:00:00",
+                "time_zone": "UTC",
+                "type": "override",
+                "source": 0,
+            },
+        ]
+        * 4,  # all shifts are the same
+        "team_id": None,
+        "time_zone": "Europe/London",
+        "type": "web",
+    }
+
+    oncall_schedule, errors = Schedule.from_dict(pd_schedule).to_oncall_schedule(
+        user_id_map
+    )
+
+    assert errors == []
+
+    for shift in oncall_schedule["shifts"]:
+        shift.pop("name")
+    assert oncall_schedule == expected
+
+
+def test_override_deactivated_user():
+    pd_schedule = {
+        "name": "Overrides",
+        "time_zone": "Europe/London",
+        "overrides": [
+            {
+                "start": "2023-03-02T11:00:00",
+                "end": "2023-03-02T12:00:00",
+                "user": {"id": "USER_ID_4"},
+            },
+        ],
+        "schedule_layers": [],
+    }
+
+    oncall_schedule, errors = Schedule.from_dict(pd_schedule).to_oncall_schedule(
+        user_id_map
+    )
+
+    assert errors == [
+        "Override: User with ID 'USER_ID_4' not found. The user probably has been deactivated in PagerDuty."
+    ]
+    assert oncall_schedule is None