public API endpoint to export schedule final shifts (#2047)
# What this PR does Closes https://github.com/grafana/oncall-private/issues/1632 ## Checklist - [x] Unit, integration, and e2e (if applicable) tests updated - [x] Documentation added (or `pr:no public docs` PR label added if not required) - [x] `CHANGELOG.md` updated (or `pr:no changelog` PR label added if not required)
This commit is contained in:
parent
5f067af14f
commit
7ed6290d42
9 changed files with 424 additions and 13 deletions
|
|
@ -13,6 +13,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||
|
||||
## v1.2.36 (2023-06-02)
|
||||
|
||||
### Added
|
||||
|
||||
- Add public API endpoint to export a schedule's final shifts by @joeyorlando ([2047](https://github.com/grafana/oncall/pull/2047))
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fix demo alert for inbound email integration by @vadimkerr ([#2081](https://github.com/grafana/oncall/pull/2081))
|
||||
|
|
|
|||
|
|
@ -195,3 +195,171 @@ curl "{{API_URL}}/api/v1/schedules/SBM7DV7BKFUYU/" \
|
|||
**HTTP request**
|
||||
|
||||
`DELETE {{API_URL}}/api/v1/schedules/<SCHEDULE_ID>/`
|
||||
|
||||
# Export a schedule's final shifts
|
||||
|
||||
**HTTP request**
|
||||
|
||||
```shell
|
||||
curl "{{API_URL}}/api/v1/schedules/SBM7DV7BKFUYU/final_shifts?start_date=2023-01-01&end_date=2023-02-01" \
|
||||
--request GET \
|
||||
--header "Authorization: meowmeowmeow"
|
||||
```
|
||||
|
||||
The above command returns JSON structured in the following way:
|
||||
|
||||
```json
|
||||
{
|
||||
"count": 12,
|
||||
"next": null,
|
||||
"previous": null,
|
||||
"results": [
|
||||
{
|
||||
"user_pk": "UC2CHRT5SD34X",
|
||||
"user_email": "alice@example.com",
|
||||
"user_username": "alice",
|
||||
"shift_start": "2023-01-02T09:00:00Z",
|
||||
"shift_end": "2023-01-02T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "U7S8H84ARFTGN",
|
||||
"user_email": "bob@example.com",
|
||||
"user_username": "bob",
|
||||
"shift_start": "2023-01-04T09:00:00Z",
|
||||
"shift_end": "2023-01-04T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "UC2CHRT5SD34X",
|
||||
"user_email": "alice@example.com",
|
||||
"user_username": "alice",
|
||||
"shift_start": "2023-01-06T09:00:00Z",
|
||||
"shift_end": "2023-01-06T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "U7S8H84ARFTGN",
|
||||
"user_email": "bob@example.com",
|
||||
"user_username": "bob",
|
||||
"shift_start": "2023-01-09T09:00:00Z",
|
||||
"shift_end": "2023-01-09T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "UC2CHRT5SD34X",
|
||||
"user_email": "alice@example.com",
|
||||
"user_username": "alice",
|
||||
"shift_start": "2023-01-11T09:00:00Z",
|
||||
"shift_end": "2023-01-11T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "U7S8H84ARFTGN",
|
||||
"user_email": "bob@example.com",
|
||||
"user_username": "bob",
|
||||
"shift_start": "2023-01-13T09:00:00Z",
|
||||
"shift_end": "2023-01-13T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "UC2CHRT5SD34X",
|
||||
"user_email": "alice@example.com",
|
||||
"user_username": "alice",
|
||||
"shift_start": "2023-01-16T09:00:00Z",
|
||||
"shift_end": "2023-01-16T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "U7S8H84ARFTGN",
|
||||
"user_email": "bob@example.com",
|
||||
"user_username": "bob",
|
||||
"shift_start": "2023-01-18T09:00:00Z",
|
||||
"shift_end": "2023-01-18T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "UC2CHRT5SD34X",
|
||||
"user_email": "alice@example.com",
|
||||
"user_username": "alice",
|
||||
"shift_start": "2023-01-20T09:00:00Z",
|
||||
"shift_end": "2023-01-20T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "U7S8H84ARFTGN",
|
||||
"user_email": "bob@example.com",
|
||||
"user_username": "bob",
|
||||
"shift_start": "2023-01-23T09:00:00Z",
|
||||
"shift_end": "2023-01-23T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "UC2CHRT5SD34X",
|
||||
"user_email": "alice@example.com",
|
||||
"user_username": "alice",
|
||||
"shift_start": "2023-01-25T09:00:00Z",
|
||||
"shift_end": "2023-01-25T17:00:00Z"
|
||||
},
|
||||
{
|
||||
"user_pk": "U7S8H84ARFTGN",
|
||||
"user_email": "bob@example.com",
|
||||
"user_username": "bob",
|
||||
"shift_start": "2023-01-27T09:00:00Z",
|
||||
"shift_end": "2023-01-27T17:00:00Z"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Caveats
|
||||
|
||||
Some notes on the `start_date` and `end_date` query parameters:
|
||||
|
||||
- they are both required and should represent ISO 8601 formatted dates
|
||||
- `end_date` must be greater than or equal to `start_date`
|
||||
- the difference between `start_date` and `end_date` cannot exceed 365 days
|
||||
|
||||
Lastly, this endpoint is currently only active for web schedules. It will return HTTP 400 for schedules
|
||||
defined via Terraform or iCal.
|
||||
|
||||
## Example script to transform data to .csv for all of your schedules
|
||||
|
||||
The following Python script will generate a `.csv` file, `oncall-report-2023-01-01-to-2023-01-31.csv`. This file will
|
||||
contain three columns, `user_pk`, `user_email`, and `hours_on_call`, which represents how many hours each user was
|
||||
on call during the period from January 1, 2023 to January 31, 2023 (inclusive).
|
||||
|
||||
```python
|
||||
import csv
|
||||
import requests
|
||||
from datetime import datetime
|
||||
|
||||
# CUSTOMIZE THE FOLLOWING VARIABLES
|
||||
START_DATE = "2023-01-01"
|
||||
END_DATE = "2023-01-31"
|
||||
OUTPUT_FILE_NAME = f"oncall-report-{START_DATE}-to-{END_DATE}.csv"
|
||||
MY_ONCALL_API_BASE_URL = "https://oncall-prod-us-central-0.grafana.net/oncall/api/v1/schedules"
|
||||
MY_ONCALL_API_KEY = "meowmeowwoofwoof"
|
||||
|
||||
headers = {"Authorization": MY_ONCALL_API_KEY}
|
||||
schedule_ids = [schedule["id"] for schedule in requests.get(MY_ONCALL_API_BASE_URL, headers=headers).json()["results"]]
|
||||
user_on_call_hours = {}
|
||||
|
||||
for schedule_id in schedule_ids:
|
||||
response = requests.get(
|
||||
f"{MY_ONCALL_API_BASE_URL}/{schedule_id}/final_shifts?start_date={START_DATE}&end_date={END_DATE}",
|
||||
headers=headers)
|
||||
|
||||
for final_shift in response.json()["results"]:
|
||||
user_pk = final_shift["user_pk"]
|
||||
end = datetime.fromisoformat(final_shift["shift_end"])
|
||||
start = datetime.fromisoformat(final_shift["shift_start"])
|
||||
shift_time_in_seconds = (end - start).total_seconds()
|
||||
shift_time_in_hours = shift_time_in_seconds / (60 * 60)
|
||||
|
||||
if user_pk in user_on_call_hours:
|
||||
user_on_call_hours[user_pk]["hours_on_call"] += shift_time_in_hours
|
||||
else:
|
||||
user_on_call_hours[user_pk] = {
|
||||
"email": final_shift["user_email"],
|
||||
"hours_on_call": shift_time_in_hours,
|
||||
}
|
||||
|
||||
with open(OUTPUT_FILE_NAME, "w") as fp:
|
||||
csv_writer = csv.DictWriter(fp, ["user_pk", "user_email", "hours_on_call"])
|
||||
csv_writer.writeheader()
|
||||
|
||||
for user_pk, user_info in user_on_call_hours.items():
|
||||
csv_writer.writerow({
|
||||
"user_pk": user_pk, "user_email": user_info["email"], "hours_on_call": user_info["hours_on_call"]})
|
||||
```
|
||||
|
|
|
|||
|
|
@ -1363,7 +1363,9 @@ def test_on_call_shift_preview(
|
|||
"is_gap": False,
|
||||
"priority_level": 2,
|
||||
"missing_users": [],
|
||||
"users": [{"display_name": other_user.username, "pk": other_user.public_primary_key}],
|
||||
"users": [
|
||||
{"display_name": other_user.username, "pk": other_user.public_primary_key, "email": other_user.email}
|
||||
],
|
||||
"source": "web",
|
||||
}
|
||||
]
|
||||
|
|
@ -1653,7 +1655,9 @@ def test_on_call_shift_preview_update(
|
|||
"is_gap": False,
|
||||
"priority_level": 1,
|
||||
"missing_users": [],
|
||||
"users": [{"display_name": other_user.username, "pk": other_user.public_primary_key}],
|
||||
"users": [
|
||||
{"display_name": other_user.username, "pk": other_user.public_primary_key, "email": other_user.email}
|
||||
],
|
||||
"source": "web",
|
||||
}
|
||||
assert rotation_events[-1] == expected_shift_preview
|
||||
|
|
@ -1764,7 +1768,9 @@ def test_on_call_shift_preview_update_not_started_reuse_pk(
|
|||
"is_gap": False,
|
||||
"priority_level": 1,
|
||||
"missing_users": [],
|
||||
"users": [{"display_name": other_user.username, "pk": other_user.public_primary_key}],
|
||||
"users": [
|
||||
{"display_name": other_user.username, "pk": other_user.public_primary_key, "email": other_user.email}
|
||||
],
|
||||
"source": "web",
|
||||
},
|
||||
]
|
||||
|
|
|
|||
|
|
@ -812,7 +812,7 @@ def test_events_calendar(
|
|||
"all_day": False,
|
||||
"start": on_call_shift.start,
|
||||
"end": on_call_shift.start + on_call_shift.duration,
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key}],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key, "email": user.email}],
|
||||
"missing_users": [],
|
||||
"priority_level": on_call_shift.priority_level,
|
||||
"source": "api",
|
||||
|
|
@ -878,7 +878,7 @@ def test_filter_events_calendar(
|
|||
"all_day": False,
|
||||
"start": mon_start,
|
||||
"end": mon_start + on_call_shift.duration,
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key}],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key, "email": user.email}],
|
||||
"missing_users": [],
|
||||
"priority_level": on_call_shift.priority_level,
|
||||
"source": "api",
|
||||
|
|
@ -894,7 +894,7 @@ def test_filter_events_calendar(
|
|||
"all_day": False,
|
||||
"start": fri_start,
|
||||
"end": fri_start + on_call_shift.duration,
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key}],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key, "email": user.email}],
|
||||
"missing_users": [],
|
||||
"priority_level": on_call_shift.priority_level,
|
||||
"source": "api",
|
||||
|
|
@ -977,7 +977,7 @@ def test_filter_events_range_calendar(
|
|||
"all_day": False,
|
||||
"start": fri_start,
|
||||
"end": fri_start + on_call_shift.duration,
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key}],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key, "email": user.email}],
|
||||
"missing_users": [],
|
||||
"priority_level": on_call_shift.priority_level,
|
||||
"source": "api",
|
||||
|
|
@ -1059,7 +1059,13 @@ def test_filter_events_overrides(
|
|||
"all_day": False,
|
||||
"start": override_start,
|
||||
"end": override_start + override.duration,
|
||||
"users": [{"display_name": other_user.username, "pk": other_user.public_primary_key}],
|
||||
"users": [
|
||||
{
|
||||
"display_name": other_user.username,
|
||||
"pk": other_user.public_primary_key,
|
||||
"email": other_user.email,
|
||||
}
|
||||
],
|
||||
"missing_users": [],
|
||||
"priority_level": None,
|
||||
"source": "api",
|
||||
|
|
|
|||
|
|
@ -71,3 +71,17 @@ class ScheduleBaseSerializer(serializers.ModelSerializer):
|
|||
}
|
||||
|
||||
return result
|
||||
|
||||
|
||||
class FinalShiftQueryParamsSerializer(serializers.Serializer):
    """Validates the query params of the schedule final shifts export endpoint.

    Both dates are required; the requested window must be non-negative and at
    most 365 days long.
    """

    start_date = serializers.DateField(required=True)
    end_date = serializers.DateField(required=True)

    def validate(self, attrs):
        start, end = attrs["start_date"], attrs["end_date"]
        # reject inverted ranges before checking the window length
        if end < start:
            raise serializers.ValidationError("start_date must be less than or equal to end_date")
        if (end - start) > timezone.timedelta(days=365):
            raise serializers.ValidationError(
                "The difference between start_date and end_date must be less than one year (365 days)"
            )
        return attrs
|
||||
|
|
|
|||
|
|
@ -1,3 +1,4 @@
|
|||
import collections
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
|
@ -781,3 +782,146 @@ def test_create_ical_schedule_without_ical_url(make_organization_and_user_with_t
|
|||
}
|
||||
response = client.post(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_oncall_shifts_request_validation(
    make_organization_and_user_with_token,
    make_schedule,
):
    """Non-web schedules and invalid query params should both produce HTTP 400."""
    organization, _, token = make_organization_and_user_with_token()
    ical_schedule = make_schedule(organization, schedule_class=OnCallScheduleICal)
    terraform_schedule = make_schedule(organization, schedule_class=OnCallScheduleCalendar)
    web_schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)

    schedule_type_validation_msg = "OnCall shifts exports are currently only available for web calendars"
    valid_date_msg = "Date has wrong format. Use one of these formats instead: YYYY-MM-DD."

    client = APIClient()

    def _make_request(schedule, query_params=""):
        url = reverse("api-public:schedules-final-shifts", kwargs={"pk": schedule.public_primary_key})
        return client.get(f"{url}{query_params}", format="json", HTTP_AUTHORIZATION=token)

    # only web schedules are allowed for now
    for non_web_schedule in (ical_schedule, terraform_schedule):
        response = _make_request(non_web_schedule)
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert response.data == schedule_type_validation_msg

    # per-field query param validation: (query string, offending field, expected message)
    field_cases = [
        ("?start_date=2021-01-01", "end_date", "This field is required."),
        ("?start_date=asdfasdf", "start_date", valid_date_msg),
        ("?end_date=2021-01-01", "start_date", "This field is required."),
        ("?start_date=2021-01-01&end_date=asdfasdf", "end_date", valid_date_msg),
    ]
    for query_params, field, expected_msg in field_cases:
        response = _make_request(web_schedule, query_params)
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert response.json()[field][0] == expected_msg

    # cross-field validation: inverted range, then a window longer than 365 days
    cross_field_cases = [
        (
            "?end_date=2021-01-01&start_date=2022-01-01",
            "start_date must be less than or equal to end_date",
        ),
        (
            "?end_date=2021-01-01&start_date=2019-12-31",
            "The difference between start_date and end_date must be less than one year (365 days)",
        ),
    ]
    for query_params, expected_msg in cross_field_cases:
        response = _make_request(web_schedule, query_params)
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert response.json() == {"non_field_errors": [expected_msg]}
|
||||
|
||||
|
||||
@pytest.mark.django_db
def test_oncall_shifts_export(
    make_organization_and_user_with_token,
    make_user,
    make_schedule,
    make_on_call_shift,
):
    """End-to-end check of the final shifts export for a rolling-users web schedule."""
    organization, _, token = make_organization_and_user_with_token()

    alice_email = "alice909450945045@example.com"
    bob_email = "bob123123123123123@example.com"
    alice_username = "alice"
    bob_username = "bob"

    alice = make_user(organization=organization, email=alice_email, username=alice_username)
    bob = make_user(organization=organization, email=bob_email, username=bob_username)

    alice_pk = alice.public_primary_key
    bob_pk = bob.public_primary_key
    schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)

    # one 8h shift every Mon/Wed/Fri for four weeks, rotating between the two users
    rotation_start = timezone.datetime(2023, 1, 1, 9, 0, 0)
    make_on_call_shift(
        organization=organization,
        schedule=schedule,
        shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT,
        frequency=CustomOnCallShift.FREQUENCY_DAILY,
        priority_level=1,
        interval=1,
        by_day=["MO", "WE", "FR"],
        start=rotation_start,
        until=rotation_start + timezone.timedelta(days=28),
        rolling_users=[{alice.pk: alice_pk}, {bob.pk: bob_pk}],
        rotation_start=rotation_start,
        duration=timezone.timedelta(hours=8),
    )

    client = APIClient()

    url = reverse("api-public:schedules-final-shifts", kwargs={"pk": schedule.public_primary_key})
    response = client.get(f"{url}?start_date=2023-01-01&end_date=2023-02-01", format="json", HTTP_AUTHORIZATION=token)
    response_json = response.json()
    shifts = response_json["results"]

    expected_user_details = {
        alice_pk: {"email": alice_email, "username": alice_username},
        bob_pk: {"email": bob_email, "username": bob_username},
    }
    total_time_on_call = collections.defaultdict(int)

    for shift in shifts:
        user_pk = shift["user_pk"]

        # make sure we're exporting email and username as well
        assert expected_user_details[user_pk]["email"] == shift["user_email"]
        assert expected_user_details[user_pk]["username"] == shift["user_username"]

        shift_end = timezone.datetime.fromisoformat(shift["shift_end"])
        shift_start = timezone.datetime.fromisoformat(shift["shift_start"])
        total_time_on_call[user_pk] += (shift_end - shift_start).total_seconds() / (60 * 60)

    assert response.status_code == status.HTTP_200_OK

    # 3 shifts per week x 4 weeks x 8 hours per shift = 96 / 2 users = 48h per user for this period
    expected_time_on_call = 48
    assert total_time_on_call[alice_pk] == expected_time_on_call
    assert total_time_on_call[bob_pk] == expected_time_on_call

    # pagination parameters are mocked out for now
    assert response_json["next"] is None
    assert response_json["previous"] is None
    assert response_json["count"] == len(shifts)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import logging
|
||||
|
||||
from django_filters import rest_framework as filters
|
||||
from rest_framework import status
|
||||
from rest_framework.decorators import action
|
||||
|
|
@ -9,9 +11,11 @@ from rest_framework.viewsets import ModelViewSet
|
|||
from apps.auth_token.auth import ApiTokenAuthentication, ScheduleExportAuthentication
|
||||
from apps.public_api.custom_renderers import CalendarRenderer
|
||||
from apps.public_api.serializers import PolymorphicScheduleSerializer, PolymorphicScheduleUpdateSerializer
|
||||
from apps.public_api.serializers.schedules_base import FinalShiftQueryParamsSerializer
|
||||
from apps.public_api.throttlers.user_throttle import UserThrottle
|
||||
from apps.schedules.ical_utils import ical_export_from_schedule
|
||||
from apps.schedules.models import OnCallSchedule, OnCallScheduleWeb
|
||||
from apps.schedules.models.on_call_schedule import ScheduleEvents, ScheduleFinalShifts
|
||||
from apps.slack.tasks import update_slack_user_group_for_schedules
|
||||
from common.api_helpers.exceptions import BadRequest
|
||||
from common.api_helpers.filters import ByTeamFilter
|
||||
|
|
@ -19,6 +23,8 @@ from common.api_helpers.mixins import RateLimitHeadersMixin, UpdateSerializerMix
|
|||
from common.api_helpers.paginators import FiftyPageSizePaginator
|
||||
from common.insight_log import EntityEvent, write_resource_insight_log
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class OnCallScheduleChannelView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet):
|
||||
authentication_classes = (ApiTokenAuthentication,)
|
||||
|
|
@ -120,3 +126,51 @@ class OnCallScheduleChannelView(RateLimitHeadersMixin, UpdateSerializerMixin, Mo
|
|||
# Not using existing get_object method because it requires access to the organization user attribute
|
||||
export = ical_export_from_schedule(self.request.auth.schedule)
|
||||
return Response(export, status=status.HTTP_200_OK)
|
||||
|
||||
@action(methods=["get"], detail=True)
def final_shifts(self, request, pk):
    """Export a schedule's final (resolved) shifts between two dates.

    Query params (validated by FinalShiftQueryParamsSerializer):
        start_date / end_date: required ISO 8601 dates; end_date must be >=
        start_date and at most 365 days after it.

    Returns HTTP 400 for non-web schedules, otherwise a paginated-shaped
    payload of {user_pk, user_email, user_username, shift_start, shift_end}
    rows — one row per user per shift event.
    """
    schedule = self.get_object()

    # this export is only implemented for web schedules for now
    if not isinstance(schedule, OnCallScheduleWeb):
        return Response(
            "OnCall shifts exports are currently only available for web calendars",
            status=status.HTTP_400_BAD_REQUEST,
        )

    serializer = FinalShiftQueryParamsSerializer(data=request.query_params)
    serializer.is_valid(raise_exception=True)

    start_date = serializer.validated_data["start_date"]
    end_date = serializer.validated_data["end_date"]
    days_between_start_and_end = (end_date - start_date).days

    final_schedule_events: ScheduleEvents = schedule.final_events("UTC", start_date, days_between_start_and_end)

    # lazy %-style args: the message is only formatted if this level is emitted
    logger.info(
        "Exporting oncall shifts for schedule %s between dates %s and %s. %s shift events were found.",
        pk,
        start_date,
        end_date,
        len(final_schedule_events),
    )

    # flatten each event into one export row per user on that shift
    data: ScheduleFinalShifts = [
        {
            "user_pk": user["pk"],
            "user_email": user["email"],
            "user_username": user["display_name"],
            "shift_start": event["start"],
            "shift_end": event["end"],
        }
        for event in final_schedule_events
        for user in event["users"]
    ]

    # right now we'll "mock out" the pagination related parameters (next and previous)
    # rather than use a Pagination class from drf (as currently it operates on querysets). We've decided on this
    # to make this response schema consistent with the rest of the public API + make it easy to add pagination
    # here in the future (should we decide to migrate "final_shifts" to an actual model)
    return Response(
        {
            "count": len(data),
            "next": None,
            "previous": None,
            "results": data,
        }
    )
|
||||
|
|
|
|||
|
|
@ -76,6 +76,7 @@ class QualityReport(TypedDict):
|
|||
class ScheduleEventUser(TypedDict):
    """Shape of one entry in a schedule event's "users" list."""

    # public primary key and display fields exposed through the schedule APIs
    display_name: str
    pk: str
    email: str
|
||||
|
||||
|
||||
class ScheduleEventShift(TypedDict):
|
||||
|
|
@ -97,8 +98,17 @@ class ScheduleEvent(TypedDict):
|
|||
shift: ScheduleEventShift
|
||||
|
||||
|
||||
class ScheduleFinalShift(TypedDict):
    """Shape of one row in a schedule's final shifts export."""

    user_pk: str
    user_email: str
    user_username: str
    # NOTE(review): annotated as str, but the producing view passes the
    # event's "start"/"end" values through unchanged — confirm they are
    # already serialized strings at that point
    shift_start: str
    shift_end: str
|
||||
|
||||
|
||||
ScheduleEvents = List[ScheduleEvent]
|
||||
ScheduleEventIntervals = List[List[datetime.datetime]]
|
||||
ScheduleFinalShifts = List[ScheduleFinalShift]
|
||||
|
||||
|
||||
def generate_public_primary_key_for_oncall_schedule_channel():
|
||||
|
|
@ -323,6 +333,7 @@ class OnCallSchedule(PolymorphicModel):
|
|||
"users": [
|
||||
{
|
||||
"display_name": user.username,
|
||||
"email": user.email,
|
||||
"pk": user.public_primary_key,
|
||||
}
|
||||
for user in shift["users"]
|
||||
|
|
|
|||
|
|
@ -93,7 +93,7 @@ def test_filter_events(make_organization, make_user_for_organization, make_sched
|
|||
"is_gap": False,
|
||||
"priority_level": on_call_shift.priority_level,
|
||||
"missing_users": [],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key}],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key, "email": user.email}],
|
||||
"shift": {"pk": on_call_shift.public_primary_key},
|
||||
"source": "api",
|
||||
}
|
||||
|
|
@ -114,7 +114,7 @@ def test_filter_events(make_organization, make_user_for_organization, make_sched
|
|||
"is_gap": False,
|
||||
"priority_level": None,
|
||||
"missing_users": [],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key}],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key, "email": user.email}],
|
||||
"shift": {"pk": override.public_primary_key},
|
||||
"source": "api",
|
||||
}
|
||||
|
|
@ -179,7 +179,7 @@ def test_filter_events_include_gaps(make_organization, make_user_for_organizatio
|
|||
"is_gap": False,
|
||||
"priority_level": on_call_shift.priority_level,
|
||||
"missing_users": [],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key}],
|
||||
"users": [{"display_name": user.username, "pk": user.public_primary_key, "email": user.email}],
|
||||
"shift": {"pk": on_call_shift.public_primary_key},
|
||||
"source": "api",
|
||||
},
|
||||
|
|
@ -688,7 +688,9 @@ def test_preview_shift(make_organization, make_user_for_organization, make_sched
|
|||
"is_gap": False,
|
||||
"priority_level": new_shift.priority_level,
|
||||
"missing_users": [],
|
||||
"users": [{"display_name": other_user.username, "pk": other_user.public_primary_key}],
|
||||
"users": [
|
||||
{"display_name": other_user.username, "pk": other_user.public_primary_key, "email": other_user.email}
|
||||
],
|
||||
"shift": {"pk": new_shift.public_primary_key},
|
||||
"source": "api",
|
||||
}
|
||||
|
|
@ -846,7 +848,9 @@ def test_preview_override_shift(make_organization, make_user_for_organization, m
|
|||
"is_gap": False,
|
||||
"priority_level": None,
|
||||
"missing_users": [],
|
||||
"users": [{"display_name": other_user.username, "pk": other_user.public_primary_key}],
|
||||
"users": [
|
||||
{"display_name": other_user.username, "pk": other_user.public_primary_key, "email": other_user.email}
|
||||
],
|
||||
"shift": {"pk": new_shift.public_primary_key},
|
||||
"source": "api",
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue