commit
fe48a1dec4
67 changed files with 2586 additions and 840 deletions
23
CHANGELOG.md
23
CHANGELOG.md
|
|
@ -5,7 +5,25 @@ All notable changes to this project will be documented in this file.
|
|||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## Unreleased
|
||||
## v1.3.23 (2023-08-10)
|
||||
|
||||
### Added
|
||||
|
||||
- Shift Swap Requests Web UI ([#2593](https://github.com/grafana/oncall/issues/2593))
|
||||
- Final schedule shifts should lay in one line ([#1665](https://github.com/grafana/oncall/issues/1665))
|
||||
- Add backend support for push notification sounds with custom extensions by @vadimkerr ([#2759](https://github.com/grafana/oncall/pull/2759))
|
||||
|
||||
### Changed
|
||||
|
||||
- Add stack slug to organization options for direct paging Slash command by @vadimkerr ([#2743](https://github.com/grafana/oncall/pull/2743))
|
||||
- Avoid creating (or notifying about) potential event splits resulting from untaken swap requests ([#2748](https://github.com/grafana/oncall/pull/2748))
|
||||
- Refactor heartbeats into a periodic task ([2723](https://github.com/grafana/oncall/pull/2723))
|
||||
|
||||
### Fixed
|
||||
|
||||
- Do not show override shortcut when web overrides are disabled ([#2745](https://github.com/grafana/oncall/pull/2745))
|
||||
- Handle ical schedule import with duplicated event UIDs ([#2760](https://github.com/grafana/oncall/pull/2760))
|
||||
- Allow Editor to access Phone Verification ([#2772](https://github.com/grafana/oncall/pull/2772))
|
||||
|
||||
## v1.3.22 (2023-08-03)
|
||||
|
||||
|
|
@ -16,6 +34,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||
### Changed
|
||||
|
||||
- Skip past due swap requests when calculating events ([2718](https://github.com/grafana/oncall/pull/2718))
|
||||
- Update schedule slack notifications to use schedule final events by @Ferril ([#2710](https://github.com/grafana/oncall/pull/2710))
|
||||
|
||||
### Fixed
|
||||
|
||||
|
|
@ -28,7 +47,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||
### Added
|
||||
|
||||
- [Helm] Add `extraContainers` for engine, celery and migrate-job pods to define sidecars by @lu1as ([#2650](https://github.com/grafana/oncall/pull/2650))
|
||||
– Rework of AlertManager integration ([#2643](https://github.com/grafana/oncall/pull/2643))
|
||||
– Rework of AlertManager integration ([#2643](https://github.com/grafana/oncall/pull/2643))
|
||||
|
||||
## v1.3.20 (2023-07-31)
|
||||
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ environment variable.
|
|||
**NOTE**: the `docker-compose-developer.yml` file uses some syntax/features that are only supported by Docker Compose
|
||||
v2. For instructions on how to enable this (if you haven't already done so),
|
||||
see [here](https://www.docker.com/blog/announcing-compose-v2-general-availability/). Ensure you have Docker Compose
|
||||
version 2.10 or above installed - update instructions are [here](https://docs.docker.com/compose/install/linux/).
|
||||
version 2.20.2 or above installed - update instructions are [here](https://docs.docker.com/compose/install/linux/).
|
||||
2. Run `make init start`. By default this will run everything in Docker, using SQLite as the database and Redis as the
|
||||
message broker/cache. See [`COMPOSE_PROFILES`](#compose_profiles) below for more details on how to swap
|
||||
out/disable which components are run in Docker.
|
||||
|
|
@ -427,3 +427,32 @@ backwards compatible
|
|||
|
||||
See [django-migration-linter checklist](https://github.com/3YOURMIND/django-migration-linter/blob/main/docs/incompatibilities.md)
|
||||
for the common mistakes and best practices
|
||||
|
||||
### Removing a nullable field from a model
|
||||
|
||||
> This only works for nullable fields (fields with `null=True` in the field definition).
|
||||
>
|
||||
> DO NOT USE THIS APPROACH FOR NON-NULLABLE FIELDS, IT CAN BREAK THINGS!
|
||||
|
||||
1. Remove all usages of the field you want to remove. Make sure the field is not used anywhere, including filtering,
|
||||
querying, or explicit field referencing from views, models, forms, serializers, etc.
|
||||
2. Remove the field from the model definition.
|
||||
3. Generate migrations using the following management command:
|
||||
|
||||
```python
|
||||
python manage.py remove_field <APP_LABEL> <MODEL_NAME> <FIELD_NAME>
|
||||
```
|
||||
|
||||
Example: `python manage.py remove_field alerts AlertReceiveChannel restricted_at`
|
||||
|
||||
This command will generate two migrations that **MUST BE DEPLOYED IN TWO SEPARATE RELEASES**:
|
||||
- Migration #1 will remove the field from Django's state, but not from the database. Release #1 must include
|
||||
migration #1, and must not include migration #2.
|
||||
- Migration #2 will remove the field from the database. Stash this migration for use in a future release.
|
||||
|
||||
4. Make release #1 (removal of the field + migration #1). Once released and deployed, Django will not be
|
||||
aware of this field anymore, but the field will be still present in the database. This allows for a gradual migration,
|
||||
where the field is no longer used in new code, but still exists in the database for backward compatibility with old code.
|
||||
5. In any subsequent release, include migration #2 (the one that removes the field from the database).
|
||||
6. After releasing and deploying migration #2, the field will be removed both from the database and Django state,
|
||||
without backward compatibility issues or downtime 🎉
|
||||
|
|
|
|||
|
|
@ -144,12 +144,16 @@ services:
|
|||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
required: false
|
||||
mysql:
|
||||
condition: service_healthy
|
||||
required: false
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
required: false
|
||||
redis:
|
||||
condition: service_healthy
|
||||
required: false
|
||||
profiles:
|
||||
- engine
|
||||
|
||||
|
|
@ -208,7 +212,7 @@ services:
|
|||
container_name: mysql
|
||||
labels: *oncall-labels
|
||||
image: mysql:8.0.32
|
||||
command: >
|
||||
command: >-
|
||||
--default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4
|
||||
--collation-server=utf8mb4_unicode_ci --max_connections=1024
|
||||
restart: always
|
||||
|
|
@ -236,7 +240,7 @@ services:
|
|||
container_name: mysql_to_create_grafana_db
|
||||
labels: *oncall-labels
|
||||
image: mysql:8.0.32
|
||||
command: >
|
||||
command: >-
|
||||
bash -c "mysql -h mysql -uroot -pempty
|
||||
-e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'"
|
||||
depends_on:
|
||||
|
|
@ -276,7 +280,7 @@ services:
|
|||
container_name: postgres_to_create_grafana_db
|
||||
labels: *oncall-labels
|
||||
image: postgres:14.4
|
||||
command: >
|
||||
command: >-
|
||||
bash -c "PGPASSWORD=empty psql -U postgres
|
||||
-h postgres -tc \"SELECT 1 FROM pg_database WHERE datname = 'grafana'\" | grep -q 1 || PGPASSWORD=empty psql
|
||||
-U postgres -h postgres -c \"CREATE DATABASE grafana\""
|
||||
|
|
@ -326,8 +330,10 @@ services:
|
|||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
required: false
|
||||
mysql:
|
||||
condition: service_healthy
|
||||
required: false
|
||||
profiles:
|
||||
- grafana
|
||||
volumes:
|
||||
|
|
|
|||
|
|
@ -28,8 +28,7 @@ services:
|
|||
restart: always
|
||||
ports:
|
||||
- "8080:8080"
|
||||
command: >
|
||||
sh -c "uwsgi --ini uwsgi.ini"
|
||||
command: sh -c "uwsgi --ini uwsgi.ini"
|
||||
environment: *oncall-environment
|
||||
depends_on:
|
||||
mysql:
|
||||
|
|
@ -68,7 +67,7 @@ services:
|
|||
|
||||
mysql:
|
||||
image: mysql:8.0.32
|
||||
command: >
|
||||
command: >-
|
||||
--default-authentication-plugin=mysql_native_password
|
||||
--character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
|
||||
restart: always
|
||||
|
|
@ -123,7 +122,7 @@ services:
|
|||
|
||||
mysql_to_create_grafana_db:
|
||||
image: mysql:8.0.32
|
||||
command: >
|
||||
command: >-
|
||||
bash -c "mysql -h ${MYSQL_HOST:-mysql} -uroot -p${MYSQL_PASSWORD:?err}
|
||||
-e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'"
|
||||
depends_on:
|
||||
|
|
|
|||
|
|
@ -22,8 +22,7 @@ services:
|
|||
restart: always
|
||||
ports:
|
||||
- "8080:8080"
|
||||
command: >
|
||||
sh -c "uwsgi --ini uwsgi.ini"
|
||||
command: sh -c "uwsgi --ini uwsgi.ini"
|
||||
environment: *oncall-environment
|
||||
volumes:
|
||||
- oncall_data:/var/lib/oncall
|
||||
|
|
|
|||
|
|
@ -1,17 +1,11 @@
|
|||
import datetime
|
||||
import json
|
||||
from copy import copy
|
||||
import typing
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.schedules.ical_events import ical_events
|
||||
from apps.schedules.ical_utils import (
|
||||
calculate_shift_diff,
|
||||
event_start_end_all_day_with_respect_to_type,
|
||||
get_icalendar_tz_or_utc,
|
||||
get_usernames_from_ical_event,
|
||||
memoized_users_in_ical,
|
||||
)
|
||||
from apps.schedules.ical_utils import calculate_shift_diff, parse_event_uid
|
||||
from apps.slack.scenarios import scenario_step
|
||||
from apps.slack.slack_client import SlackClientWithErrorHandling
|
||||
from apps.slack.slack_client.exceptions import SlackAPIException, SlackAPITokenException
|
||||
|
|
@ -19,159 +13,45 @@ from common.custom_celery_tasks import shared_dedicated_queue_retry_task
|
|||
|
||||
from .task_logger import task_logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from apps.schedules.models import OnCallSchedule
|
||||
|
||||
def get_current_shifts_from_ical(calendar, schedule, min_priority=0):
|
||||
calendar_tz = get_icalendar_tz_or_utc(calendar)
|
||||
now = datetime.datetime.now(timezone.utc)
|
||||
events_from_ical_for_three_days = ical_events.get_events_from_ical_between(
|
||||
calendar, now - datetime.timedelta(days=1), now + datetime.timedelta(days=1)
|
||||
)
|
||||
shifts = {}
|
||||
current_users = {}
|
||||
for event in events_from_ical_for_three_days:
|
||||
usernames, priority = get_usernames_from_ical_event(event)
|
||||
users = memoized_users_in_ical(tuple(usernames), schedule.organization)
|
||||
if len(users) > 0:
|
||||
event_start, event_end, all_day_event = event_start_end_all_day_with_respect_to_type(event, calendar_tz)
|
||||
|
||||
if event["UID"] in shifts:
|
||||
existing_event = shifts[event["UID"]]
|
||||
if existing_event["start"] < now < existing_event["end"]:
|
||||
continue
|
||||
shifts[event["UID"]] = {
|
||||
"users": [u.pk for u in users],
|
||||
"start": event_start,
|
||||
"end": event_end,
|
||||
"all_day": all_day_event,
|
||||
"priority": priority + min_priority, # increase priority for overrides
|
||||
"priority_increased_by": min_priority,
|
||||
def convert_prev_shifts_to_new_format(prev_shifts: dict, schedule: "OnCallSchedule") -> list:
|
||||
new_prev_shifts = []
|
||||
user_ids = []
|
||||
users_info: typing.Dict[int, typing.Dict[str, str]] = {}
|
||||
for shift in prev_shifts.values():
|
||||
user_ids.extend(shift.get("users", []))
|
||||
prev_users = schedule.organization.users.filter(id__in=user_ids)
|
||||
for user in prev_users:
|
||||
users_info.setdefault(
|
||||
user.id,
|
||||
{
|
||||
"display_name": user.username,
|
||||
"email": user.email,
|
||||
"pk": user.public_primary_key,
|
||||
"avatar_full": user.avatar_full_url,
|
||||
},
|
||||
)
|
||||
for uid, shift in prev_shifts.items():
|
||||
shift_pk, _ = parse_event_uid(uid)
|
||||
new_prev_shifts.append(
|
||||
{
|
||||
"users": [users_info[user_pk] for user_pk in shift["users"]],
|
||||
"start": shift["start"],
|
||||
"end": shift["end"],
|
||||
"all_day": shift["all_day"],
|
||||
"priority_level": shift["priority"],
|
||||
"shift": {"pk": shift_pk},
|
||||
}
|
||||
current_users[event["UID"]] = users
|
||||
|
||||
return shifts, current_users
|
||||
|
||||
|
||||
def get_next_shifts_from_ical(calendar, schedule, min_priority=0, days_to_lookup=3):
|
||||
calendar_tz = get_icalendar_tz_or_utc(calendar)
|
||||
now = datetime.datetime.now(timezone.utc)
|
||||
next_events_from_ical = ical_events.get_events_from_ical_between(
|
||||
calendar, now - datetime.timedelta(days=1), now + datetime.timedelta(days=days_to_lookup)
|
||||
)
|
||||
shifts = {}
|
||||
for event in next_events_from_ical:
|
||||
usernames, priority = get_usernames_from_ical_event(event)
|
||||
users = memoized_users_in_ical(tuple(usernames), schedule.organization)
|
||||
if len(users) > 0:
|
||||
event_start, event_end, all_day_event = event_start_end_all_day_with_respect_to_type(event, calendar_tz)
|
||||
|
||||
# next_shifts are not stored in db so we can use User objects directly
|
||||
shifts[f"{event_start.timestamp()}_{event['UID']}"] = {
|
||||
"users": users,
|
||||
"start": event_start,
|
||||
"end": event_end,
|
||||
"all_day": all_day_event,
|
||||
"priority": priority + min_priority, # increase priority for overrides
|
||||
"priority_increased_by": min_priority,
|
||||
}
|
||||
|
||||
return shifts
|
||||
|
||||
|
||||
def recalculate_shifts_with_respect_to_priority(shifts, users=None):
|
||||
flag = True
|
||||
while flag:
|
||||
splitted_shifts = {}
|
||||
uids_to_pop = set()
|
||||
splitted = False
|
||||
flag = False
|
||||
for outer_k, outer_shift in shifts.items():
|
||||
if not splitted:
|
||||
for inner_k, inner_shift in shifts.items():
|
||||
if outer_k == inner_k:
|
||||
continue
|
||||
else:
|
||||
if outer_shift.get("priority", 0) > inner_shift.get("priority", 0):
|
||||
if outer_shift["start"] > inner_shift["start"] and outer_shift["end"] < inner_shift["end"]:
|
||||
new_uid_r = f"{inner_k}-split-r"
|
||||
new_uid_l = f"{inner_k}-split-l"
|
||||
splitted_shift_left = copy(inner_shift)
|
||||
splitted_shift_right = copy(inner_shift)
|
||||
splitted_shift_left["end"] = outer_shift["start"]
|
||||
splitted_shift_right["start"] = outer_shift["end"]
|
||||
splitted_shift_left["all_day"] = False
|
||||
splitted_shift_right["all_day"] = False
|
||||
splitted_shifts[new_uid_l] = splitted_shift_left
|
||||
splitted_shifts[new_uid_r] = splitted_shift_right
|
||||
uids_to_pop.add(inner_k)
|
||||
if users is not None:
|
||||
users[new_uid_l] = users[inner_k]
|
||||
users[new_uid_r] = users[inner_k]
|
||||
|
||||
splitted = True
|
||||
flag = True
|
||||
break
|
||||
elif outer_shift["start"] <= inner_shift["start"] < outer_shift["end"] < inner_shift["end"]:
|
||||
inner_shift["start"] = outer_shift["end"]
|
||||
flag = True
|
||||
elif outer_shift["end"] >= inner_shift["end"] > outer_shift["start"] > inner_shift["start"]:
|
||||
inner_shift["end"] = outer_shift["start"]
|
||||
flag = True
|
||||
elif (
|
||||
outer_shift["start"] <= inner_shift["start"]
|
||||
and outer_shift["end"] >= inner_shift["end"]
|
||||
):
|
||||
uids_to_pop.add(inner_k)
|
||||
flag = True
|
||||
else:
|
||||
flag = False
|
||||
elif outer_shift.get("priority", 0) < inner_shift.get("priority", 0):
|
||||
if inner_shift["start"] > outer_shift["start"] and inner_shift["end"] < outer_shift["end"]:
|
||||
new_uid_r = f"{outer_k}-split-r"
|
||||
new_uid_l = f"{outer_k}-split-l"
|
||||
splitted_shift_left = copy(outer_shift)
|
||||
splitted_shift_right = copy(outer_shift)
|
||||
splitted_shift_left["all_day"] = False
|
||||
splitted_shift_right["all_day"] = False
|
||||
splitted_shift_left["end"] = inner_shift["start"]
|
||||
splitted_shift_right["start"] = inner_shift["end"]
|
||||
splitted_shifts[new_uid_l] = splitted_shift_left
|
||||
splitted_shifts[new_uid_r] = splitted_shift_right
|
||||
uids_to_pop.add(outer_k)
|
||||
|
||||
if users is not None:
|
||||
users[new_uid_l] = users[outer_k]
|
||||
users[new_uid_r] = users[outer_k]
|
||||
|
||||
splitted = True
|
||||
flag = True
|
||||
break
|
||||
elif inner_shift["start"] <= outer_shift["start"] < inner_shift["end"] < outer_shift["end"]:
|
||||
outer_shift["start"] = inner_shift["end"]
|
||||
flag = True
|
||||
elif inner_shift["end"] >= outer_shift["end"] > inner_shift["start"] > outer_shift["start"]:
|
||||
outer_shift["end"] = inner_shift["start"]
|
||||
flag = True
|
||||
elif (
|
||||
inner_shift["start"] <= outer_shift["start"]
|
||||
and inner_shift["end"] >= outer_shift["end"]
|
||||
):
|
||||
uids_to_pop.add(outer_k)
|
||||
flag = True
|
||||
else:
|
||||
flag = False
|
||||
else:
|
||||
flag = False
|
||||
else:
|
||||
break
|
||||
|
||||
shifts.update(splitted_shifts)
|
||||
for uid in uids_to_pop:
|
||||
shifts.pop(uid)
|
||||
)
|
||||
return new_prev_shifts
|
||||
|
||||
|
||||
@shared_dedicated_queue_retry_task()
|
||||
def notify_ical_schedule_shift(schedule_pk):
|
||||
task_logger.info(f"Notify ical schedule shift {schedule_pk}")
|
||||
task_logger.info(f"Start notify ical schedule shift {schedule_pk}")
|
||||
from apps.schedules.models import OnCallSchedule
|
||||
|
||||
try:
|
||||
|
|
@ -183,160 +63,106 @@ def notify_ical_schedule_shift(schedule_pk):
|
|||
return
|
||||
|
||||
if schedule.organization.slack_team_identity is None:
|
||||
task_logger.info(f"Trying to notify ical schedule shift with no slack team identity {schedule_pk}")
|
||||
task_logger.info(
|
||||
f"Trying to notify ical schedule shift with no slack team identity {schedule_pk}, "
|
||||
f"organization {schedule.organization_id}"
|
||||
)
|
||||
return
|
||||
elif schedule.organization.deleted_at:
|
||||
task_logger.info(f"Trying to notify ical schedule shift from deleted organization {schedule_pk}")
|
||||
task_logger.info(
|
||||
f"Trying to notify ical schedule shift from deleted organization {schedule_pk}, "
|
||||
f"organization {schedule.organization_id}"
|
||||
)
|
||||
return
|
||||
|
||||
task_logger.info(f"Notify ical schedule shift {schedule_pk}, organization {schedule.organization_id}")
|
||||
|
||||
MIN_DAYS_TO_LOOKUP_FOR_THE_END_OF_EVENT = 3
|
||||
|
||||
now = datetime.datetime.now(timezone.utc)
|
||||
# get list of iCalendars from current iCal files. If there is more than one calendar, primary calendar will always
|
||||
# be the first
|
||||
current_calendars = schedule.get_icalendars()
|
||||
|
||||
current_shifts = {}
|
||||
# expected current_shifts structure:
|
||||
# {
|
||||
# some uid: {
|
||||
# "users": [users pks],
|
||||
# "start": event start date,
|
||||
# "end": event end date,
|
||||
# "all_day": bool if event has all-day type,
|
||||
# "priority": priority level,
|
||||
# "priority_increased_by": min priority level of primary calendar, (for primary calendar event it is 0)
|
||||
# },
|
||||
# }
|
||||
current_shifts = schedule.final_events(now, now, with_empty=False, with_gap=False, ignore_untaken_swaps=True)
|
||||
|
||||
# Current_user dict exists because it's bad idea to serialize User objects.
|
||||
# Instead users' pks are stored in db for calculation related to shift diff.
|
||||
# When it is needed to pass shift's user (e.g. in def get_report_blocks_ical())
|
||||
# we take users from current_users{} by shift uuid and replace users' pk
|
||||
current_users = {}
|
||||
prev_shifts = json.loads(schedule.current_shifts) if not schedule.empty_oncall else []
|
||||
prev_shifts_updated = False
|
||||
# convert prev_shifts to new events format for compatibility with the previous version of this task
|
||||
if prev_shifts and isinstance(prev_shifts, dict):
|
||||
prev_shifts = convert_prev_shifts_to_new_format(prev_shifts, schedule)
|
||||
prev_shifts_updated = True
|
||||
|
||||
overrides_priority = 0
|
||||
for calendar in current_calendars:
|
||||
if calendar is not None:
|
||||
current_shifts_result, current_users_result = get_current_shifts_from_ical(
|
||||
calendar,
|
||||
schedule,
|
||||
overrides_priority,
|
||||
)
|
||||
if overrides_priority == 0 and current_shifts_result:
|
||||
overrides_priority = max([current_shifts_result[uid]["priority"] for uid in current_shifts_result]) + 1
|
||||
current_shifts.update(current_shifts_result)
|
||||
current_users.update(current_users_result)
|
||||
|
||||
recalculate_shifts_with_respect_to_priority(current_shifts, current_users)
|
||||
|
||||
# drop events that don't intersection with current time
|
||||
drop = []
|
||||
for uid, current_shift in current_shifts.items():
|
||||
if not current_shift["start"] < now < current_shift["end"]:
|
||||
drop.append(uid)
|
||||
for item in drop:
|
||||
current_shifts.pop(item)
|
||||
|
||||
# compare events from prev and current shifts
|
||||
prev_shifts = json.loads(schedule.current_shifts) if not schedule.empty_oncall else {}
|
||||
# convert datetimes which was dumped to str back to datetime to calculate shift diff correct
|
||||
str_format = "%Y-%m-%d %X%z"
|
||||
for prev_shift in prev_shifts.values():
|
||||
for prev_shift in prev_shifts:
|
||||
prev_shift["start"] = datetime.datetime.strptime(prev_shift["start"], str_format)
|
||||
prev_shift["end"] = datetime.datetime.strptime(prev_shift["end"], str_format)
|
||||
|
||||
shift_changed, diff_uids = calculate_shift_diff(current_shifts, prev_shifts)
|
||||
shift_changed, diff_shifts = calculate_shift_diff(current_shifts, prev_shifts)
|
||||
|
||||
if shift_changed:
|
||||
task_logger.info(f"shifts_changed: {diff_uids}")
|
||||
# Get only new/changed shifts to send a reminder message.
|
||||
new_shifts = []
|
||||
for uid in diff_uids:
|
||||
# using copy to not to mutate original current_shifts dict which will be stored in db as current_shifts
|
||||
new_shift = copy(current_shifts[uid])
|
||||
# replace users' pk by objects to make reminder message from new shifts
|
||||
new_shift["users"] = current_users[uid]
|
||||
new_shifts.append(new_shift)
|
||||
new_shifts = sorted(new_shifts, key=lambda shift: shift["start"])
|
||||
|
||||
if len(new_shifts) != 0:
|
||||
days_to_lookup = (new_shifts[-1]["end"].date() - now.date()).days + 1
|
||||
days_to_lookup = max([days_to_lookup, MIN_DAYS_TO_LOOKUP_FOR_THE_END_OF_EVENT])
|
||||
else:
|
||||
days_to_lookup = MIN_DAYS_TO_LOOKUP_FOR_THE_END_OF_EVENT
|
||||
|
||||
next_shifts = {}
|
||||
next_overrides_priority = 0
|
||||
|
||||
for calendar in current_calendars:
|
||||
if calendar is not None:
|
||||
next_shifts_result = get_next_shifts_from_ical(
|
||||
calendar,
|
||||
schedule,
|
||||
next_overrides_priority,
|
||||
days_to_lookup=days_to_lookup,
|
||||
)
|
||||
if next_overrides_priority == 0 and next_shifts_result:
|
||||
next_overrides_priority = (
|
||||
max([next_shifts_result[uid]["priority"] for uid in next_shifts_result]) + 1
|
||||
)
|
||||
|
||||
next_shifts.update(next_shifts_result)
|
||||
|
||||
recalculate_shifts_with_respect_to_priority(next_shifts)
|
||||
|
||||
# drop events that already started
|
||||
drop = []
|
||||
for uid, next_shift in next_shifts.items():
|
||||
if now > next_shift["start"]:
|
||||
drop.append(uid)
|
||||
for item in drop:
|
||||
next_shifts.pop(item)
|
||||
|
||||
next_shifts_from_ical = sorted(next_shifts.values(), key=lambda shift: shift["start"])
|
||||
|
||||
upcoming_shifts = []
|
||||
# Add the earliest next_shift
|
||||
if len(next_shifts_from_ical) > 0:
|
||||
earliest_shift = next_shifts_from_ical[0]
|
||||
upcoming_shifts.append(earliest_shift)
|
||||
# Check if there are next shifts with the same start as the earliest
|
||||
for shift in next_shifts_from_ical[1:]:
|
||||
if shift["start"] == earliest_shift["start"]:
|
||||
upcoming_shifts.append(shift)
|
||||
|
||||
empty_oncall = len(current_shifts) == 0
|
||||
if empty_oncall:
|
||||
schedule.empty_oncall = True
|
||||
else:
|
||||
schedule.empty_oncall = False
|
||||
# Do not notify if there is no difference between current and previous shifts
|
||||
if not shift_changed:
|
||||
task_logger.info(f"No shift diff found for schedule {schedule_pk}, organization {schedule.organization_id}")
|
||||
# If prev shifts were converted to a new format, update related field in db
|
||||
if prev_shifts_updated:
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
schedule.save(update_fields=["current_shifts"])
|
||||
return
|
||||
|
||||
schedule.save(update_fields=["current_shifts", "empty_oncall"])
|
||||
new_shifts = sorted(diff_shifts, key=lambda shift: shift["start"])
|
||||
|
||||
if len(new_shifts) > 0 or empty_oncall:
|
||||
task_logger.info(f"new_shifts: {new_shifts}")
|
||||
# get days_to_lookup for next shifts
|
||||
if len(new_shifts) != 0:
|
||||
max_end_date = max([shift["end"].date() for shift in new_shifts])
|
||||
days_to_lookup = (max_end_date - now.date()).days + 1
|
||||
days_to_lookup = max([days_to_lookup, MIN_DAYS_TO_LOOKUP_FOR_THE_END_OF_EVENT])
|
||||
else:
|
||||
days_to_lookup = MIN_DAYS_TO_LOOKUP_FOR_THE_END_OF_EVENT
|
||||
|
||||
datetime_end = now + datetime.timedelta(days=days_to_lookup)
|
||||
|
||||
next_shifts_unfiltered = schedule.final_events(
|
||||
now, datetime_end, with_empty=False, with_gap=False, ignore_untaken_swaps=True
|
||||
)
|
||||
# drop events that already started
|
||||
next_shifts = []
|
||||
for next_shift in next_shifts_unfiltered:
|
||||
if now < next_shift["start"]:
|
||||
next_shifts.append(next_shift)
|
||||
|
||||
upcoming_shifts = []
|
||||
# Add the earliest next_shift
|
||||
if len(next_shifts) > 0:
|
||||
earliest_shift = next_shifts[0]
|
||||
upcoming_shifts.append(earliest_shift)
|
||||
# Check if there are next shifts with the same start as the earliest
|
||||
for shift in next_shifts[1:]:
|
||||
if shift["start"] == earliest_shift["start"]:
|
||||
upcoming_shifts.append(shift)
|
||||
|
||||
schedule.empty_oncall = len(current_shifts) == 0
|
||||
if not schedule.empty_oncall:
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
|
||||
schedule.save(update_fields=["current_shifts", "empty_oncall"])
|
||||
|
||||
if len(new_shifts) > 0 or schedule.empty_oncall:
|
||||
task_logger.info(f"new_shifts: {new_shifts}")
|
||||
if schedule.notify_oncall_shift_freq != OnCallSchedule.NotifyOnCallShiftFreq.NEVER:
|
||||
slack_client = SlackClientWithErrorHandling(schedule.organization.slack_team_identity.bot_access_token)
|
||||
step = scenario_step.ScenarioStep.get_step("schedules", "EditScheduleShiftNotifyStep")
|
||||
report_blocks = step.get_report_blocks_ical(new_shifts, upcoming_shifts, schedule, empty_oncall)
|
||||
report_blocks = step.get_report_blocks_ical(new_shifts, upcoming_shifts, schedule, schedule.empty_oncall)
|
||||
|
||||
if schedule.notify_oncall_shift_freq != OnCallSchedule.NotifyOnCallShiftFreq.NEVER:
|
||||
try:
|
||||
slack_client.api_call(
|
||||
"chat.postMessage",
|
||||
channel=schedule.channel,
|
||||
blocks=report_blocks,
|
||||
text=f"On-call shift for schedule {schedule.name} has changed",
|
||||
)
|
||||
except SlackAPITokenException:
|
||||
pass
|
||||
except SlackAPIException as e:
|
||||
if e.response["error"] == "channel_not_found":
|
||||
print(e)
|
||||
elif e.response["error"] == "is_archived":
|
||||
print(e)
|
||||
elif e.response["error"] == "invalid_auth":
|
||||
print(e)
|
||||
else:
|
||||
raise e
|
||||
try:
|
||||
slack_client.api_call(
|
||||
"chat.postMessage",
|
||||
channel=schedule.channel,
|
||||
blocks=report_blocks,
|
||||
text=f"On-call shift for schedule {schedule.name} has changed",
|
||||
)
|
||||
except SlackAPITokenException:
|
||||
pass
|
||||
except SlackAPIException as e:
|
||||
expected_exceptions = ["channel_not_found", "is_archived", "invalid_auth"]
|
||||
if e.response["error"] in expected_exceptions:
|
||||
print(e)
|
||||
else:
|
||||
raise e
|
||||
|
|
|
|||
|
|
@ -3,14 +3,13 @@ import json
|
|||
import textwrap
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import icalendar
|
||||
import pytest
|
||||
import pytz
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.alerts.tasks.notify_ical_schedule_shift import get_current_shifts_from_ical, notify_ical_schedule_shift
|
||||
from apps.alerts.tasks.notify_ical_schedule_shift import notify_ical_schedule_shift
|
||||
from apps.schedules.ical_utils import memoized_users_in_ical
|
||||
from apps.schedules.models import CustomOnCallShift, OnCallScheduleCalendar, OnCallScheduleICal
|
||||
from apps.schedules.models import CustomOnCallShift, OnCallScheduleCalendar, OnCallScheduleICal, OnCallScheduleWeb
|
||||
|
||||
ICAL_DATA = """
|
||||
BEGIN:VCALENDAR
|
||||
|
|
@ -72,7 +71,7 @@ def test_current_overrides_ical_schedule_is_none(
|
|||
)
|
||||
|
||||
# this should not raise
|
||||
notify_ical_schedule_shift(ical_schedule.oncallschedule_ptr_id)
|
||||
notify_ical_schedule_shift(ical_schedule.pk)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
|
@ -102,7 +101,7 @@ def test_next_shift_notification_long_shifts(
|
|||
with patch("apps.alerts.tasks.notify_ical_schedule_shift.datetime", Mock(wraps=datetime)) as mock_datetime:
|
||||
mock_datetime.datetime.now.return_value = datetime.datetime(2021, 9, 29, 12, 0, tzinfo=pytz.UTC)
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(ical_schedule.oncallschedule_ptr_id)
|
||||
notify_ical_schedule_shift(ical_schedule.pk)
|
||||
|
||||
slack_blocks = mock_slack_api_call.call_args_list[0][1]["blocks"]
|
||||
notification = slack_blocks[0]["text"]["text"]
|
||||
|
|
@ -176,12 +175,12 @@ def test_overrides_changes_no_current_no_triggering_notification(
|
|||
schedule_class=OnCallScheduleCalendar,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=ical_before,
|
||||
cached_ical_file_overrides=ical_after,
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=ical_before,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - timezone.timedelta(days=7)
|
||||
start_date = now - timezone.timedelta(days=7, minutes=1)
|
||||
|
||||
data = {
|
||||
"start": start_date,
|
||||
|
|
@ -197,14 +196,15 @@ def test_overrides_changes_no_current_no_triggering_notification(
|
|||
on_call_shift.schedules.add(schedule)
|
||||
|
||||
# setup current shifts before checking/triggering for notifications
|
||||
calendar = icalendar.Calendar.from_ical(schedule._ical_file_primary)
|
||||
current_shifts, _ = get_current_shifts_from_ical(calendar, schedule, 0)
|
||||
current_shifts = schedule.final_events(now, now, False, False)
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
schedule.empty_oncall = False
|
||||
schedule.cached_ical_file_overrides = ical_after
|
||||
schedule.prev_ical_file_overrides = ical_before
|
||||
schedule.save()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.oncallschedule_ptr_id)
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
|
|
@ -231,7 +231,7 @@ def test_no_changes_no_triggering_notification(
|
|||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - timezone.timedelta(days=7)
|
||||
start_date = now - timezone.timedelta(days=7, minutes=1)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
|
|
@ -246,14 +246,13 @@ def test_no_changes_no_triggering_notification(
|
|||
on_call_shift.schedules.add(schedule)
|
||||
|
||||
# setup current shifts before checking/triggering for notifications
|
||||
calendar = icalendar.Calendar.from_ical(schedule._ical_file_primary)
|
||||
current_shifts, _ = get_current_shifts_from_ical(calendar, schedule, 0)
|
||||
current_shifts = schedule.final_events(now, now, False, False)
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
schedule.empty_oncall = False
|
||||
schedule.save()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.oncallschedule_ptr_id)
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
|
|
@ -301,11 +300,211 @@ def test_current_shift_changes_trigger_notification(
|
|||
schedule.save()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.oncallschedule_ptr_id)
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert mock_slack_api_call.called
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize("swap_taken", [False, True])
|
||||
def test_current_shift_changes_swap_split(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
make_shift_swap_request,
|
||||
swap_taken,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
user2 = make_user(organization=organization, username="user2")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
today = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
duration = timezone.timedelta(hours=23, minutes=59, seconds=59)
|
||||
data = {
|
||||
"start": today,
|
||||
"rotation_start": today,
|
||||
"duration": duration,
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
|
||||
# setup in progress swap request
|
||||
swap_request = make_shift_swap_request(
|
||||
schedule,
|
||||
user1,
|
||||
swap_start=today,
|
||||
swap_end=today + timezone.timedelta(days=2),
|
||||
)
|
||||
if swap_taken:
|
||||
swap_request.benefactor = user2
|
||||
swap_request.save()
|
||||
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
# setup empty current shifts before checking/triggering for notifications
|
||||
schedule.current_shifts = json.dumps({}, default=str)
|
||||
schedule.empty_oncall = False
|
||||
schedule.save()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
text_block = mock_slack_api_call.call_args_list[0][1]["blocks"][0]["text"]["text"]
|
||||
assert "user2" in text_block if swap_taken else "user1" in text_block
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_next_shift_changes_no_triggering_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
user2 = make_user(organization=organization, username="user2")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleCalendar,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date_1 = now - datetime.timedelta(days=7, minutes=1)
|
||||
data_1 = {
|
||||
"start": start_date_1,
|
||||
"rotation_start": start_date_1,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
}
|
||||
on_call_shift_1 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data_1
|
||||
)
|
||||
on_call_shift_1.add_rolling_users([[user1]])
|
||||
on_call_shift_1.schedules.add(schedule)
|
||||
|
||||
start_date_2 = now + datetime.timedelta(minutes=10)
|
||||
data_2 = {
|
||||
"start": start_date_2,
|
||||
"rotation_start": start_date_2,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 2,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
}
|
||||
on_call_shift_2 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data_2
|
||||
)
|
||||
on_call_shift_2.add_rolling_users([[user1]])
|
||||
on_call_shift_2.schedules.add(schedule)
|
||||
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
# setup empty current shifts before checking/triggering for notifications
|
||||
current_shifts = schedule.final_events(now, now, False, False)
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
schedule.empty_oncall = False
|
||||
schedule.save()
|
||||
|
||||
on_call_shift_2.add_rolling_users([[user2]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_lower_priority_changes_no_triggering_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
user2 = make_user(organization=organization, username="user2")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleCalendar,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - datetime.timedelta(days=7, minutes=1)
|
||||
data_1 = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 2,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
}
|
||||
on_call_shift_1 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data_1
|
||||
)
|
||||
on_call_shift_1.add_rolling_users([[user1]])
|
||||
on_call_shift_1.schedules.add(schedule)
|
||||
|
||||
data_2 = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
}
|
||||
on_call_shift_2 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data_2
|
||||
)
|
||||
on_call_shift_2.add_rolling_users([[user1]])
|
||||
on_call_shift_2.schedules.add(schedule)
|
||||
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
# setup empty current shifts before checking/triggering for notifications
|
||||
current_shifts = schedule.final_events(now, now, False, False)
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
schedule.empty_oncall = False
|
||||
schedule.save()
|
||||
|
||||
on_call_shift_2.add_rolling_users([[user2]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_vtimezone_changes_no_triggering_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
|
|
@ -414,20 +613,213 @@ def test_vtimezone_changes_no_triggering_notification(
|
|||
name="test_ical_schedule",
|
||||
channel="channel",
|
||||
ical_url_primary="url",
|
||||
prev_ical_file_primary=ical_before,
|
||||
cached_ical_file_primary=ical_after,
|
||||
prev_ical_file_primary=None,
|
||||
cached_ical_file_primary=ical_before,
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
# setup current shifts before checking/triggering for notifications
|
||||
calendar = icalendar.Calendar.from_ical(ical_before)
|
||||
current_shifts, _ = get_current_shifts_from_ical(calendar, schedule, 0)
|
||||
now = datetime.datetime.now(timezone.utc)
|
||||
current_shifts = schedule.final_events(now, now, False, False)
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
schedule.empty_oncall = False
|
||||
# update schedule cached ical to ical_after
|
||||
schedule.prev_ical_file_primary = ical_before
|
||||
schedule.cached_ical_file_primary = ical_after
|
||||
schedule.save()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_no_changes_no_triggering_notification_from_old_to_new_task_version(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleCalendar,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - timezone.timedelta(days=7)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": timezone.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
on_call_shift.schedules.add(schedule)
|
||||
|
||||
# setup current shifts with old version of shifts structure before checking/triggering for notifications
|
||||
current_shifts = {
|
||||
"test_shift_uid": {
|
||||
"users": [user1.pk],
|
||||
"start": start_date,
|
||||
"end": start_date + data["duration"],
|
||||
"all_day": False,
|
||||
"priority": data["priority_level"],
|
||||
"priority_increased_by": 0,
|
||||
}
|
||||
}
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
schedule.empty_oncall = False
|
||||
schedule.save()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.oncallschedule_ptr_id)
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_current_shift_changes_trigger_notification_from_old_to_new_task_version(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
user2 = make_user(organization=organization, username="user2")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleCalendar,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - datetime.timedelta(days=7)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
on_call_shift.schedules.add(schedule)
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
# setup current shifts with old version of shifts structure before checking/triggering for notifications
|
||||
current_shifts = {
|
||||
"test_shift_uid": {
|
||||
"users": [user1.pk],
|
||||
"start": start_date,
|
||||
"end": start_date + data["duration"],
|
||||
"all_day": False,
|
||||
"priority": data["priority_level"],
|
||||
"priority_increased_by": 0,
|
||||
}
|
||||
}
|
||||
schedule.current_shifts = json.dumps(current_shifts, default=str)
|
||||
schedule.empty_oncall = False
|
||||
schedule.save()
|
||||
|
||||
on_call_shift.add_rolling_users([[user2]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert mock_slack_api_call.called
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_next_shift_notification_long_and_short_shifts(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
user2 = make_user(organization=organization, username="user2")
|
||||
user3 = make_user(organization=organization, username="user3")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date_1 = now - datetime.timedelta(days=1)
|
||||
data_1 = {
|
||||
"start": start_date_1,
|
||||
"rotation_start": start_date_1,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24 * 7), # one week duration
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_WEEKLY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift_1 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data_1
|
||||
)
|
||||
on_call_shift_1.add_rolling_users([[user1], [user2]])
|
||||
|
||||
start_date_2 = now - datetime.timedelta(hours=1)
|
||||
data_2 = {
|
||||
"start": start_date_2,
|
||||
"rotation_start": start_date_2,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_WEEKLY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift_2 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data_2
|
||||
)
|
||||
on_call_shift_2.add_rolling_users([[user3]])
|
||||
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
# setup empty current shifts before checking/triggering for notifications
|
||||
schedule.current_shifts = json.dumps({}, default=str)
|
||||
schedule.empty_oncall = False
|
||||
schedule.save()
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_ical_schedule_shift(schedule.pk)
|
||||
|
||||
assert mock_slack_api_call.called
|
||||
notification = mock_slack_api_call.call_args[1]["blocks"][0]["text"]["text"]
|
||||
new_shift_notification, next_shift_notification = notification.split("\n\n")
|
||||
|
||||
assert "*New on-call shift:*\n[L1] user1" in new_shift_notification
|
||||
assert "[L1] user3" in new_shift_notification
|
||||
assert "*Next on-call shift:*\n[L1] user2" in notification
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
from django.conf import settings
|
||||
from django.urls import include, path, re_path
|
||||
|
||||
from common.api_helpers.optional_slash_router import OptionalSlashRouter, optional_slash_path
|
||||
|
|
@ -66,9 +65,7 @@ router.register(r"heartbeats", IntegrationHeartBeatView, basename="integration_h
|
|||
router.register(r"tokens", PublicApiTokenView, basename="api_token")
|
||||
router.register(r"live_settings", LiveSettingViewSet, basename="live_settings")
|
||||
router.register(r"oncall_shifts", OnCallShiftView, basename="oncall_shifts")
|
||||
|
||||
if settings.FEATURE_SHIFT_SWAPS_ENABLED:
|
||||
router.register(r"shift_swaps", ShiftSwapViewSet, basename="shift_swap")
|
||||
router.register(r"shift_swaps", ShiftSwapViewSet, basename="shift_swap")
|
||||
|
||||
urlpatterns = [
|
||||
path("", include(router.urls)),
|
||||
|
|
|
|||
|
|
@ -4,10 +4,9 @@ from urllib.parse import urljoin
|
|||
|
||||
from django.conf import settings
|
||||
from django.core.validators import MinLengthValidator
|
||||
from django.db import models, transaction
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.integrations.tasks import create_alert
|
||||
from common.public_primary_keys import generate_public_primary_key, increase_public_primary_key_length
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
|
@ -43,10 +42,26 @@ class IntegrationHeartBeat(models.Model):
|
|||
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
timeout_seconds = models.IntegerField(default=0)
|
||||
|
||||
last_heartbeat_time = models.DateTimeField(default=None, null=True)
|
||||
"""
|
||||
Stores the latest received heartbeat signal time
|
||||
"""
|
||||
|
||||
last_checkup_task_time = models.DateTimeField(default=None, null=True)
|
||||
"""
|
||||
Deprecated. This field is not used. TODO: remove it
|
||||
"""
|
||||
|
||||
actual_check_up_task_id = models.CharField(max_length=100)
|
||||
"""
|
||||
Deprecated. Stored the latest scheduled `integration_heartbeat_checkup` task id. TODO: remove it
|
||||
"""
|
||||
|
||||
previous_alerted_state_was_life = models.BooleanField(default=True)
|
||||
"""
|
||||
Last status of the heartbeat. Determines if integration was alive on latest checkup
|
||||
"""
|
||||
|
||||
public_primary_key = models.CharField(
|
||||
max_length=20,
|
||||
|
|
@ -83,73 +98,6 @@ class IntegrationHeartBeat(models.Model):
|
|||
def link(self) -> str:
|
||||
return urljoin(self.alert_receive_channel.integration_url, "heartbeat/")
|
||||
|
||||
@classmethod
|
||||
def perform_heartbeat_check(cls, heartbeat_id: int, task_request_id: str) -> None:
|
||||
with transaction.atomic():
|
||||
heartbeats = cls.objects.filter(pk=heartbeat_id).select_for_update()
|
||||
if len(heartbeats) == 0:
|
||||
logger.info(f"Heartbeat {heartbeat_id} not found {task_request_id}")
|
||||
return
|
||||
heartbeat = heartbeats[0]
|
||||
if task_request_id == heartbeat.actual_check_up_task_id:
|
||||
heartbeat.check_heartbeat_state_and_save()
|
||||
else:
|
||||
logger.info(f"Heartbeat {heartbeat_id} is not actual {task_request_id}")
|
||||
|
||||
def check_heartbeat_state_and_save(self) -> bool:
|
||||
"""
|
||||
Use this method if you want just check heartbeat status.
|
||||
"""
|
||||
state_changed = self.check_heartbeat_state()
|
||||
if state_changed:
|
||||
self.save(update_fields=["previous_alerted_state_was_life"])
|
||||
return state_changed
|
||||
|
||||
def check_heartbeat_state(self) -> bool:
|
||||
"""
|
||||
Actually checking heartbeat.
|
||||
Use this method if you want to do changes of heartbeat instance while checking its status.
|
||||
( See IntegrationHeartBeatAPIView.post() for example )
|
||||
"""
|
||||
state_changed = False
|
||||
if self.is_expired:
|
||||
if self.previous_alerted_state_was_life:
|
||||
self.on_heartbeat_expired()
|
||||
self.previous_alerted_state_was_life = False
|
||||
state_changed = True
|
||||
else:
|
||||
if not self.previous_alerted_state_was_life:
|
||||
self.on_heartbeat_restored()
|
||||
self.previous_alerted_state_was_life = True
|
||||
state_changed = True
|
||||
return state_changed
|
||||
|
||||
def on_heartbeat_restored(self) -> None:
|
||||
create_alert.apply_async(
|
||||
kwargs={
|
||||
"title": self.alert_receive_channel.heartbeat_restored_title,
|
||||
"message": self.alert_receive_channel.heartbeat_restored_message,
|
||||
"image_url": None,
|
||||
"link_to_upstream_details": None,
|
||||
"alert_receive_channel_pk": self.alert_receive_channel.pk,
|
||||
"integration_unique_data": {},
|
||||
"raw_request_data": self.alert_receive_channel.heartbeat_restored_payload,
|
||||
},
|
||||
)
|
||||
|
||||
def on_heartbeat_expired(self) -> None:
|
||||
create_alert.apply_async(
|
||||
kwargs={
|
||||
"title": self.alert_receive_channel.heartbeat_expired_title,
|
||||
"message": self.alert_receive_channel.heartbeat_expired_message,
|
||||
"image_url": None,
|
||||
"link_to_upstream_details": None,
|
||||
"alert_receive_channel_pk": self.alert_receive_channel.pk,
|
||||
"integration_unique_data": {},
|
||||
"raw_request_data": self.alert_receive_channel.heartbeat_expired_payload,
|
||||
},
|
||||
)
|
||||
|
||||
# Insight logs
|
||||
@property
|
||||
def insight_logs_type_verbal(self) -> str:
|
||||
|
|
|
|||
|
|
@ -1,57 +1,105 @@
|
|||
from time import perf_counter
|
||||
import datetime
|
||||
|
||||
from celery.utils.log import get_task_logger
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.db.models import DateTimeField, DurationField, ExpressionWrapper, F
|
||||
from django.db.models.functions import Cast
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.heartbeat.models import IntegrationHeartBeat
|
||||
from apps.integrations.tasks import create_alert
|
||||
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
|
||||
from settings.base import DatabaseTypes
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
|
||||
@shared_dedicated_queue_retry_task()
|
||||
def integration_heartbeat_checkup(heartbeat_id: int) -> None:
|
||||
from apps.heartbeat.models import IntegrationHeartBeat
|
||||
def check_heartbeats() -> str:
|
||||
"""
|
||||
Periodic task to check heartbeats status change and create alerts (or auto-resolve alerts) if needed
|
||||
"""
|
||||
# Heartbeat is considered enabled if it
|
||||
# * has timeout_seconds set to non-zero (non-default) value,
|
||||
# * received at least one checkup (last_heartbeat_time set to non-null value)\
|
||||
|
||||
IntegrationHeartBeat.perform_heartbeat_check(heartbeat_id, integration_heartbeat_checkup.request.id)
|
||||
def _get_timeout_expression() -> ExpressionWrapper:
|
||||
if settings.DATABASES["default"]["ENGINE"] == f"django.db.backends.{DatabaseTypes.POSTGRESQL}":
|
||||
# DurationField: When used on PostgreSQL, the data type used is an interval
|
||||
# https://docs.djangoproject.com/en/3.2/ref/models/fields/#durationfield
|
||||
return ExpressionWrapper(datetime.timedelta(seconds=1) * F("timeout_seconds"), output_field=DurationField())
|
||||
else:
|
||||
# DurationField: ...Otherwise a bigint of microseconds is used...
|
||||
# microseconds = seconds * 10**6
|
||||
# https://docs.djangoproject.com/en/3.2/ref/models/fields/#durationfield
|
||||
return ExpressionWrapper(F("timeout_seconds") * 10**6, output_field=DurationField())
|
||||
|
||||
enabled_heartbeats = (
|
||||
IntegrationHeartBeat.objects.filter(last_heartbeat_time__isnull=False)
|
||||
.exclude(timeout_seconds=0)
|
||||
.annotate(period_start=(Cast(timezone.now() - _get_timeout_expression(), DateTimeField())))
|
||||
)
|
||||
with transaction.atomic():
|
||||
# Heartbeat is considered expired if it
|
||||
# * is enabled,
|
||||
# * is not already expired,
|
||||
# * last check in was before the timeout period start
|
||||
expired_heartbeats = enabled_heartbeats.select_for_update().filter(
|
||||
last_heartbeat_time__lte=F("period_start"), previous_alerted_state_was_life=True
|
||||
)
|
||||
# Schedule alert creation for each expired heartbeat after transaction commit
|
||||
for heartbeat in expired_heartbeats:
|
||||
transaction.on_commit(
|
||||
lambda: create_alert.apply_async(
|
||||
kwargs={
|
||||
"title": heartbeat.alert_receive_channel.heartbeat_expired_title,
|
||||
"message": heartbeat.alert_receive_channel.heartbeat_expired_message,
|
||||
"image_url": None,
|
||||
"link_to_upstream_details": None,
|
||||
"alert_receive_channel_pk": heartbeat.alert_receive_channel.pk,
|
||||
"integration_unique_data": {},
|
||||
"raw_request_data": heartbeat.alert_receive_channel.heartbeat_expired_payload,
|
||||
},
|
||||
)
|
||||
)
|
||||
# Update previous_alerted_state_was_life to False
|
||||
expired_count = expired_heartbeats.update(previous_alerted_state_was_life=False)
|
||||
with transaction.atomic():
|
||||
# Heartbeat is considered restored if it
|
||||
# * is enabled,
|
||||
# * last check in was after the timeout period start,
|
||||
# * was is alerted state (previous_alerted_state_was_life is False), i.e. was expired
|
||||
restored_heartbeats = enabled_heartbeats.select_for_update().filter(
|
||||
last_heartbeat_time__gte=F("period_start"), previous_alerted_state_was_life=False
|
||||
)
|
||||
# Schedule auto-resolve alert creation for each expired heartbeat after transaction commit
|
||||
for heartbeat in restored_heartbeats:
|
||||
transaction.on_commit(
|
||||
lambda: create_alert.apply_async(
|
||||
kwargs={
|
||||
"title": heartbeat.alert_receive_channel.heartbeat_restored_title,
|
||||
"message": heartbeat.alert_receive_channel.heartbeat_restored_message,
|
||||
"image_url": None,
|
||||
"link_to_upstream_details": None,
|
||||
"alert_receive_channel_pk": heartbeat.alert_receive_channel.pk,
|
||||
"integration_unique_data": {},
|
||||
"raw_request_data": heartbeat.alert_receive_channel.heartbeat_restored_payload,
|
||||
},
|
||||
)
|
||||
)
|
||||
restored_count = restored_heartbeats.update(previous_alerted_state_was_life=True)
|
||||
return f"Found {expired_count} expired and {restored_count} restored heartbeats"
|
||||
|
||||
|
||||
@shared_dedicated_queue_retry_task()
|
||||
def integration_heartbeat_checkup(heartbeat_id: int) -> None:
|
||||
"""Deprecated. TODO: Remove this task after this task cleared from queue"""
|
||||
pass
|
||||
|
||||
|
||||
@shared_dedicated_queue_retry_task()
|
||||
def process_heartbeat_task(alert_receive_channel_pk):
|
||||
start = perf_counter()
|
||||
from apps.heartbeat.models import IntegrationHeartBeat
|
||||
|
||||
with transaction.atomic():
|
||||
heartbeats = IntegrationHeartBeat.objects.filter(
|
||||
alert_receive_channel__pk=alert_receive_channel_pk,
|
||||
).select_for_update()
|
||||
if len(heartbeats) == 0:
|
||||
logger.info(f"Integration Heartbeat for alert_receive_channel {alert_receive_channel_pk} was not found.")
|
||||
return
|
||||
else:
|
||||
heartbeat = heartbeats[0]
|
||||
heartbeat_selected = perf_counter()
|
||||
logger.info(
|
||||
f"IntegrationHeartBeat selected for alert_receive_channel {alert_receive_channel_pk} in {heartbeat_selected - start}"
|
||||
)
|
||||
task = integration_heartbeat_checkup.apply_async(
|
||||
(heartbeat.pk,),
|
||||
countdown=heartbeat.timeout_seconds + 1,
|
||||
)
|
||||
is_touched = heartbeat.last_heartbeat_time is not None
|
||||
heartbeat.actual_check_up_task_id = task.id
|
||||
heartbeat.last_heartbeat_time = timezone.now()
|
||||
update_fields = ["actual_check_up_task_id", "last_heartbeat_time"]
|
||||
task_started = perf_counter()
|
||||
logger.info(
|
||||
f"heartbeat_checkup task started for alert_receive_channel {alert_receive_channel_pk} in {task_started - start}"
|
||||
)
|
||||
if is_touched:
|
||||
state_changed = heartbeat.check_heartbeat_state()
|
||||
state_checked = perf_counter()
|
||||
logger.info(
|
||||
f"state checked for alert_receive_channel {alert_receive_channel_pk} in {state_checked - start}"
|
||||
)
|
||||
if state_changed:
|
||||
update_fields.append("previous_alerted_state_was_life")
|
||||
heartbeat.save(update_fields=update_fields)
|
||||
IntegrationHeartBeat.objects.filter(
|
||||
alert_receive_channel__pk=alert_receive_channel_pk,
|
||||
).update(last_heartbeat_time=timezone.now())
|
||||
|
|
|
|||
|
|
@ -4,83 +4,77 @@ import pytest
|
|||
from django.utils import timezone
|
||||
|
||||
from apps.alerts.models import AlertReceiveChannel
|
||||
from apps.heartbeat.tasks import check_heartbeats
|
||||
from apps.integrations.tasks import create_alert
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("apps.heartbeat.models.IntegrationHeartBeat.on_heartbeat_expired", return_value=None)
|
||||
@pytest.mark.parametrize("integration", [AlertReceiveChannel.INTEGRATION_FORMATTED_WEBHOOK])
|
||||
def test_integration_heartbeat_expired(
|
||||
mocked_handler, make_organization_and_user, make_alert_receive_channel, make_integration_heartbeat, integration
|
||||
def test_check_heartbeats(
|
||||
make_organization_and_user,
|
||||
make_alert_receive_channel,
|
||||
make_integration_heartbeat,
|
||||
integration,
|
||||
django_capture_on_commit_callbacks,
|
||||
):
|
||||
# No heartbeats, nothing happens
|
||||
with patch.object(create_alert, "apply_async") as mock_create_alert_apply_async:
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
result = check_heartbeats()
|
||||
assert result == "Found 0 expired and 0 restored heartbeats"
|
||||
assert mock_create_alert_apply_async.call_count == 0
|
||||
|
||||
# Prepare heartbeat
|
||||
team, _ = make_organization_and_user()
|
||||
# Some short timeout and last_heartbeat_time to make sure that heartbeat is expired
|
||||
timeout = 1
|
||||
last_heartbeat_time = timezone.now() - timezone.timedelta(seconds=timeout * 10)
|
||||
alert_receive_channel = make_alert_receive_channel(team, integration=integration)
|
||||
integration_heartbeat = make_integration_heartbeat(
|
||||
alert_receive_channel, timeout, last_heartbeat_time=last_heartbeat_time
|
||||
)
|
||||
integration_heartbeat.check_heartbeat_state_and_save()
|
||||
assert mocked_handler.called
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("apps.heartbeat.models.IntegrationHeartBeat.on_heartbeat_expired", return_value=None)
|
||||
@pytest.mark.parametrize("integration", [AlertReceiveChannel.INTEGRATION_FORMATTED_WEBHOOK])
|
||||
def test_integration_heartbeat_already_expired(
|
||||
mocked_handler, make_organization_and_user, make_alert_receive_channel, make_integration_heartbeat, integration
|
||||
):
|
||||
team, _ = make_organization_and_user()
|
||||
# Some short timeout and last_heartbeat_time to make sure that heartbeat is expired
|
||||
timeout = 1
|
||||
last_heartbeat_time = timezone.now() - timezone.timedelta(seconds=timeout * 10)
|
||||
alert_receive_channel = make_alert_receive_channel(team, integration=integration)
|
||||
integration_heartbeat = make_integration_heartbeat(
|
||||
alert_receive_channel,
|
||||
timeout,
|
||||
last_heartbeat_time=last_heartbeat_time,
|
||||
previous_alerted_state_was_life=False,
|
||||
)
|
||||
integration_heartbeat.check_heartbeat_state_and_save()
|
||||
assert mocked_handler.called is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("apps.heartbeat.models.IntegrationHeartBeat.on_heartbeat_restored", return_value=None)
|
||||
@pytest.mark.parametrize("integration", [AlertReceiveChannel.INTEGRATION_FORMATTED_WEBHOOK])
|
||||
def test_integration_heartbeat_restored(
|
||||
mocked_handler, make_organization_and_user, make_alert_receive_channel, make_integration_heartbeat, integration
|
||||
):
|
||||
team, _ = make_organization_and_user()
|
||||
# Some long timeout and last_heartbeat_time to make sure that heartbeat is not expired
|
||||
timeout = 1000
|
||||
timeout = 60
|
||||
last_heartbeat_time = timezone.now()
|
||||
alert_receive_channel = make_alert_receive_channel(team, integration=integration)
|
||||
integration_heartbeat = make_integration_heartbeat(
|
||||
alert_receive_channel,
|
||||
timeout,
|
||||
last_heartbeat_time=last_heartbeat_time,
|
||||
previous_alerted_state_was_life=False,
|
||||
alert_receive_channel, timeout, last_heartbeat_time=last_heartbeat_time, previous_alerted_state_was_life=True
|
||||
)
|
||||
integration_heartbeat.check_heartbeat_state_and_save()
|
||||
assert mocked_handler.called
|
||||
|
||||
# Heartbeat is alive, nothing happens
|
||||
with patch.object(create_alert, "apply_async") as mock_create_alert_apply_async:
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
result = check_heartbeats()
|
||||
assert result == "Found 0 expired and 0 restored heartbeats"
|
||||
assert mock_create_alert_apply_async.call_count == 0
|
||||
|
||||
@pytest.mark.django_db
|
||||
@patch("apps.heartbeat.models.IntegrationHeartBeat.on_heartbeat_restored", return_value=None)
|
||||
@pytest.mark.parametrize("integration", [AlertReceiveChannel.INTEGRATION_FORMATTED_WEBHOOK])
|
||||
def test_integration_heartbeat_restored_and_alert_was_not_sent(
|
||||
mocked_handler, make_organization_and_user, make_alert_receive_channel, make_integration_heartbeat, integration
|
||||
):
|
||||
team, _ = make_organization_and_user()
|
||||
# Some long timeout and last_heartbeat_time to make sure that heartbeat is not expired
|
||||
timeout = 1000
|
||||
last_heartbeat_time = timezone.now()
|
||||
alert_receive_channel = make_alert_receive_channel(team, integration=integration)
|
||||
integration_heartbeat = make_integration_heartbeat(
|
||||
alert_receive_channel,
|
||||
timeout,
|
||||
last_heartbeat_time=last_heartbeat_time,
|
||||
)
|
||||
integration_heartbeat.check_heartbeat_state_and_save()
|
||||
assert mocked_handler.called is False
|
||||
# Hearbeat expires, send an alert
|
||||
integration_heartbeat.refresh_from_db()
|
||||
integration_heartbeat.last_heartbeat_time = timezone.now() - timezone.timedelta(seconds=timeout * 10)
|
||||
integration_heartbeat.save()
|
||||
with patch.object(create_alert, "apply_async") as mock_create_alert_apply_async:
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
result = check_heartbeats()
|
||||
assert result == "Found 1 expired and 0 restored heartbeats"
|
||||
assert mock_create_alert_apply_async.call_count == 1
|
||||
|
||||
# Heartbeat is still expired, nothing happens
|
||||
integration_heartbeat.refresh_from_db()
|
||||
with patch.object(create_alert, "apply_async") as mock_create_alert_apply_async:
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
result = check_heartbeats()
|
||||
assert result == "Found 0 expired and 0 restored heartbeats"
|
||||
assert mock_create_alert_apply_async.call_count == 0
|
||||
|
||||
# Hearbeat restored, send an auto-resolve alert
|
||||
integration_heartbeat.refresh_from_db()
|
||||
integration_heartbeat.last_heartbeat_time = timezone.now()
|
||||
integration_heartbeat.save()
|
||||
with patch.object(create_alert, "apply_async") as mock_create_alert_apply_async:
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
result = check_heartbeats()
|
||||
assert result == "Found 0 expired and 1 restored heartbeats"
|
||||
assert mock_create_alert_apply_async.call_count == 1
|
||||
|
||||
# Heartbeat is alive, nothing happens
|
||||
integration_heartbeat.refresh_from_db()
|
||||
integration_heartbeat.last_heartbeat_time = timezone.now()
|
||||
integration_heartbeat.save()
|
||||
integration_heartbeat.refresh_from_db()
|
||||
with patch.object(create_alert, "apply_async") as mock_create_alert_apply_async:
|
||||
with django_capture_on_commit_callbacks(execute=True):
|
||||
result = check_heartbeats()
|
||||
assert result == "Found 0 expired and 0 restored heartbeats"
|
||||
assert mock_create_alert_apply_async.call_count == 0
|
||||
|
|
|
|||
|
|
@ -12,12 +12,12 @@ heartbeat_expired_message = heartbeat_text.heartbeat_expired_message
|
|||
heartbeat_expired_payload = {
|
||||
"alert_uid": "0eaf37c8-e1eb-4714-b79e-7c648b6a96fa",
|
||||
"title": heartbeat_expired_title,
|
||||
"image_url": None,
|
||||
"state": "alerting",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_expired_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": False,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": False,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": False, # Keep for backwards compatibility
|
||||
}
|
||||
|
||||
heartbeat_restored_title = heartbeat_text.heartbeat_restored_title
|
||||
|
|
@ -26,10 +26,10 @@ heartbeat_restored_message = heartbeat_text.heartbeat_restored_message
|
|||
heartbeat_restored_payload = {
|
||||
"alert_uid": "0eaf37c8-e1eb-4714-b79e-7c648b6a96fa",
|
||||
"title": heartbeat_restored_title,
|
||||
"image_url": None,
|
||||
"state": "ok",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_restored_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": True,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": True,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": True, # Keep for backwards compatibility
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,8 +17,10 @@ heartbeat_expired_payload = {
|
|||
"state": "alerting",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_expired_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": False,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": False,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": False, # Keep for backwards compatibility
|
||||
}
|
||||
|
||||
heartbeat_restored_title = heartbeat_text.heartbeat_restored_title
|
||||
|
|
@ -31,6 +33,8 @@ heartbeat_restored_payload = {
|
|||
"state": "ok",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_restored_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": True,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": True,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": True, # Keep for backwards compatibility
|
||||
}
|
||||
|
|
|
|||
|
|
@ -14,8 +14,10 @@ heartbeat_expired_payload = {
|
|||
"state": "alerting",
|
||||
"title": heartbeat_expired_title,
|
||||
"message": heartbeat_expired_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": False,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": False,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": False, # Keep for backwards compatibility
|
||||
}
|
||||
|
||||
heartbeat_restored_title = f"[OK] {heartbeat_text.heartbeat_restored_title}"
|
||||
|
|
@ -25,6 +27,8 @@ heartbeat_restored_payload = {
|
|||
"state": "ok",
|
||||
"title": heartbeat_restored_title,
|
||||
"message": heartbeat_restored_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": True,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": True,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": True, # Keep for backwards compatibility
|
||||
}
|
||||
|
|
|
|||
|
|
@ -17,8 +17,10 @@ heartbeat_expired_payload = {
|
|||
"state": "alerting",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_expired_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": False,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": False,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": False, # Keep for backwards compatibility
|
||||
}
|
||||
|
||||
heartbeat_restored_title = heartbeat_text.heartbeat_restored_title
|
||||
|
|
@ -31,6 +33,8 @@ heartbeat_restored_payload = {
|
|||
"state": "ok",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_restored_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": True,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": True,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": True, # Keep for backwards compatibility
|
||||
}
|
||||
|
|
|
|||
|
|
@ -13,12 +13,12 @@ heartbeat_expired_message = heartbeat_text.heartbeat_expired_message
|
|||
heartbeat_expired_payload = {
|
||||
"alert_uid": "7973c835-ff3f-46e4-9444-06df127b6f8e",
|
||||
"title": heartbeat_expired_title,
|
||||
"image_url": None,
|
||||
"state": "alerting",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_expired_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": False,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": False,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": False, # Keep for backwards compatibility
|
||||
}
|
||||
|
||||
heartbeat_restored_title = heartbeat_text.heartbeat_restored_title
|
||||
|
|
@ -31,6 +31,8 @@ heartbeat_restored_payload = {
|
|||
"state": "ok",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_restored_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": True,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": True,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": True, # Keep for backwards compatibility
|
||||
}
|
||||
|
|
|
|||
|
|
@ -16,8 +16,10 @@ heartbeat_expired_payload = {
|
|||
"state": "alerting",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_expired_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": False,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": False,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": False, # Keep for backwards compatibility
|
||||
}
|
||||
|
||||
heartbeat_restored_title = heartbeat_text.heartbeat_restored_title
|
||||
|
|
@ -30,6 +32,8 @@ heartbeat_restored_payload = {
|
|||
"state": "ok",
|
||||
"link_to_upstream_details": None,
|
||||
"message": heartbeat_restored_message,
|
||||
"is_amixr_heartbeat": True,
|
||||
"is_amixr_heartbeat_restored": True,
|
||||
"is_oncall_heartbeat": True,
|
||||
"is_oncall_heartbeat_restored": True,
|
||||
"is_amixr_heartbeat": True, # Keep for backwards compatibility
|
||||
"is_amixr_heartbeat_restored": True, # Keep for backwards compatibility
|
||||
}
|
||||
|
|
|
|||
|
|
@ -96,5 +96,3 @@ def test_ratelimit_integration_heartbeats(
|
|||
|
||||
response = c.get(url)
|
||||
assert response.status_code == 429
|
||||
|
||||
assert mocked_task.call_count == 1
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import logging
|
|||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.db import OperationalError
|
||||
from django.http import HttpResponseBadRequest, JsonResponse
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
|
|
@ -324,6 +325,10 @@ class IntegrationHeartBeatAPIView(AlertChannelDefiningMixin, IntegrationHeartBea
|
|||
return Response(status=200)
|
||||
|
||||
def _process_heartbeat_signal(self, request, alert_receive_channel):
|
||||
process_heartbeat_task.apply_async(
|
||||
(alert_receive_channel.pk,),
|
||||
)
|
||||
try:
|
||||
process_heartbeat_task(alert_receive_channel.pk)
|
||||
# If database is not ready, fallback to celery task
|
||||
except OperationalError:
|
||||
process_heartbeat_task.apply_async(
|
||||
(alert_receive_channel.pk,),
|
||||
)
|
||||
|
|
|
|||
|
|
@ -6,7 +6,8 @@ import typing
|
|||
from firebase_admin.messaging import APNSPayload, Aps, ApsAlert, CriticalSound, Message
|
||||
|
||||
from apps.mobile_app.exceptions import DeviceNotSet
|
||||
from apps.mobile_app.tasks import FCMMessageData, MessageType, _construct_fcm_message, _send_push_notification, logger
|
||||
from apps.mobile_app.tasks import _construct_fcm_message, _send_push_notification, logger
|
||||
from apps.mobile_app.types import FCMMessageData, MessageType, Platform
|
||||
from apps.user_management.models import User
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
|
|
@ -38,27 +39,22 @@ def _get_test_escalation_fcm_message(user: User, device_to_notify: "FCMDevice",
|
|||
|
||||
# APNS only allows to specify volume for critical notifications
|
||||
apns_volume = mobile_app_user_settings.important_notification_volume if critical else None
|
||||
apns_sound_name = (
|
||||
mobile_app_user_settings.important_notification_sound_name
|
||||
if critical
|
||||
else mobile_app_user_settings.default_notification_sound_name
|
||||
) + MobileAppUserSettings.IOS_SOUND_NAME_EXTENSION # iOS app expects the filename to have an extension
|
||||
message_type = MessageType.IMPORTANT if critical else MessageType.DEFAULT
|
||||
apns_sound_name = mobile_app_user_settings.get_notification_sound_name(message_type, Platform.IOS)
|
||||
|
||||
fcm_message_data: FCMMessageData = {
|
||||
"title": get_test_push_title(critical),
|
||||
# Pass user settings, so the Android app can use them to play the correct sound and volume
|
||||
"default_notification_sound_name": (
|
||||
mobile_app_user_settings.default_notification_sound_name
|
||||
+ MobileAppUserSettings.ANDROID_SOUND_NAME_EXTENSION
|
||||
"default_notification_sound_name": mobile_app_user_settings.get_notification_sound_name(
|
||||
MessageType.DEFAULT, Platform.ANDROID
|
||||
),
|
||||
"default_notification_volume_type": mobile_app_user_settings.default_notification_volume_type,
|
||||
"default_notification_volume": str(mobile_app_user_settings.default_notification_volume),
|
||||
"default_notification_volume_override": json.dumps(
|
||||
mobile_app_user_settings.default_notification_volume_override
|
||||
),
|
||||
"important_notification_sound_name": (
|
||||
mobile_app_user_settings.important_notification_sound_name
|
||||
+ MobileAppUserSettings.ANDROID_SOUND_NAME_EXTENSION
|
||||
"important_notification_sound_name": mobile_app_user_settings.get_notification_sound_name(
|
||||
MessageType.IMPORTANT, Platform.ANDROID
|
||||
),
|
||||
"important_notification_volume_type": mobile_app_user_settings.important_notification_volume_type,
|
||||
"important_notification_volume": str(mobile_app_user_settings.important_notification_volume),
|
||||
|
|
@ -84,8 +80,6 @@ def _get_test_escalation_fcm_message(user: User, device_to_notify: "FCMDevice",
|
|||
),
|
||||
)
|
||||
|
||||
message_type = MessageType.CRITICAL if critical else MessageType.NORMAL
|
||||
|
||||
return _construct_fcm_message(message_type, device_to_notify, thread_id, fcm_message_data, apns_payload)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ from fcm_django.models import FCMDevice as BaseFCMDevice
|
|||
|
||||
from apps.auth_token import constants, crypto
|
||||
from apps.auth_token.models import BaseAuthToken
|
||||
from apps.mobile_app.types import MessageType, Platform
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from apps.user_management.models import Organization, User
|
||||
|
|
@ -175,3 +176,22 @@ class MobileAppUserSettings(models.Model):
|
|||
|
||||
locale = models.CharField(max_length=50, null=True)
|
||||
time_zone = models.CharField(max_length=100, default="UTC")
|
||||
|
||||
def get_notification_sound_name(self, message_type: MessageType, platform: Platform) -> str:
|
||||
sound_name = {
|
||||
MessageType.DEFAULT: self.default_notification_sound_name,
|
||||
MessageType.IMPORTANT: self.important_notification_sound_name,
|
||||
MessageType.INFO: self.info_notification_sound_name,
|
||||
}[message_type]
|
||||
|
||||
# If sound name already contains an extension, return it as is
|
||||
if "." in sound_name:
|
||||
return sound_name
|
||||
|
||||
# Add appropriate extension based on platform, for cases when no extension is specified in the sound name
|
||||
extension = {
|
||||
Platform.IOS: self.IOS_SOUND_NAME_EXTENSION,
|
||||
Platform.ANDROID: self.ANDROID_SOUND_NAME_EXTENSION,
|
||||
}[platform]
|
||||
|
||||
return f"{sound_name}{extension}"
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ import json
|
|||
import logging
|
||||
import math
|
||||
import typing
|
||||
from enum import Enum
|
||||
|
||||
import humanize
|
||||
import pytz
|
||||
|
|
@ -20,6 +19,7 @@ from rest_framework import status
|
|||
from apps.alerts.models import AlertGroup
|
||||
from apps.base.utils import live_settings
|
||||
from apps.mobile_app.alert_rendering import get_push_notification_subtitle
|
||||
from apps.mobile_app.types import FCMMessageData, MessageType, Platform
|
||||
from apps.schedules.models import ShiftSwapRequest
|
||||
from apps.schedules.models.on_call_schedule import OnCallSchedule, ScheduleEvent
|
||||
from apps.user_management.models import User
|
||||
|
|
@ -36,18 +36,6 @@ logger = get_task_logger(__name__)
|
|||
logger.setLevel(logging.DEBUG)
|
||||
|
||||
|
||||
class MessageType(str, Enum):
|
||||
NORMAL = "oncall.message"
|
||||
CRITICAL = "oncall.critical_message"
|
||||
INFO = "oncall.info"
|
||||
|
||||
|
||||
class FCMMessageData(typing.TypedDict):
|
||||
title: str
|
||||
subtitle: typing.Optional[str]
|
||||
body: typing.Optional[str]
|
||||
|
||||
|
||||
def send_push_notification_to_fcm_relay(message: Message) -> requests.Response:
|
||||
"""
|
||||
Send push notification to FCM relay on cloud instance: apps.mobile_app.fcm_relay.FCMRelayView
|
||||
|
|
@ -168,11 +156,8 @@ def _get_alert_group_escalation_fcm_message(
|
|||
|
||||
# APNS only allows to specify volume for critical notifications
|
||||
apns_volume = mobile_app_user_settings.important_notification_volume if critical else None
|
||||
apns_sound_name = (
|
||||
mobile_app_user_settings.important_notification_sound_name
|
||||
if critical
|
||||
else mobile_app_user_settings.default_notification_sound_name
|
||||
) + MobileAppUserSettings.IOS_SOUND_NAME_EXTENSION # iOS app expects the filename to have an extension
|
||||
message_type = MessageType.IMPORTANT if critical else MessageType.DEFAULT
|
||||
apns_sound_name = mobile_app_user_settings.get_notification_sound_name(message_type, Platform.IOS)
|
||||
|
||||
fcm_message_data: FCMMessageData = {
|
||||
"title": alert_title,
|
||||
|
|
@ -183,18 +168,16 @@ def _get_alert_group_escalation_fcm_message(
|
|||
# alert_group.status is an int so it must be casted...
|
||||
"status": str(alert_group.status),
|
||||
# Pass user settings, so the Android app can use them to play the correct sound and volume
|
||||
"default_notification_sound_name": (
|
||||
mobile_app_user_settings.default_notification_sound_name
|
||||
+ MobileAppUserSettings.ANDROID_SOUND_NAME_EXTENSION
|
||||
"default_notification_sound_name": mobile_app_user_settings.get_notification_sound_name(
|
||||
MessageType.DEFAULT, Platform.ANDROID
|
||||
),
|
||||
"default_notification_volume_type": mobile_app_user_settings.default_notification_volume_type,
|
||||
"default_notification_volume": str(mobile_app_user_settings.default_notification_volume),
|
||||
"default_notification_volume_override": json.dumps(
|
||||
mobile_app_user_settings.default_notification_volume_override
|
||||
),
|
||||
"important_notification_sound_name": (
|
||||
mobile_app_user_settings.important_notification_sound_name
|
||||
+ MobileAppUserSettings.ANDROID_SOUND_NAME_EXTENSION
|
||||
"important_notification_sound_name": mobile_app_user_settings.get_notification_sound_name(
|
||||
MessageType.IMPORTANT, Platform.ANDROID
|
||||
),
|
||||
"important_notification_volume_type": mobile_app_user_settings.important_notification_volume_type,
|
||||
"important_notification_volume": str(mobile_app_user_settings.important_notification_volume),
|
||||
|
|
@ -222,8 +205,6 @@ def _get_alert_group_escalation_fcm_message(
|
|||
),
|
||||
)
|
||||
|
||||
message_type = MessageType.CRITICAL if critical else MessageType.NORMAL
|
||||
|
||||
return _construct_fcm_message(message_type, device_to_notify, thread_id, fcm_message_data, apns_payload)
|
||||
|
||||
|
||||
|
|
@ -268,8 +249,6 @@ def _get_youre_going_oncall_fcm_message(
|
|||
thread_id = f"{schedule.public_primary_key}:{user.public_primary_key}:going-oncall"
|
||||
|
||||
mobile_app_user_settings, _ = MobileAppUserSettings.objects.get_or_create(user=user)
|
||||
info_notification_sound_name = mobile_app_user_settings.info_notification_sound_name
|
||||
|
||||
notification_title = _get_youre_going_oncall_notification_title(seconds_until_going_oncall)
|
||||
notification_subtitle = _get_youre_going_oncall_notification_subtitle(
|
||||
schedule, schedule_event, mobile_app_user_settings
|
||||
|
|
@ -278,7 +257,9 @@ def _get_youre_going_oncall_fcm_message(
|
|||
data: FCMMessageData = {
|
||||
"title": notification_title,
|
||||
"subtitle": notification_subtitle,
|
||||
"info_notification_sound_name": f"{info_notification_sound_name}{MobileAppUserSettings.ANDROID_SOUND_NAME_EXTENSION}",
|
||||
"info_notification_sound_name": mobile_app_user_settings.get_notification_sound_name(
|
||||
MessageType.INFO, Platform.ANDROID
|
||||
),
|
||||
"info_notification_volume_type": mobile_app_user_settings.info_notification_volume_type,
|
||||
"info_notification_volume": str(mobile_app_user_settings.info_notification_volume),
|
||||
"info_notification_volume_override": json.dumps(mobile_app_user_settings.info_notification_volume_override),
|
||||
|
|
@ -290,7 +271,7 @@ def _get_youre_going_oncall_fcm_message(
|
|||
alert=ApsAlert(title=notification_title, subtitle=notification_subtitle),
|
||||
sound=CriticalSound(
|
||||
critical=False,
|
||||
name=f"{info_notification_sound_name}{MobileAppUserSettings.IOS_SOUND_NAME_EXTENSION}",
|
||||
name=mobile_app_user_settings.get_notification_sound_name(MessageType.INFO, Platform.IOS),
|
||||
),
|
||||
custom_data={
|
||||
"interruption-level": "time-sensitive",
|
||||
|
|
@ -518,10 +499,6 @@ def notify_shift_swap_requests() -> None:
|
|||
"""
|
||||
A periodic task that notifies users about shift swap requests.
|
||||
"""
|
||||
|
||||
if not settings.FEATURE_SHIFT_SWAPS_ENABLED:
|
||||
return
|
||||
|
||||
for shift_swap_request in _get_shift_swap_requests_to_notify(timezone.now()):
|
||||
notify_shift_swap_request.delay(shift_swap_request.pk)
|
||||
|
||||
|
|
@ -645,8 +622,6 @@ def _shift_swap_request_fcm_message(
|
|||
device_to_notify: "FCMDevice",
|
||||
mobile_app_user_settings: "MobileAppUserSettings",
|
||||
) -> Message:
|
||||
from apps.mobile_app.models import MobileAppUserSettings
|
||||
|
||||
thread_id = f"{shift_swap_request.public_primary_key}:{user.public_primary_key}:ssr"
|
||||
notification_title = "New shift swap request"
|
||||
beneficiary_name = shift_swap_request.beneficiary.name or shift_swap_request.beneficiary.username
|
||||
|
|
@ -659,8 +634,8 @@ def _shift_swap_request_fcm_message(
|
|||
"title": notification_title,
|
||||
"subtitle": notification_subtitle,
|
||||
"route": route,
|
||||
"info_notification_sound_name": (
|
||||
mobile_app_user_settings.info_notification_sound_name + MobileAppUserSettings.ANDROID_SOUND_NAME_EXTENSION
|
||||
"info_notification_sound_name": mobile_app_user_settings.get_notification_sound_name(
|
||||
MessageType.INFO, Platform.ANDROID
|
||||
),
|
||||
"info_notification_volume_type": mobile_app_user_settings.info_notification_volume_type,
|
||||
"info_notification_volume": str(mobile_app_user_settings.info_notification_volume),
|
||||
|
|
@ -673,8 +648,7 @@ def _shift_swap_request_fcm_message(
|
|||
alert=ApsAlert(title=notification_title, subtitle=notification_subtitle),
|
||||
sound=CriticalSound(
|
||||
critical=False,
|
||||
name=mobile_app_user_settings.info_notification_sound_name
|
||||
+ MobileAppUserSettings.IOS_SOUND_NAME_EXTENSION,
|
||||
name=mobile_app_user_settings.get_notification_sound_name(MessageType.INFO, Platform.IOS),
|
||||
),
|
||||
custom_data={
|
||||
"interruption-level": "time-sensitive",
|
||||
|
|
|
|||
|
|
@ -34,6 +34,7 @@ def test_test_escalation_fcm_message_user_settings(
|
|||
assert message.apns.payload.aps.badge is None
|
||||
assert message.apns.payload.aps.alert.title == get_test_push_title(critical=False)
|
||||
assert message.data["title"] == get_test_push_title(critical=False)
|
||||
assert message.data["type"] == "oncall.message"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
|
@ -67,6 +68,7 @@ def test_escalation_fcm_message_user_settings_critical(
|
|||
assert message.apns.payload.aps.badge is None
|
||||
assert message.apns.payload.aps.alert.title == get_test_push_title(critical=True)
|
||||
assert message.data["title"] == get_test_push_title(critical=True)
|
||||
assert message.data["type"] == "oncall.critical_message"
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
|
|
|
|||
|
|
@ -234,6 +234,7 @@ def test_fcm_message_user_settings(
|
|||
assert message.data["important_notification_volume"] == "0.8"
|
||||
assert message.data["important_notification_volume_override"] == "true"
|
||||
assert message.data["important_notification_override_dnd"] == "true"
|
||||
assert message.data["type"] == "oncall.message"
|
||||
|
||||
# Check APNS notification sound is set correctly
|
||||
apns_sound = message.apns.payload.aps.sound
|
||||
|
|
@ -265,6 +266,7 @@ def test_fcm_message_user_settings_critical(
|
|||
assert message.data["important_notification_volume"] == "0.8"
|
||||
assert message.data["important_notification_volume_override"] == "true"
|
||||
assert message.data["important_notification_override_dnd"] == "true"
|
||||
assert message.data["type"] == "oncall.critical_message"
|
||||
|
||||
# Check APNS notification sound is set correctly
|
||||
apns_sound = message.apns.payload.aps.sound
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ from apps.mobile_app.models import FCMDevice, MobileAppUserSettings
|
|||
from apps.mobile_app.tasks import (
|
||||
SSR_EARLIEST_NOTIFICATION_OFFSET,
|
||||
SSR_NOTIFICATION_WINDOW,
|
||||
MessageType,
|
||||
_get_shift_swap_requests_to_notify,
|
||||
_has_user_been_notified_for_shift_swap_request,
|
||||
_mark_shift_swap_request_notified_for_user,
|
||||
|
|
@ -108,9 +107,7 @@ def test_get_shift_swap_requests_to_notify_starts_not_soon(
|
|||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notify_shift_swap_requests(make_organization, make_user, make_schedule, make_shift_swap_request, settings):
|
||||
settings.FEATURE_SHIFT_SWAPS_ENABLED = True
|
||||
|
||||
def test_notify_shift_swap_requests(make_organization, make_user, make_schedule, make_shift_swap_request):
|
||||
organization = make_organization()
|
||||
user = make_user(organization=organization)
|
||||
schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)
|
||||
|
|
@ -134,17 +131,6 @@ def test_notify_shift_swap_requests(make_organization, make_user, make_schedule,
|
|||
mock_notify_shift_swap_request.assert_called_once_with(shift_swap_request.pk)
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notify_shift_swap_requests_feature_flag_disabled(
|
||||
make_organization, make_user, make_schedule, make_shift_swap_request, settings
|
||||
):
|
||||
settings.FEATURE_SHIFT_SWAPS_ENABLED = False
|
||||
with patch("apps.mobile_app.tasks._get_shift_swap_requests_to_notify") as mock_get_shift_swap_requests_to_notify:
|
||||
notify_shift_swap_requests()
|
||||
|
||||
mock_get_shift_swap_requests_to_notify.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notify_shift_swap_request(make_organization, make_user, make_schedule, make_shift_swap_request, settings):
|
||||
organization = make_organization()
|
||||
|
|
@ -250,11 +236,7 @@ def test_notify_shift_swap_request_success(
|
|||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_notify_user_about_shift_swap_request(
|
||||
make_organization, make_user, make_schedule, make_shift_swap_request, settings
|
||||
):
|
||||
settings.FEATURE_SHIFT_SWAPS_ENABLED = True
|
||||
|
||||
def test_notify_user_about_shift_swap_request(make_organization, make_user, make_schedule, make_shift_swap_request):
|
||||
organization = make_organization()
|
||||
beneficiary = make_user(organization=organization, name="John Doe", username="john.doe")
|
||||
benefactor = make_user(organization=organization)
|
||||
|
|
@ -278,7 +260,7 @@ def test_notify_user_about_shift_swap_request(
|
|||
assert mock_send_push_notification.call_args.args[0] == device_to_notify
|
||||
|
||||
message: Message = mock_send_push_notification.call_args.args[1]
|
||||
assert message.data["type"] == MessageType.INFO
|
||||
assert message.data["type"] == "oncall.info"
|
||||
assert message.data["title"] == "New shift swap request"
|
||||
assert message.data["subtitle"] == "John Doe, Test Schedule"
|
||||
assert (
|
||||
|
|
|
|||
|
|
@ -3,6 +3,9 @@ from django.urls import reverse
|
|||
from rest_framework import status
|
||||
from rest_framework.test import APIClient
|
||||
|
||||
from apps.mobile_app.models import MobileAppUserSettings
|
||||
from apps.mobile_app.types import MessageType, Platform
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_user_settings_get(make_organization_and_user_with_mobile_app_auth_token):
|
||||
|
|
@ -140,3 +143,35 @@ def test_user_settings_time_zone_must_be_valid(make_organization_and_user_with_m
|
|||
|
||||
response = client.put(url, data=null_timezone, format="json", HTTP_AUTHORIZATION=auth_token)
|
||||
assert response.status_code == status.HTTP_400_BAD_REQUEST
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"message_type,platform,sound_names,expected_sound_name",
|
||||
[
|
||||
(MessageType.DEFAULT, Platform.ANDROID, ["default", "empty", "empty"], "default.mp3"),
|
||||
(MessageType.DEFAULT, Platform.ANDROID, ["default.extension", "empty", "empty"], "default.extension"),
|
||||
(MessageType.DEFAULT, Platform.IOS, ["default", "empty", "empty"], "default.aiff"),
|
||||
(MessageType.DEFAULT, Platform.IOS, ["default.extension", "empty", "empty"], "default.extension"),
|
||||
(MessageType.IMPORTANT, Platform.ANDROID, ["empty", "important", "empty"], "important.mp3"),
|
||||
(MessageType.IMPORTANT, Platform.ANDROID, ["empty", "important.extension", "empty"], "important.extension"),
|
||||
(MessageType.IMPORTANT, Platform.IOS, ["empty", "important", "empty"], "important.aiff"),
|
||||
(MessageType.IMPORTANT, Platform.IOS, ["empty", "important.extension", "empty"], "important.extension"),
|
||||
(MessageType.INFO, Platform.ANDROID, ["empty", "empty", "info"], "info.mp3"),
|
||||
(MessageType.INFO, Platform.ANDROID, ["empty", "empty", "info.extension"], "info.extension"),
|
||||
(MessageType.INFO, Platform.IOS, ["empty", "empty", "info"], "info.aiff"),
|
||||
(MessageType.INFO, Platform.IOS, ["empty", "empty", "info.extension"], "info.extension"),
|
||||
],
|
||||
)
|
||||
@pytest.mark.django_db
|
||||
def test_get_notification_sound_name(
|
||||
make_organization_and_user, message_type, platform, sound_names, expected_sound_name
|
||||
):
|
||||
organization, user = make_organization_and_user()
|
||||
mobile_app_user_settings = MobileAppUserSettings.objects.create(
|
||||
user=user,
|
||||
default_notification_sound_name=sound_names[0],
|
||||
important_notification_sound_name=sound_names[1],
|
||||
info_notification_sound_name=sound_names[2],
|
||||
)
|
||||
|
||||
assert mobile_app_user_settings.get_notification_sound_name(message_type, platform) == expected_sound_name
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ from django.utils import timezone
|
|||
|
||||
from apps.mobile_app import tasks
|
||||
from apps.mobile_app.models import FCMDevice, MobileAppUserSettings
|
||||
from apps.mobile_app.types import MessageType, Platform
|
||||
from apps.schedules.models import OnCallScheduleCalendar, OnCallScheduleICal, OnCallScheduleWeb
|
||||
from apps.schedules.models.on_call_schedule import ScheduleEvent
|
||||
|
||||
|
|
@ -217,9 +218,7 @@ def test_get_youre_going_oncall_fcm_message(
|
|||
data = {
|
||||
"title": mock_notification_title,
|
||||
"subtitle": mock_notification_subtitle,
|
||||
"info_notification_sound_name": (
|
||||
maus.info_notification_sound_name + MobileAppUserSettings.ANDROID_SOUND_NAME_EXTENSION
|
||||
),
|
||||
"info_notification_sound_name": maus.get_notification_sound_name(MessageType.INFO, Platform.ANDROID),
|
||||
"info_notification_volume_type": maus.info_notification_volume_type,
|
||||
"info_notification_volume": str(maus.info_notification_volume),
|
||||
"info_notification_volume_override": json.dumps(maus.info_notification_volume_override),
|
||||
|
|
@ -233,7 +232,7 @@ def test_get_youre_going_oncall_fcm_message(
|
|||
|
||||
mock_aps_alert.assert_called_once_with(title=mock_notification_title, subtitle=mock_notification_subtitle)
|
||||
mock_critical_sound.assert_called_once_with(
|
||||
critical=False, name=maus.info_notification_sound_name + MobileAppUserSettings.IOS_SOUND_NAME_EXTENSION
|
||||
critical=False, name=maus.get_notification_sound_name(MessageType.INFO, Platform.IOS)
|
||||
)
|
||||
mock_aps.assert_called_once_with(
|
||||
thread_id=notification_thread_id,
|
||||
|
|
@ -249,7 +248,7 @@ def test_get_youre_going_oncall_fcm_message(
|
|||
mock_get_youre_going_oncall_notification_title.assert_called_once_with(seconds_until_going_oncall)
|
||||
|
||||
mock_construct_fcm_message.assert_called_once_with(
|
||||
tasks.MessageType.INFO, device, notification_thread_id, data, mock_apns_payload.return_value
|
||||
MessageType.INFO, device, notification_thread_id, data, mock_apns_payload.return_value
|
||||
)
|
||||
|
||||
|
||||
|
|
|
|||
19
engine/apps/mobile_app/types.py
Normal file
19
engine/apps/mobile_app/types.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
import typing
|
||||
from enum import StrEnum
|
||||
|
||||
|
||||
class MessageType(StrEnum):
|
||||
DEFAULT = "oncall.message"
|
||||
IMPORTANT = "oncall.critical_message"
|
||||
INFO = "oncall.info"
|
||||
|
||||
|
||||
class Platform(StrEnum):
|
||||
ANDROID = "android"
|
||||
IOS = "ios"
|
||||
|
||||
|
||||
class FCMMessageData(typing.TypedDict):
|
||||
title: str
|
||||
subtitle: typing.Optional[str]
|
||||
body: typing.Optional[str]
|
||||
|
|
@ -7,6 +7,8 @@ ICAL_SUMMARY = "SUMMARY"
|
|||
ICAL_DESCRIPTION = "DESCRIPTION"
|
||||
ICAL_ATTENDEE = "ATTENDEE"
|
||||
ICAL_UID = "UID"
|
||||
ICAL_SEQUENCE = "SEQUENCE"
|
||||
ICAL_RECURRENCE_ID = "RECURRENCE-ID"
|
||||
ICAL_RRULE = "RRULE"
|
||||
ICAL_UNTIL = "UNTIL"
|
||||
ICAL_LAST_MODIFIED = "LAST-MODIFIED"
|
||||
|
|
|
|||
|
|
@ -22,6 +22,8 @@ from apps.schedules.constants import (
|
|||
ICAL_DATETIME_START,
|
||||
ICAL_DESCRIPTION,
|
||||
ICAL_LOCATION,
|
||||
ICAL_RECURRENCE_ID,
|
||||
ICAL_SEQUENCE,
|
||||
ICAL_SUMMARY,
|
||||
ICAL_UID,
|
||||
RE_EVENT_UID_V1,
|
||||
|
|
@ -198,8 +200,12 @@ def get_shifts_dict(
|
|||
result_datetime = []
|
||||
result_date = []
|
||||
for event in events:
|
||||
sequence = event.get(ICAL_SEQUENCE)
|
||||
recurrence_id = event.get(ICAL_RECURRENCE_ID)
|
||||
if recurrence_id:
|
||||
recurrence_id = recurrence_id.dt.isoformat()
|
||||
priority = parse_priority_from_string(event.get(ICAL_SUMMARY, "[L0]"))
|
||||
pk, source = parse_event_uid(event.get(ICAL_UID))
|
||||
pk, source = parse_event_uid(event.get(ICAL_UID), sequence=sequence, recurrence_id=recurrence_id)
|
||||
users = get_users_from_ical_event(event, schedule.organization)
|
||||
missing_users = get_missing_users_from_ical_event(event, schedule.organization)
|
||||
event_calendar_type = calendar_type
|
||||
|
|
@ -394,7 +400,7 @@ def parse_priority_from_string(string: str) -> int:
|
|||
return priority
|
||||
|
||||
|
||||
def parse_event_uid(string: str):
|
||||
def parse_event_uid(string: str, sequence: str = None, recurrence_id: str = None):
|
||||
pk = None
|
||||
source = None
|
||||
source_verbal = None
|
||||
|
|
@ -411,6 +417,13 @@ def parse_event_uid(string: str):
|
|||
else:
|
||||
# fallback to use the UID string as the rotation ID
|
||||
pk = string
|
||||
# in ical imported calendars, sequence and/or recurrence_id
|
||||
# distinguish main recurring event vs instance modification
|
||||
# (see https://icalendar.org/iCalendar-RFC-5545/3-8-4-4-recurrence-id.html)
|
||||
if sequence:
|
||||
pk = f"{pk}_{sequence}"
|
||||
if recurrence_id:
|
||||
pk = f"{pk}_{recurrence_id}"
|
||||
|
||||
if source is not None:
|
||||
source = int(source)
|
||||
|
|
@ -504,20 +517,23 @@ def ical_date_to_datetime(date, tz, start):
|
|||
return date, all_day
|
||||
|
||||
|
||||
def calculate_shift_diff(first_shift, second_shift):
|
||||
fields_to_compare = ["users", "end", "start", "all_day", "priority"]
|
||||
def calculate_shift_diff(shifts: list, prev_shifts: list) -> typing.Tuple[bool, list]:
|
||||
"""
|
||||
Get shifts diff comparing with the previous shifts
|
||||
"""
|
||||
fields_to_compare = ["users", "end", "start", "all_day", "priority_level", "shift"]
|
||||
|
||||
shift_changed = set(first_shift.keys()) != set(second_shift.keys())
|
||||
if not shift_changed:
|
||||
diff = set()
|
||||
for k, v in first_shift.items():
|
||||
for f in fields_to_compare:
|
||||
if v.get(f) != second_shift[k].get(f):
|
||||
shift_changed = True
|
||||
diff.add(k)
|
||||
break
|
||||
else:
|
||||
diff = set(first_shift.keys()) - set(second_shift.keys())
|
||||
shifts_fields = [{k: v for k, v in shift.items() if k in fields_to_compare} for shift in shifts]
|
||||
prev_shifts_fields = [{k: v for k, v in shift.items() if k in fields_to_compare} for shift in prev_shifts]
|
||||
|
||||
shift_changed = len(shifts) != len(prev_shifts)
|
||||
|
||||
diff = []
|
||||
|
||||
for idx, shift in enumerate(shifts_fields):
|
||||
if shift not in prev_shifts_fields:
|
||||
shift_changed = True
|
||||
diff.append(shifts[idx])
|
||||
|
||||
return shift_changed, diff
|
||||
|
||||
|
|
@ -611,29 +627,6 @@ def user_ical_export(user: "User", schedules: "OnCallScheduleQuerySet") -> bytes
|
|||
return ical_obj.to_ical()
|
||||
|
||||
|
||||
def list_of_gaps_in_schedule(
|
||||
schedule: "OnCallSchedule", start_date: datetime.date, end_date: datetime.date
|
||||
) -> DatetimeIntervals:
|
||||
calendars = schedule.get_icalendars()
|
||||
intervals: DatetimeIntervals = []
|
||||
start_datetime = datetime.datetime.combine(start_date, datetime.time.min) + datetime.timedelta(milliseconds=1)
|
||||
start_datetime = start_datetime.astimezone(pytz.UTC)
|
||||
end_datetime = datetime.datetime.combine(end_date, datetime.time.max).astimezone(pytz.UTC)
|
||||
|
||||
for calendar in calendars:
|
||||
if calendar is not None:
|
||||
calendar_tz = get_icalendar_tz_or_utc(calendar)
|
||||
events = ical_events.get_events_from_ical_between(
|
||||
calendar,
|
||||
start_datetime,
|
||||
end_datetime,
|
||||
)
|
||||
for event in events:
|
||||
start, end, _ = event_start_end_all_day_with_respect_to_type(event, calendar_tz)
|
||||
intervals.append(DatetimeInterval(start, end))
|
||||
return detect_gaps(intervals, start_datetime, end_datetime)
|
||||
|
||||
|
||||
def detect_gaps(intervals: DatetimeIntervals, start: datetime.datetime, end: datetime.datetime) -> DatetimeIntervals:
|
||||
gaps: DatetimeIntervals = []
|
||||
intervals = sorted(intervals, key=lambda dt: dt.start)
|
||||
|
|
|
|||
|
|
@ -38,7 +38,6 @@ from apps.schedules.ical_utils import (
|
|||
fetch_ical_file_or_get_error,
|
||||
get_oncall_users_for_multiple_schedules,
|
||||
list_of_empty_shifts_in_schedule,
|
||||
list_of_gaps_in_schedule,
|
||||
list_of_oncall_shifts_from_ical,
|
||||
)
|
||||
from apps.schedules.models import CustomOnCallShift
|
||||
|
|
@ -279,9 +278,10 @@ class OnCallSchedule(PolymorphicModel):
|
|||
(self.prev_ical_file_overrides, self.cached_ical_file_overrides),
|
||||
]
|
||||
|
||||
def check_gaps_for_next_week(self):
|
||||
today = timezone.now().date()
|
||||
gaps = list_of_gaps_in_schedule(self, today, today + datetime.timedelta(days=7))
|
||||
def check_gaps_for_next_week(self) -> bool:
|
||||
today = timezone.now()
|
||||
events = self.final_events(today, today + datetime.timedelta(days=7))
|
||||
gaps = [event for event in events if event["is_gap"] and not event["is_empty"]]
|
||||
has_gaps = len(gaps) != 0
|
||||
self.has_gaps = has_gaps
|
||||
self.save(update_fields=["has_gaps"])
|
||||
|
|
@ -344,6 +344,7 @@ class OnCallSchedule(PolymorphicModel):
|
|||
with_gap: bool = False,
|
||||
filter_by: str | None = None,
|
||||
all_day_datetime: bool = False,
|
||||
ignore_untaken_swaps: bool = False,
|
||||
from_cached_final: bool = False,
|
||||
) -> ScheduleEvents:
|
||||
"""Return filtered events from schedule."""
|
||||
|
|
@ -367,7 +368,9 @@ class OnCallSchedule(PolymorphicModel):
|
|||
end = shift["end"] - datetime.timedelta(days=1) if all_day else shift["end"]
|
||||
if all_day and all_day_datetime:
|
||||
start = datetime.datetime.combine(start, datetime.datetime.min.time(), tzinfo=pytz.UTC)
|
||||
end = datetime.datetime.combine(end, datetime.datetime.max.time(), tzinfo=pytz.UTC)
|
||||
end = datetime.datetime.combine(end, datetime.datetime.max.time(), tzinfo=pytz.UTC).replace(
|
||||
microsecond=0
|
||||
)
|
||||
is_gap = shift.get("is_gap", False)
|
||||
shift_json: ScheduleEvent = {
|
||||
"all_day": all_day,
|
||||
|
|
@ -399,13 +402,29 @@ class OnCallSchedule(PolymorphicModel):
|
|||
events = self._merge_events(events)
|
||||
|
||||
# annotate events with swap request details swapping users as needed
|
||||
events = self._apply_swap_requests(events, datetime_start, datetime_end)
|
||||
events = self._apply_swap_requests(
|
||||
events, datetime_start, datetime_end, ignore_untaken_swaps=ignore_untaken_swaps
|
||||
)
|
||||
|
||||
return events
|
||||
|
||||
def final_events(self, datetime_start: datetime.datetime, datetime_end: datetime.datetime) -> ScheduleEvents:
|
||||
def final_events(
|
||||
self,
|
||||
datetime_start: datetime.datetime,
|
||||
datetime_end: datetime.datetime,
|
||||
with_empty: bool = True,
|
||||
with_gap: bool = True,
|
||||
ignore_untaken_swaps: bool = False,
|
||||
) -> ScheduleEvents:
|
||||
"""Return schedule final events, after resolving shifts and overrides."""
|
||||
events = self.filter_events(datetime_start, datetime_end, with_empty=True, with_gap=True, all_day_datetime=True)
|
||||
events = self.filter_events(
|
||||
datetime_start,
|
||||
datetime_end,
|
||||
with_empty=with_empty,
|
||||
with_gap=with_gap,
|
||||
all_day_datetime=True,
|
||||
ignore_untaken_swaps=ignore_untaken_swaps,
|
||||
)
|
||||
events = self._resolve_schedule(events, datetime_start, datetime_end)
|
||||
return events
|
||||
|
||||
|
|
@ -432,7 +451,7 @@ class OnCallSchedule(PolymorphicModel):
|
|||
|
||||
# setup calendar with final schedule shift events
|
||||
calendar = create_base_icalendar(self.name)
|
||||
events = self.final_events(datetime_start, datetime_end)
|
||||
events = self.final_events(datetime_start, datetime_end, ignore_untaken_swaps=True)
|
||||
updated_ids = set()
|
||||
for e in events:
|
||||
for u in e["users"]:
|
||||
|
|
@ -637,7 +656,11 @@ class OnCallSchedule(PolymorphicModel):
|
|||
}
|
||||
|
||||
def _apply_swap_requests(
|
||||
self, events: ScheduleEvents, datetime_start: datetime.datetime, datetime_end: datetime.datetime
|
||||
self,
|
||||
events: ScheduleEvents,
|
||||
datetime_start: datetime.datetime,
|
||||
datetime_end: datetime.datetime,
|
||||
ignore_untaken_swaps: bool = False,
|
||||
) -> ScheduleEvents:
|
||||
"""Apply swap requests details to schedule events."""
|
||||
# get swaps requests affecting this schedule / time range
|
||||
|
|
@ -653,8 +676,8 @@ class OnCallSchedule(PolymorphicModel):
|
|||
|
||||
# apply swaps sequentially
|
||||
for swap in swaps:
|
||||
if swap.is_past_due:
|
||||
# ignore untaken expired requests
|
||||
if swap.is_past_due or (ignore_untaken_swaps and not swap.is_taken):
|
||||
# ignore expired requests, or untaken if specified
|
||||
continue
|
||||
i = 0
|
||||
while i < len(events):
|
||||
|
|
|
|||
|
|
@ -127,7 +127,7 @@ class ShiftSwapRequest(models.Model):
|
|||
|
||||
@property
|
||||
def is_past_due(self) -> bool:
|
||||
return timezone.now() > self.swap_start
|
||||
return not self.is_taken and timezone.now() > self.swap_start
|
||||
|
||||
@property
|
||||
def is_open(self) -> bool:
|
||||
|
|
|
|||
|
|
@ -109,7 +109,7 @@ def notify_about_empty_shifts_in_schedule(schedule_pk):
|
|||
f'From {empty_shift.start.strftime("%b %d")} to {empty_shift.end.strftime("%b %d")}\n'
|
||||
)
|
||||
text += all_day_text
|
||||
text += f"*All-day* event in {empty_shift.calendar_tz} TZ\n"
|
||||
text += '*All-day* event in "UTC" TZ\n'
|
||||
else:
|
||||
text += f"From {format_datetime_to_slack_with_time(start_timestamp)} to {format_datetime_to_slack_with_time(end_timestamp)} (your TZ)\n"
|
||||
text += f"_From {OnCallSchedule.CALENDAR_TYPE_VERBAL[empty_shift.calendar_type]} calendar_\n"
|
||||
|
|
|
|||
|
|
@ -1,9 +1,10 @@
|
|||
import datetime
|
||||
|
||||
import pytz
|
||||
from celery.utils.log import get_task_logger
|
||||
from django.core.cache import cache
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.schedules.ical_utils import list_of_gaps_in_schedule
|
||||
from apps.slack.utils import format_datetime_to_slack_with_time, post_message_to_channel
|
||||
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
|
||||
|
||||
|
|
@ -79,20 +80,21 @@ def notify_about_gaps_in_schedule(schedule_pk):
|
|||
task_logger.info(f"Tried to notify_about_gaps_in_schedule for non-existing schedule {schedule_pk}")
|
||||
return
|
||||
|
||||
today = timezone.now().date()
|
||||
gaps = list_of_gaps_in_schedule(schedule, today, today + timezone.timedelta(days=7))
|
||||
schedule.gaps_report_sent_at = today
|
||||
now = timezone.now()
|
||||
events = schedule.final_events(now, now + datetime.timedelta(days=7))
|
||||
gaps = [event for event in events if event["is_gap"] and not event["is_empty"]]
|
||||
schedule.gaps_report_sent_at = now.date()
|
||||
|
||||
if len(gaps) != 0:
|
||||
schedule.has_gaps = True
|
||||
text = f"There are time periods that are unassigned in *{schedule.name}* on-call schedule.\n"
|
||||
for idx, gap in enumerate(gaps):
|
||||
if gap.start:
|
||||
start_verbal = format_datetime_to_slack_with_time(gap.start.astimezone(pytz.UTC).timestamp())
|
||||
if gap["start"]:
|
||||
start_verbal = format_datetime_to_slack_with_time(gap["start"].astimezone(pytz.UTC).timestamp())
|
||||
else:
|
||||
start_verbal = "..."
|
||||
if gap.end:
|
||||
end_verbal = format_datetime_to_slack_with_time(gap.end.astimezone(pytz.UTC).timestamp())
|
||||
if gap["end"]:
|
||||
end_verbal = format_datetime_to_slack_with_time(gap["end"].astimezone(pytz.UTC).timestamp())
|
||||
else:
|
||||
end_verbal = "..."
|
||||
text += f"From {start_verbal} to {end_verbal} (your TZ)\n"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,34 @@
|
|||
BEGIN:VCALENDAR
|
||||
PRODID:-//Google Inc//Google Calendar 70.9054//EN
|
||||
VERSION:2.0
|
||||
CALSCALE:GREGORIAN
|
||||
METHOD:PUBLISH
|
||||
X-WR-CALNAME:SRE_Magnet
|
||||
X-WR-TIMEZONE:Europe/Berlin
|
||||
BEGIN:VEVENT
|
||||
DTSTART;VALUE=DATE:20230717
|
||||
DTEND;VALUE=DATE:20230725
|
||||
RRULE:FREQ=WEEKLY;WKST=MO;UNTIL=20230806;INTERVAL=3;BYDAY=MO
|
||||
DTSTAMP:20230714T035605Z
|
||||
UID:eventuid@google.com
|
||||
CREATED:20230626T123728Z
|
||||
LAST-MODIFIED:20230707T095912Z
|
||||
SEQUENCE:1
|
||||
STATUS:CONFIRMED
|
||||
SUMMARY:user
|
||||
TRANSP:TRANSPARENT
|
||||
END:VEVENT
|
||||
BEGIN:VEVENT
|
||||
DTSTART;TZID=Europe/Warsaw:20230717T100000
|
||||
DTEND;TZID=Europe/Warsaw:20230724T100000
|
||||
DTSTAMP:20230714T035605Z
|
||||
UID:eventuid@google.com
|
||||
RECURRENCE-ID;TZID=Europe/Warsaw:19700101T010000
|
||||
CREATED:20230626T123728Z
|
||||
LAST-MODIFIED:20230707T095912Z
|
||||
SEQUENCE:2
|
||||
STATUS:CONFIRMED
|
||||
SUMMARY:user
|
||||
TRANSP:TRANSPARENT
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
|
|
@ -304,6 +304,20 @@ def test_parse_event_uid_fallback():
|
|||
assert source is None
|
||||
|
||||
|
||||
def test_parse_recurrent_event_uid_fallback_modified():
|
||||
# use ical existing UID for imported events
|
||||
event_uid = "someid@google.com"
|
||||
pk, source = parse_event_uid(event_uid, sequence="2")
|
||||
assert pk == f"{event_uid}_2"
|
||||
assert source is None
|
||||
pk, source = parse_event_uid(event_uid, recurrence_id="other-id")
|
||||
assert pk == f"{event_uid}_other-id"
|
||||
assert source is None
|
||||
pk, source = parse_event_uid(event_uid, sequence="3", recurrence_id="other-id")
|
||||
assert pk == f"{event_uid}_3_other-id"
|
||||
assert source is None
|
||||
|
||||
|
||||
def test_is_icals_equal_compare_events():
|
||||
with_vtimezone = textwrap.dedent(
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -0,0 +1,170 @@
|
|||
import datetime
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.api.permissions import LegacyAccessControlRole
|
||||
from apps.schedules.ical_utils import memoized_users_in_ical
|
||||
from apps.schedules.models import CustomOnCallShift, OnCallScheduleWeb
|
||||
from apps.schedules.tasks import notify_about_empty_shifts_in_schedule
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_no_empty_shifts_no_triggering_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - datetime.timedelta(days=7, minutes=1)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
empty_shifts_report_sent_at = schedule.empty_shifts_report_sent_at
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_about_empty_shifts_in_schedule(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
schedule.refresh_from_db()
|
||||
assert empty_shifts_report_sent_at != schedule.empty_shifts_report_sent_at
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_empty_shifts_trigger_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1", role=LegacyAccessControlRole.VIEWER)
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - datetime.timedelta(days=7, minutes=1)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
empty_shifts_report_sent_at = schedule.empty_shifts_report_sent_at
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_about_empty_shifts_in_schedule(schedule.pk)
|
||||
|
||||
assert mock_slack_api_call.called
|
||||
|
||||
schedule.refresh_from_db()
|
||||
assert empty_shifts_report_sent_at != schedule.empty_shifts_report_sent_at
|
||||
assert schedule.has_empty_shifts
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_empty_non_empty_shifts_trigger_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
user2 = make_user(organization=organization, username="user2", role=LegacyAccessControlRole.VIEWER)
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
# non-empty shift has higher priority
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - datetime.timedelta(days=7, minutes=1)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 2,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift_1 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift_1.add_rolling_users([[user1]])
|
||||
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift_2 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift_2.add_rolling_users([[user2]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
empty_shifts_report_sent_at = schedule.empty_shifts_report_sent_at
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_about_empty_shifts_in_schedule(schedule.pk)
|
||||
|
||||
assert mock_slack_api_call.called
|
||||
|
||||
schedule.refresh_from_db()
|
||||
assert empty_shifts_report_sent_at != schedule.empty_shifts_report_sent_at
|
||||
assert schedule.has_empty_shifts
|
||||
|
|
@ -0,0 +1,281 @@
|
|||
import datetime
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from django.utils import timezone
|
||||
|
||||
from apps.schedules.ical_utils import memoized_users_in_ical
|
||||
from apps.schedules.models import CustomOnCallShift, OnCallScheduleWeb
|
||||
from apps.schedules.tasks import notify_about_gaps_in_schedule
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_no_gaps_no_triggering_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - datetime.timedelta(days=7, minutes=1)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
gaps_report_sent_at = schedule.gaps_report_sent_at
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_about_gaps_in_schedule(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
schedule.refresh_from_db()
|
||||
assert gaps_report_sent_at != schedule.gaps_report_sent_at
|
||||
assert schedule.check_gaps_for_next_week() is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_gaps_in_the_past_no_triggering_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date_1 = now - datetime.timedelta(days=1, minutes=1)
|
||||
data = {
|
||||
"start": start_date_1,
|
||||
"rotation_start": start_date_1,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift_1 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift_1.add_rolling_users([[user1]])
|
||||
|
||||
start_date_2 = now - datetime.timedelta(days=5, minutes=1)
|
||||
until_date = start_date_2 + datetime.timedelta(days=3)
|
||||
data = {
|
||||
"start": start_date_2,
|
||||
"rotation_start": start_date_2,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
"until": until_date,
|
||||
}
|
||||
on_call_shift_2 = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift_2.add_rolling_users([[user1]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
gaps_report_sent_at = schedule.gaps_report_sent_at
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_about_gaps_in_schedule(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
schedule.refresh_from_db()
|
||||
assert gaps_report_sent_at != schedule.gaps_report_sent_at
|
||||
assert schedule.check_gaps_for_next_week() is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_gaps_now_trigger_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - datetime.timedelta(days=1, minutes=1)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
"interval": 2,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
gaps_report_sent_at = schedule.gaps_report_sent_at
|
||||
|
||||
assert schedule.has_gaps is False
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_about_gaps_in_schedule(schedule.pk)
|
||||
|
||||
assert mock_slack_api_call.called
|
||||
|
||||
schedule.refresh_from_db()
|
||||
assert gaps_report_sent_at != schedule.gaps_report_sent_at
|
||||
assert schedule.has_gaps is True
|
||||
assert schedule.check_gaps_for_next_week() is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_gaps_near_future_trigger_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
start_date = now - datetime.timedelta(days=7, minutes=1)
|
||||
until_date = now + datetime.timedelta(days=3)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
"until": until_date,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
gaps_report_sent_at = schedule.gaps_report_sent_at
|
||||
|
||||
assert schedule.has_gaps is False
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_about_gaps_in_schedule(schedule.pk)
|
||||
|
||||
assert mock_slack_api_call.called
|
||||
|
||||
schedule.refresh_from_db()
|
||||
assert gaps_report_sent_at != schedule.gaps_report_sent_at
|
||||
assert schedule.has_gaps is True
|
||||
assert schedule.check_gaps_for_next_week() is True
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_gaps_later_than_7_days_no_triggering_notification(
|
||||
make_organization_and_user_with_slack_identities,
|
||||
make_user,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
):
|
||||
organization, _, _, _ = make_organization_and_user_with_slack_identities()
|
||||
user1 = make_user(organization=organization, username="user1")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
now = timezone.now().replace(microsecond=0)
|
||||
|
||||
schedule = make_schedule(
|
||||
organization,
|
||||
schedule_class=OnCallScheduleWeb,
|
||||
name="test_schedule",
|
||||
channel="channel",
|
||||
prev_ical_file_overrides=None,
|
||||
cached_ical_file_overrides=None,
|
||||
)
|
||||
start_date = now - datetime.timedelta(days=7, minutes=1)
|
||||
until_date = now + datetime.timedelta(days=8)
|
||||
data = {
|
||||
"start": start_date,
|
||||
"rotation_start": start_date,
|
||||
"duration": datetime.timedelta(seconds=3600 * 24),
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
"until": until_date,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user1]])
|
||||
schedule.refresh_ical_file()
|
||||
|
||||
gaps_report_sent_at = schedule.gaps_report_sent_at
|
||||
|
||||
with patch("apps.slack.slack_client.SlackClientWithErrorHandling.api_call") as mock_slack_api_call:
|
||||
notify_about_gaps_in_schedule(schedule.pk)
|
||||
|
||||
assert not mock_slack_api_call.called
|
||||
|
||||
schedule.refresh_from_db()
|
||||
assert gaps_report_sent_at != schedule.gaps_report_sent_at
|
||||
assert schedule.check_gaps_for_next_week() is False
|
||||
|
|
@ -299,13 +299,13 @@ def test_filter_events_ical_all_day(make_organization, make_user_for_organizatio
|
|||
True,
|
||||
["@Alex"],
|
||||
datetime.datetime(2021, 1, 27, 0, 0, tzinfo=pytz.UTC),
|
||||
datetime.datetime(2021, 1, 27, 23, 59, 59, 999999, tzinfo=pytz.UTC),
|
||||
datetime.datetime(2021, 1, 27, 23, 59, 59, tzinfo=pytz.UTC),
|
||||
),
|
||||
(
|
||||
True,
|
||||
["@Alice"],
|
||||
datetime.datetime(2021, 1, 27, 0, 0, tzinfo=pytz.UTC),
|
||||
datetime.datetime(2021, 1, 28, 23, 59, 59, 999999, tzinfo=pytz.UTC),
|
||||
datetime.datetime(2021, 1, 28, 23, 59, 59, tzinfo=pytz.UTC),
|
||||
),
|
||||
(
|
||||
False,
|
||||
|
|
@ -2169,21 +2169,36 @@ def test_swap_request_split_both(
|
|||
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_BEFORE", 0):
|
||||
schedule.refresh_ical_final_schedule()
|
||||
assert schedule.cached_ical_final_schedule
|
||||
expected_events = [
|
||||
# start, end, user
|
||||
(start, start + duration, user.username), # today shift unchanged
|
||||
(start + timezone.timedelta(days=1), start + timezone.timedelta(days=1, hours=1), user.username), # first split
|
||||
(
|
||||
start + timezone.timedelta(days=1, hours=1),
|
||||
start + timezone.timedelta(days=1, hours=2),
|
||||
other_user.username if swap_taken else user.username,
|
||||
), # second split
|
||||
(
|
||||
start + timezone.timedelta(days=1, hours=2),
|
||||
start + timezone.timedelta(days=1, hours=3),
|
||||
user.username,
|
||||
), # third split
|
||||
]
|
||||
if swap_taken:
|
||||
expected_events = [
|
||||
# start, end, user
|
||||
(start, start + duration, user.username), # today shift unchanged
|
||||
(
|
||||
start + timezone.timedelta(days=1),
|
||||
start + timezone.timedelta(days=1, hours=1),
|
||||
user.username,
|
||||
), # first split
|
||||
(
|
||||
start + timezone.timedelta(days=1, hours=1),
|
||||
start + timezone.timedelta(days=1, hours=2),
|
||||
other_user.username if swap_taken else user.username,
|
||||
), # second split
|
||||
(
|
||||
start + timezone.timedelta(days=1, hours=2),
|
||||
start + timezone.timedelta(days=1, hours=3),
|
||||
user.username,
|
||||
), # third split
|
||||
]
|
||||
else:
|
||||
expected_events = [
|
||||
# start, end, user
|
||||
(start, start + duration, user.username), # today shift unchanged
|
||||
(
|
||||
start + timezone.timedelta(days=1),
|
||||
start + timezone.timedelta(days=1) + duration,
|
||||
user.username,
|
||||
), # no split
|
||||
]
|
||||
calendar = icalendar.Calendar.from_ical(schedule.cached_ical_final_schedule)
|
||||
for component in calendar.walk():
|
||||
if component.name == ICAL_COMPONENT_VEVENT:
|
||||
|
|
@ -2195,6 +2210,83 @@ def test_swap_request_split_both(
|
|||
assert event in expected_events
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize("swap_taken", [False, True])
|
||||
def test_swap_request_ignore_untaken(
|
||||
make_organization,
|
||||
make_user_for_organization,
|
||||
make_schedule,
|
||||
make_on_call_shift,
|
||||
make_shift_swap_request,
|
||||
swap_taken,
|
||||
):
|
||||
organization = make_organization()
|
||||
user = make_user_for_organization(organization)
|
||||
other_user = make_user_for_organization(organization)
|
||||
|
||||
schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)
|
||||
today = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
|
||||
start = today + timezone.timedelta(hours=12)
|
||||
duration = timezone.timedelta(hours=3)
|
||||
data = {
|
||||
"start": start,
|
||||
"rotation_start": start,
|
||||
"duration": duration,
|
||||
"priority_level": 1,
|
||||
"frequency": CustomOnCallShift.FREQUENCY_DAILY,
|
||||
"schedule": schedule,
|
||||
}
|
||||
on_call_shift = make_on_call_shift(
|
||||
organization=organization, shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT, **data
|
||||
)
|
||||
on_call_shift.add_rolling_users([[user]])
|
||||
|
||||
tomorrow = today + timezone.timedelta(days=1)
|
||||
# setup swap request
|
||||
swap_request = make_shift_swap_request(
|
||||
schedule,
|
||||
user,
|
||||
swap_start=tomorrow + timezone.timedelta(hours=13),
|
||||
swap_end=tomorrow + timezone.timedelta(hours=14),
|
||||
)
|
||||
if swap_taken:
|
||||
swap_request.take(other_user)
|
||||
|
||||
# set flag to ignore untaken swaps
|
||||
events = schedule.filter_events(today, today + timezone.timedelta(days=2), ignore_untaken_swaps=True)
|
||||
|
||||
if swap_taken:
|
||||
expected = [
|
||||
# start, end, swap requested
|
||||
(start, start + duration, False), # today shift unchanged
|
||||
(start + timezone.timedelta(days=1), start + timezone.timedelta(days=1, hours=1), False), # first split
|
||||
(
|
||||
start + timezone.timedelta(days=1, hours=1),
|
||||
start + timezone.timedelta(days=1, hours=2),
|
||||
True,
|
||||
), # second split
|
||||
(
|
||||
start + timezone.timedelta(days=1, hours=2),
|
||||
start + timezone.timedelta(days=1, hours=3),
|
||||
False,
|
||||
), # third split
|
||||
]
|
||||
else:
|
||||
expected = [
|
||||
# start, end, swap requested
|
||||
(start, start + duration, False), # today shift unchanged
|
||||
(start + timezone.timedelta(days=1), start + timezone.timedelta(days=1) + duration, False), # no split
|
||||
]
|
||||
|
||||
returned = [(e["start"], e["end"], bool(e["users"][0].get("swap_request", False))) for e in events]
|
||||
assert returned == expected
|
||||
# check swap request details
|
||||
if swap_taken:
|
||||
assert events[2]["users"][0]["swap_request"]["pk"] == swap_request.public_primary_key
|
||||
assert events[2]["users"][0]["pk"] == other_user.public_primary_key
|
||||
assert events[2]["users"][0]["swap_request"]["user"]["pk"] == user.public_primary_key
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
@pytest.mark.parametrize("swap_taken", [False, True])
|
||||
def test_swap_request_whole_shift(
|
||||
|
|
@ -2385,3 +2477,22 @@ def test_swap_request_no_changes(
|
|||
|
||||
events_after = schedule.filter_events(today, today + timezone.timedelta(days=2))
|
||||
assert events_before == events_after
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_filter_events_ical_duplicated_uid(make_organization, make_user_for_organization, make_schedule, get_ical):
|
||||
calendar = get_ical("modified_recurring_event.ics")
|
||||
organization = make_organization()
|
||||
schedule = make_schedule(organization, schedule_class=OnCallScheduleCalendar)
|
||||
schedule.cached_ical_file_primary = calendar.to_ical()
|
||||
make_user_for_organization(organization, username="user")
|
||||
# clear users pks <-> organization cache (persisting between tests)
|
||||
memoized_users_in_ical.cache_clear()
|
||||
|
||||
datetime_start = datetime.datetime(2023, 7, 17, 0, 0, tzinfo=pytz.UTC)
|
||||
datetime_end = datetime_start + datetime.timedelta(days=7)
|
||||
events = schedule.final_events(datetime_start, datetime_end)
|
||||
|
||||
assert len(events) == 2
|
||||
assert events[0]["shift"]["pk"] == "eventuid@google.com_1"
|
||||
assert events[1]["shift"]["pk"] == "eventuid@google.com_2_1970-01-01T01:00:00+01:00"
|
||||
|
|
|
|||
|
|
@ -46,6 +46,14 @@ def test_status_taken(shift_swap_request_setup) -> None:
|
|||
assert ssr.status == ShiftSwapRequest.Statuses.TAKEN
|
||||
assert ssr.is_taken is True
|
||||
|
||||
# taken in the past it's still taken
|
||||
now = timezone.now()
|
||||
ssr.swap_start = now - timezone.timedelta(days=2)
|
||||
ssr.save()
|
||||
assert ssr.status == ShiftSwapRequest.Statuses.TAKEN
|
||||
assert ssr.is_taken is True
|
||||
assert ssr.is_past_due is False
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_status_past_due(shift_swap_request_setup) -> None:
|
||||
|
|
|
|||
|
|
@ -80,27 +80,34 @@ class SlackUserGroup(models.Model):
|
|||
@property
|
||||
def oncall_slack_user_identities(self):
|
||||
users = set(user for schedule in self.oncall_schedules.get_oncall_users().values() for user in schedule)
|
||||
slack_user_identities = [user.slack_user_identity for user in users if user.slack_user_identity is not None]
|
||||
slack_user_identities = []
|
||||
for user in users:
|
||||
if user.slack_user_identity is not None:
|
||||
slack_user_identities.append(user.slack_user_identity)
|
||||
else:
|
||||
logger.warning(f"User {user.pk} does not have a Slack account connected")
|
||||
|
||||
return slack_user_identities
|
||||
|
||||
def update_oncall_members(self):
|
||||
slack_ids = [slack_user_identity.slack_id for slack_user_identity in self.oncall_slack_user_identities]
|
||||
logger.info(f"Updating usergroup {self.slack_id}, members {slack_ids}")
|
||||
|
||||
# Slack doesn't allow user groups to be empty
|
||||
if len(slack_ids) == 0:
|
||||
logger.info(f"Skipping usergroup {self.slack_id}, the list of members is empty")
|
||||
return
|
||||
|
||||
# Do not send requests to Slack API in case user group is populated correctly already
|
||||
if self.members is not None and set(self.members) == set(slack_ids):
|
||||
logger.info(f"Skipping usergroup {self.slack_id}, already populated correctly")
|
||||
return
|
||||
|
||||
try:
|
||||
self.update_members(slack_ids)
|
||||
except SlackAPIException as e:
|
||||
if e.response["error"] == "permission_denied":
|
||||
logger.warning(
|
||||
"Could not update the usergroup with Slack ID: {} due to permission_denied".format(self.slack_id)
|
||||
)
|
||||
logger.warning(f"Could not update usergroup {self.slack_id} due to permission_denied")
|
||||
|
||||
def update_members(self, slack_ids):
|
||||
sc = SlackClientWithErrorHandling(self.slack_team_identity.bot_access_token)
|
||||
|
|
|
|||
|
|
@ -515,7 +515,7 @@ def _get_organization_select(
|
|||
{
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": f"{org.org_title}",
|
||||
"text": f"{org.org_title} ({org.stack_slug})",
|
||||
"emoji": True,
|
||||
},
|
||||
"value": f"{org.pk}",
|
||||
|
|
|
|||
|
|
@ -169,9 +169,10 @@ class EditScheduleShiftNotifyStep(scenario_step.ScenarioStep):
|
|||
now_text = "Inviting <!channel>. No one on-call now!\n"
|
||||
elif schedule.notify_empty_oncall == schedule.NotifyEmptyOnCall.PREV:
|
||||
user_ids: typing.List[str] = []
|
||||
for item in json.loads(schedule.current_shifts).values():
|
||||
user_ids.extend(item.get("users", []))
|
||||
prev_users = organization.users.filter(id__in=user_ids)
|
||||
for item in json.loads(schedule.current_shifts):
|
||||
user_ids_from_shift = [u["pk"] for u in item.get("users", [])]
|
||||
user_ids.extend(user_ids_from_shift)
|
||||
prev_users = organization.users.filter(public_primary_key__in=user_ids)
|
||||
users_verbal = " ".join(
|
||||
[f"{user.get_username_with_slack_verbal(mention=True)}" for user in prev_users]
|
||||
)
|
||||
|
|
@ -183,6 +184,8 @@ class EditScheduleShiftNotifyStep(scenario_step.ScenarioStep):
|
|||
now_text = ""
|
||||
for shift in new_shifts:
|
||||
users = shift["users"]
|
||||
user_ids_from_shift = [u["pk"] for u in users]
|
||||
users = organization.users.filter(public_primary_key__in=user_ids_from_shift)
|
||||
now_text += cls.get_ical_shift_notification_text(shift, schedule.mention_oncall_start, users)
|
||||
now_text = "*New on-call shift:*\n" + now_text
|
||||
|
||||
|
|
@ -195,6 +198,8 @@ class EditScheduleShiftNotifyStep(scenario_step.ScenarioStep):
|
|||
next_text = ""
|
||||
for shift in next_shifts:
|
||||
users = shift["users"]
|
||||
user_ids_from_shift = [u["pk"] for u in users]
|
||||
users = organization.users.filter(public_primary_key__in=user_ids_from_shift)
|
||||
next_text += cls.get_ical_shift_notification_text(shift, schedule.mention_oncall_next, users)
|
||||
next_text = "\n*Next on-call shift:*\n" + next_text
|
||||
|
||||
|
|
@ -242,30 +247,29 @@ class EditScheduleShiftNotifyStep(scenario_step.ScenarioStep):
|
|||
|
||||
@classmethod
|
||||
def get_ical_shift_notification_text(cls, shift, mention, users) -> str:
|
||||
if shift["all_day"]:
|
||||
notification = " ".join([f"{user.get_username_with_slack_verbal(mention=mention)}" for user in users])
|
||||
user_verbal = shift["users"][0].get_username_with_slack_verbal(
|
||||
mention=False,
|
||||
)
|
||||
if shift["start"].day == shift["end"].day:
|
||||
all_day_text = shift["start"].strftime("%b %d")
|
||||
notification = ""
|
||||
for user in users:
|
||||
if shift["all_day"]:
|
||||
user_notification = user.get_username_with_slack_verbal(mention=mention)
|
||||
if shift["start"].day == shift["end"].day:
|
||||
all_day_text = shift["start"].strftime("%b %d")
|
||||
else:
|
||||
all_day_text = f'From {shift["start"].strftime("%b %d")} to {shift["end"].strftime("%b %d")}'
|
||||
user_notification += f' {all_day_text} _All-day event in timezone "UTC"_\n'
|
||||
else:
|
||||
all_day_text = f'From {shift["start"].strftime("%b %d")} to {shift["end"].strftime("%b %d")}'
|
||||
notification += (
|
||||
f" {all_day_text} _All-day event in *{user_verbal}'s* timezone_ " f'- {shift["users"][0].timezone}.\n'
|
||||
)
|
||||
else:
|
||||
shift_start_timestamp = shift["start"].astimezone(pytz.UTC).timestamp()
|
||||
shift_end_timestamp = shift["end"].astimezone(pytz.UTC).timestamp()
|
||||
shift_start_timestamp = shift["start"].astimezone(pytz.UTC).timestamp()
|
||||
shift_end_timestamp = shift["end"].astimezone(pytz.UTC).timestamp()
|
||||
|
||||
notification = (
|
||||
" ".join([f"{user.get_username_with_slack_verbal(mention=mention)}" for user in users])
|
||||
+ f" from {format_datetime_to_slack_with_time(shift_start_timestamp)}"
|
||||
f" to {format_datetime_to_slack_with_time(shift_end_timestamp)}\n"
|
||||
)
|
||||
priority = shift.get("priority", 0) - shift.get("priority_increased_by", 0)
|
||||
if priority != 0:
|
||||
notification = f"[L{shift.get('priority')}] {notification}"
|
||||
user_notification = (
|
||||
user.get_username_with_slack_verbal(mention=mention)
|
||||
+ f" from {format_datetime_to_slack_with_time(shift_start_timestamp)}"
|
||||
f" to {format_datetime_to_slack_with_time(shift_end_timestamp)}\n"
|
||||
)
|
||||
if not shift["is_override"]:
|
||||
priority = shift.get("priority_level", 0) or 0
|
||||
if priority != 0:
|
||||
user_notification = f"[L{priority}] {user_notification}"
|
||||
notification += user_notification
|
||||
return notification
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -24,7 +24,9 @@ from apps.slack.scenarios.paging import (
|
|||
OnPagingUserChange,
|
||||
Policy,
|
||||
StartDirectPaging,
|
||||
_get_organization_select,
|
||||
)
|
||||
from apps.user_management.models import Organization
|
||||
|
||||
|
||||
def make_slack_payload(
|
||||
|
|
@ -401,3 +403,13 @@ def test_remove_schedule(make_organization_and_user_with_slack_identities, make_
|
|||
metadata = json.loads(mock_slack_api_call.call_args.kwargs["view"]["private_metadata"])
|
||||
assert metadata[DataKey.SCHEDULES] == {}
|
||||
assert metadata[DataKey.USERS] == {str(user.pk): Policy.DEFAULT}
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
def test_get_organization_select(make_organization):
|
||||
organization = make_organization(org_title="Organization", stack_slug="stack_slug")
|
||||
select = _get_organization_select(Organization.objects.filter(pk=organization.pk), organization, "test")
|
||||
|
||||
assert len(select["element"]["options"]) == 1
|
||||
assert select["element"]["options"][0]["value"] == str(organization.pk)
|
||||
assert select["element"]["options"][0]["text"]["text"] == "Organization (stack_slug)"
|
||||
|
|
|
|||
0
engine/common/migrations/__init__.py
Normal file
0
engine/common/migrations/__init__.py
Normal file
79
engine/common/migrations/remove_field.py
Normal file
79
engine/common/migrations/remove_field.py
Normal file
|
|
@ -0,0 +1,79 @@
|
|||
from django.db import connection
|
||||
from django.db.migrations import RemoveField
|
||||
from django.db.migrations.loader import MigrationLoader
|
||||
|
||||
|
||||
class RemoveFieldState(RemoveField):
|
||||
"""
|
||||
Remove field from Django's migration state, but not from the database.
|
||||
This is essentially the same as RemoveField, but database_forwards and database_backwards methods are modified
|
||||
to do nothing.
|
||||
"""
|
||||
|
||||
def database_forwards(self, app_label, schema_editor, from_state, to_state):
|
||||
pass
|
||||
|
||||
def database_backwards(self, app_label, schema_editor, from_state, to_state):
|
||||
pass
|
||||
|
||||
def describe(self):
|
||||
return f"{super().describe()} (state)"
|
||||
|
||||
@property
|
||||
def migration_name_fragment(self):
|
||||
return f"{super().migration_name_fragment}_state"
|
||||
|
||||
|
||||
class RemoveFieldDB(RemoveField):
|
||||
"""
|
||||
Remove field from the database, but not from Django's migration state.
|
||||
This is implemented as a custom operation, because Django's RemoveField operation does not support
|
||||
removing fields from the database after it has been removed from the state. The workaround is to use the state
|
||||
that was in effect before the field was removed from the state (i.e. just before the RemoveFieldState migration).
|
||||
"""
|
||||
|
||||
def __init__(self, model_name, name, remove_state_migration):
|
||||
"""
|
||||
Specifying "remove_state_migration" allows database operations to run against a particular historical state.
|
||||
Example: remove_state_migration = ("alerts", "0014_alertreceivechannel_restricted_at") will "trick" Django
|
||||
into thinking that the last applied migration in the "alerts" app is 0013.
|
||||
"""
|
||||
super().__init__(model_name, name)
|
||||
self.remove_state_migration = remove_state_migration
|
||||
|
||||
def deconstruct(self):
|
||||
"""Update serialized representation of the operation."""
|
||||
deconstructed = super().deconstruct()
|
||||
return (
|
||||
deconstructed[0],
|
||||
deconstructed[1],
|
||||
deconstructed[2] | {"remove_state_migration": self.remove_state_migration}
|
||||
)
|
||||
|
||||
def state_forwards(self, app_label, state):
|
||||
"""Skip any state changes."""
|
||||
pass
|
||||
|
||||
def database_forwards(self, app_label, schema_editor, from_state, to_state):
|
||||
# use historical state instead of what Django provides
|
||||
from_state = self.state_before_remove_state_migration
|
||||
|
||||
super().database_forwards(app_label, schema_editor, from_state, to_state)
|
||||
|
||||
def database_backwards(self, app_label, schema_editor, from_state, to_state):
|
||||
# use historical state instead of what Django provides
|
||||
to_state = self.state_before_remove_state_migration
|
||||
|
||||
super().database_backwards(app_label, schema_editor, from_state, to_state)
|
||||
|
||||
def describe(self):
|
||||
return f"{super().describe()} (db)"
|
||||
|
||||
@property
|
||||
def migration_name_fragment(self):
|
||||
return f"{super().migration_name_fragment}_db"
|
||||
|
||||
@property
|
||||
def state_before_remove_state_migration(self):
|
||||
"""Get project state just before migration "remove_state_migration" was applied."""
|
||||
return MigrationLoader(connection).project_state(self.remove_state_migration, at_end=False)
|
||||
|
|
@ -46,14 +46,7 @@ source_link = None
|
|||
|
||||
grouping_id = '{{ payload.get("alert_uid", "")}}'
|
||||
|
||||
resolve_condition = """\
|
||||
{%- if "is_amixr_heartbeat_restored" in payload -%}
|
||||
{# We don't know the payload format from your integration. #}
|
||||
{# The heartbeat alerts will go here so we check for our own key #}
|
||||
{{ payload["is_amixr_heartbeat_restored"] }}
|
||||
{%- else -%}
|
||||
{{ payload.get("state", "").upper() == "OK" }}
|
||||
{%- endif %}"""
|
||||
resolve_condition = """{{ payload.get("state", "").upper() == "OK" }}"""
|
||||
|
||||
acknowledge_condition = None
|
||||
|
||||
|
|
|
|||
|
|
@ -45,16 +45,15 @@ telegram_image_url = slack_image_url
|
|||
|
||||
source_link = "{{ payload.url }}"
|
||||
|
||||
grouping_id = "{{ payload }}"
|
||||
grouping_id = """\
|
||||
{% if "is_oncall_heartbeat" in payload %}
|
||||
{# Case for heartbeat alerts generated by Grafana OnCall #}
|
||||
{{- payload.alert_uid }}
|
||||
{% else %}
|
||||
{{- payload }}
|
||||
{% endif %}"""
|
||||
|
||||
resolve_condition = """\
|
||||
{%- if "is_amixr_heartbeat_restored" in payload -%}
|
||||
{# We don't know the payload format from your integration. #}
|
||||
{# The heartbeat alerts will go here so we check for our own key #}
|
||||
{{ payload["is_amixr_heartbeat_restored"] }}
|
||||
{%- else -%}
|
||||
{{ payload.get("state", "").upper() == "OK" }}
|
||||
{%- endif %}"""
|
||||
resolve_condition = """{{ payload.get("state", "").upper() == "OK" }}"""
|
||||
acknowledge_condition = None
|
||||
|
||||
example_payload = {"message": "This alert was sent by user for demonstration purposes"}
|
||||
|
|
|
|||
64
engine/engine/management/commands/remove_field.py
Normal file
64
engine/engine/management/commands/remove_field.py
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
from django.core.management import BaseCommand
|
||||
from django.db import connection
|
||||
from django.db.migrations import Migration
|
||||
from django.db.migrations.autodetector import MigrationAutodetector
|
||||
from django.db.migrations.loader import MigrationLoader
|
||||
from django.db.migrations.writer import MigrationWriter
|
||||
|
||||
from common.migrations.remove_field import RemoveFieldDB, RemoveFieldState
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""
|
||||
Generate two migrations that remove a field from the state and the database separately.
|
||||
This allows removing a field in 2 separate releases and avoid downtime.
|
||||
"""
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"args", nargs=3, help="app_label model_name field_name, example: alerts AlertReceiveChannel restricted_at"
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
app_label, model_name, field_name = args
|
||||
|
||||
# Check that the app, the model, and the field to be removed exist
|
||||
project_state = MigrationLoader(connection).project_state()
|
||||
model_state = project_state.apps.get_model(app_label, model_name)
|
||||
model_state._meta.get_field(field_name)
|
||||
|
||||
# Write migration that removes the field from the state
|
||||
remove_state_migration = self.write_operation(
|
||||
app_label, RemoveFieldState(model_name=model_name, name=field_name), project_state
|
||||
)
|
||||
|
||||
# Write migration that removes the field from the database
|
||||
self.write_operation(
|
||||
app_label,
|
||||
RemoveFieldDB(
|
||||
model_name=model_name, name=field_name, remove_state_migration=(app_label, remove_state_migration.name)
|
||||
),
|
||||
project_state,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def write_operation(app_label, operation, project_state):
|
||||
"""
|
||||
Some Django magic to write a single-operation migration to a file, so it's similar to what Django would generate
|
||||
when running the "makemigrations" command.
|
||||
"""
|
||||
|
||||
migration_class = type("Migration", (Migration,), {"operations": [operation]})
|
||||
|
||||
changes = MigrationAutodetector(project_state, project_state).arrange_for_graph(
|
||||
changes={app_label: [migration_class(None, app_label)]},
|
||||
graph=MigrationLoader(connection).graph,
|
||||
migration_name=operation.migration_name_fragment,
|
||||
)
|
||||
|
||||
migration = changes[app_label][0]
|
||||
writer = MigrationWriter(migration)
|
||||
with open(writer.path, "w", encoding="utf-8") as file:
|
||||
file.write(writer.as_string())
|
||||
|
||||
return migration
|
||||
|
|
@ -64,7 +64,6 @@ FEATURE_SLACK_INTEGRATION_ENABLED = getenv_boolean("FEATURE_SLACK_INTEGRATION_EN
|
|||
FEATURE_MULTIREGION_ENABLED = getenv_boolean("FEATURE_MULTIREGION_ENABLED", default=False)
|
||||
FEATURE_INBOUND_EMAIL_ENABLED = getenv_boolean("FEATURE_INBOUND_EMAIL_ENABLED", default=False)
|
||||
FEATURE_PROMETHEUS_EXPORTER_ENABLED = getenv_boolean("FEATURE_PROMETHEUS_EXPORTER_ENABLED", default=False)
|
||||
FEATURE_SHIFT_SWAPS_ENABLED = getenv_boolean("FEATURE_SHIFT_SWAPS_ENABLED", default=False)
|
||||
FEATURE_GRAFANA_ALERTING_V2_ENABLED = getenv_boolean("FEATURE_GRAFANA_ALERTING_V2_ENABLED", default=False)
|
||||
GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED = getenv_boolean("GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED", default=True)
|
||||
GRAFANA_CLOUD_NOTIFICATIONS_ENABLED = getenv_boolean("GRAFANA_CLOUD_NOTIFICATIONS_ENABLED", default=True)
|
||||
|
|
@ -489,6 +488,11 @@ CELERY_BEAT_SCHEDULE = {
|
|||
"schedule": 60 * 30,
|
||||
"args": (),
|
||||
},
|
||||
"check_heartbeats": {
|
||||
"task": "apps.heartbeat.tasks.check_heartbeats",
|
||||
"schedule": crontab(minute="*/2"), # every 2 minutes
|
||||
"args": (),
|
||||
},
|
||||
}
|
||||
|
||||
if ESCALATION_AUDITOR_ENABLED:
|
||||
|
|
|
|||
|
|
@ -40,5 +40,3 @@ TWILIO_ACCOUNT_SID = "dummy_twilio_account_sid"
|
|||
TWILIO_AUTH_TOKEN = "dummy_twilio_auth_token"
|
||||
|
||||
EXTRA_MESSAGING_BACKENDS = [("apps.base.tests.messaging_backend.TestOnlyBackend", 42)]
|
||||
|
||||
FEATURE_SHIFT_SWAPS_ENABLED = True
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import hash from 'object-hash';
|
|||
|
||||
import { ScheduleFiltersType } from 'components/ScheduleFilters/ScheduleFilters.types';
|
||||
import ScheduleSlot from 'containers/ScheduleSlot/ScheduleSlot';
|
||||
import { Schedule, Event, RotationFormLiveParams } from 'models/schedule/schedule.types';
|
||||
import { Schedule, Event, RotationFormLiveParams, Shift, ShiftSwap } from 'models/schedule/schedule.types';
|
||||
import { Timezone } from 'models/timezone/timezone.types';
|
||||
|
||||
import RotationTutorial from './RotationTutorial';
|
||||
|
|
@ -26,11 +26,15 @@ interface RotationProps {
|
|||
events: Event[];
|
||||
onClick?: (start: dayjs.Dayjs, end: dayjs.Dayjs) => void;
|
||||
handleAddOverride?: (start: dayjs.Dayjs, end: dayjs.Dayjs) => void;
|
||||
handleAddShiftSwap?: (id: 'new', params: Partial<ShiftSwap>) => void;
|
||||
onShiftSwapClick?: (swapId: ShiftSwap['id']) => void;
|
||||
days?: number;
|
||||
transparent?: boolean;
|
||||
tutorialParams?: RotationFormLiveParams;
|
||||
simplified?: boolean;
|
||||
filters?: ScheduleFiltersType;
|
||||
getColor?: (shiftId: Shift['id']) => string;
|
||||
onSlotClick?: (event: Event) => void;
|
||||
}
|
||||
|
||||
const Rotation: FC<RotationProps> = (props) => {
|
||||
|
|
@ -39,14 +43,18 @@ const Rotation: FC<RotationProps> = (props) => {
|
|||
scheduleId,
|
||||
startMoment,
|
||||
currentTimezone,
|
||||
color,
|
||||
color: propsColor,
|
||||
days = 7,
|
||||
transparent = false,
|
||||
tutorialParams,
|
||||
onClick,
|
||||
handleAddOverride,
|
||||
handleAddShiftSwap,
|
||||
onShiftSwapClick,
|
||||
simplified,
|
||||
filters,
|
||||
getColor,
|
||||
onSlotClick,
|
||||
} = props;
|
||||
|
||||
const [animate, _setAnimate] = useState<boolean>(true);
|
||||
|
|
@ -72,6 +80,28 @@ const Rotation: FC<RotationProps> = (props) => {
|
|||
};
|
||||
};
|
||||
|
||||
const getAddShiftSwapClickHandler = (scheduleEvent: Event) => {
|
||||
return (event: React.MouseEvent<HTMLDivElement>) => {
|
||||
event.stopPropagation();
|
||||
|
||||
handleAddShiftSwap('new', {
|
||||
swap_start: scheduleEvent.start,
|
||||
swap_end: scheduleEvent.end,
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
const getSlotClickHandler = (event: Event) => {
|
||||
if (!onSlotClick) {
|
||||
return undefined;
|
||||
}
|
||||
return (e) => {
|
||||
e.stopPropagation();
|
||||
|
||||
onSlotClick(event);
|
||||
};
|
||||
};
|
||||
|
||||
const x = useMemo(() => {
|
||||
if (!events || !events.length) {
|
||||
return 0;
|
||||
|
|
@ -85,7 +115,7 @@ const Rotation: FC<RotationProps> = (props) => {
|
|||
}, [events]);
|
||||
|
||||
return (
|
||||
<div className={cx('root')} onClick={handleRotationClick}>
|
||||
<div className={cx('root')} onClick={onClick && handleRotationClick}>
|
||||
<div className={cx('timeline')}>
|
||||
{tutorialParams && <RotationTutorial startMoment={startMoment} {...tutorialParams} />}
|
||||
{events ? (
|
||||
|
|
@ -102,10 +132,13 @@ const Rotation: FC<RotationProps> = (props) => {
|
|||
event={event}
|
||||
startMoment={startMoment}
|
||||
currentTimezone={currentTimezone}
|
||||
color={color}
|
||||
color={propsColor || getColor(event.shift?.pk)}
|
||||
handleAddOverride={getAddOverrideClickHandler(event)}
|
||||
handleAddShiftSwap={getAddShiftSwapClickHandler(event)}
|
||||
onShiftSwapClick={onShiftSwapClick}
|
||||
simplified={simplified}
|
||||
filters={filters}
|
||||
onClick={getSlotClickHandler(event)}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
|
|
|
|||
|
|
@ -40,6 +40,10 @@
|
|||
width: 200px;
|
||||
}
|
||||
|
||||
.user-item {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.user-title {
|
||||
padding: 6px 10px;
|
||||
color: #fff;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,8 @@
|
|||
.root {
|
||||
display: block;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.fields {
|
||||
width: 100%;
|
||||
}
|
||||
209
grafana-plugin/src/containers/RotationForm/ShiftSwapForm.tsx
Normal file
209
grafana-plugin/src/containers/RotationForm/ShiftSwapForm.tsx
Normal file
|
|
@ -0,0 +1,209 @@
|
|||
import React, { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
|
||||
import { Button, Field, HorizontalGroup, IconButton, Input, TextArea, VerticalGroup } from '@grafana/ui';
|
||||
import cn from 'classnames/bind';
|
||||
import dayjs from 'dayjs';
|
||||
import Draggable from 'react-draggable';
|
||||
|
||||
import Modal from 'components/Modal/Modal';
|
||||
import Tag from 'components/Tag/Tag';
|
||||
import Text from 'components/Text/Text';
|
||||
import WithConfirm from 'components/WithConfirm/WithConfirm';
|
||||
import { WithPermissionControlTooltip } from 'containers/WithPermissionControl/WithPermissionControlTooltip';
|
||||
import { SHIFT_SWAP_COLOR } from 'models/schedule/schedule.helpers';
|
||||
import { Schedule, ShiftSwap } from 'models/schedule/schedule.types';
|
||||
import { getTzOffsetString } from 'models/timezone/timezone.helpers';
|
||||
import { Timezone } from 'models/timezone/timezone.types';
|
||||
import { getUTCString } from 'pages/schedule/Schedule.helpers';
|
||||
import { useStore } from 'state/useStore';
|
||||
import { UserActions } from 'utils/authorization';
|
||||
|
||||
import DateTimePicker from './parts/DateTimePicker';
|
||||
import UserItem from './parts/UserItem';
|
||||
|
||||
import styles from './RotationForm.module.css';
|
||||
|
||||
const cx = cn.bind(styles);
|
||||
|
||||
interface ShiftSwapFormProps {
|
||||
id: ShiftSwap['id'] | 'new';
|
||||
scheduleId: Schedule['id'];
|
||||
params: Partial<ShiftSwap>;
|
||||
currentTimezone: Timezone;
|
||||
|
||||
onUpdate: () => void;
|
||||
|
||||
onHide: () => void;
|
||||
}
|
||||
|
||||
const ShiftSwapForm = (props: ShiftSwapFormProps) => {
|
||||
const { onUpdate, onHide, id, scheduleId, params: defaultParams, currentTimezone } = props;
|
||||
|
||||
const [shiftSwap, setShiftSwap] = useState({ ...defaultParams });
|
||||
|
||||
const store = useStore();
|
||||
const { scheduleStore } = store;
|
||||
|
||||
useEffect(() => {
|
||||
if (id !== 'new') {
|
||||
scheduleStore.loadShiftSwap(id).then(setShiftSwap);
|
||||
}
|
||||
}, [id]);
|
||||
|
||||
useEffect(() => {
|
||||
if (defaultParams) {
|
||||
setShiftSwap({ ...shiftSwap, swap_start: defaultParams.swap_start, swap_end: defaultParams.swap_end });
|
||||
}
|
||||
}, [defaultParams]);
|
||||
|
||||
const handleShiftSwapStartChange = useCallback(
|
||||
(value) => {
|
||||
setShiftSwap({ ...shiftSwap, swap_start: getUTCString(value) });
|
||||
},
|
||||
[shiftSwap]
|
||||
);
|
||||
|
||||
const handleShiftSwapEndChange = useCallback(
|
||||
(value) => {
|
||||
setShiftSwap({ ...shiftSwap, swap_end: getUTCString(value) });
|
||||
},
|
||||
[shiftSwap]
|
||||
);
|
||||
|
||||
const handleDescriptionChange = useCallback(
|
||||
(event) => {
|
||||
setShiftSwap({ ...shiftSwap, description: event.target.value });
|
||||
},
|
||||
[shiftSwap]
|
||||
);
|
||||
|
||||
const handleCreate = useCallback(async () => {
|
||||
await scheduleStore.createShiftSwap({ schedule: scheduleId, ...shiftSwap });
|
||||
|
||||
onHide();
|
||||
onUpdate();
|
||||
}, [shiftSwap]);
|
||||
|
||||
const handleDelete = useCallback(async () => {
|
||||
await scheduleStore.deleteShiftSwap(id);
|
||||
|
||||
onHide();
|
||||
onUpdate();
|
||||
}, [id]);
|
||||
|
||||
const handleTake = useCallback(async () => {
|
||||
await scheduleStore.takeShiftSwap(id);
|
||||
|
||||
onHide();
|
||||
onUpdate();
|
||||
}, [id]);
|
||||
|
||||
const beneficiaryName = shiftSwap?.beneficiary && store.userStore.items[shiftSwap.beneficiary]?.name;
|
||||
|
||||
const isNew = id === 'new';
|
||||
const isPastDue = useMemo(() => shiftSwap && dayjs(shiftSwap.swap_start).isBefore(dayjs()), [shiftSwap]);
|
||||
|
||||
return (
|
||||
<Modal
|
||||
top="0"
|
||||
isOpen
|
||||
width="430px"
|
||||
onDismiss={onHide}
|
||||
contentElement={(props, children) => (
|
||||
<Draggable handle=".drag-handler" defaultClassName={cx('draggable')} positionOffset={{ x: 0, y: 200 }}>
|
||||
<div {...props}>{children}</div>
|
||||
</Draggable>
|
||||
)}
|
||||
>
|
||||
<div className={cx('root')}>
|
||||
<VerticalGroup>
|
||||
<HorizontalGroup justify="space-between">
|
||||
<HorizontalGroup spacing="sm">
|
||||
{isNew && <Tag color={SHIFT_SWAP_COLOR}>New</Tag>}
|
||||
<Text.Title level={5} editable>
|
||||
Shift swap
|
||||
</Text.Title>
|
||||
</HorizontalGroup>
|
||||
<HorizontalGroup>
|
||||
{!isNew && (
|
||||
<WithPermissionControlTooltip userAction={UserActions.SchedulesWrite}>
|
||||
<WithConfirm title="Are you sure to delete shift swap request?" confirmText="Delete">
|
||||
<IconButton variant="secondary" tooltip="Delete" name="trash-alt" onClick={handleDelete} />
|
||||
</WithConfirm>
|
||||
</WithPermissionControlTooltip>
|
||||
)}
|
||||
<IconButton variant="secondary" className={cx('drag-handler')} name="draggabledots" />
|
||||
<IconButton name="times" variant="secondary" tooltip="Close" onClick={onHide} />
|
||||
</HorizontalGroup>
|
||||
</HorizontalGroup>
|
||||
|
||||
<div className={cx('fields')}>
|
||||
{!isNew && (
|
||||
<Field label="Creator">
|
||||
<Input disabled value={beneficiaryName}></Input>
|
||||
</Field>
|
||||
)}
|
||||
|
||||
<HorizontalGroup height="auto">
|
||||
<Field label="Swap start">
|
||||
<DateTimePicker
|
||||
timezone={store.currentTimezone}
|
||||
disabled={!isNew}
|
||||
value={dayjs(shiftSwap.swap_start)}
|
||||
onChange={handleShiftSwapStartChange}
|
||||
/>
|
||||
</Field>
|
||||
<Field label="Swap end">
|
||||
<DateTimePicker
|
||||
timezone={store.currentTimezone}
|
||||
disabled={!isNew}
|
||||
value={dayjs(shiftSwap.swap_end)}
|
||||
onChange={handleShiftSwapEndChange}
|
||||
/>
|
||||
</Field>
|
||||
</HorizontalGroup>
|
||||
|
||||
<Field label="Description">
|
||||
<TextArea rows={4} disabled={!isNew} value={shiftSwap.description} onChange={handleDescriptionChange}>
|
||||
{shiftSwap.description}
|
||||
</TextArea>
|
||||
</Field>
|
||||
{!isNew && (
|
||||
<Field label="Taken by">
|
||||
{shiftSwap?.benefactor ? (
|
||||
<UserItem
|
||||
pk={shiftSwap?.benefactor}
|
||||
shiftColor={SHIFT_SWAP_COLOR}
|
||||
shiftStart={shiftSwap.swap_start}
|
||||
shiftEnd={shiftSwap.swap_end}
|
||||
/>
|
||||
) : (
|
||||
<Text type="secondary">Not taken yet</Text>
|
||||
)}
|
||||
</Field>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<HorizontalGroup justify="space-between">
|
||||
<Text type="secondary">Current timezone: {getTzOffsetString(dayjs().tz(currentTimezone))}</Text>
|
||||
<HorizontalGroup>
|
||||
<WithPermissionControlTooltip userAction={UserActions.SchedulesWrite}>
|
||||
{isNew ? (
|
||||
<Button variant="primary" onClick={handleCreate}>
|
||||
Create
|
||||
</Button>
|
||||
) : (
|
||||
<Button variant="primary" onClick={handleTake} disabled={Boolean(isPastDue || shiftSwap?.benefactor)}>
|
||||
Take
|
||||
</Button>
|
||||
)}
|
||||
</WithPermissionControlTooltip>
|
||||
</HorizontalGroup>
|
||||
</HorizontalGroup>
|
||||
</VerticalGroup>
|
||||
</div>
|
||||
</Modal>
|
||||
);
|
||||
};
|
||||
|
||||
export default ShiftSwapForm;
|
||||
|
|
@ -37,7 +37,7 @@ const UserItem = ({ pk, shiftColor, shiftStart, shiftEnd }: UserItemProps) => {
|
|||
const duration = dayjs(shiftEnd).diff(dayjs(shiftStart), 'seconds');
|
||||
|
||||
return (
|
||||
<div style={{ backgroundColor: shiftColor, width: '100%' }}>
|
||||
<div className={cx('user-item')} style={{ backgroundColor: shiftColor, width: '100%' }}>
|
||||
{duration <= WEEK_IN_SECONDS && (
|
||||
<WorkingHours
|
||||
timezone={timezone}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,8 @@
|
|||
import dayjs from 'dayjs';
|
||||
|
||||
import { getColor, getOverrideColor } from 'models/schedule/schedule.helpers';
|
||||
import { Layer, Shift } from 'models/schedule/schedule.types';
|
||||
import { User } from 'models/user/user.types';
|
||||
|
||||
export const findColor = (shiftId: Shift['id'], layers: Layer[], overrides?) => {
|
||||
let color = undefined;
|
||||
|
|
@ -37,3 +40,35 @@ export const findColor = (shiftId: Shift['id'], layers: Layer[], overrides?) =>
|
|||
|
||||
return color;
|
||||
};
|
||||
|
||||
export const findClosestUserEvent = (startMoment: dayjs.Dayjs, userPk: User['pk'], layers: Layer[]) => {
|
||||
let minDiff;
|
||||
let closestEvent;
|
||||
|
||||
if (!layers) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
for (let i = 0; i < layers.length; i++) {
|
||||
for (let j = 0; j < layers[i].shifts.length; j++) {
|
||||
const shift = layers[i].shifts[j];
|
||||
const events = shift.events;
|
||||
for (let k = 0; k < events.length; k++) {
|
||||
const event = events[k];
|
||||
const diff = dayjs(event.start).diff(startMoment, 'seconds');
|
||||
|
||||
if (
|
||||
event.users.some((user) => user.pk === userPk) &&
|
||||
!event.users.some((user) => user.swap_request) &&
|
||||
diff > 0 &&
|
||||
(minDiff === undefined || diff < minDiff)
|
||||
) {
|
||||
minDiff = diff;
|
||||
closestEvent = event;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return closestEvent;
|
||||
};
|
||||
|
|
|
|||
|
|
@ -14,14 +14,16 @@ import Rotation from 'containers/Rotation/Rotation';
|
|||
import RotationForm from 'containers/RotationForm/RotationForm';
|
||||
import { WithPermissionControlTooltip } from 'containers/WithPermissionControl/WithPermissionControlTooltip';
|
||||
import { getColor, getLayersFromStore } from 'models/schedule/schedule.helpers';
|
||||
import { Layer, Schedule, ScheduleType, Shift } from 'models/schedule/schedule.types';
|
||||
import { Layer, Schedule, ScheduleType, Shift, ShiftSwap, Event } from 'models/schedule/schedule.types';
|
||||
import { Timezone } from 'models/timezone/timezone.types';
|
||||
import { User } from 'models/user/user.types';
|
||||
import { getUTCString } from 'pages/schedule/Schedule.helpers';
|
||||
import { WithStoreProps } from 'state/types';
|
||||
import { withMobXProviderContext } from 'state/withStore';
|
||||
import { UserActions } from 'utils/authorization';
|
||||
|
||||
import { DEFAULT_TRANSITION_TIMEOUT } from './Rotations.config';
|
||||
import { findColor } from './Rotations.helpers';
|
||||
import { findClosestUserEvent, findColor } from './Rotations.helpers';
|
||||
|
||||
import styles from './Rotations.module.css';
|
||||
|
||||
|
|
@ -35,11 +37,14 @@ interface RotationsProps extends WithStoreProps {
|
|||
onShowRotationForm: (shiftId: Shift['id'] | 'new') => void;
|
||||
onClick: (id: Shift['id'] | 'new') => void;
|
||||
onShowOverrideForm: (shiftId: 'new', shiftStart: dayjs.Dayjs, shiftEnd: dayjs.Dayjs) => void;
|
||||
onShowShiftSwapForm: (id: ShiftSwap['id'] | 'new', params?: Partial<ShiftSwap>) => void;
|
||||
onCreate: () => void;
|
||||
onUpdate: () => void;
|
||||
onDelete: () => void;
|
||||
onShiftSwapRequest: (beneficiary: User['pk'], swap_start: string, swap_end: string) => void;
|
||||
disabled: boolean;
|
||||
filters: ScheduleFiltersType;
|
||||
onSlotClick?: (event: Event) => void;
|
||||
}
|
||||
|
||||
interface RotationsState {
|
||||
|
|
@ -68,6 +73,11 @@ class Rotations extends Component<RotationsProps, RotationsState> {
|
|||
shiftIdToShowRotationForm,
|
||||
disabled,
|
||||
filters,
|
||||
onShowShiftSwapForm,
|
||||
onSlotClick,
|
||||
store: {
|
||||
userStore: { currentUserPk },
|
||||
},
|
||||
} = this.props;
|
||||
const { layerPriority, shiftStartToShowRotationForm, shiftEndToShowRotationForm } = this.state;
|
||||
|
||||
|
|
@ -104,35 +114,55 @@ class Rotations extends Component<RotationsProps, RotationsState> {
|
|||
Rotations
|
||||
</Text.Title>
|
||||
</div>
|
||||
{disabled ? (
|
||||
isTypeReadOnly ? (
|
||||
<Tooltip content="Ical and API/Terraform rotations are read-only here" placement="top">
|
||||
<div>
|
||||
<HorizontalGroup>
|
||||
<Button
|
||||
variant="secondary"
|
||||
onClick={() => {
|
||||
const closestEvent = findClosestUserEvent(dayjs(), currentUserPk, layers);
|
||||
const swapStart = closestEvent
|
||||
? dayjs(closestEvent.start)
|
||||
: dayjs().tz(currentTimezone).startOf('day').add(1, 'day');
|
||||
|
||||
const swapEnd = closestEvent ? dayjs(closestEvent.end) : swapStart.add(1, 'day');
|
||||
|
||||
onShowShiftSwapForm('new', {
|
||||
swap_start: getUTCString(swapStart),
|
||||
swap_end: getUTCString(swapEnd),
|
||||
});
|
||||
}}
|
||||
>
|
||||
Request shift swap
|
||||
</Button>
|
||||
{disabled ? (
|
||||
isTypeReadOnly ? (
|
||||
<Tooltip content="Ical and API/Terraform rotations are read-only here" placement="top">
|
||||
<div>
|
||||
<Button variant="primary" icon="plus" disabled>
|
||||
Add rotation
|
||||
</Button>
|
||||
</div>
|
||||
</Tooltip>
|
||||
) : (
|
||||
<WithPermissionControlTooltip userAction={UserActions.SchedulesWrite}>
|
||||
<Button variant="primary" icon="plus" disabled>
|
||||
Add rotation
|
||||
</Button>
|
||||
</div>
|
||||
</Tooltip>
|
||||
</WithPermissionControlTooltip>
|
||||
)
|
||||
) : options.length > 0 ? (
|
||||
<ValuePicker
|
||||
label="Add rotation"
|
||||
options={options}
|
||||
onChange={this.handleAddRotation}
|
||||
variant="primary"
|
||||
size="md"
|
||||
/>
|
||||
) : (
|
||||
<WithPermissionControlTooltip userAction={UserActions.SchedulesWrite}>
|
||||
<Button variant="primary" icon="plus" disabled>
|
||||
Add rotation
|
||||
</Button>
|
||||
</WithPermissionControlTooltip>
|
||||
)
|
||||
) : options.length > 0 ? (
|
||||
<ValuePicker
|
||||
label="Add rotation"
|
||||
options={options}
|
||||
onChange={this.handleAddRotation}
|
||||
variant="primary"
|
||||
size="md"
|
||||
/>
|
||||
) : (
|
||||
<Button variant="primary" icon="plus" onClick={() => this.handleAddLayer(nextPriority, startMoment)}>
|
||||
Add rotation
|
||||
</Button>
|
||||
)}
|
||||
<Button variant="primary" icon="plus" onClick={() => this.handleAddLayer(nextPriority, startMoment)}>
|
||||
Add rotation
|
||||
</Button>
|
||||
)}
|
||||
</HorizontalGroup>
|
||||
</HorizontalGroup>
|
||||
</div>
|
||||
<div className={cx('rotations-plus-title')}>
|
||||
|
|
@ -164,6 +194,8 @@ class Rotations extends Component<RotationsProps, RotationsState> {
|
|||
this.onRotationClick(shiftId, shiftStart, shiftEnd);
|
||||
}}
|
||||
handleAddOverride={this.handleShowOverrideForm}
|
||||
handleAddShiftSwap={onShowShiftSwapForm}
|
||||
onShiftSwapClick={onShowShiftSwapForm}
|
||||
color={getColor(layerIndex, rotationIndex)}
|
||||
events={events}
|
||||
layerIndex={layerIndex}
|
||||
|
|
@ -173,6 +205,7 @@ class Rotations extends Component<RotationsProps, RotationsState> {
|
|||
transparent={isPreview}
|
||||
tutorialParams={isPreview && store.scheduleStore.rotationFormLiveParams}
|
||||
filters={filters}
|
||||
onSlotClick={onSlotClick}
|
||||
/>
|
||||
</CSSTransition>
|
||||
))}
|
||||
|
|
|
|||
|
|
@ -10,8 +10,13 @@ import { ScheduleFiltersType } from 'components/ScheduleFilters/ScheduleFilters.
|
|||
import Text from 'components/Text/Text';
|
||||
import TimelineMarks from 'components/TimelineMarks/TimelineMarks';
|
||||
import Rotation from 'containers/Rotation/Rotation';
|
||||
import { getLayersFromStore, getOverridesFromStore, getShiftsFromStore } from 'models/schedule/schedule.helpers';
|
||||
import { Schedule, Shift } from 'models/schedule/schedule.types';
|
||||
import {
|
||||
flattenFinalShifs,
|
||||
getLayersFromStore,
|
||||
getOverridesFromStore,
|
||||
getShiftsFromStore,
|
||||
} from 'models/schedule/schedule.helpers';
|
||||
import { Schedule, Shift, ShiftSwap, Event } from 'models/schedule/schedule.types';
|
||||
import { Timezone } from 'models/timezone/timezone.types';
|
||||
import { WithStoreProps } from 'state/types';
|
||||
import { withMobXProviderContext } from 'state/withStore';
|
||||
|
|
@ -30,8 +35,10 @@ interface ScheduleFinalProps extends WithStoreProps {
|
|||
simplified?: boolean;
|
||||
onClick: (shiftId: Shift['id']) => void;
|
||||
onShowOverrideForm: (shiftId: 'new', shiftStart: dayjs.Dayjs, shiftEnd: dayjs.Dayjs) => void;
|
||||
onShowShiftSwapForm: (id: ShiftSwap['id'] | 'new', params?: Partial<ShiftSwap>) => void;
|
||||
disabled?: boolean;
|
||||
filters: ScheduleFiltersType;
|
||||
onSlotClick?: (event: Event) => void;
|
||||
}
|
||||
|
||||
interface ScheduleOverridesState {
|
||||
|
|
@ -45,14 +52,15 @@ class ScheduleFinal extends Component<ScheduleFinalProps, ScheduleOverridesState
|
|||
};
|
||||
|
||||
render() {
|
||||
const { startMoment, currentTimezone, store, simplified, scheduleId, filters } = this.props;
|
||||
const { startMoment, currentTimezone, store, simplified, scheduleId, filters, onShowShiftSwapForm, onSlotClick } =
|
||||
this.props;
|
||||
|
||||
const base = 7 * 24 * 60; // in minutes
|
||||
const diff = dayjs().tz(currentTimezone).diff(startMoment, 'minutes');
|
||||
|
||||
const currentTimeX = diff / base;
|
||||
|
||||
const shifts = getShiftsFromStore(store, scheduleId, startMoment);
|
||||
const shifts = flattenFinalShifs(getShiftsFromStore(store, scheduleId, startMoment));
|
||||
|
||||
const layers = getLayersFromStore(store, scheduleId, startMoment);
|
||||
|
||||
|
|
@ -60,6 +68,8 @@ class ScheduleFinal extends Component<ScheduleFinalProps, ScheduleOverridesState
|
|||
|
||||
const currentTimeHidden = currentTimeX < 0 || currentTimeX > 1;
|
||||
|
||||
const getColor = (shiftId: Shift['id']) => findColor(shiftId, layers, overrides);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className={cx('root')}>
|
||||
|
|
@ -79,7 +89,7 @@ class ScheduleFinal extends Component<ScheduleFinalProps, ScheduleOverridesState
|
|||
<TimelineMarks startMoment={startMoment} timezone={currentTimezone} />
|
||||
<TransitionGroup className={cx('rotations')}>
|
||||
{shifts && shifts.length ? (
|
||||
shifts.map(({ shiftId, events }, index) => {
|
||||
shifts.map(({ events }, index) => {
|
||||
return (
|
||||
<CSSTransition key={index} timeout={DEFAULT_TRANSITION_TIMEOUT} classNames={{ ...styles }}>
|
||||
<Rotation
|
||||
|
|
@ -88,11 +98,13 @@ class ScheduleFinal extends Component<ScheduleFinalProps, ScheduleOverridesState
|
|||
events={events}
|
||||
startMoment={startMoment}
|
||||
currentTimezone={currentTimezone}
|
||||
color={findColor(shiftId, layers, overrides)}
|
||||
onClick={this.getRotationClickHandler(shiftId)}
|
||||
handleAddOverride={this.handleShowOverrideForm}
|
||||
handleAddShiftSwap={onShowShiftSwapForm}
|
||||
onShiftSwapClick={onShowShiftSwapForm}
|
||||
simplified={simplified}
|
||||
filters={filters}
|
||||
getColor={getColor}
|
||||
onSlotClick={onSlotClick}
|
||||
/>
|
||||
</CSSTransition>
|
||||
);
|
||||
|
|
@ -114,18 +126,6 @@ class ScheduleFinal extends Component<ScheduleFinalProps, ScheduleOverridesState
|
|||
);
|
||||
}
|
||||
|
||||
getRotationClickHandler = (shiftId: Shift['id']) => {
|
||||
const { onClick, disabled } = this.props;
|
||||
|
||||
return () => {
|
||||
if (disabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
onClick(shiftId);
|
||||
};
|
||||
};
|
||||
|
||||
onSearchTermChangeCallback = () => {};
|
||||
|
||||
handleShowOverrideForm = (shiftStart: dayjs.Dayjs, shiftEnd: dayjs.Dayjs) => {
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
import React, { FC, useMemo } from 'react';
|
||||
import React, { FC, useCallback, useMemo } from 'react';
|
||||
|
||||
import { Button, HorizontalGroup, Icon, Tooltip, VerticalGroup } from '@grafana/ui';
|
||||
import cn from 'classnames/bind';
|
||||
|
|
@ -8,8 +8,8 @@ import { observer } from 'mobx-react';
|
|||
import { ScheduleFiltersType } from 'components/ScheduleFilters/ScheduleFilters.types';
|
||||
import Text from 'components/Text/Text';
|
||||
import WorkingHours from 'components/WorkingHours/WorkingHours';
|
||||
import { getShiftName } from 'models/schedule/schedule.helpers';
|
||||
import { Event, Schedule } from 'models/schedule/schedule.types';
|
||||
import { getShiftName, SHIFT_SWAP_COLOR } from 'models/schedule/schedule.helpers';
|
||||
import { Event, Schedule, ShiftSwap } from 'models/schedule/schedule.types';
|
||||
import { getTzOffsetString } from 'models/timezone/timezone.helpers';
|
||||
import { Timezone } from 'models/timezone/timezone.types';
|
||||
import { User } from 'models/user/user.types';
|
||||
|
|
@ -25,17 +25,39 @@ interface ScheduleSlotProps {
|
|||
startMoment: dayjs.Dayjs;
|
||||
currentTimezone: Timezone;
|
||||
handleAddOverride: (event: React.MouseEvent<HTMLDivElement>) => void;
|
||||
handleAddShiftSwap: (event: React.MouseEvent<HTMLDivElement>) => void;
|
||||
onShiftSwapClick: (id: ShiftSwap['id']) => void;
|
||||
color?: string;
|
||||
simplified?: boolean;
|
||||
filters?: ScheduleFiltersType;
|
||||
onClick: (event: React.MouseEvent<HTMLDivElement>) => void;
|
||||
}
|
||||
|
||||
const cx = cn.bind(styles);
|
||||
|
||||
const ScheduleSlot: FC<ScheduleSlotProps> = observer((props) => {
|
||||
const { event, scheduleId, currentTimezone, color, handleAddOverride, simplified, filters } = props;
|
||||
const {
|
||||
event,
|
||||
scheduleId,
|
||||
currentTimezone,
|
||||
color,
|
||||
handleAddOverride,
|
||||
handleAddShiftSwap,
|
||||
onShiftSwapClick,
|
||||
simplified,
|
||||
filters,
|
||||
onClick,
|
||||
} = props;
|
||||
const { users } = event;
|
||||
|
||||
const getShiftSwapClickHandler = useCallback((swapId: ShiftSwap['id']) => {
|
||||
return (event: React.MouseEvent<HTMLDivElement>) => {
|
||||
event.stopPropagation();
|
||||
|
||||
onShiftSwapClick(swapId);
|
||||
};
|
||||
}, []);
|
||||
|
||||
const start = dayjs(event.start);
|
||||
const end = dayjs(event.end);
|
||||
|
||||
|
|
@ -49,8 +71,10 @@ const ScheduleSlot: FC<ScheduleSlotProps> = observer((props) => {
|
|||
|
||||
const onCallNow = store.scheduleStore.items[scheduleId]?.on_call_now;
|
||||
|
||||
const enableWebOverrides = store.scheduleStore.items[scheduleId]?.enable_web_overrides;
|
||||
|
||||
return (
|
||||
<div className={cx('stack')} style={{ width: `${width * 100}%` }}>
|
||||
<div className={cx('stack')} style={{ width: `${width * 100}%` }} onClick={onClick}>
|
||||
{event.is_gap ? (
|
||||
<Tooltip content={<ScheduleGapDetails event={event} currentTimezone={currentTimezone} />}>
|
||||
<div className={cx('root', 'root__type_gap')} />
|
||||
|
|
@ -63,9 +87,10 @@ const ScheduleSlot: FC<ScheduleSlotProps> = observer((props) => {
|
|||
}}
|
||||
/>
|
||||
) : (
|
||||
users.map(({ display_name, pk: userPk }) => {
|
||||
users.map(({ display_name, pk: userPk, swap_request }) => {
|
||||
const storeUser = store.userStore.items[userPk];
|
||||
|
||||
const isCurrentUserSlot = userPk === store.userStore.currentUserPk;
|
||||
const inactive = filters && filters.users.length && !filters.users.includes(userPk);
|
||||
|
||||
const title = storeUser ? getTitle(storeUser) : display_name;
|
||||
|
|
@ -74,14 +99,22 @@ const ScheduleSlot: FC<ScheduleSlotProps> = observer((props) => {
|
|||
storeUser && onCallNow && onCallNow.some((onCallUser) => storeUser.pk === onCallUser.pk)
|
||||
);
|
||||
|
||||
const isShiftSwap = Boolean(swap_request);
|
||||
|
||||
let backgroundColor = color;
|
||||
if (isShiftSwap) {
|
||||
backgroundColor = SHIFT_SWAP_COLOR;
|
||||
}
|
||||
|
||||
const scheduleSlotContent = (
|
||||
<div
|
||||
className={cx('root', { root__inactive: inactive })}
|
||||
style={{
|
||||
backgroundColor: color,
|
||||
backgroundColor,
|
||||
}}
|
||||
onClick={swap_request ? getShiftSwapClickHandler(swap_request.pk) : undefined}
|
||||
>
|
||||
{storeUser && (
|
||||
{storeUser && (!swap_request || swap_request.user) && (
|
||||
<WorkingHours
|
||||
className={cx('working-hours')}
|
||||
timezone={storeUser.timezone}
|
||||
|
|
@ -90,7 +123,9 @@ const ScheduleSlot: FC<ScheduleSlotProps> = observer((props) => {
|
|||
duration={duration}
|
||||
/>
|
||||
)}
|
||||
<div className={cx('title')}>{title}</div>
|
||||
<div className={cx('title')}>
|
||||
{swap_request && !swap_request.user ? <Icon name="user-arrows" /> : title}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
|
|
@ -104,13 +139,23 @@ const ScheduleSlot: FC<ScheduleSlotProps> = observer((props) => {
|
|||
key={userPk}
|
||||
content={
|
||||
<ScheduleSlotDetails
|
||||
isShiftSwap={isShiftSwap}
|
||||
beneficiaryName={
|
||||
isShiftSwap ? (swap_request.user ? swap_request.user.display_name : display_name) : undefined
|
||||
}
|
||||
benefactorName={isShiftSwap ? (swap_request.user ? display_name : undefined) : undefined}
|
||||
user={storeUser}
|
||||
isOncall={isOncall}
|
||||
currentTimezone={currentTimezone}
|
||||
event={event}
|
||||
handleAddOverride={handleAddOverride}
|
||||
handleAddOverride={
|
||||
!enableWebOverrides || simplified || event.is_override || isShiftSwap
|
||||
? undefined
|
||||
: handleAddOverride
|
||||
}
|
||||
handleAddShiftSwap={simplified || isShiftSwap || !isCurrentUserSlot ? undefined : handleAddShiftSwap}
|
||||
simplified={simplified}
|
||||
color={color}
|
||||
color={backgroundColor}
|
||||
/>
|
||||
}
|
||||
>
|
||||
|
|
@ -131,12 +176,26 @@ interface ScheduleSlotDetailsProps {
|
|||
currentTimezone: Timezone;
|
||||
event: Event;
|
||||
handleAddOverride: (event: React.SyntheticEvent) => void;
|
||||
handleAddShiftSwap: (event: React.SyntheticEvent) => void;
|
||||
simplified?: boolean;
|
||||
color: string;
|
||||
isShiftSwap?: boolean;
|
||||
beneficiaryName?: string;
|
||||
benefactorName?: string;
|
||||
}
|
||||
|
||||
const ScheduleSlotDetails = (props: ScheduleSlotDetailsProps) => {
|
||||
const { user, currentTimezone, event, handleAddOverride, simplified, color } = props;
|
||||
const {
|
||||
user,
|
||||
currentTimezone,
|
||||
event,
|
||||
handleAddOverride,
|
||||
handleAddShiftSwap,
|
||||
color,
|
||||
isShiftSwap,
|
||||
beneficiaryName,
|
||||
benefactorName,
|
||||
} = props;
|
||||
|
||||
const store = useStore();
|
||||
const { scheduleStore } = store;
|
||||
|
|
@ -153,16 +212,34 @@ const ScheduleSlotDetails = (props: ScheduleSlotDetailsProps) => {
|
|||
<div className={cx('badge')} style={{ backgroundColor: color }} />
|
||||
</div>
|
||||
<Text type="primary" maxWidth="222px">
|
||||
{getShiftName(shift)}
|
||||
{isShiftSwap ? 'Shift swap' : getShiftName(shift)}
|
||||
</Text>
|
||||
</HorizontalGroup>
|
||||
<HorizontalGroup align="flex-start">
|
||||
<div className={cx('details-icon')}>
|
||||
<Icon className={cx('icon')} name="user" />
|
||||
<Icon className={cx('icon')} name={isShiftSwap ? 'user-arrows' : 'user'} />
|
||||
</div>
|
||||
<Text type="primary" className={cx('username')}>
|
||||
{user?.username}
|
||||
</Text>
|
||||
{isShiftSwap ? (
|
||||
<VerticalGroup spacing="xs">
|
||||
<Text type="primary">Swap pair</Text>
|
||||
<Text type="primary" className={cx('username')}>
|
||||
{beneficiaryName} <Text type="secondary">(creator)</Text>
|
||||
</Text>
|
||||
{benefactorName ? (
|
||||
<Text type="primary" className={cx('username')}>
|
||||
{benefactorName} <Text type="secondary">(taken by)</Text>
|
||||
</Text>
|
||||
) : (
|
||||
<Text type="secondary" className={cx('username')}>
|
||||
Not taken yet
|
||||
</Text>
|
||||
)}
|
||||
</VerticalGroup>
|
||||
) : (
|
||||
<Text type="primary" className={cx('username')}>
|
||||
{user?.username}
|
||||
</Text>
|
||||
)}
|
||||
</HorizontalGroup>
|
||||
<HorizontalGroup align="flex-start">
|
||||
<div className={cx('details-icon')}>
|
||||
|
|
@ -199,13 +276,18 @@ const ScheduleSlotDetails = (props: ScheduleSlotDetailsProps) => {
|
|||
{dayjs(event.end).tz(currentTimezone).format('DD MMM, HH:mm')}
|
||||
</Text>
|
||||
</HorizontalGroup>
|
||||
{!simplified && !event.is_override && (
|
||||
<HorizontalGroup justify="flex-end">
|
||||
<HorizontalGroup justify="flex-end">
|
||||
{handleAddShiftSwap && (
|
||||
<Button size="sm" variant="secondary" onClick={handleAddShiftSwap}>
|
||||
Request shift swap
|
||||
</Button>
|
||||
)}
|
||||
{handleAddOverride && (
|
||||
<Button size="sm" variant="secondary" onClick={handleAddOverride}>
|
||||
+ Override
|
||||
</Button>
|
||||
</HorizontalGroup>
|
||||
)}
|
||||
)}
|
||||
</HorizontalGroup>
|
||||
</VerticalGroup>
|
||||
</div>
|
||||
);
|
||||
|
|
|
|||
|
|
@ -112,7 +112,7 @@ const CloudPhoneSettings = observer((props: CloudPhoneSettingsProps) => {
|
|||
|
||||
return (
|
||||
<WithPermissionControlDisplay
|
||||
userAction={UserActions.OtherSettingsWrite}
|
||||
userAction={UserActions.UserSettingsWrite}
|
||||
title="OnCall uses Grafana Cloud for SMS and phone call notifications"
|
||||
>
|
||||
<VerticalGroup spacing="lg">
|
||||
|
|
|
|||
|
|
@ -188,7 +188,7 @@ const PhoneVerification = observer((props: PhoneVerificationProps) => {
|
|||
}
|
||||
|
||||
return (
|
||||
<WithPermissionControlDisplay userAction={UserActions.OtherSettingsWrite}>
|
||||
<WithPermissionControlDisplay userAction={UserActions.UserSettingsWrite}>
|
||||
<VerticalGroup>
|
||||
{isPhoneValid && !user.verified_phone_number && (
|
||||
<Alert severity="info" title="You will receive alerts to a new number after verification" />
|
||||
|
|
|
|||
|
|
@ -80,8 +80,8 @@ const UsersTimezones: FC<UsersTimezonesProps> = (props) => {
|
|||
light
|
||||
startMoment={currentMoment.startOf('day')}
|
||||
duration={24 * 60 * 60}
|
||||
timezone={userStore.currentUser.timezone}
|
||||
workingHours={userStore.currentUser.working_hours}
|
||||
timezone={userStore.currentUser?.timezone}
|
||||
workingHours={userStore.currentUser?.working_hours}
|
||||
className={cx('working-hours')}
|
||||
/>
|
||||
{/* <div className={cx('shades', 'shades--left')} />
|
||||
|
|
|
|||
|
|
@ -8,6 +8,23 @@ export const getFromString = (moment: dayjs.Dayjs) => {
|
|||
return moment.format('YYYY-MM-DD');
|
||||
};
|
||||
|
||||
const createGap = (start, end) => {
|
||||
return {
|
||||
start,
|
||||
end,
|
||||
is_gap: true,
|
||||
users: [],
|
||||
all_day: false,
|
||||
shift: null,
|
||||
missing_users: [],
|
||||
is_empty: true,
|
||||
calendar_type: ScheduleType.API,
|
||||
priority_level: null,
|
||||
source: 'web',
|
||||
is_override: false,
|
||||
};
|
||||
};
|
||||
|
||||
export const fillGaps = (events: Event[]) => {
|
||||
const newEvents = [];
|
||||
|
||||
|
|
@ -18,19 +35,7 @@ export const fillGaps = (events: Event[]) => {
|
|||
|
||||
if (nextEvent) {
|
||||
if (nextEvent.start !== event.end) {
|
||||
newEvents.push({
|
||||
start: event.end,
|
||||
end: nextEvent.start,
|
||||
is_gap: true,
|
||||
users: [],
|
||||
all_day: false,
|
||||
shift: null,
|
||||
missing_users: [],
|
||||
is_empty: true,
|
||||
calendar_type: ScheduleType.API,
|
||||
priority_level: null,
|
||||
source: 'web',
|
||||
});
|
||||
newEvents.push(createGap(event.end, nextEvent.start));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -69,6 +74,119 @@ export const getShiftsFromStore = (
|
|||
: (store.scheduleStore.events[scheduleId]?.['final']?.[getFromString(startMoment)] as any);
|
||||
};
|
||||
|
||||
// Flattens the per-shift event rows of the final schedule view so compatible
// shifts share one row ("final schedule shifts should lay in one line").
// Two rows can be merged when every non-gap event of one row fits inside a
// gap of the other, or entirely before/after the other's whole range.
// Returns undefined when `shifts` is not loaded yet; otherwise returns a
// (possibly shorter) array of rows. Input rows are never mutated.
// NOTE(review): "Shifs"/"Recepient"/"suitabler" are pre-existing misspellings;
// the exported name is kept as-is since callers reference it.
export const flattenFinalShifs = (shifts: ShiftEvents[]) => {
  if (!shifts) {
    return undefined;
  }

  // Builds every unordered pair of rows; rows are shallow-copied (with copied
  // events arrays) so merge attempts never touch the originals.
  function splitToPairs(shifts: ShiftEvents[]) {
    const pairs = [];
    for (let i = 0; i < shifts.length - 1; i++) {
      for (let j = i + 1; j < shifts.length; j++) {
        pairs.push([
          { ...shifts[i], events: [...shifts[i].events] },
          { ...shifts[j], events: [...shifts[j].events] },
        ]);
      }
    }

    return pairs;
  }

  let pairs = splitToPairs(shifts);

  // Greedily merge pairs until no pair can be merged; each successful merge
  // changes the row set, so the pair list is rebuilt from scratch.
  while (pairs.length > 0) {
    const currentPair = pairs.shift();

    const merged = mergePair(currentPair);

    if (merged !== currentPair) {
      // means pair was fully merged

      // Drop both source rows and put the merged row first.
      shifts = shifts.filter((shift) => !currentPair.some((pairShift) => pairShift.shiftId === shift.shiftId));
      shifts.unshift(merged[0]);
      pairs = splitToPairs(shifts);
    }
  }

  // Tries to move every non-gap event of pair[1] (donor) into pair[0]
  // (recipient). Returns [mergedRow] on success; returns the SAME `pair`
  // object when some donor event overlaps a recipient event — the caller
  // detects success by reference identity, not by content.
  function mergePair(pair: ShiftEvents[]): ShiftEvents[] {
    const recipient = { ...pair[0], events: [...pair[0].events] };
    const donor = pair[1];

    // Only real events are transplanted; donor gaps are discarded.
    const donorEvents = donor.events.filter((event) => !event.is_gap);

    for (let i = 0; i < donorEvents.length; i++) {
      const donorEvent = donorEvents[i];

      const eventStartMoment = dayjs(donorEvent.start);
      const eventEndMoment = dayjs(donorEvent.end);

      // Find a recipient gap that fully contains the donor event.
      const suitablerRecepientGapIndex = recipient.events.findIndex((event) => {
        if (!event.is_gap) {
          return false;
        }

        const gap = event;

        const gapStartMoment = dayjs(gap.start);
        const gapEndMoment = dayjs(gap.end);

        return gapStartMoment.isSameOrBefore(eventStartMoment) && gapEndMoment.isSameOrAfter(eventEndMoment);
      });

      if (suitablerRecepientGapIndex > -1) {
        const suitablerRecepientGap = recipient.events[suitablerRecepientGapIndex];

        // Replace the gap with [left gap?, donor event, right gap?]; zero-width
        // gaps (string-equal boundaries) are dropped.
        const itemsToAdd = [];
        const leftGap = createGap(suitablerRecepientGap.start, donorEvent.start);
        if (leftGap.start !== leftGap.end) {
          itemsToAdd.push(leftGap);
        }
        itemsToAdd.push(donorEvent);

        const rightGap = createGap(donorEvent.end, suitablerRecepientGap.end);
        if (rightGap.start !== rightGap.end) {
          itemsToAdd.push(rightGap);
        }

        recipient.events = [
          ...recipient.events.slice(0, suitablerRecepientGapIndex),
          ...itemsToAdd,
          ...recipient.events.slice(suitablerRecepientGapIndex + 1),
        ];
      } else {
        // No containing gap: the event may still fit entirely before the first
        // or after the last recipient event (padding with a gap when the
        // boundaries are not exactly adjacent).
        const firstRecepientEvent = recipient.events[0];
        const firstRecepientEventStartMoment = dayjs(firstRecepientEvent.start);

        const lastRecepientEvent = recipient.events[recipient.events.length - 1];
        const lastRecepientEventEndMoment = dayjs(lastRecepientEvent.end);

        if (eventEndMoment.isSameOrBefore(firstRecepientEventStartMoment)) {
          const itemsToAdd = [donorEvent];
          if (donorEvent.end !== firstRecepientEvent.start) {
            itemsToAdd.push(createGap(donorEvent.end, firstRecepientEvent.start));
          }
          recipient.events = [...itemsToAdd, ...recipient.events];
        } else if (eventStartMoment.isSameOrAfter(lastRecepientEventEndMoment)) {
          const itemsToAdd = [donorEvent];
          if (lastRecepientEvent.end !== donorEvent.start) {
            itemsToAdd.unshift(createGap(lastRecepientEvent.end, donorEvent.start));
          }
          recipient.events = [...recipient.events, ...itemsToAdd];
        } else {
          // the pair can't be fully merged

          return pair;
        }
      }
    }

    return [recipient];
  }

  return shifts;
};
|
||||
|
||||
export const getLayersFromStore = (store: RootStore, scheduleId: Schedule['id'], startMoment: dayjs.Dayjs): Layer[] => {
|
||||
return store.scheduleStore.rotationPreview
|
||||
? store.scheduleStore.rotationPreview[getFromString(startMoment)]
|
||||
|
|
@ -79,7 +197,7 @@ export const getOverridesFromStore = (
|
|||
store: RootStore,
|
||||
scheduleId: Schedule['id'],
|
||||
startMoment: dayjs.Dayjs
|
||||
): Layer[] | ShiftEvents[] => {
|
||||
): ShiftEvents[] => {
|
||||
return store.scheduleStore.overridePreview
|
||||
? store.scheduleStore.overridePreview[getFromString(startMoment)]
|
||||
: (store.scheduleStore.events[scheduleId]?.['override']?.[getFromString(startMoment)] as Layer[]);
|
||||
|
|
@ -204,6 +322,8 @@ const L3_COLORS = ['#377277', '#638282', '#364E4E', '#423220'];
|
|||
|
||||
const OVERRIDE_COLORS = ['#C69B06', '#C2C837'];
|
||||
|
||||
export const SHIFT_SWAP_COLOR = '#C69B06';
|
||||
|
||||
const COLORS = [L1_COLORS, L2_COLORS, L3_COLORS];
|
||||
|
||||
export const getColor = (layerIndex: number, rotationIndex: number) => {
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ import {
|
|||
ShiftEvents,
|
||||
RotationFormLiveParams,
|
||||
ScheduleScoreQualityResponse,
|
||||
ShiftSwap,
|
||||
} from './schedule.types';
|
||||
|
||||
export class ScheduleStore extends BaseStore {
|
||||
|
|
@ -44,6 +45,9 @@ export class ScheduleStore extends BaseStore {
|
|||
@observable.shallow
|
||||
relatedUsers: { [id: string]: { [key: string]: Event } } = {};
|
||||
|
||||
@observable.shallow
|
||||
shiftSwaps: { [id: string]: ShiftSwap } = {};
|
||||
|
||||
@observable.shallow
|
||||
rotations: {
|
||||
[id: string]: {
|
||||
|
|
@ -432,4 +436,24 @@ export class ScheduleStore extends BaseStore {
|
|||
method: 'GET',
|
||||
});
|
||||
}
|
||||
|
||||
async createShiftSwap(params: Partial<ShiftSwap>) {
|
||||
return await makeRequest(`/shift_swaps/`, { method: 'POST', data: params }).catch(this.onApiError);
|
||||
}
|
||||
|
||||
async deleteShiftSwap(shiftSwapId: ShiftSwap['id']) {
|
||||
return await makeRequest(`/shift_swaps/${shiftSwapId}`, { method: 'DELETE' }).catch(this.onApiError);
|
||||
}
|
||||
|
||||
async takeShiftSwap(shiftSwapId: ShiftSwap['id']) {
|
||||
return await makeRequest(`/shift_swaps/${shiftSwapId}/take`, { method: 'POST' }).catch(this.onApiError);
|
||||
}
|
||||
|
||||
async loadShiftSwap(id: ShiftSwap['id']) {
|
||||
const result = await makeRequest(`/shift_swaps/${id}`, {});
|
||||
|
||||
this.shiftSwaps = { ...this.shiftSwaps, [id]: result };
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -81,6 +81,11 @@ export interface Rotation {
|
|||
|
||||
export type RotationType = 'final' | 'rotation' | 'override';
|
||||
|
||||
export interface SwapRequest {
|
||||
pk: ShiftSwap['id'];
|
||||
user: Partial<User> & { display_name: string };
|
||||
}
|
||||
|
||||
export interface Event {
|
||||
all_day: boolean;
|
||||
calendar_type: ScheduleType;
|
||||
|
|
@ -92,7 +97,11 @@ export interface Event {
|
|||
shift: { pk: Shift['id'] | null };
|
||||
source: string;
|
||||
start: string;
|
||||
users: Array<{ display_name: User['username']; pk: User['pk'] }>;
|
||||
users: Array<{
|
||||
display_name: User['username'];
|
||||
pk: User['pk'];
|
||||
swap_request?: SwapRequest;
|
||||
}>;
|
||||
is_override: boolean;
|
||||
}
|
||||
|
||||
|
|
@ -111,6 +120,7 @@ export interface Layer {
|
|||
export interface ShiftEvents {
|
||||
shiftId: string;
|
||||
events: Event[];
|
||||
priority: number;
|
||||
isPreview?: boolean;
|
||||
}
|
||||
|
||||
|
|
@ -120,6 +130,19 @@ export interface ScheduleScoreQualityResponse {
|
|||
overloaded_users: Array<{ id: string; username: string; score: number }>;
|
||||
}
|
||||
|
||||
export interface ShiftSwap {
|
||||
id: string;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
schedule: Schedule['id'];
|
||||
swap_start: string;
|
||||
swap_end: string;
|
||||
beneficiary: User['pk'];
|
||||
status: 'open' | 'taken' | 'past_due';
|
||||
benefactor: User['pk'];
|
||||
description: string;
|
||||
}
|
||||
|
||||
export enum ScheduleScoreQualityResult {
|
||||
Bad = 'Bad',
|
||||
Low = 'Low',
|
||||
|
|
|
|||
|
|
@ -27,6 +27,7 @@ import ScheduleQuality from 'components/ScheduleQuality/ScheduleQuality';
|
|||
import Text from 'components/Text/Text';
|
||||
import UserTimezoneSelect from 'components/UserTimezoneSelect/UserTimezoneSelect';
|
||||
import WithConfirm from 'components/WithConfirm/WithConfirm';
|
||||
import ShiftSwapForm from 'containers/RotationForm/ShiftSwapForm';
|
||||
import Rotations from 'containers/Rotations/Rotations';
|
||||
import ScheduleFinal from 'containers/Rotations/ScheduleFinal';
|
||||
import ScheduleOverrides from 'containers/Rotations/ScheduleOverrides';
|
||||
|
|
@ -34,7 +35,7 @@ import ScheduleForm from 'containers/ScheduleForm/ScheduleForm';
|
|||
import ScheduleICalSettings from 'containers/ScheduleIcalLink/ScheduleIcalLink';
|
||||
import UsersTimezones from 'containers/UsersTimezones/UsersTimezones';
|
||||
import { WithPermissionControlTooltip } from 'containers/WithPermissionControl/WithPermissionControlTooltip';
|
||||
import { Schedule, ScheduleType, Shift } from 'models/schedule/schedule.types';
|
||||
import { Event, Schedule, ScheduleType, Shift, ShiftSwap } from 'models/schedule/schedule.types';
|
||||
import { Timezone } from 'models/timezone/timezone.types';
|
||||
import { PageProps, WithStoreProps } from 'state/types';
|
||||
import { withMobXProviderContext } from 'state/withStore';
|
||||
|
|
@ -64,10 +65,14 @@ interface SchedulePageState extends PageBaseState {
|
|||
showScheduleICalSettings: boolean;
|
||||
lastUpdated: number;
|
||||
filters: ScheduleFiltersType;
|
||||
shiftSwapIdToShowForm?: ShiftSwap['id'] | 'new';
|
||||
shiftSwapParamsToShowForm?: Partial<ShiftSwap>;
|
||||
}
|
||||
|
||||
@observer
|
||||
class SchedulePage extends React.Component<SchedulePageProps, SchedulePageState> {
|
||||
highlightMyShiftsWasToggled = false;
|
||||
|
||||
constructor(props: SchedulePageProps) {
|
||||
super(props);
|
||||
|
||||
|
|
@ -132,6 +137,8 @@ class SchedulePage extends React.Component<SchedulePageProps, SchedulePageState>
|
|||
shiftStartToShowOverrideForm,
|
||||
shiftEndToShowOverrideForm,
|
||||
filters,
|
||||
shiftSwapIdToShowForm,
|
||||
shiftSwapParamsToShowForm,
|
||||
} = this.state;
|
||||
|
||||
const { isNotFoundError } = errorData;
|
||||
|
|
@ -276,10 +283,17 @@ class SchedulePage extends React.Component<SchedulePageProps, SchedulePageState>
|
|||
scheduleId={scheduleId}
|
||||
currentTimezone={currentTimezone}
|
||||
startMoment={startMoment}
|
||||
onClick={this.handleShowForm}
|
||||
disabled={disabledRotationForm}
|
||||
onShowOverrideForm={this.handleShowOverridesForm}
|
||||
filters={filters}
|
||||
onShowShiftSwapForm={this.handleShowShiftSwapForm}
|
||||
onSlotClick={
|
||||
shiftSwapIdToShowForm
|
||||
? this.adjustShiftSwapForm
|
||||
: (event: Event) => {
|
||||
this.handleShowForm(event.shift.pk);
|
||||
}
|
||||
}
|
||||
/>
|
||||
<Rotations
|
||||
scheduleId={scheduleId}
|
||||
|
|
@ -293,6 +307,8 @@ class SchedulePage extends React.Component<SchedulePageProps, SchedulePageState>
|
|||
onShowOverrideForm={this.handleShowOverridesForm}
|
||||
disabled={disabledRotationForm}
|
||||
filters={filters}
|
||||
onShowShiftSwapForm={this.handleShowShiftSwapForm}
|
||||
onSlotClick={shiftSwapIdToShowForm ? this.adjustShiftSwapForm : undefined}
|
||||
/>
|
||||
<ScheduleOverrides
|
||||
scheduleId={scheduleId}
|
||||
|
|
@ -331,6 +347,16 @@ class SchedulePage extends React.Component<SchedulePageProps, SchedulePageState>
|
|||
<ScheduleICalSettings id={scheduleId} />
|
||||
</Modal>
|
||||
)}
|
||||
{shiftSwapIdToShowForm && (
|
||||
<ShiftSwapForm
|
||||
id={shiftSwapIdToShowForm}
|
||||
scheduleId={scheduleId}
|
||||
currentTimezone={currentTimezone}
|
||||
params={shiftSwapParamsToShowForm}
|
||||
onHide={this.handleHideShiftSwapForm}
|
||||
onUpdate={this.updateEvents}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</PageErrorHandlingWrapper>
|
||||
|
|
@ -537,6 +563,52 @@ class SchedulePage extends React.Component<SchedulePageProps, SchedulePageState>
|
|||
|
||||
store.scheduleStore.delete(id).then(() => history.replace(`${PLUGIN_ROOT}/schedules`));
|
||||
};
|
||||
|
||||
handleShowShiftSwapForm = (id: ShiftSwap['id'], params: Partial<ShiftSwap>) => {
|
||||
const { filters } = this.state;
|
||||
|
||||
const {
|
||||
store: { userStore },
|
||||
} = this.props;
|
||||
|
||||
if (!filters.users.includes(userStore.currentUserPk)) {
|
||||
this.setState({ filters: { ...filters, users: [...this.state.filters.users, userStore.currentUserPk] } });
|
||||
this.highlightMyShiftsWasToggled = true;
|
||||
}
|
||||
|
||||
this.setState({ shiftSwapIdToShowForm: id, shiftSwapParamsToShowForm: params });
|
||||
};
|
||||
|
||||
handleHideShiftSwapForm = () => {
|
||||
const { filters } = this.state;
|
||||
|
||||
const {
|
||||
store: { userStore },
|
||||
} = this.props;
|
||||
|
||||
if (this.highlightMyShiftsWasToggled) {
|
||||
this.highlightMyShiftsWasToggled = false;
|
||||
const index = filters.users.indexOf(userStore.currentUserPk);
|
||||
|
||||
if (index > -1) {
|
||||
const newUsers = [...filters.users];
|
||||
newUsers.splice(index, 1);
|
||||
|
||||
this.setState({ filters: { ...filters, users: newUsers } });
|
||||
}
|
||||
}
|
||||
this.setState({ shiftSwapIdToShowForm: undefined, shiftSwapParamsToShowForm: undefined });
|
||||
};
|
||||
|
||||
adjustShiftSwapForm = (event: Event) => {
|
||||
this.setState({
|
||||
shiftSwapParamsToShowForm: {
|
||||
...this.state.shiftSwapParamsToShowForm,
|
||||
swap_start: event.start,
|
||||
swap_end: event.end,
|
||||
},
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
export default withRouter(withMobXProviderContext(SchedulePage));
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue