Joey Orlando 2023-12-05 07:56:23 -05:00 committed by GitHub
commit 168efc730c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
32 changed files with 454 additions and 135 deletions

View file

@@ -7,6 +7,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## Unreleased
## v1.3.71 (2023-12-05)
### Added
- Add `datetimeformat_as_timezone` Jinja2 template helper filter by @jorgeav ([#3426](https://github.com/grafana/oncall/pull/3426))
### Changed
- Disallow creating and deleting direct paging integrations by @vadimkerr ([#3475](https://github.com/grafana/oncall/pull/3475))
## v1.3.70 (2023-12-01)
Maintenance release

View file

@@ -205,8 +205,12 @@ Built-in functions:
- `tojson_pretty` - same as tojson, but prettified
- `iso8601_to_time` - converts time from iso8601 (`2015-02-17T18:30:20.000Z`) to datetime
- `datetimeformat` - converts time from datetime to the given format (`%H:%M / %d-%m-%Y` by default)
- `datetimeformat_as_timezone` - same as `datetimeformat`, with the inclusion of timezone conversion (`UTC` by default)
  - Usage example: `{{ payload.alerts.startsAt | iso8601_to_time | datetimeformat_as_timezone('%Y-%m-%dT%H:%M:%S%z', 'America/Chicago') }}`
- `regex_replace` - performs a regex find and replace
- `regex_match` - performs a regex match, returns `True` or `False`. Usage example: `{{ payload.ruleName | regex_match(".*") }}`
- `b64decode` - performs a base64 string decode. Usage example: `{{ payload.data | b64decode }}`
- `regex_match` - performs a regex match, returns `True` or `False`
  - Usage example: `{{ payload.ruleName | regex_match(".*") }}`
- `b64decode` - performs a base64 string decode
  - Usage example: `{{ payload.data | b64decode }}`
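
The timezone conversion in the `datetimeformat_as_timezone` usage example above can be reproduced outside OnCall in plain Python (a minimal sketch; `pytz` and the sample timestamp are illustrative, and `parse_datetime` is what `iso8601_to_time` wraps):

```python
from django.utils.dateparse import parse_datetime  # backs iso8601_to_time
from pytz import timezone

value = parse_datetime("2023-12-05T13:00:00.000Z")  # e.g. payload.alerts.startsAt
# equivalent of: ... | datetimeformat_as_timezone('%Y-%m-%dT%H:%M:%S%z', 'America/Chicago')
print(value.astimezone(timezone("America/Chicago")).strftime("%Y-%m-%dT%H:%M:%S%z"))
# -> 2023-12-05T07:00:00-0600
```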
{{< section >}}

View file

@@ -42,9 +42,9 @@ The above command returns JSON structured in the following way:
| Parameter | Unique | Required | Description |
| -------------------- | :----: | :--------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `name` | Yes | Yes | Schedule name. |
| `type` | No | Yes | Schedule type. May be `ical` (used for iCalendar integration) or `calendar` (used for manually created on-call shifts). |
| `type` | No | Yes | Schedule type. May be `ical` (used for iCalendar integration), `calendar` (used for manually created on-call shifts), or `web` (used for schedules managed via the web UI). |
| `team_id` | No | No | ID of the team. |
| `time_zone` | No | Optional | Schedule time zone. Is used for manually added on-call shifts in Schedules with type `calendar`. Default time zone is `UTC`. For more information about time zones, see [time zones](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). |
| `time_zone` | No | Yes | Schedule time zone. It is used for manually added on-call shifts in Schedules with type `calendar`. Default time zone is `UTC`. For more information about time zones, see [time zones](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). Not used for schedules with type `ical`. |
| `ical_url_primary` | No | If type = `ical` | URL of external iCal calendar for schedule with type `ical`. |
| `ical_url_overrides` | No | Optional | URL of external iCal calendar for schedule with any type. Events from this calendar override events from primary calendar or from on-call shifts. |
| `enable_web_overrides` | No | Optional | Whether to enable web overrides or not. Setting specific for API/Terraform based schedules (`calendar` type). |
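
For reference, a request exercising the new `web` type might look like the following (a sketch in Python; the instance URL and token are placeholders, and the `/api/v1/schedules/` path is assumed from the schedule-list endpoint documented above):

```python
import requests

response = requests.post(
    "https://oncall.example.com/api/v1/schedules/",  # placeholder instance URL
    headers={"Authorization": "<public-api-token>"},
    json={
        "name": "Primary on-call",
        "type": "web",         # the newly documented web-UI managed type
        "time_zone": "UTC",    # per the table above; not used for `ical` schedules
    },
)
response.raise_for_status()
print(response.json())
```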

View file

@@ -349,6 +349,10 @@ class AlertReceiveChannelSerializer(
    def validate_integration(integration):
        if integration is None or integration not in AlertReceiveChannel.WEB_INTEGRATION_CHOICES:
            raise BadRequest(detail="invalid integration")
        if integration == AlertReceiveChannel.INTEGRATION_DIRECT_PAGING:
            raise BadRequest(detail="Direct paging integrations can't be created")
        return integration
    def validate_verbal_name(self, verbal_name):
@@ -372,7 +376,8 @@ class AlertReceiveChannelSerializer(
        return IntegrationHeartBeatSerializer(heartbeat).data
    def get_allow_delete(self, obj: "AlertReceiveChannel"):
        return True
        # don't allow deleting direct paging integrations
        return obj.integration != AlertReceiveChannel.INTEGRATION_DIRECT_PAGING
    def get_alert_count(self, obj: "AlertReceiveChannel"):
        return 0

View file

@@ -785,45 +785,17 @@ def test_get_alert_receive_channels_direct_paging_present_for_filters(
@pytest.mark.django_db
def test_create_alert_receive_channels_direct_paging(
def test_cant_create_alert_receive_channels_direct_paging(
    make_organization_and_user_with_plugin_token, make_team, make_alert_receive_channel, make_user_auth_headers
):
    organization, user, token = make_organization_and_user_with_plugin_token()
    team = make_team(organization)
    client = APIClient()
    url = reverse("api-internal:alert_receive_channel-list")
    response_1 = client.post(
    response = client.post(
        url, data={"integration": "direct_paging"}, format="json", **make_user_auth_headers(user, token)
    )
    response_2 = client.post(
        url, data={"integration": "direct_paging"}, format="json", **make_user_auth_headers(user, token)
    )
    response_3 = client.post(
        url,
        data={"integration": "direct_paging", "team": team.public_primary_key},
        format="json",
        **make_user_auth_headers(user, token),
    )
    response_4 = client.post(
        url,
        data={"integration": "direct_paging", "team": team.public_primary_key},
        format="json",
        **make_user_auth_headers(user, token),
    )
    # Check direct paging integration for "No team" is created
    assert response_1.status_code == status.HTTP_201_CREATED
    # Check direct paging integration is not created, as it already exists for "No team"
    assert response_2.status_code == status.HTTP_400_BAD_REQUEST
    # Check direct paging integration for team is created
    assert response_3.status_code == status.HTTP_201_CREATED
    # Check direct paging integration is not created, as it already exists for team
    assert response_4.status_code == status.HTTP_400_BAD_REQUEST
    assert response_4.json()["detail"] == AlertReceiveChannel.DuplicateDirectPagingError.DETAIL
    assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
@@ -852,6 +824,27 @@ def test_update_alert_receive_channels_direct_paging(
    assert response.json()["detail"] == AlertReceiveChannel.DuplicateDirectPagingError.DETAIL
@pytest.mark.django_db
def test_cant_delete_direct_paging_integration(
    make_organization_and_user_with_plugin_token, make_alert_receive_channel, make_user_auth_headers
):
    organization, user, token = make_organization_and_user_with_plugin_token()
    integration = make_alert_receive_channel(organization, integration=AlertReceiveChannel.INTEGRATION_DIRECT_PAGING)
    # check allow_delete is False (so the frontend can hide the delete button)
    client = APIClient()
    url = reverse("api-internal:alert_receive_channel-detail", kwargs={"pk": integration.public_primary_key})
    response = client.get(url, **make_user_auth_headers(user, token))
    assert response.status_code == status.HTTP_200_OK
    assert response.json()["allow_delete"] is False
    # check delete is not allowed
    client = APIClient()
    url = reverse("api-internal:alert_receive_channel-detail", kwargs={"pk": integration.public_primary_key})
    response = client.delete(url, **make_user_auth_headers(user, token))
    assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
def test_start_maintenance_integration(
    make_user_auth_headers,

View file

@@ -134,6 +134,14 @@ class AlertReceiveChannelView(
            new_state=new_state,
        )
    def destroy(self, request, *args, **kwargs):
        # don't allow deleting direct paging integrations
        instance = self.get_object()
        if instance.integration == AlertReceiveChannel.INTEGRATION_DIRECT_PAGING:
            raise BadRequest(detail="Direct paging integrations can't be deleted")
        return super().destroy(request, *args, **kwargs)
    def perform_destroy(self, instance):
        write_resource_insight_log(
            instance=instance,

View file

@@ -22,6 +22,17 @@ class DatabaseBlocker(_DatabaseBlocker):
        raise OperationalError("Database access disabled")
def setup_failing_redis_cache(settings):
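    # Point the default cache at an unreachable Redis host. With
    # DJANGO_REDIS_IGNORE_EXCEPTIONS=True (django-redis) and RATELIMIT_FAIL_OPEN
    # (django-ratelimit) cache errors are swallowed, simulating a Redis outage
    # that "fails open" instead of breaking request handling.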
    settings.DJANGO_REDIS_IGNORE_EXCEPTIONS = True
    settings.RATELIMIT_FAIL_OPEN = True
    settings.CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": "redis://no-redis-here/",
        }
    }
@pytest.mark.django_db
def test_integration_json_data_too_big(settings, make_organization_and_user, make_alert_receive_channel):
    settings.DATA_UPLOAD_MAX_MEMORY_SIZE = 50
@@ -293,3 +304,96 @@ def test_integration_grafana_endpoint_without_db_has_alerts(
            call((alert_receive_channel.pk, data["alerts"][1])),
        ]
    )
@patch("apps.integrations.views.create_alert")
@pytest.mark.parametrize(
    "integration_type",
    [
        arc_type
        for arc_type in AlertReceiveChannel.INTEGRATION_TYPES
        if arc_type not in ["amazon_sns", "grafana", "alertmanager", "grafana_alerting", "maintenance"]
    ],
)
@pytest.mark.django_db
def test_integration_universal_endpoint_works_without_cache(
    mock_create_alert,
    make_organization_and_user,
    make_alert_receive_channel,
    integration_type,
    settings,
):
    # setup failing redis cache and ignore exception settings
    setup_failing_redis_cache(settings)
    organization, user = make_organization_and_user()
    alert_receive_channel = make_alert_receive_channel(
        organization=organization,
        author=user,
        integration=integration_type,
    )
    client = APIClient()
    url = reverse(
        "integrations:universal",
        kwargs={"integration_type": integration_type, "alert_channel_key": alert_receive_channel.token},
    )
    data = {"foo": "bar"}
    response = client.post(url, data, format="json")
    assert response.status_code == status.HTTP_200_OK
    mock_create_alert.apply_async.assert_called_once_with(
        [],
        {
            "title": None,
            "message": None,
            "image_url": None,
            "link_to_upstream_details": None,
            "alert_receive_channel_pk": alert_receive_channel.pk,
            "integration_unique_data": None,
            "raw_request_data": data,
        },
    )
@patch("apps.integrations.views.create_alertmanager_alerts")
@pytest.mark.django_db
def test_integration_grafana_endpoint_without_cache_has_alerts(
    mock_create_alertmanager_alerts, settings, make_organization_and_user, make_alert_receive_channel
):
    settings.DEBUG = False
    # setup failing redis cache and ignore exception settings
    setup_failing_redis_cache(settings)
    integration_type = "grafana"
    organization, user = make_organization_and_user()
    alert_receive_channel = make_alert_receive_channel(
        organization=organization,
        author=user,
        integration=integration_type,
    )
    client = APIClient()
    url = reverse("integrations:grafana", kwargs={"alert_channel_key": alert_receive_channel.token})
    data = {
        "alerts": [
            {
                "foo": 123,
            },
            {
                "foo": 456,
            },
        ]
    }
    response = client.post(url, data, format="json")
    assert response.status_code == status.HTTP_200_OK
    mock_create_alertmanager_alerts.apply_async.assert_has_calls(
        [
            call((alert_receive_channel.pk, data["alerts"][0])),
            call((alert_receive_channel.pk, data["alerts"][1])),
        ]
    )

View file

@@ -103,6 +103,10 @@ def _custom_labels(alert_receive_channel: "AlertReceiveChannel", raw_request_dat
        value = rendered_labels[key]
        # check value length
        if len(value) == 0:
            logger.warning("Template result value is empty. %s", value)
            continue
        if len(value) > MAX_VALUE_NAME_LENGTH:
            logger.warning("Template result value is too long. %s", value)
            continue
@@ -147,11 +151,19 @@ def _template_labels(alert_receive_channel: "AlertReceiveChannel", raw_request_d
        value = str(value)
        # check key length
        if len(key) == 0:
            logger.warning("Template result key is empty. %s", key)
            continue
        if len(key) > MAX_KEY_NAME_LENGTH:
            logger.warning("Template result key is too long. %s", key)
            continue
        # check value length
        if len(value) == 0:
            logger.warning("Template result value is empty. %s", value)
            continue
        if len(value) > MAX_VALUE_NAME_LENGTH:
            logger.warning("Template result value is too long. %s", value)
            continue

View file

@@ -46,6 +46,7 @@ def test_assign_labels(
    label_key_1 = make_label_key(organization=organization, key_name="c")
    label_key_2 = make_label_key(organization=organization)
    label_key_3 = make_label_key(organization=organization)
    label_key_4 = make_label_key(organization=organization)
    # create alert receive channel with all 3 types of labels
    alert_receive_channel = make_alert_receive_channel(
@@ -56,6 +57,7 @@ def test_assign_labels(
            [label_key_2.id, "nonexistent", None],  # plain label with nonexistent value ID
            [label_key_1.id, None, "{{ payload.c }}"],  # templated label
            [label_key_3.id, None, TOO_LONG_VALUE_NAME],  # templated label too long
            [label_key_4.id, None, "{{ payload.nonexistent }}"],  # templated label with nonexistent key
        ],
        alert_group_labels_template="{{ payload.advanced_template | tojson }}",
    )
@@ -94,8 +96,6 @@ def test_assign_labels(
def test_assign_labels_custom_labels_none(
    make_organization,
    make_alert_receive_channel,
    make_label_key_and_value,
    make_label_key,
    make_integration_label_association,
):
    organization = make_organization()

View file

@@ -16,18 +16,18 @@ def test_labels_feature_flag(mock_is_labels_feature_enabled_for_org, make_organi
    organization = make_organization()
    # returns True if feature flag is enabled
    assert settings.FEATURE_LABELS_ENABLED_FOR_ALL
    assert organization.id not in settings.FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS
    assert organization.id not in settings.FEATURE_LABELS_ENABLED_PER_ORG
    assert is_labels_feature_enabled(organization)
    mock_is_labels_feature_enabled_for_org(organization.org_id)
    mock_is_labels_feature_enabled_for_org(organization.id)
    # returns True if feature flag is disabled and organization is in the feature list
    assert not settings.FEATURE_LABELS_ENABLED_FOR_ALL
    assert organization.org_id in settings.FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS
    assert organization.id in settings.FEATURE_LABELS_ENABLED_PER_ORG
    assert is_labels_feature_enabled(organization)
    mock_is_labels_feature_enabled_for_org(12345)
    # returns False if feature flag is disabled and organization is not in the feature list
    assert organization.org_id not in settings.FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS
    assert organization.org_id not in settings.FEATURE_LABELS_ENABLED_PER_ORG
    assert not is_labels_feature_enabled(organization)

View file

@@ -51,10 +51,7 @@ def get_associating_label_model(obj_model_name: str) -> typing.Type["AssociatedL
def is_labels_feature_enabled(organization: "Organization") -> bool:
    return (
        settings.FEATURE_LABELS_ENABLED_FOR_ALL
        or organization.org_id in settings.FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS  # Grafana org ID, not OnCall org ID
    )
    return settings.FEATURE_LABELS_ENABLED_FOR_ALL or organization.id in settings.FEATURE_LABELS_ENABLED_PER_ORG
def get_labels_dict(labelable) -> dict[str, str]:

View file

@@ -22,6 +22,7 @@ from apps.metrics_exporter.constants import (
    RecalculateMetricsTimer,
    UserWasNotifiedOfAlertGroupsMetricsDict,
)
from common.cache import ensure_cache_key_allocates_to_the_same_hash_slot
if typing.TYPE_CHECKING:
    from apps.alerts.models import AlertReceiveChannel
@@ -98,24 +99,27 @@ def get_metrics_cache_timeout(organization_id):
def get_metrics_cache_timer_key(organization_id) -> str:
    return f"{METRICS_CACHE_TIMER}_{organization_id}"
def get_metrics_cache_timer_for_organization(organization_id):
    key = get_metrics_cache_timer_key(organization_id)
    return cache.get(key)
    return ensure_cache_key_allocates_to_the_same_hash_slot(
        f"{METRICS_CACHE_TIMER}_{organization_id}", METRICS_CACHE_TIMER
    )
def get_metric_alert_groups_total_key(organization_id) -> str:
    return f"{ALERT_GROUPS_TOTAL}_{organization_id}"
    return ensure_cache_key_allocates_to_the_same_hash_slot(
        f"{ALERT_GROUPS_TOTAL}_{organization_id}", ALERT_GROUPS_TOTAL
    )
def get_metric_alert_groups_response_time_key(organization_id) -> str:
    return f"{ALERT_GROUPS_RESPONSE_TIME}_{organization_id}"
    return ensure_cache_key_allocates_to_the_same_hash_slot(
        f"{ALERT_GROUPS_RESPONSE_TIME}_{organization_id}", ALERT_GROUPS_RESPONSE_TIME
    )
def get_metric_user_was_notified_of_alert_groups_key(organization_id) -> str:
    return f"{USER_WAS_NOTIFIED_OF_ALERT_GROUPS}_{organization_id}"
    return ensure_cache_key_allocates_to_the_same_hash_slot(
        f"{USER_WAS_NOTIFIED_OF_ALERT_GROUPS}_{organization_id}", USER_WAS_NOTIFIED_OF_ALERT_GROUPS
    )
def get_metric_calculation_started_key(metric_name) -> str:
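
The effect of the wrapper above is easiest to see with concrete keys (a sketch: the literal prefix stands in for constants such as `ALERT_GROUPS_TOTAL`, and `USE_REDIS_CLUSTER=True` is assumed):

```python
from common.cache import ensure_cache_key_allocates_to_the_same_hash_slot

# With USE_REDIS_CLUSTER=True the shared prefix is wrapped in "{...}", so Redis
# Cluster hashes only the prefix and every organization's key lands in the same
# hash slot, keeping multi-key operations like get_many/set_many usable:
key = ensure_cache_key_allocates_to_the_same_hash_slot("alert_groups_total_42", "alert_groups_total")
assert key == "{alert_groups_total}_42"
```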

View file

@@ -15,6 +15,7 @@ from apps.mobile_app.types import FCMMessageData, MessageType, Platform
from apps.mobile_app.utils import MAX_RETRIES, construct_fcm_message, send_push_notification
from apps.schedules.models.on_call_schedule import OnCallSchedule, ScheduleEvent
from apps.user_management.models import User
from common.cache import ensure_cache_key_allocates_to_the_same_hash_slot
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
from common.l10n import format_localized_datetime, format_localized_time
@@ -164,7 +165,10 @@ def _should_we_send_push_notification(
def _generate_cache_key(user_pk: str, schedule_event: ScheduleEvent) -> str:
    return f"going_oncall_push_notification:{user_pk}:{schedule_event['shift']['pk']}"
    KEY_PREFIX = "going_oncall_push_notification"
    return ensure_cache_key_allocates_to_the_same_hash_slot(
        f"{KEY_PREFIX}:{user_pk}:{schedule_event['shift']['pk']}", KEY_PREFIX
    )
@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), retry_backoff=True, max_retries=MAX_RETRIES)

View file

@@ -67,6 +67,8 @@ class IntegrationTypeField(fields.CharField):
            raise BadRequest(detail="Invalid integration type")
        if has_legacy_prefix(data):
            raise BadRequest("This integration type is deprecated")
        if data == AlertReceiveChannel.INTEGRATION_DIRECT_PAGING:
            raise BadRequest(detail="Direct paging integrations can't be created")
        return data

View file

@@ -820,35 +820,15 @@ def test_update_integration_default_route(
@pytest.mark.django_db
def test_create_integrations_direct_paging(
def test_cant_create_integrations_direct_paging(
    make_organization_and_user_with_token, make_team, make_alert_receive_channel, make_user_auth_headers
):
    organization, _, token = make_organization_and_user_with_token()
    team = make_team(organization)
    client = APIClient()
    url = reverse("api-public:integrations-list")
    response_1 = client.post(url, data={"type": "direct_paging"}, format="json", HTTP_AUTHORIZATION=token)
    response_2 = client.post(url, data={"type": "direct_paging"}, format="json", HTTP_AUTHORIZATION=token)
    response_3 = client.post(
        url, data={"type": "direct_paging", "team_id": team.public_primary_key}, format="json", HTTP_AUTHORIZATION=token
    )
    response_4 = client.post(
        url, data={"type": "direct_paging", "team_id": team.public_primary_key}, format="json", HTTP_AUTHORIZATION=token
    )
    # Check direct paging integration for "No team" is created
    assert response_1.status_code == status.HTTP_201_CREATED
    # Check direct paging integration is not created, as it already exists for "No team"
    assert response_2.status_code == status.HTTP_400_BAD_REQUEST
    # Check direct paging integration for team is created
    assert response_3.status_code == status.HTTP_201_CREATED
    # Check direct paging integration is not created, as it already exists for team
    assert response_4.status_code == status.HTTP_400_BAD_REQUEST
    assert response_4.data["detail"] == AlertReceiveChannel.DuplicateDirectPagingError.DETAIL
    response = client.post(url, data={"type": "direct_paging"}, format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
@@ -873,6 +853,17 @@ def test_update_integrations_direct_paging(
    assert response.data["detail"] == AlertReceiveChannel.DuplicateDirectPagingError.DETAIL
@pytest.mark.django_db
def test_cant_delete_direct_paging_integration(make_organization_and_user_with_token, make_alert_receive_channel):
    organization, user, token = make_organization_and_user_with_token()
    integration = make_alert_receive_channel(organization, integration=AlertReceiveChannel.INTEGRATION_DIRECT_PAGING)
    client = APIClient()
    url = reverse("api-public:integrations-detail", args=[integration.public_primary_key])
    response = client.delete(url, HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
def test_get_integration_type_legacy(
    make_organization_and_user_with_token, make_alert_receive_channel, make_channel_filter, make_integration_heartbeat

View file

@@ -8,6 +8,7 @@ from apps.alerts.models import AlertReceiveChannel
from apps.auth_token.auth import ApiTokenAuthentication
from apps.public_api.serializers import IntegrationSerializer, IntegrationUpdateSerializer
from apps.public_api.throttlers.user_throttle import UserThrottle
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.filters import ByTeamFilter
from common.api_helpers.mixins import FilterSerializerMixin, RateLimitHeadersMixin, UpdateSerializerMixin
from common.api_helpers.paginators import FiftyPageSizePaginator
@@ -70,6 +71,14 @@ class IntegrationView(
            new_state=new_state,
        )
    def destroy(self, request, *args, **kwargs):
        # don't allow deleting direct paging integrations
        instance = self.get_object()
        if instance.integration == AlertReceiveChannel.INTEGRATION_DIRECT_PAGING:
            raise BadRequest(detail="Direct paging integrations can't be deleted")
        return super().destroy(request, *args, **kwargs)
    def perform_destroy(self, instance):
        write_resource_insight_log(instance=instance, author=self.request.user, event=EntityEvent.DELETED)
        instance.delete()

View file

@@ -35,6 +35,7 @@ from apps.schedules.constants import (
    RE_PRIORITY,
)
from apps.schedules.ical_events import ical_events
from common.cache import ensure_cache_key_allocates_to_the_same_hash_slot
from common.timezones import is_valid_timezone
from common.utils import timed_lru_cache
@@ -403,15 +404,24 @@ def get_cached_oncall_users_for_multiple_schedules(schedules: typing.List["OnCal
    from apps.schedules.models import OnCallSchedule
    from apps.user_management.models import User
    CACHE_KEY_PREFIX = "schedule_oncall_users_"
    def _generate_cache_key_for_schedule_oncall_users(schedule: "OnCallSchedule") -> str:
        return f"schedule_{schedule.public_primary_key}_oncall_users"
        return ensure_cache_key_allocates_to_the_same_hash_slot(
            f"{CACHE_KEY_PREFIX}{schedule.public_primary_key}", CACHE_KEY_PREFIX
        )
    def _get_schedule_public_primary_key_from_schedule_oncall_users_cache_key(cache_key: str) -> str:
        return cache_key.replace("schedule_", "").replace("_oncall_users", "")
        """
        Remove any brackets that might be included in the cache key (when Redis Cluster is active).
        See `_generate_cache_key_for_schedule_oncall_users` just above.
        """
        cache_key = cache_key.replace("{", "").replace("}", "")
        return cache_key.replace(CACHE_KEY_PREFIX, "")
    CACHE_TTL = 15 * 60  # 15 minutes in seconds
    cache_keys: typing.List[str] = [_generate_cache_key_for_schedule_oncall_users(schedule) for schedule in schedules]
    cache_keys = [_generate_cache_key_for_schedule_oncall_users(schedule) for schedule in schedules]
    # get_many returns a dictionary with all the keys we asked for that actually exist
    # in the cache (and haven't expired)
View file

@@ -584,7 +584,7 @@ def test_get_cached_oncall_users_for_multiple_schedules(
        return users, (schedule1, schedule2, schedule3)
    def _generate_cache_key(schedule):
        return f"schedule_{schedule.public_primary_key}_oncall_users"
        return f"schedule_oncall_users_{schedule.public_primary_key}"
    # scenario: nothing is cached, need to recalculate everything and cache it
    users, schedules = _test_setup()

engine/common/cache.py (new file, 41 lines)
View file

@@ -0,0 +1,41 @@
import typing
from django.conf import settings
_RT = typing.TypeVar("_RT", str, typing.List[str], typing.Dict[str, typing.Any])
def ensure_cache_key_allocates_to_the_same_hash_slot(cache_keys: _RT, pattern_to_wrap_in_brackets: str) -> _RT:
    """
    This method will ensure that when using Redis Cluster, multiple cache keys will be allocated to the same hash slot.
    This ensures that multi-key operations (ex `cache.get_many` and `cache.set_many`) will work without raising this
    exception:
    ```
    File "/usr/local/lib/python3.11/site-packages/redis/cluster.py", line 1006, in determine_slot
        raise RedisClusterException(
    redis.exceptions.RedisClusterException: MGET - all keys must map to the same key slot
    ```
    From the Redis Cluster [docs](https://redis.io/docs/reference/cluster-spec/#hash-tags):
        There is an exception for the computation of the hash slot that is used in order to implement hash tags.
        Hash tags are a way to ensure that multiple keys are allocated in the same hash slot.
        This is used in order to implement multi-key operations in Redis Cluster.
        To implement hash tags, the hash slot for a key is computed in a slightly different way in certain conditions.
        If the key contains a "{...}" pattern only the substring between { and } is hashed in order to obtain the hash slot.
        However since it is possible that there are multiple occurrences of { or } the algorithm is well specified by the
        following rules:
    """
    if not settings.USE_REDIS_CLUSTER:
        return cache_keys
    def _replace_key(key: str) -> str:
        return key.replace(pattern_to_wrap_in_brackets, f"{{{pattern_to_wrap_in_brackets}}}")
    if isinstance(cache_keys, str):
        return _replace_key(cache_keys)
    elif isinstance(cache_keys, dict):
        return {_replace_key(key): value for key, value in cache_keys.items()}
    return [_replace_key(key) for key in cache_keys]

View file

@@ -3,6 +3,7 @@ import json
import re
from django.utils.dateparse import parse_datetime
from pytz import timezone
def datetimeformat(value, format="%H:%M / %d-%m-%Y"):
@@ -12,6 +13,13 @@ def datetimeformat(value, format="%H:%M / %d-%m-%Y"):
    return None
def datetimeformat_as_timezone(value, format="%H:%M / %d-%m-%Y", tz="UTC"):
    try:
        return value.astimezone(timezone(tz)).strftime(format)
    except (ValueError, AttributeError, TypeError):
        return None
def iso8601_to_time(value):
    try:
        return parse_datetime(value)

View file

@@ -6,6 +6,7 @@ from jinja2.sandbox import SandboxedEnvironment
from .filters import (
    b64decode,
    datetimeformat,
    datetimeformat_as_timezone,
    iso8601_to_time,
    json_dumps,
    regex_match,
@@ -22,6 +23,7 @@ def raise_security_exception(name):
jinja_template_env = SandboxedEnvironment(loader=BaseLoader())
jinja_template_env.filters["datetimeformat"] = datetimeformat
jinja_template_env.filters["datetimeformat_as_timezone"] = datetimeformat_as_timezone
jinja_template_env.filters["iso8601_to_time"] = iso8601_to_time
jinja_template_env.filters["tojson_pretty"] = to_pretty_json
jinja_template_env.globals["time"] = timezone.now

View file

@@ -1,7 +1,10 @@
import base64
import json
import pytest
from django.conf import settings
from django.utils.dateparse import parse_datetime
from pytz import timezone
from common.jinja_templater import apply_jinja_template
from common.jinja_templater.apply_jinja_template import JinjaTemplateError, JinjaTemplateWarning
@@ -14,6 +17,60 @@ def test_apply_jinja_template():
    assert payload == result
def test_apply_jinja_template_iso8601_to_time():
    payload = {"name": "2023-11-22T15:30:00.000000000Z"}
    result = apply_jinja_template(
        "{{ payload.name | iso8601_to_time }}",
        payload,
    )
    expected = str(parse_datetime(payload["name"]))
    assert result == expected
def test_apply_jinja_template_datetimeformat():
    payload = {"aware": "2023-05-28 23:11:12+0000", "naive": "2023-05-28 23:11:12"}
    assert apply_jinja_template(
        "{{ payload.aware | iso8601_to_time | datetimeformat('%Y-%m-%dT%H:%M:%S%z') }}",
        payload,
    ) == parse_datetime(payload["aware"]).strftime("%Y-%m-%dT%H:%M:%S%z")
    assert apply_jinja_template(
        "{{ payload.naive | iso8601_to_time | datetimeformat('%Y-%m-%dT%H:%M:%S%z') }}",
        payload,
    ) == parse_datetime(payload["naive"]).strftime("%Y-%m-%dT%H:%M:%S%z")
def test_apply_jinja_template_datetimeformat_as_timezone():
    payload = {"aware": "2023-05-28 23:11:12+0000", "naive": "2023-05-28 23:11:12"}
    assert apply_jinja_template(
        "{{ payload.aware | iso8601_to_time | datetimeformat_as_timezone('%Y-%m-%dT%H:%M:%S%z', 'America/Chicago') }}",
        payload,
    ) == parse_datetime(payload["aware"]).astimezone(timezone("America/Chicago")).strftime("%Y-%m-%dT%H:%M:%S%z")
    assert apply_jinja_template(
        "{{ payload.naive | iso8601_to_time | datetimeformat_as_timezone('%Y-%m-%dT%H:%M:%S%z', 'America/Chicago') }}",
        payload,
    ) == parse_datetime(payload["naive"]).astimezone(timezone("America/Chicago")).strftime("%Y-%m-%dT%H:%M:%S%z")
    with pytest.raises(JinjaTemplateWarning):
        apply_jinja_template(
            "{{ payload.aware | iso8601_to_time | datetimeformat_as_timezone('%Y-%m-%dT%H:%M:%S%z', 'potato') }}",
            payload,
        )
def test_apply_jinja_template_b64decode():
    payload = {"name": "SGVsbG8sIHdvcmxkIQ=="}
    assert apply_jinja_template(
        "{{ payload.name | b64decode }}",
        payload,
    ) == base64.b64decode(
        payload["name"]
    ).decode("utf-8")
def test_apply_jinja_template_json_dumps():
    payload = {"name": "test"}

View file

@@ -1,7 +0,0 @@
from common.jinja_templater.filters import b64decode
def test_base64_decode():
    original = "dGVzdCBzdHJpbmch"
    expected = "test string!"
    assert b64decode(original) == expected

View file

@@ -0,0 +1,51 @@
from django.test import override_settings
from common.cache import ensure_cache_key_allocates_to_the_same_hash_slot
PATTERN = "schedule_oncall_users"
NON_EXISTENT_PATTERN = "nmzxcnvmzxcv"
NUM_CACHE_KEYS = 5
SINGLE_CACHE_KEY = f"{PATTERN}_0"
CACHE_KEYS = [f"{PATTERN}_{pk}" for pk in range(NUM_CACHE_KEYS)]
SET_MANY_CACHE_KEYS_DICT = {k: "foo" for k in CACHE_KEYS}
def test_ensure_cache_key_allocates_to_the_same_hash_slot() -> None:
    def _convert_key(key: str) -> str:
        return key.replace(PATTERN, f"{{{PATTERN}}}")
    # when USE_REDIS_CLUSTER is False the method should just return the cache keys
    with override_settings(USE_REDIS_CLUSTER=False):
        assert ensure_cache_key_allocates_to_the_same_hash_slot(SINGLE_CACHE_KEY, PATTERN) == SINGLE_CACHE_KEY
        assert ensure_cache_key_allocates_to_the_same_hash_slot(CACHE_KEYS, PATTERN) == CACHE_KEYS
        assert (
            ensure_cache_key_allocates_to_the_same_hash_slot(SET_MANY_CACHE_KEYS_DICT, PATTERN)
            == SET_MANY_CACHE_KEYS_DICT
        )
    # when USE_REDIS_CLUSTER is True the method should wrap the specified pattern within the cache keys in curly brackets
    with override_settings(USE_REDIS_CLUSTER=True):
        # works with a single str cache key
        assert ensure_cache_key_allocates_to_the_same_hash_slot(SINGLE_CACHE_KEY, PATTERN) == _convert_key(
            SINGLE_CACHE_KEY
        )
        # works with a list (useful for cache.get_many operations)
        assert ensure_cache_key_allocates_to_the_same_hash_slot(CACHE_KEYS, PATTERN) == [
            _convert_key(k) for k in CACHE_KEYS
        ]
        # works with a dict (useful for cache.set_many operations)
        assert ensure_cache_key_allocates_to_the_same_hash_slot(SET_MANY_CACHE_KEYS_DICT, PATTERN) == {
            _convert_key(k): v for k, v in SET_MANY_CACHE_KEYS_DICT.items()
        }
        # if the pattern doesn't exist, we don't wrap it in brackets
        assert (
            ensure_cache_key_allocates_to_the_same_hash_slot(SINGLE_CACHE_KEY, NON_EXISTENT_PATTERN) == SINGLE_CACHE_KEY
        )
        assert ensure_cache_key_allocates_to_the_same_hash_slot(CACHE_KEYS, NON_EXISTENT_PATTERN) == CACHE_KEYS
        assert (
            ensure_cache_key_allocates_to_the_same_hash_slot(SET_MANY_CACHE_KEYS_DICT, NON_EXISTENT_PATTERN)
            == SET_MANY_CACHE_KEYS_DICT
        )

View file

@@ -200,7 +200,7 @@ def clear_ical_users_cache():
def mock_is_labels_feature_enabled_for_org(settings):
    def _mock_is_labels_feature_enabled_for_org(org_id):
        settings.FEATURE_LABELS_ENABLED_FOR_ALL = False
        settings.FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS = [org_id]
        settings.FEATURE_LABELS_ENABLED_PER_ORG = [org_id]
    return _mock_is_labels_feature_enabled_for_org

View file

@@ -54,3 +54,4 @@ lxml==4.9.2
babel==2.12.1
drf-spectacular==0.26.5
grpcio==1.57.0
markdown2==2.4.10

View file

@@ -69,8 +69,8 @@ GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED = getenv_boolean("GRAFANA_CLOUD_ONCALL_HE
GRAFANA_CLOUD_NOTIFICATIONS_ENABLED = getenv_boolean("GRAFANA_CLOUD_NOTIFICATIONS_ENABLED", default=True)
# Enable labels feature for all organizations. This flag overrides FEATURE_LABELS_ENABLED_PER_ORG
FEATURE_LABELS_ENABLED_FOR_ALL = getenv_boolean("FEATURE_LABELS_ENABLED_FOR_ALL", default=False)
# Enable labels feature for organizations from the list. Use Grafana org_id, not OnCall id, for this flag
FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS = getenv_list("FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS", default=list())
# Enable labels feature for organizations from the list. Use the OnCall organization ID for this flag
FEATURE_LABELS_ENABLED_PER_ORG = getenv_list("FEATURE_LABELS_ENABLED_PER_ORG", default=list())
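# example (illustrative): FEATURE_LABELS_ENABLED_PER_ORG="1,2,3" enables the
# labels feature for the OnCall organizations with IDs 1, 2 and 3, assuming
# getenv_list parses the env var as a comma-separated list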
TWILIO_API_KEY_SID = os.environ.get("TWILIO_API_KEY_SID")
TWILIO_API_KEY_SECRET = os.environ.get("TWILIO_API_KEY_SECRET")
@@ -194,6 +194,7 @@ REDIS_URI = os.getenv("REDIS_URI")
if not REDIS_URI:
    REDIS_URI = f"{REDIS_PROTOCOL}://{REDIS_USERNAME}:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_DATABASE}"
USE_REDIS_CLUSTER = getenv_boolean("USE_REDIS_CLUSTER", default=False)
REDIS_USE_SSL = os.getenv("REDIS_USE_SSL")
REDIS_SSL_CONFIG = {}
@@ -494,7 +495,7 @@ CELERY_BEAT_SCHEDULE = {
    },
    "start_refresh_ical_files": {
        "task": "apps.schedules.tasks.refresh_ical_files.start_refresh_ical_files",
        "schedule": 10 * 60,
        "schedule": crontab(minute="*/10"),  # every 10 minutes
        "args": (),
    },
    "start_notify_about_gaps_in_schedule": {
@@ -544,24 +545,24 @@ CELERY_BEAT_SCHEDULE = {
    },
    "process_failed_to_invoke_celery_tasks": {
        "task": "apps.base.tasks.process_failed_to_invoke_celery_tasks",
        "schedule": 60 * 10,
        "schedule": crontab(minute="*/10"),  # every 10 minutes
        "args": (),
    },
    "conditionally_send_going_oncall_push_notifications_for_all_schedules": {
        "task": "apps.mobile_app.tasks.going_oncall_notification.conditionally_send_going_oncall_push_notifications_for_all_schedules",
        "schedule": 10 * 60,
        "schedule": crontab(minute="*/10"),  # every 10 minutes
    },
    "notify_shift_swap_requests": {
        "task": "apps.mobile_app.tasks.new_shift_swap_request.notify_shift_swap_requests",
        "schedule": getenv_integer("NOTIFY_SHIFT_SWAP_REQUESTS_INTERVAL", default=10 * 60),
        "schedule": crontab(minute="*/{}".format(getenv_integer("NOTIFY_SHIFT_SWAP_REQUESTS_INTERVAL", default=10))),
    },
    "send_shift_swap_request_slack_followups": {
        "task": "apps.schedules.tasks.shift_swaps.slack_followups.send_shift_swap_request_slack_followups",
        "schedule": 10 * 60,
        "schedule": crontab(minute="*/10"),  # every 10 minutes
    },
    "save_organizations_ids_in_cache": {
        "task": "apps.metrics_exporter.tasks.save_organizations_ids_in_cache",
        "schedule": 60 * 30,
        "schedule": crontab(minute="*/30"),  # every 30 minutes
        "args": (),
    },
    "check_heartbeats": {
@@ -578,7 +579,11 @@ if ESCALATION_AUDITOR_ENABLED:
        #
        # ex. if the integration is configured to expect a heartbeat every 15 minutes then this value should be set
        # to something like 13 * 60 (every 13 minutes)
        "schedule": getenv_integer("ALERT_GROUP_ESCALATION_AUDITOR_CELERY_TASK_HEARTBEAT_INTERVAL", 13 * 60),
        "schedule": crontab(
            minute="*/{}".format(
                getenv_integer("ALERT_GROUP_ESCALATION_AUDITOR_CELERY_TASK_HEARTBEAT_INTERVAL", default=13)
            )
        ),
        "args": (),
    }
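
The switch from integer intervals to `crontab` subtly changes semantics: an integer schedule runs every N seconds measured from whenever Celery beat starts, while `crontab(minute="*/10")` fires at fixed wall-clock minutes, so runs stay aligned across restarts (note the env-var defaults above change from seconds to minutes accordingly). A minimal comparison using the real `celery.schedules.crontab` API:

```python
from celery.schedules import crontab

# interval: every 600 seconds, phase depends on when celery beat started
interval_schedule = 10 * 60

# crontab: at minutes 0, 10, 20, ... of every hour, regardless of beat start
# time, so different environments fire at predictable, aligned moments
crontab_schedule = crontab(minute="*/10")
```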

View file

@@ -80,7 +80,7 @@ export const genericTemplateCheatSheet: CheatSheetInterface = {
      { listItemName: 'payload - payload of last alert in the group' },
      { listItemName: 'web_title, web_message, web_image_url - templates from Web' },
      { listItemName: 'payload, grafana_oncall_link, grafana_oncall_incident_id, integration_name, source_link' },
      { listItemName: 'time(), datetimeformat, iso8601_to_time' },
      { listItemName: 'time(), datetimeformat, datetimeformat_as_timezone, iso8601_to_time' },
      { listItemName: 'to_pretty_json' },
      { listItemName: 'regex_replace, regex_match' },
      { listItemName: 'b64decode' },

View file

@@ -88,6 +88,11 @@ const IntegrationForm = observer((props: IntegrationFormProps) => {
      return false;
    }
    // don't allow creating direct paging integrations
    if (option.value === 'direct_paging') {
      return false;
    }
    return (
      option.display_name.toLowerCase().includes(filterValue.toLowerCase()) &&
      !option.value.toLowerCase().startsWith('legacy_')

View file

@@ -67,5 +67,7 @@
}
.template-editor-block-content {
  height: calc(100% - 60px);
  height: calc(100% - 57px);
  border-left: var(--border-weak);
  border-right: var(--border-weak);
}

View file

@@ -34,6 +34,7 @@ import IntegrationBlock from 'components/Integrations/IntegrationBlock';
import PageErrorHandlingWrapper, { PageBaseState } from 'components/PageErrorHandlingWrapper/PageErrorHandlingWrapper';
import { initErrorDataState } from 'components/PageErrorHandlingWrapper/PageErrorHandlingWrapper.helpers';
import PluginLink from 'components/PluginLink/PluginLink';
import RenderConditionally from 'components/RenderConditionally/RenderConditionally';
import Tag from 'components/Tag/Tag';
import Text from 'components/Text/Text';
import TooltipBadge from 'components/TooltipBadge/TooltipBadge';
@@ -958,37 +959,37 @@ const IntegrationActions: React.FC<IntegrationActionsProps> = ({
            </HorizontalGroup>
          </div>
        </CopyToClipboard>
        <div className={cx('thin-line-break')} />
        <WithPermissionControlTooltip userAction={UserActions.IntegrationsWrite}>
          <div className={cx('integration__actionItem')}>
            <div
              onClick={() => {
                setConfirmModal({
                  isOpen: true,
                  title: 'Delete Integration?',
                  body: (
                    <Text type="primary">
                      Are you sure you want to delete <Emoji text={alertReceiveChannel.verbal_name} /> ?
                    </Text>
                  ),
                  onConfirm: deleteIntegration,
                  dismissText: 'Cancel',
                  confirmText: 'Delete',
                });
              }}
              className="u-width-100"
            >
              <Text type="danger">
                <HorizontalGroup spacing={'xs'}>
                  <Icon name="trash-alt" />
                  <span>Delete Integration</span>
                </HorizontalGroup>
              </Text>
        <RenderConditionally shouldRender={alertReceiveChannel.allow_delete}>
          <div className={cx('thin-line-break')} />
          <WithPermissionControlTooltip userAction={UserActions.IntegrationsWrite}>
            <div className={cx('integration__actionItem')}>
              <div
                onClick={() => {
                  setConfirmModal({
                    isOpen: true,
                    title: 'Delete Integration?',
                    body: (
                      <Text type="primary">
                        Are you sure you want to delete <Emoji text={alertReceiveChannel.verbal_name} /> ?
                      </Text>
                    ),
                    onConfirm: deleteIntegration,
                    dismissText: 'Cancel',
                    confirmText: 'Delete',
                  });
                }}
                className="u-width-100"
              >
                <Text type="danger">
                  <HorizontalGroup spacing={'xs'}>
                    <Icon name="trash-alt" />
                    <span>Delete Integration</span>
                  </HorizontalGroup>
                </Text>
              </div>
            </div>
          </div>
        </WithPermissionControlTooltip>
          </WithPermissionControlTooltip>
        </RenderConditionally>
      </div>
    )}
  >

View file

@@ -3,9 +3,9 @@
"@adobe/css-tools@^4.0.1":
  version "4.3.1"
  resolved "https://registry.yarnpkg.com/@adobe/css-tools/-/css-tools-4.3.1.tgz#abfccb8ca78075a2b6187345c26243c1a0842f28"
  integrity sha512-/62yikz7NLScCGAAST5SHdnjaDJQBDq0M2muyRTpf2VQhw6StBg2ALiu73zSJQ4fMVLA+0uBhBHAle7Wg+2kSg==
  version "4.3.2"
  resolved "https://registry.yarnpkg.com/@adobe/css-tools/-/css-tools-4.3.2.tgz#a6abc715fb6884851fca9dad37fc34739a04fd11"
  integrity sha512-DA5a1C0gD/pLOvhv33YMrbf2FK3oUzwNl9oOJqE4XVjuEtt6XIakRcsd7eLiOSPkp1kTRQGICTA8cKra/vFbjw==
"@ampproject/remapping@^2.1.0":
  version "2.2.0"