Merge pull request #3787 from grafana/dev

v1.3.95
This commit is contained in:
Ildar Iskhakov 2024-01-30 20:59:22 +08:00 committed by GitHub
commit fcdddb7d30
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
46 changed files with 902 additions and 234 deletions

View file

@ -5,7 +5,22 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## Unreleased
## v1.3.94 (2024-01-30)
### Added
- Improved logging during plugin sync and install with Grafana @mderynck ([#3730](https://github.com/grafana/oncall/pull/3730))
- Added `is_currently_oncall` information to internal user details API ([#3765](https://github.com/grafana/oncall/pull/3765))
- Add a modal for autoresolve and grouping templates for Alertmanager-based integrations ([#3764](https://github.com/grafana/oncall/pull/3764))
### Fixed
- Fixed too frequent retry of `perform_notification` task on Telegram ratelimit error by @Ferril ([#3744](https://github.com/grafana/oncall/pull/3744))
- Fixed dynamic labels & multi-label extraction label being broken ([#3750](https://github.com/grafana/oncall/issues/3750))
- Added a check for whether the organization has a Slack connection when updating Slack-related fields via public API endpoints
by @Ferril ([#3751](https://github.com/grafana/oncall/pull/3751))
- Fixed calculating the number of on-call users per team by @Ferril ([#3773](https://github.com/grafana/oncall/pull/3773))
- Refactor create_alert task by @iskhakov ([#3759](https://github.com/grafana/oncall/pull/3759))
## v1.3.92 (2024-01-23)

View file

@ -1,3 +1,3 @@
aiohttp==3.9.0
aiohttp==3.9.2
Faker==16.4.0
tqdm==4.64.1

View file

@ -717,7 +717,7 @@ case "${image}" in
"${DOCS_IMAGE}" \
"--minAlertLevel=${VALE_MINALERTLEVEL}" \
'--glob=*.md' \
--output=line \
--output=/etc/vale/rdjsonl.tmpl \
/hugo/content/docs | sed "s#$(proj_dst "${proj}")#sources#"
;;
*)

View file

@ -12,6 +12,8 @@ from django.db.models import JSONField
from apps.alerts import tasks
from apps.alerts.constants import TASK_DELAY_SECONDS
from apps.alerts.incident_appearance.templaters import TemplateLoader
from apps.alerts.signals import alert_group_escalation_snapshot_built
from apps.alerts.tasks.distribute_alert import send_alert_create_signal
from apps.labels.alert_group_labels import assign_labels
from common.jinja_templater import apply_jinja_template
from common.jinja_templater.apply_jinja_template import JinjaTemplateError, JinjaTemplateWarning
@ -102,28 +104,16 @@ class Alert(models.Model):
if channel_filter is None:
channel_filter = ChannelFilter.select_filter(alert_receive_channel, raw_request_data, force_route_id)
# Get or create group
group, group_created = AlertGroup.objects.get_or_create_grouping(
channel=alert_receive_channel,
channel_filter=channel_filter,
group_data=group_data,
received_at=received_at,
)
logger.debug(f"alert group {group.pk} created={group_created}")
if group_created:
assign_labels(group, alert_receive_channel, raw_request_data)
group.log_records.create(type=AlertGroupLogRecord.TYPE_REGISTERED)
group.log_records.create(type=AlertGroupLogRecord.TYPE_ROUTE_ASSIGNED)
mark_as_resolved = (
enable_autoresolve and group_data.is_resolve_signal and alert_receive_channel.allow_source_based_resolving
)
if not group.resolved and mark_as_resolved:
group.resolve_by_source()
mark_as_acknowledged = group_data.is_acknowledge_signal
if not group.acknowledged and mark_as_acknowledged:
group.acknowledge_by_source()
# Create alert
alert = cls(
is_resolve_signal=group_data.is_resolve_signal,
title=title,
@ -135,21 +125,39 @@ class Alert(models.Model):
raw_request_data=raw_request_data,
is_the_first_alert_in_group=group_created,
)
alert.save()
logger.debug(f"alert {alert.pk} created")
transaction.on_commit(partial(send_alert_create_signal.apply_async, (alert.pk,)))
if group_created:
assign_labels(group, alert_receive_channel, raw_request_data)
group.log_records.create(type=AlertGroupLogRecord.TYPE_REGISTERED)
group.log_records.create(type=AlertGroupLogRecord.TYPE_ROUTE_ASSIGNED)
if group_created or alert.group.pause_escalation:
# Build escalation snapshot if needed and start escalation
alert.group.start_escalation_if_needed(countdown=TASK_DELAY_SECONDS)
if group_created:
# TODO: consider moving to start_escalation_if_needed
alert_group_escalation_snapshot_built.send(sender=cls.__class__, alert_group=alert.group)
mark_as_acknowledged = group_data.is_acknowledge_signal
if not group.acknowledged and mark_as_acknowledged:
group.acknowledge_by_source()
mark_as_resolved = (
enable_autoresolve and group_data.is_resolve_signal and alert_receive_channel.allow_source_based_resolving
)
if not group.resolved and mark_as_resolved:
group.resolve_by_source()
# Store exact alert which resolved group.
if group.resolved_by == AlertGroup.SOURCE and group.resolved_by_alert is None:
group.resolved_by_alert = alert
group.save(update_fields=["resolved_by_alert"])
if settings.DEBUG:
tasks.distribute_alert(alert.pk)
else:
transaction.on_commit(
partial(tasks.distribute_alert.apply_async, (alert.pk,), countdown=TASK_DELAY_SECONDS)
)
if group_created:
# all code below related to maintenance mode
maintenance_uuid = None

View file

@ -13,6 +13,8 @@ from .task_logger import task_logger
def distribute_alert(alert_id):
"""
We need this task to make task processing async and to make sure the task is delivered.
This task is not used anymore, but we keep it for the tasks in the queue to be processed.
TODO: remove this task after all the tasks in the queue are processed.
"""
from apps.alerts.models import Alert

View file

@ -1,10 +1,12 @@
import time
from functools import partial
from celery.exceptions import Retry
from django.conf import settings
from django.db import transaction
from django.utils import timezone
from kombu.utils.uuid import uuid as celery_uuid
from telegram.error import RetryAfter
from apps.alerts.constants import NEXT_ESCALATION_DELAY
from apps.alerts.signals import user_notification_action_triggered_signal
@ -234,7 +236,10 @@ def notify_user_task(
@shared_dedicated_queue_retry_task(
autoretry_for=(Exception,), retry_backoff=True, max_retries=1 if settings.DEBUG else None
autoretry_for=(Exception,),
retry_backoff=True,
dont_autoretry_for=(Retry,),
max_retries=1 if settings.DEBUG else None,
)
def perform_notification(log_record_pk):
from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRecord
@ -289,7 +294,11 @@ def perform_notification(log_record_pk):
phone_backend.notify_by_call(user, alert_group, notification_policy)
elif notification_channel == UserNotificationPolicy.NotificationChannel.TELEGRAM:
TelegramToUserConnector.notify_user(user, alert_group, notification_policy)
try:
TelegramToUserConnector.notify_user(user, alert_group, notification_policy)
except RetryAfter as e:
countdown = getattr(e, "retry_after", 3)
raise perform_notification.retry((log_record_pk,), countdown=countdown, exc=e)
elif notification_channel == UserNotificationPolicy.NotificationChannel.SLACK:
# TODO: refactor checking the possibility of sending a notification in slack

View file

@ -8,9 +8,9 @@ from apps.alerts.tasks import distribute_alert, escalate_alert_group
@pytest.mark.django_db
@patch("apps.alerts.tasks.distribute_alert.distribute_alert.apply_async", return_value=None)
@patch("apps.alerts.tasks.distribute_alert.send_alert_create_signal.apply_async", return_value=None)
def test_alert_create_default_channel_filter(
mocked_distribute_alert_task,
mocked_send_alert_create_signal,
make_organization,
make_alert_receive_channel,
make_channel_filter,
@ -30,10 +30,9 @@ def test_alert_create_default_channel_filter(
image_url=None,
link_to_upstream_details=None,
)
assert alert.group.channel_filter == channel_filter
assert len(callbacks) == 1
mocked_distribute_alert_task.assert_called_once_with((alert.pk,), countdown=1)
mocked_send_alert_create_signal.assert_called_once_with((alert.pk,))
@pytest.mark.django_db

View file

@ -1,6 +1,7 @@
from unittest.mock import patch
import pytest
from telegram.error import RetryAfter
from apps.alerts.models import AlertGroup
from apps.alerts.tasks.notify_user import notify_user_task, perform_notification
@ -8,6 +9,7 @@ from apps.api.permissions import LegacyAccessControlRole
from apps.base.models.user_notification_policy import UserNotificationPolicy
from apps.base.models.user_notification_policy_log_record import UserNotificationPolicyLogRecord
from apps.slack.models import SlackMessage
from apps.telegram.models import TelegramToUserConnector
NOTIFICATION_UNAUTHORIZED_MSG = "notification is not allowed for user"
@ -297,3 +299,35 @@ def test_perform_notification_missing_user_notification_policy_log_record(caplog
"The alert group associated with this log record may have been deleted."
) in caplog.text
assert f"perform_notification: found record for {invalid_pk}" not in caplog.text
@pytest.mark.django_db
def test_perform_notification_telegram_retryafter_error(
make_organization_and_user,
make_user_notification_policy,
make_alert_receive_channel,
make_alert_group,
make_user_notification_policy_log_record,
):
organization, user = make_organization_and_user()
user_notification_policy = make_user_notification_policy(
user=user,
step=UserNotificationPolicy.Step.NOTIFY,
notify_by=UserNotificationPolicy.NotificationChannel.TELEGRAM,
)
alert_receive_channel = make_alert_receive_channel(organization=organization)
alert_group = make_alert_group(alert_receive_channel=alert_receive_channel)
log_record = make_user_notification_policy_log_record(
author=user,
alert_group=alert_group,
notification_policy=user_notification_policy,
type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED,
)
countdown = 15
exc = RetryAfter(countdown)
with patch.object(TelegramToUserConnector, "notify_user", side_effect=exc) as mock_notify_user:
with pytest.raises(RetryAfter):
perform_notification(log_record.pk)
mock_notify_user.assert_called_once_with(user, alert_group, user_notification_policy)
assert alert_group.personal_log_records.last() == log_record

View file

@ -50,10 +50,10 @@ class TeamLongSerializer(TeamSerializer):
]
def get_number_of_users_currently_oncall(self, obj: Team) -> int:
num_of_users_oncall_for_team = 0
oncall_users = set()
for schedule, users in self.context["schedules_with_oncall_users"].items():
if schedule.team == obj:
num_of_users_oncall_for_team += len(users)
oncall_users |= set(users)
return num_of_users_oncall_for_team
return len(oncall_users)

View file

@ -51,7 +51,7 @@ class WorkingHoursSerializer(serializers.Serializer):
sunday = serializers.ListField(child=WorkingHoursPeriodSerializer())
class UserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin):
class ListUserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin):
pk = serializers.CharField(read_only=True, source="public_primary_key")
slack_user_identity = SlackUserIdentitySerializer(read_only=True)
@ -165,6 +165,24 @@ class UserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin):
return f"{HIDE_SYMBOL * (len(number) - SHOW_LAST_SYMBOLS)}{number[-SHOW_LAST_SYMBOLS:]}"
class UserSerializer(ListUserSerializer):
context: UserSerializerContext
is_currently_oncall = serializers.SerializerMethodField()
class Meta(ListUserSerializer.Meta):
fields = ListUserSerializer.Meta.fields + [
"is_currently_oncall",
]
read_only_fields = ListUserSerializer.Meta.read_only_fields + [
"is_currently_oncall",
]
def get_is_currently_oncall(self, obj: User) -> bool:
# Serializer context is set here: apps.api.views.user.UserView.get_serializer_context.
return any(obj in users for users in self.context.get("schedules_with_oncall_users", {}).values())
class CurrentUserSerializer(UserSerializer):
rbac_permissions = UserPermissionSerializer(read_only=True, many=True, source="permissions")
@ -176,7 +194,7 @@ class CurrentUserSerializer(UserSerializer):
read_only_fields = UserSerializer.Meta.read_only_fields
class UserHiddenFieldsSerializer(UserSerializer):
class UserHiddenFieldsSerializer(ListUserSerializer):
fields_available_for_all_users = [
"pk",
"organization",
@ -198,7 +216,7 @@ class UserHiddenFieldsSerializer(UserSerializer):
return ret
class ScheduleUserSerializer(UserSerializer):
class ScheduleUserSerializer(ListUserSerializer):
fields_to_keep = [
"pk",
"organization",
@ -214,7 +232,7 @@ class ScheduleUserSerializer(UserSerializer):
]
def to_representation(self, instance):
serialized = super(UserSerializer, self).to_representation(instance)
serialized = super(ListUserSerializer, self).to_representation(instance)
ret = {field: value for field, value in serialized.items() if field in self.fields_to_keep}
return ret
@ -288,10 +306,7 @@ class UserIsCurrentlyOnCallSerializer(UserShortSerializer, EagerLoadingMixin):
def get_is_currently_oncall(self, obj: User) -> bool:
# Serializer context is set here: apps.api.views.user.UserView.get_serializer_context.
for users in self.context.get("schedules_with_oncall_users", {}).values():
if obj in users:
return True
return False
return any(obj in users for users in self.context.get("schedules_with_oncall_users", {}).values())
class PagedUserSerializer(serializers.Serializer):

View file

@ -166,7 +166,7 @@ def test_teams_number_of_users_currently_oncall_attribute_works_properly(
team3 = make_team(organization)
team1.users.set([user1, user2, user3])
team2.users.set([user1])
team2.users.set([user1, user2])
team3.users.set([user3])
def _make_schedule(team=None, oncall_users=None):
@ -193,7 +193,9 @@ def test_teams_number_of_users_currently_oncall_attribute_works_properly(
schedule.refresh_ical_file()
schedule.refresh_ical_final_schedule()
# create two schedules for team 1 to make sure that every user is calculated only once per team
_make_schedule(team=team1, oncall_users=[user1, user2])
_make_schedule(team=team1, oncall_users=[user1, user3])
_make_schedule(team=team2, oncall_users=[user1])
_make_schedule(team=team3, oncall_users=[])
@ -203,7 +205,7 @@ def test_teams_number_of_users_currently_oncall_attribute_works_properly(
response = client.get(url, format="json", **make_user_auth_headers(user1, token))
number_of_oncall_users = {
team1.public_primary_key: 2,
team1.public_primary_key: 3,
team2.public_primary_key: 1,
team3.public_primary_key: 0,
NO_TEAM_VALUE: 0, # this covers the case of "No team"

View file

@ -24,7 +24,12 @@ def clear_cache():
@pytest.mark.django_db
def test_current_user(make_organization_and_user_with_plugin_token, make_user_auth_headers):
def test_current_user(
make_organization_and_user_with_plugin_token,
make_user_auth_headers,
make_schedule,
make_on_call_shift,
):
organization, user, token = make_organization_and_user_with_plugin_token()
client = APIClient()
@ -42,6 +47,7 @@ def test_current_user(make_organization_and_user_with_plugin_token, make_user_au
"rbac_permissions": user.permissions,
"timezone": None,
"working_hours": default_working_hours(),
"is_currently_oncall": False,
"unverified_phone_number": None,
"verified_phone_number": None,
"telegram_configuration": None,
@ -61,6 +67,28 @@ def test_current_user(make_organization_and_user_with_plugin_token, make_user_au
assert response.status_code == status.HTTP_200_OK
assert response.json() == expected_response
# current user is on-call
today = timezone.now().replace(hour=0, minute=0, second=0, microsecond=0)
schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)
on_call_shift = make_on_call_shift(
organization=organization,
shift_type=CustomOnCallShift.TYPE_ROLLING_USERS_EVENT,
start=today,
rotation_start=today,
duration=timezone.timedelta(seconds=24 * 60 * 60),
priority_level=1,
frequency=CustomOnCallShift.FREQUENCY_DAILY,
schedule=schedule,
)
on_call_shift.add_rolling_users([[user]])
schedule.refresh_ical_file()
schedule.refresh_ical_final_schedule()
response = client.get(url, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
expected_response["is_currently_oncall"] = True
assert response.json() == expected_response
data_to_update = {"hide_phone_number": True}
response = client.put(url, data=data_to_update, format="json", **make_user_auth_headers(user, token))
@ -127,6 +155,7 @@ def test_update_user_cant_change_email_and_username(
"role": admin.role,
"timezone": None,
"working_hours": default_working_hours(),
"is_currently_oncall": False,
"unverified_phone_number": phone_number,
"verified_phone_number": None,
"telegram_configuration": None,
@ -2017,6 +2046,12 @@ def test_users_is_currently_oncall_attribute_works_properly(
assert user["teams"] == []
assert user["is_currently_oncall"] == oncall_statuses[user["pk"]]
# getting specific user details include currently on-call info
url = reverse("api-internal:user-detail", kwargs={"pk": user1.public_primary_key})
response = client.get(url, format="json", **make_user_auth_headers(user1, token))
assert response.json()["is_currently_oncall"]
@pytest.mark.django_db
def test_list_users_filtered_by_is_currently_oncall(

View file

@ -31,6 +31,7 @@ from apps.api.serializers.team import TeamSerializer
from apps.api.serializers.user import (
CurrentUserSerializer,
FilterUserSerializer,
ListUserSerializer,
UserHiddenFieldsSerializer,
UserIsCurrentlyOnCallSerializer,
UserSerializer,
@ -101,12 +102,40 @@ class UpcomingShift(typing.TypedDict):
UpcomingShifts = list[UpcomingShift]
class CurrentUserView(APIView):
class CachedSchedulesContextMixin:
@cached_property
def schedules_with_oncall_users(self):
"""
The result of this method is cached and is reused for the whole lifetime of a request,
since self.get_serializer_context() is called multiple times for every instance in the queryset.
"""
return get_cached_oncall_users_for_multiple_schedules(self.request.user.organization.oncall_schedules.all())
def _populate_schedules_oncall_cache(self):
return False
def get_serializer_context(self):
context = getattr(super(), "get_serializer_context", lambda: {})()
context.update(
{
"schedules_with_oncall_users": self.schedules_with_oncall_users
if self._populate_schedules_oncall_cache()
else {}
}
)
return context
class CurrentUserView(APIView, CachedSchedulesContextMixin):
authentication_classes = (MobileAppAuthTokenAuthentication, PluginAuthentication)
permission_classes = (IsAuthenticated,)
def _populate_schedules_oncall_cache(self):
return True
def get(self, request):
context = {"request": self.request, "format": self.format_kwarg, "view": self}
context = self.get_serializer_context()
context.update({"request": self.request, "format": self.format_kwarg, "view": self})
if settings.IS_OPEN_SOURCE and live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED:
from apps.oss_installation.models import CloudConnector, CloudUserIdentity
@ -122,8 +151,10 @@ class CurrentUserView(APIView):
return Response(serializer.data)
def put(self, request):
context = self.get_serializer_context()
context.update({"request": self.request})
data = self.request.data
serializer = CurrentUserSerializer(request.user, data=data, context={"request": self.request})
serializer = CurrentUserSerializer(request.user, data=data, context=context)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data)
@ -158,6 +189,7 @@ class UserFilter(ByTeamModelFieldFilterMixin, filters.FilterSet):
class UserView(
PublicPrimaryKeyMixin,
CachedSchedulesContextMixin,
mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.ListModelMixin,
@ -247,51 +279,49 @@ class UserView(
filterset_class = UserFilter
@cached_property
def schedules_with_oncall_users(self):
"""
The result of this method is cached and is reused for the whole lifetime of a request,
since self.get_serializer_context() is called multiple times for every instance in the queryset.
"""
return get_cached_oncall_users_for_multiple_schedules(self.request.user.organization.oncall_schedules.all())
def _get_is_currently_oncall_query_param(self) -> str:
return self.request.query_params.get("is_currently_oncall", "").lower()
def _is_currently_oncall_request(self) -> bool:
return self._get_is_currently_oncall_query_param() in ["true", "false", "all"]
def get_serializer_context(self):
context = super().get_serializer_context()
context.update(
{
"schedules_with_oncall_users": self.schedules_with_oncall_users
if self._is_currently_oncall_request()
else {}
}
def _populate_schedules_oncall_cache(self):
return (
# admin or owner can see on-call schedule information for a user
(self.is_owner_or_admin() and self.action != "list")
or
# list requests need to explicitly request on-call information
self._get_is_currently_oncall_query_param() in ["true", "false", "all"]
)
return context
def get_serializer_class(self):
def is_owner_or_admin(self):
request = self.request
user = request.user
kwargs = self.kwargs
query_params = request.query_params
is_list_request = self.action in ["list"]
is_filters_request = query_params.get("filters", "false") == "true"
if is_list_request and is_filters_request:
return FilterUserSerializer
elif is_list_request and self._is_currently_oncall_request():
return UserIsCurrentlyOnCallSerializer
is_users_own_data = kwargs.get("pk") is not None and kwargs.get("pk") == user.public_primary_key
has_admin_permission = user_is_authorized(user, [RBACPermission.Permissions.USER_SETTINGS_ADMIN])
if is_users_own_data or has_admin_permission:
return UserSerializer
return UserHiddenFieldsSerializer
return is_users_own_data or has_admin_permission
def get_serializer_class(self):
request = self.request
query_params = request.query_params
is_list_request = self.action == "list"
is_filters_request = query_params.get("filters", "false") == "true"
if is_list_request:
serializer = ListUserSerializer
if is_filters_request:
serializer = FilterUserSerializer
elif self._populate_schedules_oncall_cache():
serializer = UserIsCurrentlyOnCallSerializer
return serializer
# non-list requests
serializer = UserHiddenFieldsSerializer
if self.is_owner_or_admin():
serializer = UserSerializer
return serializer
def get_queryset(self):
slack_identity = self.request.query_params.get("slack_identity", None) == "true"
@ -308,7 +338,7 @@ class UserView(
@extend_schema(
responses=PolymorphicProxySerializer(
component_name="UserPolymorphic",
serializers=[FilterUserSerializer, UserIsCurrentlyOnCallSerializer, UserSerializer],
serializers=[FilterUserSerializer, UserIsCurrentlyOnCallSerializer, ListUserSerializer],
resource_type_field_name=None,
)
)
@ -401,10 +431,6 @@ class UserView(
status=status.HTTP_403_FORBIDDEN,
)
def current(self, request) -> Response:
serializer = UserSerializer(self.get_queryset().get(pk=self.request.user.pk))
return Response(serializer.data)
@extend_schema(responses={status.HTTP_200_OK: resolve_type_hint(typing.List[str])})
@action(detail=False, methods=["get"])
def timezone_options(self, request) -> Response:

View file

@ -104,9 +104,11 @@ class BasePluginAuthentication(BaseAuthentication):
try:
context = dict(json.loads(request.headers.get("X-Grafana-Context")))
except (ValueError, TypeError):
logger.info("auth request user not found - missing valid X-Grafana-Context")
return None
if "UserId" not in context and "UserID" not in context:
logger.info("auth request user not found - X-Grafana-Context missing UserID")
return None
try:
@ -117,6 +119,7 @@ class BasePluginAuthentication(BaseAuthentication):
try:
return organization.users.get(user_id=user_id)
except User.DoesNotExist:
logger.info(f"auth request user not found - user_id={user_id}")
return None

View file

@ -162,6 +162,7 @@ class APIClient:
class GrafanaAPIClient(APIClient):
GRAFANA_INCIDENT_PLUGIN = "grafana-incident-app"
GRAFANA_INCIDENT_PLUGIN_BACKEND_URL_KEY = "backendUrl"
GRAFANA_LABELS_PLUGIN = "grafana-labels-app"
USER_PERMISSION_ENDPOINT = f"api/access-control/users/permissions/search?actionPrefix={ACTION_PREFIX}"
@ -302,6 +303,9 @@ class GrafanaAPIClient(APIClient):
def get_grafana_incident_plugin_settings(self) -> APIClientResponse["GrafanaAPIClient.Types.PluginSettings"]:
return self.get_grafana_plugin_settings(self.GRAFANA_INCIDENT_PLUGIN)
def get_grafana_labels_plugin_settings(self) -> APIClientResponse["GrafanaAPIClient.Types.PluginSettings"]:
return self.get_grafana_plugin_settings(self.GRAFANA_LABELS_PLUGIN)
def get_service_account(self, login: str) -> APIClientResponse["GrafanaAPIClient.Types.ServiceAccountResponse"]:
return self.api_get(f"api/serviceaccounts/search?query={login}")

View file

@ -1,3 +1,5 @@
import logging
from rest_framework import status
from rest_framework.request import Request
from rest_framework.response import Response
@ -8,6 +10,8 @@ from apps.user_management.models import Organization
from apps.user_management.sync import sync_organization
from common.api_helpers.mixins import GrafanaHeadersMixin
logger = logging.getLogger(__name__)
class InstallView(GrafanaHeadersMixin, APIView):
authentication_classes = (BasePluginAuthentication,)
@ -21,6 +25,11 @@ class InstallView(GrafanaHeadersMixin, APIView):
organization.deleted_at = None
organization.api_token = self.instance_context["grafana_token"]
organization.save(update_fields=["api_token", "deleted_at"])
logger.info(f"install - grafana_token replaced org={organization.pk}")
sync_organization(organization)
logger.info(
f"install - sync organization finished org={organization.pk} "
f"token_status={organization.api_token_status}"
)
return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -1,3 +1,5 @@
import logging
from django.conf import settings
from rest_framework.request import Request
from rest_framework.response import Response
@ -11,6 +13,8 @@ from apps.user_management.models import Organization
from common.api_helpers.mixins import GrafanaHeadersMixin
from common.api_helpers.utils import create_engine_url
logger = logging.getLogger(__name__)
class StatusView(GrafanaHeadersMixin, APIView):
authentication_classes = (
@ -19,8 +23,13 @@ class StatusView(GrafanaHeadersMixin, APIView):
)
def post(self, request: Request) -> Response:
logger.info(
f"authenticated via {type(request.successful_authenticator)}, user=[{request.user}] "
f"org=[{request.auth.organization.stack_slug if request.auth.organization else None}]"
)
"""
Called asyncronounsly on each start of the plugin
Called asynchronously on each start of the plugin
Checks if plugin is correctly installed and async runs a task
to sync users, teams and org
"""
@ -42,7 +51,12 @@ class StatusView(GrafanaHeadersMixin, APIView):
if organization:
is_installed = True
token_ok = organization.api_token_status == Organization.API_TOKEN_STATUS_OK
logger.info(
f"Status - check token org={organization.pk} status={organization.api_token_status} "
f"token_ok={token_ok}"
)
if organization.is_moved:
logger.info(f"Organization Moved! org={organization.pk}")
api_url = create_engine_url("", override_base=organization.migration_destination.oncall_backend_url)
else:
allow_signup = DynamicSetting.objects.get_or_create(
@ -51,6 +65,7 @@ class StatusView(GrafanaHeadersMixin, APIView):
# If user is not present in OnCall database, set token_ok to False, which will trigger reinstall
if not request.user:
logger.info(f"Status - user not found org={organization.pk} " f"setting token_status to PENDING")
token_ok = False
organization.api_token_status = Organization.API_TOKEN_STATUS_PENDING
organization.save(update_fields=["api_token_status"])

View file

@ -63,8 +63,8 @@ def create_alertmanager_alerts(alert_receive_channel_pk, alert, is_demo=False, f
alert.group.active_resolve_calculation_id = task.id
alert.group.save(update_fields=["active_resolve_calculation_id"])
logger.info(
f"Created alert alert_id={alert.pk} alert_group_id={alert.group.pk} channel_id={alert_receive_channel.pk}"
logger.debug(
f"Created alertmanager alert alert_id={alert.pk} alert_group_id={alert.group.pk} channel_id={alert_receive_channel.pk}"
)
@ -109,7 +109,7 @@ def create_alert(
is_demo=is_demo,
received_at=received_at,
)
logger.info(
logger.debug(
f"Created alert alert_id={alert.pk} alert_group_id={alert.group.pk} channel_id={alert_receive_channel.pk}"
)
except ConcurrentUpdateError:

View file

@ -28,6 +28,27 @@ def test_labels_feature_flag(mock_is_labels_feature_enabled_for_org, make_organi
mock_is_labels_feature_enabled_for_org(12345)
# returns False if feature flag is disabled and organization is not in the feature list
assert organization.org_id not in settings.FEATURE_LABELS_ENABLED_PER_ORG
assert not is_labels_feature_enabled(organization)
@pytest.mark.django_db
def test_labels_feature_flag_when_plugin_is_disabled(
mock_is_labels_feature_enabled_for_org, make_organization, settings
):
organization = make_organization()
organization.is_grafana_labels_enabled = False
# returns False if feature flag is enabled, but plugin is disabled
assert settings.FEATURE_LABELS_ENABLED_FOR_ALL
assert organization.id not in settings.FEATURE_LABELS_ENABLED_PER_ORG
assert is_labels_feature_enabled(organization) is False
mock_is_labels_feature_enabled_for_org(organization.id)
# returns False if feature flag is disabled, organization is in the feature list, but plugin is disabled
assert not settings.FEATURE_LABELS_ENABLED_FOR_ALL
assert organization.id in settings.FEATURE_LABELS_ENABLED_PER_ORG
assert is_labels_feature_enabled(organization) is False
assert not is_labels_feature_enabled(organization)

View file

@ -51,7 +51,11 @@ def get_associating_label_model(obj_model_name: str) -> typing.Type["AssociatedL
def is_labels_feature_enabled(organization: "Organization") -> bool:
return settings.FEATURE_LABELS_ENABLED_FOR_ALL or organization.id in settings.FEATURE_LABELS_ENABLED_PER_ORG
"""
is_labels_feature_enabled checks if env with labels feature is enabled and plugin is provisioned.
"""
env_enabled = settings.FEATURE_LABELS_ENABLED_FOR_ALL or organization.id in settings.FEATURE_LABELS_ENABLED_PER_ORG
return organization.is_grafana_labels_enabled and env_enabled
def get_labels_dict(labelable) -> dict[str, str]:

View file

@ -4,28 +4,27 @@ from unittest.mock import patch
import pytest
import requests
from django.urls import reverse
from django.utils import timezone
from rest_framework import status
from rest_framework.test import APIClient
from rest_framework.views import APIView
from apps.mobile_app.views import MobileAppGatewayView
from common.cloud_auth_api.client import CloudAuthApiClient, CloudAuthApiException
DOWNSTREAM_BACKEND = "incident"
MOCK_DOWNSTREAM_URL = "https://mockdownstream.com"
MOCK_DOWNSTREAM_INCIDENT_API_URL = "https://mockdownstreamincidentapi.com"
MOCK_DOWNSTREAM_HEADERS = {"X-OnCall-Mobile-Proxy-Authorization": "Bearer mock_jwt"}
MOCK_DOWNSTREAM_HEADERS = {"Authorization": "Bearer mock_auth_token"}
MOCK_DOWNSTREAM_RESPONSE_DATA = {"foo": "bar"}
MOCK_TIMEZONE_NOW = timezone.datetime(2021, 1, 1, 3, 4, 5, tzinfo=timezone.utc)
MOCK_JWT = "mncn,zxcnv,mznxcv"
MOCK_JWT_PRIVATE_KEY = "asd,mzcxn,vmnzxcv,mnzx,cvmnzaslkdjflaksjdf"
MOCK_AUTH_TOKEN = "mncn,zxcnv,mznxcv"
@pytest.fixture(autouse=True)
def enable_mobile_app_gateway(settings):
settings.MOBILE_APP_GATEWAY_ENABLED = True
settings.MOBILE_APP_GATEWAY_RSA_PRIVATE_KEY = MOCK_JWT_PRIVATE_KEY
settings.GRAFANA_CLOUD_AUTH_API_URL = "asdfasdf"
settings.GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN = "zxcvzx"
class MockResponse:
@ -209,6 +208,7 @@ def test_mobile_app_gateway_supported_downstream_backends(
(requests.exceptions.TooManyRedirects, (), status.HTTP_502_BAD_GATEWAY),
(requests.exceptions.Timeout, (), status.HTTP_502_BAD_GATEWAY),
(requests.exceptions.JSONDecodeError, ("", "", 5), status.HTTP_400_BAD_REQUEST),
(CloudAuthApiException, (403, "http://example.com"), status.HTTP_502_BAD_GATEWAY),
],
)
def test_mobile_app_gateway_catches_errors_from_downstream_server(
@ -290,11 +290,11 @@ def test_mobile_app_gateway_incident_api_url(
@pytest.mark.django_db
@patch("apps.mobile_app.views.requests")
@patch("apps.mobile_app.views.MobileAppGatewayView._construct_jwt", return_value=MOCK_JWT)
@patch("apps.mobile_app.views.MobileAppGatewayView._get_auth_token", return_value=MOCK_AUTH_TOKEN)
@patch("apps.mobile_app.views.MobileAppGatewayView._get_downstream_url", return_value=MOCK_DOWNSTREAM_URL)
def test_mobile_app_gateway_proxies_headers(
_mock_get_downstream_url,
_mock_construct_jwt,
_mock_get_auth_token,
mock_requests,
make_organization_and_user_with_mobile_app_auth_token,
):
@ -313,16 +313,14 @@ def test_mobile_app_gateway_proxies_headers(
MOCK_DOWNSTREAM_URL,
data=b"",
params={},
headers={"X-OnCall-Mobile-Proxy-Authorization": f"Bearer {MOCK_JWT}", "Content-Type": content_type_header},
headers={"Authorization": f"Bearer {MOCK_AUTH_TOKEN}", "Content-Type": content_type_header},
)
@pytest.mark.django_db
@patch("apps.mobile_app.views.jwt.encode", return_value=MOCK_JWT)
@patch("apps.mobile_app.views.timezone.now", return_value=MOCK_TIMEZONE_NOW)
def test_mobile_app_gateway_properly_generates_a_jwt(
_mock_timezone_now,
mock_jwt_encode,
@patch("apps.mobile_app.views.CloudAuthApiClient.request_signed_token", return_value=MOCK_AUTH_TOKEN)
def test_mobile_app_gateway_properly_generates_an_auth_token(
mock_request_signed_token,
make_organization,
make_user_for_organization,
):
@ -337,13 +335,14 @@ def test_mobile_app_gateway_properly_generates_a_jwt(
)
user = make_user_for_organization(organization, user_id=user_id)
encoded_jwt = MobileAppGatewayView._construct_jwt(user)
auth_token = MobileAppGatewayView._get_auth_token(DOWNSTREAM_BACKEND, user)
assert encoded_jwt == MOCK_JWT
mock_jwt_encode.assert_called_once_with(
assert auth_token == f"{stack_id}:{MOCK_AUTH_TOKEN}"
mock_request_signed_token.assert_called_once_with(
organization,
[CloudAuthApiClient.Scopes.INCIDENT_WRITE],
{
"iat": MOCK_TIMEZONE_NOW,
"exp": MOCK_TIMEZONE_NOW + timezone.timedelta(minutes=1),
"user_id": user.user_id, # grafana user ID
"user_email": user.email,
"stack_id": organization.stack_id,
@ -351,6 +350,4 @@ def test_mobile_app_gateway_properly_generates_a_jwt(
"stack_slug": organization.stack_slug,
"org_slug": organization.org_slug,
},
MOCK_JWT_PRIVATE_KEY,
algorithm="RS256",
)

View file

@ -1,11 +1,10 @@
import enum
import logging
import typing
import jwt
import requests
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.utils import timezone
from fcm_django.api.rest_framework import FCMDeviceAuthorizedViewSet as BaseFCMDeviceAuthorizedViewSet
from rest_framework import mixins, status, viewsets
from rest_framework.exceptions import NotFound, ParseError
@ -17,6 +16,7 @@ from rest_framework.views import APIView
from apps.mobile_app.auth import MobileAppAuthTokenAuthentication, MobileAppVerificationTokenAuthentication
from apps.mobile_app.models import FCMDevice, MobileAppAuthToken, MobileAppUserSettings
from apps.mobile_app.serializers import FCMDeviceSerializer, MobileAppUserSettingsSerializer
from common.cloud_auth_api.client import CloudAuthApiClient, CloudAuthApiException
if typing.TYPE_CHECKING:
from apps.user_management.models import Organization, User
@ -135,12 +135,10 @@ class MobileAppGatewayView(APIView):
authentication_classes = (MobileAppAuthTokenAuthentication,)
permission_classes = (IsAuthenticated,)
class SupportedDownstreamBackends:
class SupportedDownstreamBackends(enum.StrEnum):
INCIDENT = "incident"
ALL_SUPPORTED_DOWNSTREAM_BACKENDS = [
SupportedDownstreamBackends.INCIDENT,
]
ALL_SUPPORTED_DOWNSTREAM_BACKENDS = list(SupportedDownstreamBackends)
def initial(self, request: Request, *args, **kwargs):
# If the mobile app gateway is not enabled, return a 404
@ -149,37 +147,33 @@ class MobileAppGatewayView(APIView):
super().initial(request, *args, **kwargs)
@classmethod
def _construct_jwt_payload(cls, user: "User") -> typing.Dict[str, typing.Any]:
organization = user.organization
now = timezone.now()
return {
# registered claim names
"iat": now,
"exp": now + timezone.timedelta(minutes=1), # jwt is short lived. expires in 1 minute.
# custom data
"user_id": user.user_id, # grafana user ID
"user_email": user.email,
"stack_id": organization.stack_id,
"organization_id": organization.org_id, # grafana org ID
"stack_slug": organization.stack_slug,
"org_slug": organization.org_slug,
}
@classmethod
def _construct_jwt(cls, user: "User") -> str:
def _get_auth_token(cls, downstream_backend: SupportedDownstreamBackends, user: "User") -> str:
"""
RS256 = asymmetric = public/private key pair
HS256 = symmetric = shared secret (don't use this)
"""
return jwt.encode(
cls._construct_jwt_payload(user), settings.MOBILE_APP_GATEWAY_RSA_PRIVATE_KEY, algorithm="RS256"
)
org = user.organization
token_claims = {
"user_id": user.user_id, # grafana user ID
"user_email": user.email,
"stack_id": org.stack_id,
"organization_id": org.org_id, # grafana org ID
"stack_slug": org.stack_slug,
"org_slug": org.org_slug,
}
token_scopes = {
cls.SupportedDownstreamBackends.INCIDENT: [CloudAuthApiClient.Scopes.INCIDENT_WRITE],
}[downstream_backend]
return f"{org.stack_id}:{CloudAuthApiClient().request_signed_token(org, token_scopes, token_claims)}"
@classmethod
def _get_downstream_headers(cls, request: Request, user: "User") -> typing.Dict[str, str]:
def _get_downstream_headers(
cls, request: Request, downstream_backend: SupportedDownstreamBackends, user: "User"
) -> typing.Dict[str, str]:
headers = {
"X-OnCall-Mobile-Proxy-Authorization": f"Bearer {cls._construct_jwt(user)}",
"Authorization": f"Bearer {cls._get_auth_token(downstream_backend, user)}",
}
if (v := request.META.get("CONTENT_TYPE", None)) is not None:
@ -188,7 +182,9 @@ class MobileAppGatewayView(APIView):
return headers
@classmethod
def _get_downstream_url(cls, organization: "Organization", downstream_backend: str, downstream_path: str) -> str:
def _get_downstream_url(
cls, organization: "Organization", downstream_backend: SupportedDownstreamBackends, downstream_path: str
) -> str:
downstream_url = {
cls.SupportedDownstreamBackends.INCIDENT: organization.grafana_incident_backend_url,
}[downstream_backend]
@ -210,6 +206,10 @@ class MobileAppGatewayView(APIView):
raise NotFound(f"Downstream backend {downstream_backend} not supported")
downstream_url = self._get_downstream_url(user.organization, downstream_backend, downstream_path)
log_msg_common = f"{downstream_backend} request to {method} {downstream_url}"
logger.info(f"Proxying {log_msg_common}")
downstream_request_handler = getattr(requests, method.lower())
try:
@ -217,13 +217,15 @@ class MobileAppGatewayView(APIView):
downstream_url,
data=request.body,
params=request.query_params.dict(),
headers=self._get_downstream_headers(request, user),
headers=self._get_downstream_headers(request, downstream_backend, user),
)
logger.info(f"Successfully proxied {log_msg_common}")
return Response(status=downstream_response.status_code, data=downstream_response.json())
except (
requests.exceptions.RequestException,
requests.exceptions.JSONDecodeError,
CloudAuthApiException,
) as e:
if isinstance(e, requests.exceptions.JSONDecodeError):
final_status = status.HTTP_400_BAD_REQUEST

View file

@ -86,6 +86,8 @@ class BaseChannelFilterSerializer(OrderedModelSerializer):
slack_channel_id = slack_channel_id.upper()
organization = self.context["request"].auth.organization
slack_team_identity = organization.slack_team_identity
if not slack_team_identity:
raise BadRequest(detail="Slack isn't connected to this workspace")
try:
slack_team_identity.get_cached_channels().get(slack_id=slack_channel_id)
except SlackChannel.DoesNotExist:

View file

@ -45,6 +45,9 @@ class ScheduleBaseSerializer(serializers.ModelSerializer):
organization = self.context["request"].auth.organization
slack_team_identity = organization.slack_team_identity
if (slack_channel_id or user_group_id) and not slack_team_identity:
raise BadRequest(detail="Slack isn't connected to this workspace")
if slack_channel_id is not None:
slack_channel_id = slack_channel_id.upper()
try:

View file

@ -819,6 +819,50 @@ def test_update_integration_default_route(
assert response.data["default_route"]["escalation_chain_id"] == escalation_chain.public_primary_key
@pytest.mark.django_db
def test_create_integration_default_route_with_slack_field(
    make_organization_and_user_with_token,
    make_escalation_chain,
):
    """Creating an integration whose default route names a Slack channel must 400 when the org has no Slack workspace."""
    org, _, token = make_organization_and_user_with_token()
    chain = make_escalation_chain(org)

    payload = {
        "type": "grafana",
        "name": "grafana_created",
        "team_id": None,
        "default_route": {
            "escalation_chain_id": chain.public_primary_key,
            "slack": {"channel_id": "TEST_SLACK_ID"},
        },
    }
    response = APIClient().post(
        reverse("api-public:integrations-list"),
        data=payload,
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    assert response.data["detail"] == "Slack isn't connected to this workspace"
@pytest.mark.django_db
def test_update_integration_default_route_with_slack_field(
    make_organization_and_user_with_token, make_alert_receive_channel, make_channel_filter
):
    """Updating an integration's default route with a Slack channel must 400 when the org has no Slack workspace."""
    org, _, token = make_organization_and_user_with_token()
    integration = make_alert_receive_channel(org)
    make_channel_filter(integration, is_default=True)

    response = APIClient().put(
        reverse("api-public:integrations-detail", args=[integration.public_primary_key]),
        data={"default_route": {"slack": {"channel_id": "TEST_SLACK_ID"}}},
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    assert response.data["detail"] == "Slack isn't connected to this workspace"
@pytest.mark.django_db
def test_cant_create_integrations_direct_paging(
make_organization_and_user_with_token, make_team, make_alert_receive_channel, make_user_auth_headers

View file

@ -282,6 +282,52 @@ def test_delete_route(
new_channel_filter.refresh_from_db()
@pytest.mark.django_db
def test_create_route_slack_error(
    route_public_api_setup,
):
    """POSTing a new route that names a Slack channel must 400 when Slack isn't connected."""
    _, _, token, alert_receive_channel, escalation_chain, _ = route_public_api_setup

    payload = {
        "integration_id": alert_receive_channel.public_primary_key,
        "routing_regex": "testreg",
        "escalation_chain_id": escalation_chain.public_primary_key,
        "slack": {"channel_id": "TEST_SLACK_ID"},
    }
    response = APIClient().post(
        reverse("api-public:routes-list"),
        format="json",
        HTTP_AUTHORIZATION=token,
        data=payload,
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    assert response.data["detail"] == "Slack isn't connected to this workspace"
@pytest.mark.django_db
def test_update_route_slack_error(
    route_public_api_setup,
    make_channel_filter,
):
    """PUTting a Slack channel onto an existing route must 400 when Slack isn't connected."""
    _, _, token, alert_receive_channel, _, _ = route_public_api_setup
    route = make_channel_filter(
        alert_receive_channel,
        is_default=False,
        filtering_term="testreg",
    )

    response = APIClient().put(
        reverse("api-public:routes-detail", kwargs={"pk": route.public_primary_key}),
        format="json",
        HTTP_AUTHORIZATION=token,
        data={"slack": {"channel_id": "TEST_SLACK_ID"}},
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    assert response.data["detail"] == "Slack isn't connected to this workspace"
@pytest.mark.django_db
def test_create_route_with_messaging_backend(
route_public_api_setup,

View file

@ -844,6 +844,65 @@ def test_create_schedule_invalid_timezone(make_organization_and_user_with_token,
assert response.json() == {"time_zone": ["Invalid timezone"]}
@pytest.mark.django_db
def test_create_calendar_schedule_slack_error(make_organization_and_user_with_token):
    """Creating a calendar schedule with either a Slack channel or a Slack user group must 400 without a Slack workspace."""
    _, _, token = make_organization_and_user_with_token()
    client = APIClient()
    url = reverse("api-public:schedules-list")

    # both Slack identifiers must be rejected the same way
    for slack_field in ({"channel_id": "TEST_SLACK_ID"}, {"user_group_id": "TEST_SLACK_ID"}):
        data = {
            "team_id": None,
            "name": "schedule test name",
            "time_zone": "Europe/Moscow",
            "type": "calendar",
            "slack": slack_field,
        }
        response = client.post(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert response.data["detail"] == "Slack isn't connected to this workspace"
@pytest.mark.django_db
def test_update_calendar_schedule_slack_error(
    make_organization_and_user_with_token,
    make_schedule,
):
    """Updating a calendar schedule with either Slack identifier must 400 without a Slack workspace."""
    organization, _, token = make_organization_and_user_with_token()
    client = APIClient()
    schedule = make_schedule(organization, schedule_class=OnCallScheduleCalendar)
    url = reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key})

    # both Slack identifiers must be rejected the same way
    for slack_field in ({"channel_id": "TEST_SLACK_ID"}, {"user_group_id": "TEST_SLACK_ID"}):
        response = client.put(url, data={"slack": slack_field}, format="json", HTTP_AUTHORIZATION=f"{token}")
        assert response.status_code == status.HTTP_400_BAD_REQUEST
        assert response.data["detail"] == "Slack isn't connected to this workspace"
@pytest.mark.django_db
def test_create_ical_schedule_without_ical_url(make_organization_and_user_with_token):
_, _, token = make_organization_and_user_with_token()

View file

@ -0,0 +1,18 @@
# Generated by Django 4.2.7 on 2024-01-30 07:17
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds Organization.is_grafana_labels_enabled, the flag that org sync sets
    # from the grafana-labels-app plugin status.

    dependencies = [
        ('user_management', '0019_organization_grafana_incident_backend_url'),
    ]

    operations = [
        migrations.AddField(
            model_name='organization',
            name='is_grafana_labels_enabled',
            # null=True presumably lets pre-existing/unsynced rows hold NULL
            # ("unknown") rather than a definite False -- TODO confirm intent
            field=models.BooleanField(default=False, null=True),
        ),
    ]

View file

@ -252,6 +252,7 @@ class Organization(MaintainableObject):
is_rbac_permissions_enabled = models.BooleanField(default=False)
is_grafana_incident_enabled = models.BooleanField(default=False)
is_grafana_labels_enabled = models.BooleanField(default=False, null=True)
alert_group_table_columns: list[AlertGroupTableColumn] | None = JSONField(default=None, null=True)
grafana_incident_backend_url = models.CharField(max_length=300, null=True, default=None)

View file

@ -42,6 +42,7 @@ def _sync_organization(organization: Organization) -> None:
rbac_is_enabled = grafana_api_client.is_rbac_enabled_for_organization()
organization.is_rbac_permissions_enabled = rbac_is_enabled
logger.info(f"RBAC status org={organization.pk} rbac_enabled={organization.is_rbac_permissions_enabled}")
_sync_instance_info(organization)
@ -51,14 +52,11 @@ def _sync_organization(organization: Organization) -> None:
sync_users_and_teams(grafana_api_client, organization)
organization.last_time_synced = timezone.now()
grafana_incident_settings, _ = grafana_api_client.get_grafana_incident_plugin_settings()
if grafana_incident_settings is not None:
organization.is_grafana_incident_enabled = grafana_incident_settings["enabled"]
organization.grafana_incident_backend_url = grafana_incident_settings.get("jsonData", {}).get(
GrafanaAPIClient.GRAFANA_INCIDENT_PLUGIN_BACKEND_URL_KEY
)
_sync_grafana_incident_plugin(organization, grafana_api_client)
_sync_grafana_labels_plugin(organization, grafana_api_client)
else:
organization.api_token_status = Organization.API_TOKEN_STATUS_FAILED
logger.warning(f"Sync not successful org={organization.pk} token_status=FAILED")
organization.save(
update_fields=[
@ -97,6 +95,29 @@ def _sync_instance_info(organization: Organization) -> None:
organization.gcom_token_org_last_time_synced = timezone.now()
def _sync_grafana_labels_plugin(organization: Organization, grafana_api_client) -> None:
    """Mirror the grafana-labels-app plugin's "enabled" state onto the organization.

    Intended to be called only from _sync_organization: it mutates the org in
    memory and leaves persisting to the caller. A failed settings lookup
    (a None payload) leaves the current flag untouched.
    """
    settings_payload, _ = grafana_api_client.get_grafana_labels_plugin_settings()
    if settings_payload is None:
        return
    organization.is_grafana_labels_enabled = settings_payload["enabled"]
def _sync_grafana_incident_plugin(organization: Organization, grafana_api_client) -> None:
    """Mirror the Grafana Incident plugin's "enabled" flag and backend URL onto the organization.

    Intended to be called only from _sync_organization: it mutates the org in
    memory and leaves persisting to the caller. A failed settings lookup
    (a None payload) leaves the org untouched; a payload without "jsonData"
    resolves the backend URL to None.
    """
    plugin_settings, _ = grafana_api_client.get_grafana_incident_plugin_settings()
    if plugin_settings is None:
        return
    organization.is_grafana_incident_enabled = plugin_settings["enabled"]
    json_data = plugin_settings.get("jsonData", {})
    organization.grafana_incident_backend_url = json_data.get(GrafanaAPIClient.GRAFANA_INCIDENT_PLUGIN_BACKEND_URL_KEY)
def sync_users_and_teams(client: GrafanaAPIClient, organization: Organization) -> None:
    """Pull users and then teams for the organization from the Grafana API."""
    # NOTE(review): users are synced before teams -- presumably team membership
    # resolution depends on user rows already existing; confirm before reordering.
    sync_users(client, organization)
    sync_teams(client, organization)

View file

@ -1,3 +1,5 @@
from dataclasses import dataclass
from typing import Optional
from unittest.mock import patch
import pytest
@ -8,7 +10,12 @@ from apps.alerts.models import AlertReceiveChannel
from apps.api.permissions import LegacyAccessControlRole
from apps.grafana_plugin.helpers.client import GcomAPIClient, GrafanaAPIClient
from apps.user_management.models import Team, User
from apps.user_management.sync import cleanup_organization, sync_organization
from apps.user_management.sync import (
_sync_grafana_incident_plugin,
_sync_grafana_labels_plugin,
cleanup_organization,
sync_organization,
)
MOCK_GRAFANA_INCIDENT_BACKEND_URL = "https://grafana-incident.test"
@ -177,14 +184,6 @@ def test_sync_users_for_team(make_organization, make_user_for_organization, make
@pytest.mark.django_db
@pytest.mark.parametrize(
"get_grafana_incident_plugin_settings_return_value",
[
({"enabled": True, "jsonData": {"backendUrl": MOCK_GRAFANA_INCIDENT_BACKEND_URL}}, None),
# missing jsonData (sometimes this is what we get back from the Grafana API)
({"enabled": True}, None),
],
)
@patch.object(GrafanaAPIClient, "is_rbac_enabled_for_organization", return_value=False)
@patch.object(
GrafanaAPIClient,
@ -221,18 +220,28 @@ def test_sync_users_for_team(make_organization, make_user_for_organization, make
)
@patch.object(GrafanaAPIClient, "check_token", return_value=(None, {"connected": True}))
@patch.object(GrafanaAPIClient, "get_grafana_incident_plugin_settings")
@patch.object(GrafanaAPIClient, "get_grafana_labels_plugin_settings")
@patch("apps.user_management.sync.org_sync_signal")
def test_sync_organization(
mocked_org_sync_signal,
mock_get_grafana_labels_plugin_settings,
mock_get_grafana_incident_plugin_settings,
_mock_check_token,
_mock_get_teams,
_mock_get_users,
_mock_is_rbac_enabled_for_organization,
get_grafana_incident_plugin_settings_return_value,
make_organization,
):
mock_get_grafana_incident_plugin_settings.return_value = get_grafana_incident_plugin_settings_return_value
# Set optimistic responses from grafana api.
# All cases are tested properly in test_sync_grafana_incident_plugin/test_sync_grafana_labels_plugin
mock_get_grafana_incident_plugin_settings.return_value = (
{
"enabled": True,
"jsonData": {"backendUrl": MOCK_GRAFANA_INCIDENT_BACKEND_URL},
},
None,
)
mock_get_grafana_labels_plugin_settings.return_value = ({"enabled": True, "jsonData": {}}, None)
organization = make_organization()
@ -266,10 +275,10 @@ def test_sync_organization(
# check that is_grafana_incident_enabled flag is set
assert organization.is_grafana_incident_enabled is True
if get_grafana_incident_plugin_settings_return_value[0].get("jsonData"):
assert organization.grafana_incident_backend_url == MOCK_GRAFANA_INCIDENT_BACKEND_URL
else:
assert organization.grafana_incident_backend_url is None
assert organization.grafana_incident_backend_url == MOCK_GRAFANA_INCIDENT_BACKEND_URL
# check that is_grafana_labels_enabled flag is set
assert organization.is_grafana_labels_enabled is True
mocked_org_sync_signal.send.assert_called_once_with(sender=None, organization=organization)
@ -328,7 +337,15 @@ def test_sync_organization_is_rbac_permissions_enabled_open_source(make_organiza
None,
),
):
sync_organization(organization)
with patch.object(
GrafanaAPIClient,
"get_grafana_labels_plugin_settings",
return_value=(
{"enabled": True, "jsonData": {}},
None,
),
):
sync_organization(organization)
organization.refresh_from_db()
assert organization.is_rbac_permissions_enabled == grafana_api_response
@ -396,7 +413,15 @@ def test_sync_organization_is_rbac_permissions_enabled_cloud(
None,
),
):
sync_organization(organization)
with patch.object(
GrafanaAPIClient,
"get_grafana_labels_plugin_settings",
return_value=(
{"enabled": True, "jsonData": {}},
None,
),
):
sync_organization(organization)
organization.refresh_from_db()
@ -467,3 +492,68 @@ def test_sync_organization_lock(make_organization):
mock_task_lock.assert_called_once_with(f"sync-organization-lock-{organization.id}", random_uuid)
assert not mock_client.called
@dataclass
class TestSyncGrafanaLabelsPluginParams:
    """Parametrization record for test_sync_grafana_labels_plugin (not itself a test case)."""

    # (payload, error) tuple as returned by
    # GrafanaAPIClient.get_grafana_labels_plugin_settings, e.g. ({"enabled": True}, None)
    response: tuple
    # expected organization.is_grafana_labels_enabled after the sync
    expected_result: bool
# NOTE: the original applied @pytest.mark.django_db twice (above and below
# parametrize); a single application is sufficient.
@pytest.mark.django_db
@pytest.mark.parametrize(
    "test_params",
    [
        TestSyncGrafanaLabelsPluginParams(({"enabled": True, "jsonData": {}}, None), True),
        TestSyncGrafanaLabelsPluginParams(({"enabled": True}, None), True),
        TestSyncGrafanaLabelsPluginParams(({"enabled": False}, None), False),
    ],
)
def test_sync_grafana_labels_plugin(make_organization, test_params: TestSyncGrafanaLabelsPluginParams):
    """_sync_grafana_labels_plugin must copy the plugin's "enabled" flag onto the org."""
    organization = make_organization()
    organization.is_grafana_labels_enabled = False  # by default in tests it's true, so setting to false

    with patch.object(
        GrafanaAPIClient,
        "get_grafana_labels_plugin_settings",
        return_value=test_params.response,
    ):
        grafana_api_client = GrafanaAPIClient(api_url=organization.grafana_url, api_token=organization.api_token)
        _sync_grafana_labels_plugin(organization, grafana_api_client)

    assert organization.is_grafana_labels_enabled is test_params.expected_result
@dataclass
class TestSyncGrafanaIncidentParams:
    """Parametrization record for test_sync_grafana_incident_plugin (not itself a test case)."""

    # (payload, error) tuple as returned by
    # GrafanaAPIClient.get_grafana_incident_plugin_settings
    response: tuple
    # expected organization.is_grafana_incident_enabled after the sync
    expected_flag: bool
    # expected organization.grafana_incident_backend_url after the sync
    expected_url: Optional[str]
# NOTE: the original applied @pytest.mark.django_db twice (above and below
# parametrize); a single application is sufficient.
@pytest.mark.django_db
@pytest.mark.parametrize(
    "test_params",
    [
        TestSyncGrafanaIncidentParams(
            ({"enabled": True, "jsonData": {"backendUrl": MOCK_GRAFANA_INCIDENT_BACKEND_URL}}, None),
            True,
            MOCK_GRAFANA_INCIDENT_BACKEND_URL,
        ),
        # missing jsonData (sometimes this is what we get back from the Grafana API)
        TestSyncGrafanaIncidentParams(({"enabled": True}, None), True, None),
        TestSyncGrafanaIncidentParams(({"enabled": False}, None), False, None),  # plugin is disabled for some reason
    ],
)
def test_sync_grafana_incident_plugin(make_organization, test_params: TestSyncGrafanaIncidentParams):
    """_sync_grafana_incident_plugin must copy the enabled flag and backend URL onto the org."""
    organization = make_organization()

    with patch.object(
        GrafanaAPIClient,
        "get_grafana_incident_plugin_settings",
        return_value=test_params.response,
    ):
        grafana_api_client = GrafanaAPIClient(api_url=organization.grafana_url, api_token=organization.api_token)
        _sync_grafana_incident_plugin(organization, grafana_api_client)

    assert organization.is_grafana_incident_enabled is test_params.expected_flag
    # compare the URL by equality, not identity: it is a str, and the original
    # `is` check only passed because the exact same constant object flowed through
    assert organization.grafana_incident_backend_url == test_params.expected_url

View file

@ -249,7 +249,6 @@ ACKNOWLEDGE_CONDITION = "acknowledge_condition"
GROUPING_ID = "grouping_id"
SOURCE_LINK = "source_link"
ROUTE = "route"
ALERT_GROUP_LABELS = "alert_group_labels"
ALERT_GROUP_MULTI_LABEL = "alert_group_multi_label"
ALERT_GROUP_DYNAMIC_LABEL = "alert_group_dynamic_label"

View file

View file

@ -0,0 +1,92 @@
import enum
import json
import logging
import typing
from urllib.parse import urljoin
import requests
from django.conf import settings
from rest_framework import status
if typing.TYPE_CHECKING:
from apps.user_management.models import Organization
logger = logging.getLogger(__name__)
class CloudAuthApiException(Exception):
    """Raised for any 4xx/5xx response from the Cloud Auth API.

    Captures the HTTP status, request URL, HTTP method, and error text so the
    failed call can be reconstructed from the exception alone.
    """

    def __init__(self, status, url, msg="", method="GET"):
        self.status = status
        self.url = url
        self.method = method
        self.msg = msg

    def __str__(self):
        details = f"status={self.status} url={self.url} method={self.method} error={self.msg}"
        return f"CloudAuthApiException: {details}"
class CloudAuthApiClient:
    """Thin client for the Grafana Cloud Auth API's token-signing endpoint."""

    class Scopes(enum.StrEnum):
        # scopes that can be baked into a signed token
        INCIDENT_WRITE = "incident:write"

    def __init__(self):
        """Read the API location and credentials from Django settings.

        Raises:
            RuntimeError: if GRAFANA_CLOUD_AUTH_API_URL or
                GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN is unset.
        """
        if settings.GRAFANA_CLOUD_AUTH_API_URL is None or settings.GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN is None:
            # NOTE: the original message was missing the trailing space and read
            # "...must be setto use CloudAuthApiClient"
            raise RuntimeError(
                "GRAFANA_CLOUD_AUTH_API_URL and GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN must be set "
                "to use CloudAuthApiClient"
            )
        self.api_base_url = settings.GRAFANA_CLOUD_AUTH_API_URL
        self.api_token = settings.GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN

    def request_signed_token(
        self, org: "Organization", scopes: typing.List[Scopes], claims: typing.Dict[str, typing.Any]
    ) -> str:
        """POST to the Cloud Auth API's /v1/sign endpoint and return the signed token.

        Args:
            org: organization whose org_id/stack_id scope the token.
            scopes: scopes to embed in the token's "extra" section.
            claims: arbitrary claims to embed in the token.

        Returns:
            The signed token string from the response's data.token field.

        Raises:
            CloudAuthApiException: on any non-200 response.
        """
        # The Cloud Auth API expects the org_id and stack_id to be strings
        org_id = str(org.org_id)
        stack_id = str(org.stack_id)

        # NOTE: header values must always be strings
        headers = {
            "Authorization": f"Bearer {self.api_token}",
            # need to cast to str otherwise - requests.exceptions.InvalidHeader: Header part ... from ('X-Org-ID', 5000)
            # must be of type str or bytes, not <class 'int'>
            "X-Org-ID": org_id,
            "X-Realms": json.dumps(
                [
                    {
                        "type": "stack",
                        "identifier": stack_id,
                    },
                ]
            ),
        }

        url = urljoin(self.api_base_url, "v1/sign")
        common_log_msg = f"org_id={org_id} stack_id={stack_id} url={url}"

        logger.info(f"Requesting signed token from Cloud Auth API {common_log_msg}")

        # NOTE(review): no timeout is set here, so a hung Cloud Auth API blocks
        # the caller indefinitely; the module tests pin these exact call args,
        # so add a timeout together with a test update.
        response = requests.post(
            url,
            headers=headers,
            json={
                "claims": claims,
                "extra": {
                    "scopes": scopes,
                    "org_id": org_id,
                },
            },
        )

        if response.status_code != status.HTTP_200_OK:
            logger.warning(
                f"Got non-HTTP 200 when attempting to request signed token from Cloud Auth API {common_log_msg} "
                f"status_code={response.status_code} response={response.text}"
            )
            raise CloudAuthApiException(response.status_code, url, response.text, method="POST")

        logger.info(f"Successfully requested signed token from Cloud Auth API {common_log_msg}")
        return response.json()["data"]["token"]

View file

@ -0,0 +1,86 @@
import json
from unittest.mock import patch
import pytest
from rest_framework import status
from common.cloud_auth_api.client import CloudAuthApiClient, CloudAuthApiException
GRAFANA_CLOUD_AUTH_API_URL = "http://example.com"
GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN = "asdfasdfasdfasdf"
@pytest.fixture(autouse=True)
def configure_cloud_auth_api_client(settings):
    """Point CloudAuthApiClient at the mock Cloud Auth API for every test in this module."""
    settings.GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN = GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN
    settings.GRAFANA_CLOUD_AUTH_API_URL = GRAFANA_CLOUD_AUTH_API_URL
@patch("common.cloud_auth_api.client.requests")
@pytest.mark.django_db
@pytest.mark.parametrize("response_status_code", [status.HTTP_200_OK, status.HTTP_401_UNAUTHORIZED])
def test_request_signed_token(mock_requests, make_organization, response_status_code):
    """request_signed_token returns the token on 200 and raises CloudAuthApiException otherwise.

    Also pins the exact URL, headers, and JSON body sent to the Cloud Auth API.
    """
    mock_auth_token = ",mnasdlkjlakjoqwejroiqwejr"
    mock_response_text = "error message"
    org_id = 1
    stack_id = 5
    organization = make_organization(stack_id=stack_id, org_id=org_id)
    scopes = ["incident:write", "foo:bar"]
    claims = {"vegetable": "carrot", "fruit": "apple"}

    # Minimal stand-in for requests.Response: fixed error text, parametrized
    # status code, and a json() payload shaped like the real API's success body.
    class MockResponse:
        text = mock_response_text

        def __init__(self, status_code):
            self.status_code = status_code

        def json(self):
            return {
                "data": {
                    "token": mock_auth_token,
                },
            }

    mock_requests.post.return_value = MockResponse(response_status_code)

    def _make_request():
        return CloudAuthApiClient().request_signed_token(organization, scopes, claims)

    url = f"{GRAFANA_CLOUD_AUTH_API_URL}/v1/sign"

    if response_status_code != status.HTTP_200_OK:
        # non-200: the client must raise with the response details preserved
        with pytest.raises(CloudAuthApiException) as excinfo:
            _make_request()
        assert excinfo.value.status == response_status_code
        assert excinfo.value.method == "POST"
        assert excinfo.value.msg == mock_response_text
        assert excinfo.value.url == url
    else:
        # 200: the client must unwrap data.token
        assert _make_request() == mock_auth_token

    # in both cases exactly one POST with these exact args must have been made;
    # note org_id/stack_id are stringified by the client
    mock_requests.post.assert_called_once_with(
        url,
        headers={
            "Authorization": f"Bearer {GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN}",
            "X-Org-ID": str(org_id),
            "X-Realms": json.dumps(
                [
                    {
                        "type": "stack",
                        "identifier": str(stack_id),
                    },
                ]
            ),
        },
        json={
            "claims": claims,
            "extra": {
                "scopes": scopes,
                "org_id": str(org_id),
            },
        },
    )

View file

@ -44,7 +44,7 @@ class ChatopsProxyAPIException(Exception):
self.msg = msg
def __str__(self):
return f"LabelsRepoAPIException: status={self.status} url={self.url} method={self.method} error={self.msg}"
return f"ChatopsProxyAPIException: status={self.status} url={self.url} method={self.method} error={self.msg}"
class ChatopsProxyAPIClient:

View file

@ -54,7 +54,7 @@ def check_slack_installation_possible(oncall_org_id: str, slack_id: str, backend
def create_slack_connector(oncall_org_id: str, slack_id: str, backend: str):
client = ChatopsProxyAPIClient(settings.ONCALL_GATEWAY_URL, settings.ONCALL_GATEWAY_API_TOKEN)
client = OnCallGatewayAPIClient(settings.ONCALL_GATEWAY_URL, settings.ONCALL_GATEWAY_API_TOKEN)
try:
client.post_slack_connector(oncall_org_id, slack_id, backend)
except Exception as e:
@ -175,11 +175,11 @@ def unregister_oncall_tenant_wrapper(service_tenant_id: str, cluster_slug: str):
delete_oncall_connector(service_tenant_id)
def can_link_slack_team_wrapper(service_tenant_id: str, slack_team_id, cluster_slug: str):
def can_link_slack_team_wrapper(service_tenant_id: str, slack_team_id, cluster_slug: str) -> bool:
if settings.CHATOPS_V3:
can_link_slack_team(service_tenant_id, slack_team_id, cluster_slug)
return can_link_slack_team(service_tenant_id, slack_team_id, cluster_slug)
else:
check_slack_installation_possible(service_tenant_id, slack_team_id, cluster_slug)
return check_slack_installation_possible(service_tenant_id, slack_team_id, cluster_slug)
def link_slack_team_wrapper(service_tenant_id: str, slack_team_id: str):

View file

@ -185,17 +185,17 @@ def mock_apply_async(monkeypatch):
monkeypatch.setattr(Task, "apply_async", mock_apply_async)
@pytest.fixture(autouse=True)
def mock_is_labels_feature_enabled(settings):
settings.FEATURE_LABELS_ENABLED_FOR_ALL = True
@pytest.fixture(autouse=True)
def clear_ical_users_cache():
# clear users pks <-> organization cache (persisting between tests)
memoized_users_in_ical.cache_clear()
@pytest.fixture(autouse=True)
def mock_is_labels_feature_enabled(settings):
settings.FEATURE_LABELS_ENABLED_FOR_ALL = True
@pytest.fixture
def mock_is_labels_feature_enabled_for_org(settings):
def _mock_is_labels_feature_enabled_for_org(org_id):
@ -208,7 +208,9 @@ def mock_is_labels_feature_enabled_for_org(settings):
@pytest.fixture
def make_organization():
def _make_organization(**kwargs):
return OrganizationFactory(**kwargs, is_rbac_permissions_enabled=IS_RBAC_ENABLED)
return OrganizationFactory(
**kwargs, is_rbac_permissions_enabled=IS_RBAC_ENABLED, is_grafana_labels_enabled=True
)
return _make_organization

View file

@ -56,4 +56,3 @@ babel==2.12.1
drf-spectacular==0.26.5
grpcio==1.57.0
markdown2==2.4.10
PyJWT==2.8.0

View file

@ -726,9 +726,8 @@ FCM_DJANGO_SETTINGS = {
}
MOBILE_APP_GATEWAY_ENABLED = getenv_boolean("MOBILE_APP_GATEWAY_ENABLED", default=False)
MOBILE_APP_GATEWAY_RSA_PRIVATE_KEY = os.environ.get("MOBILE_APP_GATEWAY_RSA_PRIVATE_KEY", None)
if MOBILE_APP_GATEWAY_ENABLED and not MOBILE_APP_GATEWAY_RSA_PRIVATE_KEY:
raise RuntimeError("MOBILE_APP_GATEWAY_RSA_PRIVATE_KEY is required when MOBILE_APP_GATEWAY_ENABLED is True")
GRAFANA_CLOUD_AUTH_API_URL = os.environ.get("GRAFANA_CLOUD_AUTH_API_URL", None)
GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN = os.environ.get("GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN", None)
SELF_HOSTED_SETTINGS = {
"STACK_ID": 5,

View file

@ -1,8 +1,9 @@
import React from 'react';
import { Button, InlineLabel, LoadingPlaceholder, Tooltip } from '@grafana/ui';
import { Button, InlineLabel, LoadingPlaceholder } from '@grafana/ui';
import cn from 'classnames/bind';
import WithConfirm from 'components/WithConfirm/WithConfirm';
import { WithPermissionControlTooltip } from 'containers/WithPermissionControl/WithPermissionControlTooltip';
import { UserActions } from 'utils/authorization';
@ -17,6 +18,7 @@ interface IntegrationTemplateBlockProps {
renderInput: () => React.ReactNode;
showHelp?: boolean;
isLoading?: boolean;
warningOnEdit?: string;
onEdit: (templateName) => void;
onRemove?: () => void;
@ -31,6 +33,7 @@ const IntegrationTemplateBlock: React.FC<IntegrationTemplateBlockProps> = ({
onEdit,
onRemove,
isLoading,
warningOnEdit,
}) => {
let tooltip = labelTooltip;
let inlineLabelProps = { tooltip };
@ -48,14 +51,24 @@ const IntegrationTemplateBlock: React.FC<IntegrationTemplateBlockProps> = ({
{isTemplateEditable && (
<>
<WithPermissionControlTooltip userAction={UserActions.IntegrationsWrite}>
<Tooltip content={'Edit'}>
<Button variant={'secondary'} icon={'edit'} tooltip="Edit" size={'md'} onClick={onEdit} />
</Tooltip>
<WithConfirm skip={!warningOnEdit} title="" body={warningOnEdit} confirmText="Edit">
<Button variant={'secondary'} icon="edit" tooltip="Edit" size="md" onClick={onEdit} />
</WithConfirm>
</WithPermissionControlTooltip>
<WithPermissionControlTooltip userAction={UserActions.IntegrationsWrite}>
<Tooltip content={'Reset template to default'}>
<Button variant={'secondary'} icon={'times'} size={'md'} onClick={onRemove} />
</Tooltip>
<WithConfirm
title=""
body={`Are you sure you want to reset the ${label} template to its default state?`}
confirmText="Reset"
>
<Button
variant="secondary"
icon="times"
size="md"
tooltip="Reset template to default"
onClick={onRemove}
/>
</WithConfirm>
</WithPermissionControlTooltip>
</>
)}

View file

@ -5,6 +5,7 @@ import { ConfirmModal, ConfirmModalProps } from '@grafana/ui';
type WithConfirmProps = Partial<ConfirmModalProps> & {
children: ReactElement;
disabled?: boolean;
skip?: boolean;
};
const WithConfirm: React.FC<WithConfirmProps> = ({
@ -15,14 +16,23 @@ const WithConfirm: React.FC<WithConfirmProps> = ({
confirmationText,
children,
disabled,
skip = false,
}) => {
const [showConfirmation, setShowConfirmation] = useState<boolean>(false);
const onClickCallback = useCallback((event) => {
event.stopPropagation();
const onClickCallback = useCallback(
(event) => {
if (skip) {
children.props.onClick?.();
return;
}
setShowConfirmation(true);
}, []);
event.stopPropagation();
setShowConfirmation(true);
},
[skip]
);
const onConfirmCallback = useCallback(() => {
if (children.props.onClick) {

View file

@ -1,6 +1,6 @@
import React, { useState, useCallback } from 'react';
import { ConfirmModal, InlineSwitch, Tooltip } from '@grafana/ui';
import { InlineSwitch, Tooltip } from '@grafana/ui';
import cn from 'classnames/bind';
import { observer } from 'mobx-react';
@ -39,7 +39,6 @@ const IntegrationTemplateList: React.FC<IntegrationTemplateListProps> = observer
const { alertReceiveChannelStore, features } = useStore();
const [isRestoringTemplate, setIsRestoringTemplate] = useState<boolean>(false);
const [templateRestoreName, setTemplateRestoreName] = useState<string>(undefined);
const [showConfirmModal, setShowConfirmModal] = useState(false);
const [autoresolveValue, setAutoresolveValue] = useState<boolean>(alertReceiveChannelAllowSourceBasedResolving);
const handleSaveClick = useCallback((event: React.ChangeEvent<HTMLInputElement>) => {
@ -57,19 +56,6 @@ const IntegrationTemplateList: React.FC<IntegrationTemplateListProps> = observer
return (
<div className={cx('integration__templates')}>
{showConfirmModal && (
<ConfirmModal
isOpen={true}
title={undefined}
confirmText={'Reset'}
dismissText="Cancel"
body={'Are you sure you want to reset Slack Title template to default state?'}
description={undefined}
confirmationText={undefined}
onConfirm={() => onResetTemplate(templateRestoreName)}
onDismiss={() => onDismiss()}
/>
)}
{templatesToRender.map((template, key) => (
<IntegrationBlockItem key={key}>
<VerticalBlock>
@ -78,10 +64,18 @@ const IntegrationTemplateList: React.FC<IntegrationTemplateListProps> = observer
<IntegrationTemplateBlock
key={innerKey}
isLoading={isRestoringTemplate && templateRestoreName === contents.name}
onRemove={() => onShowConfirmModal(contents.name)}
onRemove={() => onResetTemplate(contents.name)}
label={contents.label}
labelTooltip={contents.labelTooltip}
isTemplateEditable={isResolveConditionTemplateEditable(contents.name)}
isTemplateEditable={isTemplateEditable(contents.name)}
warningOnEdit={
alertReceiveChannelIsBasedOnAlertManager &&
(isGroupingIdTemplate(contents.name) || isResolveConditionTemplate(contents.name))
? 'Caution: Changing this template can lead to unexpected alert behavior, ' +
'especially if grouping is enabled in AlertManager/Grafana Alerting. ' +
'Please proceed only if you are completely sure of the modifications you are about to make.'
: undefined
}
renderInput={() => (
<>
{isResolveConditionTemplate(contents.name) && (
@ -90,10 +84,11 @@ const IntegrationTemplateList: React.FC<IntegrationTemplateListProps> = observer
value={autoresolveValue}
onChange={handleSaveClick}
className={cx('inline-switch')}
transparent
/>
</Tooltip>
)}
{isResolveConditionTemplateEditable(contents.name) && (
{isTemplateEditable(contents.name) && (
<div
className={cx('input', { 'input-with-toggle': isResolveConditionTemplate(contents.name) })}
>
@ -121,25 +116,16 @@ const IntegrationTemplateList: React.FC<IntegrationTemplateListProps> = observer
</div>
);
function isResolveConditionTemplateEditable(templateName: string) {
return (
!(alertReceiveChannelIsBasedOnAlertManager && isResolveConditionTemplate(templateName)) &&
(alertReceiveChannelAllowSourceBasedResolving || !isResolveConditionTemplate(templateName))
);
function isTemplateEditable(templateName: string) {
return alertReceiveChannelAllowSourceBasedResolving || !isResolveConditionTemplate(templateName);
}
function isResolveConditionTemplate(templateName: string) {
return templateName === 'resolve_condition_template';
}
function onShowConfirmModal(templateName: string) {
setTemplateRestoreName(templateName);
setShowConfirmModal(true);
}
function onDismiss() {
setTemplateRestoreName(undefined);
setShowConfirmModal(false);
function isGroupingIdTemplate(templateName: string) {
return templateName === 'grouping_id_template';
}
function onResetTemplate(templateName: string) {
@ -157,10 +143,6 @@ const IntegrationTemplateList: React.FC<IntegrationTemplateListProps> = observer
} else {
openErrorNotification(err.message);
}
})
.finally(() => {
setIsRestoringTemplate(false);
setShowConfirmModal(false);
});
}
}

View file

@ -206,9 +206,10 @@ const IntegrationLabelsForm = observer((props: IntegrationLabelsFormProps) => {
templates={templates}
templateBody={alertGroupLabels.custom[customLabelIndexToShowTemplateEditor].value.name}
onHide={() => setCustomLabelIndexToShowTemplateEditor(undefined)}
onUpdateTemplates={({ alert_group_labels }) => {
onUpdateTemplates={(templates) => {
const newCustom = [...alertGroupLabels.custom];
newCustom[customLabelIndexToShowTemplateEditor].value.name = alert_group_labels;
newCustom[customLabelIndexToShowTemplateEditor].value.name =
templates[LabelTemplateOptions.AlertGroupDynamicLabel.key];
setAlertGroupLabels({
...alertGroupLabels,
@ -229,10 +230,10 @@ const IntegrationLabelsForm = observer((props: IntegrationLabelsFormProps) => {
templates={templates}
templateBody={alertGroupLabels.template}
onHide={() => setShowTemplateEditor(false)}
onUpdateTemplates={({ alert_group_labels }) => {
onUpdateTemplates={(templates) => {
setAlertGroupLabels({
...alertGroupLabels,
template: alert_group_labels,
template: templates[LabelTemplateOptions.AlertGroupMultiLabel.key],
});
setShowTemplateEditor(undefined);

View file

@ -177,6 +177,7 @@ const OutgoingWebhookForm = observer((props: OutgoingWebhookFormProps) => {
preset: selectedPreset?.id,
trigger_type: null,
http_method: 'POST',
forward_all: true,
};
} else if (isNewOrCopy) {
data = { ...outgoingWebhookStore.items[id], is_legacy: false, name: '' };