Merge pull request #5180 from grafana/dev

v1.11.1
This commit is contained in:
Matias Bordese 2024-10-15 14:08:28 -03:00 committed by GitHub
commit 468051220b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
33 changed files with 217 additions and 68 deletions

View file

@ -108,12 +108,6 @@ This set of permissions is supporting the ability of Grafana OnCall to match use
- **View user groups in your workspace**
- **View profile details about people in your workspace**
### Perform actions as you
- **Send messages on your behalf** — this permission may sound suspicious, but it is actually the general ability
to send messages as the bot: <https://api.slack.com/scopes/chat:write>. Grafana OnCall will not impersonate you or post
using your handle to Slack; it will always post as the bot.
### Perform actions in channels & conversations
- **View messages that directly mention @grafana_oncall in conversations that the app is in**

View file

@ -122,7 +122,6 @@ oauth_config:
scopes:
user:
- channels:read
- chat:write
- identify
- users.profile:read
bot:

View file

@ -202,9 +202,6 @@ class AlertGroupSlackRenderer(AlertGroupBaseRenderer):
unsilence_button = _make_button("Unsilence", "UnSilenceGroupStep")
responders_button = _make_button("Responders", "StartManageResponders", "manage_responders")
attach_button = _make_button("Attach to ...", "SelectAttachGroupStep")
format_alert_button = _make_button(
":mag: Format Alert", "OpenAlertAppearanceDialogStep", "alertgroup_appearance"
)
resolution_notes_count = alert_group.resolution_notes.count()
resolution_notes_button = {
@ -275,9 +272,6 @@ class AlertGroupSlackRenderer(AlertGroupBaseRenderer):
else:
buttons.append(unresolve_button)
if integration.is_available_for_custom_templates:
buttons.append(format_alert_button)
buttons.append(resolution_notes_button)
if grafana_incident_enabled and not alert_group.acknowledged:

View file

@ -26,6 +26,9 @@ if typing.TYPE_CHECKING:
from apps.user_management.models import User
RETRY_TIMEOUT_HOURS = 1
def schedule_send_bundled_notification_task(
user_notification_bundle: "UserNotificationBundle", alert_group: "AlertGroup"
):
@ -445,10 +448,29 @@ def perform_notification(log_record_pk, use_default_notification_policy_fallback
try:
TelegramToUserConnector.notify_user(user, alert_group, notification_policy)
except RetryAfter as e:
countdown = getattr(e, "retry_after", 3)
raise perform_notification.retry(
(log_record_pk, use_default_notification_policy_fallback), countdown=countdown, exc=e
)
task_logger.exception(f"Telegram API rate limit exceeded. Retry after {e.retry_after} seconds.")
# check how much time has passed since log record was created
# to prevent eternal loop of restarting perform_notification task
if timezone.now() < log_record.created_at + timezone.timedelta(hours=RETRY_TIMEOUT_HOURS):
countdown = getattr(e, "retry_after", 3)
perform_notification.apply_async(
(log_record_pk, use_default_notification_policy_fallback), countdown=countdown
)
else:
task_logger.debug(
f"telegram notification for alert_group {alert_group.pk} failed because of rate limit"
)
UserNotificationPolicyLogRecord(
author=user,
type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,
notification_policy=notification_policy,
reason="Telegram rate limit exceeded",
alert_group=alert_group,
notification_step=notification_policy.step,
notification_channel=notification_channel,
notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_IN_TELEGRAM_RATELIMIT,
).save()
return
elif notification_channel == UserNotificationPolicy.NotificationChannel.SLACK:
# TODO: refactor checking the possibility of sending a notification in slack
@ -516,13 +538,12 @@ def perform_notification(log_record_pk, use_default_notification_policy_fallback
).save()
return
retry_timeout_hours = 1
if alert_group.slack_message:
alert_group.slack_message.send_slack_notification(user, alert_group, notification_policy)
task_logger.debug(f"Finished send_slack_notification for alert_group {alert_group.pk}.")
# check how much time has passed since log record was created
# to prevent eternal loop of restarting perform_notification task
elif timezone.now() < log_record.created_at + timezone.timedelta(hours=retry_timeout_hours):
elif timezone.now() < log_record.created_at + timezone.timedelta(hours=RETRY_TIMEOUT_HOURS):
task_logger.debug(
f"send_slack_notification for alert_group {alert_group.pk} failed because slack message "
f"does not exist. Restarting perform_notification."
@ -534,7 +555,7 @@ def perform_notification(log_record_pk, use_default_notification_policy_fallback
else:
task_logger.debug(
f"send_slack_notification for alert_group {alert_group.pk} failed because slack message "
f"after {retry_timeout_hours} hours still does not exist"
f"after {RETRY_TIMEOUT_HOURS} hours still does not exist"
)
UserNotificationPolicyLogRecord(
author=user,

View file

@ -360,12 +360,30 @@ def test_perform_notification_telegram_retryafter_error(
countdown = 15
exc = RetryAfter(countdown)
with patch.object(TelegramToUserConnector, "notify_user", side_effect=exc) as mock_notify_user:
with pytest.raises(RetryAfter):
with patch.object(perform_notification, "apply_async") as mock_apply_async:
perform_notification(log_record.pk, False)
mock_notify_user.assert_called_once_with(user, alert_group, user_notification_policy)
# task is rescheduled using the countdown value from the exception
mock_apply_async.assert_called_once_with((log_record.pk, False), countdown=countdown)
assert alert_group.personal_log_records.last() == log_record
# but if the log was too old, skip and create a failed log record
log_record.created_at = timezone.now() - timezone.timedelta(minutes=90)
log_record.save()
with patch.object(TelegramToUserConnector, "notify_user", side_effect=exc) as mock_notify_user:
with patch.object(perform_notification, "apply_async") as mock_apply_async:
perform_notification(log_record.pk, False)
mock_notify_user.assert_called_once_with(user, alert_group, user_notification_policy)
assert not mock_apply_async.called
last_log_record = UserNotificationPolicyLogRecord.objects.last()
assert last_log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED
assert last_log_record.reason == "Telegram rate limit exceeded"
assert (
last_log_record.notification_error_code
== UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_IN_TELEGRAM_RATELIMIT
)
@patch("apps.base.models.UserNotificationPolicy.get_default_fallback_policy")
@patch("apps.base.tests.messaging_backend.TestOnlyBackend.notify_user")

View file

@ -106,7 +106,8 @@ class UserNotificationPolicyLogRecord(models.Model):
ERROR_NOTIFICATION_TELEGRAM_USER_IS_DEACTIVATED,
ERROR_NOTIFICATION_MOBILE_USER_HAS_NO_ACTIVE_DEVICE,
ERROR_NOTIFICATION_FORMATTING_ERROR,
) = range(29)
ERROR_NOTIFICATION_IN_TELEGRAM_RATELIMIT,
) = range(30)
# for these errors we want to send a message to the general log channel
ERRORS_TO_SEND_IN_SLACK_CHANNEL = [
@ -304,6 +305,10 @@ class UserNotificationPolicyLogRecord(models.Model):
result += f"failed to notify {user_verbal} in Slack, because channel is archived"
elif self.notification_error_code == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_IN_SLACK_RATELIMIT:
result += f"failed to notify {user_verbal} in Slack due to Slack rate limit"
elif (
self.notification_error_code == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_IN_TELEGRAM_RATELIMIT
):
result += f"failed to notify {user_verbal} in Telegram due to Telegram rate limit"
elif self.notification_error_code == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_FORBIDDEN:
result += f"failed to notify {user_verbal}, not allowed"
elif (

View file

@ -11,6 +11,9 @@ from rest_framework import status
from apps.api.permissions import GrafanaAPIPermission, GrafanaAPIPermissions
from common.constants.plugin_ids import PluginID
if typing.TYPE_CHECKING:
from apps.user_management.models import Organization
logger = logging.getLogger(__name__)
@ -309,6 +312,9 @@ class GrafanaAPIClient(APIClient):
def get_grafana_labels_plugin_settings(self) -> APIClientResponse["GrafanaAPIClient.Types.PluginSettings"]:
return self.get_grafana_plugin_settings(PluginID.LABELS)
def get_grafana_irm_plugin_settings(self) -> APIClientResponse["GrafanaAPIClient.Types.PluginSettings"]:
return self.get_grafana_plugin_settings(PluginID.IRM)
def get_service_account(self, login: str) -> APIClientResponse["GrafanaAPIClient.Types.ServiceAccountResponse"]:
return self.api_get(f"api/serviceaccounts/search?query={login}")
@ -328,8 +334,8 @@ class GrafanaAPIClient(APIClient):
def get_service_account_token_permissions(self) -> APIClientResponse[typing.Dict[str, typing.List[str]]]:
return self.api_get("api/access-control/user/permissions")
def sync(self) -> APIClientResponse:
return self.api_post("api/plugins/grafana-oncall-app/resources/plugin/sync")
def sync(self, organization: "Organization") -> APIClientResponse:
return self.api_post(f"api/plugins/{organization.active_ui_plugin_id}/resources/plugin/sync")
@staticmethod
def validate_grafana_token_format(grafana_token: str) -> bool:

View file

@ -71,6 +71,7 @@ class SyncOnCallSettingsSerializer(serializers.Serializer):
incident_enabled = serializers.BooleanField()
incident_backend_url = serializers.CharField(allow_blank=True)
labels_enabled = serializers.BooleanField()
irm_enabled = serializers.BooleanField(default=False)
def create(self, validated_data):
return SyncSettings(**validated_data)

View file

@ -40,6 +40,7 @@ class SyncSettings:
incident_enabled: bool
incident_backend_url: str
labels_enabled: bool
irm_enabled: bool
@dataclass

View file

@ -49,7 +49,7 @@ def sync_organizations_v2(org_ids=None):
organization_qs = Organization.objects.filter(id__in=org_ids)
for org in organization_qs:
client = GrafanaAPIClient(api_url=org.grafana_url, api_token=org.api_token)
_, status = client.sync()
_, status = client.sync(org)
if status["status_code"] != 200:
logger.error(
f"Failed to request sync org_id={org.pk} stack_slug={org.stack_slug} status_code={status['status_code']} url={status['url']} message={status['message']}"

View file

@ -12,7 +12,8 @@ from rest_framework.test import APIClient
from apps.api.permissions import LegacyAccessControlRole
from apps.grafana_plugin.serializers.sync_data import SyncTeamSerializer
from apps.grafana_plugin.sync_data import SyncData, SyncSettings, SyncUser
from apps.grafana_plugin.tasks.sync_v2 import start_sync_organizations_v2
from apps.grafana_plugin.tasks.sync_v2 import start_sync_organizations_v2, sync_organizations_v2
from common.constants.plugin_ids import PluginID
@pytest.mark.django_db
@ -121,6 +122,7 @@ def test_sync_v2_content_encoding(
incident_enabled=False,
incident_backend_url="",
labels_enabled=False,
irm_enabled=False,
),
)
@ -140,6 +142,57 @@ def test_sync_v2_content_encoding(
mock_sync.assert_called()
@pytest.mark.parametrize(
"irm_enabled,expected",
[
(True, True),
(False, False),
],
)
@pytest.mark.django_db
def test_sync_v2_irm_enabled(
make_organization_and_user_with_plugin_token,
make_user_auth_headers,
settings,
irm_enabled,
expected,
):
settings.LICENSE = settings.CLOUD_LICENSE_NAME
organization, _, token = make_organization_and_user_with_plugin_token()
assert organization.is_grafana_irm_enabled is False
client = APIClient()
headers = make_user_auth_headers(None, token, organization=organization)
url = reverse("grafana-plugin:sync-v2")
data = SyncData(
users=[],
teams=[],
team_members={},
settings=SyncSettings(
stack_id=organization.stack_id,
org_id=organization.org_id,
license=settings.CLOUD_LICENSE_NAME,
oncall_api_url="http://localhost",
oncall_token="",
grafana_url="http://localhost",
grafana_token="fake_token",
rbac_enabled=False,
incident_enabled=False,
incident_backend_url="",
labels_enabled=False,
irm_enabled=irm_enabled,
),
)
response = client.post(url, format="json", data=asdict(data), **headers)
assert response.status_code == status.HTTP_200_OK
organization.refresh_from_db()
assert organization.is_grafana_irm_enabled == expected
@pytest.mark.parametrize(
"test_team, validation_pass",
[
@ -190,3 +243,23 @@ def test_sync_batch_tasks(make_organization, settings):
assert check_call(actual_call, expected_call)
assert mock_sync.call_count == len(expected_calls)
@patch(
"apps.grafana_plugin.tasks.sync_v2.GrafanaAPIClient.api_post",
return_value=(None, {"status_code": status.HTTP_200_OK}),
)
@pytest.mark.parametrize(
"is_grafana_irm_enabled,expected",
[
(True, PluginID.IRM),
(False, PluginID.ONCALL),
],
)
@pytest.mark.django_db
def test_sync_organizations_v2_calls_right_backend_plugin_sync_endpoint(
mocked_grafana_api_client_api_post, make_organization, is_grafana_irm_enabled, expected
):
org = make_organization(is_grafana_irm_enabled=is_grafana_irm_enabled)
sync_organizations_v2(org_ids=[org.pk])
mocked_grafana_api_client_api_post.assert_called_once_with(f"api/plugins/{expected}/resources/plugin/sync")

View file

@ -2,14 +2,17 @@ from rest_framework import serializers
from apps.alerts.models import EscalationChain
from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField
from common.api_helpers.mixins import EagerLoadingMixin
from common.api_helpers.utils import CurrentOrganizationDefault
class EscalationChainSerializer(serializers.ModelSerializer):
class EscalationChainSerializer(EagerLoadingMixin, serializers.ModelSerializer):
id = serializers.ReadOnlyField(source="public_primary_key")
organization = serializers.HiddenField(default=CurrentOrganizationDefault())
team_id = TeamPrimaryKeyRelatedField(required=False, allow_null=True, source="team")
SELECT_RELATED = ["organization", "team"]
class Meta:
model = EscalationChain
fields = (

View file

@ -107,7 +107,13 @@ class EscalationPolicySerializer(EagerLoadingMixin, OrderedModelSerializer):
]
PREFETCH_RELATED = ["notify_to_users_queue"]
SELECT_RELATED = ["escalation_chain"]
SELECT_RELATED = [
"custom_webhook",
"escalation_chain",
"notify_schedule",
"notify_to_group",
"notify_to_team_members",
]
@cached_property
def escalation_chain(self):

View file

@ -85,7 +85,7 @@ class IntegrationSerializer(EagerLoadingMixin, serializers.ModelSerializer, Main
description_short = serializers.CharField(max_length=250, required=False, allow_null=True)
PREFETCH_RELATED = ["channel_filters"]
SELECT_RELATED = ["organization", "integration_heartbeat"]
SELECT_RELATED = ["organization", "integration_heartbeat", "team"]
class Meta:
model = AlertReceiveChannel

View file

@ -122,7 +122,7 @@ class CustomOnCallShiftSerializer(EagerLoadingMixin, serializers.ModelSerializer
"source": {"required": False, "write_only": True},
}
SELECT_RELATED = ["schedule"]
SELECT_RELATED = ["organization", "team", "schedule"]
PREFETCH_RELATED = ["schedules", "users"]
def create(self, validated_data):

View file

@ -4,6 +4,7 @@ from apps.alerts.models import AlertReceiveChannel, ChannelFilter, EscalationCha
from apps.base.messaging import get_messaging_backend_from_id, get_messaging_backends
from common.api_helpers.custom_fields import OrganizationFilteredPrimaryKeyRelatedField
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.mixins import EagerLoadingMixin
from common.api_helpers.utils import valid_jinja_template_for_serializer_method_field
from common.jinja_templater.apply_jinja_template import JinjaTemplateError
from common.ordered_model.serializer import OrderedModelSerializer
@ -129,7 +130,7 @@ class RoutingTypeField(fields.CharField):
raise BadRequest(detail="Invalid route type")
class ChannelFilterSerializer(BaseChannelFilterSerializer):
class ChannelFilterSerializer(EagerLoadingMixin, BaseChannelFilterSerializer):
id = serializers.CharField(read_only=True, source="public_primary_key")
slack = serializers.DictField(required=False)
telegram = serializers.DictField(required=False)
@ -146,6 +147,8 @@ class ChannelFilterSerializer(BaseChannelFilterSerializer):
is_the_last_route = serializers.BooleanField(read_only=True, source="is_default")
SELECT_RELATED = ["alert_receive_channel", "escalation_chain"]
class Meta:
model = ChannelFilter
fields = OrderedModelSerializer.Meta.fields + [

View file

@ -6,21 +6,26 @@ from apps.schedules.ical_utils import list_users_to_notify_from_ical
from apps.slack.models import SlackUserGroup
from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.mixins import EagerLoadingMixin
class ScheduleBaseSerializer(serializers.ModelSerializer):
class ScheduleBaseSerializer(EagerLoadingMixin, serializers.ModelSerializer):
id = serializers.CharField(read_only=True, source="public_primary_key")
on_call_now = serializers.SerializerMethodField()
slack = serializers.DictField(required=False)
team_id = TeamPrimaryKeyRelatedField(required=False, allow_null=True, source="team")
SELECT_RELATED = ["team"]
def create(self, validated_data):
validated_data = self._correct_validated_data(validated_data)
validated_data["organization"] = self.context["request"].auth.organization
return super().create(validated_data)
def get_on_call_now(self, obj):
users_on_call = list_users_to_notify_from_ical(obj, datetime.datetime.now(datetime.timezone.utc))
users_on_call = list_users_to_notify_from_ical(
obj, datetime.datetime.now(datetime.timezone.utc), from_cached_final=True
)
if users_on_call is not None:
return [user.public_primary_key for user in users_on_call]
else:

View file

@ -5,11 +5,10 @@ from apps.schedules.tasks import (
schedule_notify_about_empty_shifts_in_schedule,
schedule_notify_about_gaps_in_schedule,
)
from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField, TimeZoneField, UsersFilteredByOrganizationField
from common.api_helpers.custom_fields import TimeZoneField, UsersFilteredByOrganizationField
class ScheduleWebSerializer(ScheduleBaseSerializer):
team_id = TeamPrimaryKeyRelatedField(required=False, allow_null=True, source="team")
time_zone = TimeZoneField(required=True)
shifts = UsersFilteredByOrganizationField(
queryset=CustomOnCallShift.objects,

View file

@ -8,6 +8,7 @@ from apps.webhooks.models.webhook import PUBLIC_WEBHOOK_HTTP_METHODS, WEBHOOK_FI
from apps.webhooks.presets.preset_options import WebhookPresetOptions
from common.api_helpers.custom_fields import IntegrationFilteredByOrganizationField, TeamPrimaryKeyRelatedField
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.mixins import EagerLoadingMixin
from common.api_helpers.utils import CurrentOrganizationDefault, CurrentTeamDefault, CurrentUserDefault
from common.jinja_templater import apply_jinja_template
from common.jinja_templater.apply_jinja_template import JinjaTemplateError, JinjaTemplateWarning
@ -48,7 +49,7 @@ class WebhookResponseSerializer(serializers.ModelSerializer):
]
class WebhookCreateSerializer(serializers.ModelSerializer):
class WebhookCreateSerializer(EagerLoadingMixin, serializers.ModelSerializer):
id = serializers.CharField(read_only=True, source="public_primary_key")
organization = serializers.HiddenField(default=CurrentOrganizationDefault())
team = TeamPrimaryKeyRelatedField(allow_null=True, default=CurrentTeamDefault())
@ -58,6 +59,8 @@ class WebhookCreateSerializer(serializers.ModelSerializer):
source="filtered_integrations", many=True, required=False
)
SELECT_RELATED = ["organization", "team"]
class Meta:
model = Webhook
fields = [

View file

@ -34,6 +34,7 @@ class EscalationChainView(RateLimitHeadersMixin, ModelViewSet):
if name is not None:
queryset = queryset.filter(name=name)
queryset = self.serializer_class.setup_eager_loading(queryset)
return queryset.order_by("id")
def get_object(self):

View file

@ -1,4 +1,3 @@
from django.db.models import Count
from django_filters import rest_framework as filters
from rest_framework.exceptions import NotFound
from rest_framework.permissions import IsAuthenticated
@ -47,7 +46,6 @@ class IntegrationView(
queryset = queryset.filter(verbal_name=name)
queryset = self.filter_queryset(queryset)
queryset = self.serializer_class.setup_eager_loading(queryset)
queryset = queryset.annotate(alert_groups_count_annotated=Count("alert_groups", distinct=True))
return queryset

View file

@ -37,6 +37,7 @@ class ChannelFilterView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewS
queryset = ChannelFilter.objects.filter(
alert_receive_channel__organization=self.request.auth.organization, alert_receive_channel__deleted_at=None
)
queryset = self.serializer_class.setup_eager_loading(queryset)
if integration_id:
queryset = queryset.filter(alert_receive_channel__public_primary_key=integration_id)

View file

@ -57,6 +57,7 @@ class OnCallScheduleChannelView(RateLimitHeadersMixin, UpdateSerializerMixin, Mo
if name is not None:
queryset = queryset.filter(name=name)
queryset = self.serializer_class.setup_eager_loading(queryset)
return queryset.order_by("id")
def get_object(self):

View file

@ -42,6 +42,7 @@ class WebhooksView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet):
if webhook_name:
queryset = queryset.filter(name=webhook_name)
queryset = self.serializer_class.setup_eager_loading(queryset)
return queryset.order_by("id")
def get_object(self):

View file

@ -344,6 +344,7 @@ def list_of_empty_shifts_in_schedule(
def list_users_to_notify_from_ical(
schedule: "OnCallSchedule",
events_datetime: typing.Optional[datetime.datetime] = None,
from_cached_final: bool = False,
) -> typing.Sequence["User"]:
"""
Retrieve on-call users for the current time
@ -353,6 +354,7 @@ def list_users_to_notify_from_ical(
schedule,
events_datetime,
events_datetime,
from_cached_final=from_cached_final,
)
@ -360,8 +362,12 @@ def list_users_to_notify_from_ical_for_period(
schedule: "OnCallSchedule",
start_datetime: datetime.datetime,
end_datetime: datetime.datetime,
from_cached_final: bool = False,
) -> typing.Sequence["User"]:
events = schedule.final_events(start_datetime, end_datetime)
if from_cached_final and schedule.cached_ical_final_schedule:
events = schedule.filter_events(start_datetime, end_datetime, from_cached_final=True)
else:
events = schedule.final_events(start_datetime, end_datetime)
usernames: typing.List[str] = []
for event in events:
usernames += [u["email"] for u in event.get("users", [])]

View file

@ -235,30 +235,6 @@ def test_slack_renderer_unattach_button(make_organization, make_alert_receive_ch
)
@pytest.mark.django_db
def test_slack_renderer_format_alert_button(
make_organization, make_alert_receive_channel, make_alert_group, make_alert
):
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(organization)
alert_group = make_alert_group(alert_receive_channel)
make_alert(alert_group=alert_group, raw_request_data={})
elements = AlertGroupSlackRenderer(alert_group).render_alert_group_attachments()[0]["blocks"][0]["elements"]
button = elements[5]
assert button["text"]["text"] == ":mag: Format Alert"
assert json.loads(button["value"]) == json.loads(
make_value(
{
"organization_id": organization.pk,
"alert_group_ppk": alert_group.public_primary_key,
},
organization,
)
)
@pytest.mark.django_db
def test_slack_renderer_resolution_notes_button(
make_organization, make_alert_receive_channel, make_alert_group, make_alert
@ -270,7 +246,7 @@ def test_slack_renderer_resolution_notes_button(
elements = AlertGroupSlackRenderer(alert_group).render_alert_group_attachments()[0]["blocks"][0]["elements"]
button = elements[6]
button = elements[5]
assert button["text"]["text"] == "Add Resolution notes"
assert json.loads(button["value"]) == json.loads(
make_value(

View file

@ -128,8 +128,6 @@ class SlackEventApiEndpointView(APIView):
if isinstance(payload, str):
payload = json.JSONDecoder().decode(payload)
logger.info(f"Slack payload is {payload}")
# Checking if it's repeated Slack request
if "HTTP_X_SLACK_RETRY_NUM" in request.META and int(request.META["HTTP_X_SLACK_RETRY_NUM"]) > 1:
logger.critical(

View file

@ -9,7 +9,7 @@ from apps.auth_token.models import GoogleOAuth2Token, SlackAuthToken
# Scopes for slack user token.
# Its main purpose is to retrieve user data in SlackOAuth2V2, but it is also used in legacy code and unusual Slack API edge cases.
USER_SCOPE = ["channels:read", "identify", "chat:write", "users.profile:read", "users:read", "users:read.email"]
USER_SCOPE = ["channels:read", "identify", "users.profile:read", "users:read", "users:read.email"]
# Scopes for slack bot token.
# It is the primary token we use for most requests to the Slack API.

View file

@ -18,6 +18,7 @@ from apps.chatops_proxy.utils import (
from apps.grafana_plugin.ui_url_builder import UIURLBuilder
from apps.user_management.subscription_strategy import FreePublicBetaSubscriptionStrategy
from apps.user_management.types import AlertGroupTableColumn
from common.constants.plugin_ids import PluginID
from common.insight_log import ChatOpsEvent, ChatOpsTypePlug, write_chatops_insight_log
from common.public_primary_keys import generate_public_primary_key, increase_public_primary_key_length
@ -352,6 +353,13 @@ class Organization(MaintainableObject):
"""
return UIURLBuilder(self).home(f"?oncall-uuid={self.uuid}")
@property
def active_ui_plugin_id(self) -> str:
"""
If `is_grafana_irm_enabled` is True, this will be IRM, otherwise OnCall
"""
return PluginID.IRM if self.is_grafana_irm_enabled else PluginID.ONCALL
@classmethod
def __str__(self):
return f"{self.pk}: {self.org_title}"

View file

@ -69,6 +69,12 @@ def _sync_organization(organization: Organization) -> None:
if grafana_labels_plugin_settings is not None:
is_grafana_labels_enabled = grafana_labels_plugin_settings["enabled"]
# get IRM plugin settings
is_grafana_irm_enabled = False
grafana_irm_plugin_settings, _ = grafana_api_client.get_grafana_irm_plugin_settings()
if grafana_irm_plugin_settings is not None:
is_grafana_irm_enabled = grafana_irm_plugin_settings["enabled"]
oncall_api_url = settings.BASE_URL
if settings.LICENSE == CLOUD_LICENSE_NAME:
oncall_api_url = settings.GRAFANA_CLOUD_ONCALL_API_URL
@ -85,6 +91,7 @@ def _sync_organization(organization: Organization) -> None:
incident_enabled=is_grafana_incident_enabled,
incident_backend_url=grafana_incident_backend_url,
labels_enabled=is_grafana_labels_enabled,
irm_enabled=is_grafana_irm_enabled,
)
_sync_organization_data(organization, sync_settings)
if organization.api_token_status == Organization.API_TOKEN_STATUS_OK:
@ -288,6 +295,7 @@ def _sync_organization_data(organization: Organization, sync_settings: SyncSetti
organization.is_rbac_permissions_enabled = sync_settings.rbac_enabled
logger.info(f"RBAC status org={organization.pk} rbac_enabled={organization.is_rbac_permissions_enabled}")
organization.is_grafana_irm_enabled = sync_settings.irm_enabled
organization.is_grafana_labels_enabled = sync_settings.labels_enabled
organization.is_grafana_incident_enabled = sync_settings.incident_enabled
organization.grafana_incident_backend_url = sync_settings.incident_backend_url
@ -321,6 +329,7 @@ def _sync_organization_data(organization: Organization, sync_settings: SyncSetti
"is_rbac_permissions_enabled",
"is_grafana_incident_enabled",
"is_grafana_labels_enabled",
"is_grafana_irm_enabled",
"grafana_incident_backend_url",
]
)

View file

@ -9,6 +9,7 @@ from apps.base.models import UserNotificationPolicy, UserNotificationPolicyLogRe
from apps.schedules.models import OnCallScheduleICal, OnCallScheduleWeb
from apps.telegram.models import TelegramMessage
from apps.user_management.models import Organization
from common.constants.plugin_ids import PluginID
@pytest.mark.django_db
@ -276,3 +277,16 @@ def test_get_notifiable_direct_paging_integrations(
make_channel_filter(arc, is_default=False)
notifiable_direct_paging_integrations = _assert(org, arc)
assert notifiable_direct_paging_integrations.count() == 1
@pytest.mark.parametrize(
"is_grafana_irm_enabled,expected",
[
(True, PluginID.IRM),
(False, PluginID.ONCALL),
],
)
@pytest.mark.django_db
def test_active_ui_plugin_id(make_organization, is_grafana_irm_enabled, expected):
org = make_organization(is_grafana_irm_enabled=is_grafana_irm_enabled)
assert org.active_ui_plugin_id == expected

View file

@ -68,6 +68,10 @@ def patched_grafana_api_client(organization, is_rbac_enabled_for_organization=(F
],
None,
)
mock_client_instance.get_grafana_irm_plugin_settings.return_value = (
{"enabled": False, "jsonData": {}},
None,
)
mock_client_instance.get_grafana_incident_plugin_settings.return_value = (
{"enabled": True, "jsonData": {GRAFANA_INCIDENT_PLUGIN_BACKEND_URL_KEY: MOCK_GRAFANA_INCIDENT_BACKEND_URL}},
None,
@ -587,6 +591,7 @@ def test_apply_sync_data_none_values(make_organization):
grafana_token=organization.api_token,
oncall_token=organization.gcom_token,
grafana_url=organization.grafana_url,
irm_enabled=False,
),
)

View file

@ -7,7 +7,6 @@ CELERY_TASK_ROUTES = {
"apps.alerts.tasks.delete_alert_group.send_alert_group_signal_for_delete": {"queue": "default"},
"apps.alerts.tasks.delete_alert_group.finish_delete_alert_group": {"queue": "default"},
"apps.alerts.tasks.invalidate_web_cache_for_alert_group.invalidate_web_cache_for_alert_group": {"queue": "default"},
"apps.alerts.tasks.send_alert_group_signal.send_alert_group_signal": {"queue": "default"},
"apps.alerts.tasks.wipe.wipe": {"queue": "default"},
"common.oncall_gateway.tasks.create_oncall_connector_async": {"queue": "default"},
"common.oncall_gateway.tasks.delete_oncall_connector_async": {"queue": "default"},
@ -134,6 +133,7 @@ CELERY_TASK_ROUTES = {
"apps.phone_notifications.phone_backend.notify_by_sms_bundle_async_task": {"queue": "critical"},
"apps.schedules.tasks.drop_cached_ical.drop_cached_ical_for_custom_events_for_organization": {"queue": "critical"},
"apps.schedules.tasks.drop_cached_ical.drop_cached_ical_task": {"queue": "critical"},
"apps.alerts.tasks.send_alert_group_signal.send_alert_group_signal": {"queue": "critical"},
# GRAFANA
"apps.grafana_plugin.tasks.sync.plugin_sync_organization_async": {"queue": "grafana"},
# LONG