Merge pull request #3843 from grafana/dev

v1.3.102
This commit is contained in:
Matias Bordese 2024-02-06 15:12:09 -03:00 committed by GitHub
commit d63b5ba026
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
16 changed files with 126 additions and 62 deletions

View file

@ -146,7 +146,7 @@ jobs:
unit-test-backend-mysql-rabbitmq:
name: "Backend Tests: MySQL + RabbitMQ (RBAC enabled: ${{ matrix.rbac_enabled }})"
runs-on: ubuntu-latest
runs-on: ubuntu-latest-8-cores
strategy:
matrix:
rbac_enabled: ["True", "False"]
@ -189,7 +189,7 @@ jobs:
unit-test-backend-postgresql-rabbitmq:
name: "Backend Tests: PostgreSQL + RabbitMQ (RBAC enabled: ${{ matrix.rbac_enabled }})"
runs-on: ubuntu-latest
runs-on: ubuntu-latest-8-cores
strategy:
matrix:
rbac_enabled: ["True", "False"]
@ -238,7 +238,7 @@ jobs:
unit-test-backend-sqlite-redis:
name: "Backend Tests: SQLite + Redis (RBAC enabled: ${{ matrix.rbac_enabled }})"
runs-on: ubuntu-latest
runs-on: ubuntu-latest-8-cores
strategy:
matrix:
rbac_enabled: ["True", "False"]

View file

@ -5,9 +5,13 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## Unreleased
## v1.3.102 (2024-02-06)
## v1.3.101 (2024-05-01)
### Fixed
Maintenance release
## v1.3.101 (2024-02-05)
### Added

View file

@ -30,6 +30,8 @@ The above command returns JSON structured in the following way:
"created_at": "2020-05-19T12:37:01.430444Z",
"resolved_at": "2020-05-19T13:37:01.429805Z",
"acknowledged_at": null,
"acknowledged_by": null,
"resolved_by": "UCGEIXI1MR1NZ",
"title": "Memory above 90% threshold",
"permalinks": {
"slack": "https://ghostbusters.slack.com/archives/C1H9RESGA/p135854651500008",
@ -54,6 +56,18 @@ These available filter parameters should be provided as `GET` arguments:
`GET {{API_URL}}/api/v1/alert_groups/`
# Alert group details
```shell
curl "{{API_URL}}/api/v1/alert_groups/I68T24C13IFW1" \
--request GET \
--header "Authorization: meowmeowmeow"
```
**HTTP request**
`GET {{API_URL}}/api/v1/alert_groups/<ALERT_GROUP_ID>`
# Acknowledge an alert group
```shell

View file

@ -13,6 +13,9 @@ from rest_framework.test import APIClient
from apps.alerts.models import AlertReceiveChannel
from apps.integrations.mixins import AlertChannelDefiningMixin
# https://github.com/pytest-dev/pytest-xdist/issues/432#issuecomment-528510433
INTEGRATION_TYPES = sorted(AlertReceiveChannel.INTEGRATION_TYPES)
class DatabaseBlocker(_DatabaseBlocker):
"""Customize pytest_django db blocker to raise OperationalError exception."""
@ -78,7 +81,7 @@ def test_integration_form_data_too_big(settings, make_organization_and_user, mak
"integration_type",
[
arc_type
for arc_type in AlertReceiveChannel.INTEGRATION_TYPES
for arc_type in INTEGRATION_TYPES
if arc_type not in ["amazon_sns", "grafana", "alertmanager", "grafana_alerting", "maintenance"]
],
)
@ -230,7 +233,7 @@ def test_integration_old_grafana_endpoint(
"integration_type",
[
arc_type
for arc_type in AlertReceiveChannel.INTEGRATION_TYPES
for arc_type in INTEGRATION_TYPES
if arc_type not in ["amazon_sns", "grafana", "alertmanager", "grafana_alerting", "maintenance"]
],
)
@ -264,7 +267,7 @@ def test_integration_universal_endpoint_not_allow_files(
"integration_type",
[
arc_type
for arc_type in AlertReceiveChannel.INTEGRATION_TYPES
for arc_type in INTEGRATION_TYPES
if arc_type not in ["amazon_sns", "grafana", "alertmanager", "grafana_alerting", "maintenance"]
],
)
@ -367,7 +370,7 @@ def test_integration_grafana_endpoint_without_db_has_alerts(
"integration_type",
[
arc_type
for arc_type in AlertReceiveChannel.INTEGRATION_TYPES
for arc_type in INTEGRATION_TYPES
if arc_type not in ["amazon_sns", "grafana", "alertmanager", "grafana_alerting", "maintenance"]
],
)
@ -467,7 +470,7 @@ def test_integration_grafana_endpoint_without_cache_has_alerts(
"integration_type",
[
arc_type
for arc_type in AlertReceiveChannel.INTEGRATION_TYPES
for arc_type in INTEGRATION_TYPES
if arc_type not in ["amazon_sns", "grafana", "alertmanager", "grafana_alerting", "maintenance"]
],
)

View file

@ -3,6 +3,7 @@ from rest_framework import serializers
from apps.alerts.models import AlertGroup
from apps.telegram.models.message import TelegramMessage
from common.api_helpers.custom_fields import UserIdField
from common.api_helpers.mixins import EagerLoadingMixin
@ -14,6 +15,8 @@ class IncidentSerializer(EagerLoadingMixin, serializers.ModelSerializer):
alerts_count = serializers.SerializerMethodField()
title = serializers.SerializerMethodField()
state = serializers.SerializerMethodField()
acknowledged_by = UserIdField(read_only=True, source="acknowledged_by_user")
resolved_by = UserIdField(read_only=True, source="resolved_by_user")
SELECT_RELATED = ["channel", "channel_filter", "slack_message", "channel__organization"]
PREFETCH_RELATED = [
@ -35,7 +38,9 @@ class IncidentSerializer(EagerLoadingMixin, serializers.ModelSerializer):
"state",
"created_at",
"resolved_at",
"resolved_by",
"acknowledged_at",
"acknowledged_by",
"title",
"permalinks",
]

View file

@ -29,6 +29,11 @@ def construct_expected_response_from_alert_groups(alert_groups):
acknowledged_at = alert_group.acknowledged_at.isoformat()
acknowledged_at = acknowledged_at[:-6] + "Z"
def user_pk_or_none(alert_group, user_field):
    """Return the public primary key of the user on ``user_field``, or None when the field is unset."""
    user = getattr(alert_group, user_field)
    return None if user is None else user.public_primary_key
results.append(
{
"id": alert_group.public_primary_key,
@ -39,6 +44,8 @@ def construct_expected_response_from_alert_groups(alert_groups):
"created_at": created_at,
"resolved_at": resolved_at,
"acknowledged_at": acknowledged_at,
"acknowledged_by": user_pk_or_none(alert_group, "acknowledged_by_user"),
"resolved_by": user_pk_or_none(alert_group, "resolved_by_user"),
"title": None,
"permalinks": {
"slack": None,
@ -95,6 +102,21 @@ def alert_group_public_api_setup(
return token, alert_groups, integrations, routes
@pytest.mark.django_db
def test_get_alert_group(alert_group_public_api_setup):
    """The detail endpoint returns the same payload as the corresponding list-endpoint entry."""
    auth_token, _, _, _ = alert_group_public_api_setup
    ordered_groups = AlertGroup.objects.all().order_by("-started_at")
    expected = construct_expected_response_from_alert_groups(ordered_groups)["results"][0]
    detail_url = reverse("api-public:alert_groups-detail", kwargs={"pk": expected["id"]})
    api_client = APIClient()
    response = api_client.get(detail_url, format="json", HTTP_AUTHORIZATION=auth_token)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == expected
@pytest.mark.django_db
def test_get_alert_groups(alert_group_public_api_setup):
token, _, _, _ = alert_group_public_api_setup

View file

@ -34,7 +34,9 @@ class IncidentByTeamFilter(ByTeamModelFieldFilterMixin, filters.FilterSet):
id = filters.CharFilter(field_name="public_primary_key")
class IncidentView(RateLimitHeadersMixin, mixins.ListModelMixin, mixins.DestroyModelMixin, GenericViewSet):
class IncidentView(
RateLimitHeadersMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, GenericViewSet
):
authentication_classes = (ApiTokenAuthentication,)
permission_classes = (IsAuthenticated,)

View file

@ -8,6 +8,7 @@ from telegram.utils.request import Request
from apps.alerts.models import AlertGroup
from apps.base.utils import live_settings
from apps.telegram.exceptions import AlertGroupTelegramMessageDoesNotExist
from apps.telegram.models import TelegramMessage
from apps.telegram.renderers.keyboard import TelegramKeyboardRenderer
from apps.telegram.renderers.message import TelegramMessageRenderer
@ -157,7 +158,10 @@ class TelegramClient:
).first()
if alert_group_message is None:
raise Exception("No alert group message found, probably it is not saved to database yet")
raise AlertGroupTelegramMessageDoesNotExist(
f"No alert group message found, probably it is not saved to database yet, "
f"alert group: {alert_group.id}"
)
include_title = message_type == TelegramMessage.LINK_TO_CHANNEL_MESSAGE
link = alert_group_message.link

View file

@ -0,0 +1,2 @@
class AlertGroupTelegramMessageDoesNotExist(Exception):
    """Raised when no Telegram message has been persisted yet for an alert group."""

View file

@ -43,10 +43,15 @@ class TelegramToUserConnector(models.Model):
telegram_channel = TelegramToOrganizationConnector.get_channel_for_alert_group(alert_group)
if telegram_channel is not None:
send_link_to_channel_message_or_fallback_to_full_alert_group.delay(
alert_group_pk=alert_group.pk,
notification_policy_pk=notification_policy.pk,
user_connector_pk=self.pk,
# Call this task with a countdown to avoid unnecessary retry when alert group telegram message hasn't been
# created yet
send_link_to_channel_message_or_fallback_to_full_alert_group.apply_async(
kwargs={
"alert_group_pk": alert_group.pk,
"notification_policy_pk": notification_policy.pk,
"user_connector_pk": self.pk,
},
countdown=3,
)
else:
self.send_full_alert_group(alert_group=alert_group, notification_policy=notification_policy)

View file

@ -74,9 +74,8 @@ class CloudAuthApiClient:
headers=headers,
json={
"claims": claims,
"extra": {
"accessPolicy": {
"scopes": scopes,
"org_id": org_id,
},
},
)

View file

@ -1,6 +1,6 @@
import json
from unittest.mock import patch
import httpretty
import pytest
from rest_framework import status
@ -16,10 +16,10 @@ def configure_cloud_auth_api_client(settings):
settings.GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN = GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN
@patch("common.cloud_auth_api.client.requests")
@pytest.mark.django_db
@pytest.mark.parametrize("response_status_code", [status.HTTP_200_OK, status.HTTP_401_UNAUTHORIZED])
def test_request_signed_token(mock_requests, make_organization, response_status_code):
@httpretty.activate(verbose=True, allow_net_connect=False)
def test_request_signed_token(make_organization, response_status_code):
mock_auth_token = ",mnasdlkjlakjoqwejroiqwejr"
mock_response_text = "error message"
@ -31,25 +31,12 @@ def test_request_signed_token(mock_requests, make_organization, response_status_
scopes = ["incident:write", "foo:bar"]
claims = {"vegetable": "carrot", "fruit": "apple"}
class MockResponse:
text = mock_response_text
def __init__(self, status_code):
self.status_code = status_code
def json(self):
return {
"data": {
"token": mock_auth_token,
},
}
mock_requests.post.return_value = MockResponse(response_status_code)
def _make_request():
return CloudAuthApiClient().request_signed_token(organization, scopes, claims)
url = f"{GRAFANA_CLOUD_AUTH_API_URL}/v1/sign"
mock_response = httpretty.Response(json.dumps({"data": {"token": mock_auth_token}}), status=response_status_code)
httpretty.register_uri(httpretty.POST, url, responses=[mock_response])
if response_status_code != status.HTTP_200_OK:
with pytest.raises(CloudAuthApiException) as excinfo:
@ -62,25 +49,26 @@ def test_request_signed_token(mock_requests, make_organization, response_status_
else:
assert _make_request() == mock_auth_token
mock_requests.post.assert_called_once_with(
url,
headers={
"Authorization": f"Bearer {GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN}",
"X-Org-ID": str(org_id),
"X-Realms": json.dumps(
[
{
"type": "stack",
"identifier": str(stack_id),
},
]
),
last_request = httpretty.last_request()
assert last_request.method == "POST"
assert last_request.url == url
# assert we're sending the right body
assert json.loads(last_request.body) == {
"claims": claims,
"accessPolicy": {
"scopes": scopes,
},
json={
"claims": claims,
"extra": {
"scopes": scopes,
"org_id": str(org_id),
}
# assert we're sending the right headers
assert last_request.headers["Authorization"] == f"Bearer {GRAFANA_CLOUD_AUTH_API_SYSTEM_TOKEN}"
assert last_request.headers["X-Org-ID"] == str(org_id)
assert last_request.headers["X-Realms"] == json.dumps(
[
{
"type": "stack",
"identifier": str(stack_id),
},
},
]
)

View file

@ -142,6 +142,21 @@ register(AlertReceiveChannelAssociatedLabelFactory)
IS_RBAC_ENABLED = os.getenv("ONCALL_TESTING_RBAC_ENABLED", "True") == "True"
@pytest.fixture(autouse=True)
def isolated_cache(settings):
    """
    Give each test its own cache namespace by setting a unique cache VERSION.

    Prevents cross-test cache pollution when tests run in parallel
    (pytest-xdist workers share the same cache backend).
    https://github.com/pytest-dev/pytest-django/issues/527#issuecomment-1115887487
    """
    # A fresh random version string per test; applied to every configured cache.
    cache_version = uuid.uuid4().hex
    for name in settings.CACHES.keys():
        settings.CACHES[name]["VERSION"] = cache_version
    # Django memoizes cache handlers; fire the settings-changed handler so the
    # new VERSION actually takes effect for this test.
    from django.test.signals import clear_cache_handlers

    clear_cache_handlers(setting="CACHES")
@pytest.fixture(autouse=True)
def mock_slack_api_call(monkeypatch):
def mock_api_call(*args, **kwargs):

View file

@ -12,3 +12,4 @@ types-PyMySQL==1.0.19.7
types-python-dateutil==2.8.19.13
types-requests==2.31.0.1
httpretty==1.1.4
pytest-xdist[psutil]==3.5.0

View file

@ -25,13 +25,12 @@ if BROKER_TYPE == BrokerTypes.RABBITMQ:
elif BROKER_TYPE == BrokerTypes.REDIS:
CELERY_BROKER_URL = REDIS_URI
# use redis as cache and celery broker on CI tests
if BROKER_TYPE != BrokerTypes.REDIS:
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
}
# always use in-memory cache for testing — this makes things a lot easier wrt pytest-xdist (parallel test execution)
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
}
}
# Dummy Telegram token (fake one)
TELEGRAM_TOKEN = "0000000000:XXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXX"

View file

@ -10,6 +10,7 @@ banned-modules =
[pytest]
# https://pytest-django.readthedocs.io/en/latest/configuring_django.html#order-of-choosing-settings
# https://pytest-django.readthedocs.io/en/latest/database.html
addopts = --no-migrations --color=yes --showlocals
# dist=load = "load balance by sending any pending test to any available environment"
addopts = -n auto --dist=load --no-migrations --color=yes --showlocals
# https://pytest-django.readthedocs.io/en/latest/faq.html#my-tests-are-not-being-found-why
python_files = tests.py test_*.py *_tests.py