Merge branch 'dev' into 666-display-ical-and-terraform-schedules-in-list

This commit is contained in:
Yulia Shanyrova 2022-10-31 12:50:21 +01:00
commit 6b0727efb9
230 changed files with 1241 additions and 3154 deletions

View file

@ -23,16 +23,26 @@ repos:
- flake8-tidy-imports
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v7.21.0
rev: v8.25.0
hooks:
- id: eslint
entry: bash -c 'cd grafana-plugin && eslint --fix ${@/grafana-plugin\//}' --
entry: bash -c 'cd grafana-plugin && eslint --max-warnings=0 --fix ${@/grafana-plugin\//}' --
types: [file]
files: ^grafana-plugin/src/.*\.(js|jsx|ts|tsx)$
additional_dependencies:
- eslint@7.21.0
- eslint@^8.25.0
- eslint-plugin-import@^2.25.4
- eslint-plugin-rulesdir@^0.2.1
- "@grafana/eslint-config@^5.0.0"
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "v2.7.1"
hooks:
- id: prettier
types_or: [css, javascript, jsx, ts, tsx, json]
files: ^grafana-plugin/src
additional_dependencies:
- prettier@^2.7.1
- repo: https://github.com/thibaudcolas/pre-commit-stylelint
rev: v13.13.1
@ -43,4 +53,6 @@ repos:
files: ^grafana-plugin/src/.*\.css$
additional_dependencies:
- stylelint@^13.13.1
- stylelint-prettier@^2.0.0
- stylelint-config-standard@^22.0.0
- stylelint-config-prettier@^9.0.3

View file

@ -1,6 +1,33 @@
# Change Log
## v1.0.46 (2022-10-28)
- Bug fixes
- remove `POST /api/internal/v1/custom_buttons/{id}/action` endpoint
## v1.0.45 (2022-10-27)
- Bug fix to revert commit which removed unused engine code
## v1.0.44 (2022-10-26)
- Bug fix for an issue that was affecting phone verification
## v1.0.43 (2022-10-25)
- Bug fixes
## v1.0.42 (2022-10-24)
- Fix posting resolution notes to Slack
## v1.0.41 (2022-10-24)
- Add personal email notifications
- Bug fixes
## v1.0.40 (2022-10-05)
- Improved database and celery backends support
- Added script to import PagerDuty users to Grafana
- Bug fixes

View file

@ -20,7 +20,7 @@ Developer-friendly incident response with brilliant Slack integration.
We prepared multiple environments: [production](https://grafana.com/docs/grafana-cloud/oncall/open-source/#production-environment), [developer](DEVELOPER.md) and hobby:
1. Download docker-compose.yaml:
1. Download [`docker-compose.yml`](docker-compose.yml):
```bash
curl -fsSL https://raw.githubusercontent.com/grafana/oncall/dev/docker-compose.yml -o docker-compose.yml
@ -31,9 +31,7 @@ curl -fsSL https://raw.githubusercontent.com/grafana/oncall/dev/docker-compose.y
```bash
echo "DOMAIN=http://localhost:8080
COMPOSE_PROFILES=with_grafana # Remove this line if you want to use existing grafana
SECRET_KEY=my_random_secret_must_be_more_than_32_characters_long
RABBITMQ_PASSWORD=rabbitmq_secret_pw
MYSQL_PASSWORD=mysql_secret_pw" > .env
SECRET_KEY=my_random_secret_must_be_more_than_32_characters_long" > .env
```
3. Launch services:

View file

@ -0,0 +1,162 @@
version: "3.8"
x-environment:
&oncall-environment
BASE_URL: $DOMAIN
SECRET_KEY: $SECRET_KEY
RABBITMQ_USERNAME: "rabbitmq"
RABBITMQ_PASSWORD: $RABBITMQ_PASSWORD
RABBITMQ_HOST: "rabbitmq"
RABBITMQ_PORT: "5672"
RABBITMQ_DEFAULT_VHOST: "/"
MYSQL_PASSWORD: $MYSQL_PASSWORD
MYSQL_DB_NAME: oncall_hobby
MYSQL_USER: ${MYSQL_USER:-root}
MYSQL_HOST: ${MYSQL_HOST:-mysql}
MYSQL_PORT: 3306
REDIS_URI: redis://redis:6379/0
DJANGO_SETTINGS_MODULE: settings.hobby
CELERY_WORKER_QUEUE: "default,critical,long,slack,telegram,webhook,retry,celery"
CELERY_WORKER_CONCURRENCY: "1"
CELERY_WORKER_MAX_TASKS_PER_CHILD: "100"
CELERY_WORKER_SHUTDOWN_INTERVAL: "65m"
CELERY_WORKER_BEAT_ENABLED: "True"
services:
engine:
image: grafana/oncall
restart: always
ports:
- "8080:8080"
command: >
sh -c "uwsgi --ini uwsgi.ini"
environment: *oncall-environment
depends_on:
mysql:
condition: service_healthy
oncall_db_migration:
condition: service_completed_successfully
rabbitmq:
condition: service_healthy
redis:
condition: service_started
celery:
image: grafana/oncall
restart: always
command: sh -c "./celery_with_exporter.sh"
environment: *oncall-environment
depends_on:
mysql:
condition: service_healthy
oncall_db_migration:
condition: service_completed_successfully
rabbitmq:
condition: service_healthy
redis:
condition: service_started
oncall_db_migration:
image: grafana/oncall
command: python manage.py migrate --noinput
environment: *oncall-environment
depends_on:
mysql:
condition: service_healthy
rabbitmq:
condition: service_healthy
mysql:
image: mysql:5.7
platform: linux/x86_64
command: --default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
restart: always
expose:
- 3306
volumes:
- dbdata:/var/lib/mysql
environment:
MYSQL_ROOT_PASSWORD: $MYSQL_PASSWORD
MYSQL_DATABASE: oncall_hobby
deploy:
resources:
limits:
memory: 500m
cpus: '0.5'
healthcheck:
test: "mysql -uroot -p$MYSQL_PASSWORD oncall_hobby -e 'select 1'"
timeout: 20s
retries: 10
redis:
image: redis
restart: always
expose:
- 6379
deploy:
resources:
limits:
memory: 100m
cpus: '0.1'
rabbitmq:
image: "rabbitmq:3.7.15-management"
restart: always
hostname: rabbitmq
volumes:
- rabbitmqdata:/var/lib/rabbitmq
environment:
RABBITMQ_DEFAULT_USER: "rabbitmq"
RABBITMQ_DEFAULT_PASS: $RABBITMQ_PASSWORD
RABBITMQ_DEFAULT_VHOST: "/"
deploy:
resources:
limits:
memory: 1000m
cpus: '0.5'
healthcheck:
test: rabbitmq-diagnostics -q ping
interval: 30s
timeout: 30s
retries: 3
mysql_to_create_grafana_db:
image: mysql:5.7
platform: linux/x86_64
command: bash -c "mysql -h ${MYSQL_HOST:-mysql} -uroot -p${MYSQL_PASSWORD:?err} -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'"
depends_on:
mysql:
condition: service_healthy
profiles:
- with_grafana
grafana:
image: "grafana/grafana:9.0.0-beta3"
restart: always
ports:
- "3000:3000"
environment:
GF_DATABASE_TYPE: mysql
GF_DATABASE_HOST: ${MYSQL_HOST:-mysql}
GF_DATABASE_USER: ${MYSQL_USER:-root}
GF_DATABASE_PASSWORD: ${MYSQL_PASSWORD:?err}
GF_SECURITY_ADMIN_USER: ${GRAFANA_USER:-admin}
GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_PASSWORD:-admin}
GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: grafana-oncall-app
GF_INSTALL_PLUGINS: grafana-oncall-app
deploy:
resources:
limits:
memory: 500m
cpus: '0.5'
depends_on:
mysql_to_create_grafana_db:
condition: service_completed_successfully
mysql:
condition: service_healthy
profiles:
- with_grafana
volumes:
dbdata:
rabbitmqdata:

View file

@ -2,18 +2,10 @@ version: "3.8"
x-environment:
&oncall-environment
DATABASE_TYPE: sqlite3
BROKER_TYPE: redis
BASE_URL: $DOMAIN
SECRET_KEY: $SECRET_KEY
RABBITMQ_USERNAME: "rabbitmq"
RABBITMQ_PASSWORD: $RABBITMQ_PASSWORD
RABBITMQ_HOST: "rabbitmq"
RABBITMQ_PORT: "5672"
RABBITMQ_DEFAULT_VHOST: "/"
MYSQL_PASSWORD: $MYSQL_PASSWORD
MYSQL_DB_NAME: oncall_hobby
MYSQL_USER: ${MYSQL_USER:-root}
MYSQL_HOST: ${MYSQL_HOST:-mysql}
MYSQL_PORT: 3306
REDIS_URI: redis://redis:6379/0
DJANGO_SETTINGS_MODULE: settings.hobby
CELERY_WORKER_QUEUE: "default,critical,long,slack,telegram,webhook,retry,celery"
@ -31,104 +23,54 @@ services:
command: >
sh -c "uwsgi --ini uwsgi.ini"
environment: *oncall-environment
volumes:
- oncall_data:/var/lib/oncall
depends_on:
mysql:
condition: service_healthy
oncall_db_migration:
condition: service_completed_successfully
rabbitmq:
condition: service_healthy
redis:
condition: service_started
condition: service_healthy
celery:
image: grafana/oncall
restart: always
command: sh -c "./celery_with_exporter.sh"
environment: *oncall-environment
volumes:
- oncall_data:/var/lib/oncall
depends_on:
mysql:
condition: service_healthy
oncall_db_migration:
condition: service_completed_successfully
rabbitmq:
condition: service_healthy
redis:
condition: service_started
condition: service_healthy
oncall_db_migration:
image: grafana/oncall
command: python manage.py migrate --noinput
environment: *oncall-environment
depends_on:
mysql:
condition: service_healthy
rabbitmq:
condition: service_healthy
mysql:
image: mysql:5.7
platform: linux/x86_64
command: --default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
restart: always
expose:
- 3306
volumes:
- dbdata:/var/lib/mysql
environment:
MYSQL_ROOT_PASSWORD: $MYSQL_PASSWORD
MYSQL_DATABASE: oncall_hobby
deploy:
resources:
limits:
memory: 500m
cpus: '0.5'
healthcheck:
test: "mysql -uroot -p$MYSQL_PASSWORD oncall_hobby -e 'select 1'"
timeout: 20s
retries: 10
- oncall_data:/var/lib/oncall
depends_on:
redis:
condition: service_healthy
redis:
image: redis
restart: always
expose:
- 6379
deploy:
resources:
limits:
memory: 100m
cpus: '0.1'
rabbitmq:
image: "rabbitmq:3.7.15-management"
restart: always
hostname: rabbitmq
volumes:
- rabbitmqdata:/var/lib/rabbitmq
environment:
RABBITMQ_DEFAULT_USER: "rabbitmq"
RABBITMQ_DEFAULT_PASS: $RABBITMQ_PASSWORD
RABBITMQ_DEFAULT_VHOST: "/"
- redis_data:/data
deploy:
resources:
limits:
memory: 1000m
memory: 500m
cpus: '0.5'
healthcheck:
test: rabbitmq-diagnostics -q ping
interval: 30s
timeout: 30s
retries: 3
mysql_to_create_grafana_db:
image: mysql:5.7
platform: linux/x86_64
command: bash -c "mysql -h ${MYSQL_HOST:-mysql} -uroot -p${MYSQL_PASSWORD:?err} -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'"
depends_on:
mysql:
condition: service_healthy
profiles:
- with_grafana
test: ["CMD", "redis-cli", "ping"]
timeout: 5s
interval: 5s
retries: 10
grafana:
image: "grafana/grafana:9.0.0-beta3"
@ -136,27 +78,21 @@ services:
ports:
- "3000:3000"
environment:
GF_DATABASE_TYPE: mysql
GF_DATABASE_HOST: ${MYSQL_HOST:-mysql}
GF_DATABASE_USER: ${MYSQL_USER:-root}
GF_DATABASE_PASSWORD: ${MYSQL_PASSWORD:?err}
GF_SECURITY_ADMIN_USER: ${GRAFANA_USER:-admin}
GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_PASSWORD:-admin}
GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: grafana-oncall-app
GF_INSTALL_PLUGINS: grafana-oncall-app
volumes:
- grafana_data:/var/lib/grafana
deploy:
resources:
limits:
memory: 500m
cpus: '0.5'
depends_on:
mysql_to_create_grafana_db:
condition: service_completed_successfully
mysql:
condition: service_healthy
profiles:
- with_grafana
volumes:
dbdata:
rabbitmqdata:
grafana_data:
oncall_data:
redis_data:

View file

@ -9,30 +9,30 @@ keywords:
- on-call
- Alertmanager
- Prometheus
title: Connect Alert Manager to Grafana OnCall
title: Connect Alertmanager to Grafana OnCall
canonical: "https://grafana.com/docs/oncall/latest/integrations/available-integrations/add-alertmanager/"
weight: 300
---
# Connect AlertManager to Grafana OnCall
# Connect Alertmanager to Grafana OnCall
The AlertManager integration for Grafana OnCall handles alerts sent by client applications such as the Prometheus server.
The Alertmanager integration for Grafana OnCall handles alerts sent by client applications such as the Prometheus server.
Grafana OnCall provides<!--[grouping](#alertmanager-grouping-amp-oncall-grouping)--> grouping abilities when processing alerts from Alert Manager, including initial deduplicating, grouping, and routing the alerts to Grafana OnCall.
Grafana OnCall provides<!--[grouping](#alertmanager-grouping-amp-oncall-grouping)--> grouping abilities when processing alerts from Alertmanager, including initial deduplicating, grouping, and routing the alerts to Grafana OnCall.
## Configure AlertManager integration for Grafana OnCall
## Configure Alertmanager integration for Grafana OnCall
You must have an Admin role to create integrations in Grafana OnCall.
1. In the **Integrations** tab, click **+ New integration for receiving alerts**.
2. Select **AlertManager** from the list of available integrations.
2. Select **Alertmanager** from the list of available integrations.
3. Follow the instructions in the **How to connect** window to get your unique integration URL and identify next steps.
<!--![123](../_images/connect-new-monitoring.png)-->
## Configure AlertManager
## Configure Alertmanager
Update the `receivers` section of your Alertmanager configuration to use a unique integration URL:
@ -48,11 +48,11 @@ receivers:
send_resolved: true
```
## Configure grouping with AlertManager and Grafana OnCall
## Configure grouping with Alertmanager and Grafana OnCall
You can use the alert grouping mechanics of AlertManager and Grafana OnCall to configure your alert grouping preferences.
You can use the alert grouping mechanics of Alertmanager and Grafana OnCall to configure your alert grouping preferences.
AlertManager offers three alert grouping options:
Alertmanager offers three alert grouping options:
- `group_by` provides two options, `instance` or `job`.
- `group_wait` sets the length of time to initially wait before sending a notification for a particular group of alerts. For example, `group_wait` can be set to 45s.
@ -61,7 +61,7 @@ AlertManager offers three alert grouping options:
- `group_interval` sets the length of time to wait before sending notifications about new alerts that have been added to a group of alerts that have been previously alerted on. This setting is usually set to five minutes or more.
During high alert volume periods, AlertManager will send alerts at each `group_interval`, which can mean a lot of distraction. Grafana OnCall grouping will help manage this in the following ways:
During high alert volume periods, Alertmanager will send alerts at each `group_interval`, which can mean a lot of distraction. Grafana OnCall grouping will help manage this in the following ways:
- Grafana OnCall groups alerts based on the first label of each alert.

View file

@ -49,8 +49,10 @@ class AlertGroupTelegramRenderer(AlertGroupBaseRenderer):
status_verbose = self.alert_group.get_resolve_text()
elif self.alert_group.acknowledged:
status_verbose = self.alert_group.get_acknowledge_text()
text = f"{status_emoji} #{self.alert_group.inside_organization_number}, {title}\n"
# The first line is an invisible link containing the organization id.
# It is needed to add organization info to the Telegram message for the oncall-gateway.
text = f"<a href='{self.alert_group.channel.organization.web_link_with_id}'>&#8205;</a>"
text += f"{status_emoji} #{self.alert_group.inside_organization_number}, {title}\n"
text += f"{status_verbose}, alerts: {alerts_count_str}\n"
text += f"Source: {self.alert_group.channel.short_name}\n"
text += f"{self.alert_group.web_link}"

View file

@ -596,7 +596,7 @@ class IncidentLogBuilder:
except ValueError:
pass
else:
result += f"send {backend.label.lower()} message to {user_verbal}"
result += f"send {backend.label.lower() if backend else ''} message to {user_verbal}"
if not result:
result += f"inviting {user_verbal} but notification channel is unspecified"
return result

View file

@ -232,21 +232,13 @@ class Alert(models.Model):
return distinction
@property
def skip_signal(self):
try:
_ = self.migrator_lock
return True
except Alert.migrator_lock.RelatedObjectDoesNotExist:
return False
def listen_for_alert_model_save(sender, instance, created, *args, **kwargs):
AlertGroup = apps.get_model("alerts", "AlertGroup")
"""
Here we invoke AlertShootingStep by model saving action.
"""
if created and instance.group.maintenance_uuid is None and not instance.skip_signal:
if created and instance.group.maintenance_uuid is None:
# RFCT - why additional save?
instance.save()

View file

@ -37,7 +37,9 @@ def notify_group_task(alert_group_pk, escalation_policy_snapshot_order=None):
escalation_policy_step = escalation_policy_snapshot.step
usergroup = escalation_policy_snapshot.notify_to_group
usergroup_users = usergroup.get_users_from_members_for_organization(organization)
usergroup_users = []
if usergroup is not None:
usergroup_users = usergroup.get_users_from_members_for_organization(organization)
if len(usergroup_users) == 0:
log_record = AlertGroupLogRecord(

View file

@ -6,6 +6,7 @@ import pytz
from django.utils import timezone
from apps.alerts.tasks.notify_ical_schedule_shift import notify_ical_schedule_shift
from apps.schedules.ical_utils import memoized_users_in_ical
from apps.schedules.models import OnCallScheduleICal
ICAL_DATA = """
@ -80,6 +81,8 @@ def test_next_shift_notification_long_shifts(
organization, _, _, _ = make_organization_and_user_with_slack_identities()
make_user(organization=organization, username="user1")
make_user(organization=organization, username="user2")
# clear the user pks <-> organization cache (it persists between tests)
memoized_users_in_ical.cache_clear()
ical_schedule = make_schedule(
organization,

View file

@ -447,39 +447,6 @@ def test_custom_button_delete_permissions(
assert response.status_code == expected_status
@pytest.mark.django_db
@pytest.mark.parametrize(
"role,expected_status",
[
(Role.ADMIN, status.HTTP_200_OK),
(Role.EDITOR, status.HTTP_200_OK),
(Role.VIEWER, status.HTTP_403_FORBIDDEN),
],
)
def test_custom_button_action_permissions(
make_organization_and_user_with_plugin_token,
make_custom_action,
make_user_auth_headers,
role,
expected_status,
):
organization, user, token = make_organization_and_user_with_plugin_token(role)
custom_button = make_custom_action(organization=organization)
client = APIClient()
url = reverse("api-internal:custom_button-action", kwargs={"pk": custom_button.public_primary_key})
with patch(
"apps.api.views.custom_button.CustomButtonView.action",
return_value=Response(
status=status.HTTP_200_OK,
),
):
response = client.post(url, format="json", **make_user_auth_headers(user, token))
assert response.status_code == expected_status
@pytest.mark.django_db
def test_get_custom_button_from_other_team_with_flag(
make_organization_and_user_with_plugin_token,

View file

@ -134,6 +134,84 @@ def test_get_list_schedules(
assert response.json() == expected_payload
@pytest.mark.django_db
def test_get_list_schedules_by_type(
schedule_internal_api_setup, make_escalation_chain, make_escalation_policy, make_user_auth_headers
):
user, token, calendar_schedule, ical_schedule, web_schedule, slack_channel = schedule_internal_api_setup
client = APIClient()
# setup escalation chain linked to web schedule
escalation_chain = make_escalation_chain(user.organization)
make_escalation_policy(
escalation_chain=escalation_chain,
escalation_policy_step=EscalationPolicy.STEP_NOTIFY_SCHEDULE,
notify_schedule=web_schedule,
)
expected_payload = [
{
"id": calendar_schedule.public_primary_key,
"type": 0,
"team": None,
"name": "test_calendar_schedule",
"time_zone": "UTC",
"slack_channel": None,
"user_group": None,
"warnings": [],
"ical_url_overrides": None,
"on_call_now": [],
"has_gaps": False,
"mention_oncall_next": False,
"mention_oncall_start": True,
"notify_empty_oncall": 0,
"notify_oncall_shift_freq": 1,
"number_of_escalation_chains": 0,
},
{
"id": ical_schedule.public_primary_key,
"type": 1,
"team": None,
"name": "test_ical_schedule",
"ical_url_primary": ICAL_URL,
"ical_url_overrides": None,
"slack_channel": None,
"user_group": None,
"warnings": [],
"on_call_now": [],
"has_gaps": False,
"mention_oncall_next": False,
"mention_oncall_start": True,
"notify_empty_oncall": 0,
"notify_oncall_shift_freq": 1,
"number_of_escalation_chains": 0,
},
{
"id": web_schedule.public_primary_key,
"type": 2,
"time_zone": "UTC",
"team": None,
"name": "test_web_schedule",
"slack_channel": None,
"user_group": None,
"warnings": [],
"on_call_now": [],
"has_gaps": False,
"mention_oncall_next": False,
"mention_oncall_start": True,
"notify_empty_oncall": 0,
"notify_oncall_shift_freq": 1,
"number_of_escalation_chains": 1,
},
]
for expected, schedule_type in enumerate(("api", "ical", "web")):
url = reverse("api-internal:schedule-list") + "?type={}".format(schedule_type)
response = client.get(url, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
assert response.json() == [expected_payload[expected]]
@pytest.mark.django_db
def test_get_detail_calendar_schedule(schedule_internal_api_setup, make_user_auth_headers):
user, token, calendar_schedule, _, _, _ = schedule_internal_api_setup

View file

@ -1,17 +1,12 @@
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.exceptions import NotFound
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from apps.alerts.models import AlertGroup, CustomButton
from apps.alerts.tasks.custom_button_result import custom_button_result
from apps.api.permissions import MODIFY_ACTIONS, READ_ACTIONS, ActionPermission, AnyRole, IsAdmin, IsAdminOrEditor
from apps.alerts.models import CustomButton
from apps.api.permissions import MODIFY_ACTIONS, READ_ACTIONS, ActionPermission, AnyRole, IsAdmin
from apps.api.serializers.custom_button import CustomButtonSerializer
from apps.auth_token.auth import PluginAuthentication
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.mixins import PublicPrimaryKeyMixin, TeamFilteringMixin
from common.insight_log import EntityEvent, write_resource_insight_log
@ -21,7 +16,6 @@ class CustomButtonView(TeamFilteringMixin, PublicPrimaryKeyMixin, ModelViewSet):
permission_classes = (IsAuthenticated, ActionPermission)
action_permissions = {
IsAdmin: MODIFY_ACTIONS,
IsAdminOrEditor: ("action",),
AnyRole: READ_ACTIONS,
}
@ -85,19 +79,3 @@ class CustomButtonView(TeamFilteringMixin, PublicPrimaryKeyMixin, ModelViewSet):
event=EntityEvent.DELETED,
)
instance.delete()
@action(detail=True, methods=["post"])
def action(self, request, pk):
alert_group_id = request.query_params.get("alert_group", None)
if alert_group_id is not None:
custom_button = self.get_object()
try:
alert_group = AlertGroup.unarchived_objects.get(
public_primary_key=alert_group_id, channel=custom_button.alert_receive_channel
)
custom_button_result.apply_async((custom_button.pk, alert_group.pk, self.request.user.pk))
except AlertGroup.DoesNotExist:
raise BadRequest(detail="AlertGroup does not exist or archived")
return Response(status=status.HTTP_200_OK)
else:
raise BadRequest(detail="AlertGroup is required")

View file

@ -59,7 +59,9 @@ class GetTelegramVerificationCode(APIView):
telegram_client = TelegramClient()
bot_username = telegram_client.api_client.username
bot_link = f"https://t.me/{bot_username}"
return Response({"telegram_code": str(new_code.uuid), "bot_link": bot_link}, status=status.HTTP_200_OK)
return Response(
{"telegram_code": str(new_code.uuid_with_org_id), "bot_link": bot_link}, status=status.HTTP_200_OK
)
class GetChannelVerificationCode(APIView):

View file

@ -24,7 +24,7 @@ from apps.api.serializers.schedule_polymorphic import (
from apps.auth_token.auth import PluginAuthentication
from apps.auth_token.constants import SCHEDULE_EXPORT_TOKEN_NAME
from apps.auth_token.models import ScheduleExportAuthToken
from apps.schedules.models import OnCallSchedule
from apps.schedules.models import OnCallSchedule, OnCallScheduleCalendar, OnCallScheduleICal, OnCallScheduleWeb
from apps.slack.models import SlackChannel
from apps.slack.tasks import update_slack_user_group_for_schedules
from common.api_helpers.exceptions import BadRequest, Conflict
@ -42,6 +42,8 @@ EVENTS_FILTER_BY_ROTATION = "rotation"
EVENTS_FILTER_BY_OVERRIDE = "override"
EVENTS_FILTER_BY_FINAL = "final"
SCHEDULE_TYPE_TO_CLASS = {"api": OnCallScheduleCalendar, "ical": OnCallScheduleICal, "web": OnCallScheduleWeb}
class ScheduleView(
TeamFilteringMixin,
@ -123,6 +125,7 @@ class ScheduleView(
def get_queryset(self):
is_short_request = self.request.query_params.get("short", "false") == "true"
filter_by_type = self.request.query_params.get("type")
organization = self.request.auth.organization
queryset = OnCallSchedule.objects.filter(organization=organization, team=self.request.user.current_team).defer(
# avoid requesting large text fields which are not used when listing schedules
@ -134,6 +137,8 @@ class ScheduleView(
if not is_short_request:
queryset = self._annotate_queryset(queryset)
queryset = self.serializer_class.setup_eager_loading(queryset)
if filter_by_type is not None and filter_by_type in SCHEDULE_TYPE_TO_CLASS:
queryset = queryset.filter().instance_of(SCHEDULE_TYPE_TO_CLASS[filter_by_type])
return queryset
def perform_create(self, serializer):

View file

@ -374,7 +374,9 @@ class UserView(
bot_username = telegram_client.api_client.username
bot_link = f"https://t.me/{bot_username}"
return Response({"telegram_code": str(new_code.uuid), "bot_link": bot_link}, status=status.HTTP_200_OK)
return Response(
{"telegram_code": str(new_code.uuid_with_org_id), "bot_link": bot_link}, status=status.HTTP_200_OK
)
@action(detail=True, methods=["post"])
def unlink_slack(self, request, pk):

View file

@ -1,7 +0,0 @@
# amixr api url
REQUEST_URL = "https://amixr.io/api/v1"
# migration status
NOT_STARTED = "not_started"
IN_PROGRESS = "in_progress"
FINISHED = "finished"

View file

@ -1,33 +0,0 @@
# Generated by Django 3.2.5 on 2022-05-31 14:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('alerts', '0001_squashed_initial'),
]
operations = [
migrations.CreateModel(
name='AmixrMigrationTaskStatus',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('task_id', models.CharField(db_index=True, max_length=500)),
('name', models.CharField(max_length=500)),
('started_at', models.DateTimeField(auto_now_add=True)),
('is_finished', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='LockedAlert',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('alert', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='migrator_lock', to='alerts.alert')),
],
),
]

View file

@ -1,22 +0,0 @@
# Generated by Django 3.2.5 on 2022-05-31 14:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('user_management', '0001_squashed_initial'),
('migration_tool', '0001_squashed_initial'),
]
operations = [
migrations.AddField(
model_name='amixrmigrationtaskstatus',
name='organization',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='migration_tasks', to='user_management.organization'),
),
]

View file

@ -1,2 +0,0 @@
from .amixr_migration_task_status import AmixrMigrationTaskStatus # noqa: F401
from .locked_alert import LockedAlert # noqa: F401

View file

@ -1,27 +0,0 @@
from celery import uuid as celery_uuid
from django.db import models
class AmixrMigrationTaskStatusQuerySet(models.QuerySet):
def get_migration_task_id(self, organization_id, name):
migrate_schedules_task_id = celery_uuid()
self.model(organization_id=organization_id, name=name, task_id=migrate_schedules_task_id).save()
return migrate_schedules_task_id
class AmixrMigrationTaskStatus(models.Model):
objects = AmixrMigrationTaskStatusQuerySet.as_manager()
task_id = models.CharField(max_length=500, db_index=True)
name = models.CharField(max_length=500)
organization = models.ForeignKey(
to="user_management.Organization",
related_name="migration_tasks",
on_delete=models.deletion.CASCADE,
)
started_at = models.DateTimeField(auto_now_add=True)
is_finished = models.BooleanField(default=False)
def update_status_to_finished(self):
self.is_finished = True
self.save(update_fields=["is_finished"])

View file

@ -1,5 +0,0 @@
from django.db import models
class LockedAlert(models.Model):
alert = models.OneToOneField("alerts.Alert", on_delete=models.CASCADE, related_name="migrator_lock")

View file

@ -1,612 +0,0 @@
import logging
from celery.utils.log import get_task_logger
from django.apps import apps
from django.conf import settings
from django.db import transaction
from django.utils import timezone
from rest_framework import exceptions
from apps.alerts.models import Alert, AlertGroup, AlertReceiveChannel, ResolutionNote
from apps.migration_tool.models import AmixrMigrationTaskStatus, LockedAlert
from apps.migration_tool.utils import convert_string_to_datetime, get_data_with_respect_to_pagination
from apps.public_api.serializers import PersonalNotificationRuleSerializer
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
logger = get_task_logger(__name__)
logger.setLevel(logging.DEBUG)
@shared_dedicated_queue_retry_task(
autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_from_old_amixr(api_token, organization_id, user_id):
logger.info(f"Start migration task from amixr for organization {organization_id}")
users = get_users(organization_id, api_token)
migrate_schedules_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
organization_id=organization_id, name=migrate_schedules.name
)
migrate_schedules.apply_async(
(api_token, organization_id, user_id, users),
task_id=migrate_schedules_task_id,
countdown=5,
)
start_migration_user_data_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
organization_id=organization_id, name=start_migration_user_data.name
)
start_migration_user_data.apply_async(
(api_token, organization_id, users),
task_id=start_migration_user_data_task_id,
)
logger.info(f"Start 'start_migration_from_old_amixr' task for organization {organization_id}")
def get_users(organization_id, api_token):
Organization = apps.get_model("user_management", "Organization")
organization = Organization.objects.get(pk=organization_id)
# get all users from old amixr
old_users = get_data_with_respect_to_pagination(api_token, "users")
old_users_emails = [old_user["email"] for old_user in old_users]
# find users in Grafana OnCall by email
grafana_users = organization.users.filter(email__in=old_users_emails).values("email", "id")
grafana_users_dict = {
gu["email"]: {
"id": gu["id"],
}
for gu in grafana_users
}
users = {}
for old_user in old_users:
if old_user["email"] in grafana_users_dict:
users[old_user["id"]] = grafana_users_dict[old_user["email"]]
users[old_user["id"]]["old_verified_phone_number"] = old_user.get("verified_phone_number")
users[old_user["id"]]["old_public_primary_key"] = old_user["id"]
# Example result:
# users = {
# "OLD_PUBLIC_PK": {
# "id": 1, # user pk in OnCall db
# "old_verified_phone_number": "1234",
# "old_public_primary_key": "OLD_PUBLIC_PK",
# },
# ...
# }
return users
@shared_dedicated_queue_retry_task(
autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_schedules(api_token, organization_id, user_id, users):
logger.info(f"Started migration schedules for organization {organization_id}")
OnCallScheduleICal = apps.get_model("schedules", "OnCallScheduleICal")
Organization = apps.get_model("user_management", "Organization")
organization = Organization.objects.get(pk=organization_id)
schedules = get_data_with_respect_to_pagination(api_token, "schedules")
existing_schedules_names = set(organization.oncall_schedules.values_list("name", flat=True))
created_schedules = {}
for schedule in schedules:
if not schedule["ical_url"] or schedule["name"] in existing_schedules_names:
continue
new_schedule = OnCallScheduleICal(
organization=organization,
name=schedule["name"],
ical_url_primary=schedule["ical_url"],
team_id=None,
)
new_schedule.save()
created_schedules[schedule["id"]] = {
"id": new_schedule.pk,
}
# Example result:
# created_schedules = {
# "OLD_PUBLIC_PK": {
# "id": 1, # schedule pk in OnCall db
# },
# ...
# }
migrate_integrations_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
organization_id=organization_id, name=migrate_integrations.name
)
migrate_integrations.apply_async(
(api_token, organization_id, user_id, created_schedules, users), task_id=migrate_integrations_task_id
)
current_task_id = migrate_schedules.request.id
AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
logger.info(f"Finished migration schedules for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_integrations(api_token, organization_id, user_id, created_schedules, users):
    """Recreate old Amixr integrations (and their templates) in this organization.

    Skips integrations whose verbal name already exists, whose type cannot be
    mapped to a local integration type, or whose type is not a web-installable
    integration. For each created integration, schedules a ``migrate_routes``
    subtask. Marks its own task-status row finished at the end.
    """
    logger.info(f"Started migration integrations for organization {organization_id}")
    Organization = apps.get_model("user_management", "Organization")
    organization = Organization.objects.get(pk=organization_id)
    integrations = get_data_with_respect_to_pagination(api_token, "integrations")
    existing_integrations_names = set(organization.alert_receive_channels.values_list("verbal_name", flat=True))
    for integration in integrations:
        if integration["name"] in existing_integrations_names:
            continue
        try:
            # Reverse-lookup the local integration type by the old API's type string;
            # IndexError means the type is unknown here and the integration is skipped.
            integration_type = [
                key
                for key, value in AlertReceiveChannel.INTEGRATIONS_TO_REVERSE_URL_MAP.items()
                if value == integration["type"]
            ][0]
        except IndexError:
            continue
        if integration_type not in AlertReceiveChannel.WEB_INTEGRATION_CHOICES:
            continue
        new_integration = AlertReceiveChannel.create(
            organization=organization,
            verbal_name=integration["name"],
            integration=integration_type,
            author_id=user_id,
            slack_title_template=integration["templates"]["slack"]["title"],
            slack_message_template=integration["templates"]["slack"]["message"],
            slack_image_url_template=integration["templates"]["slack"]["image_url"],
            sms_title_template=integration["templates"]["sms"]["title"],
            phone_call_title_template=integration["templates"]["phone_call"]["title"],
            web_title_template=integration["templates"]["web"]["title"],
            web_message_template=integration["templates"]["web"]["message"],
            web_image_url_template=integration["templates"]["web"]["image_url"],
            email_title_template=integration["templates"]["email"]["title"],
            email_message_template=integration["templates"]["email"]["message"],
            telegram_title_template=integration["templates"]["telegram"]["title"],
            telegram_message_template=integration["templates"]["telegram"]["message"],
            telegram_image_url_template=integration["templates"]["telegram"]["image_url"],
            grouping_id_template=integration["templates"]["grouping_key"],
            resolve_condition_template=integration["templates"]["resolve_signal"],
            acknowledge_condition_template=integration["templates"]["acknowledge_signal"],
        )
        # collect integration data in a dict (passed to migrate_routes so the
        # subtask does not need to re-query the old API for this mapping)
        integration_data = {
            "id": new_integration.pk,
            "verbal_name": new_integration.verbal_name,
            "old_public_primary_key": integration["id"],
        }
        migrate_routes_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
            organization_id=organization_id, name=migrate_routes.name
        )
        migrate_routes.apply_async(
            (api_token, organization_id, users, created_schedules, integration_data),
            task_id=migrate_routes_task_id,
            countdown=3,
        )
    current_task_id = migrate_integrations.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration integrations for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_routes(api_token, organization_id, users, created_schedules, integration_data):
    """Recreate the old integration's routes and attach an escalation chain to each.

    The old "last route" maps onto the integration's existing default ChannelFilter
    (only its escalation chain is updated); every other route becomes a new
    ChannelFilter unless its filtering term already exists. For each processed
    route, two subtasks are scheduled: migrate_escalation_policies and
    start_migration_alert_groups.
    """
    logger.info(f"Start migration routes for organization {organization_id}")
    AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel")
    ChannelFilter = apps.get_model("alerts", "ChannelFilter")
    Organization = apps.get_model("user_management", "Organization")
    organization = Organization.objects.get(pk=organization_id)
    integration = AlertReceiveChannel.objects.filter(pk=integration_data["id"]).first()
    if integration:
        url = "routes?integration_id={}".format(integration_data["old_public_primary_key"])
        routes = get_data_with_respect_to_pagination(api_token, url)
        default_route = integration.channel_filters.get(is_default=True)
        existing_chain_names = set(organization.escalation_chains.values_list("name", flat=True))
        existing_route_filtering_term = set(integration.channel_filters.values_list("filtering_term", flat=True))
        for route in routes:
            is_default_route = route["is_the_last_route"]
            filtering_term = route["routing_regex"]
            if is_default_route:
                # " - default" suffix is 10 chars; truncate the name to 90 so the
                # resulting chain name fits the same 100-char budget as below.
                escalation_chain_name = f"{integration_data['verbal_name'][:90]} - default"
            else:
                if filtering_term in existing_route_filtering_term:
                    continue
                escalation_chain_name = f"{integration_data['verbal_name']} - {filtering_term}"[:100]
            # Reuse an existing chain with the same name instead of duplicating it.
            if escalation_chain_name in existing_chain_names:
                escalation_chain = organization.escalation_chains.get(name=escalation_chain_name)
            else:
                escalation_chain = organization.escalation_chains.create(name=escalation_chain_name)
            if is_default_route:
                new_route = default_route
                new_route.escalation_chain = escalation_chain
                new_route.save(update_fields=["escalation_chain"])
            else:
                new_route = ChannelFilter(
                    alert_receive_channel_id=integration_data["id"],
                    escalation_chain_id=escalation_chain.pk,
                    filtering_term=filtering_term,
                    order=route["position"],
                )
                new_route.save()
            # Per-route payload handed to the two subtasks below.
            route_data = {
                "id": new_route.pk,
                "old_public_primary_key": route["id"],
                "escalation_chain": {
                    "id": escalation_chain.pk,
                },
            }
            migrate_escalation_policies_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
                organization_id=organization_id, name=migrate_escalation_policies.name
            )
            migrate_escalation_policies.apply_async(
                (api_token, organization_id, users, created_schedules, route_data),
                task_id=migrate_escalation_policies_task_id,
                countdown=2,
            )
            start_migration_alert_groups_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
                organization_id=organization_id, name=start_migration_alert_groups.name
            )
            start_migration_alert_groups.apply_async(
                (api_token, organization_id, users, integration_data, route_data),
                task_id=start_migration_alert_groups_task_id,
                countdown=10,
            )
    current_task_id = migrate_routes.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration routes for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_escalation_policies(api_token, organization_id, users, created_schedules, route_data):
    """Recreate the old route's escalation policies on the (empty) new escalation chain.

    Runs only when the target chain exists and has no policies yet, so a retried
    task does not duplicate steps. Policies with unmappable step types are skipped.
    """
    logger.info(f"Start migration escalation policies for organization {organization_id}")
    EscalationChain = apps.get_model("alerts", "EscalationChain")
    EscalationPolicy = apps.get_model("alerts", "EscalationPolicy")
    escalation_chain = EscalationChain.objects.filter(pk=route_data["escalation_chain"]["id"]).first()
    if escalation_chain and not escalation_chain.escalation_policies.exists():
        url = "escalation_policies?route_id={}".format(route_data["old_public_primary_key"])
        escalation_policies = get_data_with_respect_to_pagination(api_token, url)
        for escalation_policy in escalation_policies:
            try:
                # Map old API step name -> local step constant; skip unknown steps.
                step_type = [
                    key
                    for key, value in EscalationPolicy.PUBLIC_STEP_CHOICES_MAP.items()
                    if value == escalation_policy["type"] and key in EscalationPolicy.PUBLIC_STEP_CHOICES
                ][0]
            except IndexError:
                continue
            # "important" policies map onto the corresponding *_IMPORTANT step when one exists.
            if step_type in EscalationPolicy.DEFAULT_TO_IMPORTANT_STEP_MAPPING and escalation_policy.get("important"):
                step_type = EscalationPolicy.DEFAULT_TO_IMPORTANT_STEP_MAPPING[step_type]
            # Old user public PKs are translated via the `users` mapping; users that
            # were not migrated are silently dropped from the notification queue.
            notify_to_users_queue = []
            if step_type == EscalationPolicy.STEP_NOTIFY_USERS_QUEUE:
                notify_to_users_queue = [
                    users[user_old_public_pk]["id"]
                    for user_old_public_pk in escalation_policy.get("persons_to_notify_next_each_time", [])
                    if user_old_public_pk in users
                ]
            elif step_type in [
                EscalationPolicy.STEP_NOTIFY_MULTIPLE_USERS,
                EscalationPolicy.STEP_NOTIFY_MULTIPLE_USERS_IMPORTANT,
            ]:
                notify_to_users_queue = [
                    users[user_old_public_pk]["id"]
                    for user_old_public_pk in escalation_policy.get("persons_to_notify", [])
                    if user_old_public_pk in users
                ]
            if step_type == EscalationPolicy.STEP_NOTIFY_IF_TIME:
                # Old API serializes times as "HH:MM:SSZ".
                notify_from_time = timezone.datetime.strptime(
                    escalation_policy.get("notify_if_time_from"), "%H:%M:%SZ"
                ).time()
                notify_to_time = timezone.datetime.strptime(
                    escalation_policy.get("notify_if_time_to"), "%H:%M:%SZ"
                ).time()
            else:
                notify_from_time, notify_to_time = None, None
            duration = escalation_policy.get("duration")
            wait_delay = timezone.timedelta(seconds=duration) if duration else None
            schedule_id = escalation_policy.get("notify_on_call_from_schedule")
            # Resolve the referenced schedule through the pk map built by migrate_schedules.
            notify_schedule_id = created_schedules.get(schedule_id, {}).get("id") if schedule_id else None
            new_escalation_policy = EscalationPolicy(
                step=step_type,
                order=escalation_policy["position"],
                escalation_chain=escalation_chain,
                notify_schedule_id=notify_schedule_id,
                wait_delay=wait_delay,
                from_time=notify_from_time,
                to_time=notify_to_time,
            )
            new_escalation_policy.save()
            if notify_to_users_queue:
                new_escalation_policy.notify_to_users_queue.set(notify_to_users_queue)
    current_task_id = migrate_escalation_policies.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration escalation policies for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_alert_groups(api_token, organization_id, users, integration_data, route_data):
    """Fan out one migrate_alert_group task per alert group attached to the given route.

    The fan-out is skipped entirely when the target route already has alert
    groups (i.e. this route was processed before); the task still marks its own
    status row finished.
    """
    logger.info(f"Start migration alert groups for organization {organization_id}")
    ChannelFilter = apps.get_model("alerts", "ChannelFilter")
    fetched_alert_groups = get_data_with_respect_to_pagination(
        api_token, "incidents?route_id={}".format(route_data["old_public_primary_key"])
    )
    target_route = ChannelFilter.objects.filter(pk=route_data["id"]).first()
    if target_route and not target_route.alert_groups.exists():
        for fetched_group in fetched_alert_groups:
            subtask_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
                organization_id=organization_id, name=migrate_alert_group.name
            )
            migrate_alert_group.apply_async(
                (api_token, organization_id, users, integration_data, route_data, fetched_group),
                task_id=subtask_id,
            )
    AmixrMigrationTaskStatus.objects.get(task_id=start_migration_alert_groups.request.id).update_status_to_finished()
    logger.info(f"Finished 'start_migration_alert_groups' for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_alert_group(api_token, organization_id, users, integration_data, route_data, alert_group_to_migrate):
    """Create one local AlertGroup from an old Amixr incident payload.

    Schedules two follow-up fan-outs for the new group: start_migration_alerts
    (its alerts) and start_migration_logs (its resolution-note history).

    NOTE(review): `resolved=True` is hard-coded regardless of the source payload
    — presumably only resolved incidents are migrated; confirm against the caller.
    """
    logger.info(f"Start migration alert_group {alert_group_to_migrate['id']} for organization {organization_id}")
    integration = AlertReceiveChannel.objects.get(pk=integration_data["id"])
    # Translate old user public PKs to local user pks; unknown users stay None.
    resolve_by_user_id = None
    acknowledged_by_user_id = None
    if alert_group_to_migrate["resolved_by_user"]:
        resolve_by_user_id = users.get(alert_group_to_migrate["resolved_by_user"], {}).get("id")
    if alert_group_to_migrate["acknowledged_by_user"]:
        acknowledged_by_user_id = users.get(alert_group_to_migrate["acknowledged_by_user"], {}).get("id")
    new_group = AlertGroup.all_objects.create(
        channel=integration,
        channel_filter_id=route_data["id"],
        resolved=True,
        resolved_by=alert_group_to_migrate["resolved_by"],
        resolved_by_user_id=resolve_by_user_id,
        resolved_at=alert_group_to_migrate.get("resolved_at") or timezone.now(),
        acknowledged=alert_group_to_migrate["acknowledged"],
        acknowledged_by=alert_group_to_migrate["acknowledged_by"],
        acknowledged_by_user_id=acknowledged_by_user_id,
        acknowledged_at=alert_group_to_migrate.get("acknowledged_at"),
    )
    # Preserve the original creation time; requires a second save because the
    # field is populated automatically on insert.
    new_group.started_at = convert_string_to_datetime(alert_group_to_migrate["created_at"])
    new_group.save(update_fields=["started_at"])
    alert_group_data = {
        "id": new_group.pk,
        "old_public_primary_key": alert_group_to_migrate["id"],
    }
    start_migration_alerts_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
        organization_id=organization_id, name=start_migration_alerts.name
    )
    start_migration_alerts.apply_async(
        (api_token, organization_id, alert_group_data),
        task_id=start_migration_alerts_task_id,
    )
    start_migration_logs_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
        organization_id=organization_id, name=start_migration_logs.name
    )
    start_migration_logs.apply_async(
        (api_token, organization_id, users, alert_group_data),
        task_id=start_migration_logs_task_id,
        countdown=5,
    )
    current_task_id = migrate_alert_group.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration alert_group {alert_group_to_migrate['id']} for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_alerts(api_token, organization_id, alert_group_data):
    """Fan out one migrate_alert task per alert of the given (new) alert group.

    Skips the fan-out when the group already has alerts, so a retry does not
    duplicate them.
    """
    logger.info(
        f"Start migration alerts for alert_group {alert_group_data['old_public_primary_key']} "
        f"for organization {organization_id}"
    )
    AlertGroup = apps.get_model("alerts", "AlertGroup")
    alert_group = AlertGroup.all_objects.get(pk=alert_group_data["id"])
    if not alert_group.alerts.exists():
        url = "alerts?incident_id={}".format(alert_group_data["old_public_primary_key"])
        alerts = get_data_with_respect_to_pagination(api_token, url)
        for alert in alerts:
            migrate_alerts_task_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
                organization_id=organization_id, name=migrate_alert.name
            )
            migrate_alert.apply_async(
                (organization_id, alert_group_data, alert),
                task_id=migrate_alerts_task_id,
            )
    current_task_id = start_migration_alerts.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(
        f"Finished 'start_migration_alerts' for alert_group {alert_group_data['old_public_primary_key']} "
        f"for organization {organization_id}"
    )
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_alert(organization_id, alert_group_data, alert):
    """Create one local Alert (plus its LockedAlert marker) from an old alert payload.

    The alert, its lock row, and the created_at backdate are written in a single
    transaction so a partial failure leaves no orphaned rows.
    """
    logger.info(f"Start migration alert {alert['id']} for organization {organization_id}")
    with transaction.atomic():
        new_alert = Alert(
            title=alert["title"],
            message=alert["message"],
            image_url=alert["image_url"],
            link_to_upstream_details=alert["link_to_upstream_details"],
            group_id=alert_group_data["id"],
            integration_unique_data=alert["payload"],
            raw_request_data=alert["payload"],
        )
        new_alert.save()
        LockedAlert.objects.create(alert=new_alert)
        # Preserve the original creation time (auto-set on insert, hence resave).
        new_alert.created_at = convert_string_to_datetime(alert["created_at"])
        new_alert.save(update_fields=["created_at"])
    current_task_id = migrate_alert.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration alert {alert['id']} for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_logs(api_token, organization_id, users, alert_group_data):
    """Fan out one migrate_log task per incident log entry of the given alert group."""
    logger.info(f"Start migration logs for alert_group {alert_group_data['id']} for organization {organization_id}")
    fetched_logs = get_data_with_respect_to_pagination(
        api_token, "incident_logs?incident_id={}".format(alert_group_data["old_public_primary_key"])
    )
    for log_entry in fetched_logs:
        subtask_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
            organization_id=organization_id, name=migrate_log.name
        )
        migrate_log.apply_async(
            (organization_id, users, alert_group_data, log_entry),
            task_id=subtask_id,
        )
    AmixrMigrationTaskStatus.objects.get(task_id=start_migration_logs.request.id).update_status_to_finished()
    logger.info(
        f"Finished 'start_migration_logs' for alert_group {alert_group_data['id']} for organization {organization_id}"
    )
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_log(organization_id, users, alert_group_data, log):
    """Create a ResolutionNote from one old Amixr incident log entry.

    Keeps the original author when that user was migrated (otherwise None) and
    backdates created_at to the original timestamp, which requires a second
    save because the field is auto-populated on insert.
    """
    logger.info(f"Start migration log for alert_group {alert_group_data['id']} for organization {organization_id}")
    # Unknown/unmigrated authors map to None rather than failing the task.
    log_author_id = users.get(log["author"], {}).get("id")
    new_resolution_note = ResolutionNote(
        author_id=log_author_id,
        message_text=log["text"],
        alert_group_id=alert_group_data["id"],
    )
    new_resolution_note.save()
    new_resolution_note.created_at = convert_string_to_datetime(log["created_at"])
    new_resolution_note.save(update_fields=["created_at"])
    current_task_id = migrate_log.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    # Consistency: every sibling migration task emits a "Finished" line; this one was missing it.
    logger.info(f"Finished migration log for alert_group {alert_group_data['id']} for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def start_migration_user_data(api_token, organization_id, users):
    """Fan out one migrate_user_data task per collected user mapping."""
    logger.info(f"Start migration user data for organization {organization_id}")
    for user_data in users.values():
        subtask_id = AmixrMigrationTaskStatus.objects.get_migration_task_id(
            organization_id=organization_id, name=migrate_user_data.name
        )
        migrate_user_data.apply_async(
            (api_token, organization_id, user_data),
            task_id=subtask_id,
        )
    AmixrMigrationTaskStatus.objects.get(task_id=start_migration_user_data.request.id).update_status_to_finished()
    logger.info(f"Finished 'start_migration_user_data' task for organization {organization_id}")
@shared_dedicated_queue_retry_task(
    autoretry_for=(Exception,), retry_backoff=True, max_retries=0 if settings.DEBUG else None
)
def migrate_user_data(api_token, organization_id, user_to_migrate):
    """Migrate one user's verified phone number and personal notification rules.

    New notification policies are bulk-created first and only then the user's
    pre-existing policies are deleted, so a failure mid-way never leaves the
    user without any policies.
    """
    logger.info(f"Start migration user {user_to_migrate['id']} for organization {organization_id}")
    User = apps.get_model("user_management", "User")
    UserNotificationPolicy = apps.get_model("base", "UserNotificationPolicy")
    user = User.objects.filter(pk=user_to_migrate["id"], organization_id=organization_id).first()
    if user:
        # Keep a locally verified number if one exists; only fill the gap.
        if not user.verified_phone_number and user_to_migrate["old_verified_phone_number"]:
            user.save_verified_phone_number(user_to_migrate["old_verified_phone_number"])
        url = "personal_notification_rules?user_id={}".format(user_to_migrate["old_public_primary_key"])
        user_notification_policies = get_data_with_respect_to_pagination(api_token, url)
        notification_policies_to_create = []
        # Snapshot the pks of the user's current policies so only those are
        # removed after the migrated ones are created.
        existing_notification_policies_ids = list(user.notification_policies.all().values_list("pk", flat=True))
        for notification_policy in user_notification_policies:
            try:
                # Unmappable rule types are skipped.
                step, notification_channel = PersonalNotificationRuleSerializer._type_to_step_and_notification_channel(
                    notification_policy["type"],
                )
            except exceptions.ValidationError:
                continue
            new_notification_policy = UserNotificationPolicy(
                user=user,
                important=notification_policy["important"],
                step=step,
                order=notification_policy["position"],
            )
            if step == UserNotificationPolicy.Step.NOTIFY:
                new_notification_policy.notify_by = notification_channel
            if step == UserNotificationPolicy.Step.WAIT:
                # Missing/zero duration falls back to the 5-minute default.
                duration = notification_policy.get("duration")
                wait_delay = timezone.timedelta(seconds=duration) if duration else UserNotificationPolicy.FIVE_MINUTES
                new_notification_policy.wait_delay = wait_delay
            notification_policies_to_create.append(new_notification_policy)
        UserNotificationPolicy.objects.bulk_create(notification_policies_to_create, batch_size=5000)
        user.notification_policies.filter(pk__in=existing_notification_policies_ids).delete()
    current_task_id = migrate_user_data.request.id
    AmixrMigrationTaskStatus.objects.get(task_id=current_task_id).update_status_to_finished()
    logger.info(f"Finished migration user {user_to_migrate['id']} for organization {organization_id}")

View file

@ -1,12 +0,0 @@
# URL routing for the Amixr customers migration tool API
# (plan preview, migration kick-off, and status polling endpoints).
from common.api_helpers.optional_slash_router import optional_slash_path
from .views.customers_migration_tool import MigrateAPIView, MigrationPlanAPIView, MigrationStatusAPIView
app_name = "migration-tool"
urlpatterns = [
    optional_slash_path("amixr_migration_plan", MigrationPlanAPIView.as_view(), name="amixr_migration_plan"),
    optional_slash_path("migrate_from_amixr", MigrateAPIView.as_view(), name="migrate_from_amixr"),
    optional_slash_path("amixr_migration_status", MigrationStatusAPIView.as_view(), name="amixr_migration_status"),
]

View file

@ -1,35 +0,0 @@
import requests
from django.utils import timezone
from apps.migration_tool.constants import REQUEST_URL
class APIResponseException(Exception):
    """Raised when the old Amixr API returns a non-200 HTTP response."""
    pass
def get_data_with_respect_to_pagination(api_token, endpoint):
    """Fetch every page of a paginated old-API endpoint and return the combined results.

    Raises APIResponseException on any non-200 response.
    """

    def fetch(url):
        response = requests.get(url, headers={"AUTHORIZATION": api_token})
        if response.status_code != 200:
            raise APIResponseException(f"Status code: {response.status_code}, Data: {response.content}")
        return response.json()

    page = fetch(f"{REQUEST_URL}/{endpoint}")
    collected = page["results"]
    next_url = page["next"]
    while next_url:
        page = fetch(next_url)
        collected.extend(page["results"])
        next_url = page["next"]
    return collected
def convert_string_to_datetime(dt_str):
    """Parse an old-API timestamp string into a datetime.

    First tries the timezone-aware format with fractional seconds
    ("%Y-%m-%dT%X.%f%z"); on failure, falls back to the naive "Z"-suffixed
    format ("%Y-%m-%dT%XZ"). Raises ValueError if neither format matches.
    """
    try:
        return timezone.datetime.strptime(dt_str, "%Y-%m-%dT%X.%f%z")
    except ValueError:
        return timezone.datetime.strptime(dt_str, "%Y-%m-%dT%XZ")

View file

@ -1,186 +0,0 @@
import logging
import requests
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.alerts.models import AlertReceiveChannel
from apps.api.permissions import IsAdmin, MethodPermission
from apps.auth_token.auth import PluginAuthentication
from apps.migration_tool.constants import FINISHED, IN_PROGRESS, NOT_STARTED, REQUEST_URL
from apps.migration_tool.tasks import start_migration_from_old_amixr
from apps.migration_tool.utils import get_data_with_respect_to_pagination
from common.api_helpers.exceptions import BadRequest
logger = logging.getLogger(__name__)
class MigrationPlanAPIView(APIView):
    """Build a dry-run summary of what a migration from old Amixr would create.

    POST with {"token": <old Amixr API token>} returns counts and lists of
    users, integrations, routes, escalation policies and calendars that would
    be migrated, without writing anything.
    """

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, MethodPermission)
    method_permissions = {IsAdmin: ("POST",)}
    def post(self, request):
        api_token = request.data.get("token", None)
        if api_token is None:
            raise BadRequest(detail="API token is required")
        organization = request.auth.organization
        if organization.is_amixr_migration_started:
            raise BadRequest(detail="Migration from Amixr has already been started")
        # check token
        response = requests.get(f"{REQUEST_URL}/users", headers={"AUTHORIZATION": api_token})
        if response.status_code == status.HTTP_403_FORBIDDEN:
            raise BadRequest(detail="Invalid token")
        # The plan's dict keys double as user-facing labels on the frontend.
        # Just not to re-make the frontend...
        USERS_NOT_TO_MIGRATE_KEY = (
            "Users WON'T be migrated (couldn't find those users in the Grafana Cloud, ask "
            "them to sign up if you want their data to be migrated and re-build the migration plan)"
        )
        USERS_TO_MIGRATE = "Users will be migrated"
        INTEGRATIONS_TO_MIGRATE = "Integrations to migrate"
        INTEGRATIONS_COUNT = "Integrations count"
        ROUTES_COUNT = "Routes count"
        ESCALATIONS_POLICIES_COUNT = "Escalation policies count"
        CALENDARS_COUNT = "Calendars count"
        migration_plan = {
            USERS_TO_MIGRATE: [],
            USERS_NOT_TO_MIGRATE_KEY: [],
            INTEGRATIONS_TO_MIGRATE: [],
            INTEGRATIONS_COUNT: 0,
            ROUTES_COUNT: 0,
            ESCALATIONS_POLICIES_COUNT: 0,
            CALENDARS_COUNT: 0,
        }
        # Users are matched to local accounts by email.
        logger.info(f"migration plan for organization {organization.pk}: get users")
        users = get_data_with_respect_to_pagination(api_token, "users")
        logger.info(f"migration plan for organization {organization.pk}: got users")
        org_users = organization.users.values_list("email", flat=True)
        for user in users:
            if user["email"] in org_users:
                migration_plan[USERS_TO_MIGRATE].append(user["email"])
            else:
                migration_plan[USERS_NOT_TO_MIGRATE_KEY].append(user["email"])
        # Integration selection mirrors the filters used by the migrate_integrations task:
        # skip name collisions, unmappable types, and non-web integration types.
        logger.info(f"migration plan for organization {organization.pk}: get integrations")
        integrations = get_data_with_respect_to_pagination(api_token, "integrations")
        logger.info(f"migration plan for organization {organization.pk}: got integrations")
        existing_integrations_names = set(organization.alert_receive_channels.values_list("verbal_name", flat=True))
        integrations_to_migrate_public_pk = []
        for integration in integrations:
            if integration["name"] in existing_integrations_names:
                continue
            try:
                integration_type = [
                    key
                    for key, value in AlertReceiveChannel.INTEGRATIONS_TO_REVERSE_URL_MAP.items()
                    if value == integration["type"]
                ][0]
            except IndexError:
                continue
            if integration_type not in AlertReceiveChannel.WEB_INTEGRATION_CHOICES:
                continue
            migration_plan[INTEGRATIONS_TO_MIGRATE].append(integration["name"])
            integrations_to_migrate_public_pk.append(integration["id"])
        migration_plan[INTEGRATIONS_COUNT] = len(migration_plan[INTEGRATIONS_TO_MIGRATE])
        # Routes count only routes of integrations selected above.
        routes_to_migrate_public_pk = []
        logger.info(f"migration plan for organization {organization.pk}: get routes")
        routes = get_data_with_respect_to_pagination(api_token, "routes")
        logger.info(f"migration plan for organization {organization.pk}: got routes")
        for route in routes:
            if route["integration_id"] in integrations_to_migrate_public_pk:
                migration_plan[ROUTES_COUNT] += 1
                routes_to_migrate_public_pk.append(route["id"])
        # Escalation policies count only those belonging to counted routes.
        logger.info(f"migration plan for organization {organization.pk}: get escalation_policies")
        escalation_policies = get_data_with_respect_to_pagination(api_token, "escalation_policies")
        logger.info(f"migration plan for organization {organization.pk}: got escalation_policies")
        for escalation_policy in escalation_policies:
            if escalation_policy["route_id"] in routes_to_migrate_public_pk:
                migration_plan[ESCALATIONS_POLICIES_COUNT] += 1
        # Calendars: only iCal-backed schedules with non-colliding names, same
        # filter as the migrate_schedules task.
        logger.info(f"migration plan for organization {organization.pk}: get schedules")
        schedules = get_data_with_respect_to_pagination(api_token, "schedules")
        logger.info(f"migration plan for organization {organization.pk}: got schedules")
        existing_schedules_names = set(organization.oncall_schedules.values_list("name", flat=True))
        for schedule in schedules:
            if not schedule["ical_url"] or schedule["name"] in existing_schedules_names:
                continue
            migration_plan[CALENDARS_COUNT] += 1
        return Response(migration_plan)
class MigrateAPIView(APIView):
    """Kick off the asynchronous migration from an old Amixr instance.

    POST with {"token": <old Amixr API token>} validates the token, sets the
    organization's one-shot migration flag (so a migration can only be started
    once), and enqueues start_migration_from_old_amixr.
    """

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, IsAdmin)
    def post(self, request):
        api_token = request.data.get("token", None)
        if api_token is None:
            raise BadRequest(detail="API token is required")
        organization = request.auth.organization
        if organization.is_amixr_migration_started:
            raise BadRequest(detail="Migration from Amixr has already been started")
        # check token
        response = requests.get(f"{REQUEST_URL}/users", headers={"AUTHORIZATION": api_token})
        if response.status_code == status.HTTP_403_FORBIDDEN:
            raise BadRequest(detail="Invalid token")
        # Flag first so concurrent requests cannot start a second migration.
        organization.is_amixr_migration_started = True
        organization.save(update_fields=["is_amixr_migration_started"])
        organization_id = organization.pk
        user_id = request.user.pk
        # start migration process
        start_migration_from_old_amixr.delay(api_token=api_token, organization_id=organization_id, user_id=user_id)
        return Response(status=status.HTTP_200_OK)
class MigrationStatusAPIView(APIView):
    """Report migration progress and the list of new integration endpoints."""

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, IsAdmin)

    def get(self, request):
        organization = request.auth.organization
        payload = {
            "migration_status": self.get_migration_status(organization),
            "endpoints_list": self.get_endpoints_list(organization),
        }
        return Response(payload, status=status.HTTP_200_OK)

    def get_migration_status(self, organization):
        # NOT_STARTED until the org's migration flag is set; afterwards the
        # presence of unfinished task-status rows distinguishes IN_PROGRESS
        # from FINISHED.
        if not organization.is_amixr_migration_started:
            return NOT_STARTED
        if organization.migration_tasks.filter(is_finished=False).exists():
            return IN_PROGRESS
        return FINISHED

    def get_endpoints_list(self, organization):
        # Team-less integrations are the ones created by the migration tool.
        return [
            f"{integration.verbal_name}, new endpoint: {integration.integration_url}"
            for integration in organization.alert_receive_channels.filter(team_id__isnull=True)
        ]

View file

@ -358,7 +358,7 @@ class UpdateResolutionNoteStep(scenario_step.ScenarioStep):
author_verbal = resolution_note.author_verbal(mention=True)
resolution_note_text_block = {
"type": "section",
"text": {"type": "plain_text", "text": resolution_note.text, "emoji": True},
"text": {"type": "mrkdwn", "text": resolution_note.text},
}
blocks.append(resolution_note_text_block)
context_block = {

View file

@ -23,26 +23,39 @@ class TelegramChannelVerificationCode(models.Model):
def is_active(self) -> bool:
return self.datetime + timezone.timedelta(days=1) < timezone.now()
@property
def uuid_with_org_id(self) -> str:
return f"{self.organization.public_primary_key}_{self.uuid}"
@classmethod
def uuid_without_org_id(cls, verification_code: str) -> str:
try:
return verification_code.split("_")[1]
except IndexError:
raise ValidationError("Invalid verification code format")
@classmethod
def verify_channel_and_discussion_group(
cls,
uuid_code: str,
verification_code: str,
channel_chat_id: int,
channel_name: str,
discussion_group_chat_id: int,
discussion_group_name: str,
) -> Tuple[Optional[TelegramToOrganizationConnector], bool]:
try:
verification_code = cls.objects.get(uuid=uuid_code)
uuid_code = cls.uuid_without_org_id(verification_code)
code_instance = cls.objects.get(uuid=uuid_code)
        # see if an organization has other channels connected
# if it is the first channel, make it default for the organization
connector_exists = verification_code.organization.telegram_channel.exists()
connector_exists = code_instance.organization.telegram_channel.exists()
connector, created = TelegramToOrganizationConnector.objects.get_or_create(
channel_chat_id=channel_chat_id,
defaults={
"organization": verification_code.organization,
"organization": code_instance.organization,
"channel_name": channel_name,
"discussion_group_chat_id": discussion_group_chat_id,
"discussion_group_name": discussion_group_name,
@ -51,14 +64,14 @@ class TelegramChannelVerificationCode(models.Model):
)
write_chatops_insight_log(
author=verification_code.author,
author=code_instance.author,
event_name=ChatOpsEvent.CHANNEL_CONNECTED,
chatops_type=ChatOpsType.TELEGRAM,
channel_name=channel_name,
)
if not connector_exists:
write_chatops_insight_log(
author=verification_code.author,
author=code_instance.author,
event_name=ChatOpsEvent.DEFAULT_CHANNEL_CHANGED,
chatops_type=ChatOpsType.TELEGRAM,
prev_channel=None,

View file

@ -21,13 +21,26 @@ class TelegramVerificationCode(models.Model):
def is_active(self) -> bool:
return self.datetime + timezone.timedelta(days=1) < timezone.now()
@property
def uuid_with_org_id(self) -> str:
return f"{self.user.organization.public_primary_key}_{self.uuid}"
@classmethod
def uuid_without_org_id(cls, verification_code: str) -> str:
try:
return verification_code.split("_")[1]
except IndexError:
raise ValidationError("Invalid verification code format")
@classmethod
def verify_user(
cls, uuid_code: str, telegram_chat_id: int, telegram_nick_name: str
cls, verification_code: str, telegram_chat_id: int, telegram_nick_name: str
) -> Tuple[Optional[TelegramToUserConnector], bool]:
try:
verification_code = cls.objects.get(uuid=uuid_code)
user = verification_code.user
uuid_code = cls.uuid_without_org_id(verification_code)
code_instance = cls.objects.get(uuid=uuid_code)
user = code_instance.user
connector, created = TelegramToUserConnector.objects.get_or_create(
user=user, defaults={"telegram_nick_name": telegram_nick_name, "telegram_chat_id": telegram_chat_id}

View file

@ -83,7 +83,10 @@ class TelegramKeyboardRenderer:
callback_data_args = [self.alert_group.pk, action.value]
if action_data is not None:
callback_data_args.append(action_data)
# Add org id with 'x-oncall-org-id' prefix to callback data.
    # It's a workaround to pass org_id to the oncall-gateway while proxying requests.
# TODO: switch to json str instead of ':' separated string.
callback_data_args.append(f"x-oncall-org-id{self.alert_group.channel.organization.public_primary_key}")
button = InlineKeyboardButton(text=text, callback_data=CallbackQueryFactory.encode_data(*callback_data_args))
return button

View file

@ -46,12 +46,31 @@ def test_actions_keyboard_alerting(make_organization, make_alert_receive_channel
keyboard = renderer.render_actions_keyboard()
expected_keyboard = [
[InlineKeyboardButton(text="Acknowledge", callback_data=f"{alert_group.pk}:acknowledge")],
[InlineKeyboardButton(text="Resolve", callback_data=f"{alert_group.pk}:resolve")],
[
InlineKeyboardButton(text="🔕 forever", callback_data=f"{alert_group.pk}:silence"),
InlineKeyboardButton(text="... for 1h", callback_data=f"{alert_group.pk}:silence:3600"),
InlineKeyboardButton(text="... for 4h", callback_data=f"{alert_group.pk}:silence:14400"),
InlineKeyboardButton(
text="Acknowledge",
callback_data=f"{alert_group.pk}:acknowledge:x-oncall-org-id{organization.public_primary_key}",
)
],
[
InlineKeyboardButton(
text="Resolve",
callback_data=f"{alert_group.pk}:resolve:x-oncall-org-id{organization.public_primary_key}",
)
],
[
InlineKeyboardButton(
text="🔕 forever",
callback_data=f"{alert_group.pk}:silence:x-oncall-org-id{organization.public_primary_key}",
),
InlineKeyboardButton(
text="... for 1h",
callback_data=f"{alert_group.pk}:silence:3600:x-oncall-org-id{organization.public_primary_key}",
),
InlineKeyboardButton(
text="... for 4h",
callback_data=f"{alert_group.pk}:silence:14400:x-oncall-org-id{organization.public_primary_key}",
),
],
]
@ -75,8 +94,18 @@ def test_actions_keyboard_acknowledged(
keyboard = renderer.render_actions_keyboard()
expected_keyboard = [
[InlineKeyboardButton(text="Unacknowledge", callback_data=f"{alert_group.pk}:unacknowledge")],
[InlineKeyboardButton(text="Resolve", callback_data=f"{alert_group.pk}:resolve")],
[
InlineKeyboardButton(
text="Unacknowledge",
callback_data=f"{alert_group.pk}:unacknowledge:x-oncall-org-id{organization.public_primary_key}",
)
],
[
InlineKeyboardButton(
text="Resolve",
callback_data=f"{alert_group.pk}:resolve:x-oncall-org-id{organization.public_primary_key}",
)
],
]
assert are_keyboards_equal(keyboard.inline_keyboard, expected_keyboard) is True
@ -99,7 +128,12 @@ def test_actions_keyboard_resolved(
keyboard = renderer.render_actions_keyboard()
expected_keyboard = [
[InlineKeyboardButton(text="Unresolve", callback_data=f"{alert_group.pk}:unresolve")],
[
InlineKeyboardButton(
text="Unresolve",
callback_data=f"{alert_group.pk}:unresolve:x-oncall-org-id{organization.public_primary_key}",
)
],
]
assert are_keyboards_equal(keyboard.inline_keyboard, expected_keyboard) is True
@ -122,9 +156,24 @@ def test_actions_keyboard_silenced(
keyboard = renderer.render_actions_keyboard()
expected_keyboard = [
[InlineKeyboardButton(text="Acknowledge", callback_data=f"{alert_group.pk}:acknowledge")],
[InlineKeyboardButton(text="Resolve", callback_data=f"{alert_group.pk}:resolve")],
[InlineKeyboardButton(text="Unsilence", callback_data=f"{alert_group.pk}:unsilence")],
[
InlineKeyboardButton(
text="Acknowledge",
callback_data=f"{alert_group.pk}:acknowledge:x-oncall-org-id{organization.public_primary_key}",
)
],
[
InlineKeyboardButton(
text="Resolve",
callback_data=f"{alert_group.pk}:resolve:x-oncall-org-id{organization.public_primary_key}",
)
],
[
InlineKeyboardButton(
text="Unsilence",
callback_data=f"{alert_group.pk}:unsilence:x-oncall-org-id{organization.public_primary_key}",
)
],
]
assert are_keyboards_equal(keyboard.inline_keyboard, expected_keyboard) is True

View file

@ -71,9 +71,8 @@ def test_alert_group_message(make_organization, make_alert_receive_channel, make
renderer = TelegramMessageRenderer(alert_group=alert_group)
text = renderer.render_alert_group_message()
assert text == (
f"🔴 #{alert_group.inside_organization_number}, {alert_receive_channel.config.tests['telegram']['title']}\n"
f"<a href='{organization.web_link_with_id}'>&#8205;</a>🔴 #{alert_group.inside_organization_number}, {alert_receive_channel.config.tests['telegram']['title']}\n"
"Alerting, alerts: 1\n"
"Source: Test integration - Grafana\n"
f"{alert_group.web_link}\n\n"
@ -157,7 +156,7 @@ def test_personal_message(
text = renderer.render_personal_message()
assert text == (
f"🟠 #{alert_group.inside_organization_number}, {alert_receive_channel.config.tests['telegram']['title']}\n"
f"<a href='{organization.web_link_with_id}'>&#8205;</a>🟠 #{alert_group.inside_organization_number}, {alert_receive_channel.config.tests['telegram']['title']}\n"
f"Acknowledged by {user_name}, alerts: 1\n"
"Source: Test integration - Grafana\n"
f"{alert_group.web_link}\n\n"

View file

@ -17,7 +17,7 @@ def test_user_verification_handler_process_update_another_account_already_linked
user_2 = make_user_for_organization(organization)
code = make_telegram_verification_code(user_2)
connector, created = TelegramVerificationCode.verify_user(code.uuid, chat_id, "nickname")
connector, created = TelegramVerificationCode.verify_user(code.uuid_with_org_id, chat_id, "nickname")
assert created
assert connector.telegram_chat_id == chat_id
@ -38,7 +38,7 @@ def test_user_verification_handler_process_update_user_already_linked(
other_chat_id = 321
code = make_telegram_verification_code(user_1)
connector, created = TelegramVerificationCode.verify_user(code.uuid, other_chat_id, "nickname")
connector, created = TelegramVerificationCode.verify_user(code.uuid_with_org_id, other_chat_id, "nickname")
assert created is False
assert connector.user == user_1

View file

@ -70,7 +70,7 @@ class ButtonPressHandler(UpdateHandler):
action_name = args[1]
action = Action(action_name)
action_data = args[2] if len(args) >= 3 else None
action_data = args[2] if len(args) >= 3 and not args[2].startswith("x-oncall-org-id") else None
return ActionContext(alert_group=alert_group, action=action, action_data=action_data)

View file

@ -1,15 +1,10 @@
from apps.telegram.client import TelegramClient
from apps.telegram.models import TelegramToUserConnector
from apps.telegram.updates.update_handlers.update_handler import UpdateHandler
START_TEXT = """Hi!
This is Grafana OnCall notification bot. You can connect your Grafana OnCall account to Telegram on user settings page.
"""
START_TEXT_FOR_CONNECTED_USER = """Hi!
This is Grafana OnCall notification bot. Your Telegram account is connected to user <b>{username}</b>
"""
class StartMessageHandler(UpdateHandler):
def matches(self) -> bool:
@ -24,12 +19,5 @@ class StartMessageHandler(UpdateHandler):
return is_from_private_chat and is_start_message
def process_update(self) -> None:
connector = TelegramToUserConnector.objects.filter(telegram_chat_id=self.update.effective_user.id).first()
telegram_client = TelegramClient()
if connector is not None:
user = connector.user
text = START_TEXT_FOR_CONNECTED_USER.format(username=user.username)
telegram_client.send_raw_message(chat_id=self.update.effective_user.id, text=text)
else:
telegram_client.send_raw_message(chat_id=self.update.effective_user.id, text=START_TEXT)
telegram_client.send_raw_message(chat_id=self.update.effective_user.id, text=START_TEXT)

View file

@ -73,7 +73,7 @@ class ChannelVerificationCodeHandler(UpdateHandler):
return
connector, created = TelegramChannelVerificationCode.verify_channel_and_discussion_group(
uuid_code=verification_code,
verification_code=verification_code,
channel_chat_id=channel_chat_id,
channel_name=channel_name,
discussion_group_chat_id=discussion_group_chat_id,

View file

@ -33,7 +33,7 @@ class PersonalVerificationCodeHandler(UpdateHandler):
verification_code = text if is_verification_message(text) else text.split()[1]
connector, created = TelegramVerificationCode.verify_user(
uuid_code=verification_code, telegram_chat_id=user.id, telegram_nick_name=nickname
verification_code=verification_code, telegram_chat_id=user.id, telegram_nick_name=nickname
)
if created:

View file

@ -1,11 +1,11 @@
import re
from typing import List, Union
UUID4_REGEX = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
TELEGRAM_VERIFICATION_CODE_REGEX = "^[A-Z0-9]*_[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
def is_verification_message(text: str) -> bool:
return bool(re.match(UUID4_REGEX, text))
return bool(re.match(TELEGRAM_VERIFICATION_CODE_REGEX, text))
class CallbackQueryFactory:

View file

@ -245,6 +245,11 @@ class Organization(MaintainableObject):
def web_link(self):
return urljoin(self.grafana_url, "a/grafana-oncall-app/")
@property
def web_link_with_id(self):
# It's a workaround to pass org id to the oncall gateway while proxying telegram requests
return urljoin(self.grafana_url, f"a/grafana-oncall-app/?x-oncall-org-id={self.public_primary_key}")
def __str__(self):
return f"{self.pk}: {self.org_title}"

View file

@ -37,7 +37,6 @@ urlpatterns = [
path("integrations/v1/", include("apps.integrations.urls", namespace="integrations")),
path("twilioapp/", include("apps.twilioapp.urls")),
path("api/v1/", include("apps.public_api.urls", namespace="api-public")),
path("api/internal/v1/", include("apps.migration_tool.urls", namespace="migration-tool")),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.FEATURE_SLACK_INTEGRATION_ENABLED:

View file

@ -201,7 +201,6 @@ INSTALLED_APPS = [
"apps.public_api",
"apps.grafana_plugin",
"apps.grafana_plugin_management",
"apps.migration_tool",
"corsheaders",
"debug_toolbar",
"social_django",

View file

@ -9,18 +9,7 @@ module.exports = {
'^assets|^components|^containers|^declare|^icons|^img|^interceptors|^models|^network|^pages|^services|^state|^utils',
},
rules: {
'no-unused-vars': ['warn', { vars: 'all', args: 'after-used', ignoreRestSiblings: false }],
'react/prop-types': 'warn',
'react/display-name': 'warn',
'react/jsx-key': 'warn',
'react-hooks/exhaustive-deps': 'off',
'react/no-unescaped-entities': 'warn',
'react/jsx-no-target-blank': 'warn',
'react-hooks/exhaustive-deps': 'warn',
'no-restricted-imports': 'warn',
eqeqeq: 'warn',
'no-duplicate-imports': 'error',
'rulesdir/no-relative-import-paths': ['error', { allowSameFolder: true }],
'import/order': [
'error',
{
@ -47,5 +36,33 @@ module.exports = {
'newlines-between': 'always',
},
],
'no-unused-vars': [
'warn',
{
vars: 'all',
args: 'after-used',
argsIgnorePattern: '^_',
destructuredArrayIgnorePattern: '^_',
ignoreRestSiblings: true,
},
],
'no-duplicate-imports': 'error',
'no-restricted-imports': 'warn',
'react/display-name': 'warn',
/**
* It appears as though the react/prop-types rule has a bug in it
* when your props extend an interface
* https://github.com/jsx-eslint/eslint-plugin-react/issues/3325
*/
'react/prop-types': 'off',
'react/jsx-key': 'warn',
'react/jsx-no-target-blank': 'warn',
'react/no-unescaped-entities': 'off',
/**
* TODO: react-hooks/exhaustive-deps is temporarily disabled
* this will be turned back on, and the warnings fixed, in a forthcoming PR
*/
'react-hooks/exhaustive-deps': 'off',
'rulesdir/no-relative-import-paths': ['error', { allowSameFolder: true }],
},
};

View file

@ -1,12 +1,14 @@
{
"extends": "stylelint-config-standard",
"rules": {
"block-no-empty": [true,{ "severity": "warning"}],
"extends": ["stylelint-config-standard", "stylelint-prettier/recommended"],
"plugins": ["stylelint-prettier"],
"rules": {
"block-no-empty": [true, { "severity": "warning" }],
"selector-pseudo-class-no-unknown": [
true,
{
"ignorePseudoClasses": ["global"]
}
]
],
"prettier/prettier": true
}
}

View file

@ -1,165 +0,0 @@
# Change Log
## v1.0.39 (2022-10-03)
- Fix issue in v1.0.38 blocking the creation of schedules and webhooks in the UI
## v1.0.38 (2022-09-30)
- Fix exception handling for adding resolution notes when slack and oncall users are out of sync.
- Fix all day events showing as having gaps in slack notifications
- Improve plugin configuration error message readability
- Add `telegram` key to `permalinks` property in `AlertGroup` public API response schema
## v1.0.37 (2022-09-21)
- Improve API token creation form
- Fix alert group bulk action bugs
- Add `permalinks` property to `AlertGroup` public API response schema
- Scheduling system bug fixes
- Public API bug fixes
## v1.0.36 (2022-09-12)
- Alpha web schedules frontend/backend updates
- Bug fixes
## v1.0.35 (2022-09-07)
- Bug fixes
## v1.0.34 (2022-09-06)
- Fix schedule notification spam
## v1.0.33 (2022-09-06)
- Add raw alert view
- Add GitHub star button for OSS installations
- Restore alert group search functionality
- Bug fixes
## v1.0.32 (2022-09-01)
- Bug fixes
## v1.0.31 (2022-09-01)
- Bump celery version
- Fix oss to cloud connection
## v1.0.30 (2022-08-31)
- Bug fix: check user notification policy before access
## v1.0.29 (2022-08-31)
- Add arm64 docker image
## v1.0.28 (2022-08-31)
- Bug fixes
## v1.0.27 (2022-08-30)
- Bug fixes
## v1.0.26 (2022-08-26)
- Insight log's format fixes
- Remove UserNotificationPolicy auto-recreating
## v1.0.25 (2022-08-24)
- Bug fixes
## v1.0.24 (2022-08-24)
- Insight logs
- Default DATA_UPLOAD_MAX_MEMORY_SIZE to 1mb
## v1.0.23 (2022-08-23)
- Bug fixes
## v1.0.22 (2022-08-16)
- Make STATIC_URL configurable from environment variable
## v1.0.21 (2022-08-12)
- Bug fixes
## v1.0.19 (2022-08-10)
- Bug fixes
## v1.0.15 (2022-08-03)
- Bug fixes
## v1.0.13 (2022-07-27)
- Optimize alert group list view
- Fix a bug related to Twilio setup
## v1.0.12 (2022-07-26)
- Update push-notifications dependency
- Rework how absolute URLs are built
- Fix to show maintenance windows per team
- Logging improvements
- Internal api to get a schedule final events
## v1.0.10 (2022-07-22)
- Speed-up of alert group web caching
- Internal api for OnCall shifts
## v1.0.9 (2022-07-21)
- Frontend bug fixes & improvements
- Support regex_replace() in templates
- Bring back alert group caching and list view
## v1.0.7 (2022-07-18)
- Backend & frontend bug fixes
- Deployment improvements
- Reshape webhook payload for outgoing webhooks
- Add escalation chain usage info on escalation chains page
- Improve alert group list load speeds and simplify caching system
## v1.0.6 (2022-07-12)
- Manual Incidents enabled for teams
- Fix phone notifications for OSS
- Public API improvements
## v1.0.5 (2022-07-06)
- Bump Django to 3.2.14
- Fix PagerDuty iCal parsing
## 1.0.4 (2022-06-28)
- Allow Telegram DMs without channel connection.
## 1.0.3 (2022-06-27)
- Fix users public api endpoint. Now it returns users with all roles.
- Fix redundant notifications about gaps in schedules.
- Frontend fixes.
## 1.0.2 (2022-06-17)
- Fix Grafana Alerting integration to handle API changes in Grafana 9
- Improve public api endpoint for outgoing webhooks (/actions) by adding ability to create, update and delete outgoing webhook instance
## 1.0.0 (2022-06-14)
- First Public Release
## 0.0.71 (2022-06-06)
- Initial Commit Release

1
grafana-plugin/CHANGELOG.md Symbolic link
View file

@ -0,0 +1 @@
../CHANGELOG.md

View file

@ -3,8 +3,8 @@
"version": "1.0.0",
"description": "Grafana OnCall Plugin",
"scripts": {
"lint": "eslint --cache --ext .js,.jsx,.ts,.tsx ./src",
"lint:fix": "eslint --fix --cache --ext .js,.jsx,.ts,.tsx --quiet ./src",
"lint": "eslint --cache --ext .js,.jsx,.ts,.tsx --max-warnings=0 ./src",
"lint:fix": "eslint --fix --cache --ext .js,.jsx,.ts,.tsx --max-warnings=0 --quiet ./src",
"stylelint": "stylelint ./src/**/*.css",
"stylelint:fix": "stylelint --fix ./src/**/*.css",
"build": "grafana-toolkit plugin:build",
@ -52,6 +52,7 @@
"@babel/preset-react": "^7.18.6",
"@babel/preset-typescript": "^7.18.6",
"@grafana/data": "^9.1.1",
"@grafana/eslint-config": "^5.0.0",
"@grafana/runtime": "^9.1.1",
"@grafana/toolkit": "^9.1.1",
"@grafana/ui": "^9.1.1",
@ -67,8 +68,13 @@
"@types/react-router-dom": "^5.3.3",
"@types/react-test-renderer": "^17.0.2",
"@types/throttle-debounce": "^5.0.0",
"@typescript-eslint/eslint-plugin": "^5.40.1",
"copy-webpack-plugin": "^11.0.0",
"dompurify": "^2.3.12",
"eslint": "^8.25.0",
"eslint-plugin-jsdoc": "^39.3.14",
"eslint-plugin-react": "^7.31.10",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-rulesdir": "^0.2.1",
"jest": "^27.5.1",
"jest-environment-jsdom": "^27.5.1",
@ -78,8 +84,11 @@
"plop": "^2.7.4",
"postcss-loader": "^7.0.1",
"react-test-renderer": "^17.0.2",
"stylelint-config-prettier": "^9.0.3",
"stylelint-prettier": "^2.0.0",
"ts-jest": "^27.1.3",
"ts-loader": "^9.3.1",
"typescript": "4.6.4",
"webpack-bundle-analyzer": "^4.6.1"
},
"engines": {

View file

@ -1,7 +1,7 @@
import React, { useEffect, useMemo } from 'react';
import { AppRootProps } from '@grafana/data';
import { Button, HorizontalGroup, LinkButton, VerticalGroup } from '@grafana/ui';
import { Button, HorizontalGroup, LinkButton } from '@grafana/ui';
import dayjs from 'dayjs';
import isSameOrAfter from 'dayjs/plugin/isSameOrAfter';
import isSameOrBefore from 'dayjs/plugin/isSameOrBefore';
@ -30,8 +30,6 @@ dayjs.extend(isSameOrBefore);
dayjs.extend(isSameOrAfter);
dayjs.extend(isoWeek);
// dayjs().weekday(0);
import './style/vars.css';
import './style/index.css';

View file

@ -3,6 +3,7 @@
Developer-Friendly
Alert Management
with Brilliant Slack Integration
- Connect monitoring systems
- Collect and analyze data
- On-call rotation
@ -10,5 +11,6 @@ with Brilliant Slack Integration
- Never miss alerts with calls and SMS
## Documentation
- [On Github](http://github.com/grafana/oncall)
- [Grafana OnCall](https://grafana.com/docs/grafana-cloud/oncall/)

View file

@ -10,7 +10,3 @@ export function getLabelFromTemplateName(templateName: string, group: any) {
}
return arrayWithNeededValues.join(' ');
}
export function includeTemplateGroup(groupName: string) {
return true;
}

View file

@ -8,8 +8,7 @@ import cn from 'classnames/bind';
import { omit } from 'lodash-es';
import { templatesToRender, Template } from 'components/AlertTemplates/AlertTemplatesForm.config';
import { getLabelFromTemplateName, includeTemplateGroup } from 'components/AlertTemplates/AlertTemplatesForm.helper';
import Collapse from 'components/Collapse/Collapse';
import { getLabelFromTemplateName } from 'components/AlertTemplates/AlertTemplatesForm.helper';
import Block from 'components/GBlock/Block';
import MonacoJinja2Editor from 'components/MonacoJinja2Editor/MonacoJinja2Editor';
import SourceCode from 'components/SourceCode/SourceCode';
@ -41,7 +40,6 @@ const AlertTemplatesForm = (props: AlertTemplatesFormProps) => {
const {
onUpdateTemplates,
templates,
errors,
alertReceiveChannelId,
alertGroupId,
demoAlertEnabled,
@ -53,6 +51,8 @@ const AlertTemplatesForm = (props: AlertTemplatesFormProps) => {
const [tempValues, setTempValues] = useState<{
[key: string]: string | null;
}>({});
const [activeGroup, setActiveGroup] = useState<string>();
const [activeTemplate, setActiveTemplate] = useState<Template>();
useEffect(() => {
makeRequest('/preview_template_options/', {});
@ -80,14 +80,11 @@ const AlertTemplatesForm = (props: AlertTemplatesFormProps) => {
const handleReset = () => {
const temValuesCopy = omit(
tempValues,
groups[activeGroup].map((group: any) => group.name)
groups[activeGroup].map((group) => group.name)
);
setTempValues(temValuesCopy);
};
const [activeGroup, setActiveGroup] = useState<string>();
const [activeTemplate, setActiveTemplate] = useState<any>();
const filteredTemplatesToRender = useMemo(() => {
return templates
? templatesToRender.filter((template) => {
@ -97,13 +94,10 @@ const AlertTemplatesForm = (props: AlertTemplatesFormProps) => {
}, [templates]);
const groups = useMemo(() => {
const groups: { [key: string]: any } = {};
const groups: { [key: string]: Template[] } = {};
filteredTemplatesToRender.forEach((templateToRender) => {
if (!groups[templateToRender.group]) {
if (!includeTemplateGroup(templateToRender.group)) {
return;
}
groups[templateToRender.group] = [];
}
groups[templateToRender.group].push(templateToRender);
@ -113,11 +107,7 @@ const AlertTemplatesForm = (props: AlertTemplatesFormProps) => {
const getGroupByTemplateName = (templateName: string) => {
Object.values(groups).find((group) => {
const foundTemplate = group.find((obj: any) => {
if (obj.name == templateName) {
return obj;
}
});
const foundTemplate = group.find((obj) => obj.name === templateName);
setActiveGroup(foundTemplate?.group);
});
};
@ -210,18 +200,18 @@ const AlertTemplatesForm = (props: AlertTemplatesFormProps) => {
suggestions
</p>
</Text>
{groups[activeGroup].map((activeTemplate: any) => (
{groups[activeGroup].map((activeTemplate) => (
<div
key={activeTemplate.name}
className={cx('template-form', {
'template-form-full': true,
'autoresolve-condition': selectedTemplateName && activeTemplate.name == 'resolve_condition_template',
'autoresolve-condition': selectedTemplateName && activeTemplate.name === 'resolve_condition_template',
})}
>
<Label className={cx({ 'autoresolve-label': activeTemplate.name == 'resolve_condition_template' })}>
<Label className={cx({ 'autoresolve-label': activeTemplate.name === 'resolve_condition_template' })}>
{getLabelFromTemplateName(activeTemplate.name, activeGroup)}
</Label>
{activeTemplate.name == 'resolve_condition_template' && (
{activeTemplate.name === 'resolve_condition_template' && (
<Text type="secondary" size="small">
To activate autoresolving change integration
<Button fill="text" size="sm" onClick={handleGoToTemplateSettingsCllick}>
@ -240,7 +230,7 @@ const AlertTemplatesForm = (props: AlertTemplatesFormProps) => {
<Text type="secondary">
Press <Text keyboard>Ctrl</Text>+<Text keyboard>Space</Text> to get suggestions
</Text>
{activeGroup === 'web' && activeTemplate.name == 'web_title_template' && (
{activeGroup === 'web' && activeTemplate.name === 'web_title_template' && (
<div className={cx('web-title-message')}>
<Text type="secondary" size="small">
Please note that after changing the web title template new alert groups will be searchable by
@ -271,7 +261,7 @@ const AlertTemplatesForm = (props: AlertTemplatesFormProps) => {
<VerticalGroup>
<Label>{`${capitalCase(activeGroup)} Preview`}</Label>
<VerticalGroup style={{ width: '100%' }}>
{groups[activeGroup].map((template: any) => (
{groups[activeGroup].map((template) => (
<TemplatePreview
active={template.name === activeTemplate?.name}
key={template.name}

View file

@ -1,7 +1,7 @@
import React from 'react';
import { describe, expect, test } from '@jest/globals';
import { render, fireEvent, screen } from '@testing-library/react';
import { render, screen } from '@testing-library/react';
import Avatar from 'components/Avatar/Avatar';

View file

@ -19,7 +19,7 @@
.root_selected::before {
display: block;
content: "";
content: '';
position: absolute;
left: 0;
top: 0;

View file

@ -4,7 +4,6 @@ import React from 'react';
import { describe, expect, test } from '@jest/globals';
import { fireEvent, render, screen } from '@testing-library/react';
import '@testing-library/jest-dom';
import CardButton from 'components/CardButton/CardButton';
@ -23,11 +22,11 @@ describe('CardButton', () => {
const onClickMock = jest.fn();
render(<CardButton {...getProps(onClickMock)} />);
const rootEl = getRootBlockEl()
const rootEl = getRootBlockEl();
fireEvent.click(rootEl);
expect(rootEl.classList).toContain("root_selected")
expect(rootEl.classList).toContain('root_selected');
expect(onClickMock).toHaveBeenCalled();
});

View file

@ -26,7 +26,12 @@ const CardButton: FC<CardButtonProps> = (props) => {
}, [selected]);
return (
<Block onClick={handleClick} withBackground className={cx('root', { root_selected: selected })} data-testid='test__cardButton'>
<Block
onClick={handleClick}
withBackground
className={cx('root', { root_selected: selected })}
data-testid="test__cardButton"
>
<div className={cx('icon')}>{icon}</div>
<div className={cx('meta')}>
<VerticalGroup spacing="xs">

View file

@ -13,8 +13,8 @@ describe('Collapse', () => {
return {
label: 'Toggle',
isOpen: isOpen,
onClick: onClick
} as CollapseProps
onClick: onClick,
} as CollapseProps;
}
test('Content becomes visible on click', () => {
@ -34,7 +34,7 @@ describe('Collapse', () => {
const content = getChildrenEl();
expect(content).toBeNull();
})
});
test('Content is not collapsed for [isOpen=true]', () => {
render(<Collapse {...getProps(true)} />);

View file

@ -1,3 +0,0 @@
.root {
display: block;
}

View file

@ -2,12 +2,9 @@ import React, { FC, useCallback, useEffect, useState } from 'react';
import { SelectableValue } from '@grafana/data';
import { Button, HorizontalGroup, Icon, Select } from '@grafana/ui';
import cn from 'classnames/bind';
import Text from 'components/Text/Text';
import styles from './CursorPagination.module.css';
interface CursorPaginationProps {
current: string;
onChange: (cursor: string, direction: 'prev' | 'next') => void;
@ -18,8 +15,6 @@ interface CursorPaginationProps {
next: string;
}
const cx = cn.bind(styles);
const CursorPagination: FC<CursorPaginationProps> = (props) => {
const { current, onChange, prev, next, itemsPerPage, itemsPerPageOptions, onChangeItemsPerPage } = props;

View file

@ -1,6 +1,6 @@
import React, { ChangeEvent, FC, useCallback } from 'react';
import { Icon, Input, Button, IconButton } from '@grafana/ui';
import { Icon, Input, IconButton } from '@grafana/ui';
import cn from 'classnames/bind';
import styles from './EscalationsFilters.module.css';

View file

@ -1,3 +0,0 @@
.root {
display: block;
}

View file

@ -2,18 +2,14 @@ import React, { useCallback } from 'react';
import { Field, Form, Input, InputControl, Select, Switch, TextArea } from '@grafana/ui';
import { capitalCase } from 'change-case';
import cn from 'classnames/bind';
import { FormItem, FormItemType } from 'components/GForm/GForm.types';
import GSelect from 'containers/GSelect/GSelect';
import RemoteSelect from 'containers/RemoteSelect/RemoteSelect';
import styles from './GForm.module.css';
interface GFormProps {
form: { name: string; fields: FormItem[] };
data: any;
/* errors: { [key: string]: string }; */
onSubmit: (data: any) => void;
}
@ -21,8 +17,6 @@ const nullNormalizer = (value: string) => {
return value || null;
};
const cx = cn.bind(styles);
function renderFormControl(formItem: FormItem, register: any, control: any) {
switch (formItem.type) {
case FormItemType.Input:
@ -99,20 +93,18 @@ const GForm = (props: GFormProps) => {
return (
<Form maxWidth="none" id={form.name} defaultValues={data} onSubmit={handleSubmit}>
{({ register, errors, control }) => {
return form.fields.map((formItem: FormItem, formIndex: number) => {
return (
<Field
key={formIndex}
disabled={formItem.getDisabled ? formItem.getDisabled(data) : false}
label={formItem.label || capitalCase(formItem.name)}
invalid={!!errors[formItem.name]}
error={`${capitalCase(formItem.name)} is required`}
description={formItem.description}
>
{renderFormControl(formItem, register, control)}
</Field>
);
});
return form.fields.map((formItem: FormItem, formIndex: number) => (
<Field
key={formIndex}
disabled={formItem.getDisabled ? formItem.getDisabled(data) : false}
label={formItem.label || capitalCase(formItem.name)}
invalid={!!errors[formItem.name]}
error={`${capitalCase(formItem.name)} is required`}
description={formItem.description}
>
{renderFormControl(formItem, register, control)}
</Field>
));
}}
</Form>
);

View file

@ -1,5 +1,3 @@
import { Moment } from 'moment';
export enum FormItemType {
'Input' = 'input',
'TextArea' = 'textarea',
@ -7,13 +5,6 @@ export enum FormItemType {
'GSelect' = 'gselect',
'Switch' = 'switch',
'RemoteSelect' = 'remoteselect',
/* 'InputNumber' = 'input-number',
'Select' = 'select',
'Switch' = 'switch',
'ASelect' = 'aselect',
'JustSelect' = 'just-select',
'DatePicker' = 'datepicker', */
}
export interface FormItem {

View file

@ -1,4 +1,4 @@
import React, { FC, useState, useCallback, useRef, useEffect } from 'react';
import React, { useCallback, useRef, useEffect } from 'react';
import { LoadingPlaceholder } from '@grafana/ui';
import cn from 'classnames/bind';
@ -37,7 +37,6 @@ const GList = <T extends WithId>(props: GListProps<T>) => {
const divToScroll = selectedElement.parentElement.parentElement;
const maxScroll = Math.max(0, selectedElement.parentElement.offsetHeight - divToScroll.offsetHeight);
const minScroll = 0;
const scrollTop =
selectedElement.offsetTop -

View file

@ -1,4 +1,4 @@
import React, { FC, useState, useCallback, useMemo, ChangeEvent } from 'react';
import React, { FC, useCallback, useMemo, ChangeEvent } from 'react';
import { Pagination, Checkbox, Icon } from '@grafana/ui';
import cn from 'classnames/bind';
@ -96,7 +96,7 @@ const GTable: FC<Props> = (props) => {
const handleMasterCheckboxChange = useCallback(
(event: ChangeEvent<HTMLInputElement>) => {
const { selectedRowKeys, onChange } = rowSelection;
const { onChange } = rowSelection;
if (event.target.checked) {
const newRowSelection = data.map((item: any) => item[rowKey as string]);
onChange(newRowSelection);

View file

@ -39,8 +39,7 @@ const Modal: FC<PropsWithChildren<ModalProps>> = (props) => {
contentLabel={title}
className={cx('root')}
overlayClassName={cx('overlay')}
overlayElement={(props, contentElement) => contentElement} // render without overlay to allow body scroll
/* bodyOpenClassName={cx('body-open')} */
overlayElement={(_props, contentElement) => contentElement} // render without overlay to allow body scroll
contentElement={contentElement}
>
{children}

View file

@ -1,3 +0,0 @@
.root {
display: block;
}

View file

@ -1,14 +1,11 @@
import React, { FC, useCallback, useMemo, useRef, useEffect } from 'react';
import React, { FC, useCallback } from 'react';
import { CodeEditor, CodeEditorSuggestionItemKind, LoadingPlaceholder } from '@grafana/ui';
import cn from 'classnames/bind';
import { getPaths } from 'utils';
import { conf, language } from './jinja2';
import styles from './MonacoJinja2Editor.module.css';
declare const monaco: any;
interface MonacoJinja2EditorProps {
@ -19,8 +16,6 @@ interface MonacoJinja2EditorProps {
loading: boolean;
}
const cx = cn.bind(styles);
const PREDEFINED_TERMS = [
'grafana_oncall_link',
'integration_name',

View file

@ -1,6 +1,5 @@
import React, { FC, useCallback, useState } from 'react';
import { getLocationSrv } from '@grafana/runtime';
import { Button, Drawer, HorizontalGroup, Icon, VerticalGroup } from '@grafana/ui';
import cn from 'classnames/bind';
@ -39,10 +38,6 @@ const NewScheduleSelector: FC<NewScheduleSelectorProps> = (props) => {
<Drawer scrollableContent title="Create new schedule" onClose={onHide} closeOnMaskClick>
<div className={cx('content')}>
<VerticalGroup spacing="lg">
{/*<Text type="secondary">
Manage on-call schedules using your favourite calendar app, such as Google Calendar or Microsoft Outlook. To
schedule on-call shifts create a new calendar and use events with the teammates usernames
</Text>*/}
<Block bordered withBackground className={cx('block')}>
<HorizontalGroup justify="space-between">
<HorizontalGroup spacing="md">

View file

@ -2,7 +2,6 @@ import React, { useEffect } from 'react';
import { Button, VerticalGroup } from '@grafana/ui';
import cn from 'classnames/bind';
import { PropTypes } from 'mobx-react';
import PluginLink from 'components/PluginLink/PluginLink';
import Text from 'components/Text/Text';

View file

@ -1,7 +1,7 @@
import React, { ChangeEvent } from 'react';
import { SelectableValue } from '@grafana/data';
import { Button, Input, Select, Tooltip, IconButton } from '@grafana/ui';
import { Button, Input, Select, IconButton } from '@grafana/ui';
import cn from 'classnames/bind';
import moment from 'moment-timezone';
import { SortableElement } from 'react-sortable-hoc';
@ -11,7 +11,6 @@ import PluginLink from 'components/PluginLink/PluginLink';
import TimeRange from 'components/TimeRange/TimeRange';
import Timeline from 'components/Timeline/Timeline';
import GSelect from 'containers/GSelect/GSelect';
import RemoteSelect from 'containers/RemoteSelect/RemoteSelect';
import UserTooltip from 'containers/UserTooltip/UserTooltip';
import { WithPermissionControl } from 'containers/WithPermissionControl/WithPermissionControl';
import { prepareEscalationPolicy } from 'models/escalation_policy/escalation_policy.helpers';
@ -147,7 +146,7 @@ export class EscalationPolicy extends React.Component<EscalationPolicyProps, any
className={cx('select', 'control', 'multiSelect')}
value={notify_to_users_queue}
onChange={this._getOnChangeHandler('notify_to_users_queue')}
getOptionLabel={({ label, value }: SelectableValue) => <UserTooltip id={value} />}
getOptionLabel={({ value }: SelectableValue) => <UserTooltip id={value} />}
/>
</WithPermissionControl>
);

View file

@ -1,15 +1,9 @@
import React, { useState, useEffect } from 'react';
import React from 'react';
import { FadeTransition } from '@grafana/ui';
import cn from 'classnames/bind';
import dayjs from 'dayjs';
import { CSSTransition } from 'react-transition-group';
import styles from './ScheduleBorderedAvatar.module.scss';
import animationStyles from 'containers/Rotations/Rotations.module.css';
const cx = cn.bind(styles);
interface ScheduleBorderedAvatarProps {
@ -52,7 +46,9 @@ export default function ScheduleBorderedAvatar({
}
function renderColorPaths(colors: string[]) {
if (!colors?.length) {return null;}
if (!colors?.length) {
return null;
}
const colorSchemeList = colors;
if (colors.length === 1) {
@ -75,7 +71,7 @@ export default function ScheduleBorderedAvatar({
lastX = x;
lastY = y;
return <path d={d} stroke={colors[colorIndex]} />;
return <path key={colorIndex} d={d} stroke={colors[colorIndex]} />;
});
}
}

View file

@ -28,15 +28,3 @@
.tooltip {
width: auto;
}
/*
.tooltip__type_link {
border: 1px solid #6CCF8E;
background: #132322;
}
.tooltip__type_warning {
border: 1px solid #F8D06B;
background: #3A301E;
}
*/

View file

@ -1,6 +1,6 @@
import React, { FC, useCallback } from 'react';
import React, { FC } from 'react';
import { HorizontalGroup, VerticalGroup, Icon, IconButton, Tooltip, IconName } from '@grafana/ui';
import { HorizontalGroup, VerticalGroup, Icon, Tooltip, IconName } from '@grafana/ui';
import cn from 'classnames/bind';
import Text, { TextType } from 'components/Text/Text';
@ -25,16 +25,6 @@ const typeToColor = {
warning: 'warning',
};
const typeToBorderColor = {
link: '#6CCF8E',
warning: '#F8D06B',
};
const typeToBackgroundColor = {
link: '#132322',
warning: '#3A301E',
};
const cx = cn.bind(styles);
const ScheduleCounter: FC<ScheduleCounterProps> = (props) => {

View file

@ -25,7 +25,7 @@
.hr {
width: 100%;
margin: 0 -11px;
margin: 0 -11px;
}
.times {

View file

@ -1,6 +1,6 @@
import React, { FC } from 'react';
import { Icon, Button, HorizontalGroup, VerticalGroup } from '@grafana/ui';
import { HorizontalGroup, VerticalGroup } from '@grafana/ui';
import cn from 'classnames/bind';
import dayjs from 'dayjs';
@ -9,8 +9,6 @@ import Text from 'components/Text/Text';
import { getTzOffsetString } from 'models/timezone/timezone.helpers';
import { User } from 'models/user/user.types';
import Line from './img/line.svg';
import styles from './ScheduleUserDetails.module.css';
interface ScheduleUserDetailsProps {
@ -20,30 +18,9 @@ interface ScheduleUserDetailsProps {
const cx = cn.bind(styles);
enum UserOncallStatus {
Now = 'now',
Outside = 'outside',
Inside = 'inside',
}
const userOncallStatusToText = {
[UserOncallStatus.Now]: 'Oncall now',
[UserOncallStatus.Inside]: 'Inside working hours',
[UserOncallStatus.Outside]: 'Outside working hours',
};
const ScheduleUserDetails: FC<ScheduleUserDetailsProps> = (props) => {
const { user, currentMoment } = props;
const userStatus =
Math.random() > 0.66
? UserOncallStatus.Now
: Math.random() > 0.33
? UserOncallStatus.Inside
: UserOncallStatus.Outside;
const userMoment = currentMoment.tz(user.timezone);
const userOffsetHoursStr = getTzOffsetString(userMoment);
return (
@ -51,67 +28,12 @@ const ScheduleUserDetails: FC<ScheduleUserDetailsProps> = (props) => {
<VerticalGroup spacing="sm">
<HorizontalGroup justify="space-between">
<Avatar src={user.avatar} size="large" />
{/*<Button variant="secondary">
<HorizontalGroup spacing="sm">
<Icon name="bell" />
Push
</HorizontalGroup>
</Button>*/}
</HorizontalGroup>
<VerticalGroup spacing="sm">
<Text type="primary">{user.username}</Text>
<Text type="secondary">
{`${userMoment.tz(user.timezone).format('DD MMM, HH:mm')}`} {userOffsetHoursStr}
</Text>
{/* <div
className={cx('oncall-badge', {
[`oncall-badge__type_${userStatus}`]: true,
})}
>
{userOncallStatusToText[userStatus]}
</div>
<HorizontalGroup>
<VerticalGroup spacing="sm">
<Text type="primary">Next shift</Text>
<div className={cx('times')}>
<HorizontalGroup>
<img src={Line} />
<VerticalGroup spacing="none">
<Text type="secondary">30 apr, 00:00</Text>
<Text type="secondary">30 apr, 23:59</Text>
</VerticalGroup>
</HorizontalGroup>
</div>
</VerticalGroup>
<VerticalGroup spacing="sm">
<Text type="primary">Last shift</Text>
<div className={cx('times')}>
<HorizontalGroup>
<img src={Line} />
<VerticalGroup spacing="none">
<Text type="secondary">30 apr, 00:00</Text>
<Text type="secondary">30 apr, 23:59</Text>
</VerticalGroup>
</HorizontalGroup>
</div>
</VerticalGroup>
</HorizontalGroup>
</VerticalGroup>
<hr style={{ width: '100%' }} />
<VerticalGroup spacing="sm">
<Text type="primary">Contacts</Text>
<HorizontalGroup spacing="sm">
<Icon className={cx('icon')} name="message" />
<Text type="link">mail@grafana.com</Text>
</HorizontalGroup>
<HorizontalGroup spacing="sm">
<Icon className={cx('icon')} name="slack" />
<Text type="link">@slackid</Text>
</HorizontalGroup>
<HorizontalGroup spacing="sm">
<Icon className={cx('icon')} name="phone" />
<Text type="secondary">+39 555 449 00 00</Text>
</HorizontalGroup>*/}
</VerticalGroup>
</VerticalGroup>
</div>

View file

@ -1,4 +1,4 @@
import moment from 'moment';
import moment from 'moment-timezone';
export function optionToDateString(option: string) {
switch (option) {

View file

@ -1,8 +1,8 @@
import React, { ChangeEvent, useCallback, useMemo, useState } from 'react';
import React, { useCallback, useMemo } from 'react';
import { DatePickerWithInput, Field, HorizontalGroup, RadioButtonGroup } from '@grafana/ui';
import cn from 'classnames/bind';
import moment from 'moment';
import moment from 'moment-timezone';
import { dateStringToOption, optionToDateString } from './SchedulesFilters.helpers';
import { SchedulesFiltersType } from './SchedulesFilters.types';
@ -17,18 +17,22 @@ interface SchedulesFiltersProps {
className?: string;
}
const SchedulesFilters = (props: SchedulesFiltersProps) => {
const { value, onChange, className } = props;
const handleDateChange = useCallback((date: Date) => {
onChange({ selectedDate: moment(date).format('YYYY-MM-DD') });
}, []);
const SchedulesFilters = ({ value, onChange, className }: SchedulesFiltersProps) => {
const handleDateChange = useCallback(
(date: Date) => {
onChange({ selectedDate: moment(date).format('YYYY-MM-DD') });
},
[onChange]
);
const option = useMemo(() => dateStringToOption(value.selectedDate), [value]);
const handleOptionChange = useCallback((option: string) => {
onChange({ ...value, selectedDate: optionToDateString(option) });
}, []);
const handleOptionChange = useCallback(
(option: string) => {
onChange({ ...value, selectedDate: optionToDateString(option) });
},
[onChange, value]
);
const datePickerValue = useMemo(() => moment(value.selectedDate).toDate(), [value]);

View file

@ -1,5 +1,3 @@
import { Moment } from 'moment';
export interface SchedulesFiltersType {
selectedDate: string;
}

View file

@ -1,4 +1,4 @@
import moment from 'moment';
import moment from 'moment-timezone';
export function optionToDateString(option: string) {
switch (option) {

View file

@ -1,11 +1,10 @@
import React, { ChangeEvent, useCallback, useMemo, useState } from 'react';
import React, { ChangeEvent, useCallback } from 'react';
import { DatePickerWithInput, Field, HorizontalGroup, Icon, Input, RadioButtonGroup } from '@grafana/ui';
import { Field, HorizontalGroup, Icon, Input, RadioButtonGroup } from '@grafana/ui';
import cn from 'classnames/bind';
import { ScheduleType } from 'models/schedule/schedule.types';
import { dateStringToOption, optionToDateString } from './SchedulesFilters.helpers';
import { SchedulesFiltersType } from './SchedulesFilters.types';
import styles from './SchedulesFilters.module.css';

View file

@ -26,4 +26,4 @@
}
.copyButton {
opacity: 0;
}
}

View file

@ -2,8 +2,7 @@ import 'jest/matchMedia.ts';
import React from 'react';
import { describe, expect, test } from '@jest/globals';
import { render, fireEvent, screen } from '@testing-library/react';
import { render, screen } from '@testing-library/react';
import '@testing-library/jest-dom';
import SourceCode from './SourceCode';

View file

@ -1,6 +1,6 @@
import React, { FC } from 'react';
import { Button, Icon, IconButton } from '@grafana/ui';
import { Button, IconButton } from '@grafana/ui';
import cn from 'classnames/bind';
import CopyToClipboard from 'react-copy-to-clipboard';
@ -33,7 +33,13 @@ const SourceCode: FC<SourceCodeProps> = (props) => {
{showClipboardIconOnly ? (
<IconButton className={cx('copyIcon')} size={'lg'} name="copy" data-testid="test__copyIcon" />
) : (
<Button className={cx('copyButton')} variant="primary" size="xs" icon="copy" data-testid="test__copyIconWithText">
<Button
className={cx('copyButton')}
variant="primary"
size="xs"
icon="copy"
data-testid="test__copyIconWithText"
>
Copy
</Button>
)}

View file

@ -1,6 +1,6 @@
import React, { FC, useState, useCallback, useMemo, ChangeEvent } from 'react';
import React, { FC, useMemo } from 'react';
import { Pagination, Checkbox, Icon, VerticalGroup } from '@grafana/ui';
import { Pagination, VerticalGroup } from '@grafana/ui';
import cn from 'classnames/bind';
import Table from 'rc-table';
import { TableProps } from 'rc-table/lib/Table';
@ -34,21 +34,20 @@ export interface Props<RecordType = unknown> extends TableProps<RecordType> {
const GTable: FC<Props> = (props) => {
const { columns, data, className, pagination, loading, rowKey, expandable, ...restProps } = props;
const { page, total: numberOfPages, onChange: onNavigate } = pagination || {};
const expandableFn = useMemo(() => {
return expandable
? {
...expandable,
expandIcon: ({ expanded, record }) => {
expandIcon: ({ expanded }) => {
return (
<div className={cx('expand-icon', { [`expand-icon__expanded`]: expanded })}>
<ExpandIcon />
</div>
);
},
expandedRowClassName: (record, index) => (index % 2 === 0 ? cx('row-even') : cx('row-odd')),
expandedRowClassName: (_record, index) => (index % 2 === 0 ? cx('row-even') : cx('row-odd')),
}
: null;
}, [expandable]);
@ -61,7 +60,7 @@ const GTable: FC<Props> = (props) => {
columns={columns}
data={data}
expandable={expandableFn}
rowClassName={(record, index) => (index % 2 === 0 ? cx('row-even') : cx('row-odd'))}
rowClassName={(_record, index) => (index % 2 === 0 ? cx('row-even') : cx('row-odd'))}
{...restProps}
/>
{pagination && (

View file

@ -40,7 +40,6 @@
white-space: nowrap;
}
.keyboard {
margin: 0 0.2em;
padding: 0.15em 0.4em 0.1em;

View file

@ -69,9 +69,8 @@ const Text: TextInterface = (props) => {
const handleConfirmEdit = useCallback(() => {
setIsEditMode(false);
onTextChange(value);
}, [value]);
}, [value, onTextChange]);
const handleInputChange = useCallback((e: ChangeEvent<HTMLInputElement>) => {
setValue(e.target.value);

View file

@ -1,4 +1,4 @@
import React, { FC } from 'react';
import React from 'react';
import cn from 'classnames/bind';

View file

@ -1,4 +1,4 @@
import React, { FC } from 'react';
import React from 'react';
import cn from 'classnames/bind';
@ -15,21 +15,19 @@ export interface TimelineItemProps {
children?: any;
}
const TimelineItem: React.FC<TimelineItemProps> = (props) => {
const { className, contentClassName, children, color = '#3274D9', number } = props;
const style = { backgroundColor: color };
return (
<li className={cx('item', className)}>
{/*<Badge count={badge} style={style} showZero={false}>*/}
<div className={cx('dot')} style={{ backgroundColor: color }}>
{number}
</div>
{/*</Badge>*/}
<div className={cx('content', contentClassName)}>{children}</div>
</li>
);
};
const TimelineItem: React.FC<TimelineItemProps> = ({
className,
contentClassName,
children,
color = '#3274D9',
number,
}) => (
<li className={cx('item', className)}>
<div className={cx('dot')} style={{ backgroundColor: color }}>
{number}
</div>
<div className={cx('content', contentClassName)}>{children}</div>
</li>
);
export default TimelineItem;

View file

@ -49,8 +49,9 @@ const TimelineMarks: FC<TimelineMarksProps> = (props) => {
<div className={cx('root')}>
{debug && (
<svg version="1.1" width="100%" height="6px" xmlns="http://www.w3.org/2000/svg" className={cx('debug-scale')}>
{cuts.map((cut, index) => (
{cuts.map((_cut, index) => (
<line
key={index}
x1={`${(index * 100) / (24 * 7)}%`}
strokeWidth={1}
y1="0"

View file

@ -1,6 +1,5 @@
import React, { FC } from 'react';
import { Button, VerticalGroup } from '@grafana/ui';
import cn from 'classnames/bind';
import Block from 'components/GBlock/Block';
@ -13,7 +12,6 @@ import scheduleIcon from './icons/calendar-icon.svg';
import chatIcon from './icons/chat-icon.svg';
import escalationIcon from './icons/escalation-icon.svg';
import integrationsIcon from './icons/integration-icon.svg';
import arrowIcon from './icons/long-arrow.svg';
import styles from './Tutorial.module.css';

View file

@ -2,7 +2,7 @@ import { Item } from './UserGroups.types';
export const toPlainArray = (groups: string[][]) => {
const items: Item[] = [];
groups.forEach((group: string[], groupIndex: number) => {
groups.forEach((_group: string[], groupIndex: number) => {
items.push({
key: `group-${groupIndex}`,
type: 'group',

View file

@ -24,7 +24,7 @@
.separator::before {
display: block;
content: "";
content: '';
flex-grow: 1;
border-bottom: var(--border-medium);
height: 0;
@ -33,7 +33,7 @@
.separator::after {
display: block;
content: "";
content: '';
flex-grow: 1;
border-bottom: var(--border-medium);
height: 0;

View file

@ -1,6 +1,6 @@
import React, { useCallback, useEffect, useMemo } from 'react';
import React, { useCallback, useMemo } from 'react';
import { VerticalGroup, HorizontalGroup, IconButton, Field, Input } from '@grafana/ui';
import { VerticalGroup, HorizontalGroup, IconButton } from '@grafana/ui';
import { arrayMoveImmutable } from 'array-move';
import cn from 'classnames/bind';
import { SortableContainer, SortableElement, SortableHandle } from 'react-sortable-hoc';
@ -45,7 +45,7 @@ const UserGroups = (props: UserGroupsProps) => {
k++;
if (k === index) {
newGroups[i] = newGroups[i].filter((item, itemIndex) => itemIndex !== j);
newGroups[i] = newGroups[i].filter((_item, itemIndex) => itemIndex !== j);
onChange(newGroups.filter((group) => group.length));
return;
}

View file

@ -3,7 +3,6 @@ import React, { FC, useCallback, useMemo } from 'react';
import { Select } from '@grafana/ui';
import cn from 'classnames/bind';
import dayjs from 'dayjs';
import { get } from 'lodash-es';
import { getTzOffsetString } from 'models/timezone/timezone.helpers';
import { Timezone } from 'models/timezone/timezone.types';

View file

@ -70,6 +70,7 @@ const UsersFilters = (props: UsersFiltersProps) => {
<HorizontalGroup>
{roleOptions.map((option) => (
<Checkbox
key={option.value}
value={value.roles.includes(option.value)}
label={option.label}
onChange={onChangeRolesCallback(option.value)}

View file

@ -4,14 +4,22 @@ import cn from 'classnames/bind';
import styles from './VerticalTabsBar.module.css';
const cx = cn.bind(styles);
interface TabProps {
id: string;
children?: any;
}
export const VerticalTab: FC<TabProps> = ({ children }) => {
return <>{children}</>;
};
interface VerticalTabsBarProps {
children: Array<React.ReactElement<TabProps>> | React.ReactElement<TabProps>;
activeTab: string;
onChange: (id: string) => void;
}
const cx = cn.bind(styles);
const VerticalTabsBar = (props: VerticalTabsBarProps) => {
const { children, activeTab, onChange } = props;
@ -25,8 +33,9 @@ const VerticalTabsBar = (props: VerticalTabsBarProps) => {
<div className={cx('root')}>
{React.Children.toArray(children)
.filter(Boolean)
.map((child: React.ReactElement) => (
.map((child: React.ReactElement, idx) => (
<div
key={idx}
onClick={getClickHandler(child.props.id)}
className={cx('tab', { tab_active: activeTab === child.props.id })}
>
@ -38,12 +47,3 @@ const VerticalTabsBar = (props: VerticalTabsBarProps) => {
};
export default VerticalTabsBar;
interface TabProps {
id: string;
children?: any;
}
export const VerticalTab: FC<TabProps> = ({ children }) => {
return <>{children}</>;
};

View file

@ -1,3 +0,0 @@
.root {
display: block;
}

Some files were not shown because too many files have changed in this diff Show more