This commit is contained in:
Joey Orlando 2024-02-27 20:17:46 +00:00 committed by GitHub
commit 01ddf8b084
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
144 changed files with 6515 additions and 5938 deletions

View file

@ -20,8 +20,8 @@ steps:
- name: Sign and Package Plugin
image: node:18.16.0-buster
environment:
GRAFANA_API_KEY:
from_secret: gcom_plugin_publisher_api_key
GRAFANA_ACCESS_POLICY_TOKEN:
from_secret: cloud_access_policy_token
depends_on:
- Build Plugin
commands:
@ -30,9 +30,7 @@ steps:
- cd grafana-plugin
- yarn sign
- if [ ! -f dist/MANIFEST.txt ]; then echo "Sign failed, MANIFEST.txt not created, aborting." && exit 1; fi
- yarn ci-build:finish
- yarn ci-package
- cd ci/dist
- mv dist grafana-oncall-app
- zip -r grafana-oncall-app.zip ./grafana-oncall-app
# yamllint disable rule:line-length
- if [ -z "$DRONE_TAG" ]; then echo "No tag, skipping archive"; else cp grafana-oncall-app.zip grafana-oncall-app-${DRONE_TAG}.zip; fi
@ -57,7 +55,7 @@ steps:
environment:
DJANGO_SETTINGS_MODULE: settings.ci-test
commands:
- pip install $(grep "pre-commit" engine/requirements-dev.txt)
- pip install $(grep "pre-commit==" engine/requirements-dev.txt)
- pre-commit run isort --all-files
- pre-commit run black --all-files
- pre-commit run flake8 --all-files
@ -192,8 +190,8 @@ steps:
- name: sign and package plugin
image: node:18.16.0-buster
environment:
GRAFANA_API_KEY:
from_secret: gcom_plugin_publisher_api_key
GRAFANA_ACCESS_POLICY_TOKEN:
from_secret: cloud_access_policy_token
depends_on:
- build plugin
commands:
@ -202,9 +200,7 @@ steps:
- cd grafana-plugin
- yarn sign
- if [ ! -f dist/MANIFEST.txt ]; then echo "Sign failed, MANIFEST.txt not created, aborting." && exit 1; fi
- yarn ci-build:finish
- yarn ci-package
- cd ci/dist
- mv dist grafana-oncall-app
- zip -r grafana-oncall-app.zip ./grafana-oncall-app
# yamllint disable rule:line-length
- if [ -z "$DRONE_TAG" ]; then echo "No tag, skipping archive"; else cp grafana-oncall-app.zip grafana-oncall-app-${DRONE_TAG}.zip; fi
@ -217,7 +213,7 @@ steps:
from_secret: gcom_plugin_publisher_api_key
commands:
# yamllint disable rule:line-length
- 'curl -f -s -H "Authorization: Bearer $${GRAFANA_API_KEY}" -d "download[any][url]=https://storage.googleapis.com/grafana-oncall-app/releases/grafana-oncall-app-${DRONE_TAG}.zip" -d "download[any][md5]=$$(curl -sL https://storage.googleapis.com/grafana-oncall-app/releases/grafana-oncall-app-${DRONE_TAG}.zip | md5sum | cut -d'' '' -f1)" -d url=https://github.com/grafana/oncall/grafana-plugin https://grafana.com/api/plugins'
- 'curl -f -w "status=%{http_code}" -s -H "Authorization: Bearer $${GRAFANA_API_KEY}" -d "download[any][url]=https://storage.googleapis.com/grafana-oncall-app/releases/grafana-oncall-app-${DRONE_TAG}.zip" -d "download[any][md5]=$$(curl -sL https://storage.googleapis.com/grafana-oncall-app/releases/grafana-oncall-app-${DRONE_TAG}.zip | md5sum | cut -d'' '' -f1)" -d url=https://github.com/grafana/oncall/grafana-plugin https://grafana.com/api/plugins'
# yamllint enable rule:line-length
depends_on:
- sign and package plugin
@ -355,6 +351,7 @@ get:
path: infra/data/ci/docker_hub
kind: secret
name: docker_username
---
get:
name: password
@ -377,6 +374,15 @@ get:
path: ci/data/repo/grafana/oncall/drone
kind: secret
name: github_api_token
---
# Secret for signing plugin
get:
name: cloud_access_policy_token
path: ci/data/repo/grafana/oncall/sign_plugin
kind: secret
name: cloud_access_policy_token
---
kind: signature
hmac: b9e499a424faecd9a8f41552cc307bd3431cb0e3fac77f3ee99ce19258fc0fec
hmac: 198b7c7d2c94fc5698b22a722e7748181990207755cf1778b2290137e262518c

View file

@ -21,18 +21,21 @@ jobs:
- 9.3.16
- 9.4.13
- 9.5.7
- 10.0.2
# NOTE: don't run on > 10.0.3 until this issue is fixed. It appears that something was changed
# with the plugin-proxy that caused it to start throwing HTTP 502s from time to time
# https://raintank-corp.slack.com/archives/C01C4K8DETW/p1692279329797149
- 10.0.11
- 10.1.7
# TODO: fix issues with running e2e tests against Grafana v10.2.x and v10.3.x
# - 10.2.4
# - latest
fail-fast: false
uses: ./.github/workflows/e2e-tests.yml
with:
grafana-image-tag: ${{ matrix.grafana-image-tag }}
run-expensive-tests: true
browsers: "chromium firefox webkit"
# TODO: fix issues with some tests on firefox and webkit
# example CI build with firefox/webkit failing tests
# https://github.com/grafana/oncall/actions/runs/8022194346/job/21915964672#step:19:905
# browsers: "chromium firefox webkit"
browsers: "chromium"
secrets: inherit
post-status-to-slack:
@ -40,19 +43,22 @@ jobs:
needs: end-to-end-tests
if: always()
steps:
# Useful references
# https://stackoverflow.com/questions/59073850/github-actions-get-url-of-test-build
# https://github.com/orgs/community/discussions/26822#discussioncomment-3305794
#
- uses: slackapi/slack-github-action@v1.24.0
with:
channel-id: irm-amixr-flux
channel-id: gops-oncall-dev
# yamllint disable rule:line-length
payload: |
{
"text": "Daily e2e tests build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}",
"blocks": [
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "Daily e2e tests build result: ${{ job.status }}\n${{ github.event.pull_request.html_url || github.event.head_commit.url }}"
"text": "Daily e2e tests build result: ${{ needs.end-to-end-tests.result == 'success' && ':check:' || ':alert:' }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
}
}
]

View file

@ -37,6 +37,15 @@ jobs:
- name: Checkout
uses: actions/checkout@v3
# TODO: re-enable this when we get the docker build build-context caching working.. see other TODO comment below
# - uses: actions/setup-python@v4
# with:
# python-version: "3.11.4"
# cache: "pip"
# cache-dependency-path: |
# engine/requirements.txt
# engine/requirements-dev.txt
- name: Collect Workflow Telemetry
uses: runforesight/workflow-telemetry-action@v1
with:
@ -90,6 +99,16 @@ jobs:
push: false
tags: oncall/engine:latest
outputs: type=docker,dest=/tmp/oncall-engine.tar
# TODO: figure out how to get this to work.. this will substantially speed up building our docker image here
# because right now most time is spent building wheels for python dependencies
# (even though they rarely change).. this portion "should" work however I haven't yet figured out how to
# get the cache bind mount in engine/Dockerfile to work optionally (ie. when we don't specify
# the --build-context flag to docker build.. otherwise it fails if pip_cache is not available)
#
# references
# https://github.com/moby/buildkit/blob/master/frontend/dockerfile/docs/reference.md#run---mounttypecache
# https://stackoverflow.com/a/71846527
# build-contexts: pip_cache=/home/runner/.cache/pip
- name: Load engine Docker image on the nodes of the cluster
run: kind load image-archive --name=chart-testing /tmp/oncall-engine.tar
@ -119,7 +138,7 @@ jobs:
uses: actions/cache@v3
with:
path: "~/.cache/ms-playwright"
key: ${{ runner.os }}-playwright-${{ env.PLAYWRIGHT_VERSION }}-chromium-firefox-webkit
key: ${{ runner.os }}-playwright-${{ env.PLAYWRIGHT_VERSION }}-${{ inputs.browsers }}
# For the next two steps, use the binary directly from node_modules/.bin as opposed to npx playwright
# due to this bug (https://github.com/microsoft/playwright/issues/13188)
@ -206,6 +225,6 @@ jobs:
- uses: actions/upload-artifact@v3
if: failure()
with:
name: playwright-report
name: playwright-report-${{ inputs.grafana-image-tag }}
path: ./grafana-plugin/playwright-report/
retention-days: 30

View file

@ -127,7 +127,8 @@ jobs:
# makemigrations --check = Exit with a non-zero status if model changes are missing migrations
# and don't actually write them.
run: |
pip install -r requirements.txt -r requirements-dev.txt
pip install pip-tools
pip-sync requirements.txt requirements-dev.txt
python manage.py makemigrations --check
python manage.py lintmigrations
@ -184,7 +185,8 @@ jobs:
working-directory: engine
run: |
apt-get update && apt-get install -y netcat-traditional
pip install -r requirements.txt -r requirements-dev.txt
pip install pip-tools
pip-sync requirements.txt requirements-dev.txt
./wait_for_test_mysql_start.sh && pytest -x
unit-test-backend-postgresql-rabbitmq:
@ -233,7 +235,8 @@ jobs:
- name: Unit Test Backend
working-directory: engine
run: |
pip install -r requirements.txt -r requirements-dev.txt
pip install pip-tools
pip-sync requirements.txt requirements-dev.txt
pytest -x
unit-test-backend-sqlite-redis:
@ -272,7 +275,8 @@ jobs:
working-directory: engine
run: |
apt-get update && apt-get install -y netcat-traditional
pip install -r requirements.txt -r requirements-dev.txt
pip install pip-tools
pip-sync requirements.txt requirements-dev.txt
pytest -x
unit-test-pd-migrator:
@ -288,7 +292,8 @@ jobs:
- name: Unit Test PD Migrator
working-directory: tools/pagerduty-migrator
run: |
pip install -r requirements.txt
pip install pip-tools
pip-sync requirements.txt
pytest -x
mypy:
@ -306,17 +311,17 @@ jobs:
- name: mypy Static Type Checking
working-directory: engine
run: |
pip install -r requirements.txt -r requirements-dev.txt
pip install pip-tools
pip-sync requirements.txt requirements-dev.txt
mypy .
end-to-end-tests:
name: End to end tests
uses: ./.github/workflows/e2e-tests.yml
with:
# NOTE: don't run on > 10.0.3 until this issue is fixed. It appears that something was changed
# with the plugin-proxy that caused it to start throwing HTTP 502s from time to time
# https://raintank-corp.slack.com/archives/C01C4K8DETW/p1692279329797149
grafana-image-tag: 10.0.2
# TODO: fix issues with running e2e tests against Grafana v10.2.x and v10.3.x
grafana-image-tag: 10.1.7
# grafana-image-tag: 10.3.3
run-expensive-tests: false
browsers: "chromium"
secrets: inherit

View file

@ -5,6 +5,24 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## Unreleased
## v1.3.107 (2024-02-27)
### Added
- Add manually run task to cleanup unused integrations @mderynck ([#3941](https://github.com/grafana/oncall/pull/3941))
### Changed
- Change plugin build to use new packages instead of deprecated grafana-toolkit @maskin25 ([#3837](https://github.com/grafana/oncall/pull/3837))
- Remove explicit uWSGI and Django request size limits by @vadimkerr ([#3878](https://github.com/grafana/oncall/pull/3878))
- Migrate webhooks integration_filter to use a m2m field instead ([#3946](https://github.com/grafana/oncall/pull/3946))
### Fixed
- Fix template preview by @vadimkerr ([#3937](https://github.com/grafana/oncall/pull/3937))
## v1.3.106 (2024-02-20)
### Added
@ -20,6 +38,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Unblock slack install by skipping check chatops gateway link in OSS deployment @mderynck ([#3893](https://github.com/grafana/oncall/pull/3893))
- Fix multiple issues of alert groups table ([#3894](https://github.com/grafana/oncall/issues/3894))
- Improvements for dragging the add rotation form in Schedules ([#3904](https://github.com/grafana/oncall/pull/3904))
- Allow external-redis secret creation when the broker is rabbitmq ([#3903](https://github.com/grafana/oncall/pull/3903))
### Changed

View file

@ -28,6 +28,10 @@ DEV_HELM_FILE = $(DEV_ENV_DIR)/helm-local.yml
DEV_HELM_USER_SPECIFIC_FILE = $(DEV_ENV_DIR)/helm-local.dev.yml
ENGINE_DIR = ./engine
VENV_DIR = ./venv
REQUIREMENTS_DEV_IN = $(ENGINE_DIR)/requirements-dev.in
REQUIREMENTS_DEV_TXT = $(ENGINE_DIR)/requirements-dev.txt
REQUIREMENTS_IN = $(ENGINE_DIR)/requirements.in
REQUIREMENTS_TXT = $(ENGINE_DIR)/requirements.txt
REQUIREMENTS_ENTERPRISE_TXT = $(ENGINE_DIR)/requirements-enterprise.txt
SQLITE_DB_FILE = $(ENGINE_DIR)/oncall.db
@ -237,20 +241,29 @@ backend-debug-disable: _backend-debug-disable stop start
define backend_command
export `grep -v '^#' $(DEV_ENV_FILE) | xargs -0` && \
export BROKER_TYPE=$(BROKER_TYPE) && \
. ./venv/bin/activate && \
cd engine && \
$(1)
endef
backend-bootstrap:
pip install -U pip wheel
pip install -r $(REQUIREMENTS_TXT)
python3.11 -m venv $(VENV_DIR)
$(VENV_DIR)/bin/pip install -U pip wheel pip-tools
$(VENV_DIR)/bin/pip-sync $(REQUIREMENTS_TXT) $(REQUIREMENTS_DEV_TXT)
@if [ -f $(REQUIREMENTS_ENTERPRISE_TXT) ]; then \
pip install -r $(REQUIREMENTS_ENTERPRISE_TXT); \
$(VENV_DIR)/bin/pip install -r $(REQUIREMENTS_ENTERPRISE_TXT); \
fi
backend-migrate:
$(call backend_command,python manage.py migrate)
backend-compile-deps:
pip-compile --strip-extras $(REQUIREMENTS_IN)
pip-compile --strip-extras $(REQUIREMENTS_DEV_IN)
backend-upgrade-deps:
pip-compile --strip-extras --upgrade $(REQUIREMENTS_IN)
run-backend-server:
$(call backend_command,python manage.py runserver 0.0.0.0:8080)

View file

@ -44,7 +44,7 @@ docker_build_sub(
live_update=[
sync("./engine/", "/etc/app"),
run(
"cd /etc/app && pip install -r requirements.txt",
"cd /etc/app && pip install pip-tools && pip-sync",
trigger="./engine/requirements.txt",
),
],

View file

@ -189,13 +189,11 @@ See the `django-silk` documentation [here](https://github.com/jazzband/django-si
By default everything runs inside Docker. If you would like to run the backend services outside of Docker
(for integrating w/ PyCharm for example), follow these instructions:
1. Create a Python 3.11 virtual environment using a method of your choosing (ex.
[venv](https://docs.python.org/3.11/library/venv.html) or [pyenv-virtualenv](https://github.com/pyenv/pyenv-virtualenv)).
Make sure the virtualenv is "activated".
1. Make sure you have Python 3.11 installed.
2. `postgres` is a dependency on some of our Python dependencies (notably `psycopg2`
([docs](https://www.psycopg.org/docs/install.html#prerequisites))). Please visit
[here](https://www.postgresql.org/download/) for installation instructions.
3. `make backend-bootstrap` - installs all backend dependencies
3. `make backend-bootstrap` - will create the virtual env and install all backend dependencies
4. Modify your `.env.dev` by copying the contents of one of `.env.mysql.dev`, `.env.postgres.dev`,
or `.env.sqlite.dev` into `.env.dev` (you should exclude the `GF_` prefixed environment variables).
@ -209,6 +207,22 @@ By default everything runs inside Docker. If you would like to run the backend s
- `make run-backend-server` - runs the HTTP server
- `make run-backend-celery` - runs Celery workers
### Adding or updating Python dependencies
We are using [pip-tools](https://github.com/jazzband/pip-tools) to manage our dependencies. It helps
making builds deterministic, controlling deps (and indirect deps) upgrades (and versions consistency)
avoiding unexpected (and potentially breaking) changes.
We keep our direct deps in `requirements.in` from which we generate (through `pip-compile`) the
`requirements.txt` (where all deps are pinned). We also constrain dev (and enterprise) deps based
on our base requirements. Check [how to update deps](https://github.com/jazzband/pip-tools?tab=readme-ov-file#updating-requirements).
`pip install -r requirements.txt` will keep working (the difference is that this should never
bring additional dependencies or different versions not listed there), and when starting an env
from scratch, it would be the same as running `pip-sync`. `pip-sync` on the other hand will also
ensure to clean up any deps not listed in the requirements, keeping the env exactly as described
in `requirements.txt`.
## UI E2E Tests
We've developed a suite of "end-to-end" integration tests using [Playwright](https://playwright.dev/). These tests

View file

@ -59,9 +59,6 @@ services:
volumes:
- ./grafana-plugin:/etc/app
- node_modules_dev:/etc/app/node_modules
# https://stackoverflow.com/a/60456034
# see the explaination above that uses the $ENTERPRISE_ENGINE env var
- ${ENTERPRISE_FRONTEND:-/dev/null}:${ENTERPRISE_FRONTEND_VOLUME_MOUNT_DEST_DIR:-/tmp/empty:ro}
profiles:
- oncall_ui

View file

@ -13,10 +13,14 @@ RUN apk add bash \
mariadb-connector-c-dev \
libffi-dev \
git \
postgresql-dev
postgresql-dev
WORKDIR /etc/app
COPY ./requirements.txt ./
# TODO: figure out how to get this to work.. see comment in .github/workflows/e2e-tests.yml
# https://stackoverflow.com/a/71846527
# RUN --mount=type=cache,target=/root/.cache/pip,from=pip_cache pip install -r requirements.txt
RUN pip install -r requirements.txt
# we intentionally have two COPY commands, this is to have the requirements.txt in a separate build step
@ -47,9 +51,15 @@ ENV prometheus_multiproc_dir "/tmp/prometheus_django_metrics"
FROM base AS dev
RUN apk add sqlite mysql-client postgresql-client
# TODO: figure out how to get this to work.. see comment in .github/workflows/e2e-tests.yml
# https://stackoverflow.com/a/71846527
# RUN --mount=type=cache,target=/root/.cache/pip,from=pip_cache pip install -r requirements-dev.txt
RUN pip install -r requirements-dev.txt
FROM dev AS dev-enterprise
# TODO: figure out how to get this to work.. see comment in .github/workflows/e2e-tests.yml
# https://stackoverflow.com/a/71846527
# RUN --mount=type=cache,target=/root/.cache/pip,from=pip_cache pip install -r requirements-enterprise-docker.txt
RUN pip install -r requirements-enterprise-docker.txt
FROM base AS prod

View file

@ -7,7 +7,7 @@ from apps.api.serializers.labels import LabelsSerializerMixin
from apps.webhooks.models import Webhook, WebhookResponse
from apps.webhooks.models.webhook import PUBLIC_WEBHOOK_HTTP_METHODS, WEBHOOK_FIELD_PLACEHOLDER
from apps.webhooks.presets.preset_options import WebhookPresetOptions
from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField
from common.api_helpers.custom_fields import IntegrationFilteredByOrganizationField, TeamPrimaryKeyRelatedField
from common.api_helpers.utils import CurrentOrganizationDefault, CurrentUserDefault
from common.jinja_templater import apply_jinja_template
from common.jinja_templater.apply_jinja_template import JinjaTemplateError, JinjaTemplateWarning
@ -37,6 +37,9 @@ class WebhookSerializer(LabelsSerializerMixin, serializers.ModelSerializer):
last_response_log = serializers.SerializerMethodField()
trigger_type = serializers.CharField(allow_null=True)
trigger_type_name = serializers.SerializerMethodField()
integration_filter = IntegrationFilteredByOrganizationField(
source="filtered_integrations", many=True, required=False
)
PREFETCH_RELATED = ["labels", "labels__key", "labels__value"]

View file

@ -1,5 +1,5 @@
import json
from unittest.mock import patch
from unittest.mock import ANY, patch
import pytest
from django.urls import reverse
@ -1696,3 +1696,157 @@ def test_team_not_updated_if_not_in_data(
alert_receive_channel.refresh_from_db()
assert alert_receive_channel.team == team
def _webhook_data(webhook_id=ANY, webhook_name=ANY, webhook_url=ANY, alert_receive_channel_id=ANY):
return {
"authorization_header": None,
"data": None,
"forward_all": True,
"headers": None,
"http_method": "POST",
"id": webhook_id,
"integration_filter": [alert_receive_channel_id],
"is_legacy": False,
"is_webhook_enabled": True,
"labels": [],
"last_response_log": {
"content": "",
"event_data": "",
"request_data": "",
"request_headers": "",
"request_trigger": "",
"status_code": None,
"timestamp": None,
"url": "",
},
"name": webhook_name,
"password": None,
"preset": None,
"team": None,
"trigger_template": None,
"trigger_type": "0",
"trigger_type_name": "Escalation step",
"url": webhook_url,
"username": None,
}
@pytest.mark.django_db
def test_alert_receive_channel_webhooks_get(
make_organization_and_user_with_plugin_token,
make_alert_receive_channel,
make_custom_webhook,
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
alert_receive_channel = make_alert_receive_channel(organization)
webhook = make_custom_webhook(organization, is_from_connected_integration=True)
webhook.filtered_integrations.set([alert_receive_channel])
# create 2 webhooks that are not connected to the integration
make_custom_webhook(organization)
webhook2 = make_custom_webhook(organization, is_from_connected_integration=False)
webhook2.filtered_integrations.set([alert_receive_channel])
client = APIClient()
url = reverse(
"api-internal:alert_receive_channel-webhooks-get", kwargs={"pk": alert_receive_channel.public_primary_key}
)
response = client.get(url, **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
assert response.json() == [
_webhook_data(
webhook_id=webhook.public_primary_key,
alert_receive_channel_id=alert_receive_channel.public_primary_key,
)
]
@pytest.mark.django_db
def test_alert_receive_channel_webhooks_post(
make_organization_and_user_with_plugin_token,
make_alert_receive_channel,
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
alert_receive_channel = make_alert_receive_channel(organization)
client = APIClient()
url = reverse(
"api-internal:alert_receive_channel-webhooks-get", kwargs={"pk": alert_receive_channel.public_primary_key}
)
data = {
"name": None,
"enabled": True,
"url": "http://example.com/",
"http_method": "POST",
"trigger_type": "0",
"trigger_template": None,
}
response = client.post(url, data, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_201_CREATED
assert response.json() == _webhook_data(
webhook_url="http://example.com/",
alert_receive_channel_id=alert_receive_channel.public_primary_key,
)
assert alert_receive_channel.webhooks.get().is_from_connected_integration is True
@pytest.mark.django_db
def test_alert_receive_channel_webhooks_put(
make_organization_and_user_with_plugin_token,
make_alert_receive_channel,
make_custom_webhook,
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
alert_receive_channel = make_alert_receive_channel(organization)
webhook = make_custom_webhook(organization, is_from_connected_integration=True)
webhook.filtered_integrations.set([alert_receive_channel])
client = APIClient()
url = reverse(
"api-internal:alert_receive_channel-webhooks-put",
kwargs={"pk": alert_receive_channel.public_primary_key, "webhook_id": webhook.public_primary_key},
)
data = _webhook_data(
webhook_id=webhook.public_primary_key,
webhook_name="Test",
webhook_url="http://example.com/",
alert_receive_channel_id=alert_receive_channel.public_primary_key,
)
response = client.put(url, data, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
webhook.refresh_from_db()
assert webhook.url == "http://example.com/"
@pytest.mark.django_db
def test_alert_receive_channel_webhooks_delete(
make_organization_and_user_with_plugin_token,
make_alert_receive_channel,
make_custom_webhook,
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
alert_receive_channel = make_alert_receive_channel(organization)
webhook = make_custom_webhook(organization, is_from_connected_integration=True)
webhook.filtered_integrations.set([alert_receive_channel])
client = APIClient()
url = reverse(
"api-internal:alert_receive_channel-webhooks-put",
kwargs={"pk": alert_receive_channel.public_primary_key, "webhook_id": webhook.public_primary_key},
)
response = client.delete(url, **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_204_NO_CONTENT
webhook.refresh_from_db()
assert webhook.deleted_at is not None
assert alert_receive_channel.webhooks.count() == 0

View file

@ -61,7 +61,7 @@ def test_create_webhook_from_preset(
"forward_all": True,
"headers": None,
"http_method": "GET",
"integration_filter": None,
"integration_filter": [],
"is_webhook_enabled": True,
"labels": [],
"is_legacy": False,

View file

@ -32,8 +32,11 @@ def webhook_internal_api_setup(make_organization_and_user_with_plugin_token, mak
@pytest.mark.django_db
def test_get_list_webhooks(webhook_internal_api_setup, make_user_auth_headers):
def test_get_list_webhooks(webhook_internal_api_setup, make_custom_webhook, make_user_auth_headers):
user, token, webhook = webhook_internal_api_setup
# connected integration webhooks are not included
make_custom_webhook(organization=user.organization, is_from_connected_integration=True)
client = APIClient()
url = reverse("api-internal:webhooks-list")
@ -50,7 +53,7 @@ def test_get_list_webhooks(webhook_internal_api_setup, make_user_auth_headers):
"forward_all": False,
"headers": None,
"http_method": "POST",
"integration_filter": None,
"integration_filter": [],
"is_webhook_enabled": True,
"labels": [],
"is_legacy": False,
@ -94,7 +97,7 @@ def test_get_detail_webhook(webhook_internal_api_setup, make_user_auth_headers):
"forward_all": False,
"headers": None,
"http_method": "POST",
"integration_filter": None,
"integration_filter": [],
"is_webhook_enabled": True,
"labels": [],
"is_legacy": False,
@ -143,7 +146,7 @@ def test_create_webhook(webhook_internal_api_setup, make_user_auth_headers):
"forward_all": True,
"headers": None,
"http_method": "POST",
"integration_filter": None,
"integration_filter": [],
"is_webhook_enabled": True,
"labels": [],
"is_legacy": False,
@ -204,7 +207,7 @@ def test_create_valid_templated_field(webhook_internal_api_setup, make_user_auth
"headers": None,
"data": None,
"http_method": "POST",
"integration_filter": None,
"integration_filter": [],
"is_webhook_enabled": True,
"labels": [],
"is_legacy": False,
@ -288,6 +291,70 @@ def test_delete_webhook(webhook_internal_api_setup, make_user_auth_headers):
assert response.status_code == status.HTTP_204_NO_CONTENT
@pytest.mark.django_db
def test_webhook_integration_filter(webhook_internal_api_setup, make_alert_receive_channel, make_user_auth_headers):
user, token, webhook = webhook_internal_api_setup
alert_receive_channel_1 = make_alert_receive_channel(user.organization)
alert_receive_channel_2 = make_alert_receive_channel(user.organization)
client = APIClient()
# create webhook setting integrations filter
url = reverse("api-internal:webhooks-list")
data = {
"name": "the_webhook",
"url": TEST_URL,
"trigger_type": Webhook.TRIGGER_ALERT_GROUP_CREATED,
"http_method": "POST",
"team": None,
"integration_filter": [alert_receive_channel_1.public_primary_key],
}
response = client.post(url, data, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_201_CREATED
webhook = Webhook.objects.get(public_primary_key=response.json()["id"])
assert list(webhook.filtered_integrations.all()) == [alert_receive_channel_1]
assert response.json()["integration_filter"] == [alert_receive_channel_1.public_primary_key]
# update filter
url = reverse("api-internal:webhooks-detail", kwargs={"pk": webhook.public_primary_key})
data = {
"name": "github_button_updated",
"url": "https://github.com/",
"trigger_type": Webhook.TRIGGER_ALERT_GROUP_CREATED,
"http_method": "POST",
"team": None,
"integration_filter": [alert_receive_channel_1.public_primary_key, alert_receive_channel_2.public_primary_key],
}
response = client.put(
url, data=json.dumps(data), content_type="application/json", **make_user_auth_headers(user, token)
)
webhook.refresh_from_db()
assert response.status_code == status.HTTP_200_OK
assert list(webhook.filtered_integrations.all()) == [alert_receive_channel_1, alert_receive_channel_2]
assert response.json()["integration_filter"] == [
alert_receive_channel_1.public_primary_key,
alert_receive_channel_2.public_primary_key,
]
# clear filter
url = reverse("api-internal:webhooks-detail", kwargs={"pk": webhook.public_primary_key})
data = {
"name": "github_button_updated",
"url": "https://github.com/",
"trigger_type": Webhook.TRIGGER_ALERT_GROUP_CREATED,
"http_method": "POST",
"team": None,
"integration_filter": [],
}
response = client.put(
url, data=json.dumps(data), content_type="application/json", **make_user_auth_headers(user, token)
)
webhook.refresh_from_db()
assert response.status_code == status.HTTP_200_OK
assert list(webhook.filtered_integrations.all()) == []
assert response.json()["integration_filter"] == []
@pytest.mark.django_db
@pytest.mark.parametrize(
"role,expected_status",
@ -585,7 +652,7 @@ def test_webhook_field_masking(webhook_internal_api_setup, make_user_auth_header
"forward_all": True,
"headers": None,
"http_method": "POST",
"integration_filter": None,
"integration_filter": [],
"is_webhook_enabled": True,
"labels": [],
"is_legacy": False,
@ -645,7 +712,7 @@ def test_webhook_copy(webhook_internal_api_setup, make_user_auth_headers):
"forward_all": True,
"headers": None,
"http_method": "POST",
"integration_filter": None,
"integration_filter": [],
"is_webhook_enabled": True,
"labels": [],
"is_legacy": False,
@ -867,7 +934,7 @@ def test_create_webhook_with_labels(
"forward_all": True,
"headers": None,
"http_method": "POST",
"integration_filter": None,
"integration_filter": [],
"is_webhook_enabled": True,
"is_legacy": False,
"last_response_log": {

View file

@ -1,5 +1,6 @@
import typing
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django_filters import rest_framework as filters
from django_filters.rest_framework import DjangoFilterBackend
@ -7,6 +8,7 @@ from drf_spectacular.plumbing import resolve_type_hint
from drf_spectacular.utils import PolymorphicProxySerializer, extend_schema, extend_schema_view, inline_serializer
from rest_framework import serializers, status
from rest_framework.decorators import action
from rest_framework.exceptions import NotFound
from rest_framework.filters import SearchFilter
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
@ -22,6 +24,7 @@ from apps.api.serializers.alert_receive_channel import (
AlertReceiveChannelUpdateSerializer,
FilterAlertReceiveChannelSerializer,
)
from apps.api.serializers.webhook import WebhookSerializer
from apps.api.throttlers import DemoAlertThrottler
from apps.api.views.labels import schedule_update_label_cache
from apps.auth_token.auth import PluginAuthentication
@ -148,6 +151,10 @@ class AlertReceiveChannelView(
"connect_contact_point": [RBACPermission.Permissions.INTEGRATIONS_WRITE],
"create_contact_point": [RBACPermission.Permissions.INTEGRATIONS_WRITE],
"disconnect_contact_point": [RBACPermission.Permissions.INTEGRATIONS_WRITE],
"webhooks_get": [RBACPermission.Permissions.INTEGRATIONS_READ],
"webhooks_post": [RBACPermission.Permissions.INTEGRATIONS_WRITE],
"webhooks_put": [RBACPermission.Permissions.INTEGRATIONS_WRITE],
"webhooks_delete": [RBACPermission.Permissions.INTEGRATIONS_WRITE],
}
def perform_update(self, serializer):
@ -622,3 +629,50 @@ class AlertReceiveChannelView(
if not disconnected:
raise BadRequest(detail=error)
return Response(status=status.HTTP_200_OK)
@extend_schema(request=None, responses=WebhookSerializer(many=True))
@action(detail=True, methods=["get"], url_path="webhooks")
def webhooks_get(self, request, pk):
instance = self.get_object()
return Response(
WebhookSerializer(
instance.webhooks.filter(is_from_connected_integration=True),
many=True,
context={"request": request},
).data
)
@extend_schema(request=WebhookSerializer, responses=WebhookSerializer)
@webhooks_get.mapping.post
# https://www.django-rest-framework.org/api-guide/viewsets/#routing-additional-http-methods-for-extra-actions
def webhooks_post(self, request, pk):
instance = self.get_object()
serializer = WebhookSerializer(data=request.data, context={"request": request})
serializer.is_valid(raise_exception=True)
serializer.save(filtered_integrations=[instance], is_from_connected_integration=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
@extend_schema(request=WebhookSerializer, responses=WebhookSerializer)
@action(detail=True, methods=["put"], url_path=r"webhooks/(?P<webhook_id>\w+)")
def webhooks_put(self, request, pk, webhook_id):
instance = self.get_object()
try:
webhook = instance.webhooks.get(is_from_connected_integration=True, public_primary_key=webhook_id)
except ObjectDoesNotExist:
raise NotFound
serializer = WebhookSerializer(webhook, data=request.data, context={"request": request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
@extend_schema(request=None, responses=None)
@webhooks_put.mapping.delete
# https://www.django-rest-framework.org/api-guide/viewsets/#routing-additional-http-methods-for-extra-actions
def webhooks_delete(self, request, pk, webhook_id):
    """Delete a connected-integration webhook belonging to this integration.

    Returns 404 if the webhook is missing, not attached to this integration,
    or not a connected-integration webhook.
    """
    integration = self.get_object()
    try:
        webhook = integration.webhooks.get(
            is_from_connected_integration=True,
            public_primary_key=webhook_id,
        )
    except ObjectDoesNotExist:
        raise NotFound
    webhook.delete()
    return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -94,6 +94,7 @@ class WebhooksView(TeamFilteringMixin, PublicPrimaryKeyMixin[Webhook], ModelView
def get_queryset(self, ignore_filtering_by_available_teams=False):
queryset = Webhook.objects.filter(
organization=self.request.auth.organization,
is_from_connected_integration=False,
)
if not ignore_filtering_by_available_teams:
queryset = queryset.filter(*self.available_teams_lookup_args).distinct()

View file

@ -4,6 +4,7 @@ from celery.utils.log import get_task_logger
from django.conf import settings
from django.utils import timezone
from apps.alerts.models import AlertReceiveChannel
from apps.grafana_plugin.helpers import GcomAPIClient
from apps.grafana_plugin.helpers.client import GrafanaAPIClient
from apps.grafana_plugin.helpers.gcom import get_active_instance_ids, get_deleted_instance_ids, get_stack_regions
@ -139,3 +140,26 @@ def sync_team_members_for_organization_async(organization_pk):
grafana_api_client = GrafanaAPIClient(api_url=organization.grafana_url, api_token=organization.api_token)
sync_team_members(grafana_api_client, organization)
@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), max_retries=1)
def cleanup_empty_deleted_integrations(organization_pk, dry_run=True):
    """Hard-delete integrations in an organization that are both soft-deleted and
    have no alert groups.

    :param organization_pk: primary key of the organization to clean up
    :param dry_run: when True (the default), only log what would be deleted
    """
    try:
        organization = Organization.objects.get(pk=organization_pk)
    except Organization.DoesNotExist:
        logger.info(f"Organization {organization_pk} was not found")
        return
    integrations_qs = AlertReceiveChannel.objects_with_deleted.filter(
        organization=organization, deleted_at__isnull=False, alert_groups=None
    ).distinct()
    # use .count() instead of len() so we don't materialize the whole queryset just to log a number
    logger.info(
        f"Found count={integrations_qs.count()} integrations in org={organization.public_primary_key} "
        f"that are both empty and deleted"
    )
    for integration in integrations_qs:
        logger.info(
            f"Deleting integration ppk={integration.public_primary_key} "
            f"in organization={organization.stack_slug} dry_run={dry_run}"
        )
        if not dry_run:
            # hard_delete permanently removes the row (bypasses the soft-delete manager)
            integration.hard_delete()

View file

@ -5,7 +5,8 @@ from django.conf import settings
from django.test.utils import override_settings
from django.utils import timezone
from apps.grafana_plugin.tasks.sync import run_organization_sync
from apps.alerts.models import AlertReceiveChannel
from apps.grafana_plugin.tasks.sync import cleanup_empty_deleted_integrations, run_organization_sync
class SyncOrganization(object):
@ -100,3 +101,65 @@ def test_sync_organization_skip_cloud(
assert test_client.called and not syncer.called and not syncer.org
syncer.reset()
test_client.reset()
def create_test_integrations_for_cleanup(make_organization, make_alert_receive_channel, make_alert_group):
    """Build one org with four integrations covering each (deleted x has-alert-groups) case.

    Returns (org, active_with_alerts, active_empty, deleted_with_alerts, deleted_empty);
    only the last one qualifies for cleanup.
    """
    org = make_organization()
    channel_active, channel_active_empty, channel_deleted, channel_deleted_empty = (
        make_alert_receive_channel(organization=org) for _ in range(4)
    )
    # give alert groups to one active and one soon-to-be-deleted channel
    make_alert_group(alert_receive_channel=channel_active)
    make_alert_group(alert_receive_channel=channel_deleted)
    # soft-delete two channels (one with alert groups, one without)
    channel_deleted.delete()
    channel_deleted_empty.delete()
    return org, channel_active, channel_active_empty, channel_deleted, channel_deleted_empty
@pytest.mark.django_db
@pytest.mark.parametrize(
    "dry_run, channel1_exists, channel2_exists, channel3_exists, channel4_exists",
    [
        (True, True, True, True, True),
        (False, True, True, True, False),
    ],
)
def test_cleanup_empty_deleted_integrations_test_run(
    make_organization,
    make_alert_receive_channel,
    make_alert_group,
    dry_run,
    channel1_exists,
    channel2_exists,
    channel3_exists,
    channel4_exists,
):
    """Only the integration that is both soft-deleted and empty may be removed,
    only when dry_run is False, and only within the targeted organization."""
    org1, *org1_channels = create_test_integrations_for_cleanup(
        make_organization, make_alert_receive_channel, make_alert_group
    )
    org2, *org2_channels = create_test_integrations_for_cleanup(
        make_organization, make_alert_receive_channel, make_alert_group
    )
    for org in (org1, org2):
        assert AlertReceiveChannel.objects_with_deleted.filter(organization=org).count() == 4

    cleanup_empty_deleted_integrations(org1.pk, dry_run)

    expected_existence = [channel1_exists, channel2_exists, channel3_exists, channel4_exists]
    for channel, should_exist in zip(org1_channels, expected_existence):
        assert AlertReceiveChannel.objects_with_deleted.filter(pk=channel.pk).exists() == should_exist
    # Org 2 should always be unaffected
    for channel in org2_channels:
        assert AlertReceiveChannel.objects_with_deleted.filter(pk=channel.pk).exists()

View file

@ -38,44 +38,6 @@ def setup_failing_redis_cache(settings):
}
@pytest.mark.django_db
def test_integration_json_data_too_big(settings, make_organization_and_user, make_alert_receive_channel):
    """A JSON integration payload larger than DATA_UPLOAD_MAX_MEMORY_SIZE is rejected with 400."""
    # shrink the limit so a tiny payload already exceeds it
    settings.DATA_UPLOAD_MAX_MEMORY_SIZE = 50
    organization, user = make_organization_and_user()
    alert_receive_channel = make_alert_receive_channel(
        organization=organization,
        author=user,
        integration=AlertReceiveChannel.INTEGRATION_ALERTMANAGER,
    )
    client = APIClient()
    url = reverse("integrations:alertmanager", kwargs={"alert_channel_key": alert_receive_channel.token})
    # the value alone equals the limit, so the serialized body is guaranteed to exceed it
    data = {"value": "a" * settings.DATA_UPLOAD_MAX_MEMORY_SIZE}
    response = client.post(url, data, format="json")
    assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.django_db
def test_integration_form_data_too_big(settings, make_organization_and_user, make_alert_receive_channel):
    """A form-encoded integration payload larger than DATA_UPLOAD_MAX_MEMORY_SIZE is rejected with 400."""
    # shrink the limit so a tiny payload already exceeds it
    settings.DATA_UPLOAD_MAX_MEMORY_SIZE = 50
    organization, user = make_organization_and_user()
    alert_receive_channel = make_alert_receive_channel(
        organization=organization,
        author=user,
        integration=AlertReceiveChannel.INTEGRATION_ALERTMANAGER,
    )
    client = APIClient()
    url = reverse("integrations:alertmanager", kwargs={"alert_channel_key": alert_receive_channel.token})
    # the value alone equals the limit, so the encoded body is guaranteed to exceed it
    data = {"value": "a" * settings.DATA_UPLOAD_MAX_MEMORY_SIZE}
    response = client.post(url, data, content_type="application/x-www-form-urlencoded")
    assert response.status_code == status.HTTP_400_BAD_REQUEST
@patch("apps.integrations.views.create_alert")
@pytest.mark.parametrize(
"integration_type",

View file

@ -3,10 +3,9 @@ from collections import defaultdict
from rest_framework import fields, serializers
from rest_framework.validators import UniqueTogetherValidator
from apps.alerts.models import AlertReceiveChannel
from apps.webhooks.models import Webhook, WebhookResponse
from apps.webhooks.models.webhook import PUBLIC_WEBHOOK_HTTP_METHODS, WEBHOOK_FIELD_PLACEHOLDER
from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField
from common.api_helpers.custom_fields import IntegrationFilteredByOrganizationField, TeamPrimaryKeyRelatedField
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.utils import CurrentOrganizationDefault, CurrentTeamDefault, CurrentUserDefault
from common.jinja_templater import apply_jinja_template
@ -54,6 +53,9 @@ class WebhookCreateSerializer(serializers.ModelSerializer):
team = TeamPrimaryKeyRelatedField(allow_null=True, default=CurrentTeamDefault())
user = serializers.HiddenField(default=CurrentUserDefault())
trigger_type = WebhookTriggerTypeField()
integration_filter = IntegrationFilteredByOrganizationField(
source="filtered_integrations", many=True, required=False
)
class Meta:
model = Webhook
@ -88,7 +90,6 @@ class WebhookCreateSerializer(serializers.ModelSerializer):
"headers": {"required": False, "allow_null": True, "allow_blank": True},
"data": {"required": False, "allow_null": True, "allow_blank": True},
"forward_all": {"required": False, "allow_null": False},
"integration_filter": {"required": False, "allow_null": True},
}
validators = [UniqueTogetherValidator(queryset=Webhook.objects.all(), fields=["name", "organization"])]
@ -99,6 +100,8 @@ class WebhookCreateSerializer(serializers.ModelSerializer):
result["password"] = WEBHOOK_FIELD_PLACEHOLDER
if instance.authorization_header:
result["authorization_header"] = WEBHOOK_FIELD_PLACEHOLDER
if instance.filtered_integrations.count() == 0:
result["integration_filter"] = None
return result
def to_internal_value(self, data):
@ -107,6 +110,8 @@ class WebhookCreateSerializer(serializers.ModelSerializer):
data["password"] = webhook.password
if data.get("authorization_header") == WEBHOOK_FIELD_PLACEHOLDER:
data["authorization_header"] = webhook.authorization_header
if not data.get("integration_filter"):
data["integration_filter"] = []
return super().to_internal_value(data)
def _validate_template_field(self, template):
@ -149,17 +154,6 @@ class WebhookCreateSerializer(serializers.ModelSerializer):
raise serializers.ValidationError(f"Must be one of {PUBLIC_WEBHOOK_HTTP_METHODS}")
return http_method
def validate_integration_filter(self, integration_filter):
    """Validate that integration_filter is a list of public primary keys that all
    belong to integrations in the requesting organization."""
    if integration_filter:
        # must be a list of ppk strings, not a scalar or mapping
        if type(integration_filter) is not list:
            raise serializers.ValidationError(INTEGRATION_FILTER_MESSAGE)
        integrations = AlertReceiveChannel.objects.filter(
            organization=self.context["request"].auth.organization, public_primary_key__in=integration_filter
        )
        # any unknown, foreign-org, or duplicated key makes the counts differ
        if len(integrations) != len(integration_filter):
            raise serializers.ValidationError(INTEGRATION_FILTER_MESSAGE)
    return integration_filter
def validate_preset(self, preset):
raise serializers.ValidationError(PRESET_VALIDATION_MESSAGE)
@ -185,5 +179,4 @@ class WebhookUpdateSerializer(WebhookCreateSerializer):
"data": {"required": False, "allow_null": True, "allow_blank": True},
"forward_all": {"required": False, "allow_null": False},
"http_method": {"required": False, "allow_null": False, "allow_blank": False},
"integration_filter": {"required": False, "allow_null": True},
}

View file

@ -37,7 +37,7 @@ def test_get_custom_actions(make_organization_and_user_with_token, make_custom_w
"headers": custom_action.headers,
"http_method": custom_action.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[custom_action.trigger_type],
"integration_filter": custom_action.integration_filter,
"integration_filter": [i.public_primary_key for i in custom_action.filtered_integrations.all()] or None,
}
],
"current_page_number": 1,
@ -83,7 +83,7 @@ def test_get_custom_actions_filter_by_name(
"headers": custom_action.headers,
"http_method": custom_action.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[custom_action.trigger_type],
"integration_filter": custom_action.integration_filter,
"integration_filter": [i.public_primary_key for i in custom_action.filtered_integrations.all()] or None,
}
],
"current_page_number": 1,
@ -152,7 +152,7 @@ def test_get_custom_action(
"headers": custom_action.headers,
"http_method": custom_action.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[custom_action.trigger_type],
"integration_filter": custom_action.integration_filter,
"integration_filter": [i.public_primary_key for i in custom_action.filtered_integrations.all()] or None,
}
assert response.status_code == status.HTTP_200_OK
@ -217,7 +217,7 @@ def test_create_custom_action(make_organization_and_user_with_token, data):
"headers": custom_action.headers,
"http_method": custom_action.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[custom_action.trigger_type],
"integration_filter": custom_action.integration_filter,
"integration_filter": [i.public_primary_key for i in custom_action.filtered_integrations.all()] or None,
}
assert response.status_code == status.HTTP_201_CREATED
@ -260,7 +260,7 @@ def test_create_custom_action_nested_data(make_organization_and_user_with_token)
"headers": custom_action.headers,
"http_method": custom_action.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[custom_action.trigger_type],
"integration_filter": custom_action.integration_filter,
"integration_filter": [i.public_primary_key for i in custom_action.filtered_integrations.all()] or None,
}
assert response.status_code == status.HTTP_201_CREATED
@ -303,7 +303,7 @@ def test_create_custom_action_valid_after_render(make_organization_and_user_with
"headers": custom_action.headers,
"http_method": custom_action.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[custom_action.trigger_type],
"integration_filter": custom_action.integration_filter,
"integration_filter": [i.public_primary_key for i in custom_action.filtered_integrations.all()] or None,
}
assert response.status_code == status.HTTP_201_CREATED
@ -346,7 +346,7 @@ def test_create_custom_action_valid_after_render_use_all_data(make_organization_
"headers": custom_action.headers,
"http_method": custom_action.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[custom_action.trigger_type],
"integration_filter": custom_action.integration_filter,
"integration_filter": [i.public_primary_key for i in custom_action.filtered_integrations.all()] or None,
}
assert response.status_code == status.HTTP_201_CREATED
@ -419,7 +419,7 @@ def test_update_custom_action(
"headers": custom_action.headers,
"http_method": custom_action.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[custom_action.trigger_type],
"integration_filter": custom_action.integration_filter,
"integration_filter": [i.public_primary_key for i in custom_action.filtered_integrations.all()] or None,
}
assert response.status_code == status.HTTP_200_OK

View file

@ -26,7 +26,7 @@ def _get_expected_result(webhook):
"headers": webhook.headers,
"http_method": webhook.http_method,
"trigger_type": Webhook.PUBLIC_TRIGGER_TYPES_MAP[webhook.trigger_type],
"integration_filter": webhook.integration_filter,
"integration_filter": [i.public_primary_key for i in webhook.filtered_integrations.all()] or None,
"preset": webhook.preset,
}
@ -37,6 +37,8 @@ def test_get_webhooks(make_organization_and_user_with_token, make_custom_webhook
client = APIClient()
webhook = make_custom_webhook(organization=organization)
# connected integration webhooks are not included
make_custom_webhook(organization=organization, is_from_connected_integration=True)
url = reverse("api-public:webhooks-list")
@ -193,7 +195,7 @@ def test_create_webhook_optional_fields(make_organization_and_user_with_token, o
"headers": optional_value,
"forward_all": True,
"is_webhook_enabled": True,
"integration_filter": optional_value,
"integration_filter": None,
}
response = client.post(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
@ -318,6 +320,7 @@ def test_get_webhook_responses(
@pytest.mark.django_db
def test_webhook_validate_integration_filters(
make_organization,
make_organization_and_user_with_token,
make_custom_webhook,
make_alert_receive_channel,
@ -325,10 +328,14 @@ def test_webhook_validate_integration_filters(
organization, user, token = make_organization_and_user_with_token()
alert_receive_channel = make_alert_receive_channel(organization)
webhook = make_custom_webhook(organization=organization)
url = reverse("api-public:webhooks-detail", kwargs={"pk": webhook.public_primary_key})
data = {"integration_filter": alert_receive_channel.public_primary_key}
other_organization = make_organization()
other_alert_receive_channel = make_alert_receive_channel(other_organization)
url = reverse("api-public:webhooks-detail", kwargs={"pk": webhook.public_primary_key})
client = APIClient()
data = {"integration_filter": alert_receive_channel.public_primary_key}
response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == 400
@ -336,7 +343,10 @@ def test_webhook_validate_integration_filters(
response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == 400
data["integration_filter"] = [alert_receive_channel.public_primary_key, alert_receive_channel.public_primary_key]
data["integration_filter"] = [
alert_receive_channel.public_primary_key,
other_alert_receive_channel.public_primary_key,
]
response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == 400
@ -345,21 +355,21 @@ def test_webhook_validate_integration_filters(
webhook.refresh_from_db()
assert response.status_code == 200
assert response.data["integration_filter"] == data["integration_filter"]
assert webhook.integration_filter == data["integration_filter"]
assert list(webhook.filtered_integrations.all()) == [alert_receive_channel]
data["integration_filter"] = []
response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
webhook.refresh_from_db()
assert response.status_code == 200
assert response.data["integration_filter"] == data["integration_filter"]
assert webhook.integration_filter == data["integration_filter"]
assert response.data["integration_filter"] is None
assert list(webhook.filtered_integrations.all()) == []
data["integration_filter"] = None
response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
webhook.refresh_from_db()
assert response.status_code == 200
assert response.data["integration_filter"] == data["integration_filter"]
assert webhook.integration_filter == data["integration_filter"]
assert response.data["integration_filter"] is None
assert list(webhook.filtered_integrations.all()) == []
@pytest.mark.django_db

View file

@ -34,7 +34,10 @@ class WebhooksView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet):
def get_queryset(self):
webhook_name = self.request.query_params.get("name", None)
queryset = Webhook.objects.filter(organization=self.request.auth.organization)
queryset = Webhook.objects.filter(
organization=self.request.auth.organization,
is_from_connected_integration=False,
)
if webhook_name:
queryset = queryset.filter(name=webhook_name)

View file

@ -0,0 +1,31 @@
# Generated by Django 4.2.10 on 2024-02-21 18:28
from django.db import migrations, models
def migrate_integration_filter_to_filtered_integrations(apps, schema_editor):
    """Backfill the new filtered_integrations M2M from the legacy JSON
    integration_filter field (a list of integration public primary keys)."""
    AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel")
    Webhook = apps.get_model("webhooks", "Webhook")
    # only webhooks that actually set a legacy filter need migrating
    for webhook in Webhook.objects.filter(integration_filter__isnull=False):
        matching_integrations = AlertReceiveChannel.objects.filter(
            public_primary_key__in=webhook.integration_filter
        )
        webhook.filtered_integrations.set(matching_integrations)
class Migration(migrations.Migration):
    # Adds the Webhook.filtered_integrations M2M and backfills it from the legacy
    # JSON integration_filter field; the data migration's reverse is a no-op.

    dependencies = [
        ('alerts', '0045_escalationpolicy_notify_to_team_members_and_more'),
        ('webhooks', '0013_alter_webhook_trigger_type_and_more'),
    ]

    operations = [
        migrations.AddField(
            model_name='webhook',
            name='filtered_integrations',
            field=models.ManyToManyField(related_name='webhooks', to='alerts.alertreceivechannel'),
        ),
        # data migration: copy integration_filter ppk lists into the new M2M
        migrations.RunPython(migrate_integration_filter_to_filtered_integrations, migrations.RunPython.noop),
    ]

View file

@ -0,0 +1,18 @@
# Generated by Django 4.2.10 on 2024-02-22 17:47
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the Webhook.is_from_connected_integration flag (nullable, default False),
    # used elsewhere to exclude integration-managed webhooks from the public webhook APIs.

    dependencies = [
        ('webhooks', '0014_webhook_filtered_integrations'),
    ]

    operations = [
        migrations.AddField(
            model_name='webhook',
            name='is_from_connected_integration',
            field=models.BooleanField(default=False, null=True),
        ),
    ]

View file

@ -152,10 +152,14 @@ class Webhook(models.Model):
http_method = models.CharField(max_length=32, default="POST", null=True)
trigger_type = models.IntegerField(choices=TRIGGER_TYPES, default=TRIGGER_ESCALATION_STEP, null=True)
is_webhook_enabled = models.BooleanField(null=True, default=True)
# NOTE: integration_filter is deprecated (to be removed), use filtered_integrations instead
integration_filter = models.JSONField(default=None, null=True, blank=True)
filtered_integrations = models.ManyToManyField("alerts.AlertReceiveChannel", related_name="webhooks")
is_legacy = models.BooleanField(null=True, default=False)
preset = models.CharField(max_length=100, null=True, blank=True, default=None)
is_from_connected_integration = models.BooleanField(null=True, default=False)
class Meta:
unique_together = ("name", "organization")
@ -241,9 +245,9 @@ class Webhook(models.Model):
return url
def check_integration_filter(self, alert_group):
if not self.integration_filter:
if self.filtered_integrations.count() == 0:
return True
return alert_group.channel.public_primary_key in self.integration_filter
return self.filtered_integrations.filter(id=alert_group.channel.id).exists()
def check_trigger(self, event_data):
if not self.trigger_template:

View file

@ -118,11 +118,12 @@ def test_execute_webhook_integration_filter_not_matching(
organization = make_organization()
alert_receive_channel = make_alert_receive_channel(organization)
alert_group = make_alert_group(alert_receive_channel)
other_alert_receive_channel = make_alert_receive_channel(organization)
webhook = make_custom_webhook(
organization=organization,
trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED,
integration_filter=["does-not-match"],
)
webhook.filtered_integrations.add(other_alert_receive_channel)
with patch("apps.webhooks.models.webhook.requests") as mock_requests:
execute_webhook(webhook.pk, alert_group.pk, None, None)
@ -146,10 +147,10 @@ def test_execute_webhook_integration_filter_matching(
webhook = make_custom_webhook(
organization=organization,
trigger_type=Webhook.TRIGGER_ALERT_GROUP_CREATED,
integration_filter=["test-integration-1"],
# Check we get past integration filter but exit early to keep test simple
trigger_template="False",
)
webhook.filtered_integrations.add(alert_receive_channel)
with patch("apps.webhooks.models.webhook.requests") as mock_requests:
execute_webhook(webhook.pk, alert_group.pk, None, None)

View file

@ -103,6 +103,25 @@ class UsersFilteredByOrganizationField(serializers.Field):
return queryset.filter(organization=request.user.organization, public_primary_key__in=data).distinct()
class IntegrationFilteredByOrganizationField(serializers.RelatedField):
    """Related field that (de)serializes an AlertReceiveChannel by its public
    primary key, restricting lookups to the requesting user's organization."""

    def get_queryset(self):
        # scope lookups to the requester's organization; None when no request in context
        request = self.context.get("request", None)
        if not request:
            return None
        return request.user.organization.alert_receive_channels.all()

    def to_internal_value(self, data):
        """Resolve a public primary key to an AlertReceiveChannel instance."""
        queryset = self.get_queryset()
        if queryset is None:
            # without a request we cannot scope the lookup; previously this path
            # crashed with AttributeError on None.get(...) instead of a clean 400
            raise ValidationError("Invalid integration")
        try:
            return queryset.get(public_primary_key=data)
        except ObjectDoesNotExist:
            raise ValidationError("Integration does not exist")
        except (TypeError, ValueError):
            # e.g. unhashable/invalid lookup value passed as the ppk
            raise ValidationError("Invalid integration")

    def to_representation(self, value):
        # serialize back to the public primary key string
        return value.public_primary_key
class RouteIdField(fields.CharField):
def to_internal_value(self, data):
try:

View file

@ -179,7 +179,7 @@ class OrderedModel(models.Model):
self.order = order
self._manager.filter(pk__in=pks).bulk_update([self] + instances_to_move, fields=["order"])
@_retry(OperationalError) # retry on deadlock
@_retry((IntegrityError, OperationalError)) # retry on duplicate order or deadlock
def swap(self, order: int) -> None:
"""
Swap self with an instance at a given order.

View file

@ -327,6 +327,38 @@ def test_ordered_model_swap_concurrent():
assert list(TestOrderedModel.objects.order_by("id").values_list("order", flat=True)) == unique_orders
@pytest.mark.skipif(SKIP_CONCURRENT, reason="OrderedModel concurrent tests are skipped to speed up tests")
@pytest.mark.django_db(transaction=True)
def test_ordered_model_swap_all_to_zero():
    """Stress-test concurrent swap() calls: orders must stay sequential and no thread may raise."""
    THREADS = 300
    exceptions = []
    TestOrderedModel.objects.all().delete()  # clear table
    instances = [TestOrderedModel.objects.create(test_field="test") for _ in range(THREADS)]

    # generate random non-unique orders
    random.seed(42)
    positions = [random.randint(0, THREADS - 1) for _ in range(THREADS)]

    def swap(idx):
        try:
            instance = instances[idx]
            instance.swap(positions[idx])
        except Exception as e:
            exceptions.append(e)

    # NOTE(review): every thread is started with idx=0, so only instances[0] and
    # positions[0] are ever exercised (all threads swap the same instance to the
    # same position). If the intent was one thread per instance, this should be
    # args=(i,) over range(THREADS) — confirm the intended scenario.
    threads = [threading.Thread(target=swap, args=(0,)) for _ in range(THREADS)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

    # can only check that orders are still sequential and that there are no exceptions
    # can't check the exact order because it changes depending on the order of execution
    assert not exceptions
    assert _orders_are_sequential()
@pytest.mark.skipif(SKIP_CONCURRENT, reason="OrderedModel concurrent tests are skipped to speed up tests")
@pytest.mark.django_db(transaction=True)
def test_ordered_model_swap_non_unique_orders_concurrent():

View file

@ -1,3 +1,4 @@
import re
from unittest.mock import patch
import pytest
@ -27,7 +28,11 @@ def test_internal_api_detail_actions_get_object(
organization, user, token = make_organization_and_user_with_plugin_token()
client = APIClient()
url = reverse(f"api-internal:{basename}-{action.url_name}", kwargs={"pk": "NONEXISTENT"})
# get additional kwargs based on url_path regex
# example: for /alert_receive_channel/<pk>/webhooks/<webhook_id>, url_path_kwargs = {"webhook_id": "NONEXISTENT"}
url_path_kwargs = {key: "NONEXISTENT" for key in re.compile(action.url_path).groupindex.keys()}
url = reverse(f"api-internal:{basename}-{action.url_name}", kwargs={"pk": "NONEXISTENT", **url_path_kwargs})
with patch.object(viewset_class, "get_object", side_effect=NotFound) as mock_get_object:
method = list(action.mapping.keys())[0] # get the first allowed method

View file

@ -1,48 +0,0 @@
from django.conf import settings
from rest_framework import parsers, renderers
def check_content_length(parser_context):
    """Enforce DATA_UPLOAD_MAX_MEMORY_SIZE for json rest framework API requests."""
    # nothing to enforce without a parser context, a configured limit, and a request
    if not parser_context or not settings.DATA_UPLOAD_MAX_MEMORY_SIZE or "request" not in parser_context:
        return
    try:
        content_length = int(parser_context["request"].META.get("CONTENT_LENGTH", 0))
    except (ValueError, TypeError):
        # missing/malformed header: treat as zero-length (allowed)
        content_length = 0
    # reject bodies above the configured limit, and nonsensical negative lengths
    if content_length > settings.DATA_UPLOAD_MAX_MEMORY_SIZE or content_length < 0:
        raise parsers.ParseError("RequestDataTooBig")
class JSONParser(parsers.JSONParser):
    """
    Parse JSON-serialized data.

    Enforce django setting for DATA_UPLOAD_MAX_MEMORY_SIZE.
    """

    media_type = "application/json"
    renderer_class = renderers.JSONRenderer

    def parse(self, stream, media_type=None, parser_context=None):
        """Parse incoming bytestream as JSON and returns the resulting data."""
        # see https://github.com/encode/django-rest-framework/issues/4760
        check_content_length(parser_context)
        # modern zero-argument super() replaces the legacy Py2-style call
        return super().parse(stream, media_type, parser_context)
class FormParser(parsers.FormParser):
    """
    Parse form data.

    Enforce django setting for DATA_UPLOAD_MAX_MEMORY_SIZE.
    """

    media_type = "application/x-www-form-urlencoded"

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as a URL encoded form,
        and returns the resulting QueryDict.
        """
        # see https://github.com/encode/django-rest-framework/issues/4760
        check_content_length(parser_context)
        # modern zero-argument super() replaces the legacy Py2-style call
        return super().parse(stream, media_type, parser_context)

View file

@ -0,0 +1,16 @@
-c requirements.txt
celery-types==0.18.0
django-filter-stubs==0.1.3
django-stubs[compatible-mypy]==4.2.2
djangorestframework-stubs[compatible-mypy]==3.14.2
httpretty==1.1.4
mypy==1.4.1
pre-commit==2.15.0
pytest==7.3.1
pytest-django==4.5.2
pytest-xdist[psutil]==3.5.0
pytest_factoryboy==2.5.1
types-beautifulsoup4==4.12.0.5
types-PyMySQL==1.0.19.7
types-python-dateutil==2.8.19.13
types-requests==2.31.0.1

View file

@ -1,15 +1,169 @@
#
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --strip-extras ./engine/requirements-dev.in
#
asgiref==3.7.2
# via
# -c ./engine/requirements.txt
# django
celery-types==0.18.0
# via -r ./engine/requirements-dev.in
certifi==2024.2.2
# via
# -c ./engine/requirements.txt
# requests
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.3.2
# via
# -c ./engine/requirements.txt
# requests
distlib==0.3.8
# via virtualenv
django==4.2.10
# via
# -c ./engine/requirements.txt
# django-stubs
# django-stubs-ext
django-filter-stubs==0.1.3
django-stubs[compatible-mypy]==4.2.2
djangorestframework-stubs[compatible-mypy]==3.14.2
mypy==1.4.1
pre-commit==2.15.0
pytest==7.3.1
pytest-django==4.5.2
pytest_factoryboy==2.5.1
types-beautifulsoup4==4.12.0.5
types-PyMySQL==1.0.19.7
types-python-dateutil==2.8.19.13
types-requests==2.31.0.1
# via -r ./engine/requirements-dev.in
django-stubs==4.2.2
# via
# -r ./engine/requirements-dev.in
# django-filter-stubs
# django-stubs
# djangorestframework-stubs
django-stubs-ext==4.2.7
# via django-stubs
djangorestframework-stubs==3.14.2
# via
# -r ./engine/requirements-dev.in
# django-filter-stubs
# djangorestframework-stubs
execnet==2.0.2
# via pytest-xdist
factory-boy==2.12.0
# via
# -c ./engine/requirements.txt
# pytest-factoryboy
faker==23.1.0
# via
# -c ./engine/requirements.txt
# factory-boy
filelock==3.13.1
# via virtualenv
httpretty==1.1.4
pytest-xdist[psutil]==3.5.0
# via -r ./engine/requirements-dev.in
identify==2.5.34
# via pre-commit
idna==3.6
# via
# -c ./engine/requirements.txt
# requests
inflection==0.5.1
# via
# -c ./engine/requirements.txt
# pytest-factoryboy
iniconfig==2.0.0
# via pytest
mypy==1.4.1
# via
# -r ./engine/requirements-dev.in
# django-filter-stubs
# django-stubs
# djangorestframework-stubs
mypy-extensions==1.0.0
# via mypy
nodeenv==1.8.0
# via pre-commit
packaging==23.2
# via pytest
platformdirs==4.2.0
# via virtualenv
pluggy==1.4.0
# via pytest
pre-commit==2.15.0
# via -r ./engine/requirements-dev.in
psutil==5.9.4
# via
# -c ./engine/requirements.txt
# pytest-xdist
pytest==7.3.1
# via
# -r ./engine/requirements-dev.in
# pytest-django
# pytest-factoryboy
# pytest-xdist
pytest-django==4.5.2
# via -r ./engine/requirements-dev.in
pytest-factoryboy==2.5.1
# via -r ./engine/requirements-dev.in
pytest-xdist==3.5.0
# via
# -r ./engine/requirements-dev.in
# pytest-xdist
python-dateutil==2.8.2
# via
# -c ./engine/requirements.txt
# faker
pyyaml==6.0.1
# via
# -c ./engine/requirements.txt
# pre-commit
requests==2.31.0
# via
# -c ./engine/requirements.txt
# djangorestframework-stubs
six==1.16.0
# via
# -c ./engine/requirements.txt
# python-dateutil
sqlparse==0.4.4
# via
# -c ./engine/requirements.txt
# django
toml==0.10.2
# via
# -c ./engine/requirements.txt
# pre-commit
types-beautifulsoup4==4.12.0.5
# via -r ./engine/requirements-dev.in
types-html5lib==1.1.11.20240106
# via types-beautifulsoup4
types-pymysql==1.0.19.7
# via -r ./engine/requirements-dev.in
types-python-dateutil==2.8.19.13
# via -r ./engine/requirements-dev.in
types-pytz==2024.1.0.20240203
# via django-stubs
types-pyyaml==6.0.12.12
# via
# django-stubs
# djangorestframework-stubs
types-requests==2.31.0.1
# via
# -r ./engine/requirements-dev.in
# djangorestframework-stubs
types-urllib3==1.26.25.14
# via types-requests
typing-extensions==4.9.0
# via
# -c ./engine/requirements.txt
# celery-types
# django-filter-stubs
# django-stubs
# django-stubs-ext
# djangorestframework-stubs
# mypy
# pytest-factoryboy
urllib3==1.26.18
# via
# -c ./engine/requirements.txt
# requests
virtualenv==20.25.0
# via pre-commit
# The following packages are considered to be unsafe in a requirements file:
# setuptools

58
engine/requirements.in Normal file
View file

@ -0,0 +1,58 @@
babel==2.12.1
beautifulsoup4==4.12.2
celery[redis]==5.3.1
cryptography==38.0.4 # version 39.0.0 introduced an issue - https://stackoverflow.com/a/75053968/3902555
django==4.2.10
django-add-default-value==0.10.0
django-amazon-ses==4.0.1
django-anymail==8.6
django-cors-headers==3.7.0
# pyroscope-io==0.8.1
django-dbconn-retry==0.1.7
django-debug-toolbar==4.1
django-deprecate-fields==0.1.1
django-filter==2.4.0
django-ipware==4.0.2
django-log-request-id==1.6.0
django-migration-linter==4.1.0
django-mirage-field==1.3.0
django-mysql==4.6.0
django-polymorphic==3.1.0
django-ratelimit==2.0.0
django-redis==5.4.0
django-rest-polymorphic==0.1.10
django-silk==5.0.3
django-sns-view==0.1.2
djangorestframework==3.14.0
factory-boy<3.0
drf-spectacular==0.26.5
emoji==2.4.0
grpcio==1.57.0
fcm-django @ https://github.com/grafana/fcm-django/archive/refs/tags/v1.0.12r1.tar.gz#sha256=7ec7cd9d353fc9edf19a4acd4fa14090a31d83d02ac986c5e5e081dea29f564f
hiredis==2.2.3
humanize==0.5.1
icalendar==5.0.10
lxml==4.9.2
markdown2==2.4.10
opentelemetry-exporter-otlp-proto-grpc==1.15.0
opentelemetry-instrumentation-celery==0.36b0
opentelemetry-instrumentation-pymysql==0.36b0
opentelemetry-instrumentation-wsgi==0.36b0
phonenumbers==8.10.0
prometheus_client==0.16.0
psutil==5.9.4
psycopg2==2.9.3
pymdown-extensions==10.0
PyMySQL==1.1.0
python-telegram-bot==13.13
recurring-ical-events==2.1.0
redis==5.0.1
regex==2021.11.2
requests==2.31.0
slack-export-viewer==1.1.4
slack_sdk==3.21.3
social-auth-app-django==5.3.0
twilio~=6.37.0
urllib3==1.26.18
uwsgi==2.0.21
whitenoise==5.3.0

View file

@ -1,58 +1,468 @@
django==4.2.10
djangorestframework==3.14.0
slack_sdk==3.21.3
whitenoise==5.3.0
twilio~=6.37.0
phonenumbers==8.10.0
celery[amqp,redis]==5.3.1
redis==5.0.1
humanize==0.5.1
uwsgi==2.0.21
django-cors-headers==3.7.0
django-debug-toolbar==4.1
django-sns-view==0.1.2
python-telegram-bot==13.13
django-silk==5.0.3
django-redis==5.4.0
hiredis==2.2.3
django-ratelimit==2.0.0
django-filter==2.4.0
icalendar==5.0.10
recurring-ical-events==2.1.0
slack-export-viewer==1.1.4
beautifulsoup4==4.12.2
social-auth-app-django==5.3.0
cryptography==38.0.4 # version 39.0.0 introduced an issue - https://stackoverflow.com/a/75053968/3902555
factory-boy<3.0
django-log-request-id==1.6.0
django-polymorphic==3.1.0
django-rest-polymorphic==0.1.10
https://github.com/grafana/fcm-django/archive/refs/tags/v1.0.12r1.tar.gz
django-mirage-field==1.3.0
django-mysql==4.6.0
PyMySQL==1.1.0
psycopg2==2.9.3
emoji==2.4.0
regex==2021.11.2
psutil==5.9.4
django-migration-linter==4.1.0
django-add-default-value==0.10.0
opentelemetry-instrumentation-celery==0.36b0
opentelemetry-instrumentation-pymysql==0.36b0
opentelemetry-instrumentation-wsgi==0.36b0
opentelemetry-exporter-otlp-proto-grpc==1.15.0
# pyroscope-io==0.8.1
django-dbconn-retry==0.1.7
django-ipware==4.0.2
django-anymail==8.6
django-amazon-ses==4.0.1
django-deprecate-fields==0.1.1
pymdown-extensions==10.0
requests==2.31.0
urllib3==1.26.18
prometheus_client==0.16.0
lxml==4.9.2
#
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --strip-extras ./engine/requirements.in
#
amqp==5.2.0
# via kombu
appdirs==1.4.4
# via django-migration-linter
apscheduler==3.6.3
# via python-telegram-bot
asgiref==3.7.2
# via django
attrs==23.2.0
# via
# jsonschema
# referencing
autopep8==2.0.4
# via django-silk
babel==2.12.1
# via -r ./engine/requirements.in
backoff==2.2.1
# via opentelemetry-exporter-otlp-proto-grpc
beautifulsoup4==4.12.2
# via -r ./engine/requirements.in
billiard==4.2.0
# via celery
blinker==1.7.0
# via flask
boto3==1.34.41
# via django-amazon-ses
botocore==1.34.41
# via
# boto3
# s3transfer
cachecontrol==0.14.0
# via firebase-admin
cachetools==4.2.2
# via
# google-auth
# python-telegram-bot
celery==5.3.1
# via
# -r ./engine/requirements.in
# celery
certifi==2024.2.2
# via
# python-telegram-bot
# requests
cffi==1.16.0
# via
# cryptography
# django-sns-view
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via
# celery
# click-didyoumean
# click-plugins
# click-repl
# flask
# slack-export-viewer
click-didyoumean==0.3.0
# via celery
click-plugins==1.1.1
# via celery
click-repl==0.3.0
# via celery
cryptography==38.0.4
# via
# -r ./engine/requirements.in
# django-mirage-field
# pyopenssl
# social-auth-core
defusedxml==0.8.0rc2
# via
# python3-openid
# social-auth-core
deprecated==1.2.14
# via opentelemetry-api
django==4.2.10
# via
# -r ./engine/requirements.in
# django-add-default-value
# django-amazon-ses
# django-anymail
# django-cors-headers
# django-debug-toolbar
# django-deprecate-fields
# django-filter
# django-log-request-id
# django-migration-linter
# django-mysql
# django-polymorphic
# django-redis
# django-rest-polymorphic
# django-silk
# django-sns-view
# djangorestframework
# drf-spectacular
# fcm-django
# social-auth-app-django
django-add-default-value==0.10.0
# via -r ./engine/requirements.in
django-amazon-ses==4.0.1
# via -r ./engine/requirements.in
django-anymail==8.6
# via -r ./engine/requirements.in
django-cors-headers==3.7.0
# via -r ./engine/requirements.in
django-dbconn-retry==0.1.7
# via -r ./engine/requirements.in
django-debug-toolbar==4.1.0
# via -r ./engine/requirements.in
django-deprecate-fields==0.1.1
# via -r ./engine/requirements.in
django-filter==2.4.0
# via -r ./engine/requirements.in
django-ipware==4.0.2
# via -r ./engine/requirements.in
django-log-request-id==1.6.0
# via -r ./engine/requirements.in
django-migration-linter==4.1.0
# via -r ./engine/requirements.in
django-mirage-field==1.3.0
# via -r ./engine/requirements.in
django-mysql==4.6.0
# via -r ./engine/requirements.in
django-polymorphic==3.1.0
# via
# -r ./engine/requirements.in
# django-rest-polymorphic
django-ratelimit==2.0.0
# via -r ./engine/requirements.in
django-redis==5.4.0
# via -r ./engine/requirements.in
django-rest-polymorphic==0.1.10
# via -r ./engine/requirements.in
django-silk==5.0.3
# via -r ./engine/requirements.in
django-sns-view==0.1.2
# via -r ./engine/requirements.in
djangorestframework==3.14.0
# via
# -r ./engine/requirements.in
# django-rest-polymorphic
# drf-spectacular
drf-spectacular==0.26.5
# via -r ./engine/requirements.in
emoji==2.4.0
# via
# -r ./engine/requirements.in
# slack-export-viewer
factory-boy==2.12.0
# via -r ./engine/requirements.in
faker==23.1.0
# via factory-boy
fcm-django @ https://github.com/grafana/fcm-django/archive/refs/tags/v1.0.12r1.tar.gz#sha256=7ec7cd9d353fc9edf19a4acd4fa14090a31d83d02ac986c5e5e081dea29f564f
# via -r ./engine/requirements.in
firebase-admin==5.4.0
# via fcm-django
flask==3.0.2
# via slack-export-viewer
google-api-core==2.17.0
# via
# firebase-admin
# google-api-core
# google-api-python-client
# google-cloud-core
# google-cloud-firestore
# google-cloud-storage
google-api-python-client==2.118.0
# via firebase-admin
google-auth==2.27.0
# via
# google-api-core
# google-api-python-client
# google-auth-httplib2
# google-cloud-core
# google-cloud-storage
google-auth-httplib2==0.2.0
# via google-api-python-client
google-cloud-core==2.4.1
# via
# google-cloud-firestore
# google-cloud-storage
google-cloud-firestore==2.14.0
# via firebase-admin
google-cloud-storage==2.14.0
# via firebase-admin
google-crc32c==1.5.0
# via
# google-cloud-storage
# google-resumable-media
google-resumable-media==2.7.0
# via google-cloud-storage
googleapis-common-protos==1.62.0
# via
# google-api-core
# grpcio-status
# opentelemetry-exporter-otlp-proto-grpc
gprof2dot==2022.7.29
# via django-silk
grpcio==1.57.0
# via
# -r ./engine/requirements.in
# google-api-core
# grpcio-status
# opentelemetry-exporter-otlp-proto-grpc
grpcio-status==1.57.0
# via google-api-core
hiredis==2.2.3
# via -r ./engine/requirements.in
httplib2==0.22.0
# via
# google-api-python-client
# google-auth-httplib2
humanize==0.5.1
# via -r ./engine/requirements.in
icalendar==5.0.10
# via
# -r ./engine/requirements.in
# recurring-ical-events
# x-wr-timezone
idna==3.6
# via requests
inflection==0.5.1
# via drf-spectacular
itsdangerous==2.1.2
# via flask
jinja2==3.1.3
# via flask
jmespath==1.0.1
# via
# boto3
# botocore
jsonschema==4.21.1
# via drf-spectacular
jsonschema-specifications==2023.12.1
# via jsonschema
kombu==5.3.5
# via celery
lxml==4.9.2
# via -r ./engine/requirements.in
markdown==3.5.2
# via pymdown-extensions
markdown2==2.4.10
# via
# -r ./engine/requirements.in
# slack-export-viewer
markupsafe==2.1.5
# via
# jinja2
# werkzeug
msgpack==1.0.7
# via cachecontrol
oauthlib==3.2.2
# via
# requests-oauthlib
# social-auth-core
opentelemetry-api==1.15.0
# via
# opentelemetry-exporter-otlp-proto-grpc
# opentelemetry-instrumentation
# opentelemetry-instrumentation-celery
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-pymysql
# opentelemetry-instrumentation-wsgi
# opentelemetry-sdk
opentelemetry-exporter-otlp-proto-grpc==1.15.0
# via -r ./engine/requirements.in
opentelemetry-instrumentation==0.36b0
# via
# opentelemetry-instrumentation-celery
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-pymysql
# opentelemetry-instrumentation-wsgi
opentelemetry-instrumentation-celery==0.36b0
# via -r ./engine/requirements.in
opentelemetry-instrumentation-dbapi==0.36b0
# via opentelemetry-instrumentation-pymysql
opentelemetry-instrumentation-pymysql==0.36b0
# via -r ./engine/requirements.in
opentelemetry-instrumentation-wsgi==0.36b0
# via -r ./engine/requirements.in
opentelemetry-proto==1.15.0
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-sdk==1.15.0
# via opentelemetry-exporter-otlp-proto-grpc
opentelemetry-semantic-conventions==0.36b0
# via
# opentelemetry-instrumentation-celery
# opentelemetry-instrumentation-dbapi
# opentelemetry-instrumentation-wsgi
# opentelemetry-sdk
opentelemetry-util-http==0.36b0
# via opentelemetry-instrumentation-wsgi
pem==23.1.0
# via django-sns-view
phonenumbers==8.10.0
# via -r ./engine/requirements.in
prometheus-client==0.16.0
# via -r ./engine/requirements.in
prompt-toolkit==3.0.43
# via click-repl
proto-plus==1.23.0
# via google-cloud-firestore
protobuf==4.25.2
# via
# google-api-core
# google-cloud-firestore
# googleapis-common-protos
# grpcio-status
# opentelemetry-proto
# proto-plus
psutil==5.9.4
# via -r ./engine/requirements.in
psycopg2==2.9.3
# via -r ./engine/requirements.in
pyasn1==0.5.1
# via
# pyasn1-modules
# rsa
pyasn1-modules==0.3.0
# via google-auth
pycodestyle==2.11.1
# via autopep8
pycparser==2.21
# via cffi
pyjwt==2.8.0
# via
# social-auth-core
# twilio
pymdown-extensions==10.0
# via -r ./engine/requirements.in
pymysql==1.1.0
# via -r ./engine/requirements.in
pyopenssl==23.2.0
# via django-sns-view
pyparsing==3.1.1
# via httplib2
python-dateutil==2.8.2
# via
# botocore
# celery
# faker
# icalendar
# recurring-ical-events
python-telegram-bot==13.13
# via -r ./engine/requirements.in
python3-openid==3.2.0
# via social-auth-core
pytz==2024.1
# via
# apscheduler
# djangorestframework
# icalendar
# python-telegram-bot
# recurring-ical-events
# twilio
# x-wr-timezone
pyyaml==6.0.1
# via
# drf-spectacular
# pymdown-extensions
recurring-ical-events==2.1.0
# via -r ./engine/requirements.in
redis==5.0.1
# via
# -r ./engine/requirements.in
# celery
# django-redis
referencing==0.33.0
# via
# jsonschema
# jsonschema-specifications
regex==2021.11.2
# via -r ./engine/requirements.in
requests==2.31.0
# via
# -r ./engine/requirements.in
# cachecontrol
# django-anymail
# django-sns-view
# google-api-core
# google-cloud-storage
# requests-oauthlib
# social-auth-core
# twilio
requests-oauthlib==1.3.1
# via social-auth-core
rpds-py==0.18.0
# via
# jsonschema
# referencing
rsa==4.9
# via google-auth
s3transfer==0.10.0
# via boto3
six==1.16.0
# via
# apscheduler
# django-rest-polymorphic
# python-dateutil
# twilio
slack-export-viewer==1.1.4
# via -r ./engine/requirements.in
slack-sdk==3.21.3
# via -r ./engine/requirements.in
social-auth-app-django==5.3.0
# via -r ./engine/requirements.in
social-auth-core==4.5.2
# via social-auth-app-django
soupsieve==2.5
# via beautifulsoup4
sqlparse==0.4.4
# via
# django
# django-debug-toolbar
# django-silk
toml==0.10.2
# via django-migration-linter
tornado==6.4
# via python-telegram-bot
tqdm==4.66.2
# via django-mirage-field
twilio==6.37.0
# via -r ./engine/requirements.in
typing-extensions==4.9.0
# via opentelemetry-sdk
tzdata==2024.1
# via celery
tzlocal==5.2
# via apscheduler
uritemplate==4.1.1
# via
# drf-spectacular
# google-api-python-client
urllib3==1.26.18
# via
# -r ./engine/requirements.in
# botocore
# requests
uwsgi==2.0.21
# via -r ./engine/requirements.in
vine==5.1.0
# via
# amqp
# celery
# kombu
wcwidth==0.2.13
# via prompt-toolkit
werkzeug==3.0.1
# via flask
whitenoise==5.3.0
# via -r ./engine/requirements.in
wrapt==1.16.0
# via
# deprecated
# opentelemetry-instrumentation
# opentelemetry-instrumentation-dbapi
x-wr-timezone==0.0.6
# via recurring-ical-events
# The following packages are considered to be unsafe in a requirements file:
# setuptools

View file

@ -284,8 +284,8 @@ INSTALLED_APPS = [
REST_FRAMEWORK = {
"DEFAULT_PARSER_CLASSES": (
"engine.parsers.JSONParser",
"engine.parsers.FormParser",
"rest_framework.parsers.JSONParser",
"rest_framework.parsers.FormParser",
"rest_framework.parsers.MultiPartParser",
),
"DEFAULT_AUTHENTICATION_CLASSES": [],
@ -731,7 +731,6 @@ SELF_HOSTED_SETTINGS = {
GRAFANA_INCIDENT_STATIC_API_KEY = os.environ.get("GRAFANA_INCIDENT_STATIC_API_KEY", None)
DATA_UPLOAD_MAX_MEMORY_SIZE = getenv_integer("DATA_UPLOAD_MAX_MEMORY_SIZE", 1_048_576) # 1mb by default
JINJA_TEMPLATE_MAX_LENGTH = 50000
JINJA_RESULT_TITLE_MAX_LENGTH = 500
JINJA_RESULT_MAX_LENGTH = 50000

View file

@ -134,6 +134,7 @@ CELERY_TASK_ROUTES = {
"apps.alerts.tasks.check_escalation_finished.check_alert_group_personal_notifications_task": {"queue": "long"},
"apps.alerts.tasks.check_escalation_finished.check_personal_notifications_task": {"queue": "long"},
"apps.grafana_plugin.tasks.sync.cleanup_organization_async": {"queue": "long"},
"apps.grafana_plugin.tasks.sync.cleanup_empty_deleted_integrations": {"queue": "long"},
"apps.grafana_plugin.tasks.sync.start_cleanup_deleted_organizations": {"queue": "long"},
"apps.grafana_plugin.tasks.sync.start_sync_organizations": {"queue": "long"},
"apps.grafana_plugin.tasks.sync.sync_organization_async": {"queue": "long"},

View file

@ -74,9 +74,6 @@ post-buffering=1
; leave it on by default and remove it on a case-by-case basis.
enable-threads=true
; drop requests with CONTENT_LENGTH bigger than 15MB
route-if=ishigher:${CONTENT_LENGTH};15000000 break:413 Request Entity Too Large
; Till uWSGI 2.1, by default, sending the SIGTERM signal to uWSGI means “brutally reload the stack” while the
; convention is to shut an application down on SIGTERM. To shutdown uWSGI, use SIGINT or SIGQUIT instead. If you
; absolutely can not live with uWSGI being so disrespectful towards SIGTERM, by all means, enable the die-on-term

View file

@ -0,0 +1,25 @@
/*
* ⚠️⚠️⚠️ THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY. ⚠️⚠️⚠️
*
* In order to extend the configuration follow the steps in
* https://grafana.com/developers/plugin-tools/create-a-plugin/extend-a-plugin/extend-configurations#extend-the-eslint-config
*/
{
"extends": ["@grafana/eslint-config"],
"root": true,
"rules": {
"react/prop-types": "off"
},
"overrides": [
{
"plugins": ["deprecation"],
"files": ["src/**/*.{ts,tsx}"],
"rules": {
"deprecation/deprecation": "warn"
},
"parserOptions": {
"project": "./tsconfig.json"
}
}
]
}

View file

@ -0,0 +1,16 @@
/*
* THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY.
*
* In order to extend the configuration follow the steps in .config/README.md
*/
// Prettier options for the plugin; values mirror the @grafana/create-plugin scaffold.
module.exports = {
  endOfLine: 'auto', // keep whatever line endings are already present in each file
  printWidth: 120, // wider than Prettier's default of 80
  trailingComma: 'es5', // trailing commas wherever valid in ES5 (objects, arrays)
  semi: true, // always terminate statements with semicolons
  jsxSingleQuote: false, // double quotes in JSX attributes
  singleQuote: true, // single quotes in plain JS/TS strings
  useTabs: false, // indent with spaces, never tabs
  tabWidth: 2,
};

View file

@ -0,0 +1,25 @@
/*
* THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY.
*
* In order to extend the configuration follow the steps in
* https://grafana.com/developers/plugin-tools/create-a-plugin/extend-a-plugin/extend-configurations#extend-the-jest-config
*/
import '@testing-library/jest-dom';
// https://jestjs.io/docs/manual-mocks#mocking-methods-which-are-not-implemented-in-jsdom
// jsdom does not implement window.matchMedia; install a jest-mocked stub so
// components that read media queries can render under test.
Object.defineProperty(global, 'matchMedia', {
  writable: true,
  value: jest.fn().mockImplementation((query) => ({
    matches: false, // every query reports "not matching"
    media: query,
    onchange: null,
    addListener: jest.fn(), // deprecated
    removeListener: jest.fn(), // deprecated
    addEventListener: jest.fn(),
    removeEventListener: jest.fn(),
    dispatchEvent: jest.fn(),
  })),
});

// jsdom does not implement the canvas API either; stub getContext so code that
// touches a canvas does not throw. The stub returns undefined — callers must tolerate that.
HTMLCanvasElement.prototype.getContext = () => {};

View file

@ -0,0 +1,43 @@
/*
* THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY.
*
* In order to extend the configuration follow the steps in
* https://grafana.com/developers/plugin-tools/create-a-plugin/extend-a-plugin/extend-configurations#extend-the-jest-config
*/
const path = require('path');
const { grafanaESModules, nodeModulesToTransform } = require('./jest/utils');
module.exports = {
moduleNameMapper: {
'\\.(css|scss|sass)$': 'identity-obj-proxy',
'react-inlinesvg': path.resolve(__dirname, 'jest', 'mocks', 'react-inlinesvg.tsx'),
},
modulePaths: ['<rootDir>/src'],
setupFilesAfterEnv: ['<rootDir>/jest-setup.js'],
testEnvironment: 'jest-environment-jsdom',
testMatch: [
'<rootDir>/src/**/__tests__/**/*.{js,jsx,ts,tsx}',
'<rootDir>/src/**/*.{spec,test,jest}.{js,jsx,ts,tsx}',
'<rootDir>/src/**/*.{spec,test,jest}.{js,jsx,ts,tsx}',
],
transform: {
'^.+\\.(t|j)sx?$': [
'@swc/jest',
{
sourceMaps: 'inline',
jsc: {
parser: {
syntax: 'typescript',
tsx: true,
decorators: false,
dynamicImport: true,
},
},
},
],
},
// Jest will throw `Cannot use import statement outside module` if it tries to load an
// ES module without it being transformed first. ./config/README.md#esm-errors-with-jest
transformIgnorePatterns: [nodeModulesToTransform(grafanaESModules)],
};

View file

@ -0,0 +1,25 @@
// Due to the grafana/ui Icon component making fetch requests to
// `/public/img/icon/<icon_name>.svg` we need to mock react-inlinesvg to prevent
// the failed fetch requests from displaying errors in console.
import React from 'react';
type Callback = (...args: any[]) => void;

// Shape of a cached SVG entry; mirrors the type exported by the real react-inlinesvg package.
export interface StorageItem {
  content: string;
  queue: Callback[];
  status: string;
}

// Mirrors react-inlinesvg's exported cache store. NOTE(review): nothing in this mock
// writes to it — presumably exported only for API compatibility with the real package;
// confirm against consumers before removing.
export const cacheStore: { [key: string]: StorageItem } = Object.create(null);

// Captures the directory portion ($1) and the extension-less file name ($2) of an .svg path.
const SVG_FILE_NAME_REGEX = /(.+)\/(.+)\.svg$/;

// Renders an empty <svg> tagged with a test id derived from the source path,
// instead of fetching and inlining the real SVG file.
const InlineSVG = ({ src }: { src: string }) => {
  // testId will be the file name without extension (e.g. `public/img/icons/angle-double-down.svg` -> `angle-double-down`)
  const testId = src.replace(SVG_FILE_NAME_REGEX, '$2');
  return <svg xmlns="http://www.w3.org/2000/svg" data-testid={testId} viewBox="0 0 24 24" />;
};

export default InlineSVG;

View file

@ -0,0 +1,31 @@
/*
* THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY.
*
* In order to extend the configuration follow the steps in .config/README.md
*/
/*
* This utility function is useful in combination with jest `transformIgnorePatterns` config
* to transform specific packages (e.g.ES modules) in a projects node_modules folder.
*/
const nodeModulesToTransform = (moduleNames) => `node_modules\/(?!.*(${moduleNames.join('|')})\/.*)`;
// Array of known nested grafana package dependencies that only bundle an ESM version
const grafanaESModules = [
'.pnpm', // Support using pnpm symlinked packages
'@grafana/schema',
'd3',
'd3-color',
'd3-force',
'd3-interpolate',
'd3-scale-chromatic',
'ol',
'react-colorful',
'rxjs',
'uuid',
];
module.exports = {
nodeModulesToTransform,
grafanaESModules,
};

View file

@ -0,0 +1,26 @@
/*
* THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY.
*
* In order to extend the configuration follow the steps in
* https://grafana.com/developers/plugin-tools/create-a-plugin/extend-a-plugin/extend-configurations#extend-the-typescript-config
*/
{
"compilerOptions": {
"alwaysStrict": true,
"declaration": false,
"rootDir": "../src",
"baseUrl": "../src",
"typeRoots": ["../node_modules/@types"],
"resolveJsonModule": true
},
"ts-node": {
"compilerOptions": {
"module": "commonjs",
"target": "es5",
"esModuleInterop": true
},
"transpileOnly": true
},
"include": ["../src", "./types"],
"extends": "@grafana/tsconfig"
}

View file

@ -0,0 +1,37 @@
// Image declarations
declare module '*.gif' {
const src: string;
export default src;
}
declare module '*.jpg' {
const src: string;
export default src;
}
declare module '*.jpeg' {
const src: string;
export default src;
}
declare module '*.png' {
const src: string;
export default src;
}
declare module '*.webp' {
const src: string;
export default src;
}
declare module '*.svg' {
const content: string;
export default content;
}
// Font declarations
declare module '*.woff';
declare module '*.woff2';
declare module '*.eot';
declare module '*.ttf';
declare module '*.otf';

View file

@ -0,0 +1,2 @@
// Directory containing the plugin sources (and src/plugin.json).
export const SOURCE_DIR = 'src';
// Directory webpack writes the bundled plugin into.
export const DIST_DIR = 'dist';

View file

@ -0,0 +1,58 @@
import fs from 'fs';
import process from 'process';
import os from 'os';
import path from 'path';
import { glob } from 'glob';
import { SOURCE_DIR } from './constants';
/**
 * Detect whether the build is running under Windows Subsystem for Linux.
 *
 * WSL reports `process.platform === 'linux'`, so after the platform check we
 * look for the string "microsoft" in the kernel release and, failing that, in
 * the contents of /proc/version.
 */
export function isWSL() {
  if (process.platform !== 'linux') {
    return false;
  }

  const kernelRelease = os.release().toLowerCase();
  if (kernelRelease.includes('microsoft')) {
    return true;
  }

  try {
    const procVersion = fs.readFileSync('/proc/version', 'utf8').toLowerCase();
    return procVersion.includes('microsoft');
  } catch {
    // /proc/version missing or unreadable — treat as plain Linux.
    return false;
  }
}
/** Load and return the package.json of the current working directory. */
export function getPackageJson() {
  const packageJsonPath = path.resolve(process.cwd(), 'package.json');
  return require(packageJsonPath);
}
/** Load and return the plugin manifest (SOURCE_DIR/plugin.json) relative to the current working directory. */
export function getPluginJson() {
  const pluginJsonPath = path.resolve(process.cwd(), `${SOURCE_DIR}/plugin.json`);
  return require(pluginJsonPath);
}
/** True when the plugin ships its own README at SOURCE_DIR/README.md. */
export function hasReadme() {
  const readmePath = path.resolve(process.cwd(), SOURCE_DIR, 'README.md');
  return fs.existsSync(readmePath);
}
// Support bundling nested plugins by finding all plugin.json files in src directory
// then checking for a sibling module.[jt]sx? file.
// Returns a map of webpack entry name -> absolute module path; the root plugin's
// entry is named "module", nested plugins get "<relative-path>/module".
export async function getEntries(): Promise<Record<string, string>> {
  const pluginJsonPaths = await glob('**/src/**/plugin.json', { absolute: true });

  const moduleFileLists = await Promise.all(
    pluginJsonPaths.map((pluginJsonPath) => {
      const pluginFolder = path.dirname(pluginJsonPath);
      return glob(`${pluginFolder}/module.{ts,tsx,js,jsx}`, { absolute: true });
    })
  );

  const entries: Record<string, string> = {};
  for (const moduleFiles of moduleFileLists) {
    for (const moduleFile of moduleFiles) {
      const pluginPath = path.dirname(moduleFile);
      const pluginName = path.relative(process.cwd(), pluginPath).replace(/src\/?/i, '');
      const entryName = pluginName === '' ? 'module' : `${pluginName}/module`;
      entries[entryName] = moduleFile;
    }
  }
  return entries;
}

View file

@ -0,0 +1,218 @@
/*
* THIS FILE WAS SCAFFOLDED BY `@grafana/create-plugin`. DO NOT EDIT THIS FILE DIRECTLY.
*
* In order to extend the configuration follow the steps in
* https://grafana.com/developers/plugin-tools/create-a-plugin/extend-a-plugin/extend-configurations#extend-the-webpack-config
*/
import CopyWebpackPlugin from 'copy-webpack-plugin';
import ESLintPlugin from 'eslint-webpack-plugin';
import ForkTsCheckerWebpackPlugin from 'fork-ts-checker-webpack-plugin';
import LiveReloadPlugin from 'webpack-livereload-plugin';
import path from 'path';
import ReplaceInFileWebpackPlugin from 'replace-in-file-webpack-plugin';
import { Configuration } from 'webpack';
import { getPackageJson, getPluginJson, hasReadme, getEntries, isWSL } from './utils';
import { SOURCE_DIR, DIST_DIR } from './constants';
const pluginJson = getPluginJson();
// Builds the webpack configuration for the plugin bundle. `env.production` /
// `env.development` select the mode, devtool, asset filename strategy and the
// dev-only plugins (live reload, type checking, linting).
const config = async (env): Promise<Configuration> => {
  const baseConfig: Configuration = {
    cache: {
      type: 'filesystem',
      buildDependencies: {
        // Invalidate the cache when this config file itself changes.
        config: [__filename],
      },
    },
    context: path.join(process.cwd(), SOURCE_DIR),
    devtool: env.production ? 'source-map' : 'eval-source-map',
    entry: await getEntries(),
    // Provided by the Grafana runtime at plugin load time — never bundled.
    externals: [
      'lodash',
      'jquery',
      'moment',
      'slate',
      'emotion',
      '@emotion/react',
      '@emotion/css',
      'prismjs',
      'slate-plain-serializer',
      '@grafana/slate-react',
      'react',
      'react-dom',
      'react-redux',
      'redux',
      'rxjs',
      'react-router',
      'react-router-dom',
      'd3',
      'angular',
      '@grafana/ui',
      '@grafana/runtime',
      '@grafana/data',
      // Mark legacy SDK imports as external if their name starts with the "grafana/" prefix
      ({ request }, callback) => {
        const prefix = 'grafana/';
        const hasPrefix = (request) => request.indexOf(prefix) === 0;
        const stripPrefix = (request) => request.substr(prefix.length);
        if (hasPrefix(request)) {
          return callback(undefined, stripPrefix(request));
        }
        callback();
      },
    ],
    mode: env.production ? 'production' : 'development',
    module: {
      rules: [
        // TS/TSX/JS/JSX compiled by SWC (no Babel).
        {
          exclude: /(node_modules)/,
          test: /\.[tj]sx?$/,
          use: {
            loader: 'swc-loader',
            options: {
              jsc: {
                baseUrl: path.resolve(__dirname, 'src'),
                target: 'es2015',
                loose: false,
                parser: {
                  syntax: 'typescript',
                  tsx: true,
                  decorators: false,
                  dynamicImport: true,
                },
              },
            },
          },
        },
        {
          test: /\.css$/,
          use: ['style-loader', 'css-loader'],
        },
        {
          test: /\.s[ac]ss$/,
          use: ['style-loader', 'css-loader', 'sass-loader'],
        },
        // Images: emitted under dist/img/; hashed names only in production builds.
        {
          test: /\.(png|jpe?g|gif|svg)$/,
          type: 'asset/resource',
          generator: {
            // Keep publicPath relative for host.com/grafana/ deployments
            publicPath: `public/plugins/${pluginJson.id}/img/`,
            outputPath: 'img/',
            filename: Boolean(env.production) ? '[hash][ext]' : '[file]',
          },
        },
        // Fonts: emitted under dist/fonts/; hashed names only in production builds.
        {
          test: /\.(woff|woff2|eot|ttf|otf)(\?v=\d+\.\d+\.\d+)?$/,
          type: 'asset/resource',
          generator: {
            // Keep publicPath relative for host.com/grafana/ deployments
            publicPath: `public/plugins/${pluginJson.id}/fonts/`,
            outputPath: 'fonts/',
            filename: Boolean(env.production) ? '[hash][ext]' : '[name][ext]',
          },
        },
      ],
    },
    output: {
      clean: {
        // Keep Go backend binaries and the build manifest when cleaning dist/.
        keep: new RegExp(`(.*?_(amd64|arm(64)?)(.exe)?|go_plugin_build_manifest)`),
      },
      filename: '[name].js',
      library: {
        // Grafana loads plugin modules via AMD.
        type: 'amd',
      },
      path: path.resolve(process.cwd(), DIST_DIR),
      publicPath: `public/plugins/${pluginJson.id}/`,
      uniqueName: pluginJson.id,
    },
    plugins: [
      // Copy static plugin assets into dist/ alongside the bundle.
      new CopyWebpackPlugin({
        patterns: [
          // If src/README.md exists use it; otherwise the root README
          // To `compiler.options.output`
          { from: hasReadme() ? 'README.md' : '../README.md', to: '.', force: true },
          { from: 'plugin.json', to: '.' },
          { from: '../LICENSE', to: '.' },
          { from: '../CHANGELOG.md', to: '.', noErrorOnMissing: true, force: true },
          { from: '**/*.json', to: '.' }, // TODO<Add an error for checking the basic structure of the repo>
          { from: '**/*.svg', to: '.', noErrorOnMissing: true }, // Optional
          { from: '**/*.png', to: '.', noErrorOnMissing: true }, // Optional
          { from: '**/*.html', to: '.', noErrorOnMissing: true }, // Optional
          { from: 'img/**/*', to: '.', noErrorOnMissing: true }, // Optional
          { from: 'libs/**/*', to: '.', noErrorOnMissing: true }, // Optional
          { from: 'static/**/*', to: '.', noErrorOnMissing: true }, // Optional
          { from: '**/query_help.md', to: '.', noErrorOnMissing: true }, // Optional
        ],
      }),
      // Replace certain template-variables in the README and plugin.json
      new ReplaceInFileWebpackPlugin([
        {
          dir: DIST_DIR,
          files: ['plugin.json', 'README.md'],
          rules: [
            {
              search: /\%VERSION\%/g,
              replace: getPackageJson().version,
            },
            {
              search: /\%TODAY\%/g,
              // ISO date prefix, e.g. "2024-02-27".
              replace: new Date().toISOString().substring(0, 10),
            },
            {
              search: /\%PLUGIN_ID\%/g,
              replace: pluginJson.id,
            },
          ],
        },
      ]),
      // Development-only plugins: live reload, async type checking and linting.
      ...(env.development
        ? [
            new LiveReloadPlugin(),
            new ForkTsCheckerWebpackPlugin({
              async: Boolean(env.development),
              issue: {
                include: [{ file: '**/*.{ts,tsx}' }],
              },
              typescript: { configFile: path.join(process.cwd(), 'tsconfig.json') },
            }),
            new ESLintPlugin({
              extensions: ['.ts', '.tsx'],
              lintDirtyModulesOnly: Boolean(env.development), // don't lint on start, only lint changed files
            }),
          ]
        : []),
    ],
    resolve: {
      extensions: ['.js', '.jsx', '.ts', '.tsx'],
      // handle resolving "rootDir" paths
      modules: [path.resolve(process.cwd(), 'src'), 'node_modules'],
      unsafeCache: true,
    },
  };

  // File watching is unreliable on WSL; fall back to polling.
  if (isWSL()) {
    baseConfig.watchOptions = {
      poll: 3000,
      ignored: /node_modules/,
    };
  }

  return baseConfig;
};

export default config;

View file

@ -2,8 +2,8 @@ const rulesDirPlugin = require('eslint-plugin-rulesdir');
rulesDirPlugin.RULES_DIR = 'tools/eslint-rules';
module.exports = {
extends: ['@grafana/eslint-config'],
plugins: ['rulesdir', 'import'],
extends: ['./.config/.eslintrc'],
plugins: ['rulesdir', 'import', 'unused-imports'],
settings: {
'import/internal-regex':
'^assets|^components|^containers|^contexts|^icons|^models|^network|^pages|^services|^state|^utils|^plugin',
@ -37,7 +37,9 @@ module.exports = {
},
],
'no-console': ['warn', { allow: ['warn', 'error'] }],
'no-unused-vars': [
'no-unused-vars': 'off',
'unused-imports/no-unused-imports': ['warn'],
'unused-imports/no-unused-vars': [
'warn',
{
vars: 'all',

1
grafana-plugin/.nvmrc Normal file
View file

@ -0,0 +1 @@
18.16.0

View file

@ -1,3 +1,4 @@
module.exports = {
...require('@grafana/toolkit/src/config/prettier.plugin.config.json'),
// Prettier configuration provided by Grafana scaffolding
...require('./.config/.prettierrc.js'),
};

View file

@ -1,15 +0,0 @@
{
"presets": [
["@babel/preset-env", { "targets": { "node": "current" } }],
"@babel/preset-react",
"@babel/preset-typescript"
],
"plugins": [
["@babel/plugin-proposal-decorators", { "legacy": true }],
["@babel/plugin-transform-destructuring", { "useBuiltIns": true }],
"@babel/plugin-transform-runtime",
["@babel/plugin-proposal-class-properties", { "loose": false }],
"@babel/transform-regenerator",
"@babel/plugin-transform-template-literals"
]
}

View file

@ -53,6 +53,6 @@ test('from_time and to_time for "Continue escalation if current UTC time is in r
await page.reload();
await page.waitForLoadState('networkidle');
expect(_getFromTimeInput()).toHaveValue(FROM_TIME);
expect(_getToTimeInput()).toHaveValue(TO_TIME);
await expect(_getFromTimeInput()).toHaveValue(FROM_TIME);
await expect(_getToTimeInput()).toHaveValue(TO_TIME);
});

View file

@ -1,4 +1,4 @@
import { test as base, Browser, Page, TestInfo } from '@playwright/test';
import { test as base, Browser, Fixtures, Page, TestInfo } from '@playwright/test';
import { VIEWER_USER_STORAGE_STATE, EDITOR_USER_STORAGE_STATE, ADMIN_USER_STORAGE_STATE } from '../playwright.config';
@ -7,8 +7,6 @@ import { GRAFANA_ADMIN_USERNAME, GRAFANA_EDITOR_USERNAME, GRAFANA_VIEWER_USERNAM
import * as fs from 'fs';
import * as path from 'path';
export class BaseRolePage {
page: Page;
userName: string;
@ -32,11 +30,14 @@ class AdminRolePage extends BaseRolePage {
userName = GRAFANA_ADMIN_USERNAME;
}
type Fixtures = {
interface TestFixtures extends Fixtures {
// currentGrafanaVersion: string;
viewerRolePage: ViewerRolePage;
editorRolePage: EditorRolePage;
adminRolePage: AdminRolePage;
};
}
interface WorkerFixtures extends Fixtures {}
/**
* NOTE: currently videos are not generated automatically because of how we generate a browserContext within our
@ -77,11 +78,16 @@ const _recordTestVideo = async (
};
export * from '@playwright/test';
export const test = base.extend<Fixtures>({
export const test = base.extend<TestFixtures, WorkerFixtures>({
viewerRolePage: ({ browser }, use, testInfo) =>
_recordTestVideo(browser, use, testInfo, VIEWER_USER_STORAGE_STATE, ViewerRolePage),
editorRolePage: async ({ browser }, use, testInfo) =>
_recordTestVideo(browser, use, testInfo, EDITOR_USER_STORAGE_STATE, EditorRolePage),
adminRolePage: async ({ browser }, use, testInfo) =>
_recordTestVideo(browser, use, testInfo, ADMIN_USER_STORAGE_STATE, AdminRolePage),
/**
* add back this fixture once this bug is fixed
* https://github.com/microsoft/playwright/issues/29608
*/
// currentGrafanaVersion: ({}, use) => use('9.0.0'),
});

View file

@ -12,7 +12,7 @@ import { getOnCallApiUrl } from 'utils/consts';
import { VIEWER_USER_STORAGE_STATE, EDITOR_USER_STORAGE_STATE, ADMIN_USER_STORAGE_STATE } from '../playwright.config';
import GrafanaAPIClient from './utils/clients/grafana';
import grafanaApiClient from './utils/clients/grafana';
import {
GRAFANA_ADMIN_PASSWORD,
GRAFANA_ADMIN_USERNAME,
@ -26,8 +26,6 @@ import {
import { clickButton, getInputByName } from './utils/forms';
import { goToGrafanaPage } from './utils/navigation';
const grafanaApiClient = new GrafanaAPIClient(GRAFANA_ADMIN_USERNAME, GRAFANA_ADMIN_PASSWORD);
enum OrgRole {
None = 'None',
Viewer = 'Viewer',
@ -74,7 +72,7 @@ const configureOnCallPlugin = async (page: Page): Promise<void> => {
* go to the oncall plugin configuration page and wait for the page to be loaded
*/
await goToGrafanaPage(page, '/plugins/grafana-oncall-app');
await page.waitForTimeout(2000);
await page.waitForTimeout(3000);
// if plugin is configured, go to OnCall
const isConfigured = (await page.getByText('Connected to OnCall').count()) >= 1;
@ -154,5 +152,18 @@ setup('Configure Grafana OnCall plugin', async ({ request }, { config }) => {
await configureOnCallPlugin(adminPage);
}
/**
* determine the current Grafana version of the stack in question and set it such that it can be used in the tests
* to conditionally skip certain tests.
*
* According to the Playwright docs, the best way to set config like this on the fly, is to set values
* on process.env https://playwright.dev/docs/test-global-setup-teardown#example
*
* TODO: when this bug is fixed in playwright https://github.com/microsoft/playwright/issues/29608
* move this to the currentGrafanaVersion fixture
*/
const currentGrafanaVersion = await grafanaApiClient.getGrafanaVersion(adminAuthedRequest);
process.env.CURRENT_GRAFANA_VERSION = currentGrafanaVersion;
await adminBrowserContext.close();
});

View file

@ -1,3 +1,4 @@
import semver from 'semver';
import { test, expect } from '../fixtures';
import { resolveFiringAlert } from '../utils/alertGroup';
import { createEscalationChain, EscalationStep } from '../utils/escalationChain';
@ -6,6 +7,18 @@ import { createIntegrationAndSendDemoAlert } from '../utils/integrations';
import { goToGrafanaPage, goToOnCallPage } from '../utils/navigation';
import { createOnCallSchedule } from '../utils/schedule';
/**
* Insights is dependent on Scenes which were only added in Grafana 10.0.0
* https://grafana.com/docs/grafana/latest/whatsnew/whats-new-in-v10-0/#scenes
* TODO: remove the process.env.CURRENT_GRAFANA_VERSION portion
* and use the currentGrafanaVersion fixture once this bugged is patched in playwright
* https://github.com/microsoft/playwright/issues/29608
*/
test.skip(
() => semver.lt(process.env.CURRENT_GRAFANA_VERSION, '10.0.0'),
'Insights is only available in Grafana 10.0.0 and above'
);
test.describe('Insights', () => {
test.beforeAll(async ({ adminRolePage: { page, userName } }) => {
const DATASOURCE_NAME = 'OnCall Prometheus';

View file

@ -7,8 +7,7 @@ const HEARTBEAT_SETTINGS_FORM_TEST_ID = 'heartbeat-settings-form';
test.describe("updating an integration's heartbeat interval works", async () => {
const _openHeartbeatSettingsForm = async (page: Page) => {
await page.getByTestId('integration-settings-context-menu-wrapper').getByRole('img').click();
await page.waitForTimeout(1000);
await page.getByTestId('integration-settings-context-menu-wrapper').click();
await page.getByTestId('integration-heartbeat-settings').click();
};

View file

@ -40,7 +40,7 @@ test.describe('maintenance mode works', () => {
const enableMaintenanceMode = async (page: Page, mode: MaintenanceModeType): Promise<void> => {
await _openIntegrationSettingsPopup(page, true);
// open the maintenance mode settings drawer + fill in the maintenance details
await page.getByTestId('integration-start-maintenance').click();
await page.getByText('Start Maintenance').click();
// fill in the form
const maintenanceModeDrawer = page.getByTestId('maintenance-mode-drawer');
@ -78,14 +78,10 @@ test.describe('maintenance mode works', () => {
await _openIntegrationSettingsPopup(page, true);
// click the stop maintenance button
await page.getByTestId('integration-stop-maintenance').click();
await page.getByText('Stop Maintenance').click();
// in the modal popup, confirm that we want to stop it
await clickButton({
page,
buttonText: 'Stop',
startingLocator: page.getByRole('dialog'),
});
await page.locator('button >> text=Stop').click();
await getRemainingTimeTooltip(page).waitFor({ state: 'hidden' });
};

View file

@ -40,7 +40,7 @@ test('create advanced webhook and check it is displayed on the list correctly',
await webhooksFormDivs.locator('.monaco-editor').first().click();
await page.keyboard.insertText(WEBHOOK_URL);
await clickButton({ page, buttonText: 'Create Webhook' });
await clickButton({ page, buttonText: 'Create' });
await checkWebhookPresenceInTable({ page, webhookName: WEBHOOK_NAME, expectedTriggerType: 'Resolved' });
});

View file

@ -18,7 +18,7 @@ test('create simple webhook and check it is displayed on the list correctly', as
await page.locator('[name=name]').fill(WEBHOOK_NAME);
await page.getByLabel('New Outgoing Webhook').getByRole('img').nth(1).click(); // Open team dropdown
await page.getByLabel('Select options menu').getByText('No team').click();
await clickButton({ page, buttonText: 'Create Webhook' });
await clickButton({ page, buttonText: 'Create' });
await checkWebhookPresenceInTable({ page, webhookName: WEBHOOK_NAME, expectedTriggerType: 'Escalation step' });
});

View file

@ -18,10 +18,6 @@ test('check schedule quality for simple 1-user schedule', async ({ adminRolePage
const scheduleQualityDetailsElement = page.getByTestId('schedule-quality-details');
await scheduleQualityDetailsElement.waitFor({ state: 'visible' });
await expect(scheduleQualityDetailsElement.locator('span[class*="Text"] >> nth=2 ')).toHaveText(
'Schedule has no gaps'
);
await expect(scheduleQualityDetailsElement.locator('span[class*="Text"] >> nth=3 ')).toHaveText(
'Schedule is perfectly balanced'
);
await expect(scheduleQualityDetailsElement).toHaveText(/Schedule has no gaps/);
await expect(scheduleQualityDetailsElement).toHaveText(/Schedule is perfectly balanced/);
});

View file

@ -1,7 +1,7 @@
import { OrgRole } from '@grafana/data';
import { expect, APIRequestContext } from '@playwright/test';
import { BASE_URL } from '../constants';
import { BASE_URL, GRAFANA_ADMIN_PASSWORD, GRAFANA_ADMIN_USERNAME } from '../constants';
type UsersLookupResponse = {
id: number;
@ -11,6 +11,12 @@ type CreateUserResponse = {
id: number;
};
type GetSettingsResponse = {
buildInfo: {
version: string;
};
};
class GrafanaApiException extends Error {
constructor(message: string) {
super(message);
@ -18,7 +24,7 @@ class GrafanaApiException extends Error {
}
}
export default class GrafanaAPIClient {
class GrafanaAPIClient {
userName: string;
password: string;
@ -113,4 +119,16 @@ export default class GrafanaAPIClient {
});
expect(res.ok()).toBeTruthy();
};
// https://grafana.com/docs/grafana/latest/developers/http_api/other/#get-settings
getGrafanaVersion = async (request: APIRequestContext): Promise<string> => {
const res = await request.get(`${BASE_URL}/api/frontend/settings`);
expect(res.ok()).toBeTruthy();
const data: GetSettingsResponse = await res.json();
return data.buildInfo.version;
};
}
const grafanaAPIClient = new GrafanaAPIClient(GRAFANA_ADMIN_USERNAME, GRAFANA_ADMIN_PASSWORD);
export default grafanaAPIClient;

View file

@ -1,5 +1,17 @@
import { Page } from '@playwright/test';
// close the currently opened modal
/**
* in Grafana v9 the aria-label is "Close dialog"
* in Grafana v10.0 the aria-label is "Close dialogue"
* in Grafana v10.1 the aria-label is "Close"
* 🙄
*
* https://playwright.dev/docs/other-locators#css-elements-matching-one-of-the-conditions
*/
const POSSIBLE_CLOSE_MODAL_BUTTON_DIALOGUE_ARIA_LABELS = ['Close dialog', 'Close dialogue', 'Close'];
const CLOSE_MODAL_BUTTON_ARIA_LABEL_SELECTOR = POSSIBLE_CLOSE_MODAL_BUTTON_DIALOGUE_ARIA_LABELS.map(
(ariaLabel) => `button[aria-label="${ariaLabel}"]`
).join(', ');
export const closeModal = async (page: Page): Promise<void> =>
(await page.waitForSelector('button[aria-label="Close dialogue"]')).click();
(await page.waitForSelector(CLOSE_MODAL_BUTTON_ARIA_LABEL_SELECTOR))?.click();

View file

@ -1,7 +1,7 @@
import { Page } from '@playwright/test';
import dayjs from 'dayjs';
import { clickButton, fillInInput, selectDropdownValue } from './forms';
import { clickButton, selectDropdownValue } from './forms';
import { goToOnCallPage } from './navigation';
export const createOnCallSchedule = async (page: Page, scheduleName: string, userName: string): Promise<void> => {
@ -13,7 +13,7 @@ export const createOnCallSchedule = async (page: Page, scheduleName: string, use
(await page.waitForSelector('button >> text=Create >> nth=0')).click();
// fill in the name input
await fillInInput(page, 'div[class*="ScheduleForm"] input[name="name"]', scheduleName);
await page.getByTestId('schedule-form').locator('input[name="name"]').fill(scheduleName);
// Add a new layer w/ the current user to it
await clickButton({ page, buttonText: 'Create Schedule' });

View file

@ -10,7 +10,6 @@ type NotifyBy = 'SMS' | 'Phone call';
const openUserSettingsModal = async (page: Page): Promise<void> => {
await goToOnCallPage(page, 'users');
await clickButton({ page, buttonText: 'View my profile' });
await page.locator('text=To edit user details such as Username, email, and role').waitFor({ state: 'visible' });
};
const getForgetPhoneNumberButton = (page: Page): Locator => page.locator('button >> text=Forget Phone Number');
@ -55,11 +54,9 @@ export const verifyUserPhoneNumber = async (page: Page): Promise<void> => {
* and then gets the notification type dropdown
*/
const getFirstDefaultNotificationSettingTypeDropdown = async (page: Page): Promise<Locator> => {
const defaultNotificationSettingsList = page.locator('ul[class*="Timeline-module"] >> nth=0');
await defaultNotificationSettingsList.waitFor({ state: 'visible' });
const firstDefaultNotificationSettingRow = defaultNotificationSettingsList.locator('li >> nth=0');
await firstDefaultNotificationSettingRow.waitFor({ state: 'visible' });
const firstDefaultNotificationSettingRow = page
.getByTestId('default-personal-notification-settings')
.locator('li >> nth=0');
// get the notification type dropdown specifically
return firstDefaultNotificationSettingRow.locator('div[class*="input-wrapper"] >> nth=1');
@ -69,18 +66,6 @@ export const configureUserNotificationSettings = async (page: Page, notifyBy: No
// open the user settings modal
await openUserSettingsModal(page);
/**
* see if we already have a default notification setting
* if we don't click the Add Notification Step button and add one
* otherwise update the existing one
*/
const defaultNotificationsAddNotificationStepButton = page.locator(
'div[class*="PersonalNotificationSettings"] >> nth=0 text=Add Notification Step'
);
if (await defaultNotificationsAddNotificationStepButton.isVisible()) {
await defaultNotificationsAddNotificationStepButton.click();
}
// select our notification type
const firstDefaultNotificationTypeDropdopdown = await getFirstDefaultNotificationSettingTypeDropdown(page);
await selectDropdownValue({

View file

@ -1,6 +1,11 @@
// force timezone to UTC to allow tests to work regardless of local timezone
// generally used by snapshots, but can affect specific tests
process.env.TZ = 'UTC';
const esModules = ['@grafana', 'uplot', 'ol', 'd3', 'react-colorful', 'uuid', 'openapi-fetch'].join('|');
module.exports = {
...require('./.config/jest.config'),
testEnvironment: 'jsdom',
moduleDirectories: ['node_modules', 'src'],
@ -23,4 +28,20 @@ module.exports = {
testTimeout: 10000,
testPathIgnorePatterns: ['/node_modules/', '/e2e-tests/'],
transform: {
'^.+\\.(t|j)sx?$': [
'@swc/jest',
{
sourceMaps: 'inline',
jsc: {
parser: {
syntax: 'typescript',
tsx: true,
decorators: true,
dynamicImport: true,
},
},
},
],
},
};

View file

@ -6,6 +6,10 @@ import '@testing-library/jest-dom';
import 'plugin/dayjs';
import { TextEncoder, TextDecoder } from 'util';
Object.assign(global, { TextDecoder, TextEncoder });
// https://stackoverflow.com/a/66055672
// https://jestjs.io/docs/manual-mocks#mocking-methods-which-are-not-implemented-in-jsdom
Object.defineProperty(window, 'matchMedia', {

View file

@ -4,11 +4,11 @@
"description": "Grafana OnCall Plugin",
"scripts": {
"lint": "eslint --cache --ext .js,.jsx,.ts,.tsx --max-warnings=0 ./src ./e2e-tests",
"lint:fix": "eslint --fix --cache --ext .js,.jsx,.ts,.tsx --quiet ./src ./e2e-tests",
"lint:fix": "eslint --fix --cache --ext .js,.jsx,.ts,.tsx ./src ./e2e-tests",
"stylelint": "stylelint ./src/**/*.{css,scss,module.css,module.scss}",
"stylelint:fix": "stylelint --fix ./src/**/*.{css,scss,module.css,module.scss}",
"build": "grafana-toolkit plugin:build",
"build:dev": "grafana-toolkit plugin:build --skipTest --skipLint",
"build": "webpack -c ./webpack.config.ts --env production",
"build:dev": "webpack -c ./webpack.config.ts --env development",
"labels:link": "yarn --cwd ../../gops-labels/frontend link && yarn link \"@grafana/labels\" && yarn --cwd ../../gops-labels/frontend watch",
"labels:unlink": "yarn --cwd ../../gops-labels/frontend unlink",
"test": "jest --verbose",
@ -19,12 +19,8 @@
"test:e2e:gen": "yarn playwright codegen http://localhost:3000",
"e2e-show-report": "yarn playwright show-report",
"generate-types": "cd ./src/network/oncall-api/types-generator && yarn generate",
"dev": "grafana-toolkit plugin:dev",
"watch": "grafana-toolkit plugin:dev --watch",
"sign": "grafana-toolkit plugin:sign",
"ci-build:finish": "grafana-toolkit plugin:ci-build --finish",
"ci-package": "grafana-toolkit plugin:ci-package",
"ci-report": "grafana-toolkit plugin:ci-report",
"watch": "webpack -w -c ./webpack.config.ts --env development",
"sign": "npx --yes @grafana/sign-plugin@latest",
"start": "yarn watch",
"plop": "plop",
"setversion": "setversion",
@ -50,31 +46,21 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"devDependencies": {
"@babel/plugin-proposal-class-properties": "^7.18.6",
"@babel/plugin-proposal-decorators": "^7.20.0",
"@babel/plugin-proposal-nullish-coalescing-operator": "^7.18.6",
"@babel/plugin-proposal-object-rest-spread": "^7.18.9",
"@babel/plugin-proposal-optional-chaining": "^7.18.9",
"@babel/plugin-syntax-decorators": "^7.18.6",
"@babel/plugin-syntax-dynamic-import": "^7.8.3",
"@babel/plugin-transform-destructuring": "^7.20.0",
"@babel/plugin-transform-react-constant-elements": "^7.18.12",
"@babel/plugin-transform-runtime": "^7.19.6",
"@babel/plugin-transform-typescript": "^7.18.12",
"@babel/preset-env": "^7.18.10",
"@babel/preset-react": "^7.18.6",
"@babel/preset-typescript": "^7.18.6",
"@grafana/eslint-config": "^5.1.0",
"@grafana/toolkit": "^9.5.2",
"@grafana/eslint-config": "^6.0.0",
"@grafana/tsconfig": "^1.2.0-rc1",
"@jest/globals": "^27.5.1",
"@playwright/test": "^1.39.0",
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/react": "12",
"@playwright/test": "^1.41.0",
"@swc/core": "^1.3.90",
"@swc/helpers": "^0.5.0",
"@swc/jest": "^0.2.26",
"@testing-library/jest-dom": "6.1.4",
"@testing-library/react": "14.0.0",
"@testing-library/user-event": "^14.4.3",
"@types/dompurify": "^2.3.4",
"@types/jest": "27.5.1",
"@types/jest": "^29.5.0",
"@types/lodash": "^4.14.194",
"@types/lodash-es": "^4.17.6",
"@types/node": "^18.11.9",
"@types/node": "^20.8.7",
"@types/query-string": "^6.3.0",
"@types/react-copy-to-clipboard": "^5.0.4",
"@types/react-dom": "^18.0.6",
@ -82,19 +68,26 @@
"@types/react-router-dom": "^5.3.3",
"@types/react-test-renderer": "^18.0.5",
"@types/react-transition-group": "^4.4.5",
"@types/testing-library__jest-dom": "5.14.8",
"@types/throttle-debounce": "^5.0.0",
"@typescript-eslint/eslint-plugin": "^5.40.1",
"babel-plugin-dynamic-import-node": "^2.3.3",
"copy-webpack-plugin": "^11.0.0",
"css-loader": "^6.7.3",
"dompurify": "^2.3.12",
"dotenv": "^16.0.3",
"eslint": "^8.25.0",
"eslint-plugin-deprecation": "^2.0.0",
"eslint-plugin-jsdoc": "^44.2.4",
"eslint-plugin-react": "^7.31.10",
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-rulesdir": "^0.2.1",
"jest": "27.5.1",
"jest-environment-jsdom": "^27.5.1",
"eslint-plugin-unused-imports": "^3.1.0",
"eslint-webpack-plugin": "^4.0.1",
"fork-ts-checker-webpack-plugin": "^8.0.0",
"glob": "^10.2.7",
"identity-obj-proxy": "3.0.0",
"jest": "^29.5.0",
"jest-environment-jsdom": "^29.5.0",
"lint-staged": "^10.2.11",
"lodash-es": "^4.17.21",
"mailslurp-client": "^15.14.1",
@ -102,26 +95,36 @@
"openapi-typescript": "^7.0.0-next.4",
"plop": "^2.7.4",
"postcss-loader": "^7.0.1",
"prettier": "^2.8.7",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-test-renderer": "^18.0.2",
"replace-in-file-webpack-plugin": "^1.0.6",
"sass": "1.63.2",
"sass-loader": "13.3.1",
"semver": "^7.6.0",
"style-loader": "3.3.3",
"stylelint-config-prettier": "^9.0.3",
"stylelint-prettier": "^2.0.0",
"swc-loader": "^0.2.3",
"ts-jest": "29.0.3",
"ts-loader": "^9.3.1",
"ts-node": "^10.9.1",
"typescript": "4.6.4",
"tsconfig-paths": "^4.2.0",
"typescript": "4.8.4",
"webpack": "^5.86.0",
"webpack-bundle-analyzer": "^4.6.1",
"webpack-cli": "^5.1.4",
"webpack-livereload-plugin": "^3.0.2"
},
"engines": {
"node": ">=14"
"node": "~18.16.0"
},
"dependencies": {
"@dnd-kit/core": "^6.0.8",
"@dnd-kit/modifiers": "^7.0.0",
"@dnd-kit/sortable": "^7.0.2",
"@dnd-kit/utilities": "^3.2.1",
"@emotion/css": "11.10.6",
"@grafana/data": "^10.2.3",
"@grafana/faro-web-sdk": "^1.0.0-beta4",
"@grafana/faro-web-tracing": "^1.0.0-beta4",
@ -133,6 +136,8 @@
"@lifeomic/attempt": "^3.0.3",
"@opentelemetry/api": "^1.3.0",
"array-move": "^4.0.0",
"axios": "^1.6.7",
"babel-loader": "^9.1.3",
"change-case": "^4.1.1",
"circular-dependency-plugin": "^5.2.2",
"dayjs": "^1.11.5",
@ -142,11 +147,12 @@
"mobx-react": "9.1.0",
"object-hash": "^3.0.0",
"openapi-fetch": "^0.8.1",
"prettier": "^2.8.2",
"qrcode.react": "^3.1.0",
"raw-loader": "^4.0.2",
"rc-table": "^7.17.1",
"react": "18.2.0",
"react-copy-to-clipboard": "^5.0.2",
"react-dom": "18.2.0",
"react-draggable": "^4.4.5",
"react-emoji-render": "^1.2.4",
"react-modal": "^3.15.1",
@ -155,9 +161,10 @@
"react-sortable-hoc": "^1.11.0",
"react-string-replace": "^0.4.4",
"react-transition-group": "^4.4.5",
"sass-loader": "^13.0.2",
"stylelint": "^13.13.1",
"stylelint-config-standard": "^22.0.0",
"throttle-debounce": "^2.1.0"
}
"throttle-debounce": "^2.1.0",
"tslib": "2.5.3"
},
"packageManager": "yarn@1.22.21"
}

View file

@ -0,0 +1,23 @@
import React, { FC } from 'react';
import { IconButton } from '@grafana/ui';
import CopyToClipboard from 'react-copy-to-clipboard';
import { openNotification } from 'utils/utils';
interface CopyToClipboardProps {
text: string;
iconButtonProps?: Partial<Parameters<typeof IconButton>[0]>;
}
export const CopyToClipboardIcon: FC<CopyToClipboardProps> = ({ text, iconButtonProps }) => {
const onCopy = () => {
openNotification('Copied to clipboard');
};
return (
<CopyToClipboard text={text} onCopy={onCopy}>
<IconButton aria-label="Copy" name="copy" {...iconButtonProps} />
</CopyToClipboard>
);
};

View file

@ -0,0 +1,77 @@
import React, { FC, ReactNode } from 'react';
import { css } from '@emotion/css';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import { HamburgerMenuIcon } from 'components/HamburgerMenuIcon/HamburgerMenuIcon';
import { WithContextMenu } from 'components/WithContextMenu/WithContextMenu';
import { WithPermissionControlTooltip } from 'containers/WithPermissionControl/WithPermissionControlTooltip';
import { isUserActionAllowed, UserAction } from 'utils/authorization/authorization';
interface HamburgerContextMenuProps {
items: Array<
{ onClick?: () => void; label: ReactNode; requiredPermission?: UserAction; hidden?: boolean } | 'divider'
>;
hamburgerIconClassName?: string;
}
export const HamburgerContextMenu: FC<HamburgerContextMenuProps> = ({ items, hamburgerIconClassName }) => {
const styles = useStyles2(getStyles);
return (
<WithContextMenu
renderMenuItems={() => (
<div className={styles.menuList}>
{items.map((item, idx) => {
if (item === 'divider') {
return <div key="line-break" className="thin-line-break" />;
} else if (item.hidden) {
return null;
}
return item.requiredPermission ? (
<WithPermissionControlTooltip key={idx} userAction={item.requiredPermission}>
<div
className={styles.menuItem}
key={idx}
onClick={isUserActionAllowed(item.requiredPermission) && item.onClick}
>
{item.label}
</div>
</WithPermissionControlTooltip>
) : (
<div className={styles.menuItem} key={idx} onClick={item.onClick}>
{item.label}
</div>
);
})}
</div>
)}
>
{({ openMenu }) => (
<HamburgerMenuIcon openMenu={openMenu} listBorder={2} listWidth={225} className={hamburgerIconClassName} />
)}
</WithContextMenu>
);
};
export const getStyles = (theme: GrafanaTheme2) => ({
menuList: css({
display: 'flex',
flexDirection: 'column',
width: '225px',
borderRadius: '2px',
}),
menuItem: css({
padding: '8px',
whiteSpace: 'nowrap',
borderLeft: '2px solid transparent',
minWidth: '84px',
gap: '8px',
cursor: 'pointer',
'&:hover': {
background: theme.colors.background.secondary,
},
}),
});

View file

@ -3,9 +3,9 @@ import React, { useRef } from 'react';
import { Icon } from '@grafana/ui';
import cn from 'classnames/bind';
import styles from './HamburgerMenu.module.scss';
import styles from './HamburgerMenuIcon.module.scss';
interface HamburgerMenuProps {
interface HamburgerMenuIconProps {
openMenu: React.MouseEventHandler<HTMLElement>;
listWidth: number;
listBorder: number;
@ -16,7 +16,7 @@ interface HamburgerMenuProps {
const cx = cn.bind(styles);
export const HamburgerMenu: React.FC<HamburgerMenuProps> = (props) => {
export const HamburgerMenuIcon: React.FC<HamburgerMenuIconProps> = (props) => {
const ref = useRef<HTMLDivElement>();
const { openMenu, listBorder, listWidth, withBackground, className, stopPropagation = false } = props;
return (

View file

@ -14,12 +14,11 @@ const cx = cn.bind(styles);
export interface IntegrationCollapsibleItem {
isHidden?: boolean;
customIcon?: IconName;
canHoverIcon: boolean;
canHoverIcon?: boolean;
isTextIcon?: boolean;
collapsedView: (toggle?: () => void) => React.ReactNode; // needs toggle param for toggling on click
collapsedView?: (toggle?: () => void) => React.ReactNode; // needs toggle param for toggling on click
expandedView: () => React.ReactNode; // for consistency, this is also a function
isCollapsible: boolean;
iconText?: string;
isCollapsible?: boolean;
isExpanded?: boolean;
startingElemPosition?: string;
onStateChange?(isChecked: boolean): void;

View file

@ -18,7 +18,7 @@ import { observer } from 'mobx-react';
import { GTable } from 'components/GTable/GTable';
import { IntegrationBlock } from 'components/Integrations/IntegrationBlock';
import { Tag } from 'components/Tag/Tag';
import { IntegrationTag } from 'components/Integrations/IntegrationTag';
import { Text } from 'components/Text/Text';
import { WithConfirm } from 'components/WithConfirm/WithConfirm';
import { AlertReceiveChannelHelper } from 'models/alert_receive_channel/alert_receive_channel.helpers';
@ -26,7 +26,6 @@ import { ContactPoint } from 'models/alert_receive_channel/alert_receive_channel
import { ApiSchemas } from 'network/oncall-api/api.types';
import styles from 'pages/integration/Integration.module.scss';
import { useStore } from 'state/useStore';
import { getVar } from 'utils/DOM';
import { openErrorNotification, openNotification } from 'utils/utils';
const cx = cn.bind(styles);
@ -145,11 +144,7 @@ export const IntegrationContactPoint: React.FC<{
)}
<HorizontalGroup spacing="md">
<Tag color={getVar('--tag-secondary-transparent')} border={getVar('--border-weak')} className={cx('tag')}>
<Text type="primary" size="small" className={cx('radius')}>
Contact point
</Text>
</Tag>
<IntegrationTag>Contact point</IntegrationTag>
{contactPoints?.length ? (
<HorizontalGroup>

View file

@ -6,12 +6,11 @@ import { noop } from 'lodash-es';
import { IntegrationInputField } from 'components/IntegrationInputField/IntegrationInputField';
import { IntegrationBlock } from 'components/Integrations/IntegrationBlock';
import { Tag } from 'components/Tag/Tag';
import { IntegrationTag } from 'components/Integrations/IntegrationTag';
import { Text } from 'components/Text/Text';
import { ApiSchemas } from 'network/oncall-api/api.types';
import styles from 'pages/integration/Integration.module.scss';
import { useStore } from 'state/useStore';
import { getVar } from 'utils/DOM';
const cx = cn.bind(styles);
@ -40,11 +39,7 @@ export const IntegrationHowToConnect: React.FC<{ id: ApiSchemas['AlertReceiveCha
toggle={noop}
heading={
<div className={cx('how-to-connect__container')}>
<Tag color={getVar('--tag-secondary-transparent')} border={getVar('--border-weak')} className={cx('tag')}>
<Text type="primary" size="small" className={cx('radius')}>
{howToConnectTagName(item?.integration)}
</Text>
</Tag>
<IntegrationTag>{howToConnectTagName(item?.integration)}</IntegrationTag>
{item?.integration === 'direct_paging' ? (
<>
<Text type="secondary">Alert Groups raised manually via Web or ChatOps</Text>

View file

@ -2,9 +2,8 @@ import React, { useState } from 'react';
import { HorizontalGroup, IconButton, Input } from '@grafana/ui';
import cn from 'classnames/bind';
import CopyToClipboard from 'react-copy-to-clipboard';
import { openNotification } from 'utils/utils';
import { CopyToClipboardIcon } from 'components/CopyToClipboardIcon/CopyToClipboardIcon';
import styles from './IntegrationInputField.module.scss';
@ -36,11 +35,7 @@ export const IntegrationInputField: React.FC<IntegrationInputFieldProps> = ({
<div className={cx('icons')}>
<HorizontalGroup spacing={'xs'}>
{showEye && <IconButton aria-label="Reveal" name={'eye'} size={'xs'} onClick={onInputReveal} />}
{showCopy && (
<CopyToClipboard text={value} onCopy={onCopy}>
<IconButton aria-label="Copy" name={'copy'} size={'xs'} />
</CopyToClipboard>
)}
{showCopy && <CopyToClipboardIcon text={value} iconButtonProps={{ size: 'xs' }} />}
{showExternal && <IconButton aria-label="Open" name={'external-link-alt'} size={'xs'} onClick={onOpen} />}
</HorizontalGroup>
</div>
@ -55,10 +50,6 @@ export const IntegrationInputField: React.FC<IntegrationInputFieldProps> = ({
setIsMasked(!isInputMasked);
}
function onCopy() {
openNotification("Integration's HTTP Endpoint is copied");
}
function onOpen() {
window.open(value, '_blank');
}

View file

@ -8,7 +8,7 @@ import { logoCoors } from './IntegrationLogo.config';
import styles from 'components/IntegrationLogo/IntegrationLogo.module.css';
interface IntegrationLogoProps {
export interface IntegrationLogoProps {
integration: SelectOption;
scale: number;
}

View file

@ -0,0 +1,18 @@
import React, { FC } from 'react';
import { HorizontalGroup } from '@grafana/ui';
import { Text } from 'components/Text/Text';
import { IntegrationLogo, IntegrationLogoProps } from './IntegrationLogo';
interface IntegrationLogoWithTitleProps {
integration: IntegrationLogoProps['integration'];
}
export const IntegrationLogoWithTitle: FC<IntegrationLogoWithTitleProps> = ({ integration }) => (
<HorizontalGroup spacing="xs">
<IntegrationLogo scale={0.08} integration={integration} />
<Text type="primary">{integration?.display_name}</Text>
</HorizontalGroup>
);

View file

@ -11,9 +11,9 @@ const cx = cn.bind(styles);
interface IntegrationBlockProps {
className?: string;
noContent: boolean;
noContent?: boolean;
heading: React.ReactNode;
content: React.ReactNode;
content?: React.ReactNode;
toggle?: () => void;
}

View file

@ -0,0 +1,35 @@
import React, { FC } from 'react';
import { css } from '@emotion/css';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import { Tag } from 'components/Tag/Tag';
import { Text } from 'components/Text/Text';
interface IntegrationTagProps {
children: React.ReactNode;
}
export const IntegrationTag: FC<IntegrationTagProps> = ({ children }) => {
const styles = useStyles2(getStyles);
return (
<Tag className={styles.tag}>
<Text type="primary" size="small" className={styles.radius}>
{children}
</Text>
</Tag>
);
};
export const getStyles = (theme: GrafanaTheme2) => ({
tag: css({
height: '25px',
background: theme.colors.background.secondary,
border: `1px solid ${theme.colors.border.weak}`,
}),
radius: css({
borderRadius: '4px',
}),
});

View file

@ -57,7 +57,7 @@ const getStyles = (theme: GrafanaTheme2, color?: string, size?: string) => {
color: ${fontColor};
font-size: ${theme.typography.bodySmall.fontSize};
border-radius: ${theme.shape.borderRadius(2)};
border-radius: ${theme.shape.radius.default};
`,
label: css`
display: flex;
@ -68,8 +68,8 @@ const getStyles = (theme: GrafanaTheme2, color?: string, size?: string) => {
background: ${backgroundColor};
border: solid 1px ${borderColor};
border-top-left-radius: ${theme.shape.borderRadius(2)};
border-bottom-left-radius: ${theme.shape.borderRadius(2)};
border-top-left-radius: ${theme.shape.radius.default};
border-bottom-left-radius: ${theme.shape.radius.default};
`,
value: css`
color: inherit;
@ -78,8 +78,8 @@ const getStyles = (theme: GrafanaTheme2, color?: string, size?: string) => {
border: solid 1px ${borderColor};
border-left: none;
border-top-right-radius: ${theme.shape.borderRadius(2)};
border-bottom-right-radius: ${theme.shape.borderRadius(2)};
border-top-right-radius: ${theme.shape.radius.default};
border-bottom-right-radius: ${theme.shape.radius.default};
`,
};
};

View file

@ -1,6 +1,7 @@
import React, { FC, useCallback } from 'react';
import React, { ComponentProps, FC, useCallback } from 'react';
import { CodeEditor, CodeEditorSuggestionItemKind, LoadingPlaceholder } from '@grafana/ui';
import cn from 'classnames';
import { getPaths } from 'utils/utils';
@ -21,6 +22,8 @@ interface MonacoEditorProps {
loading?: boolean;
monacoOptions?: any;
suggestionPrefix?: string;
containerClassName?: string;
codeEditorProps?: Partial<ComponentProps<typeof CodeEditor>>;
}
export enum MONACO_LANGUAGE {
@ -51,6 +54,8 @@ export const MonacoEditor: FC<MonacoEditorProps> = (props) => {
showLineNumbers = true,
loading = false,
suggestionPrefix = 'payload.',
containerClassName,
codeEditorProps,
} = props;
const autoCompleteList = useCallback(
@ -100,7 +105,8 @@ export const MonacoEditor: FC<MonacoEditorProps> = (props) => {
height={height}
onEditorDidMount={handleMount}
getSuggestions={useAutoCompleteList ? autoCompleteList : undefined}
containerStyles="u-width-height-100"
containerStyles={cn('u-width-height-100', containerClassName)}
{...codeEditorProps}
/>
);
};

View file

@ -1,6 +1,7 @@
.root {
position: relative;
width: 100%;
min-height: 200px;
&:hover .copyButton,
&:hover .copyIcon {

View file

@ -1,9 +1,10 @@
import React, { FC } from 'react';
import { Button, IconButton, Tooltip } from '@grafana/ui';
import { Button, IconButton } from '@grafana/ui';
import cn from 'classnames/bind';
import CopyToClipboard from 'react-copy-to-clipboard';
import { formatSourceCodeJsonString } from 'utils/string';
import { openNotification } from 'utils/utils';
import styles from './SourceCode.module.scss';
@ -14,33 +15,40 @@ interface SourceCodeProps {
noMaxHeight?: boolean;
showClipboardIconOnly?: boolean;
showCopyToClipboard?: boolean;
children?: any;
className?: string;
children?: string;
rootClassName?: string;
preClassName?: string;
prettifyJsonString?: boolean;
}
export const SourceCode: FC<SourceCodeProps> = (props) => {
const { children, noMaxHeight = false, showClipboardIconOnly = false, showCopyToClipboard = true, className } = props;
export const SourceCode: FC<SourceCodeProps> = ({
children,
noMaxHeight = false,
showClipboardIconOnly = false,
showCopyToClipboard = true,
rootClassName,
preClassName,
prettifyJsonString,
}) => {
const showClipboardCopy = showClipboardIconOnly || showCopyToClipboard;
return (
<div className={cx('root')}>
<div className={cx('root', rootClassName)}>
{showClipboardCopy && (
<CopyToClipboard
text={children as string}
text={children}
onCopy={() => {
openNotification('Copied!');
}}
>
{showClipboardIconOnly ? (
<Tooltip placement="top" content="Copy to Clipboard">
<IconButton
aria-label="Copy"
className={cx('copyIcon')}
size={'lg'}
name="copy"
data-testid="test__copyIcon"
/>
</Tooltip>
<IconButton
aria-label="Copy"
className={cx('copyIcon')}
size={'lg'}
name="copy"
data-testid="test__copyIcon"
/>
) : (
<Button
className={cx('copyButton')}
@ -60,10 +68,10 @@ export const SourceCode: FC<SourceCodeProps> = (props) => {
{
'scroller--maxHeight': !noMaxHeight,
},
className
preClassName
)}
>
<code>{children}</code>
<code>{prettifyJsonString ? formatSourceCodeJsonString(children) : children}</code>
</pre>
</div>
);

View file

@ -1,9 +1,11 @@
import React, { FC, useState } from 'react';
import React, { FC, useEffect, useState } from 'react';
import { css } from '@emotion/css';
import { Tab, TabsBar, TabContent, useStyles2 } from '@grafana/ui';
import cn from 'classnames';
import { LocationHelper } from 'utils/LocationHelper';
interface TabConfig {
label: string;
content: React.ReactNode;
@ -11,24 +13,45 @@ interface TabConfig {
interface TabsProps {
tabs: TabConfig[];
defaultActiveLabel?: string;
tabContentClassName?: string;
shouldBeSyncedWithQueryString?: boolean;
// in case there are more than 1 <Tabs /> in the page, we want to use different queryString keys
queryStringKey?: string;
}
export const Tabs: FC<TabsProps> = ({ tabs, defaultActiveLabel, tabContentClassName }) => {
export const Tabs: FC<TabsProps> = ({
tabs,
tabContentClassName,
shouldBeSyncedWithQueryString = true,
queryStringKey = 'activeTab',
}) => {
const styles = useStyles2(getStyles);
const [activeTabLabel, setActiveTabLabel] = useState(defaultActiveLabel || tabs[0].label);
const defaultActiveLabel =
(shouldBeSyncedWithQueryString && LocationHelper.getQueryParam(queryStringKey)) || tabs[0].label;
const [activeTabLabel, setActiveTabLabel] = useState(defaultActiveLabel);
const setLabel = (label: string) => {
setActiveTabLabel(label);
if (shouldBeSyncedWithQueryString) {
LocationHelper.update({ [queryStringKey]: label }, 'partial');
}
};
useEffect(
() => () => {
if (shouldBeSyncedWithQueryString) {
LocationHelper.update({ [queryStringKey]: undefined }, 'partial');
}
},
[]
);
return (
<>
<TabsBar>
{tabs.map(({ label }) => (
<Tab
label={label}
key={label}
onChangeTab={() => setActiveTabLabel(label)}
active={activeTabLabel === label}
/>
<Tab label={label} key={label} onChangeTab={() => setLabel(label)} active={activeTabLabel === label} />
))}
</TabsBar>
<TabContent className={cn(styles.content, tabContentClassName)}>

View file

@ -3,5 +3,11 @@
line-height: 100%;
padding: 5px 8px;
color: white;
display: inline-block;
white-space: nowrap;
}
.size-small {
font-size: 12px;
height: 24px;
}

View file

@ -8,27 +8,33 @@ interface TagProps {
color?: string;
className?: string;
border?: string;
text?: string;
children?: any;
onClick?: (ev) => void;
forwardedRef?: React.MutableRefObject<HTMLSpanElement>;
size?: 'small' | 'medium';
}
const cx = cn.bind(styles);
export const Tag: FC<TagProps> = (props) => {
const { children, color, className, border, onClick } = props;
const { children, color, text, className, border, onClick, size = 'medium' } = props;
const style: React.CSSProperties = {};
if (color) {
style.backgroundColor = color;
}
if (text) {
style.color = text;
}
if (border) {
style.border = border;
}
return (
<span style={style} className={cx('root', className)} onClick={onClick} ref={props.forwardedRef}>
<span style={style} className={cx('root', `size-${size}`, className)} onClick={onClick} ref={props.forwardedRef}>
{children}
</span>
);

View file

@ -7,7 +7,7 @@ exports[`Unauthorized renders properly - access control enabled: false 1`] = `
<div
className="css-8tu8mo-vertical-group"
style={
Object {
{
"height": "100%",
"width": "100%",
}
@ -22,7 +22,7 @@ exports[`Unauthorized renders properly - access control enabled: false 1`] = `
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -40,7 +40,7 @@ exports[`Unauthorized renders properly - access control enabled: false 1`] = `
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -65,7 +65,7 @@ exports[`Unauthorized renders properly - access control enabled: true 1`] = `
<div
className="css-8tu8mo-vertical-group"
style={
Object {
{
"height": "100%",
"width": "100%",
}
@ -80,7 +80,7 @@ exports[`Unauthorized renders properly - access control enabled: true 1`] = `
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -98,7 +98,7 @@ exports[`Unauthorized renders properly - access control enabled: true 1`] = `
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -123,7 +123,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Admin 1
<div
className="css-8tu8mo-vertical-group"
style={
Object {
{
"height": "100%",
"width": "100%",
}
@ -138,7 +138,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Admin 1
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -156,7 +156,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Admin 1
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -181,7 +181,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Editor
<div
className="css-8tu8mo-vertical-group"
style={
Object {
{
"height": "100%",
"width": "100%",
}
@ -196,7 +196,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Editor
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -214,7 +214,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Editor
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -239,7 +239,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Viewer
<div
className="css-8tu8mo-vertical-group"
style={
Object {
{
"height": "100%",
"width": "100%",
}
@ -254,7 +254,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Viewer
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}
@ -272,7 +272,7 @@ exports[`Unauthorized renders properly the grammar for different roles - Viewer
<span
className="root text text--undefined text--medium"
style={
Object {
{
"maxWidth": undefined,
}
}

View file

@ -0,0 +1,132 @@
import React, { FC, useMemo } from 'react';
import { css } from '@emotion/css';
import { GrafanaTheme2 } from '@grafana/data';
import { VerticalGroup, HorizontalGroup, Badge, useStyles2, Tooltip, Icon, useTheme2 } from '@grafana/ui';
import dayjs from 'dayjs';
import { SourceCode } from 'components/SourceCode/SourceCode';
import { Tabs } from 'components/Tabs/Tabs';
import { Text } from 'components/Text/Text';
import { OutgoingWebhook } from 'models/outgoing_webhook/outgoing_webhook.types';
import { getTzOffsetString } from 'models/timezone/timezone.helpers';
import { WebhookStatusCodeBadge } from './WebhookStatusCodeBadge';
interface WebhookLastEventDetailsProps {
  // Webhook whose last trigger event (request/response log) is displayed
  webhook: OutgoingWebhook;
  // Optional class applied to the root of each <SourceCode> body panel
  sourceCodeRootClassName?: string;
}
/**
 * Shows details of the most recent event that triggered the webhook:
 * a summary table (trigger type, time, URL, method, response code) followed
 * by tabs with the event body, response body and request headers.
 * Renders a placeholder message when no event has been logged yet.
 */
export const WebhookLastEventDetails: FC<WebhookLastEventDetailsProps> = ({ webhook, sourceCodeRootClassName }) => {
  const styles = useStyles2(getStyles);
  const theme = useTheme2();
  const rows = useMemo(() => getEventDetailsRows(theme, webhook), [theme, webhook]);

  // Props shared by every <SourceCode> panel in the tabs below
  const sharedSourceCodeProps = {
    showClipboardIconOnly: true,
    prettifyJsonString: true,
    noMaxHeight: true,
    rootClassName: sourceCodeRootClassName,
    preClassName: styles.sourceCodePre,
  };

  // No timestamp in the log means the webhook has never fired
  if (!webhook.last_response_log?.timestamp) {
    return (
      <Text type="primary" size="medium">
        An event triggering of this webhook has not been sent yet.
      </Text>
    );
  }

  const { request_data, content, request_headers } = webhook.last_response_log;
  const tabs = [
    { label: 'Event body', body: request_data },
    { label: 'Response body', body: content },
    { label: 'Request headers', body: request_headers },
  ].map(({ label, body }) => ({
    label,
    content: <SourceCode {...sharedSourceCodeProps}>{body || 'No data'}</SourceCode>,
  }));

  return (
    <>
      <div className={styles.lastEventDetailsRowsWrapper}>
        <VerticalGroup spacing="md">
          {rows.map(({ title, value }) => (
            <HorizontalGroup key={title}>
              <span className={styles.lastEventDetailsRowTitle}>{title}</span>
              <span className={styles.lastEventDetailsRowValue}>{value}</span>
            </HorizontalGroup>
          ))}
        </VerticalGroup>
      </div>
      {/* Dedicated query-string key so this Tabs instance doesn't clash with others on the page */}
      <Tabs queryStringKey="lastEventDetailsActiveTab" tabs={tabs} />
    </>
  );
};
/**
 * Builds the title/value rows for the last-event summary table.
 * Returns an empty list when no webhook is provided.
 */
const getEventDetailsRows = (theme: GrafanaTheme2, webhook?: OutgoingWebhook) => {
  if (!webhook) {
    return [];
  }

  const eventMoment = dayjs(webhook.last_response_log?.timestamp);
  const loggedUrl = webhook.last_response_log?.url;

  return [
    {
      title: 'Trigger type',
      value: webhook.trigger_type_name,
    },
    {
      title: 'Time',
      value: `${eventMoment.format('DD MMM YYYY, HH:mm')} (${getTzOffsetString(eventMoment)})`,
    },
    {
      title: 'URL',
      value: (
        <HorizontalGroup align="center">
          <span>{webhook.url}</span>
          {/* Warn when the URL actually called differs from the configured one */}
          {loggedUrl && webhook.url !== loggedUrl && (
            <Tooltip content={loggedUrl}>
              <Icon name="exclamation-triangle" color={theme.colors.error.main} />
            </Tooltip>
          )}
        </HorizontalGroup>
      ),
    },
    {
      title: 'Method',
      value: <Badge color="blue" text={webhook.http_method} />,
    },
    {
      title: 'Response code',
      value: <WebhookStatusCodeBadge webhook={webhook} />,
    },
  ];
};
// Emotion styles for the last-event details view
const getStyles = () => ({
  // Fixed-width label column so values line up
  lastEventDetailsRowTitle: css({ width: '150px' }),
  lastEventDetailsRowValue: css({ fontWeight: 500 }),
  lastEventDetailsRowsWrapper: css({ marginBottom: '26px' }),
  // Let the <pre> fill its container inside the tabs
  sourceCodePre: css({ height: '100%' }),
});

View file

@ -0,0 +1,72 @@
import React from 'react';
import { css } from '@emotion/css';
import { useTheme2, useStyles2, HorizontalGroup, Button } from '@grafana/ui';
import dayjs from 'dayjs';
import { Tag } from 'components/Tag/Tag';
import { OutgoingWebhook } from 'models/outgoing_webhook/outgoing_webhook.types';
import { getTzOffsetString } from 'models/timezone/timezone.helpers';
import { OutgoingTabDrawerKey } from 'pages/integration/OutgoingTab/OutgoingTab.types';
import { WebhookStatusCodeBadge } from './WebhookStatusCodeBadge';
/**
 * Renders the timestamp of the webhook's last trigger event as a small tag,
 * alongside its status-code badge and a button opening the event-details drawer.
 * Shows a "Never" tag when no valid timestamp has been logged.
 */
export const WebhookLastEventTimestamp = ({
  webhook,
  openDrawer,
}: {
  webhook: OutgoingWebhook;
  openDrawer: (key: OutgoingTabDrawerKey) => void;
}) => {
  const theme = useTheme2();
  const styles = useStyles2(getStyles);

  const eventMoment = dayjs(webhook.last_response_log?.timestamp);

  // Both the "Never" and the timestamp tags share everything except text color and content
  const renderTag = (textColor: string, children: React.ReactNode) => (
    <Tag
      color={theme.colors.background.secondary}
      border={`1px solid ${theme.colors.border.weak}`}
      text={textColor}
      size="small"
    >
      {children}
    </Tag>
  );

  if (!eventMoment.isValid()) {
    return renderTag(theme.colors.text.secondary, 'Never');
  }

  const formattedTimestamp = `${eventMoment.format('DD MMM YYYY')}, ${eventMoment.format(
    'HH:mm:ss'
  )} (${getTzOffsetString(eventMoment)})`;

  return (
    <HorizontalGroup>
      {renderTag(theme.colors.text.primary, formattedTimestamp)}
      <WebhookStatusCodeBadge webhook={webhook} />
      <Button
        size="sm"
        icon="eye"
        tooltip="Go to event details"
        variant="secondary"
        className={styles.eventDetailsIconButton}
        onClick={() => openDrawer('webhookDetails')}
      />
    </HorizontalGroup>
  );
};
// Emotion styles for the last-event timestamp cell
export const getStyles = () => ({
  // Tighten the icon-only button so it matches the small tag height
  eventDetailsIconButton: css({ padding: '6px 10px' }),
});

View file

@ -0,0 +1,43 @@
import React from 'react';
import { css } from '@emotion/css';
import { Badge, Button, useStyles2 } from '@grafana/ui';
import { OutgoingWebhook } from 'models/outgoing_webhook/outgoing_webhook.types';
/**
 * Name cell for the webhooks table: the name as a text button (opens the
 * webhook), plus a "Disabled" badge when the webhook is turned off.
 */
export const WebhookName = ({
  webhook: { is_webhook_enabled, name },
  onNameClick,
}: {
  webhook: OutgoingWebhook;
  onNameClick: () => void;
}) => {
  const styles = useStyles2(getStyles);

  const disabledBadge = is_webhook_enabled ? null : (
    <Badge className={styles.disabledBadge} text="Disabled" color="orange" icon="pause" />
  );

  return (
    <div className={styles.nameColumn}>
      <Button fill="text" className={styles.webhookName} onClick={onNameClick}>
        {name}
      </Button>
      {disabledBadge}
    </div>
  );
};
// Emotion styles for the webhook name cell
export const getStyles = () => ({
  nameColumn: css({
    display: 'flex',
    alignItems: 'center',
    gap: '4px',
  }),
  webhookName: css({
    wordBreak: 'break-word',
    padding: 0,
    // Text-fill buttons shouldn't get a hover background here
    '&:hover': { background: 'none' },
  }),
  // Keep "Disabled" on one line
  disabledBadge: css({ wordBreak: 'keep-all' }),
});

View file

@ -0,0 +1,29 @@
import React, { FC } from 'react';
import { css } from '@emotion/css';
import { Badge, useStyles2 } from '@grafana/ui';
import { OutgoingWebhook } from 'models/outgoing_webhook/outgoing_webhook.types';
interface WebhookStatusCodeBadgeProps {
  // Webhook whose last logged HTTP status code is rendered as a badge
  webhook: OutgoingWebhook;
}
export const WebhookStatusCodeBadge: FC<WebhookStatusCodeBadgeProps> = ({ webhook }) => {
const styles = useStyles2(getStyles);
return (
<Badge
color={`${webhook.last_response_log?.status_code}`?.startsWith?.('2') ? 'green' : 'orange'}
text={webhook.last_response_log?.status_code || 'No status'}
className={styles.lastEventBadge}
/>
);
};
// Emotion styles for the status-code badge
const getStyles = () => ({
  // Prevent the badge text from wrapping inside narrow table cells
  lastEventBadge: css({
    wordBreak: 'keep-all',
    whiteSpace: 'nowrap',
  }),
});

View file

@ -28,6 +28,7 @@ describe('AddRespondersPopup', () => {
},
grafanaTeamStore: {
getSearchResult: jest.fn().mockReturnValue(teams),
updateItems: jest.fn(),
},
userStore: {
search: jest.fn().mockReturnValue({ results: [] }),

View file

@ -16,7 +16,7 @@ import cn from 'classnames/bind';
import { observer } from 'mobx-react';
import CopyToClipboard from 'react-copy-to-clipboard';
import { HamburgerMenu } from 'components/HamburgerMenu/HamburgerMenu';
import { HamburgerMenuIcon } from 'components/HamburgerMenuIcon/HamburgerMenuIcon';
import {
IntegrationCollapsibleTreeView,
IntegrationCollapsibleItem,
@ -444,7 +444,7 @@ export const RouteButtonsDisplay: React.FC<RouteButtonsDisplayProps> = ({
)}
>
{({ openMenu }) => (
<HamburgerMenu
<HamburgerMenuIcon
openMenu={openMenu}
listBorder={2}
listWidth={200}

View file

@ -7,24 +7,16 @@ import { FormItem, FormItemType } from 'components/GForm/GForm.types';
import { AlertReceiveChannelStore } from 'models/alert_receive_channel/alert_receive_channel';
import { AlertReceiveChannelHelper } from 'models/alert_receive_channel/alert_receive_channel.helpers';
import { GrafanaTeamStore } from 'models/grafana_team/grafana_team';
import { OutgoingWebhookPreset } from 'models/outgoing_webhook/outgoing_webhook.types';
import {
HTTP_METHOD_OPTIONS,
OutgoingWebhookPreset,
WebhookTriggerType,
WEBHOOK_TRIGGGER_TYPE_OPTIONS,
} from 'models/outgoing_webhook/outgoing_webhook.types';
import { generateAssignToTeamInputDescription } from 'utils/consts';
import { KeyValuePair } from 'utils/utils';
import { WebhookFormFieldName } from './OutgoingWebhookForm.types';
export const WebhookTriggerType = {
EscalationStep: new KeyValuePair('0', 'Escalation Step'),
AlertGroupCreated: new KeyValuePair('1', 'Alert Group Created'),
Acknowledged: new KeyValuePair('2', 'Acknowledged'),
Resolved: new KeyValuePair('3', 'Resolved'),
Silenced: new KeyValuePair('4', 'Silenced'),
Unsilenced: new KeyValuePair('5', 'Unsilenced'),
Unresolved: new KeyValuePair('6', 'Unresolved'),
Unacknowledged: new KeyValuePair('7', 'Unacknowledged'),
AlertGroupStatusChange: new KeyValuePair('8', 'Alert Group Status Change'),
};
export function createForm({
presets = [],
grafanaTeamStore,
@ -78,44 +70,7 @@ export function createForm({
type: FormItemType.Select,
extra: {
placeholder: 'Choose (Required)',
options: [
{
value: WebhookTriggerType.EscalationStep.key,
label: WebhookTriggerType.EscalationStep.value,
},
{
value: WebhookTriggerType.AlertGroupCreated.key,
label: WebhookTriggerType.AlertGroupCreated.value,
},
{
value: WebhookTriggerType.AlertGroupStatusChange.key,
label: WebhookTriggerType.AlertGroupStatusChange.value,
},
{
value: WebhookTriggerType.Acknowledged.key,
label: WebhookTriggerType.Acknowledged.value,
},
{
value: WebhookTriggerType.Resolved.key,
label: WebhookTriggerType.Resolved.value,
},
{
value: WebhookTriggerType.Silenced.key,
label: WebhookTriggerType.Silenced.value,
},
{
value: WebhookTriggerType.Unsilenced.key,
label: WebhookTriggerType.Unsilenced.value,
},
{
value: WebhookTriggerType.Unresolved.key,
label: WebhookTriggerType.Unresolved.value,
},
{
value: WebhookTriggerType.Unacknowledged.key,
label: WebhookTriggerType.Unacknowledged.value,
},
],
options: WEBHOOK_TRIGGGER_TYPE_OPTIONS,
},
isHidden: (data) => !isPresetFieldVisible(data.preset, presets, WebhookFormFieldName.TriggerType),
normalize: (value) => value,
@ -126,32 +81,7 @@ export function createForm({
type: FormItemType.Select,
extra: {
placeholder: 'Choose (Required)',
options: [
{
value: 'GET',
label: 'GET',
},
{
value: 'POST',
label: 'POST',
},
{
value: 'PUT',
label: 'PUT',
},
{
value: 'PATCH',
label: 'PATCH',
},
{
value: 'DELETE',
label: 'DELETE',
},
{
value: 'OPTIONS',
label: 'OPTIONS',
},
],
options: HTTP_METHOD_OPTIONS,
},
isHidden: (data) => !isPresetFieldVisible(data.preset, presets, WebhookFormFieldName.HttpMethod),
normalize: (value) => value,

View file

@ -10,9 +10,8 @@
margin: 4px;
}
.tabs__content {
.tabsWrapper {
padding-top: 16px;
padding-bottom: 16px;
}
.form-row {
@ -30,6 +29,11 @@
display: none !important;
}
.sourceCodeRoot {
height: calc(100vh - 530px);
min-height: 200px;
}
.cards {
display: flex;
flex-wrap: wrap;

Some files were not shown because too many files have changed in this diff Show more