This commit is contained in:
Matvey Kukuy 2022-06-14 18:36:43 +03:00
commit 0a4b9909dc
201 changed files with 3852 additions and 3837 deletions

View file

@ -5,4 +5,5 @@ frontend/node_modules
frontend/build
package-lock.json
./engine/extensions
.env
.env
.env-hobby

View file

@ -1,3 +1,4 @@
---
kind: pipeline
type: docker
name: Build and Release
@ -11,7 +12,6 @@ steps:
- cd grafana-plugin/
- if [ -z "$DRONE_TAG" ]; then echo "No tag, not modifying version"; else jq '.version="${DRONE_TAG}"' package.json > package.new && mv package.new package.json && jq '.version' package.json; fi
- yarn --network-timeout 500000
- yarn plop "Append build info" "${DRONE_TAG}" "${DRONE_BRANCH}" "${DRONE_COMMIT}"
- yarn build
- ls ./
@ -30,8 +30,8 @@ steps:
- yarn ci-build:finish
- yarn ci-package
- cd ci/dist
- zip -r grafana-oncall-app-${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}.zip ./grafana-oncall-app
- if [ -z "$DRONE_TAG" ]; then echo "No tag, skipping archive"; else cp grafana-oncall-app-${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}.zip grafana-oncall-app-${DRONE_TAG}.zip; fi
- zip -r grafana-oncall-app.zip ./grafana-oncall-app
- if [ -z "$DRONE_TAG" ]; then echo "No tag, skipping archive"; else cp grafana-oncall-app.zip grafana-oncall-app-${DRONE_TAG}.zip; fi
- name: Publish Plugin to GCS (release)
image: plugins/gcs
@ -84,7 +84,7 @@ steps:
- pre-commit run black --all-files
- pre-commit run flake8 --all-files
- name: Test Backend
- name: Unit Test Backend
image: python:3.9
environment:
DJANGO_SETTINGS_MODULE: settings.ci-test
@ -92,10 +92,8 @@ steps:
commands:
- apt-get update && apt-get install -y netcat
- cd engine/
- mkdir sqlite_data
- pip install -r requirements.txt
- pytest --ds=settings.ci-test
- rm -rf sqlite_data
- ./wait_for_test_mysql_start.sh && pytest --ds=settings.ci-test
depends_on:
- rabbit_test
@ -124,7 +122,7 @@ steps:
from_secret: gcr_admin
depends_on:
- Lint Backend
- Test Backend
- Unit Test Backend
- Image Tag
- name: Build and Push Engine Docker Image Backend to Dockerhub
@ -139,14 +137,14 @@ steps:
from_secret: docker_username
depends_on:
- Lint Backend
- Test Backend
- Unit Test Backend
- Image Tag
when:
ref:
- refs/heads/dev
- refs/tags/v*.*.*
# Services for Test Backend
# Services for Unit Test Backend
services:
- name: rabbit_test
image: rabbitmq:3.7.19
@ -154,10 +152,23 @@ services:
RABBITMQ_DEFAULT_USER: rabbitmq
RABBITMQ_DEFAULT_PASS: rabbitmq
- name: mysql_test
image: mysql:5.7.25
environment:
MYSQL_DATABASE: oncall_local_dev
MYSQL_ROOT_PASSWORD: local_dev_pwd
trigger:
event:
- push
include:
- tag
- push
- pull_request
ref:
include:
- refs/heads/main
- refs/heads/dev
- refs/tags/v*.*.*
---
# Secret for pulling docker images.
@ -227,4 +238,9 @@ get:
name: machine-user-token
path: infra/data/ci/drone
kind: secret
name: drone_token
name: drone_token
---
kind: signature
hmac: 7621bb1ccfcbec9f92c385670f2b2790859aba25f31c4936997123906fb102c0
...

View file

@ -1,3 +1,5 @@
RUNSERVER_PORT=8080
SLACK_CLIENT_OAUTH_ID=
SLACK_CLIENT_OAUTH_SECRET=
SLACK_API_TOKEN=
@ -19,13 +21,13 @@ SENDGRID_FROM_EMAIL=
DJANGO_SETTINGS_MODULE=settings.dev
SECRET_KEY=jkashdkjashdkjh
BASE_URL=http://localhost:8000
BASE_URL=http://localhost:8080
FEATURE_TELEGRAM_INTEGRATION_ENABLED=
FEATURE_TELEGRAM_INTEGRATION_ENABLED=True
FEATURE_SLACK_INTEGRATION_ENABLED=True
FEATURE_EXTRA_MESSAGING_BACKENDS_ENABLED=
SLACK_INSTALL_RETURN_REDIRECT_HOST=http://localhost:8000
SLACK_INSTALL_RETURN_REDIRECT_HOST=http://localhost:8080
SOCIAL_AUTH_REDIRECT_IS_HTTPS=False
GRAFANA_INCIDENT_STATIC_API_KEY=

View file

@ -1,39 +0,0 @@
name: backend-ci
on:
push:
branches:
- main
pull_request:
jobs:
lint:
name: Backend Checks
runs-on: ubuntu-latest
container: python:3.9
env:
DJANGO_SETTINGS_MODULE: settings.ci-test
SLACK_CLIENT_OAUTH_ID: 1
services:
rabbit_test:
image: rabbitmq:3.7.19
env:
RABBITMQ_DEFAULT_USER: rabbitmq
RABBITMQ_DEFAULT_PASS: rabbitmq
steps:
- uses: actions/checkout@v2
- name: Lint Backend
run: |
pip install $(grep "pre-commit" engine/requirements.txt)
pre-commit run isort --all-files
pre-commit run black --all-files
pre-commit run flake8 --all-files
- name: Test Backend
run: |
apt-get update && apt-get install -y netcat
cd engine/
mkdir sqlite_data
pip install -r requirements.txt
pytest --ds=settings.ci-test

56
.github/workflows/ci.yml vendored Normal file
View file

@ -0,0 +1,56 @@
name: ci
on:
push:
branches:
- main
- dev
pull_request:
jobs:
lint:
runs-on: ubuntu-latest
container: python:3.9
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: 14
- name: Build
run: |
pip install $(grep "pre-commit" engine/requirements.txt)
npm install -g yarn
cd grafana-plugin/
yarn --network-timeout 500000
yarn build
- name: Lint All
run: |
pre-commit run --all-files
unit-test-backend:
runs-on: ubuntu-latest
container: python:3.9
env:
DJANGO_SETTINGS_MODULE: settings.ci-test
SLACK_CLIENT_OAUTH_ID: 1
services:
rabbit_test:
image: rabbitmq:3.7.19
env:
RABBITMQ_DEFAULT_USER: rabbitmq
RABBITMQ_DEFAULT_PASS: rabbitmq
mysql_test:
image: mysql:5.7.25
env:
MYSQL_DATABASE: oncall_local_dev
MYSQL_ROOT_PASSWORD: local_dev_pwd
steps:
- uses: actions/checkout@v2
- name: Unit Test Backend
run: |
apt-get update && apt-get install -y netcat
cd engine/
pip install -r requirements.txt
./wait_for_test_mysql_start.sh && pytest --ds=settings.ci-test -x

View file

@ -1,21 +0,0 @@
name: frontend-ci
on:
push:
branches:
- main
pull_request:
jobs:
lint:
name: Frontend Checks
runs-on: ubuntu-latest
env:
DJANGO_SETTINGS_MODULE: settings.ci-test
steps:
- uses: actions/checkout@v2
- name: Build Frontend Plugin
run: |
cd grafana-plugin/
yarn --network-timeout 500000
yarn build

30
.github/workflows/synk.yml vendored Normal file
View file

@ -0,0 +1,30 @@
name: snyk
on:
push:
branches: [ main,dev ]
pull_request:
branches: [ main,dev ]
jobs:
security-scan:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.9'
- uses: actions/setup-node@v3
with:
node-version: 14
- uses: snyk/actions/setup@master
- name: Install Dependencies
run: |
pip install -r engine/requirements.txt
cd grafana-plugin/
yarn --network-timeout 500000
- name: Run Snyk
continue-on-error: true
run: snyk test --all-projects --severity-threshold=high
env:
SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }}

1
.gitignore vendored
View file

@ -3,6 +3,7 @@
*.pyc
venv
.env
.env_hobby
.vscode
dump.rdb
.idea

View file

@ -1,3 +1,5 @@
# Change Log
## 1.0.0 (2022-02-02)
## 0.0.71 (2022-06-06)
- Initial Release

46
CODE_OF_CONDUCT.md Normal file
View file

@ -0,0 +1,46 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
- The use of sexualized language or imagery and unwelcome sexual attention or advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at conduct@grafana.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

View file

@ -28,7 +28,7 @@
1. Start stateful services (RabbitMQ, Redis, Grafana with mounted plugin folder)
```bash
docker-compose -f developer-docker-compose.yml up -d
docker-compose -f docker-compose-developer.yml up -d
```
2. Prepare a python environment:
@ -53,13 +53,10 @@ export $(grep -v '^#' .env | xargs -0)
# Hint: there is a known issue with uwsgi. It's not used in the local dev environment. Feel free to comment it in `engine/requirements.txt`.
cd engine && pip install -r requirements.txt
# Create folder for database
mkdir sqlite_data
# Migrate the DB:
python manage.py migrate
# Create user for django admin panel:
# Create user for django admin panel (if you need it):
python manage.py createsuperuser
```
@ -67,16 +64,16 @@ python manage.py createsuperuser
3. Launch the backend:
```bash
# Http server:
python manage.py runserver
python manage.py runserver 8080
# Worker for background tasks(run it in the parallel terminal, don't forget to export .env there)
# Worker for background tasks (run it in the parallel terminal, don't forget to export .env there)
python manage.py start_celery
# Additionally you could launch the worker with periodic tasks launcher (99% you don't need this)
celery -A engine beat -l info
```
4. All set! Check out internal API endpoints at http://localhost:8000/.
4. All set! Check out internal API endpoints at http://localhost:8080/.
### Frontend setup
@ -105,9 +102,9 @@ python manage.py issue_invite_for_the_frontend --override
6. Some configuration fields will become available. Fill them out and click Initialize OnCall
```
OnCall API URL:
http://host.docker.internal:8000
http://host.docker.internal:8080
OnCall Invitation Token (Single use token to connect Grafana instance):
Invitation Token (Single use token to connect Grafana instance):
Response from the invite generator command (check above)
Grafana URL (URL OnCall will use to talk to Grafana instance):
@ -119,7 +116,7 @@ host IP from inside the container by running:
```bash
/sbin/ip route|awk '/default/ { print $3 }'
# Alternatively add host.docker.internal as an extra_host for grafana in developer-docker-compose.yml
# Alternatively add host.docker.internal as an extra_host for grafana in docker-compose-developer.yml
extra_hosts:
- "host.docker.internal:host-gateway"
@ -127,161 +124,7 @@ extra_hosts:
### Slack application setup
This instruction is also applicable if you set up self-hosted OnCall.
1. Start a [localtunnel](https://github.com/localtunnel/localtunnel) reverse proxy to make oncall engine api accessible to slack (if you don't have OnCall backend accessible from https),
```bash
# Choose the unique prefix instead of pretty-turkey-83
# Localtunnel will generate an url, e.g. https://pretty-turkey-83.loca.lt
# it is referred as <ONCALL_ENGINE_PUBLIC_URL> below
lt --port 8000 -s pretty-turkey-83 --print-requests
```
2. [Create a Slack Workspace](https://slack.com/create) for development.
3. Go to https://api.slack.com/apps and click Create New App button
4. Select `From an app manifest` option and choose the right workspace
5. Copy and paste the following block with the correct <YOUR_BOT_NAME> and <ONCALL_ENGINE_PUBLIC_URL> fields
<details>
<summary>Click to expand!</summary>
```yaml
_metadata:
major_version: 1
minor_version: 1
display_information:
name: <YOUR_BOT_NAME>
features:
app_home:
home_tab_enabled: true
messages_tab_enabled: true
messages_tab_read_only_enabled: false
bot_user:
display_name: <YOUR_BOT_NAME>
always_online: true
shortcuts:
- name: Create a new incident
type: message
callback_id: incident_create
description: Creates a new OnCall incident
- name: Add to postmortem
type: message
callback_id: add_postmortem
description: Add this message to postmortem
slash_commands:
- command: /oncall
url: <ONCALL_ENGINE_PUBLIC_URL>/slack/interactive_api_endpoint/
description: oncall
should_escape: false
oauth_config:
redirect_urls:
- <ONCALL_ENGINE_PUBLIC_URL>/api/internal/v1/complete/slack-install-free/
- <ONCALL_ENGINE_PUBLIC_URL>/api/internal/v1/complete/slack-login/
scopes:
user:
- channels:read
- chat:write
- identify
- users.profile:read
bot:
- app_mentions:read
- channels:history
- channels:read
- chat:write
- chat:write.customize
- chat:write.public
- commands
- files:write
- groups:history
- groups:read
- im:history
- im:read
- im:write
- mpim:history
- mpim:read
- mpim:write
- reactions:write
- team:read
- usergroups:read
- usergroups:write
- users.profile:read
- users:read
- users:read.email
- users:write
settings:
event_subscriptions:
request_url: <ONCALL_ENGINE_PUBLIC_URL>/slack/event_api_endpoint/
bot_events:
- app_home_opened
- app_mention
- channel_archive
- channel_created
- channel_deleted
- channel_rename
- channel_unarchive
- member_joined_channel
- message.channels
- message.im
- subteam_created
- subteam_members_changed
- subteam_updated
- user_change
interactivity:
is_enabled: true
request_url: <ONCALL_ENGINE_PUBLIC_URL>/slack/interactive_api_endpoint/
org_deploy_enabled: false
socket_mode_enabled: false
```
</details>
6. Click `Install to workspace` button to generate the credentials
6. Populate the environment with variables related to Slack
In your `.env` file, fill out the following variables:
```
SLACK_CLIENT_OAUTH_ID = Basic Information -> App Credentials -> Client ID
SLACK_CLIENT_OAUTH_SECRET = Basic Information -> App Credentials -> Client Secret
SLACK_API_TOKEN = OAuth & Permissions -> Bot User OAuth Token
SLACK_INSTALL_RETURN_REDIRECT_HOST = https://pretty-turkey-83.loca.lt
```
Don't forget to export variables from the `.env` file and restart the server!
7. Edit `grafana-plugin/grafana-plugin.yml` to set `onCallApiUrl` fields with localtunnel url
```
onCallApiUrl: https://pretty-turkey-83.loca.lt
```
or set BASE_URL Env variable through web interface.
8. Edit grafana-plugin/src/plugin.json to add `Bypass-Tunnel-Reminder` header section for all existing routes
> these headers are required for local development only, otherwise localtunnel blocks requests from grafana plugin
```
{
"path": ...,
...
"headers": [
...
{
"name": "Bypass-Tunnel-Reminder",
"content": "True"
}
]
},
```
9. Rebuild the plugin
```
yarn watch
```
10. Restart grafana instance
11. All set! Go to Slack and check if your application is functional.
For Slack app configuration check our docs: https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup
## Troubleshooting
@ -383,3 +226,19 @@ pytest --ds=settings.dev
- Set Settings to settings/dev.py
5. Create a new Django Server run configuration to Run/Debug the engine
- Use a plugin such as EnvFile to load the .env file
- Change port from 8000 to 8080
## Update drone build
The .drone.yml build file must be signed when changes are made to it. Follow these steps:
If you have not installed drone CLI follow [these instructions](https://docs.drone.io/cli/install/)
To sign the .drone.yml file:
```bash
export DRONE_SERVER=https://drone.grafana.net
# Get your drone token from https://drone.grafana.net/account
export DRONE_TOKEN=<Your DRONE_TOKEN>
drone sign --save grafana/oncall .drone.yml
```

159
GOVERNANCE.md Normal file
View file

@ -0,0 +1,159 @@
---
title: Governance
---
# Governance
This document describes the rules and governance of the project. It is meant to be followed by all the developers of the project and the OnCall community. Common terminology used in this governance document are listed below:
- **Team members**: Any members of the private [team mailing list][team].
- **Maintainers**: Maintainers lead an individual project or parts thereof ([`MAINTAINERS.md`][maintainers]).
- **Projects**: A single repository in the Grafana GitHub organization and listed below is referred to as a project:
- oncall
- **The OnCall project**: The sum of all activities performed under this governance, concerning one or more repositories or the community.
## Values
The OnCall developers and community are expected to follow the values defined in the [Code of Conduct][coc]. Furthermore, the OnCall community strives for kindness, giving feedback effectively, and building a welcoming environment. The OnCall developers generally decide by consensus and only resort to conflict resolution by a majority vote if consensus cannot be reached.
## Projects
Each project must have a [`MAINTAINERS.md`][maintainers] file with at least one maintainer. Where a project has a release process, access and documentation should be such that more than one person can perform a release. Releases should be announced on the [announcements](https://github.com/grafana/oncall/discussions/categories/announcements) category at the GitHub Discussions. Any new projects should be first proposed on the [team mailing list][team] following the voting procedures listed below.
## Decision making
### Team members
Team member status may be given to those who have made ongoing contributions to the OnCall project for at least 3 months. This is usually in the form of code improvements and/or notable work on documentation, but organizing events or user support could also be taken into account.
New members may be proposed by any existing member by email to the [team mailing list][team]. It is highly desirable to reach consensus about acceptance of a new member. However, the proposal is ultimately voted on by a formal [supermajority vote](#supermajority-vote).
If the new member proposal is accepted, the proposed team member should be contacted privately via email to confirm or deny their acceptance of team membership. This email will also be CC'd to the [team mailing list][team] for record-keeping purposes.
If they choose to accept, the [onboarding](#onboarding) procedure is followed.
Team members may retire at any time by emailing [the team][team].
Team members can be removed by [supermajority vote](#supermajority-vote) on [the team mailing list][team].
For this vote, the member in question is not eligible to vote and does not count towards the quorum.
Any removal vote can cover only one single person.
Upon death of a member, they leave the team automatically.
In case a member leaves, the [offboarding](#offboarding) procedure is applied.
The current team members are:
- Ildar Iskhakov — [@iskhakov](https://github.com/iskhakov) ([Grafana Labs](https://grafana.com/))
- Innokentii Konstantinov — [@Konstantinov-Innokentii](https://github.com/Konstantinov-Innokentii) ([Grafana Labs](https://grafana.com/))
- Matías Bordese — [@matiasb](https://github.com/matiasb) ([Grafana Labs](https://grafana.com/))
- Matvey Kukuy — [@Matvey-Kuk](https://github.com/Matvey-Kuk) ([Grafana Labs](https://grafana.com/))
- Michael Derynck — [@mderynck](https://github.com/mderynck) ([Grafana Labs](https://grafana.com/))
- Vadim Stepanov — [@vadimkerr](https://github.com/vadimkerr) ([Grafana Labs](https://grafana.com/))
- Yulia Shanyrova — [@Ukochka](https://github.com/Ukochka) ([Grafana Labs](https://grafana.com/))
- Maxim Mordasov — [@maskin25](https://github.com/maskin25) ([Grafana Labs](https://grafana.com/))
- Julia Artyukhina — [@Ferril](https://github.com/Ferril) ([Grafana Labs](https://grafana.com/))
Previous team members:
- n/a
### Maintainers
Maintainers lead one or more project(s) or parts thereof and serve as a point of conflict resolution amongst the contributors to this project. Ideally, maintainers are also team members, but exceptions are possible for suitable maintainers that, for whatever reason, are not yet team members.
Changes in maintainership have to be announced on the [announcements](https://github.com/grafana/oncall/discussions/categories/announcements) category at the GitHub Discussions. They are decided by [rough consensus](#consensus) and formalized by changing the [`MAINTAINERS.md`][maintainers] file of the respective repository.
Maintainers are granted commit rights to all projects covered by this governance.
A maintainer or committer may resign by notifying the [team mailing list][team]. A maintainer with no project activity for a year is considered to have resigned. Maintainers that wish to resign are encouraged to propose another team member to take over the project.
A project may have multiple maintainers, as long as the responsibilities are clearly agreed upon between them. This includes coordinating who handles which issues and pull requests.
### Technical decisions
Technical decisions that only affect a single project are made informally by the maintainer of this project, and [rough consensus](#consensus) is assumed. Technical decisions that span multiple parts of the project should be discussed and made on the [GitHub Discussions](https://github.com/grafana/oncall/discussions).
Decisions are usually made by [rough consensus](#consensus). If no consensus can be reached, the matter may be resolved by [majority vote](#majority-vote).
### Governance changes
Changes to this document are made by Grafana Labs.
### Other matters
Any matter that needs a decision may be called to a vote by any member if they deem it necessary. For private or personnel matters, discussion and voting takes place on the [team mailing list][team], otherwise on the [GitHub Discussions](https://github.com/grafana/oncall/discussions).
## Voting
The OnCall project usually runs by informal consensus, however sometimes a formal decision must be made.
Depending on the subject matter, as laid out [above](#decision-making), different methods of voting are used.
For all votes, voting must be open for at least one week. The end date should be clearly stated in the call to vote. A vote may be called and closed early if enough votes have come in one way so that further votes cannot change the final decision.
In all cases, all and only [team members](#team-members) are eligible to vote, with the sole exception of the forced removal of a team member, in which said member is not eligible to vote.
Discussion and votes on personnel matters (including but not limited to team membership and maintainership) are held in private on the [team mailing list][team]. All other discussion and votes are held in public on the [GitHub Discussions](https://github.com/grafana/oncall/discussions).
For public discussions, anyone interested is encouraged to participate. Formal power to object or vote is limited to [team members](#team-members).
### Consensus
The default decision making mechanism for the OnCall project is [rough][rough] consensus. This means that any decision on technical issues is considered supported by the [team][team] as long as nobody objects or the objection has been considered but not necessarily accommodated.
Silence on any consensus decision is implicit agreement and equivalent to explicit agreement. Explicit agreement may be stated at will. Decisions may, but do not need to be called out and put up for decision on the [GitHub Discussions](https://github.com/grafana/oncall/discussions) at any time and by anyone.
Consensus decisions can never override or go against the spirit of an earlier explicit vote.
If any [team member](#team-members) raises objections, the team members work together towards a solution that all involved can accept. This solution is again subject to rough consensus.
In case no consensus can be found, but a decision one way or the other must be made, any [team member](#team-members) may call a formal [majority vote](#majority-vote).
### Majority vote
Majority votes must be called explicitly in a separate thread on the appropriate mailing list. The subject must be prefixed with `[VOTE]`. In the body, the call to vote must state the proposal being voted on. It should reference any discussion leading up to this point.
Votes may take the form of a single proposal, with the option to vote yes or no, or the form of multiple alternatives.
A vote on a single proposal is considered successful if more vote in favor than against.
If there are multiple alternatives, members may vote for one or more alternatives, or vote “no” to object to all alternatives. It is not possible to cast an “abstain” vote. A vote on multiple alternatives is considered decided in favor of one alternative if it has received the most votes in favor, and a vote from more than half of those voting. Should no alternative reach this quorum, another vote on a reduced number of options may be called separately.
### Supermajority vote
Supermajority votes must be called explicitly in a separate thread on the appropriate mailing list. The subject must be prefixed with `[VOTE]`. In the body, the call to vote must state the proposal being voted on. It should reference any discussion leading up to this point.
Votes may take the form of a single proposal, with the option to vote yes or no, or the form of multiple alternatives.
A vote on a single proposal is considered successful if at least two thirds of those eligible to vote vote in favor.
If there are multiple alternatives, members may vote for one or more alternatives, or vote “no” to object to all alternatives. A vote on multiple alternatives is considered decided in favor of one alternative if it has received the most votes in favor, and a vote from at least two thirds of those eligible to vote. Should no alternative reach this quorum, another vote on a reduced number of options may be called separately.
## On- / Offboarding
### Onboarding
The new member is
- added to the list of [team members](#team-members). Ideally by sending a PR of their own, at least approving said PR.
- announced on the [GitHub Discussions](https://github.com/grafana/oncall/discussions) by an existing team member. Ideally, the new member replies in this thread, acknowledging team membership.
- added to the projects with commit rights.
- added to the [team mailing list][team].
### Offboarding
The ex-member is
- removed from the list of [team members](#team-members). Ideally by sending a PR of their own, at least approving said PR. In case of forced removal, no approval is needed.
- removed from the projects. Optionally, they can retain maintainership of one or more repositories if the [team](#team-members) agrees.
- removed from the team mailing list and demoted to a normal member of the other mailing lists.
- not allowed to call themselves an active team member any more, nor allowed to imply this to be the case.
- added to a list of previous members if they so choose.
If needed, we reserve the right to publicly announce removal.

View file

@ -9,9 +9,11 @@ The default license for this project is [AGPL-3.0-only](LICENSE).
The following directories and their subdirectories are licensed under Apache-2.0:
```
n/a
```
The following directories and their subdirectories are licensed under their original upstream licenses:
```
n/a
```

14
MAINTAINERS.md Normal file
View file

@ -0,0 +1,14 @@
The following are the main/default maintainers:
- Ildar Iskhakov — [@iskhakov](https://github.com/iskhakov) ([Grafana Labs](https://grafana.com/))
- Matvey Kukuy — [@Matvey-Kuk](https://github.com/Matvey-Kuk) ([Grafana Labs](https://grafana.com/))
Some parts of the codebase have other maintainers, the package paths also include all sub-packages:
n/a
For the sake of brevity, not all subtrees are explicitly listed. Due to the
size of this repository, the natural changes in focus of maintainers over time,
and nuances of where particular features live, this list will always be
incomplete and out of date. However the listed maintainer(s) should be able to
direct a PR/question to the right person.

View file

@ -1,76 +1,61 @@
# Grafana OnCall Incident Response
Grafana OnCall, cloud version of Grafana OnCall: https://grafana.com/products/cloud/
<img width="400px" src="docs/img/logo.png">
Developer-friendly, incident response management with brilliant Slack integration.
- Connect monitoring systems
- Collect and analyze data
- On-call rotation
- Automatic escalation
- Never miss alerts with calls and SMS
Developer-friendly incident response with brilliant Slack integration.
![Grafana OnCall Screenshot](screenshot.png)
<img width="60%" src="screenshot.png">
- Collect and analyze alerts from multiple monitoring systems
- On-call rotations based on schedules
- Automatic escalations
- Phone calls, SMS, Slack, Telegram notifications
## Getting Started
OnCall consists of two parts:
1. OnCall backend
2. "Grafana OnCall" plugin you need to install in your Grafana
### How to run OnCall backend
1. An all-in-one image of OnCall is available on docker hub to run it:
We prepared multiple environments: [production](https://grafana.com/docs/grafana-cloud/oncall/open-source/#production-environment), [developer](DEVELOPER.md) and hobby:
1. Download docker-compose.yaml:
```bash
docker run -it --name oncall-backend -p 8000:8000 grafana/oncall-all-in-one
curl https://github.com/grafana/oncall/blob/dev/docker-compose.yml -o docker-compose.yaml
```
2. When the image starts up you will see a message like this:
2. Set variables:
```bash
👋 This script will issue an invite token to securely connect the frontend.
Maintainers will be happy to help in the slack channel #grafana-oncall: https://slack.grafana.com/
Your invite token: <TOKEN>, use it in the Grafana OnCall plugin.
echo "DOMAIN=http://localhost:8080
SECRET_KEY=my_random_secret_must_be_more_than_32_characters_long
RABBITMQ_PASSWORD=rabbitmq_secret_pw
MYSQL_PASSWORD=mysql_secret_pw
COMPOSE_PROFILES=with_grafana # Remove this line if you want to use existing grafana
GRAFANA_USER=admin
GRAFANA_PASSWORD=admin" > .env_hobby
```
3. If you started your container detached with -d check the log:
3. Launch services:
```bash
docker logs oncall-backend
docker-compose --env-file .env_hobby -f docker-compose.yml up --build -d
```
### How to install "Grafana OnCall" Plugin and connect with a backend
1. Open Grafana in your browser and login as an Admin
2. Navigate to Configuration &rarr; Plugins
3. Type Grafana OnCall into the "Search Grafana plugins" field
4. Select the Grafana OnCall plugin and press the "Install" button
5. On the Grafana OnCall Plugin page Enable the plugin and go to the Configuration tab you should see a status field with the message
```
OnCall has not been setup, configure & initialize below.
```
6. Fill in configuration fields using the token you got from the backend earlier, then press "Install Configuration"
```
OnCall API URL: (The URL & port used to access OnCall)
http://host.docker.internal:8000
OnCall Invitation Token (Single use token to connect Grafana instance):
Invitation token from docker startup
Grafana URL (URL OnCall will use to talk to this Grafana instance):
http://localhost:3000 (or http://host.docker.internal:3000 if your grafana is running in Docker locally)
4. Issue one-time invite token:
```bash
docker-compose --env-file .env_hobby -f docker-compose.yml run engine python manage.py issue_invite_for_the_frontend --override
```
## Getting Help
- `#grafana-oncall` channel at https://slack.grafana.com/
- Grafana Labs community forum for OnCall: https://community.grafana.com
- File an [issue](https://github.com/grafana/oncall/issues) for bugs, issues and feature suggestions.
5. Go to [OnCall Plugin Configuration](http://localhost:3000/plugins/grafana-oncall-app) (or find OnCall plugin in configuration->plugins) and connect OnCall _plugin_ with OnCall _backend_:
```
Invite token: ^^^ from the previous step.
OnCall backend URL: http://engine:8080
Grafana Url: http://grafana:3000
```
## Production Setup
6. Enjoy!
Looking for the production instructions? We're going to release them soon. Please join our Slack channel to be the first to know about them.
## Join community
<a href="https://github.com/grafana/oncall/discussions/categories/community-calls"><img width="200px" src="docs/img/community_call.png"></a>
<a href="https://github.com/grafana/oncall/discussions"><img width="200px" src="docs/img/GH_discussions.png"></a>
<a href="https://slack.grafana.com/"><img width="200px" src="docs/img/slack.png"></a>
## Further Reading
- *Documentation* - [Grafana OnCall](https://grafana.com/docs/grafana-cloud/oncall/)
- *Blog Post* - [Announcing Grafana OnCall, the easiest way to do on-call management](https://grafana.com/blog/2021/11/09/announcing-grafana-oncall/)
- *Presentation* - [Deep dive into the Grafana, Prometheus, and Alertmanager stack for alerting and on-call management](https://grafana.com/go/observabilitycon/2021/alerting/?pg=blog)
## FAQ
- How do I generate a new invitation token to connect plugin with a backend?
```bash
docker exec oncall-backend python manage.py issue_invite_for_the_frontend --override
```

View file

@ -3,16 +3,16 @@ version: '3.2'
services:
mysql:
image: mariadb:10.2
image: mysql:5.7
platform: linux/x86_64
mem_limit: 500m
cpus: 0.5
command: --default-authentication-plugin=mysql_native_password
command: --default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
restart: always
ports:
- 3306:3306
environment:
MYSQL_ROOT_PASSWORD: local_dev_pwd
MYSQL_ROOT_PASSWORD: empty
MYSQL_DATABASE: oncall_local_dev
healthcheck:
test: [ "CMD", "mysqladmin" ,"ping", "-h", "localhost" ]
@ -40,15 +40,15 @@ services:
- 5672:5672
mysql-to-create-grafana-db:
image: mariadb:10.2
image: mysql:5.7
platform: linux/x86_64
command: bash -c "mysql -h mysql -uroot -plocal_dev_pwd -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'"
command: bash -c "mysql -h mysql -uroot -pempty -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'"
depends_on:
mysql:
condition: service_healthy
grafana:
image: "grafana/grafana:8.3.2"
image: "grafana/grafana:9.0.0-beta3"
restart: always
mem_limit: 500m
cpus: 0.5
@ -56,7 +56,7 @@ services:
GF_DATABASE_TYPE: mysql
GF_DATABASE_HOST: mysql
GF_DATABASE_USER: root
GF_DATABASE_PASSWORD: local_dev_pwd
GF_DATABASE_PASSWORD: empty
GF_SECURITY_ADMIN_USER: oncall
GF_SECURITY_ADMIN_PASSWORD: oncall
GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: grafana-oncall-app

174
docker-compose.yml Normal file
View file

@ -0,0 +1,174 @@
---
# Hobby (single-host) deployment of Grafana OnCall:
# engine + celery worker + MySQL + Redis + RabbitMQ, plus an optional
# Grafana instance enabled via the `with_grafana` profile.
#
# Shared engine configuration, reused by the engine, celery and migration
# services through a YAML anchor. Top-level `x-` extension fields and
# `<<:` merges are supported by the Compose spec (this file already
# requires Compose V2: `profiles`, `service_completed_successfully`).
x-oncall-env: &oncall-env
  BASE_URL: $DOMAIN
  SECRET_KEY: $SECRET_KEY
  RABBITMQ_USERNAME: "rabbitmq"
  RABBITMQ_PASSWORD: $RABBITMQ_PASSWORD
  RABBITMQ_HOST: "rabbitmq"
  RABBITMQ_PORT: "5672"
  RABBITMQ_DEFAULT_VHOST: "/"
  MYSQL_PASSWORD: $MYSQL_PASSWORD
  MYSQL_DB_NAME: oncall_hobby
  MYSQL_USER: ${MYSQL_USER:-root}
  MYSQL_HOST: ${MYSQL_HOST:-mysql}
  # quoted for consistency with RABBITMQ_PORT; env values are strings
  MYSQL_PORT: "3306"
  REDIS_URI: redis://redis:6379/0
  DJANGO_SETTINGS_MODULE: settings.hobby
  OSS: "True"
  CELERY_WORKER_QUEUE: "default,critical,long,slack,telegram,webhook,retry,celery"

services:
  # OnCall API / web engine (uWSGI).
  engine:
    # TODO: change to the public image once it's public
    # image: ...
    build: engine
    ports:
      - "8080:8080"
    command: >
      sh -c "uwsgi --ini uwsgi.ini"
    environment:
      <<: *oncall-env
    depends_on:
      mysql:
        condition: service_healthy
      oncall_db_migration:
        condition: service_completed_successfully
      rabbitmq:
        condition: service_started
      redis:
        condition: service_started

  # Celery worker (with beat enabled) for async/scheduled tasks.
  celery:
    # TODO: change to the public image once it's public
    build: engine
    command: sh -c "./celery_with_exporter.sh"
    environment:
      <<: *oncall-env
      CELERY_WORKER_CONCURRENCY: "1"
      CELERY_WORKER_MAX_TASKS_PER_CHILD: "100"
      CELERY_WORKER_SHUTDOWN_INTERVAL: "65m"
      CELERY_WORKER_BEAT_ENABLED: "True"
    depends_on:
      mysql:
        condition: service_healthy
      oncall_db_migration:
        condition: service_completed_successfully
      rabbitmq:
        condition: service_started
      redis:
        condition: service_started

  # One-shot Django migration job; engine/celery wait for it to complete.
  oncall_db_migration:
    build: engine
    command: python manage.py migrate --noinput
    environment:
      <<: *oncall-env
    depends_on:
      mysql:
        condition: service_healthy
      rabbitmq:
        condition: service_started

  mysql:
    image: mysql:5.7
    platform: linux/x86_64
    mem_limit: 500m
    cpus: 0.5
    command: --default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
    restart: always
    ports:
      # always quote port mappings: unquoted "a:b" can hit YAML 1.1
      # sexagesimal parsing for small port numbers
      - "3306:3306"
    volumes:
      - dbdata:/var/lib/mysql
    environment:
      MYSQL_ROOT_PASSWORD: $MYSQL_PASSWORD
      MYSQL_DATABASE: oncall_hobby
    healthcheck:
      # $MYSQL_PASSWORD is interpolated by Compose from the env file
      # before the command runs inside the container.
      test: "mysql -uroot -p$MYSQL_PASSWORD oncall_hobby -e 'select 1'"
      timeout: 20s
      retries: 10

  redis:
    image: redis
    mem_limit: 100m
    cpus: 0.1
    restart: always
    ports:
      - "6379:6379"

  rabbitmq:
    image: "rabbitmq:3.7.15-management"
    hostname: rabbitmq
    mem_limit: 1000m
    cpus: 0.5
    volumes:
      - rabbitmqdata:/var/lib/rabbitmq
    environment:
      RABBITMQ_DEFAULT_USER: "rabbitmq"
      RABBITMQ_DEFAULT_PASS: $RABBITMQ_PASSWORD
      RABBITMQ_DEFAULT_VHOST: "/"

  # One-shot helper that creates the `grafana` database for the optional
  # Grafana service below (only runs under the `with_grafana` profile).
  mysql_to_create_grafana_db:
    image: mysql:5.7
    platform: linux/x86_64
    command: bash -c "mysql -h ${MYSQL_HOST:-mysql} -uroot -p${MYSQL_PASSWORD:?err} -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'"
    depends_on:
      mysql:
        condition: service_healthy
    profiles:
      - with_grafana

  grafana:
    image: "grafana/grafana:9.0.0-beta3"
    mem_limit: 500m
    ports:
      - "3000:3000"
    cpus: 0.5
    environment:
      GF_DATABASE_TYPE: mysql
      GF_DATABASE_HOST: ${MYSQL_HOST:-mysql}
      GF_DATABASE_USER: ${MYSQL_USER:-root}
      GF_DATABASE_PASSWORD: ${MYSQL_PASSWORD:?err}
      GF_SECURITY_ADMIN_USER: ${GRAFANA_USER:-admin}
      GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_PASSWORD:?err}
      GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: grafana-oncall-app
      GF_INSTALL_PLUGINS: grafana-oncall-app
    depends_on:
      mysql_to_create_grafana_db:
        condition: service_completed_successfully
      mysql:
        condition: service_healthy
    profiles:
      - with_grafana

volumes:
  dbdata:
  rabbitmqdata:
  # NOTE(review): no caddy service is defined in this file — confirm these
  # volumes are referenced by an override/extension file before removing.
  caddy_data:
  caddy_config:

View file

@ -1,5 +1,5 @@
IMAGE = grafana/docs-base:latest
CONTENT_PATH = /hugo/content/docs/amixr/latest
CONTENT_PATH = /hugo/content/docs/oncall/latest
PORT = 3002:3002
.PHONY: pull

View file

@ -4,5 +4,5 @@ Source for documentation at https://grafana.com/docs/amixr/
## Preview the website
Run `make docs`. This launches a preview of the website with the current grafana docs at `http://localhost:3002/docs/amixr/` which will refresh automatically when changes are made to content in the `sources` directory.
Run `make docs`. This launches a preview of the website with the current grafana docs at `http://localhost:3002/docs/oncall/latest/` which will refresh automatically when changes are made to content in the `sources` directory.
Make sure Docker is running.

BIN
docs/img/GH_discussions.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 30 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 105 KiB

BIN
docs/img/community_call.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

BIN
docs/img/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 50 KiB

BIN
docs/img/slack.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.5 KiB

View file

@ -17,7 +17,7 @@ These procedures introduce you to the configuration of user settings, how to set
## Before you begin
You must have a Grafana Cloud account.
You must have a [Grafana Cloud](https://grafana.com/products/cloud/) account or [Open Source Grafana OnCall]({{< relref " open-source.md" >}})
Each supported integration and the associated monitoring system has a slightly different configuration method. These methods will not be explained in this guide, however, you can follow the online instructions provided when adding an integration.

View file

@ -21,15 +21,12 @@ To authorize, use the **Authorization** header:
```shell
# With shell, you can just pass the correct header with each request
curl "api_endpoint_here" --header "Authorization: meowmeowmeow"
curl "api_endpoint_here" --header "Authorization: "api_key_here""
```
Note that `meowmeowmeow` is a valid key for test purposes.
Replace `meowmeowmeow` with your API key in production.
Grafana OnCall uses API keys to allow access to the API. You can request a new OnCall API key in OnCall -> Settings page.
Grafana OnCall uses API keys to allow access to the API. You can request a new OnCall API key in the API section.
An API key is specific to a user and a Grafana stack. If you want to switch to a different team configuration, request a different API key.
An API key is specific to a user and a Grafana stack. If you want to switch to a different stack configuration, request a different API key.
## Pagination

170
docs/sources/open-source.md Normal file
View file

@ -0,0 +1,170 @@
---
aliases:
- /docs/grafana-cloud/oncall/open-source/
- /docs/oncall/latest/open-source/
keywords:
- Open Source
title: Open Source
weight: 100
---
# Open Source
We prepared three environments for OSS users:
- **Hobby** environment for local usage & playing around: [README.md](https://github.com/grafana/oncall#getting-started).
- **Development** environment for contributors: [DEVELOPER.md](https://github.com/grafana/oncall/blob/dev/DEVELOPER.md)
- **Production** environment for reliable cloud installation using Helm: [Production Environment](#production-environment)
## Production Environment
We prepared a Helm chart for the production environment: https://github.com/grafana/oncall/helm
## Slack Setup
The Grafana OnCall Slack integration uses a lot of Slack API features:
- Subscribing to Slack events requires OnCall to be externally available and to provide an HTTPS endpoint.
- You will need to register new Slack App.
1. Make sure your OnCall is up and running.
2. You need OnCall to be accessible through https. For development purposes we suggest using [localtunnel](https://github.com/localtunnel/localtunnel). For production purposes please consider setting up a proper web server with HTTPS termination. For localtunnel:
```bash
# Choose the unique prefix instead of pretty-turkey-83
# Localtunnel will generate an url, e.g. https://pretty-turkey-83.loca.lt
# it is referred as <ONCALL_ENGINE_PUBLIC_URL> below
lt --port 8080 -s pretty-turkey-83 --print-requests
```
3. If you use localtunnel, open your external URL and click "Continue" to allow requests to bypass the warning page.
4. [Create a Slack Workspace](https://slack.com/create) for development, or use your company workspace.
5. Go to https://api.slack.com/apps and click Create New App button
6. Select `From an app manifest` option and choose the right workspace
7. Copy and paste the following block with the correct <YOUR_BOT_NAME> and <ONCALL_ENGINE_PUBLIC_URL> fields
```yaml
_metadata:
major_version: 1
minor_version: 1
display_information:
name: <YOUR_BOT_NAME>
features:
app_home:
home_tab_enabled: true
messages_tab_enabled: true
messages_tab_read_only_enabled: false
bot_user:
display_name: <YOUR_BOT_NAME>
always_online: true
shortcuts:
- name: Create a new incident
type: message
callback_id: incident_create
description: Creates a new OnCall incident
- name: Add to postmortem
type: message
callback_id: add_postmortem
description: Add this message to postmortem
slash_commands:
- command: /oncall
url: <ONCALL_ENGINE_PUBLIC_URL>/slack/interactive_api_endpoint/
description: oncall
should_escape: false
oauth_config:
redirect_urls:
- <ONCALL_ENGINE_PUBLIC_URL>/api/internal/v1/complete/slack-install-free/
- <ONCALL_ENGINE_PUBLIC_URL>/api/internal/v1/complete/slack-login/
scopes:
user:
- channels:read
- chat:write
- identify
- users.profile:read
bot:
- app_mentions:read
- channels:history
- channels:read
- chat:write
- chat:write.customize
- chat:write.public
- commands
- files:write
- groups:history
- groups:read
- im:history
- im:read
- im:write
- mpim:history
- mpim:read
- mpim:write
- reactions:write
- team:read
- usergroups:read
- usergroups:write
- users.profile:read
- users:read
- users:read.email
- users:write
settings:
event_subscriptions:
request_url: <ONCALL_ENGINE_PUBLIC_URL>/slack/event_api_endpoint/
bot_events:
- app_home_opened
- app_mention
- channel_archive
- channel_created
- channel_deleted
- channel_rename
- channel_unarchive
- member_joined_channel
- message.channels
- message.im
- subteam_created
- subteam_members_changed
- subteam_updated
- user_change
interactivity:
is_enabled: true
request_url: <ONCALL_ENGINE_PUBLIC_URL>/slack/interactive_api_endpoint/
org_deploy_enabled: false
socket_mode_enabled: false
```
8. Go to your "OnCall" -> "Env Variables" and set:
```
SLACK_CLIENT_OAUTH_ID = Basic Information -> App Credentials -> Client ID
SLACK_CLIENT_OAUTH_SECRET = Basic Information -> App Credentials -> Client Secret
SLACK_SIGNING_SECRET = Basic Information -> App Credentials -> Signing Secret
SLACK_INSTALL_RETURN_REDIRECT_HOST = << OnCall external URL >>
```
9. Go to "OnCall" -> "ChatOps" -> "Slack" and install Slack Integration
10. All set!
## Telegram Setup
- The Telegram integration requires OnCall to be externally available and to provide an HTTPS endpoint.
- Telegram integration in OnCall is designed for collaborative team work. It requires Telegram Group and a Telegram Channel (private) for alerts.
1. Make sure your OnCall is up and running.
2. Respectfully ask [BotFather](https://t.me/BotFather) for a key, put it in `TELEGRAM_TOKEN` in "OnCall" -> "Env Variables".
3. Set `TELEGRAM_WEBHOOK_HOST` with your external url for OnCall.
4. Go to "OnCall" -> "ChatOps" -> Telegram and enjoy!
## Grafana OSS-Cloud Setup
Grafana OSS could be connected to Grafana Cloud for heartbeat and SMS / Phone Calls. We tried our best in making Grafana OSS <-> Cloud self-explanatory. Check "Cloud" page in your OSS OnCall instance.
Please note that it's possible to use either Grafana Cloud or Twilio for SMS/phone calls.
## Twilio Setup
1. Make sure Grafana OSS <-> Cloud connector is disabled. Set `GRAFANA_CLOUD_NOTIFICATIONS_ENABLED` as False.
2. Check "OnCall" -> "Env Variables" and set all variables starting with `TWILIO_`

View file

@ -1,38 +0,0 @@
FROM python:3.9-alpine
RUN apk add bash
RUN apk add python3-dev
RUN apk add build-base
RUN apk add linux-headers
RUN apk add pcre-dev
RUN apk add mariadb-connector-c-dev
RUN apk add openssl-dev
RUN apk add libffi-dev
RUN apk add git
RUN apk add curl
RUN apk add redis
RUN pip install uwsgi
RUN pip install regex==2021.11.2
WORKDIR /etc/app
COPY ./requirements.txt ./requirements.txt
RUN pip install -r requirements.txt
COPY ./scripts/start_all_in_one.sh ./start_all_in_one.sh
COPY ./ ./
RUN rm db.sqlite3 || true
RUN DJANGO_SETTINGS_MODULE=settings.prod_without_db \
SECRET_KEY="ThEmUsTSecretKEYforBUILDstage123" \
TELEGRAM_TOKEN="0000000000:XXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXX" \
SLACK_CLIENT_OAUTH_ID=1 python manage.py collectstatic --no-input
VOLUME /etc/app/sqlite_data
VOLUME /etc/app/secret_data
VOLUME /etc/app/redis_data
EXPOSE 8000
CMD ["bash", "./start_all_in_one.sh"]

View file

@ -266,7 +266,7 @@ class EscalationPolicySnapshot:
escalation_policy_step=self.step,
)
else:
notify_to_users_list = list_users_to_notify_from_ical(on_call_schedule)
notify_to_users_list = list_users_to_notify_from_ical(on_call_schedule, include_viewers=True)
if notify_to_users_list is None:
log_record = AlertGroupLogRecord(
type=AlertGroupLogRecord.TYPE_ESCALATION_FAILED,

View file

@ -5,7 +5,7 @@ from typing import Optional
from django.apps import apps
from rest_framework import status
from apps.alerts.tasks import create_contact_points_for_datasource
from apps.alerts.tasks import schedule_create_contact_points_for_datasource
from apps.grafana_plugin.helpers import GrafanaAPIClient
logger = logging.getLogger(__name__)
@ -77,16 +77,15 @@ class GrafanaAlertingSyncManager:
# sync other datasource
for datasource in datasources:
if datasource["type"] == GrafanaAlertingSyncManager.ALERTING_DATASOURCE:
if self.create_contact_point(datasource) is None:
contact_point = self.create_contact_point(datasource)
if contact_point is None:
# Failed to create contact point duo to getting wrong alerting config. It is expected behaviour.
# Add datasource to list and retry to create contact point for it async
datasources_to_create.append(datasource)
if datasources_to_create:
# create other contact points async
create_contact_points_for_datasource.apply_async(
(self.alert_receive_channel.pk, datasources_to_create),
)
schedule_create_contact_points_for_datasource(self.alert_receive_channel.pk, datasources_to_create)
else:
self.alert_receive_channel.is_finished_alerting_setup = True
self.alert_receive_channel.save(update_fields=["is_finished_alerting_setup"])

View file

@ -1,5 +1,5 @@
from apps.alerts.incident_appearance.templaters.alert_templater import AlertTemplater
from common.utils import clean_markup
from common.utils import clean_markup, escape_for_twilio_phone_call
class AlertPhoneCallTemplater(AlertTemplater):
@ -24,8 +24,4 @@ class AlertPhoneCallTemplater(AlertTemplater):
return sf.format(data)
def _escape(self, data):
# https://www.twilio.com/docs/api/errors/12100
data = data.replace("&", "&amp;")
data = data.replace(">", "&gt;")
data = data.replace("<", "&lt;")
return data
return escape_for_twilio_phone_call(data)

View file

@ -16,6 +16,8 @@ from django.db import migrations, models
import django.db.models.deletion
import django.db.models.manager
from apps.alerts.integration_options_mixin import IntegrationOptionsMixin
class Migration(migrations.Migration):
@ -132,7 +134,7 @@ class Migration(migrations.Migration):
('public_primary_key', models.CharField(default=apps.alerts.models.alert_receive_channel.generate_public_primary_key_for_alert_receive_channel, max_length=20, unique=True, validators=[django.core.validators.MinLengthValidator(13)])),
('created_at', models.DateTimeField(auto_now_add=True)),
('deleted_at', models.DateTimeField(blank=True, null=True)),
('integration', models.CharField(choices=[('alertmanager', 'AlertManager'), ('grafana', 'Grafana'), ('grafana_alerting', 'Grafana Alerting'), ('formatted_webhook', 'Formatted Webhook'), ('webhook', 'Webhook'), ('amazon_sns', 'Amazon SNS'), ('heartbeat', 'Heartbeat'), ('inbound_email', 'Inboubd Email'), ('maintenance', 'Maintenance'), ('manual', 'Manual'), ('slack_channel', 'Slack Channel'), ('stackdriver', 'Stackdriver'), ('curler', 'Curler'), ('datadog', 'Datadog'), ('demo', 'Demo'), ('elastalert', 'Elastalert'), ('fabric', 'Fabric'), ('kapacitor', 'Kapacitor'), ('newrelic', 'New Relic'), ('pagerduty', 'Pagerduty'), ('pingdom', 'Pingdom'), ('prtg', 'PRTG'), ('sentry', 'Sentry'), ('uptimerobot', 'UptimeRobot'), ('zabbix', 'Zabbix')], default='grafana', max_length=100)),
('integration', models.CharField(choices=IntegrationOptionsMixin.INTEGRATION_CHOICES,default=IntegrationOptionsMixin.DEFAULT_INTEGRATION, max_length=100)),
('allow_source_based_resolving', models.BooleanField(default=True)),
('token', models.CharField(db_index=True, default=apps.alerts.models.alert_receive_channel.random_token_generator, max_length=30)),
('smile_code', models.TextField(default=':slightly_smiling_face:')),

View file

@ -1,178 +0,0 @@
# Generated by Django 3.2.5 on 2021-08-04 10:42
import sys
from django.db import migrations
from django.utils import timezone, dateparse
from apps.alerts.models.alert_receive_channel import number_to_smiles_translator
from apps.public_api import constants as public_api_constants
TYPE_SINGLE_EVENT = 0
TYPE_RECURRENT_EVENT = 1
FREQUENCY_WEEKLY = 1
SOURCE_TERRAFORM = 3
STEP_WAIT = 0
STEP_NOTIFY_USERS_QUEUE = 12
SOURCE_WEB = 1
def create_demo_token_instances(apps, schema_editor):
if not (len(sys.argv) > 1 and sys.argv[1] == 'test'):
User = apps.get_model('user_management', 'User')
Organization = apps.get_model('user_management', 'Organization')
AlertReceiveChannel = apps.get_model('alerts', 'AlertReceiveChannel')
EscalationChain = apps.get_model('alerts', 'EscalationChain')
ChannelFilter = apps.get_model('alerts', 'ChannelFilter')
EscalationPolicy = apps.get_model('alerts', 'EscalationPolicy')
OnCallScheduleICal = apps.get_model('schedules', 'OnCallScheduleICal')
AlertGroup = apps.get_model('alerts', 'AlertGroup')
Alert = apps.get_model('alerts', 'Alert')
CustomButton = apps.get_model("alerts", "CustomButton")
CustomOnCallShift = apps.get_model('schedules', 'CustomOnCallShift')
organization = Organization.objects.get(public_primary_key=public_api_constants.DEMO_ORGANIZATION_ID)
user = User.objects.get(public_primary_key=public_api_constants.DEMO_USER_ID)
alert_receive_channel, _ = AlertReceiveChannel.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_INTEGRATION_ID,
defaults=dict(
integration=0,
author=user,
organization=organization,
smile_code=number_to_smiles_translator(0)
)
)
escalation_chain, _ = EscalationChain.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_ESCALATION_CHAIN_ID,
defaults=dict(
name="default",
organization=organization,
)
)
channel_filter_1, _ = ChannelFilter.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_ROUTE_ID_1,
defaults=dict(
alert_receive_channel=alert_receive_channel,
slack_channel_id=public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID,
filtering_term='us-(east|west)',
order=0,
escalation_chain=escalation_chain,
)
)
ChannelFilter.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_ROUTE_ID_2,
defaults=dict(
alert_receive_channel=alert_receive_channel,
slack_channel_id=public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID,
filtering_term='.*',
order=1,
is_default=True,
escalation_chain=escalation_chain,
)
)
EscalationPolicy.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_1,
defaults=dict(
step=STEP_WAIT,
wait_delay=timezone.timedelta(minutes=1),
order=0,
escalation_chain=escalation_chain,
)
)
escalation_policy_1, _ = EscalationPolicy.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_2,
defaults=dict(
step=STEP_NOTIFY_USERS_QUEUE,
order=1,
escalation_chain=escalation_chain,
)
)
escalation_policy_1.notify_to_users_queue.add(user)
schedule, _ = OnCallScheduleICal.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL,
defaults=dict(
organization=organization,
name=public_api_constants.DEMO_SCHEDULE_NAME_ICAL,
ical_url_overrides=public_api_constants.DEMO_SCHEDULE_ICAL_URL_OVERRIDES,
channel=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID,
)
)
alert_group, _ = AlertGroup.all_objects.get_or_create(
public_primary_key=public_api_constants.DEMO_INCIDENT_ID,
defaults=dict(
channel=alert_receive_channel,
channel_filter=channel_filter_1,
resolved=True,
resolved_at=dateparse.parse_datetime(public_api_constants.DEMO_INCIDENT_RESOLVED_AT),
)
)
alert_group.started_at = dateparse.parse_datetime(public_api_constants.DEMO_INCIDENT_CREATED_AT)
alert_group.save(update_fields=['started_at'])
for id, created_at in public_api_constants.DEMO_ALERT_IDS:
alert, _ = Alert.objects.get_or_create(
public_primary_key=id,
defaults=dict(
group=alert_group,
raw_request_data=public_api_constants.DEMO_ALERT_PAYLOAD,
title='Memory above 90% threshold',
)
)
alert.created_at = dateparse.parse_datetime(created_at)
alert.save(update_fields=['created_at'])
CustomButton.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_CUSTOM_ACTION_ID,
defaults=dict(
name=public_api_constants.DEMO_CUSTOM_ACTION_NAME,
organization=organization,
)
)
on_call_shift_1, _ = CustomOnCallShift.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_ON_CALL_SHIFT_ID_1,
defaults=dict(
type=TYPE_SINGLE_EVENT,
organization=organization,
name=public_api_constants.DEMO_ON_CALL_SHIFT_NAME_1,
start=dateparse.parse_datetime(public_api_constants.DEMO_ON_CALL_SHIFT_START_1),
duration=timezone.timedelta(seconds=public_api_constants.DEMO_ON_CALL_SHIFT_DURATION),
)
)
on_call_shift_1.users.add(user)
on_call_shift_2, _ = CustomOnCallShift.objects.get_or_create(
public_primary_key=public_api_constants.DEMO_ON_CALL_SHIFT_ID_2,
defaults=dict(
type=TYPE_RECURRENT_EVENT,
organization=organization,
name=public_api_constants.DEMO_ON_CALL_SHIFT_NAME_2,
start=dateparse.parse_datetime(public_api_constants.DEMO_ON_CALL_SHIFT_START_2),
duration=timezone.timedelta(seconds=public_api_constants.DEMO_ON_CALL_SHIFT_DURATION),
frequency=FREQUENCY_WEEKLY,
interval=2,
by_day=public_api_constants.DEMO_ON_CALL_SHIFT_BY_DAY,
source=SOURCE_TERRAFORM,
)
)
on_call_shift_2.users.add(user)
class Migration(migrations.Migration):
dependencies = [
('alerts', '0002_squashed_initial'),
('user_management', '0002_squashed_create_demo_token_instances'),
('schedules', '0002_squashed_initial'),
]
operations = [
migrations.RunPython(create_demo_token_instances, migrations.RunPython.noop)
]

View file

@ -113,20 +113,7 @@ class ChannelFilter(OrderedModel):
return satisfied_filter
def is_satisfying(self, raw_request_data, title, message=None):
AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel")
return (
self.is_default
or self.check_filter(json.dumps(raw_request_data))
or self.check_filter(str(title))
or
# Special case for Amazon SNS
(
self.check_filter(str(message))
if self.alert_receive_channel.integration == AlertReceiveChannel.INTEGRATION_AMAZON_SNS
else False
)
)
return self.is_default or self.check_filter(json.dumps(raw_request_data)) or self.check_filter(str(title))
def check_filter(self, value):
return re.search(self.filtering_term, value)

View file

@ -4,6 +4,7 @@ from .calculcate_escalation_finish_time import calculate_escalation_finish_time
from .call_ack_url import call_ack_url # noqa: F401
from .check_escalation_finished import check_escalation_finished_task # noqa: F401
from .create_contact_points_for_datasource import create_contact_points_for_datasource # noqa: F401
from .create_contact_points_for_datasource import schedule_create_contact_points_for_datasource # noqa: F401
from .custom_button_result import custom_button_result # noqa: F401
from .delete_alert_group import delete_alert_group # noqa: F401
from .distribute_alert import distribute_alert # noqa: F401

View file

@ -1,9 +1,32 @@
import logging
from celery.utils.log import get_task_logger
from django.apps import apps
from django.core.cache import cache
from rest_framework import status
from apps.grafana_plugin.helpers import GrafanaAPIClient
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
logger = get_task_logger(__name__)
logger.setLevel(logging.DEBUG)
def get_cache_key_create_contact_points_for_datasource(alert_receive_channel_id):
    """Return the cache key under which the scheduled contact-point-creation
    task id is stored for the given alert receive channel."""
    prefix = "create_contact_points_for_datasource"
    return "{}_{}".format(prefix, alert_receive_channel_id)
@shared_dedicated_queue_retry_task
def schedule_create_contact_points_for_datasource(alert_receive_channel_id, datasource_list):
    """Schedule (with debounce) async creation of contact points for the datasources.

    Enqueues ``create_contact_points_for_datasource`` with a short delay and
    records the enqueued task id in the cache. The task itself compares its
    own id against the cached one and exits early if it has been superseded,
    so only the most recently scheduled task does the work.
    """
    # How long (seconds) the "latest task id" marker lives in the cache.
    CACHE_LIFETIME = 600
    # Small delay before running, so rapid re-schedules collapse into one run.
    START_TASK_DELAY = 3
    task = create_contact_points_for_datasource.apply_async(
        args=[alert_receive_channel_id, datasource_list], countdown=START_TASK_DELAY
    )
    # Remember which task id is the current one for this channel.
    cache_key = get_cache_key_create_contact_points_for_datasource(alert_receive_channel_id)
    cache.set(cache_key, task.id, timeout=CACHE_LIFETIME)
@shared_dedicated_queue_retry_task(autoretry_for=(Exception,), retry_backoff=True, max_retries=10)
def create_contact_points_for_datasource(alert_receive_channel_id, datasource_list):
@ -11,6 +34,11 @@ def create_contact_points_for_datasource(alert_receive_channel_id, datasource_li
Try to create contact points for other datasource.
Restart task for datasource, for which contact point was not created.
"""
cache_key = get_cache_key_create_contact_points_for_datasource(alert_receive_channel_id)
cached_task_id = cache.get(cache_key)
current_task_id = create_contact_points_for_datasource.request.id
if cached_task_id is not None and current_task_id != cached_task_id:
return
AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel")
@ -21,7 +49,7 @@ def create_contact_points_for_datasource(alert_receive_channel_id, datasource_li
api_token=alert_receive_channel.organization.api_token,
)
# list of datasource for which contact point creation was failed
datasource_to_create = []
datasources_to_create = []
for datasource in datasource_list:
contact_point = None
config, response_info = client.get_alerting_config(datasource["id"])
@ -29,16 +57,22 @@ def create_contact_points_for_datasource(alert_receive_channel_id, datasource_li
if response_info.get("status_code") == status.HTTP_404_NOT_FOUND:
client.get_alertmanager_status_with_config(datasource["id"])
contact_point = alert_receive_channel.grafana_alerting_sync_manager.create_contact_point(datasource)
elif response_info.get("status_code") == status.HTTP_400_BAD_REQUEST:
logger.warning(
f"Failed to create contact point for integration {alert_receive_channel_id}, "
f"datasource info: {datasource}; response: {response_info}"
)
continue
else:
contact_point = alert_receive_channel.grafana_alerting_sync_manager.create_contact_point(datasource)
if contact_point is None:
# Failed to create contact point duo to getting wrong alerting config.
# Add datasource to list and retry to create contact point for it again
datasource_to_create.append(datasource)
datasources_to_create.append(datasource)
# if some contact points were not created, restart task for them
if datasource_to_create:
create_contact_points_for_datasource.apply_async((alert_receive_channel_id, datasource_to_create), countdown=5)
if datasources_to_create:
schedule_create_contact_points_for_datasource(alert_receive_channel_id, datasources_to_create)
else:
alert_receive_channel.is_finished_alerting_setup = True
alert_receive_channel.save(update_fields=["is_finished_alerting_setup"])

View file

@ -12,6 +12,7 @@ from apps.alerts.constants import NEXT_ESCALATION_DELAY
from apps.alerts.incident_appearance.renderers.web_renderer import AlertGroupWebRenderer
from apps.alerts.signals import user_notification_action_triggered_signal
from apps.base.messaging import get_messaging_backend_from_id
from apps.base.utils import live_settings
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
from .task_logger import task_logger
@ -56,6 +57,13 @@ def notify_user_task(
if not user.is_notification_allowed:
task_logger.info(f"notify_user_task: user {user.pk} notification is not allowed for role {user.role}")
UserNotificationPolicyLogRecord(
author=user,
type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,
reason=f"notification is not allowed for user with role {user.role}",
alert_group=alert_group,
notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE,
).save()
return
user_has_notification, _ = UserHasNotification.objects.get_or_create(
@ -257,11 +265,31 @@ def perform_notification(log_record_pk):
).save()
return
if not user.is_notification_allowed:
UserNotificationPolicyLogRecord(
author=user,
type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,
reason=f"notification is not allowed for user with role {user.role}",
alert_group=alert_group,
notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE,
).save()
return
if notification_channel == UserNotificationPolicy.NotificationChannel.SMS:
SMSMessage.send_sms(user, alert_group, notification_policy)
SMSMessage.send_sms(
user,
alert_group,
notification_policy,
is_cloud_notification=live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED,
)
elif notification_channel == UserNotificationPolicy.NotificationChannel.PHONE_CALL:
PhoneCall.make_call(user, alert_group, notification_policy)
PhoneCall.make_call(
user,
alert_group,
notification_policy,
is_cloud_notification=live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED,
)
elif notification_channel == UserNotificationPolicy.NotificationChannel.TELEGRAM:
if alert_group.notify_in_telegram_enabled is True:

View file

@ -2,7 +2,7 @@ import pytest
from apps.alerts.incident_appearance.templaters import AlertSlackTemplater
from apps.alerts.models import AlertGroup
from apps.integrations.metadata.configuration import grafana
from config_integrations import grafana
@pytest.mark.django_db

View file

@ -10,9 +10,9 @@ from apps.alerts.incident_appearance.templaters import (
AlertWebTemplater,
)
from apps.alerts.models import Alert, AlertReceiveChannel
from apps.integrations.metadata.configuration import grafana
from common.jinja_templater import jinja_template_env
from common.utils import getattrd
from config_integrations import grafana
@pytest.mark.django_db

View file

@ -10,6 +10,7 @@ from apps.alerts.escalation_snapshot.utils import eta_for_escalation_step_notify
from apps.alerts.models import AlertGroupLogRecord, EscalationPolicy
from apps.schedules.ical_utils import list_users_to_notify_from_ical
from apps.schedules.models import CustomOnCallShift, OnCallScheduleCalendar
from common.constants.role import Role
def get_escalation_policy_snapshot_from_model(escalation_policy):
@ -200,6 +201,55 @@ def test_escalation_step_notify_on_call_schedule(
assert mocked_execute_tasks.called
@patch("apps.alerts.escalation_snapshot.snapshot_classes.EscalationPolicySnapshot._execute_tasks", return_value=None)
@pytest.mark.django_db
def test_escalation_step_notify_on_call_schedule_viewer_user(
    mocked_execute_tasks,
    escalation_step_test_setup,
    make_user_for_organization,
    make_escalation_policy,
    make_schedule,
    make_on_call_shift,
):
    """A viewer-role user on an on-call shift is put in the notify queue of a NOTIFY_SCHEDULE escalation step."""
    organization, user, _, channel_filter, alert_group, reason = escalation_step_test_setup
    viewer = make_user_for_organization(organization=organization, role=Role.VIEWER)
    schedule = make_schedule(organization, schedule_class=OnCallScheduleCalendar)
    # create on_call_shift with user to notify
    data = {
        "start": timezone.datetime.now().replace(microsecond=0),
        "duration": timezone.timedelta(seconds=7200),
    }
    on_call_shift = make_on_call_shift(
        organization=organization, shift_type=CustomOnCallShift.TYPE_SINGLE_EVENT, **data
    )
    on_call_shift.users.add(viewer)
    schedule.custom_on_call_shifts.add(on_call_shift)
    notify_schedule_step = make_escalation_policy(
        escalation_chain=channel_filter.escalation_chain,
        escalation_policy_step=EscalationPolicy.STEP_NOTIFY_SCHEDULE,
        notify_schedule=schedule,
    )
    escalation_policy_snapshot = get_escalation_policy_snapshot_from_model(notify_schedule_step)
    expected_eta = timezone.now() + timezone.timedelta(seconds=NEXT_ESCALATION_DELAY)
    result = escalation_policy_snapshot.execute(alert_group, reason)
    expected_result = EscalationPolicySnapshot.StepExecutionResultData(
        eta=result.eta,
        stop_escalation=False,
        pause_escalation=False,
        start_from_beginning=False,
    )
    # ETA should land ~NEXT_ESCALATION_DELAY from now; allow 15s of slack for test runtime.
    assert expected_eta + timezone.timedelta(seconds=15) > result.eta > expected_eta - timezone.timedelta(seconds=15)
    assert result == expected_result
    assert notify_schedule_step.log_records.filter(type=AlertGroupLogRecord.TYPE_ESCALATION_TRIGGERED).exists()
    # Queue must match the ical lookup with include_viewers=True, and contain exactly the viewer.
    assert list(escalation_policy_snapshot.notify_to_users_queue) == list(
        list_users_to_notify_from_ical(schedule, include_viewers=True)
    )
    assert list(escalation_policy_snapshot.notify_to_users_queue) == [viewer]
    assert mocked_execute_tasks.called
@patch("apps.alerts.escalation_snapshot.snapshot_classes.EscalationPolicySnapshot._execute_tasks", return_value=None)
@pytest.mark.django_db
def test_escalation_step_notify_user_group(

View file

@ -2,9 +2,10 @@ from unittest.mock import patch
import pytest
from apps.alerts.tasks.notify_user import perform_notification
from apps.alerts.tasks.notify_user import notify_user_task, perform_notification
from apps.base.models.user_notification_policy import UserNotificationPolicy
from apps.base.models.user_notification_policy_log_record import UserNotificationPolicyLogRecord
from common.constants.role import Role
@pytest.mark.django_db
@ -118,3 +119,62 @@ def test_notify_user_missing_data_errors(
assert error_log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED
assert error_log_record.reason == "Expected data is missing"
assert error_log_record.notification_error_code is None
@pytest.mark.django_db
def test_notify_user_perform_notification_error_if_viewer(
    make_organization,
    make_user,
    make_user_notification_policy,
    make_alert_receive_channel,
    make_alert_group,
    make_user_notification_policy_log_record,
):
    """perform_notification writes a NOT_ALLOWED_USER_ROLE failure log record for a viewer-role user."""
    organization = make_organization()
    # Verified phone number so the SMS step would otherwise be deliverable.
    user_1 = make_user(organization=organization, role=Role.VIEWER, _verified_phone_number="1234567890")
    user_notification_policy = make_user_notification_policy(
        user=user_1,
        step=UserNotificationPolicy.Step.NOTIFY,
        notify_by=UserNotificationPolicy.NotificationChannel.SMS,
    )
    alert_receive_channel = make_alert_receive_channel(organization=organization)
    alert_group = make_alert_group(alert_receive_channel=alert_receive_channel)
    log_record = make_user_notification_policy_log_record(
        author=user_1,
        alert_group=alert_group,
        notification_policy=user_notification_policy,
        type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED,
    )
    perform_notification(log_record.pk)
    # The task should append a failure record explaining the role restriction.
    error_log_record = UserNotificationPolicyLogRecord.objects.last()
    assert error_log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED
    assert error_log_record.reason == f"notification is not allowed for user with role {user_1.role}"
    assert (
        error_log_record.notification_error_code
        == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE
    )
@pytest.mark.django_db
def test_notify_user_error_if_viewer(
    make_organization,
    make_user,
    make_alert_receive_channel,
    make_alert_group,
):
    """notify_user_task itself refuses viewer-role users with a NOT_ALLOWED_USER_ROLE failure record."""
    organization = make_organization()
    user_1 = make_user(organization=organization, role=Role.VIEWER, _verified_phone_number="1234567890")
    alert_receive_channel = make_alert_receive_channel(organization=organization)
    alert_group = make_alert_group(alert_receive_channel=alert_receive_channel)
    notify_user_task(user_1.pk, alert_group.pk)
    error_log_record = UserNotificationPolicyLogRecord.objects.last()
    assert error_log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED
    assert error_log_record.reason == f"notification is not allowed for user with role {user_1.role}"
    assert (
        error_log_record.notification_error_code
        == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE
    )

View file

@ -1,9 +1,12 @@
from django.conf import settings
from rest_framework import serializers
from apps.api.serializers.telegram import TelegramToUserConnectorSerializer
from apps.base.constants import ADMIN_PERMISSIONS, ALL_ROLES_PERMISSIONS, EDITOR_PERMISSIONS
from apps.base.messaging import get_messaging_backends
from apps.base.models import UserNotificationPolicy
from apps.base.utils import live_settings
from apps.oss_installation.utils import cloud_user_identity_status
from apps.twilioapp.utils import check_phone_number_is_valid
from apps.user_management.models import User
from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField
@ -30,6 +33,7 @@ class UserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin):
permissions = serializers.SerializerMethodField()
notification_chain_verbal = serializers.SerializerMethodField()
cloud_connection_status = serializers.SerializerMethodField()
SELECT_RELATED = ["telegram_verification_code", "telegram_connection", "organization", "slack_user_identity"]
@ -50,6 +54,7 @@ class UserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin):
"messaging_backends",
"permissions",
"notification_chain_verbal",
"cloud_connection_status",
]
read_only_fields = [
"email",
@ -88,6 +93,15 @@ class UserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin):
default, important = UserNotificationPolicy.get_short_verbals_for_user(user=obj)
return {"default": " - ".join(default), "important": " - ".join(important)}
def get_cloud_connection_status(self, obj):
    """SerializerMethodField: cloud-sync status for `obj` on OSS installs with cloud notifications enabled.

    Returns None when the install is not OSS or cloud notifications are off.
    """
    if settings.OSS_INSTALLATION and live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED:
        # The view supplies "connector" and "cloud_identities" (email -> identity)
        # through the serializer context; both may be absent.
        connector = self.context.get("connector", None)
        identities = self.context.get("cloud_identities", {})
        identity = identities.get(obj.email, None)
        status, _ = cloud_user_identity_status(connector, identity)
        return status
    return None
class UserHiddenFieldsSerializer(UserSerializer):
available_for_all_roles_fields = [

View file

@ -3,7 +3,13 @@ from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.api.views.features import FEATURE_LIVE_SETTINGS, FEATURE_SLACK, FEATURE_TELEGRAM
from apps.api.views.features import (
FEATURE_GRAFANA_CLOUD_CONNECTION,
FEATURE_GRAFANA_CLOUD_NOTIFICATIONS,
FEATURE_LIVE_SETTINGS,
FEATURE_SLACK,
FEATURE_TELEGRAM,
)
@pytest.mark.django_db
@ -30,15 +36,24 @@ def test_select_features_all_enabled(
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
settings.OSS_INSTALLATION = True
settings.FEATURE_SLACK_INTEGRATION_ENABLED = True
settings.FEATURE_TELEGRAM_INTEGRATION_ENABLED = True
settings.FEATURE_LIVE_SETTINGS_ENABLED = True
settings.FEATURE_GRAFANA_CLOUD_CONNECTION = True
settings.FEATURE_GRAFANA_CLOUD_NOTIFICATIONS = True
client = APIClient()
url = reverse("api-internal:features")
response = client.get(url, format="json", **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
assert response.json() == [FEATURE_SLACK, FEATURE_TELEGRAM, FEATURE_LIVE_SETTINGS]
assert response.json() == [
FEATURE_SLACK,
FEATURE_TELEGRAM,
FEATURE_GRAFANA_CLOUD_CONNECTION,
FEATURE_LIVE_SETTINGS,
FEATURE_GRAFANA_CLOUD_NOTIFICATIONS,
]
@pytest.mark.django_db
@ -48,9 +63,12 @@ def test_select_features_all_disabled(
make_user_auth_headers,
):
organization, user, token = make_organization_and_user_with_plugin_token()
settings.OSS_INSTALLATION = False
settings.FEATURE_SLACK_INTEGRATION_ENABLED = False
settings.FEATURE_TELEGRAM_INTEGRATION_ENABLED = False
settings.FEATURE_LIVE_SETTINGS_ENABLED = False
settings.FEATURE_GRAFANA_CLOUD_CONNECTION = False
settings.FEATURE_GRAFANA_CLOUD_NOTIFICATIONS = FEATURE_GRAFANA_CLOUD_NOTIFICATIONS
client = APIClient()
url = reverse("api-internal:features")
response = client.get(url, format="json", **make_user_auth_headers(user, token))

View file

@ -141,7 +141,6 @@ def test_get_filter_created_at_invalid_format(
assert response.status_code == status.HTTP_400_BAD_REQUEST
@pytest.mark.skip(reason="SQLITE Incompatibility")
@pytest.mark.django_db
def test_get_filter_by_labels(
make_organization_and_user_with_plugin_token,

View file

@ -75,6 +75,7 @@ def test_update_user_cant_change_email_and_username(
"user": admin.username,
}
},
"cloud_connection_status": 0,
"permissions": ADMIN_PERMISSIONS,
"notification_chain_verbal": {"default": "", "important": ""},
"slack_user_identity": None,
@ -124,6 +125,7 @@ def test_list_users(
"notification_chain_verbal": {"default": "", "important": ""},
"slack_user_identity": None,
"avatar": admin.avatar_url,
"cloud_connection_status": 0,
},
{
"pk": editor.public_primary_key,
@ -144,6 +146,7 @@ def test_list_users(
"notification_chain_verbal": {"default": "", "important": ""},
"slack_user_identity": None,
"avatar": editor.avatar_url,
"cloud_connection_status": 0,
},
],
}

View file

@ -4,11 +4,14 @@ from rest_framework.response import Response
from rest_framework.views import APIView
from apps.auth_token.auth import PluginAuthentication
from apps.base.utils import live_settings
# Feature-flag identifiers appended to the list returned by FeaturesAPIView below.
FEATURE_SLACK = "slack"
FEATURE_TELEGRAM = "telegram"
FEATURE_LIVE_SETTINGS = "live_settings"
MOBILE_APP_PUSH_NOTIFICATIONS = "mobile_app"
FEATURE_GRAFANA_CLOUD_NOTIFICATIONS = "grafana_cloud_notifications"
FEATURE_GRAFANA_CLOUD_CONNECTION = "grafana_cloud_connection"
class FeaturesAPIView(APIView):
@ -31,9 +34,6 @@ class FeaturesAPIView(APIView):
if settings.FEATURE_TELEGRAM_INTEGRATION_ENABLED:
enabled_features.append(FEATURE_TELEGRAM)
if settings.FEATURE_LIVE_SETTINGS_ENABLED:
enabled_features.append(FEATURE_LIVE_SETTINGS)
if settings.MOBILE_APP_PUSH_NOTIFICATIONS_ENABLED:
DynamicSetting = apps.get_model("base", "DynamicSetting")
mobile_app_settings = DynamicSetting.objects.get_or_create(
@ -48,4 +48,12 @@ class FeaturesAPIView(APIView):
if request.auth.organization.pk in mobile_app_settings.json_value["org_ids"]:
enabled_features.append(MOBILE_APP_PUSH_NOTIFICATIONS)
if settings.OSS_INSTALLATION:
# Features below should be enabled only in OSS
enabled_features.append(FEATURE_GRAFANA_CLOUD_CONNECTION)
if settings.FEATURE_LIVE_SETTINGS_ENABLED:
enabled_features.append(FEATURE_LIVE_SETTINGS)
if live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED:
enabled_features.append(FEATURE_GRAFANA_CLOUD_NOTIFICATIONS)
return enabled_features

View file

@ -12,6 +12,7 @@ from apps.api.serializers.live_setting import LiveSettingSerializer
from apps.auth_token.auth import PluginAuthentication
from apps.base.models import LiveSetting
from apps.base.utils import live_settings
from apps.oss_installation.tasks import sync_users_with_cloud
from apps.slack.tasks import unpopulate_slack_user_identities
from apps.telegram.client import TelegramClient
from apps.telegram.tasks import register_telegram_webhook
@ -32,13 +33,19 @@ class LiveSettingViewSet(PublicPrimaryKeyMixin, viewsets.ModelViewSet):
def get_queryset(self):
    """Return available live settings ordered by name, optionally filtered to an exact name.

    Supports the `?search=<NAME>` query parameter as an exact-name filter.
    """
    LiveSetting.populate_settings_if_needed()
    queryset = LiveSetting.objects.filter(name__in=LiveSetting.AVAILABLE_NAMES).order_by("name")
    # Fix: a stale early `return` before this point made the search filter unreachable.
    search = self.request.query_params.get("search", None)
    if search:
        queryset = queryset.filter(name=search)
    return queryset
def perform_update(self, serializer):
    """Run setting-specific side effects, persist the new value, then trigger cloud user sync if needed."""
    new_value = serializer.validated_data["value"]
    # Setting-specific hooks (e.g. Slack/Telegram resets) must run before the value is saved.
    self._update_hook(new_value)
    # Fix: the block previously called both super().perform_update(serializer) and
    # serializer.save(), saving the instance twice; a single save() is sufficient
    # (DRF's perform_update is just serializer.save()).
    instance = serializer.save()
    # Kick off a cloud user sync when the cloud token changes, unless the caller opts out
    # with ?sync_users=false.
    sync_users = self.request.query_params.get("sync_users", "true") == "true"
    if instance.name == "GRAFANA_CLOUD_ONCALL_TOKEN" and sync_users:
        sync_users_with_cloud.apply_async()
def perform_destroy(self, instance):
new_value = instance.default_value
@ -66,6 +73,17 @@ class LiveSettingViewSet(PublicPrimaryKeyMixin, viewsets.ModelViewSet):
if sti is not None:
unpopulate_slack_user_identities.apply_async((sti.pk, True), countdown=0)
if instance.name == "GRAFANA_CLOUD_ONCALL_TOKEN":
from apps.oss_installation.models import CloudConnector
try:
old_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN
except ImproperlyConfigured:
old_token = None
if old_token != new_value:
CloudConnector.remove_sync()
def _reset_telegram_integration(self, new_token):
# tell Telegram to cancel sending events from old bot
with suppress(ImproperlyConfigured, error.InvalidToken, error.Unauthorized):

View file

@ -34,6 +34,7 @@ from apps.auth_token.models import UserScheduleExportAuthToken
from apps.auth_token.models.mobile_app_auth_token import MobileAppAuthToken
from apps.auth_token.models.mobile_app_verification_token import MobileAppVerificationToken
from apps.base.messaging import get_messaging_backend_from_id
from apps.base.utils import live_settings
from apps.telegram.client import TelegramClient
from apps.telegram.models import TelegramVerificationCode
from apps.twilioapp.phone_manager import PhoneManager
@ -56,7 +57,19 @@ class CurrentUserView(APIView):
permission_classes = (IsAuthenticated,)
def get(self, request):
    """Serialize the requesting user; on OSS installs with cloud notifications, add cloud identity context."""
    # Fix: a stale duplicate `UserSerializer(request.user, context={"request": ...})`
    # construction preceded this block and was immediately discarded; build the
    # serializer once, after the context is fully assembled.
    context = {"request": self.request, "format": self.format_kwarg, "view": self}
    if settings.OSS_INSTALLATION and live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED:
        # Imported here rather than at module level — presumably to avoid an
        # import cycle with apps.oss_installation; verify before hoisting.
        from apps.oss_installation.models import CloudConnector, CloudUserIdentity

        connector = CloudConnector.objects.first()
        if connector is not None:
            # Map email -> cloud identity for the serializer's cloud_connection_status field.
            cloud_identities = list(CloudUserIdentity.objects.filter(email__in=[request.user.email]))
            cloud_identities = {cloud_identity.email: cloud_identity for cloud_identity in cloud_identities}
            context["cloud_identities"] = cloud_identities
            context["connector"] = connector
    serializer = UserSerializer(request.user, context=context)
    return Response(serializer.data)
def put(self, request):
@ -179,6 +192,46 @@ class UserView(
return queryset.order_by("id")
def list(self, request, *args, **kwargs):
    """Paginated user list; on OSS installs with cloud notifications, add cloud identity context.

    Consistency: uses the same combined `settings.OSS_INSTALLATION and
    live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED` guard as retrieve()
    (the original nested the two checks).
    """
    queryset = self.filter_queryset(self.get_queryset())
    page = self.paginate_queryset(queryset)
    if page is not None:
        context = {"request": self.request, "format": self.format_kwarg, "view": self}
        if settings.OSS_INSTALLATION and live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED:
            # Local import — presumably avoids an import cycle; verify before hoisting.
            from apps.oss_installation.models import CloudConnector, CloudUserIdentity

            connector = CloudConnector.objects.first()
            if connector is not None:
                # Bulk-fetch identities for every listed email to avoid per-user queries.
                emails = list(queryset.values_list("email", flat=True))
                cloud_identities = list(CloudUserIdentity.objects.filter(email__in=emails))
                cloud_identities = {cloud_identity.email: cloud_identity for cloud_identity in cloud_identities}
                context["cloud_identities"] = cloud_identities
                context["connector"] = connector
        serializer = self.get_serializer(page, many=True, context=context)
        return self.get_paginated_response(serializer.data)
    serializer = self.get_serializer(queryset, many=True)
    return Response(serializer.data)
def retrieve(self, request, *args, **kwargs):
    """Serialize a single user; on OSS installs with cloud notifications, add cloud identity context."""
    context = {"request": self.request, "format": self.format_kwarg, "view": self}
    instance = self.get_object()
    if settings.OSS_INSTALLATION and live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED:
        # Local import — presumably avoids an import cycle; verify before hoisting.
        from apps.oss_installation.models import CloudConnector, CloudUserIdentity

        connector = CloudConnector.objects.first()
        if connector is not None:
            # Map email -> cloud identity for the serializer's cloud_connection_status field.
            cloud_identities = list(CloudUserIdentity.objects.filter(email__in=[instance.email]))
            cloud_identities = {cloud_identity.email: cloud_identity for cloud_identity in cloud_identities}
            context["cloud_identities"] = cloud_identities
            context["connector"] = connector
    serializer = self.get_serializer(instance, context=context)
    return Response(serializer.data)
def current(self, request):
    """Serialize the requesting user fetched through this view's queryset."""
    # NOTE(review): unlike retrieve()/list(), no context (request/connector/identities)
    # is passed here, so context-dependent serializer fields (e.g. cloud status) will
    # fall back to their defaults — confirm this is intended.
    serializer = UserSerializer(self.get_queryset().get(pk=self.request.user.pk))
    return Response(serializer.data)

View file

@ -9,7 +9,6 @@ from rest_framework.authentication import BaseAuthentication, get_authorization_
from rest_framework.request import Request
from apps.grafana_plugin.helpers.gcom import check_token
from apps.public_api import constants as public_api_constants
from apps.user_management.models import User
from apps.user_management.models.organization import Organization
from common.constants.role import Role
@ -29,12 +28,6 @@ class ApiTokenAuthentication(BaseAuthentication):
def authenticate(self, request):
auth = get_authorization_header(request).decode("utf-8")
if auth == public_api_constants.DEMO_AUTH_TOKEN:
user = User.objects.get(public_primary_key=public_api_constants.DEMO_USER_ID)
auth_token = user.auth_tokens.first()
return user, auth_token
user, auth_token = self.authenticate_credentials(auth)
if user.role != Role.ADMIN:

View file

@ -1,40 +0,0 @@
# Generated by Django 3.2.5 on 2021-08-04 13:02
import sys
from django.db import migrations
from apps.auth_token import constants
from apps.auth_token import crypto
from apps.public_api import constants as public_api_constants
def create_demo_token_instances(apps, schema_editor):
    """Data-migration step: create the demo user's API auth token (idempotent via get_or_create)."""
    # Skipped when the process was started as `manage.py test` — demo fixtures
    # are not needed under the test runner.
    if not (len(sys.argv) > 1 and sys.argv[1] == 'test'):
        # Use historical models from the migration state, not the live models.
        User = apps.get_model('user_management', 'User')
        Organization = apps.get_model('user_management', 'Organization')
        ApiAuthToken = apps.get_model('auth_token', 'ApiAuthToken')
        organization = Organization.objects.get(public_primary_key=public_api_constants.DEMO_ORGANIZATION_ID)
        user = User.objects.get(public_primary_key=public_api_constants.DEMO_USER_ID)
        # Only the hashed digest and key prefix are persisted, never the raw token.
        token_string = crypto.generate_token_string()
        digest = crypto.hash_token_string(token_string)
        ApiAuthToken.objects.get_or_create(
            name=public_api_constants.DEMO_AUTH_TOKEN,
            user=user,
            organization=organization,
            defaults=dict(token_key=token_string[:constants.TOKEN_KEY_LENGTH], digest=digest)
        )
class Migration(migrations.Migration):
    # Data migration: seeds the demo API auth token; reverse is a no-op.

    dependencies = [
        ('auth_token', '0002_squashed_initial'),
        ('user_management', '0002_squashed_create_demo_token_instances')
    ]

    operations = [
        migrations.RunPython(create_demo_token_instances, migrations.RunPython.noop)
    ]

View file

@ -1,74 +0,0 @@
# Generated by Django 3.2.5 on 2021-08-04 10:45
import sys
from django.db import migrations
from django.utils import timezone
from apps.public_api import constants as public_api_constants
STEP_WAIT = 0
STEP_NOTIFY = 1
NOTIFY_BY_SMS = 1
NOTIFY_BY_PHONE = 2
FIVE_MINUTES = timezone.timedelta(minutes=5)
def create_demo_token_instances(apps, schema_editor):
    """Data-migration step: seed the demo user's notification policies (idempotent via get_or_create)."""
    # Skipped when the process was started as `manage.py test`.
    if not (len(sys.argv) > 1 and sys.argv[1] == 'test'):
        # Historical models from the migration state.
        User = apps.get_model('user_management', 'User')
        UserNotificationPolicy = apps.get_model("base", "UserNotificationPolicy")
        user = User.objects.get(public_primary_key=public_api_constants.DEMO_USER_ID)
        # Default (non-important) chain, order 0: notify by SMS.
        UserNotificationPolicy.objects.get_or_create(
            public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1,
            defaults=dict(
                important=False,
                user=user,
                notify_by=NOTIFY_BY_SMS,
                step=STEP_NOTIFY,
                order=0,
            )
        )
        # Default chain, order 1: wait five minutes.
        UserNotificationPolicy.objects.get_or_create(
            public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_2,
            defaults=dict(
                important=False,
                user=user,
                step=STEP_WAIT,
                wait_delay=FIVE_MINUTES,
                order=1,
            )
        )
        # Default chain, order 2: notify by phone call.
        UserNotificationPolicy.objects.get_or_create(
            public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_3,
            defaults=dict(
                important=False,
                user=user,
                step=STEP_NOTIFY,
                notify_by=NOTIFY_BY_PHONE,
                order=2,
            )
        )
        # Important chain, order 0: phone call.
        # NOTE(review): no explicit `step` here, unlike the entries above — confirm
        # the model default is the intended step.
        UserNotificationPolicy.objects.get_or_create(
            public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_4,
            defaults=dict(
                important=True,
                user=user,
                notify_by=NOTIFY_BY_PHONE,
                order=0,
            )
        )
class Migration(migrations.Migration):
    # Data migration: seeds demo notification policies; reverse is a no-op.

    dependencies = [
        ('base', '0002_squashed_initial'),
        ('user_management', '0002_squashed_create_demo_token_instances')
    ]

    operations = [
        migrations.RunPython(create_demo_token_instances, migrations.RunPython.noop)
    ]

View file

@ -38,35 +38,45 @@ class LiveSetting(models.Model):
"TWILIO_NUMBER",
"TWILIO_VERIFY_SERVICE_SID",
"TELEGRAM_TOKEN",
"TELEGRAM_WEBHOOK_HOST",
"SLACK_CLIENT_OAUTH_ID",
"SLACK_CLIENT_OAUTH_SECRET",
"SLACK_SIGNING_SECRET",
"SLACK_INSTALL_RETURN_REDIRECT_HOST",
"SEND_ANONYMOUS_USAGE_STATS",
"GRAFANA_CLOUD_ONCALL_TOKEN",
"GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED",
"GRAFANA_CLOUD_NOTIFICATIONS_ENABLED",
)
DESCRIPTIONS = {
"SLACK_SIGNING_SECRET": (
"Check <a href='"
"https://github.com/grafana/amixr/blob/main/DEVELOPER.md#slack-application-setup"
"'>this instruction</a> for details how to set up Slack. "
"https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup"
"'>instruction</a> for details how to set up Slack. "
"Slack secrets can't be verified on the backend, please try installing the Slack Bot "
"after you update Slack credentials."
"after you update them."
),
"SLACK_CLIENT_OAUTH_SECRET": (
"Check <a href='"
"https://github.com/grafana/amixr/blob/main/DEVELOPER.md#slack-application-setup"
"'>this instruction</a> for details how to set up Slack. "
"https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup"
"'>instruction</a> for details how to set up Slack. "
"Slack secrets can't be verified on the backend, please try installing the Slack Bot "
"after you update Slack credentials."
"after you update them."
),
"SLACK_CLIENT_OAUTH_ID": (
"Check <a href='"
"https://github.com/grafana/amixr/blob/main/DEVELOPER.md#slack-application-setup"
"'>this instruction</a> for details how to set up Slack. "
"https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup"
"'>instruction</a> for details how to set up Slack. "
"Slack secrets can't be verified on the backend, please try installing the Slack Bot "
"after you update Slack credentials."
"after you update them."
),
"SLACK_INSTALL_RETURN_REDIRECT_HOST": (
"Check <a href='"
"https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup"
"'>instruction</a> for details how to set up Slack. "
"Slack secrets can't be verified on the backend, please try installing the Slack Bot "
"after you update them."
),
"TWILIO_ACCOUNT_SID": (
"Twilio username to allow amixr send sms and make phone calls, "
@ -99,13 +109,17 @@ class LiveSetting(models.Model):
"TELEGRAM_TOKEN": (
"Secret token for Telegram bot, you can get one via " "<a href='https://t.me/BotFather'>BotFather</a>."
),
"TELEGRAM_WEBHOOK_HOST": (
"Externally available URL for Telegram to make requests. Please restart OnCall backend after after update."
),
"SEND_ANONYMOUS_USAGE_STATS": (
"Grafana OnCall will send anonymous, but uniquely-identifiable usage analytics to Grafana Labs."
" These statistics are sent to https://stats.grafana.org/. For more information on what's sent, look at"
"https://github.com/..." # TODO: add url to usage stats code
" These statistics are sent to https://stats.grafana.org/. For more information on what's sent, look at the "
"<a href='https://github.com/grafana/oncall/blob/dev/engine/apps/oss_installation/usage_stats.py#L29'> source code</a>."
),
"GRAFANA_CLOUD_ONCALL_TOKEN": "Secret token for Grafana Cloud OnCall instance.",
"GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED": "Enable hearbeat integration with Grafana Cloud OnCall.",
"GRAFANA_CLOUD_NOTIFICATIONS_ENABLED": "Enable SMS/call notifications via Grafana Cloud OnCall",
}
SECRET_SETTING_NAMES = (
@ -171,4 +185,5 @@ class LiveSetting(models.Model):
)
self.error = LiveSettingValidator(live_setting=self).get_error()
super().save(*args, **kwargs)

View file

@ -68,7 +68,8 @@ class UserNotificationPolicyLogRecord(models.Model):
ERROR_NOTIFICATION_IN_SLACK_CHANNEL_IS_ARCHIVED,
ERROR_NOTIFICATION_IN_SLACK_RATELIMIT,
ERROR_NOTIFICATION_MESSAGING_BACKEND_ERROR,
) = range(25)
ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE,
) = range(26)
# for this errors we want to send message to general log channel
ERRORS_TO_SEND_IN_SLACK_CHANNEL = [
@ -266,6 +267,10 @@ class UserNotificationPolicyLogRecord(models.Model):
result += f"failed to notify {user_verbal} in Slack, because channel is archived"
elif self.notification_error_code == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_IN_SLACK_RATELIMIT:
result += f"failed to notify {user_verbal} in Slack due to Slack rate limit"
elif (
self.notification_error_code == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE
):
result += f"failed to notify {user_verbal}, not allowed role"
else:
# TODO: handle specific backend errors
try:

View file

@ -94,6 +94,13 @@ class LiveSettingValidator:
except Exception as e:
return f"Telegram error: {str(e)}"
@classmethod
def _check_grafana_cloud_oncall_token(cls, grafana_oncall_token):
    """Validate the cloud OnCall token by attempting a sync; return an error string or None."""
    # Local import — presumably avoids a circular import with the oss_installation app; verify.
    from apps.oss_installation.models import CloudConnector

    _, err = CloudConnector.sync_with_cloud(grafana_oncall_token)
    return err
@staticmethod
def _is_email_valid(email):
return re.match(r"^[^@]+@[^@]+\.[^@]+$", email)

View file

@ -6,7 +6,6 @@ from django.utils import timezone
from apps.grafana_plugin.helpers import GcomAPIClient
from apps.grafana_plugin.helpers.gcom import get_active_instance_ids
from apps.public_api.constants import DEMO_ORGANIZATION_ID
from apps.user_management.models import Organization
from apps.user_management.sync import sync_organization
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
@ -23,9 +22,7 @@ SYNC_PERIOD = timezone.timedelta(minutes=25)
def start_sync_organizations():
sync_threshold = timezone.now() - SYNC_PERIOD
organization_qs = Organization.objects.exclude(public_primary_key=DEMO_ORGANIZATION_ID).filter(
last_time_synced__lte=sync_threshold
)
organization_qs = Organization.objects.filter(last_time_synced__lte=sync_threshold)
active_instance_ids, is_cloud_configured = get_active_instance_ids()
if is_cloud_configured:

View file

@ -1,99 +0,0 @@
# Main
enabled = True
title = "Amazon SNS"
slug = "amazon_sns"
short_description = None
is_displayed_on_web = True
# Fix: `description = None` was assigned twice in this section; the redundant
# duplicate (after is_demo_alert_enabled) is removed.
description = None
is_featured = False
is_able_to_autoresolve = True
is_demo_alert_enabled = True

# Default templates
# NOTE(review): several templates branch on `payload|length == 0` and then read
# keys from payload inside that branch — the condition looks inverted; confirm
# against real SNS payloads before relying on these defaults.
slack_title = """\
{% if payload|length == 0 -%}
{% set title = payload.get("AlarmName", "Alert") %}
{%- else -%}
{% set title = "Alert" %}
{%- endif %}
*<{{ grafana_oncall_link }}|#{{ grafana_oncall_incident_id }} {{ title }}>* via {{ integration_name }}
{% if source_link %}
(*<{{ source_link }}|source>*)
{%- endif %}"""

slack_message = """\
{% if payload|length == 1 and "message" in payload -%}
{{ payload.get("message", "Non-JSON payload received. Please make sure you publish monitoring Alarms to SNS, not logs: https://docs.amixr.io/#/integrations/amazon_sns") }}
{%- else -%}
*State* {{ payload.get("NewStateValue", "NO") }}
Region: {{ payload.get("Region", "Undefined") }}
_Description_: {{ payload.get("AlarmDescription", "Undefined") }}
{%- endif %}
"""

slack_image_url = None

web_title = """\
{% if payload|length == 0 -%}
{{ payload.get("AlarmName", "Alert")}}
{%- else -%}
Alert
{%- endif %}"""

web_message = """\
{% if payload|length == 1 and "message" in payload -%}
{{ payload.get("message", "Non-JSON payload received. Please make sure you publish monitoring Alarms to SNS, not logs: https://docs.amixr.io/#/integrations/amazon_sns") }}
{%- else -%}
**State** {{ payload.get("NewStateValue", "NO") }}
Region: {{ payload.get("Region", "Undefined") }}
*Description*: {{ payload.get("AlarmDescription", "Undefined") }}
{%- endif %}
"""

web_image_url = slack_image_url

sms_title = web_title
phone_call_title = web_title

email_title = web_title
email_message = "{{ payload|tojson_pretty }}"

telegram_title = sms_title
telegram_message = """\
{% if payload|length == 1 and "message" in payload -%}
{{ payload.get("message", "Non-JSON payload received. Please make sure you publish monitoring Alarms to SNS, not logs: https://docs.amixr.io/#/integrations/amazon_sns") }}
{%- else -%}
<b>State</b> {{ payload.get("NewStateValue", "NO") }}
Region: {{ payload.get("Region", "Undefined") }}
<i>Description</i>: {{ payload.get("AlarmDescription", "Undefined") }}
{%- endif %}
"""
telegram_image_url = slack_image_url

source_link = """\
{% if payload|length == 0 -%}
{% if payload.get("Trigger", {}).get("Namespace") == "AWS/ElasticBeanstalk" -%}
https://console.aws.amazon.com/elasticbeanstalk/home?region={{ payload.get("TopicArn").split(":")[3] }}
{%- else -%}
https://console.aws.amazon.com/cloudwatch//home?region={{ payload.get("TopicArn").split(":")[3] }}
{%- endif %}
{%- endif %}"""

grouping_id = web_title

resolve_condition = """\
{{ payload.get("NewStateValue", "") == "OK" }}
"""

acknowledge_condition = None

group_verbose_name = web_title

example_payload = {"foo": "bar"}

View file

@ -43,7 +43,6 @@ def test_ratelimit_alerts_per_integration(
assert mocked_task.call_count == 1
@pytest.mark.skip(reason="SQLITE Incompatibility")
@mock.patch("ratelimit.utils._split_rate", return_value=(1, 60))
@mock.patch("apps.integrations.tasks.create_alert.apply_async", return_value=None)
@pytest.mark.django_db
@ -55,10 +54,16 @@ def test_ratelimit_alerts_per_team(
):
organization = make_organization()
integration_1 = make_alert_receive_channel(organization, integration=AlertReceiveChannel.INTEGRATION_WEBHOOK)
url_1 = reverse("integrations:webhook", kwargs={"alert_channel_key": integration_1.token})
url_1 = reverse(
"integrations:universal",
kwargs={"integration_type": AlertReceiveChannel.INTEGRATION_WEBHOOK, "alert_channel_key": integration_1.token},
)
integration_2 = make_alert_receive_channel(organization, integration=AlertReceiveChannel.INTEGRATION_WEBHOOK)
url_2 = reverse("integrations:webhook", kwargs={"alert_channel_key": integration_2.token})
url_2 = reverse(
"integrations:universal",
kwargs={"integration_type": AlertReceiveChannel.INTEGRATION_WEBHOOK, "alert_channel_key": integration_2.token},
)
c = Client()
@ -71,7 +76,6 @@ def test_ratelimit_alerts_per_team(
assert mocked_task.call_count == 1
@pytest.mark.skip(reason="SQLITE Incompatibility")
@mock.patch("ratelimit.utils._split_rate", return_value=(1, 60))
@mock.patch("apps.heartbeat.tasks.process_heartbeat_task.apply_async", return_value=None)
@pytest.mark.django_db

View file

@ -0,0 +1,110 @@
import logging
import random
from urllib.parse import urljoin
import requests
from django.apps import apps
from django.conf import settings
from rest_framework import status
from apps.base.utils import live_settings
logger = logging.getLogger(__name__)
def setup_heartbeat_integration(name=None):
    """Create (or re-link) the Grafana Cloud OnCall heartbeat integration.

    Args:
        name: optional integration name; defaults to one derived from BASE_URL.

    Returns:
        The local CloudHeartbeat row pointing at the cloud-side integration,
        or None when heartbeats are disabled, no cloud token is configured,
        or the cloud API could not be reached.
    """
    CloudHeartbeat = apps.get_model("oss_installation", "CloudHeartbeat")
    cloud_heartbeat = None
    api_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN
    if not live_settings.GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED or not api_token:
        return cloud_heartbeat
    # don't specify a team in the data, so heartbeat integration will be created in the General.
    name = name or f"OnCall Cloud Heartbeat {settings.BASE_URL}"
    data = {"type": "formatted_webhook", "name": name}
    url = urljoin(settings.GRAFANA_CLOUD_ONCALL_API_URL, "/api/v1/integrations/")
    try:
        headers = {"Authorization": api_token}
        r = requests.post(url=url, data=data, headers=headers, timeout=5)
        if r.status_code == status.HTTP_201_CREATED:
            response_data = r.json()
            cloud_heartbeat, _ = CloudHeartbeat.objects.update_or_create(
                defaults={"integration_id": response_data["id"], "integration_url": response_data["heartbeat"]["link"]}
            )
        if r.status_code == status.HTTP_400_BAD_REQUEST:
            response_data = r.json()
            error = response_data["detail"]
            if error == "Integration with this name already exists":
                # The integration exists on the cloud side but the local record
                # was lost: look it up by name and re-link it.
                # Fix: add the timeout used by every other cloud request.
                response = requests.get(url=f"{url}?name={name}", headers=headers, timeout=5)
                integrations = response.json().get("results", [])
                if len(integrations) == 1:
                    integration = integrations[0]
                    cloud_heartbeat, _ = CloudHeartbeat.objects.update_or_create(
                        defaults={
                            "integration_id": integration["id"],
                            "integration_url": integration["heartbeat"]["link"],
                        }
                    )
                else:
                    # Ambiguous name collision: retry once with a randomized suffix.
                    # Fix: propagate the recursive call's result instead of
                    # discarding it (previously the caller always got None here).
                    cloud_heartbeat = setup_heartbeat_integration(f"{name} { random.randint(1, 1024)}")
    except requests.Timeout:
        logger.warning("Unable to create cloud heartbeat integration. Request timeout.")
    except requests.exceptions.RequestException as e:
        logger.warning(f"Unable to create cloud heartbeat integration. Request exception {str(e)}.")
    return cloud_heartbeat
def send_cloud_heartbeat():
    # Resolved lazily through the app registry to avoid import cycles.
    CloudHeartbeat = apps.get_model("oss_installation", "CloudHeartbeat")
    CloudConnector = apps.get_model("oss_installation", "CloudConnector")
    """Send heartbeat to Grafana Cloud OnCall integration."""
    # Both the feature flag and the token must be set, otherwise do nothing.
    if not live_settings.GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED or not live_settings.GRAFANA_CLOUD_ONCALL_TOKEN:
        logger.info(
            "Unable to send cloud heartbeat. Check values for GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED and GRAFANA_CLOUD_ONCALL_TOKEN."
        )
        return
    # Heartbeats require an established cloud connection (singleton connector row).
    connector = CloudConnector.objects.first()
    if connector is None:
        logger.info("Unable to send cloud heartbeat. Cloud is not connected")
        return
    logger.info("Start send cloud heartbeat")
    try:
        # CloudHeartbeat is kept as a singleton; .get() raises if it is missing.
        cloud_heartbeat = CloudHeartbeat.objects.get()
    except CloudHeartbeat.DoesNotExist:
        # First run (or the record was deleted): create the cloud integration.
        cloud_heartbeat = setup_heartbeat_integration()
    if cloud_heartbeat is None:
        logger.warning("Unable to setup cloud heartbeat integration.")
        return
    # Pessimistically mark as failed; flipped to True only on a 200 response.
    cloud_heartbeat.success = False
    try:
        response = requests.get(cloud_heartbeat.integration_url, timeout=5)
        logger.info(f"Send cloud heartbeat with response {response.status_code}")
    except requests.Timeout:
        # Fall through: success stays False and is persisted below.
        logger.warning("Unable to send cloud heartbeat. Request timeout.")
    except requests.exceptions.RequestException as e:
        logger.warning(f"Unable to send cloud heartbeat. Request exception {str(e)}.")
    else:
        if response.status_code == status.HTTP_200_OK:
            cloud_heartbeat.success = True
            logger.info("Successfully send cloud heartbeat")
        elif response.status_code == status.HTTP_403_FORBIDDEN:
            # check for 403 because AlertChannelDefiningMixin returns 403 if no integration was found.
            logger.info("Failed to send cloud heartbeat. Integration was not created yet")
            # force re-creation on next run
            cloud_heartbeat.delete()
        else:
            logger.info(f"Failed to send cloud heartbeat. response {response.status_code}")
    # save result of cloud heartbeat if it wasn't deleted
    if cloud_heartbeat.pk is not None:
        cloud_heartbeat.save()
    logger.info("Finish send cloud heartbeat")
def get_heartbeat_link(connector, heartbeat):
    """Build the cloud-side URL for a heartbeat integration.

    Returns None unless both a cloud connector and a heartbeat record exist.
    """
    if connector is None or heartbeat is None:
        return None
    integration_page = f"a/grafana-oncall-app/?page=integrations&id={heartbeat.integration_id}"
    return urljoin(connector.cloud_url, integration_page)

View file

@ -0,0 +1,4 @@
# Sync-state codes describing how a local user maps onto Grafana Cloud OnCall.
# Returned as the "status" part of cloud_user_identity_status().
CLOUD_NOT_SYNCED = 0  # cloud connection is not configured
CLOUD_SYNCED_USER_NOT_FOUND = 1  # synced, but no cloud user matches the email
CLOUD_SYNCED_PHONE_NOT_VERIFIED = 2  # matched cloud user, phone not verified there
CLOUD_SYNCED_PHONE_VERIFIED = 3  # matched cloud user with a verified phone

View file

@ -30,4 +30,20 @@ class Migration(migrations.Migration):
('report_sent_at', models.DateTimeField(default=None, null=True)),
],
),
migrations.CreateModel(
name='CloudConnector',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('cloud_url', models.URLField()),
],
),
migrations.CreateModel(
name='CloudUserIdentity',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('phone_number_verified', models.BooleanField(default=False)),
('cloud_id', models.CharField(max_length=20)),
('email', models.EmailField(max_length=254)),
],
),
]

View file

@ -1,2 +1,4 @@
from .heartbeat import CloudHeartbeat # noqa: F401
from .cloud_connector import CloudConnector # noqa: F401
from .cloud_heartbeat import CloudHeartbeat # noqa: F401
from .cloud_user_identity import CloudUserIdentity # noqa: F401
from .oss_installation import OssInstallation # noqa: F401

View file

@ -0,0 +1,155 @@
import logging
from urllib.parse import urljoin
import requests
from django.db import models, transaction
from apps.base.utils import live_settings
from apps.oss_installation.models.cloud_user_identity import CloudUserIdentity
from apps.user_management.models import User
from common.constants.role import Role
from settings.base import GRAFANA_CLOUD_ONCALL_API_URL
logger = logging.getLogger(__name__)
class CloudConnector(models.Model):
    """
    CloudConnector represents the connection between this OSS installation and
    a Grafana Cloud OnCall organization. Kept as a singleton row whose
    cloud_url points at the cloud organization's base URL.
    """

    cloud_url = models.URLField()

    @classmethod
    def sync_with_cloud(cls, token=None):
        """
        Validate the cloud API token against /api/v1/info/ and store the
        cloud organization URL.

        Args:
            token: optional explicit token; falls back to
                live_settings.GRAFANA_CLOUD_ONCALL_TOKEN.

        Returns:
            (sync_status, error_msg): True/None on success, otherwise
            False and a short human-readable reason.
        """
        sync_status = False
        error_msg = None
        api_token = token or live_settings.GRAFANA_CLOUD_ONCALL_TOKEN
        if api_token is None:
            logger.warning("Unable to sync with cloud. GRAFANA_CLOUD_ONCALL_TOKEN is not set")
            error_msg = "GRAFANA_CLOUD_ONCALL_TOKEN is not set"
        else:
            info_url = urljoin(GRAFANA_CLOUD_ONCALL_API_URL, "api/v1/info/")
            try:
                r = requests.get(info_url, headers={"AUTHORIZATION": api_token}, timeout=5)
                if r.status_code == 200:
                    # Singleton row: create on first sync, update afterwards.
                    connector, _ = cls.objects.get_or_create()
                    connector.cloud_url = r.json()["url"]
                    connector.save()
                    # Fix: previously the success path never set sync_status,
                    # so callers always received (False, None).
                    sync_status = True
                elif r.status_code == 403:
                    logger.warning("Unable to sync with cloud. GRAFANA_CLOUD_ONCALL_TOKEN is invalid")
                    error_msg = "Invalid token"
                else:
                    error_msg = f"Non-200 HTTP code. Got {r.status_code}"
            except requests.exceptions.RequestException as e:
                logger.warning(f"Unable to sync with cloud. Request exception {str(e)}")
                error_msg = "Unable to sync with cloud"
        return sync_status, error_msg

    def sync_users_with_cloud(self) -> tuple[bool, str]:
        """
        Fetch all cloud users (paginated), keep those whose email matches a
        local admin/editor, and rebuild the CloudUserIdentity table from that
        snapshot atomically.

        Returns:
            (sync_status, error_msg) as in sync_with_cloud.
        """
        sync_status = False
        error_msg = None
        api_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN
        if api_token is None:
            logger.warning("Unable to sync with cloud. GRAFANA_CLOUD_ONCALL_TOKEN is not set")
            error_msg = "GRAFANA_CLOUD_ONCALL_TOKEN is not set"
            # Fix: bail out instead of issuing API requests with no auth token.
            return sync_status, error_msg
        existing_emails = list(User.objects.filter(role__in=(Role.ADMIN, Role.EDITOR)).values_list("email", flat=True))
        matching_users = []
        users_url = urljoin(GRAFANA_CLOUD_ONCALL_API_URL, "api/v1/users")
        fetch_next_page = True
        users_fetched = True
        page = 1
        while fetch_next_page:
            try:
                # Fix: "&?short=true" produced a malformed "?short" parameter.
                url = urljoin(users_url, f"?page={page}&short=true")
                r = requests.get(url, headers={"AUTHORIZATION": api_token}, timeout=5)
                if r.status_code != 200:
                    logger.warning(
                        f"Unable to fetch page {page} while sync_users_with_cloud. Response status code {r.status_code}"
                    )
                    error_msg = f"Non-200 HTTP code. Got {r.status_code}"
                    users_fetched = False
                    break
                data = r.json()
                # Keep only cloud users whose email matches a local admin/editor.
                matching_users.extend(list(filter(lambda u: (u["email"] in existing_emails), data["results"])))
                page += 1
                if data["next"] is None:
                    fetch_next_page = False
            except requests.exceptions.RequestException as e:
                logger.warning(f"Unable to sync users with cloud. Request exception {str(e)}")
                error_msg = "Unable to sync with cloud"
                users_fetched = False
                break
        if users_fetched:
            # Only replace identities when every page was fetched successfully.
            with transaction.atomic():
                cloud_users_identities_to_create = []
                for user in matching_users:
                    cloud_users_identities_to_create.append(
                        CloudUserIdentity(
                            cloud_id=user["id"],
                            email=user["email"],
                            phone_number_verified=user["is_phone_number_verified"],
                        )
                    )
                # Full rebuild: drop everything and re-create from the snapshot.
                CloudUserIdentity.objects.all().delete()
                CloudUserIdentity.objects.bulk_create(cloud_users_identities_to_create, batch_size=1000)
            sync_status = True
        return sync_status, error_msg

    def sync_user_with_cloud(self, user):
        """
        Sync a single local user with the matching cloud user (looked up by
        email) and refresh their CloudUserIdentity row.

        Returns:
            (sync_status, error_msg) as in sync_with_cloud.
        """
        sync_status = False
        error_msg = None
        api_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN
        if api_token is None:
            logger.warning(f"Unable to sync_user_with cloud user_id {user.id}. GRAFANA_CLOUD_ONCALL_TOKEN is not set")
            error_msg = "GRAFANA_CLOUD_ONCALL_TOKEN is not set"
        else:
            url = urljoin(GRAFANA_CLOUD_ONCALL_API_URL, f"api/v1/users/?email={user.email}")
            try:
                r = requests.get(url, headers={"AUTHORIZATION": api_token}, timeout=5)
                if r.status_code != 200:
                    logger.warning(
                        f"Unable to sync_user_with_cloud user_id {user.id}. Response status code {r.status_code}"
                    )
                    error_msg = f"Non-200 HTTP code. Got {r.status_code}"
                else:
                    data = r.json()
                    if len(data["results"]) != 0:
                        cloud_user_data = data["results"][0]
                        with transaction.atomic():
                            # Replace any stale identity row for this email.
                            CloudUserIdentity.objects.filter(email=user.email).delete()
                            CloudUserIdentity.objects.create(
                                email=user.email,
                                phone_number_verified=cloud_user_data["is_phone_number_verified"],
                                cloud_id=cloud_user_data["id"],
                            )
                        sync_status = True
                    else:
                        logger.warning(
                            f"Unable to sync_user_with_cloud user_id {user.id}. User with {user.email} not found"
                        )
                        error_msg = f"User with email not found {user.email}"
            except requests.exceptions.RequestException as e:
                logger.warning(f"Unable to sync_user_with cloud user_id {user.id}. Request exception {str(e)}")
                error_msg = "Unable to sync with cloud"
        return sync_status, error_msg

    @classmethod
    def remove_sync(cls):
        """Disconnect from the cloud: drop the connector, all user identities and the heartbeat record."""
        # Imported here to avoid a circular import at module load time.
        from apps.oss_installation.models import CloudHeartbeat

        cls.objects.all().delete()
        CloudUserIdentity.objects.all().delete()
        CloudHeartbeat.objects.all().delete()

View file

@ -0,0 +1,7 @@
from django.db import models
class CloudUserIdentity(models.Model):
    """Local mirror of a Grafana Cloud OnCall user, matched to local users by email."""

    # Whether the phone number is verified on the cloud side.
    phone_number_verified = models.BooleanField(default=False)
    # Cloud-side user identifier (max_length matches the cloud public key length).
    cloud_id = models.CharField(max_length=20)
    # Email used to match this identity against local User rows.
    email = models.EmailField()

View file

@ -1,9 +1,16 @@
import logging
import uuid
from django.db import models
logger = logging.getLogger(__name__)
class OssInstallation(models.Model):
    """
    OssInstallation is model to track installation of OSS OnCall version.
    """

    # Random identifier generated once per installation (used e.g. in usage stats).
    installation_id = models.UUIDField(default=uuid.uuid4, editable=False)
    # NOTE(review): auto_now refreshes this timestamp on *every* save; for a
    # creation time auto_now_add is the usual choice — confirm intent.
    created_at = models.DateTimeField(auto_now=True)
    # When the last usage report was sent; None until the first report.
    report_sent_at = models.DateTimeField(null=True, default=None)

View file

@ -0,0 +1 @@
from .cloud_user import CloudUserSerializer # noqa: F401

View file

@ -0,0 +1,20 @@
from rest_framework import serializers
from apps.oss_installation.models import CloudConnector, CloudUserIdentity
from apps.oss_installation.utils import cloud_user_identity_status
from apps.user_management.models import User
class CloudUserSerializer(serializers.ModelSerializer):
    """Serializes a local User as just their Grafana Cloud sync state."""

    # Computed per-user by get_cloud_data below.
    cloud_data = serializers.SerializerMethodField()

    class Meta:
        model = User
        fields = ["cloud_data"]

    def get_cloud_data(self, obj):
        """Return {"status": <oss_constants CLOUD_* code>, "link": <URL or None>} for obj's email."""
        connector = CloudConnector.objects.filter().first()
        cloud_user_identity = CloudUserIdentity.objects.filter(email=obj.email).first()
        status, link = cloud_user_identity_status(connector, cloud_user_identity)
        cloud_data = {"status": status, "link": link}
        return cloud_data

View file

@ -1,13 +1,9 @@
from urllib.parse import urljoin
import requests
from celery.utils.log import get_task_logger
from django.conf import settings
from django.apps import apps
from django.utils import timezone
from rest_framework import status
from apps.base.utils import live_settings
from apps.oss_installation.models import CloudHeartbeat, OssInstallation
from apps.oss_installation.cloud_heartbeat import send_cloud_heartbeat
from apps.oss_installation.usage_stats import UsageStatsService
from common.custom_celery_tasks import shared_dedicated_queue_retry_task
@ -17,6 +13,8 @@ logger = get_task_logger(__name__)
@shared_dedicated_queue_retry_task()
def send_usage_stats_report():
logger.info("Start send_usage_stats_report")
OssInstallation = apps.get_model("oss_installation", "OssInstallation")
installation = OssInstallation.objects.get_or_create()[0]
enabled = live_settings.SEND_ANONYMOUS_USAGE_STATS
if enabled:
@ -30,66 +28,24 @@ def send_usage_stats_report():
logger.info("Finish send_usage_stats_report")
def _setup_heartbeat_integration():
"""Setup Grafana Cloud OnCall heartbeat integration."""
cloud_heartbeat = None
api_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN
# don't specify a team in the data, so heartbeat integration will be created in the General.
data = {"type": "formatted_webhook", "name": f"OnCall {settings.BASE_URL}"}
url = urljoin(settings.GRAFANA_CLOUD_ONCALL_API_URL, "/api/v1/integrations/")
try:
headers = {"Authorization": api_token}
r = requests.post(url=url, data=data, headers=headers, timeout=5)
if r.status_code == status.HTTP_201_CREATED:
response_data = r.json()
cloud_heartbeat, _ = CloudHeartbeat.objects.update_or_create(
defaults={"integration_id": response_data["id"], "integration_url": response_data["heartbeat"]["link"]}
)
except requests.Timeout:
logger.warning("Unable to create cloud heartbeat integration. Request timeout.")
except requests.exceptions.RequestException as e:
logger.warning(f"Unable to create cloud heartbeat integration. Request exception {str(e)}.")
return cloud_heartbeat
@shared_dedicated_queue_retry_task()
def send_cloud_heartbeat_task():
send_cloud_heartbeat()
@shared_dedicated_queue_retry_task()
def send_cloud_heartbeat():
"""Send heartbeat to Grafana Cloud OnCall integration."""
if not live_settings.GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED or not live_settings.GRAFANA_CLOUD_ONCALL_TOKEN:
logger.info(
"Unable to send cloud heartbeat. Check values for GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED and GRAFANA_CLOUD_ONCALL_TOKEN."
)
return
logger.info("Start send cloud heartbeat")
try:
cloud_heartbeat = CloudHeartbeat.objects.get()
except CloudHeartbeat.DoesNotExist:
cloud_heartbeat = _setup_heartbeat_integration()
if cloud_heartbeat is None:
logger.warning("Unable to setup cloud heartbeat integration.")
return
cloud_heartbeat.success = False
try:
response = requests.get(cloud_heartbeat.integration_url, timeout=5)
logger.info(f"Send cloud heartbeat with response {response.status_code}")
except requests.Timeout:
logger.warning("Unable to send cloud heartbeat. Request timeout.")
except requests.exceptions.RequestException as e:
logger.warning(f"Unable to send cloud heartbeat. Request exception {str(e)}.")
else:
if response.status_code == status.HTTP_200_OK:
cloud_heartbeat.success = True
logger.info("Successfully send cloud heartbeat")
elif response.status_code == status.HTTP_403_FORBIDDEN:
# check for 403 because AlertChannelDefiningMixin returns 403 if no integration was found.
logger.info("Failed to send cloud heartbeat. Integration was not created yet")
# force re-creation on next run
cloud_heartbeat.delete()
def sync_users_with_cloud():
CloudConnector = apps.get_model("oss_installation", "CloudConnector")
logger.info("Start sync_users_with_cloud")
if live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED:
connector = CloudConnector.objects.first()
if connector is not None:
status, error = connector.sync_users_with_cloud()
log_message = "Users synced. Status {status}."
if error:
log_message += f" Error {error}"
logger.info(log_message)
else:
logger.info(f"Failed to send cloud heartbeat. response {response.status_code}")
# save result of cloud heartbeat if it wasn't deleted
if cloud_heartbeat.pk is not None:
cloud_heartbeat.save()
logger.info("Finish send cloud heartbeat")
logger.info("Grafana Cloud is not connected")
else:
logger.info("GRAFANA_CLOUD_NOTIFICATIONS_ENABLED is not enabled")

View file

@ -1,7 +1,15 @@
from common.api_helpers.optional_slash_router import optional_slash_path
from django.urls import include, path
from .views import CloudHeartbeatStatusView
from common.api_helpers.optional_slash_router import OptionalSlashRouter, optional_slash_path
from .views import CloudConnectionView, CloudHeartbeatView, CloudUsersView, CloudUserView
router = OptionalSlashRouter()
router.register("cloud_users", CloudUserView, basename="cloud-users")
urlpatterns = [
optional_slash_path("cloud_heartbeat_status", CloudHeartbeatStatusView.as_view(), name="cloud_heartbeat_status"),
path("", include(router.urls)),
optional_slash_path("cloud_users", CloudUsersView.as_view(), name="cloud-users-list"),
optional_slash_path("cloud_connection", CloudConnectionView.as_view(), name="cloud-connection-status"),
optional_slash_path("cloud_heartbeat", CloudHeartbeatView.as_view(), name="cloud-heartbeat"),
]

View file

@ -3,11 +3,11 @@ import platform
from dataclasses import asdict, dataclass
import requests
from django.apps import apps
from django.conf import settings
from django.db.models import Sum
from apps.alerts.models import AlertGroupCounter
from apps.oss_installation.models import OssInstallation
from apps.oss_installation.utils import active_oss_users_count
USAGE_STATS_URL = "https://stats.grafana.org/oncall-usage-report"
@ -27,9 +27,12 @@ class UsageStatsReport:
class UsageStatsService:
def get_usage_stats_report(self):
OssInstallation = apps.get_model("oss_installation", "OssInstallation")
metrics = {}
metrics["active_users_count"] = active_oss_users_count()
total_alert_groups = AlertGroupCounter.objects.aggregate(Sum("value")).get("value__sum", 0)
total_alert_groups = AlertGroupCounter.objects.aggregate(Sum("value")).get("value__sum", None)
if total_alert_groups is None:
total_alert_groups = 0
metrics["alert_groups_count"] = total_alert_groups
usage_stats_id = OssInstallation.objects.get_or_create()[0].installation_id

View file

@ -1,19 +1,23 @@
from contextlib import suppress
import logging
from urllib.parse import urljoin
from django.apps import apps
from django.utils import timezone
from apps.alerts.models import AlertGroupLogRecord, EscalationPolicy
from apps.base.models import UserNotificationPolicyLogRecord
from apps.public_api.constants import DEMO_USER_ID
from apps.oss_installation import constants as oss_constants
from apps.schedules.ical_utils import list_users_to_notify_from_ical_for_period
from apps.schedules.models import OnCallSchedule
from apps.user_management.models import User
logger = logging.getLogger(__name__)
def active_oss_users_count():
"""
active_oss_users_count returns count of active users of oss installation.
"""
OnCallSchedule = apps.get_model("schedules", "OnCallSchedule")
AlertGroupLogRecord = apps.get_model("alerts", "AlertGroupLogRecord")
EscalationPolicy = apps.get_model("alerts", "EscalationPolicy")
UserNotificationPolicyLogRecord = apps.get_model("base", "UserNotificationPolicyLogRecord")
# Take logs for previous 24 hours
start = timezone.now() - timezone.timedelta(hours=24)
@ -62,9 +66,21 @@ def active_oss_users_count():
for user in users_from_schedule:
unique_active_users.add(user.pk)
# Remove demo user from active users
with suppress(User.DoesNotExist):
demo_user = User.objects.get(public_primary_key=DEMO_USER_ID)
with suppress(KeyError):
unique_active_users.remove(demo_user.pk)
return len(unique_active_users)
def cloud_user_identity_status(connector, identity):
    """Classify how a local user maps onto Grafana Cloud.

    Returns (status, link): status is one of the oss_constants.CLOUD_* codes;
    link points at the matching cloud user page when one exists, at the cloud
    root when the user was not found, and is None when cloud sync is not set up.
    """
    if connector is None:
        return oss_constants.CLOUD_NOT_SYNCED, None
    if identity is None:
        return oss_constants.CLOUD_SYNCED_USER_NOT_FOUND, connector.cloud_url
    if identity.phone_number_verified:
        state = oss_constants.CLOUD_SYNCED_PHONE_VERIFIED
    else:
        state = oss_constants.CLOUD_SYNCED_PHONE_NOT_VERIFIED
    user_page = f"a/grafana-oncall-app/?page=users&p=1&id={identity.cloud_id}"
    return state, urljoin(connector.cloud_url, user_page)

View file

@ -1 +1,3 @@
from .cloud_heartbeat_status import CloudHeartbeatStatusView # noqa: F401
from .cloud_connection import CloudConnectionView # noqa: F401
from .cloud_heartbeat import CloudHeartbeatView # noqa: F401
from .cloud_users import CloudUsersView, CloudUserView # noqa: F401

View file

@ -0,0 +1,39 @@
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.api.permissions import IsAdmin
from apps.auth_token.auth import PluginAuthentication
from apps.base.models import LiveSetting
from apps.base.utils import live_settings
from apps.oss_installation.cloud_heartbeat import get_heartbeat_link
from apps.oss_installation.models import CloudConnector, CloudHeartbeat
class CloudConnectionView(APIView):
    """Report and tear down the Grafana Cloud connection (admin only)."""

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, IsAdmin)

    def get(self, request):
        """Return the connection/heartbeat status summary for the UI."""
        connector = CloudConnector.objects.first()
        heartbeat = CloudHeartbeat.objects.first()
        response = {
            "cloud_connection_status": connector is not None,
            "cloud_notifications_enabled": live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED,
            "cloud_heartbeat_enabled": live_settings.GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED,
            "cloud_heartbeat_link": get_heartbeat_link(connector, heartbeat),
            # Heartbeat is "up" only if a record exists and its last send succeeded.
            "cloud_heartbeat_status": heartbeat is not None and heartbeat.success,
        }
        return Response(response)

    def delete(self, request):
        """Disconnect from the cloud: clear the stored token and remove sync data."""
        # Clear the live-setting token so no further cloud calls are authorized.
        s = LiveSetting.objects.filter(name="GRAFANA_CLOUD_ONCALL_TOKEN").first()
        if s is not None:
            s.value = None
            s.save()
        connector = CloudConnector.objects.first()
        # NOTE(review): the token above is cleared even when no connector exists
        # and a 404 is returned — confirm this ordering is intentional.
        if connector is None:
            return Response(status=status.HTTP_404_NOT_FOUND)
        # Drops connector, user identities and heartbeat records.
        connector.remove_sync()
        return Response(status=status.HTTP_204_NO_CONTENT)

View file

@ -0,0 +1,27 @@
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.api.permissions import IsAdmin
from apps.auth_token.auth import PluginAuthentication
from apps.oss_installation.cloud_heartbeat import get_heartbeat_link, setup_heartbeat_integration
from apps.oss_installation.models import CloudConnector, CloudHeartbeat
class CloudHeartbeatView(APIView):
    """Create the cloud heartbeat integration on demand (admin only)."""

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, IsAdmin)

    def post(self, request):
        """Set up the heartbeat integration; 400 if it already exists or cloud is not connected."""
        connector = CloudConnector.objects.first()
        if connector is not None:
            try:
                # CloudHeartbeat is a singleton; .get() succeeding means it already exists.
                CloudHeartbeat.objects.get()
                return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Cloud heartbeat already exists"})
            except CloudHeartbeat.DoesNotExist:
                # heartbeat (and hence link) may be None if the cloud API call failed.
                heartbeat = setup_heartbeat_integration()
                link = get_heartbeat_link(connector, heartbeat)
                return Response(status=status.HTTP_200_OK, data={"link": link})
        else:
            return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Grafana Cloud is not connected"})

View file

@ -1,15 +0,0 @@
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.auth_token.auth import PluginAuthentication
from apps.oss_installation.models import CloudHeartbeat
class CloudHeartbeatStatusView(APIView):
authentication_classes = (PluginAuthentication,)
permission_classes = (IsAuthenticated,)
def get(self, request):
response = {"status": CloudHeartbeat.status()}
return Response(response)

View file

@ -0,0 +1,107 @@
from collections import OrderedDict
from rest_framework import mixins, status, viewsets
from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from apps.api.permissions import ActionPermission, AnyRole, IsAdmin, IsOwnerOrAdmin
from apps.auth_token.auth import PluginAuthentication
from apps.oss_installation.models import CloudConnector, CloudUserIdentity
from apps.oss_installation.serializers import CloudUserSerializer
from apps.oss_installation.utils import cloud_user_identity_status
from apps.user_management.models import User
from common.api_helpers.mixins import PublicPrimaryKeyMixin
from common.api_helpers.paginators import HundredPageSizePaginator
from common.constants.role import Role
class CloudUsersView(HundredPageSizePaginator, APIView):
    """Paginated list of admin/editor users annotated with their Grafana Cloud sync state (admin only)."""

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, IsAdmin)

    def get(self, request):
        """Return one page of users, each with id/email/username and cloud_data {status, link}."""
        organization = request.user.organization
        queryset = User.objects.filter(organization=organization, role__in=[Role.ADMIN, Role.EDITOR])
        if request.user.current_team is not None:
            queryset = queryset.filter(teams=request.user.current_team).distinct()
        # Emails of *all* matching users (not just this page) so that
        # matched_users_count reflects the whole organization.
        emails = list(queryset.values_list("email", flat=True))
        results = self.paginate_queryset(queryset, request, view=self)
        cloud_identities = list(CloudUserIdentity.objects.filter(email__in=emails))
        cloud_identities = {cloud_identity.email: cloud_identity for cloud_identity in cloud_identities}
        response = []
        connector = CloudConnector.objects.first()
        for user in results:
            cloud_identity = cloud_identities.get(user.email, None)
            # Fix: renamed from `status` — that shadowed the imported
            # rest_framework `status` module inside this method.
            cloud_status, link = cloud_user_identity_status(connector, cloud_identity)
            response.append(
                {
                    "id": user.public_primary_key,
                    "email": user.email,
                    "username": user.username,
                    "cloud_data": {"status": cloud_status, "link": link},
                }
            )
        return self.get_paginated_response_with_matched_users_count(response, len(cloud_identities))

    def get_paginated_response_with_matched_users_count(self, data, matched_users_count):
        """Standard DRF pagination envelope plus a matched_users_count field."""
        return Response(
            OrderedDict(
                [
                    ("count", self.page.paginator.count),
                    ("matched_users_count", matched_users_count),
                    ("next", self.get_next_link()),
                    ("previous", self.get_previous_link()),
                    ("results", data),
                ]
            )
        )

    def post(self, request):
        """Trigger a full users sync with the cloud; 400 when cloud is not connected."""
        connector = CloudConnector.objects.first()
        if connector is not None:
            sync_status, err = connector.sync_users_with_cloud()
            return Response(status=status.HTTP_200_OK, data={"status": sync_status, "error": err})
        else:
            return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Grafana Cloud is not connected"})
class CloudUserView(
    PublicPrimaryKeyMixin,
    mixins.RetrieveModelMixin,
    viewsets.GenericViewSet,
):
    """Retrieve a single user's cloud sync state and trigger a per-user sync."""

    authentication_classes = (PluginAuthentication,)
    permission_classes = (IsAuthenticated, ActionPermission)
    # Any role may read; only admins may trigger a sync.
    action_permissions = {
        AnyRole: ("retrieve",),
        IsAdmin: ("sync",),
    }
    # Object-level: users may act on themselves, admins on anyone.
    action_object_permissions = {
        IsOwnerOrAdmin: ("retrieve", "sync"),
    }

    serializer_class = CloudUserSerializer

    def get_queryset(self):
        # Scope lookups to the requester's organization.
        queryset = User.objects.filter(organization=self.request.user.organization)
        return queryset

    @action(detail=True, methods=["post"])
    def sync(self, request, pk):
        """Sync this user with the cloud; 400 when cloud is not connected."""
        user = self.get_object()
        connector = CloudConnector.objects.first()
        if connector is not None:
            sync_status, err = connector.sync_user_with_cloud(user)
            return Response(status=status.HTTP_200_OK, data={"status": sync_status, "error": err})
        else:
            return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Grafana Cloud is not connected"})

View file

@ -1,69 +1,3 @@
from django.utils import dateparse
DEMO_USER_ID = "U4DNY931HHJS5"
DEMO_ORGANIZATION_ID = "TCNPY4A1BWUMP"
DEMO_SLACK_USER_ID = "UALEXSLACKDJPK"
DEMO_SLACK_TEAM_ID = "TALEXSLACKDJPK"
DEMO_AUTH_TOKEN = "meowmeowmeow"
DEMO_USER_USERNAME = "Alex"
DEMO_USER_EMAIL = "public-api-demo-user-1@amixr.io"
DEMO_INTEGRATION_ID = "CFRPV98RPR1U8"
DEMO_INTEGRATION_LINK_TOKEN = "mReAoNwDm0eMwKo1mTeTwYo"
DEMO_INTEGRATION_NAME = "Grafana :blush:"
DEMO_ROUTE_ID_1 = "RIYGUJXCPFHXY"
DEMO_ROUTE_ID_2 = "RVBE4RKQSCGJ2"
DEMO_SLACK_CHANNEL_FOR_ROUTE_ID = "CH23212D"
DEMO_ESCALATION_CHAIN_ID = "F5JU6KJET33FE"
DEMO_ESCALATION_POLICY_ID_1 = "E3GA6SJETWWJS"
DEMO_ESCALATION_POLICY_ID_2 = "E5JJTU52M5YM4"
DEMO_SCHEDULE_ID_ICAL = "SBM7DV7BKFUYU"
DEMO_SCHEDULE_ID_CALENDAR = "S3Z477AHDXTMF"
DEMO_SCHEDULE_NAME_ICAL = "Demo schedule iCal"
DEMO_SCHEDULE_NAME_CALENDAR = "Demo schedule Calendar"
DEMO_SCHEDULE_ICAL_URL_PRIMARY = "https://example.com/meow_calendar.ics"
DEMO_SCHEDULE_ICAL_URL_OVERRIDES = "https://example.com/meow_calendar_overrides.ics"
DEMO_INCIDENT_ID = "I68T24C13IFW1"
DEMO_INCIDENT_CREATED_AT = "2020-05-19T12:37:01.430444Z"
DEMO_INCIDENT_RESOLVED_AT = "2020-05-19T13:37:01.429805Z"
DEMO_ALERT_IDS = [
("AA74DN7T4JQB6", "2020-05-11T20:07:43Z"),
("AR9SSYFKE2PV7", "2020-05-11T20:07:54Z"),
("AWJQSGEYYUFGH", "2020-05-11T20:07:58Z"),
]
DEMO_ALERT_PAYLOAD = {
"evalMatches": [
{"value": 100, "metric": "High value", "tags": None},
{"value": 200, "metric": "Higher Value", "tags": None},
],
"message": "Someone is testing the alert notification within grafana.",
"ruleId": 0,
"ruleName": "Test notification",
"ruleUrl": "https://amixr.io/",
"state": "alerting",
"title": "[Alerting] Test notification",
}
VALID_DATE_FOR_DELETE_INCIDENT = dateparse.parse_date("2020-07-04")
DEMO_SLACK_CHANNEL_NAME = "meow_channel"
DEMO_SLACK_CHANNEL_SLACK_ID = "MEOW_SLACK_ID"
DEMO_PERSONAL_NOTIFICATION_ID_1 = "NT79GA9I7E4DJ"
DEMO_PERSONAL_NOTIFICATION_ID_2 = "ND9EHN5LN1DUU"
DEMO_PERSONAL_NOTIFICATION_ID_3 = "NEF49YQ1HNPDD"
DEMO_PERSONAL_NOTIFICATION_ID_4 = "NWAL6WFJNWDD8"
DEMO_RESOLUTION_NOTE_ID = "M4BTQUS3PRHYQ"
DEMO_RESOLUTION_NOTE_TEXT = "Demo resolution note"
DEMO_RESOLUTION_NOTE_CREATED_AT = "2020-06-19T12:40:01.429805Z"
DEMO_RESOLUTION_NOTE_SOURCE = "web"
DEMO_CUSTOM_ACTION_ID = "KGEFG74LU1D8L"
DEMO_CUSTOM_ACTION_NAME = "Publish Incident To Jira"
DEMO_SLACK_USER_GROUP_ID = "GPFAPH7J7BKJB"
DEMO_SLACK_USER_GROUP_SLACK_ID = "MEOW_SLACK_ID"
DEMO_SLACK_USER_GROUP_NAME = "Meow Group"
DEMO_SLACK_USER_GROUP_HANDLE = "meow_group"
DEMO_ON_CALL_SHIFT_ID_1 = "OH3V5FYQEYJ6M"
DEMO_ON_CALL_SHIFT_ID_2 = "O9WTH7CKM3KZW"
DEMO_ON_CALL_SHIFT_NAME_1 = "Demo single event"
DEMO_ON_CALL_SHIFT_NAME_2 = "Demo recurrent event"
DEMO_ON_CALL_SHIFT_START_1 = "2020-09-10T08:00:00"
DEMO_ON_CALL_SHIFT_START_2 = "2020-09-10T16:00:00"
DEMO_ON_CALL_SHIFT_DURATION = 10800
DEMO_ON_CALL_SHIFT_BY_DAY = ["MO", "WE", "FR"]

View file

@ -1,14 +1,8 @@
from apps.public_api.constants import DEMO_AUTH_TOKEN, VALID_DATE_FOR_DELETE_INCIDENT
from apps.public_api.constants import VALID_DATE_FOR_DELETE_INCIDENT
from apps.slack.slack_client import SlackClientWithErrorHandling
from apps.slack.slack_client.exceptions import SlackAPITokenException
def is_demo_token_request(request):
if DEMO_AUTH_TOKEN == request.headers.get("Authorization"):
return True
return False
def team_has_slack_token_for_deleting(alert_group):
if alert_group.slack_message and alert_group.slack_message.slack_team_identity:
sc = SlackClientWithErrorHandling(alert_group.slack_message.slack_team_identity.bot_access_token)

View file

@ -4,8 +4,6 @@ from rest_framework import fields, serializers
from apps.alerts.grafana_alerting_sync_manager.grafana_alerting_sync import GrafanaAlertingSyncManager
from apps.alerts.models import AlertReceiveChannel
from apps.public_api.constants import DEMO_INTEGRATION_LINK_TOKEN
from apps.public_api.helpers import is_demo_token_request
from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField
from common.api_helpers.exceptions import BadRequest
from common.api_helpers.mixins import EagerLoadingMixin
@ -62,12 +60,6 @@ class IntegrationSerializer(EagerLoadingMixin, serializers.ModelSerializer, Main
default_route = self._get_default_route_iterative(instance)
serializer = DefaultChannelFilterSerializer(default_route, context=self.context)
result["default_route"] = serializer.data
if is_demo_token_request(self.context["request"]):
# Replace integration token to not receive alerts on demo integration
link = result["link"]
real_token = instance.token
link = link.replace(real_token, DEMO_INTEGRATION_LINK_TOKEN)
result["link"] = link
return result

View file

@ -2,8 +2,6 @@ from django.apps import apps
from django.utils import timezone
from rest_framework import serializers
from apps.public_api import constants as public_api_constants
from apps.public_api.helpers import is_demo_token_request
from apps.schedules.ical_utils import list_users_to_notify_from_ical
from apps.schedules.models import OnCallSchedule
from apps.slack.models import SlackUserGroup
@ -36,14 +34,11 @@ class ScheduleBaseSerializer(serializers.ModelSerializer):
raise BadRequest(detail="Schedule with this name already exists")
def get_on_call_now(self, obj):
if not is_demo_token_request(self.context["request"]):
users_on_call = list_users_to_notify_from_ical(obj, timezone.datetime.now(timezone.utc))
if users_on_call is not None:
return [user.public_primary_key for user in users_on_call]
else:
return []
users_on_call = list_users_to_notify_from_ical(obj, timezone.datetime.now(timezone.utc))
if users_on_call is not None:
return [user.public_primary_key for user in users_on_call]
else:
return [public_api_constants.DEMO_USER_ID]
return []
def _correct_validated_data(self, validated_data):
slack_field = validated_data.pop("slack", {})

View file

@ -1,14 +1,7 @@
import pytest
from django.utils import dateparse, timezone
from pytest_factoryboy import register
from apps.alerts.models import EscalationPolicy, ResolutionNote
from apps.auth_token.models import ApiAuthToken
from apps.base.models import UserNotificationPolicy
from apps.public_api import constants as public_api_constants
from apps.schedules.models import CustomOnCallShift, OnCallScheduleCalendar, OnCallScheduleICal
from apps.user_management.tests.factories import OrganizationFactory, UserFactory
from common.constants.role import Role
register(UserFactory)
register(OrganizationFactory)
@ -22,222 +15,3 @@ def make_organization_and_user_with_token(make_organization_and_user, make_publi
return organization, user, token
return _make_organization_and_user_with_token
@pytest.fixture()
def make_organization_and_user_with_slack_identities_for_demo_token(
    make_slack_team_identity,
    make_organization,
    make_slack_user_identity,
    make_user,
):
    """Factory fixture: build the fixed demo organization/user pair, wired to
    Slack identities, and register the well-known demo API auth token.

    The returned callable produces ``(organization, user, token)`` where every
    identifier comes from ``public_api_constants`` so that demo-API payload
    assertions stay stable across test runs.
    """

    def _make_organization_and_user_with_slack_identities_for_demo_token():
        # The Slack team identity must exist first: both the organization and
        # the Slack user identity reference it.
        slack_team_identity = make_slack_team_identity(slack_id=public_api_constants.DEMO_SLACK_TEAM_ID)
        organization = make_organization(
            slack_team_identity=slack_team_identity, public_primary_key=public_api_constants.DEMO_ORGANIZATION_ID
        )
        slack_user_identity = make_slack_user_identity(
            slack_id=public_api_constants.DEMO_SLACK_USER_ID,
            slack_team_identity=slack_team_identity,
        )
        user = make_user(
            organization=organization,
            public_primary_key=public_api_constants.DEMO_USER_ID,
            email=public_api_constants.DEMO_USER_EMAIL,
            username=public_api_constants.DEMO_USER_USERNAME,
            role=Role.ADMIN,
            slack_user_identity=slack_user_identity,
        )
        # Persist the fixed demo token so tests can authenticate with it
        # directly via the Authorization header.
        ApiAuthToken.create_auth_token(user, organization, public_api_constants.DEMO_AUTH_TOKEN)
        token = public_api_constants.DEMO_AUTH_TOKEN
        return organization, user, token

    return _make_organization_and_user_with_slack_identities_for_demo_token
@pytest.fixture()
def make_data_for_demo_token(
    make_alert_receive_channel,
    make_channel_filter,
    make_escalation_chain,
    make_escalation_policy,
    make_alert_group,
    make_alert,
    make_resolution_note,
    make_custom_action,
    make_slack_user_group,
    make_schedule,
    make_on_call_shift,
    make_slack_channel,
    make_user_notification_policy,
):
    """Factory fixture: populate the demo organization with the full fixed
    dataset the public-API demo responses are asserted against — integration,
    routes, escalation chain and policies, one resolved incident with alerts,
    a resolution note, a custom action, Slack objects, two schedules with
    shifts, and the user's personal notification rules.

    All public primary keys and timestamps come from ``public_api_constants``
    so the serialized payloads are byte-stable in tests.
    """

    def _make_data_for_demo_token(organization, user):
        alert_receive_channel = make_alert_receive_channel(
            organization,
            public_primary_key=public_api_constants.DEMO_INTEGRATION_ID,
            verbal_name=public_api_constants.DEMO_INTEGRATION_NAME,
        )
        # Route ordering matters: the regex route is first, the catch-all
        # default route second.
        route_1 = make_channel_filter(
            public_primary_key=public_api_constants.DEMO_ROUTE_ID_1,
            alert_receive_channel=alert_receive_channel,
            slack_channel_id=public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID,
            filtering_term="us-(east|west)",
            order=0,
        )
        make_channel_filter(
            public_primary_key=public_api_constants.DEMO_ROUTE_ID_2,
            alert_receive_channel=alert_receive_channel,
            slack_channel_id=public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID,
            filtering_term=".*",
            order=1,
            is_default=True,
        )
        escalation_chain = make_escalation_chain(
            organization, public_primary_key=public_api_constants.DEMO_ESCALATION_CHAIN_ID
        )
        # Step 0: wait one minute; step 1: notify users from the queue.
        make_escalation_policy(
            escalation_chain,
            public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_1,
            escalation_policy_step=EscalationPolicy.STEP_WAIT,
            order=0,
            wait_delay=EscalationPolicy.ONE_MINUTE,
        )
        escalation_policy_2 = make_escalation_policy(
            escalation_chain,
            public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_2,
            escalation_policy_step=EscalationPolicy.STEP_NOTIFY_USERS_QUEUE,
            order=1,
        )
        escalation_policy_2.notify_to_users_queue.add(user)
        # A single, already-resolved incident routed through route_1.
        alert_group = make_alert_group(
            alert_receive_channel,
            public_primary_key=public_api_constants.DEMO_INCIDENT_ID,
            resolved=True,
            channel_filter=route_1,
        )
        # Timestamps are overwritten after creation so save() auto_now-style
        # behavior does not disturb the canned values.
        alert_group.started_at = dateparse.parse_datetime(public_api_constants.DEMO_INCIDENT_CREATED_AT)
        alert_group.resolved_at = dateparse.parse_datetime(public_api_constants.DEMO_INCIDENT_RESOLVED_AT)
        alert_group.save(update_fields=["started_at", "resolved_at"])
        # One alert per (id, created_at) pair declared in the constants.
        for alert_id, created_at in public_api_constants.DEMO_ALERT_IDS:
            alert = make_alert(
                public_primary_key=alert_id,
                alert_group=alert_group,
                raw_request_data=public_api_constants.DEMO_ALERT_PAYLOAD,
            )
            alert.created_at = dateparse.parse_datetime(created_at)
            alert.save(update_fields=["created_at"])
        resolution_note = make_resolution_note(
            alert_group=alert_group,
            source=ResolutionNote.Source.WEB,
            author=user,
            public_primary_key=public_api_constants.DEMO_RESOLUTION_NOTE_ID,
            message_text=public_api_constants.DEMO_RESOLUTION_NOTE_TEXT,
        )
        resolution_note.created_at = dateparse.parse_datetime(public_api_constants.DEMO_RESOLUTION_NOTE_CREATED_AT)
        resolution_note.save(update_fields=["created_at"])
        make_custom_action(
            public_primary_key=public_api_constants.DEMO_CUSTOM_ACTION_ID,
            organization=organization,
            name=public_api_constants.DEMO_CUSTOM_ACTION_NAME,
        )
        user_group = make_slack_user_group(
            public_primary_key=public_api_constants.DEMO_SLACK_USER_GROUP_ID,
            name=public_api_constants.DEMO_SLACK_USER_GROUP_NAME,
            handle=public_api_constants.DEMO_SLACK_USER_GROUP_HANDLE,
            slack_id=public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID,
            slack_team_identity=organization.slack_team_identity,
        )
        # ical schedule
        make_schedule(
            organization=organization,
            schedule_class=OnCallScheduleICal,
            public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL,
            ical_url_primary=public_api_constants.DEMO_SCHEDULE_ICAL_URL_PRIMARY,
            ical_url_overrides=public_api_constants.DEMO_SCHEDULE_ICAL_URL_OVERRIDES,
            name=public_api_constants.DEMO_SCHEDULE_NAME_ICAL,
            channel=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID,
            user_group=user_group,
        )
        # calendar schedule
        schedule_calendar = make_schedule(
            organization=organization,
            schedule_class=OnCallScheduleCalendar,
            public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_CALENDAR,
            name=public_api_constants.DEMO_SCHEDULE_NAME_CALENDAR,
            channel=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID,
            user_group=user_group,
            # NOTE(review): IANA zone is spelled "America/New_York" — confirm
            # the lowercase "york" here is intentional / unvalidated.
            time_zone="America/New_york",
        )
        on_call_shift_1 = make_on_call_shift(
            shift_type=CustomOnCallShift.TYPE_SINGLE_EVENT,
            organization=organization,
            public_primary_key=public_api_constants.DEMO_ON_CALL_SHIFT_ID_1,
            name=public_api_constants.DEMO_ON_CALL_SHIFT_NAME_1,
            start=dateparse.parse_datetime(public_api_constants.DEMO_ON_CALL_SHIFT_START_1),
            duration=timezone.timedelta(seconds=public_api_constants.DEMO_ON_CALL_SHIFT_DURATION),
        )
        on_call_shift_1.users.add(user)
        on_call_shift_2 = make_on_call_shift(
            shift_type=CustomOnCallShift.TYPE_RECURRENT_EVENT,
            organization=organization,
            public_primary_key=public_api_constants.DEMO_ON_CALL_SHIFT_ID_2,
            name=public_api_constants.DEMO_ON_CALL_SHIFT_NAME_2,
            start=dateparse.parse_datetime(public_api_constants.DEMO_ON_CALL_SHIFT_START_2),
            duration=timezone.timedelta(seconds=public_api_constants.DEMO_ON_CALL_SHIFT_DURATION),
            frequency=CustomOnCallShift.FREQUENCY_WEEKLY,
            interval=2,
            by_day=public_api_constants.DEMO_ON_CALL_SHIFT_BY_DAY,
            source=CustomOnCallShift.SOURCE_TERRAFORM,
        )
        on_call_shift_2.users.add(user)
        schedule_calendar.custom_on_call_shifts.add(on_call_shift_1)
        schedule_calendar.custom_on_call_shifts.add(on_call_shift_2)
        make_slack_channel(
            organization.slack_team_identity,
            slack_id=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID,
            name=public_api_constants.DEMO_SLACK_CHANNEL_NAME,
        )
        # Personal notification chain: SMS -> wait 5 min -> phone call, plus
        # one "important" phone-call rule.
        make_user_notification_policy(
            public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1,
            important=False,
            user=user,
            notify_by=UserNotificationPolicy.NotificationChannel.SMS,
            step=UserNotificationPolicy.Step.NOTIFY,
            order=0,
        )
        make_user_notification_policy(
            public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_2,
            important=False,
            user=user,
            step=UserNotificationPolicy.Step.WAIT,
            wait_delay=UserNotificationPolicy.FIVE_MINUTES,
            order=1,
        )
        make_user_notification_policy(
            public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_3,
            important=False,
            user=user,
            step=UserNotificationPolicy.Step.NOTIFY,
            notify_by=UserNotificationPolicy.NotificationChannel.PHONE_CALL,
            order=2,
        )
        make_user_notification_policy(
            public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_4,
            important=True,
            user=user,
            step=UserNotificationPolicy.Step.NOTIFY,
            notify_by=UserNotificationPolicy.NotificationChannel.PHONE_CALL,
            order=0,
        )
        # Factory creates objects by side effect; nothing useful to return.
        return

    return _make_data_for_demo_token

View file

@ -1,110 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.public_api import constants as public_api_constants
# Expected serialized alerts: one entry per (id, created_at) pair declared in
# the public-API demo constants; the payload mirrors a Grafana test alert.
demo_alerts_results = [
    {
        "id": alert_id,
        "alert_group_id": public_api_constants.DEMO_INCIDENT_ID,
        "created_at": created_at,
        "payload": {
            "state": "alerting",
            "title": "[Alerting] Test notification",
            "ruleId": 0,
            "message": "Someone is testing the alert notification within grafana.",
            "ruleUrl": "https://amixr.io/",
            "ruleName": "Test notification",
            "evalMatches": [
                {"tags": None, "value": 100, "metric": "High value"},
                {"tags": None, "value": 200, "metric": "Higher Value"},
            ],
        },
    }
    for alert_id, created_at in public_api_constants.DEMO_ALERT_IDS
]
# https://api-docs.amixr.io/#list-alerts
demo_alerts_payload = {"count": 3, "next": None, "previous": None, "results": demo_alerts_results}
@pytest.mark.django_db
def test_get_alerts(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Listing alerts with the demo token returns the canned demo payload.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(reverse("api-public:alerts-list"), format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_alerts_payload
@pytest.mark.django_db
def test_get_alerts_filter_by_incident(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Filtering by the demo incident id yields the same full demo payload.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    list_url = reverse("api-public:alerts-list")
    filtered_url = f"{list_url}?alert_group_id={public_api_constants.DEMO_INCIDENT_ID}"
    response = client.get(filtered_url, format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_alerts_payload
@pytest.mark.django_db
def test_get_alerts_filter_by_incident_no_results(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # An unknown alert_group_id filter returns 200 with an empty result list.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(
        reverse("api-public:alerts-list") + "?alert_group_id=impossible_alert_group_id",
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert response.status_code == status.HTTP_200_OK
    assert response.data["results"] == []
@pytest.mark.django_db
def test_get_alerts_search(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # A search term present in the demo payload matches every canned alert.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(
        reverse("api-public:alerts-list") + "?search=evalMatches",
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_alerts_payload
@pytest.mark.django_db
def test_get_alerts_search_no_results(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # A search term absent from the demo payload returns an empty result list.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(
        reverse("api-public:alerts-list") + "?search=impossible_payload",
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert response.status_code == status.HTTP_200_OK
    assert response.data["results"] == []

View file

@ -1,32 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.public_api import constants as public_api_constants
# Canned serialized custom action for the demo organization.
demo_custom_action_payload = {
    "id": public_api_constants.DEMO_CUSTOM_ACTION_ID,
    "name": public_api_constants.DEMO_CUSTOM_ACTION_NAME,
    "team_id": None,
}
# Paginated list wrapper: the demo org has exactly one custom action.
demo_custom_action_payload_list = {"count": 1, "next": None, "previous": None, "results": [demo_custom_action_payload]}
@pytest.mark.django_db
def test_demo_get_custom_actions_list(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # The demo token lists exactly the one canned custom action.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(reverse("api-public:actions-list"), format="json", HTTP_AUTHORIZATION=f"{token}")
    assert response.status_code == status.HTTP_200_OK
    assert response.data == demo_custom_action_payload_list

View file

@ -1,169 +0,0 @@
import pytest
from django.urls import reverse
from django.utils import timezone
from rest_framework import status
from rest_framework.test import APIClient
from apps.alerts.models import EscalationPolicy
from apps.public_api import constants as public_api_constants
# https://api-docs.amixr.io/#get-escalation-policy
# Canned single-policy payload: the "wait 60s" first step of the demo chain.
demo_escalation_policy_payload = {
    "id": public_api_constants.DEMO_ESCALATION_POLICY_ID_1,
    "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID,
    "position": 0,
    "type": "wait",
    "duration": timezone.timedelta(seconds=60).seconds,
}
# https://api-docs.amixr.io/#list-escalation-policies
# Canned list payload: the wait step followed by the notify-next step.
demo_escalation_policies_payload = {
    "count": 2,
    "next": None,
    "previous": None,
    "results": [
        {
            "id": public_api_constants.DEMO_ESCALATION_POLICY_ID_1,
            "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID,
            "position": 0,
            "type": "wait",
            "duration": timezone.timedelta(seconds=60).seconds,
        },
        {
            "id": public_api_constants.DEMO_ESCALATION_POLICY_ID_2,
            "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID,
            "position": 1,
            "type": "notify_person_next_each_time",
            # NOTE(review): hard-coded user id — presumably equals
            # public_api_constants.DEMO_USER_ID; confirm before reuse.
            "persons_to_notify_next_each_time": ["U4DNY931HHJS5"],
        },
    ],
}
@pytest.mark.django_db
def test_get_escalation_policies(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Listing escalation policies returns both canned demo steps.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(reverse("api-public:escalation_policies-list"), format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_escalation_policies_payload
@pytest.mark.django_db
def test_get_escalation_policies_filter_by_route(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Filtering by the demo route id still yields the full canned list.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    list_url = reverse("api-public:escalation_policies-list")
    filtered_url = f"{list_url}?route_id={public_api_constants.DEMO_ROUTE_ID_1}"
    response = client.get(filtered_url, format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_escalation_policies_payload
@pytest.mark.django_db
def test_create_escalation_policy(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Demo-token writes are no-ops: POST answers 201 but the canned first
    # policy payload comes back unchanged.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    create_payload = {
        "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID,
        "type": "notify_person_next_each_time",
        "position": 0,
        "persons_to_notify_next_each_time": [user.public_primary_key],
    }
    response = client.post(
        reverse("api-public:escalation_policies-list"),
        data=create_payload,
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert response.status_code == status.HTTP_201_CREATED
    assert response.json() == demo_escalation_policy_payload
@pytest.mark.django_db
def test_invalid_step_type(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Even an invalid step type is accepted for demo tokens: the request is
    # ignored and the canned payload is echoed back with 201.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    create_payload = {
        "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID,
        "type": "this_is_invalid_step_type",  # invalid step type
        "position": 0,
        "persons_to_notify_next_each_time": [user.public_primary_key],
    }
    response = client.post(
        reverse("api-public:escalation_policies-list"),
        data=create_payload,
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert response.status_code == status.HTTP_201_CREATED
    assert response.json() == demo_escalation_policy_payload
@pytest.mark.django_db
def test_update_escalation_step(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # PUT returns 200 but the canned first policy payload is unchanged.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    update_payload = {
        "route_id": public_api_constants.DEMO_ROUTE_ID_1,
        "type": "notify_person_next_each_time",
        "position": 1,
        "persons_to_notify_next_each_time": [user.public_primary_key],
    }
    detail_url = reverse(
        "api-public:escalation_policies-detail", kwargs={"pk": public_api_constants.DEMO_ESCALATION_POLICY_ID_1}
    )
    response = client.put(detail_url, data=update_payload, format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_escalation_policy_payload
@pytest.mark.django_db
def test_delete_escalation_policy(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """DELETE with the demo token answers 204 yet must not actually delete
    the canned escalation policy (demo-token writes are no-ops).

    Fix: the original called ``refresh_from_db()`` twice (before and after the
    status assert) and ended with a vacuous ``is not None`` check — a local
    rebound by ``objects.get`` can never be None. One refresh after the DELETE
    is the real survival check: it raises ``DoesNotExist`` if the row is gone.
    """
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    escalation_policy = EscalationPolicy.objects.get(
        public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_1
    )
    url = reverse("api-public:escalation_policies-detail", args=[escalation_policy.public_primary_key])
    response = client.delete(url, format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_204_NO_CONTENT
    # Raises EscalationPolicy.DoesNotExist if the demo "delete" really
    # removed the row — i.e. this line IS the assertion of survival.
    escalation_policy.refresh_from_db()

View file

@ -1,82 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.alerts.models import AlertGroup
from apps.public_api import constants as public_api_constants
# Canned list payload: the single resolved demo incident with its three
# alerts, routed through the first demo route.
demo_incidents_payload = {
    "count": 1,
    "next": None,
    "previous": None,
    "results": [
        {
            "id": public_api_constants.DEMO_INCIDENT_ID,
            "integration_id": public_api_constants.DEMO_INTEGRATION_ID,
            "route_id": public_api_constants.DEMO_ROUTE_ID_1,
            "alerts_count": 3,
            "state": "resolved",
            "created_at": public_api_constants.DEMO_INCIDENT_CREATED_AT,
            "resolved_at": public_api_constants.DEMO_INCIDENT_RESOLVED_AT,
            "acknowledged_at": None,
            "title": None,
        }
    ],
}
@pytest.mark.django_db
def test_create_incidents(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Incidents cannot be created through the public API: POST is rejected.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.post(reverse("api-public:alert_groups-list"), format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED
@pytest.mark.django_db
def test_get_incidents(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Listing incidents with the demo token returns the canned payload.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(reverse("api-public:alert_groups-list"), format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_incidents_payload
@pytest.mark.django_db
def test_delete_incidents(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Deleting the demo incident answers 204 but the row count is unchanged
    # and the incident still exists (demo-token writes are no-ops).
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    list_url = reverse("api-public:alert_groups-list")
    incidents = AlertGroup.unarchived_objects.filter(public_primary_key=public_api_constants.DEMO_INCIDENT_ID)
    total_count = incidents.count()
    incident = incidents[0]
    response = client.delete(
        list_url + f"/{incident.public_primary_key}/",
        {"mode": "delete"},
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    new_count = AlertGroup.unarchived_objects.filter(public_primary_key=public_api_constants.DEMO_INCIDENT_ID).count()
    assert response.status_code == status.HTTP_204_NO_CONTENT
    incident.refresh_from_db()
    assert total_count == new_count
    assert incident is not None

View file

@ -1,239 +0,0 @@
from urllib.parse import urljoin
import pytest
from django.conf import settings
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.alerts.models import AlertReceiveChannel
from apps.public_api import constants as public_api_constants
# https://api-docs.amixr.io/#post-integration
# Canned response to POST: the demo Grafana integration with its masked link
# token and the default catch-all route.
demo_integration_post_payload = {
    "id": public_api_constants.DEMO_INTEGRATION_ID,
    "team_id": None,
    "name": "Grafana :blush:",
    "link": urljoin(settings.BASE_URL, f"/integrations/v1/grafana/{public_api_constants.DEMO_INTEGRATION_LINK_TOKEN}/"),
    "heartbeat": None,
    "default_route": {
        "escalation_chain_id": None,
        "id": public_api_constants.DEMO_ROUTE_ID_2,
        "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID},
    },
    "type": "grafana",
    # All templates are unset (None) for the demo integration.
    "templates": {
        "grouping_key": None,
        "resolve_signal": None,
        "acknowledge_signal": None,
        "slack": {"title": None, "message": None, "image_url": None},
        "web": {"title": None, "message": None, "image_url": None},
        "sms": {
            "title": None,
        },
        "phone_call": {
            "title": None,
        },
        "email": {
            "title": None,
            "message": None,
        },
        "telegram": {
            "title": None,
            "message": None,
            "image_url": None,
        },
    },
    "maintenance_mode": None,
    "maintenance_started_at": None,
    "maintenance_end_at": None,
}
# https://api-docs.amixr.io/#get-integration
# Canned GET/PUT response; identical content to the POST payload except for
# key ordering ("heartbeat" sits after "default_route"/"type" here).
demo_integration_payload = {
    "id": public_api_constants.DEMO_INTEGRATION_ID,
    "team_id": None,
    "name": "Grafana :blush:",
    "link": urljoin(settings.BASE_URL, f"/integrations/v1/grafana/{public_api_constants.DEMO_INTEGRATION_LINK_TOKEN}/"),
    "default_route": {
        "escalation_chain_id": None,
        "id": public_api_constants.DEMO_ROUTE_ID_2,
        "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID},
    },
    "type": "grafana",
    "heartbeat": None,
    "templates": {
        "grouping_key": None,
        "resolve_signal": None,
        "acknowledge_signal": None,
        "slack": {"title": None, "message": None, "image_url": None},
        "web": {"title": None, "message": None, "image_url": None},
        "sms": {
            "title": None,
        },
        "phone_call": {
            "title": None,
        },
        "email": {
            "title": None,
            "message": None,
        },
        "telegram": {
            "title": None,
            "message": None,
            "image_url": None,
        },
    },
    "maintenance_mode": None,
    "maintenance_started_at": None,
    "maintenance_end_at": None,
}
# https://api-docs.amixr.io/#list-integrations
# Canned paginated list: the single demo integration, same fields as above.
demo_integrations_payload = {
    "count": 1,
    "next": None,
    "previous": None,
    "results": [
        {
            "id": public_api_constants.DEMO_INTEGRATION_ID,
            "team_id": None,
            "name": "Grafana :blush:",
            "link": urljoin(
                settings.BASE_URL, f"/integrations/v1/grafana/{public_api_constants.DEMO_INTEGRATION_LINK_TOKEN}/"
            ),
            "default_route": {
                "escalation_chain_id": None,
                "id": public_api_constants.DEMO_ROUTE_ID_2,
                "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID},
            },
            "type": "grafana",
            "heartbeat": None,
            "templates": {
                "grouping_key": None,
                "resolve_signal": None,
                "acknowledge_signal": None,
                "slack": {
                    "title": None,
                    "message": None,
                    "image_url": None,
                },
                "web": {"title": None, "message": None, "image_url": None},
                "sms": {
                    "title": None,
                },
                "phone_call": {
                    "title": None,
                },
                "email": {
                    "title": None,
                    "message": None,
                },
                "telegram": {
                    "title": None,
                    "message": None,
                    "image_url": None,
                },
            },
            "maintenance_mode": None,
            "maintenance_started_at": None,
            "maintenance_end_at": None,
        },
    ],
}
@pytest.mark.django_db
def test_get_integrations(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Listing integrations with the demo token returns the canned payload.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(reverse("api-public:integrations-list"), format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_integrations_payload
@pytest.mark.django_db
def test_create_integration(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # POST with the demo token is a no-op: 201 plus the canned POST payload.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.post(
        reverse("api-public:integrations-list"),
        data={"type": "grafana"},
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert response.status_code == status.HTTP_201_CREATED
    assert response.json() == demo_integration_post_payload
@pytest.mark.django_db
def test_update_integration(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # PUT returns 200 but the canned integration payload is unchanged.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    integration = AlertReceiveChannel.objects.get(public_primary_key=public_api_constants.DEMO_INTEGRATION_ID)
    detail_url = reverse("api-public:integrations-detail", args=[integration.public_primary_key])
    response = client.put(detail_url, data={"name": "new_name"}, format="json", HTTP_AUTHORIZATION=token)
    integration.refresh_from_db()
    assert response.status_code == status.HTTP_200_OK
    assert response.json() == demo_integration_payload
@pytest.mark.django_db
def test_invalid_integration_type(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Even an invalid integration type is "accepted" for demo tokens: the
    # request is ignored and the canned POST payload is echoed back.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.post(
        reverse("api-public:integrations-list"),
        data={"type": "this_is_invalid_integration_type"},
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert response.status_code == status.HTTP_201_CREATED
    assert response.json() == demo_integration_post_payload
@pytest.mark.django_db
def test_delete_integration(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # DELETE answers 204 but the demo integration must survive.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    integration = AlertReceiveChannel.objects.get(public_primary_key=public_api_constants.DEMO_INTEGRATION_ID)
    detail_url = reverse("api-public:integrations-detail", args=[integration.public_primary_key])
    response = client.delete(detail_url, format="json", HTTP_AUTHORIZATION=token)
    assert response.status_code == status.HTTP_204_NO_CONTENT
    # refresh_from_db raises DoesNotExist if the row was actually removed.
    integration.refresh_from_db()
    assert integration is not None

View file

@ -1,172 +0,0 @@
import pytest
from django.urls import reverse
from django.utils import timezone
from rest_framework import status
from rest_framework.test import APIClient
from apps.public_api import constants as public_api_constants
from apps.schedules.models import CustomOnCallShift
# Canned serialized single-event demo shift.
demo_on_call_shift_payload_1 = {
    "id": public_api_constants.DEMO_ON_CALL_SHIFT_ID_1,
    "team_id": None,
    "name": public_api_constants.DEMO_ON_CALL_SHIFT_NAME_1,
    "type": "single_event",
    "time_zone": None,
    "level": 0,
    "start": public_api_constants.DEMO_ON_CALL_SHIFT_START_1,
    "duration": public_api_constants.DEMO_ON_CALL_SHIFT_DURATION,
    "users": [public_api_constants.DEMO_USER_ID],
}
# Canned serialized recurrent (bi-weekly) demo shift.
demo_on_call_shift_payload_2 = {
    "id": public_api_constants.DEMO_ON_CALL_SHIFT_ID_2,
    "team_id": None,
    "name": public_api_constants.DEMO_ON_CALL_SHIFT_NAME_2,
    "type": "recurrent_event",
    "time_zone": None,
    "level": 0,
    "start": public_api_constants.DEMO_ON_CALL_SHIFT_START_2,
    "duration": public_api_constants.DEMO_ON_CALL_SHIFT_DURATION,
    "frequency": "weekly",
    "interval": 2,
    "week_start": "SU",
    "users": [public_api_constants.DEMO_USER_ID],
    "by_day": public_api_constants.DEMO_ON_CALL_SHIFT_BY_DAY,
    "by_month": None,
    "by_monthday": None,
}
# Paginated list wrapper holding both canned shifts.
demo_on_call_shift_payload_list = {
    "count": 2,
    "next": None,
    "previous": None,
    "results": [demo_on_call_shift_payload_1, demo_on_call_shift_payload_2],
}
@pytest.mark.django_db
def test_demo_get_on_call_shift_list(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # Listing shifts with the demo token returns both canned shift payloads.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    response = client.get(reverse("api-public:on_call_shifts-list"), format="json", HTTP_AUTHORIZATION=f"{token}")
    assert response.status_code == status.HTTP_200_OK
    assert response.data == demo_on_call_shift_payload_list
@pytest.mark.django_db
@pytest.mark.parametrize(
    "demo_on_call_shift_id,payload",
    [
        (public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, demo_on_call_shift_payload_1),
        (public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, demo_on_call_shift_payload_2),
    ],
)
def test_demo_get_on_call_shift_1(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
    demo_on_call_shift_id,
    payload,
):
    # Each canned shift is retrievable by id with its expected serialization.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    detail_url = reverse("api-public:on_call_shifts-detail", kwargs={"pk": demo_on_call_shift_id})
    response = client.get(detail_url, format="json", HTTP_AUTHORIZATION=f"{token}")
    assert response.status_code == status.HTTP_200_OK
    assert response.data == payload
@pytest.mark.django_db
def test_demo_post_on_call_shift(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    # POST with the demo token is a no-op: 201 plus the canned first-shift
    # payload, regardless of the submitted data.
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    shift_data = {
        "schedule_id": public_api_constants.DEMO_SCHEDULE_ID_CALENDAR,
        "name": "New demo shift",
        "type": CustomOnCallShift.TYPE_SINGLE_EVENT,
        "start": timezone.now().replace(tzinfo=None, microsecond=0).isoformat(),
        "duration": 3600,
    }
    response = client.post(
        reverse("api-public:on_call_shifts-list"),
        data=shift_data,
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert response.status_code == status.HTTP_201_CREATED
    assert response.data == demo_on_call_shift_payload_1
@pytest.mark.django_db
@pytest.mark.parametrize(
    "demo_on_call_shift_id,payload",
    [
        (public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, demo_on_call_shift_payload_1),
        (public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, demo_on_call_shift_payload_2),
    ],
)
def test_demo_update_on_call_shift(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
    demo_on_call_shift_id,
    payload,
):
    """PUT against a demo shift responds 200 with the unchanged canned payload."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    resp = client.put(
        reverse("api-public:on_call_shifts-detail", kwargs={"pk": demo_on_call_shift_id}),
        data={"name": "Updated demo name"},
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.data == payload
@pytest.mark.django_db
@pytest.mark.parametrize(
    "demo_on_call_shift_id",
    [
        public_api_constants.DEMO_ON_CALL_SHIFT_ID_1,
        public_api_constants.DEMO_ON_CALL_SHIFT_ID_2,
    ],
)
def test_demo_delete_on_call_shift(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
    demo_on_call_shift_id,
):
    """DELETE with a demo token responds 204 but leaves the demo shift in the database."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    resp = client.delete(
        reverse("api-public:on_call_shifts-detail", kwargs={"pk": demo_on_call_shift_id}),
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_204_NO_CONTENT
    # the demo record must survive the delete request
    assert CustomOnCallShift.objects.filter(public_primary_key=demo_on_call_shift_id).exists()

View file

@ -1,225 +0,0 @@
import pytest
from django.urls import reverse
from django.utils import timezone
from rest_framework import status
from rest_framework.test import APIClient
from apps.base.models import UserNotificationPolicy
from apps.base.models.user_notification_policy import NotificationChannelPublicAPIOptions
from apps.public_api import constants as public_api_constants
# "type" value used by the public API for a wait/delay notification step.
TYPE_WAIT = "wait"

# Canned payloads the public API returns for the demo user's personal
# notification rules (one per rule id defined in public_api_constants).
demo_personal_notification_rule_payload_1 = {
    "id": public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1,
    "user_id": public_api_constants.DEMO_USER_ID,
    "position": 0,
    "important": False,
    "type": "notify_by_sms",
}
demo_personal_notification_rule_payload_2 = {
    "id": public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_2,
    "user_id": public_api_constants.DEMO_USER_ID,
    "position": 1,
    # wait steps additionally expose their delay in seconds
    "duration": timezone.timedelta(seconds=300).seconds,
    "important": False,
    "type": "wait",
}
demo_personal_notification_rule_payload_3 = {
    "id": public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_3,
    "user_id": public_api_constants.DEMO_USER_ID,
    "position": 2,
    "important": False,
    "type": "notify_by_phone_call",
}
demo_personal_notification_rule_payload_4 = {
    "id": public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_4,
    "user_id": public_api_constants.DEMO_USER_ID,
    "position": 0,
    "important": True,
    "type": "notify_by_phone_call",
}

# Paginated list response containing all four demo rules.
demo_personal_notification_rules_payload = {
    "count": 4,
    "next": None,
    "previous": None,
    "results": [
        demo_personal_notification_rule_payload_1,
        demo_personal_notification_rule_payload_2,
        demo_personal_notification_rule_payload_3,
        demo_personal_notification_rule_payload_4,
    ],
}
# Paginated list response filtered to important=false (rules 1-3).
demo_personal_notification_rules_non_important_payload = {
    "count": 3,
    "next": None,
    "previous": None,
    "results": [
        demo_personal_notification_rule_payload_1,
        demo_personal_notification_rule_payload_2,
        demo_personal_notification_rule_payload_3,
    ],
}
# Paginated list response filtered to important=true (rule 4 only).
demo_personal_notification_rules_important_payload = {
    "count": 1,
    "next": None,
    "previous": None,
    "results": [
        demo_personal_notification_rule_payload_4,
    ],
}
@pytest.mark.django_db
def test_get_personal_notification_rule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """Fetching a single demo personal notification rule returns its canned payload."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    rule = UserNotificationPolicy.objects.get(
        public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1
    )
    url = reverse(
        "api-public:personal_notification_rules-detail",
        kwargs={"pk": rule.public_primary_key},
    )
    resp = APIClient().get(url, format="json", HTTP_AUTHORIZATION=token)
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_personal_notification_rule_payload_1
@pytest.mark.django_db
def test_get_personal_notification_rules_list(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """Listing personal notification rules with a demo token returns all four canned rules."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    resp = APIClient().get(
        reverse("api-public:personal_notification_rules-list"),
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_personal_notification_rules_payload
@pytest.mark.django_db
def test_get_personal_notification_rules_list_important(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """?important=true narrows the list to the single important demo rule."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    base_url = reverse("api-public:personal_notification_rules-list")
    resp = APIClient().get(f"{base_url}?important=true", format="json", HTTP_AUTHORIZATION=token)
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_personal_notification_rules_important_payload
@pytest.mark.django_db
def test_get_personal_notification_rules_list_non_important(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """?important=false narrows the list to the three non-important demo rules."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    base_url = reverse("api-public:personal_notification_rules-list")
    resp = APIClient().get(f"{base_url}?important=false", format="json", HTTP_AUTHORIZATION=token)
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_personal_notification_rules_non_important_payload
@pytest.mark.django_db
def test_update_personal_notification_rule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """PUT on a demo rule responds 200 with the canned payload and changes nothing in the DB."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    rule = UserNotificationPolicy.objects.get(
        public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1
    )
    url = reverse(
        "api-public:personal_notification_rules-detail",
        kwargs={"pk": rule.public_primary_key},
    )
    data_to_update = {
        "type": NotificationChannelPublicAPIOptions.LABELS[UserNotificationPolicy.NotificationChannel.SLACK]
    }
    resp = APIClient().put(url, format="json", HTTP_AUTHORIZATION=token, data=data_to_update)
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_personal_notification_rule_payload_1

    # the stored rule must not have been switched to Slack
    rule.refresh_from_db()
    assert rule.notify_by != UserNotificationPolicy.NotificationChannel.SLACK
@pytest.mark.django_db
def test_create_personal_notification_rule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """POSTing a new wait rule with a demo token is answered with the canned first rule."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    data_for_create = {
        "user_id": user.public_primary_key,
        "type": TYPE_WAIT,
        "position": 1,
        "duration": timezone.timedelta(seconds=300).seconds,
    }
    resp = APIClient().post(
        reverse("api-public:personal_notification_rules-list"),
        format="json",
        HTTP_AUTHORIZATION=token,
        data=data_for_create,
    )
    assert resp.status_code == status.HTTP_201_CREATED
    assert resp.json() == demo_personal_notification_rule_payload_1
@pytest.mark.django_db
def test_delete_personal_notification_rule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """DELETE on a demo rule responds 204 but the rule remains in the database."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    rule = UserNotificationPolicy.objects.get(
        public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1
    )
    url = reverse(
        "api-public:personal_notification_rules-detail",
        kwargs={"pk": rule.public_primary_key},
    )
    resp = APIClient().delete(url, format="json", HTTP_AUTHORIZATION=token)
    assert resp.status_code == status.HTTP_204_NO_CONTENT

    # refresh_from_db would raise if the row had actually been deleted
    rule.refresh_from_db()
    assert rule is not None

View file

@ -1,117 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.alerts.models import ResolutionNote
from apps.public_api import constants as public_api_constants
# Canned payload the public API returns for the single demo resolution note.
demo_resolution_note_payload = {
    "id": public_api_constants.DEMO_RESOLUTION_NOTE_ID,
    "alert_group_id": public_api_constants.DEMO_INCIDENT_ID,
    "author": public_api_constants.DEMO_USER_ID,
    "source": public_api_constants.DEMO_RESOLUTION_NOTE_SOURCE,
    "created_at": public_api_constants.DEMO_RESOLUTION_NOTE_CREATED_AT,
    "text": public_api_constants.DEMO_RESOLUTION_NOTE_TEXT,
}
# Paginated list wrapper around the single demo resolution note.
demo_resolution_note_payload_list = {
    "count": 1,
    "next": None,
    "previous": None,
    "results": [demo_resolution_note_payload],
}
@pytest.mark.django_db
def test_demo_get_resolution_note_list(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """Listing resolution notes with a demo token returns the canned single-note list."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    resp = client.get(
        reverse("api-public:resolution_notes-list"),
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.data == demo_resolution_note_payload_list
@pytest.mark.django_db
def test_demo_get_resolution_note(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """GET on the demo resolution note detail URL returns the canned note payload."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    resp = client.get(
        reverse("api-public:resolution_notes-detail", kwargs={"pk": public_api_constants.DEMO_RESOLUTION_NOTE_ID}),
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.data == demo_resolution_note_payload
@pytest.mark.django_db
def test_demo_post_resolution_note(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """POSTing a new note with a demo token is answered with the canned demo note."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    resp = client.post(
        reverse("api-public:resolution_notes-list"),
        data={"alert_group_id": public_api_constants.DEMO_INCIDENT_ID, "text": "New demo text"},
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_201_CREATED
    assert resp.data == demo_resolution_note_payload
@pytest.mark.django_db
def test_demo_update_resolution_note(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """PUT on the demo resolution note responds 200 with the unchanged canned payload."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    resp = client.put(
        reverse("api-public:resolution_notes-detail", kwargs={"pk": public_api_constants.DEMO_RESOLUTION_NOTE_ID}),
        data={"alert_group_id": public_api_constants.DEMO_INCIDENT_ID, "text": "Updated demo text"},
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.data == demo_resolution_note_payload
@pytest.mark.django_db
def test_demo_delete_resolution_note(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """DELETE with a demo token responds 204 but leaves the demo note in the database."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    resp = client.delete(
        reverse("api-public:resolution_notes-detail", kwargs={"pk": public_api_constants.DEMO_RESOLUTION_NOTE_ID}),
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_204_NO_CONTENT
    # the demo note must survive the delete request
    assert ResolutionNote.objects.filter(public_primary_key=public_api_constants.DEMO_RESOLUTION_NOTE_ID).exists()

View file

@ -1,182 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.alerts.models import ChannelFilter
from apps.public_api import constants as public_api_constants
# Canned payload for the first demo route (a regex-matching, non-default route).
# https://api-docs.amixr.io/#get-route
demo_route_payload = {
    "id": public_api_constants.DEMO_ROUTE_ID_1,
    "escalation_chain_id": None,
    "integration_id": public_api_constants.DEMO_INTEGRATION_ID,
    "routing_regex": "us-(east|west)",
    "position": 0,
    "is_the_last_route": False,
    "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID},
}

# Canned paginated list of both demo routes: the regex route above plus the
# catch-all ".*" default route that is always last.
# https://api-docs.amixr.io/#list-routes
demo_routes_payload = {
    "count": 2,
    "next": None,
    "previous": None,
    "results": [
        {
            "id": public_api_constants.DEMO_ROUTE_ID_1,
            "escalation_chain_id": None,
            "integration_id": public_api_constants.DEMO_INTEGRATION_ID,
            "routing_regex": "us-(east|west)",
            "position": 0,
            "is_the_last_route": False,
            "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID},
        },
        {
            "id": public_api_constants.DEMO_ROUTE_ID_2,
            "escalation_chain_id": None,
            "integration_id": public_api_constants.DEMO_INTEGRATION_ID,
            "routing_regex": ".*",
            "position": 1,
            "is_the_last_route": True,
            "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID},
        },
    ],
}
@pytest.mark.django_db
def test_get_route(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """GET on the first demo route's detail URL returns its canned payload."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    route = ChannelFilter.objects.get(public_primary_key=public_api_constants.DEMO_ROUTE_ID_1)
    resp = APIClient().get(
        reverse("api-public:routes-detail", kwargs={"pk": route.public_primary_key}),
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_route_payload
@pytest.mark.django_db
def test_get_routes_list(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """Listing routes with a demo token returns both canned demo routes."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    resp = APIClient().get(
        reverse("api-public:routes-list"),
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_routes_payload
@pytest.mark.django_db
def test_get_routes_filter_by_integration_id(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """Filtering by the demo integration id still yields the full canned route list."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    base_url = reverse("api-public:routes-list")
    resp = APIClient().get(
        f"{base_url}?integration_id={public_api_constants.DEMO_INTEGRATION_ID}",
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_routes_payload
@pytest.mark.django_db
def test_create_route(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """POSTing a new route with a demo token is answered with the canned demo route."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    data_for_create = {
        "integration_id": public_api_constants.DEMO_INTEGRATION_ID,
        "routing_regex": "testreg",
    }
    resp = APIClient().post(
        reverse("api-public:routes-list"),
        format="json",
        HTTP_AUTHORIZATION=token,
        data=data_for_create,
    )
    assert resp.status_code == status.HTTP_201_CREATED
    assert resp.json() == demo_route_payload
@pytest.mark.django_db
def test_invalid_route_data(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """Even an invalid create body succeeds under a demo token and returns the canned route."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    data_for_create = {
        "integration_id": public_api_constants.DEMO_INTEGRATION_ID,
        "routing_regex": None,  # routing_regex cannot be null for non-default filters
    }
    resp = APIClient().post(
        reverse("api-public:routes-list"),
        format="json",
        HTTP_AUTHORIZATION=token,
        data=data_for_create,
    )
    # the demo token short-circuits the request, so no validation error is raised
    assert resp.status_code == status.HTTP_201_CREATED
    assert resp.json() == demo_route_payload
@pytest.mark.django_db
def test_update_route(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """PUT on a demo route responds 200 with the canned payload and changes nothing in the DB."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    route = ChannelFilter.objects.get(public_primary_key=public_api_constants.DEMO_ROUTE_ID_1)
    data_to_update = {
        "routing_regex": "testreg_updated",
    }
    assert route.filtering_term != data_to_update["routing_regex"]
    resp = APIClient().put(
        reverse("api-public:routes-detail", kwargs={"pk": route.public_primary_key}),
        format="json",
        HTTP_AUTHORIZATION=token,
        data=data_to_update,
    )
    assert resp.status_code == status.HTTP_200_OK

    # the stored route must not have picked up the new regex
    route.refresh_from_db()
    assert resp.json() == demo_route_payload
    assert route.filtering_term != data_to_update["routing_regex"]
@pytest.mark.django_db
def test_delete_route(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """DELETE on a demo route responds 204 but the route remains in the database."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    route = ChannelFilter.objects.get(public_primary_key=public_api_constants.DEMO_ROUTE_ID_1)
    resp = APIClient().delete(
        reverse("api-public:routes-detail", kwargs={"pk": route.public_primary_key}),
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_204_NO_CONTENT

    # refresh_from_db would raise if the row had actually been deleted
    route.refresh_from_db()
    assert route is not None

View file

@ -1,164 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.public_api import constants as public_api_constants
from apps.schedules.models import OnCallSchedule
# Canned payload for the demo iCal-backed schedule.
demo_ical_schedule_payload = {
    "id": public_api_constants.DEMO_SCHEDULE_ID_ICAL,
    "team_id": None,
    "name": public_api_constants.DEMO_SCHEDULE_NAME_ICAL,
    "type": "ical",
    "ical_url_primary": public_api_constants.DEMO_SCHEDULE_ICAL_URL_PRIMARY,
    "ical_url_overrides": public_api_constants.DEMO_SCHEDULE_ICAL_URL_OVERRIDES,
    "on_call_now": [public_api_constants.DEMO_USER_ID],
    "slack": {
        "channel_id": public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID,
        "user_group_id": public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID,
    },
}
# Canned payload for the demo calendar-type schedule (built from two demo shifts).
demo_calendar_schedule_payload = {
    "id": public_api_constants.DEMO_SCHEDULE_ID_CALENDAR,
    "team_id": None,
    "name": public_api_constants.DEMO_SCHEDULE_NAME_CALENDAR,
    "type": "calendar",
    # NOTE(review): casing "New_york" (sic) — presumably mirrors the demo
    # fixture's stored value; confirm against the demo data setup
    "time_zone": "America/New_york",
    "on_call_now": [public_api_constants.DEMO_USER_ID],
    "shifts": [
        public_api_constants.DEMO_ON_CALL_SHIFT_ID_1,
        public_api_constants.DEMO_ON_CALL_SHIFT_ID_2,
    ],
    "slack": {
        "channel_id": public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID,
        "user_group_id": public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID,
    },
    "ical_url_overrides": None,
}
# Paginated list response containing both demo schedules.
demo_schedules_payload = {
    "count": 2,
    "next": None,
    "previous": None,
    "results": [
        demo_ical_schedule_payload,
        demo_calendar_schedule_payload,
    ],
}
@pytest.mark.django_db
def test_get_schedule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """GET on the demo iCal schedule's detail URL returns its canned payload."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    schedule = OnCallSchedule.objects.get(public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL)
    client = APIClient()
    resp = client.get(
        reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key}),
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.data == demo_ical_schedule_payload
@pytest.mark.django_db
def test_create_schedule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """POSTing a new schedule with a demo token is answered with the canned iCal schedule."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    data = {
        "name": "schedule test name",
        "type": "ical",
    }
    resp = APIClient().post(
        reverse("api-public:schedules-list"),
        data=data,
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_201_CREATED
    # check that demo instance was returned
    assert resp.data == demo_ical_schedule_payload
@pytest.mark.django_db
def test_update_ical_schedule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """PUT on the demo iCal schedule responds 200 with the canned payload; DB is untouched."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    schedule = OnCallSchedule.objects.get(public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL)
    data = {
        "name": "NEW NAME",
    }
    resp = APIClient().put(
        reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key}),
        data=data,
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_200_OK

    # the stored schedule must not have been renamed
    schedule.refresh_from_db()
    assert schedule.name != data["name"]
    assert resp.data == demo_ical_schedule_payload
@pytest.mark.django_db
def test_update_calendar_schedule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """PUT on the demo calendar schedule responds 200 with the canned payload; DB is untouched."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    schedule = OnCallSchedule.objects.get(public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_CALENDAR)
    data = {
        "name": "NEW NAME",
    }
    resp = APIClient().put(
        reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key}),
        data=data,
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_200_OK

    # the stored schedule must not have been renamed
    schedule.refresh_from_db()
    assert schedule.name != data["name"]
    assert resp.data == demo_calendar_schedule_payload
@pytest.mark.django_db
def test_delete_schedule(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """DELETE on the demo iCal schedule responds 204 but the schedule remains in the DB."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    schedule = OnCallSchedule.objects.get(public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL)
    resp = APIClient().delete(
        reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key}),
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_204_NO_CONTENT

    # refresh_from_db would raise if the row had actually been deleted
    schedule.refresh_from_db()
    assert schedule is not None

View file

@ -1,34 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.public_api import constants as public_api_constants
# Canned paginated list of the single demo Slack channel.
demo_slack_channels_payload = {
    "count": 1,
    "next": None,
    "previous": None,
    "results": [
        {
            "name": public_api_constants.DEMO_SLACK_CHANNEL_NAME,
            "slack_id": public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID,
        }
    ],
}
@pytest.mark.django_db
def test_get_slack_channels_list(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """Listing Slack channels with a demo token returns the canned single-channel list."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    resp = APIClient().get(
        reverse("api-public:slack_channels-list"),
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_slack_channels_payload

View file

@ -1,36 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.public_api import constants as public_api_constants
# Canned payload for the single demo Slack-based user group.
demo_user_group_payload = {
    "id": public_api_constants.DEMO_SLACK_USER_GROUP_ID,
    "type": "slack_based",
    "slack": {
        "id": public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID,
        "name": public_api_constants.DEMO_SLACK_USER_GROUP_NAME,
        "handle": public_api_constants.DEMO_SLACK_USER_GROUP_HANDLE,
    },
}
# Paginated list wrapper around the single demo user group.
demo_user_group_payload_list = {"count": 1, "next": None, "previous": None, "results": [demo_user_group_payload]}
@pytest.mark.django_db
def test_demo_get_user_groups_list(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_data_for_demo_token,
):
    """Listing user groups with a demo token returns the canned single-group list."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    make_data_for_demo_token(organization, user)
    client = APIClient()
    resp = client.get(
        reverse("api-public:user_groups-list"),
        format="json",
        HTTP_AUTHORIZATION=f"{token}",
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.data == demo_user_group_payload_list

View file

@ -1,91 +0,0 @@
import pytest
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from apps.public_api import constants as public_api_constants
# NB can compare with https://api-docs.amixr.io/#get-user
# Canned payload for the demo user, as returned by the user detail endpoint.
# NB can compare with https://api-docs.amixr.io/#get-user
demo_token_user_payload = {
    "id": public_api_constants.DEMO_USER_ID,
    "email": public_api_constants.DEMO_USER_EMAIL,
    "slack": {"user_id": public_api_constants.DEMO_SLACK_USER_ID, "team_id": public_api_constants.DEMO_SLACK_TEAM_ID},
    "username": public_api_constants.DEMO_USER_USERNAME,
    "role": "admin",
    "is_phone_number_verified": False,
}
# Canned paginated list containing only the demo user.
# https://api-docs.amixr.io/#list-users
demo_token_users_payload = {
    "count": 1,
    "next": None,
    "previous": None,
    "results": [
        {
            "id": public_api_constants.DEMO_USER_ID,
            "email": public_api_constants.DEMO_USER_EMAIL,
            "slack": {
                "user_id": public_api_constants.DEMO_SLACK_USER_ID,
                "team_id": public_api_constants.DEMO_SLACK_TEAM_ID,
            },
            "username": public_api_constants.DEMO_USER_USERNAME,
            "role": "admin",
            "is_phone_number_verified": False,
        }
    ],
}
@pytest.mark.django_db
def test_get_user(
    make_organization_and_user_with_slack_identities_for_demo_token,
):
    """Both the explicit pk and the "current" alias resolve to the demo user payload."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    client = APIClient()
    for pk in (user.public_primary_key, "current"):
        url = reverse("api-public:users-detail", args=[pk])
        resp = client.get(url, format="json", HTTP_AUTHORIZATION=token)
        assert resp.status_code == status.HTTP_200_OK
        assert resp.json() == demo_token_user_payload
@pytest.mark.django_db
def test_get_users(
    make_organization_and_user_with_slack_identities_for_demo_token,
):
    """Listing users with a demo token returns the canned single-user list."""
    organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token()
    resp = APIClient().get(
        reverse("api-public:users-list"),
        format="json",
        HTTP_AUTHORIZATION=token,
    )
    assert resp.status_code == status.HTTP_200_OK
    assert resp.json() == demo_token_users_payload
@pytest.mark.django_db
def test_forbidden_access(
    make_organization_and_user_with_slack_identities_for_demo_token,
    make_organization_and_user_with_token,
):
    """A token from another organization cannot see the demo user (404, not 403 — no leak)."""
    _, user, _ = make_organization_and_user_with_slack_identities_for_demo_token()
    _, _, another_org_token = make_organization_and_user_with_token()
    url = reverse("api-public:users-detail", args=[user.public_primary_key])
    resp = APIClient().get(url, format="json", HTTP_AUTHORIZATION=another_org_token)
    assert resp.status_code == status.HTTP_404_NOT_FOUND

View file

@ -0,0 +1,3 @@
from .info_throttler import InfoThrottler # noqa: F401
from .phone_notification_throttler import PhoneNotificationThrottler # noqa: F401
from .user_throttle import UserThrottle # noqa: F401

View file

@ -0,0 +1,6 @@
from rest_framework.throttling import UserRateThrottle
class InfoThrottler(UserRateThrottle):
    """Per-user rate limit for the "info" throttle scope: 100 requests per minute."""

    scope = "info"
    rate = "100/m"

View file

@ -0,0 +1,6 @@
from rest_framework.throttling import UserRateThrottle
class PhoneNotificationThrottler(UserRateThrottle):
    """Per-user rate limit for the "phone_notification" throttle scope: 60 requests per minute."""

    scope = "phone_notification"
    rate = "60/m"

View file

@ -30,4 +30,6 @@ router.register(r"teams", views.TeamView, basename="teams")
# URL routes for this app: router-registered viewsets plus explicit endpoints.
urlpatterns = [
    path("", include(router.urls)),
    # optional_slash_path — presumably registers the route both with and
    # without a trailing slash; confirm against its definition
    optional_slash_path("info", views.InfoView.as_view(), name="info"),
    optional_slash_path("make_call", views.MakeCallView.as_view(), name="make_call"),
    optional_slash_path("send_sms", views.SendSMSView.as_view(), name="send_sms"),
]

Some files were not shown because too many files have changed in this diff Show more