diff --git a/.dockerignore b/.dockerignore index e541b3d4..6561a0ad 100644 --- a/.dockerignore +++ b/.dockerignore @@ -5,4 +5,5 @@ frontend/node_modules frontend/build package-lock.json ./engine/extensions -.env \ No newline at end of file +.env +.env_hobby diff --git a/.drone.yml b/.drone.yml index 4fc14748..46c9d5c4 100644 --- a/.drone.yml +++ b/.drone.yml @@ -1,3 +1,4 @@ +--- kind: pipeline type: docker name: Build and Release @@ -11,7 +12,6 @@ steps: - cd grafana-plugin/ - if [ -z "$DRONE_TAG" ]; then echo "No tag, not modifying version"; else jq '.version="${DRONE_TAG}"' package.json > package.new && mv package.new package.json && jq '.version' package.json; fi - yarn --network-timeout 500000 - - yarn plop "Append build info" "${DRONE_TAG}" "${DRONE_BRANCH}" "${DRONE_COMMIT}" - yarn build - ls ./ @@ -30,8 +30,8 @@ steps: - yarn ci-build:finish - yarn ci-package - cd ci/dist - - zip -r grafana-oncall-app-${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}.zip ./grafana-oncall-app - - if [ -z "$DRONE_TAG" ]; then echo "No tag, skipping archive"; else cp grafana-oncall-app-${DRONE_BRANCH}-${DRONE_BUILD_NUMBER}.zip grafana-oncall-app-${DRONE_TAG}.zip; fi + - zip -r grafana-oncall-app.zip ./grafana-oncall-app + - if [ -z "$DRONE_TAG" ]; then echo "No tag, skipping archive"; else cp grafana-oncall-app.zip grafana-oncall-app-${DRONE_TAG}.zip; fi - name: Publish Plugin to GCS (release) image: plugins/gcs @@ -84,7 +84,7 @@ steps: - pre-commit run black --all-files - pre-commit run flake8 --all-files - - name: Test Backend + - name: Unit Test Backend image: python:3.9 environment: DJANGO_SETTINGS_MODULE: settings.ci-test @@ -92,10 +92,8 @@ steps: commands: - apt-get update && apt-get install -y netcat - cd engine/ - - mkdir sqlite_data - pip install -r requirements.txt - - pytest --ds=settings.ci-test - - rm -rf sqlite_data + - ./wait_for_test_mysql_start.sh && pytest --ds=settings.ci-test depends_on: - rabbit_test @@ -124,7 +122,7 @@ steps: from_secret: gcr_admin depends_on: - Lint 
Backend - - Test Backend + - Unit Test Backend - Image Tag - name: Build and Push Engine Docker Image Backend to Dockerhub @@ -139,14 +137,14 @@ steps: from_secret: docker_username depends_on: - Lint Backend - - Test Backend + - Unit Test Backend - Image Tag when: ref: - refs/heads/dev - refs/tags/v*.*.* -# Services for Test Backend +# Services for Unit Test Backend services: - name: rabbit_test image: rabbitmq:3.7.19 @@ -154,10 +152,23 @@ services: RABBITMQ_DEFAULT_USER: rabbitmq RABBITMQ_DEFAULT_PASS: rabbitmq + - name: mysql_test + image: mysql:5.7.25 + environment: + MYSQL_DATABASE: oncall_local_dev + MYSQL_ROOT_PASSWORD: local_dev_pwd + trigger: event: - - push + include: - tag + - push + - pull_request + ref: + include: + - refs/heads/main + - refs/heads/dev + - refs/tags/v*.*.* --- # Secret for pulling docker images. @@ -227,4 +238,9 @@ get: name: machine-user-token path: infra/data/ci/drone kind: secret -name: drone_token \ No newline at end of file +name: drone_token +--- +kind: signature +hmac: 7621bb1ccfcbec9f92c385670f2b2790859aba25f31c4936997123906fb102c0 + +... 
diff --git a/.env.example b/.env.example index 6be42e97..529d3ce9 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,5 @@ +RUNSERVER_PORT=8080 + SLACK_CLIENT_OAUTH_ID= SLACK_CLIENT_OAUTH_SECRET= SLACK_API_TOKEN= @@ -19,13 +21,13 @@ SENDGRID_FROM_EMAIL= DJANGO_SETTINGS_MODULE=settings.dev SECRET_KEY=jkashdkjashdkjh -BASE_URL=http://localhost:8000 +BASE_URL=http://localhost:8080 -FEATURE_TELEGRAM_INTEGRATION_ENABLED= +FEATURE_TELEGRAM_INTEGRATION_ENABLED=True FEATURE_SLACK_INTEGRATION_ENABLED=True FEATURE_EXTRA_MESSAGING_BACKENDS_ENABLED= -SLACK_INSTALL_RETURN_REDIRECT_HOST=http://localhost:8000 +SLACK_INSTALL_RETURN_REDIRECT_HOST=http://localhost:8080 SOCIAL_AUTH_REDIRECT_IS_HTTPS=False GRAFANA_INCIDENT_STATIC_API_KEY= diff --git a/.github/workflows/backend-ci.yml b/.github/workflows/backend-ci.yml deleted file mode 100644 index b6095f99..00000000 --- a/.github/workflows/backend-ci.yml +++ /dev/null @@ -1,39 +0,0 @@ -name: backend-ci - -on: - push: - branches: - - main - pull_request: - -jobs: - lint: - name: Backend Checks - runs-on: ubuntu-latest - container: python:3.9 - env: - DJANGO_SETTINGS_MODULE: settings.ci-test - SLACK_CLIENT_OAUTH_ID: 1 - services: - rabbit_test: - image: rabbitmq:3.7.19 - env: - RABBITMQ_DEFAULT_USER: rabbitmq - RABBITMQ_DEFAULT_PASS: rabbitmq - - steps: - - uses: actions/checkout@v2 - - name: Lint Backend - run: | - pip install $(grep "pre-commit" engine/requirements.txt) - pre-commit run isort --all-files - pre-commit run black --all-files - pre-commit run flake8 --all-files - - - name: Test Backend - run: | - apt-get update && apt-get install -y netcat - cd engine/ - mkdir sqlite_data - pip install -r requirements.txt - pytest --ds=settings.ci-test \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..38f061d3 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,56 @@ +name: ci + +on: + push: + branches: + - main + - dev + pull_request: + +jobs: + 
lint: + runs-on: ubuntu-latest + container: python:3.9 + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: 14 + - name: Build + run: | + pip install $(grep "pre-commit" engine/requirements.txt) + npm install -g yarn + cd grafana-plugin/ + yarn --network-timeout 500000 + yarn build + - name: Lint All + run: | + pre-commit run --all-files + + + unit-test-backend: + runs-on: ubuntu-latest + container: python:3.9 + env: + DJANGO_SETTINGS_MODULE: settings.ci-test + SLACK_CLIENT_OAUTH_ID: 1 + services: + rabbit_test: + image: rabbitmq:3.7.19 + env: + RABBITMQ_DEFAULT_USER: rabbitmq + RABBITMQ_DEFAULT_PASS: rabbitmq + mysql_test: + image: mysql:5.7.25 + env: + MYSQL_DATABASE: oncall_local_dev + MYSQL_ROOT_PASSWORD: local_dev_pwd + + steps: + - uses: actions/checkout@v2 + - name: Unit Test Backend + run: | + apt-get update && apt-get install -y netcat + cd engine/ + pip install -r requirements.txt + ./wait_for_test_mysql_start.sh && pytest --ds=settings.ci-test -x diff --git a/.github/workflows/frontend-ci.yml b/.github/workflows/frontend-ci.yml deleted file mode 100644 index b76cf241..00000000 --- a/.github/workflows/frontend-ci.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: frontend-ci - -on: - push: - branches: - - main - pull_request: - -jobs: - lint: - name: Frontend Checks - runs-on: ubuntu-latest - env: - DJANGO_SETTINGS_MODULE: settings.ci-test - steps: - - uses: actions/checkout@v2 - - name: Build Frontend Plugin - run: | - cd grafana-plugin/ - yarn --network-timeout 500000 - yarn build diff --git a/.github/workflows/synk.yml b/.github/workflows/synk.yml new file mode 100644 index 00000000..e4af1c72 --- /dev/null +++ b/.github/workflows/synk.yml @@ -0,0 +1,30 @@ +name: snyk + +on: + push: + branches: [ main,dev ] + pull_request: + branches: [ main,dev ] + +jobs: + security-scan: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.9' + - uses: 
actions/setup-node@v3 + with: + node-version: 14 + - uses: snyk/actions/setup@master + - name: Install Dependencies + run: | + pip install -r engine/requirements.txt + cd grafana-plugin/ + yarn --network-timeout 500000 + - name: Run Snyk + continue-on-error: true + run: snyk test --all-projects --severity-threshold=high + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index ae81aab5..b00b88a2 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ *.pyc venv .env +.env_hobby .vscode dump.rdb .idea diff --git a/CHANGELOG.md b/CHANGELOG.md index 940ce36a..8893332c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ # Change Log -## 1.0.0 (2022-02-02) +## 0.0.71 (2022-06-06) + +- Initial Release \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..3d4caa4f --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +- The use of sexualized language or imagery and unwelcome sexual attention or advances +- Trolling, insulting/derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or electronic address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at conduct@grafana.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/DEVELOPER.md b/DEVELOPER.md index fd4da888..37a0a526 100644 --- a/DEVELOPER.md +++ b/DEVELOPER.md @@ -28,7 +28,7 @@ 1. Start stateful services (RabbitMQ, Redis, Grafana with mounted plugin folder) ```bash -docker-compose -f developer-docker-compose.yml up -d +docker-compose -f docker-compose-developer.yml up -d ``` 2. Prepare a python environment: @@ -53,13 +53,10 @@ export $(grep -v '^#' .env | xargs -0) # Hint: there is a known issue with uwsgi. It's not used in the local dev environment. Feel free to comment it in `engine/requirements.txt`. cd engine && pip install -r requirements.txt -# Create folder for database -mkdir sqlite_data - # Migrate the DB: python manage.py migrate -# Create user for django admin panel: +# Create user for django admin panel (if you need it): python manage.py createsuperuser ``` @@ -67,16 +64,16 @@ python manage.py createsuperuser 3. 
Launch the backend: ```bash # Http server: -python manage.py runserver +python manage.py runserver 8080 -# Worker for background tasks(run it in the parallel terminal, don't forget to export .env there) +# Worker for background tasks (run it in the parallel terminal, don't forget to export .env there) python manage.py start_celery # Additionally you could launch the worker with periodic tasks launcher (99% you don't need this) celery -A engine beat -l info ``` -4. All set! Check out internal API endpoints at http://localhost:8000/. +4. All set! Check out internal API endpoints at http://localhost:8080/. ### Frontend setup @@ -105,9 +102,9 @@ python manage.py issue_invite_for_the_frontend --override 6. Some configuration fields will appear be available. Fill them out and click Initialize OnCall ``` OnCall API URL: -http://host.docker.internal:8000 +http://host.docker.internal:8080 -OnCall Invitation Token (Single use token to connect Grafana instance): +Invitation Token (Single use token to connect Grafana instance): Response from the invite generator command (check above) Grafana URL (URL OnCall will use to talk to Grafana instance): @@ -119,7 +116,7 @@ host IP from inside the container by running: ```bash /sbin/ip route|awk '/default/ { print $3 }' -# Alternatively add host.docker.internal as an extra_host for grafana in developer-docker-compose.yml +# Alternatively add host.docker.internal as an extra_host for grafana in docker-compose-developer.yml extra_hosts: - "host.docker.internal:host-gateway" @@ -127,161 +124,7 @@ extra_hosts: ### Slack application setup -This instruction is also applicable if you set up self-hosted OnCall. - -1. Start a [localtunnel](https://github.com/localtunnel/localtunnel) reverse proxy to make oncall engine api accessible to slack (if you don't have OnCall backend accessible from https), -```bash -# Choose the unique prefix instead of pretty-turkey-83 -# Localtunnel will generate an url, e.g. 
https://pretty-turkey-83.loca.lt -# it is referred as below -lt --port 8000 -s pretty-turkey-83 --print-requests -``` - -2. [Create a Slack Workspace](https://slack.com/create) for development. - -3. Go to https://api.slack.com/apps and click Create New App button - -4. Select `From an app manifest` option and choose the right workspace - -5. Copy and paste the following block with the correct and fields - -
- Click to expand! - - ```yaml - _metadata: - major_version: 1 - minor_version: 1 - display_information: - name: - features: - app_home: - home_tab_enabled: true - messages_tab_enabled: true - messages_tab_read_only_enabled: false - bot_user: - display_name: - always_online: true - shortcuts: - - name: Create a new incident - type: message - callback_id: incident_create - description: Creates a new OnCall incident - - name: Add to postmortem - type: message - callback_id: add_postmortem - description: Add this message to postmortem - slash_commands: - - command: /oncall - url: /slack/interactive_api_endpoint/ - description: oncall - should_escape: false - oauth_config: - redirect_urls: - - /api/internal/v1/complete/slack-install-free/ - - /api/internal/v1/complete/slack-login/ - scopes: - user: - - channels:read - - chat:write - - identify - - users.profile:read - bot: - - app_mentions:read - - channels:history - - channels:read - - chat:write - - chat:write.customize - - chat:write.public - - commands - - files:write - - groups:history - - groups:read - - im:history - - im:read - - im:write - - mpim:history - - mpim:read - - mpim:write - - reactions:write - - team:read - - usergroups:read - - usergroups:write - - users.profile:read - - users:read - - users:read.email - - users:write - settings: - event_subscriptions: - request_url: /slack/event_api_endpoint/ - bot_events: - - app_home_opened - - app_mention - - channel_archive - - channel_created - - channel_deleted - - channel_rename - - channel_unarchive - - member_joined_channel - - message.channels - - message.im - - subteam_created - - subteam_members_changed - - subteam_updated - - user_change - interactivity: - is_enabled: true - request_url: /slack/interactive_api_endpoint/ - org_deploy_enabled: false - socket_mode_enabled: false - ``` -
- -6. Click `Install to workspace` button to generate the credentials - -6. Populate the environment with variables related to Slack - - In your `.env` file, fill out the following variables: - - ``` - SLACK_CLIENT_OAUTH_ID = Basic Information -> App Credentials -> Client ID - SLACK_CLIENT_OAUTH_SECRET = Basic Information -> App Credentials -> Client Secret - SLACK_API_TOKEN = OAuth & Permissions -> Bot User OAuth Token - SLACK_INSTALL_RETURN_REDIRECT_HOST = https://pretty-turkey-83.loca.lt - ``` - - Don't forget to export variables from the `.env` file and restart the server! - -7. Edit `grafana-plugin/grafana-plugin.yml` to set `onCallApiUrl` fields with localtunnel url - ``` - onCallApiUrl: https://pretty-turkey-83.loca.lt - ``` - - or set BASE_URL Env variable through web interface. - -8. Edit grafana-plugin/src/plugin.json to add `Bypass-Tunnel-Reminder` header section for all existing routes - > this headers required for the local development only, otherwise localtunnel blocks requests from grafana plugin - - ``` - { - "path": ..., - ... - "headers": [ - ... - { - "name": "Bypass-Tunnel-Reminder", - "content": "True" - } - ] - }, - ``` -9. Rebuild the plugin - ``` - yarn watch - ``` -10. Restart grafana instance - -11. All set! Go to Slack and check if your application is functional. +For Slack app configuration check our docs: https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup ## Troubleshooting @@ -383,3 +226,19 @@ pytest --ds=settings.dev - Set Settings to settings/dev.py 5. Create a new Django Server run configuration to Run/Debug the engine - Use a plugin such as EnvFile to load the .env file + - Change port from 8000 to 8080 + +## Update drone build +The .drone.yml build file must be signed when changes are made to it. 
Follow these steps: + +If you have not installed drone CLI follow [these instructions](https://docs.drone.io/cli/install/) + +To sign the .drone.yml file: +```bash +export DRONE_SERVER=https://drone.grafana.net + +# Get your drone token from https://drone.grafana.net/account +export DRONE_TOKEN= + +drone sign --save grafana/oncall .drone.yml +``` diff --git a/GOVERNANCE.md b/GOVERNANCE.md new file mode 100644 index 00000000..6b837d68 --- /dev/null +++ b/GOVERNANCE.md @@ -0,0 +1,159 @@ +--- +title: Governance +--- + +# Governance + +This document describes the rules and governance of the project. It is meant to be followed by all the developers of the project and the OnCall community. Common terminology used in this governance document are listed below: + +- **Team members**: Any members of the private [team mailing list][team]. + +- **Maintainers**: Maintainers lead an individual project or parts thereof ([`MAINTAINERS.md`][maintainers]). + +- **Projects**: A single repository in the Grafana GitHub organization and listed below is referred to as a project: + + - oncall + +- **The OnCall project**: The sum of all activities performed under this governance, concerning one or more repositories or the community. + +## Values + +The OnCall developers and community are expected to follow the values defined in the [Code of Conduct][coc]. Furthermore, the OnCall community strives for kindness, giving feedback effectively, and building a welcoming environment. The OnCall developers generally decide by consensus and only resort to conflict resolution by a majority vote if consensus cannot be reached. + +## Projects + +Each project must have a [`MAINTAINERS.md`][maintainers] file with at least one maintainer. Where a project has a release process, access and documentation should be such that more than one person can perform a release. 
Releases should be announced on the [announcements](https://github.com/grafana/oncall/discussions/categories/announcements) category at the GitHub Discussions. Any new projects should be first proposed on the [team mailing list][team] following the voting procedures listed below. + +## Decision making + +### Team members + +Team member status may be given to those who have made ongoing contributions to the OnCall project for at least 3 months. This is usually in the form of code improvements and/or notable work on documentation, but organizing events or user support could also be taken into account. + +New members may be proposed by any existing member by email to the [team mailing list][team]. It is highly desirable to reach consensus about acceptance of a new member. However, the proposal is ultimately voted on by a formal [supermajority vote](#supermajority-vote). + +If the new member proposal is accepted, the proposed team member should be contacted privately via email to confirm or deny their acceptance of team membership. This email will also be CC'd to the [team mailing list][team] for record-keeping purposes. + +If they choose to accept, the [onboarding](#onboarding) procedure is followed. + +Team members may retire at any time by emailing [the team][team]. + +Team members can be removed by [supermajority vote](#supermajority-vote) on [the team mailing list][team]. +For this vote, the member in question is not eligible to vote and does not count towards the quorum. +Any removal vote can cover only one single person. + +Upon death of a member, they leave the team automatically. + +In case a member leaves, the [offboarding](#offboarding) procedure is applied. 
+ +The current team members are: + +- Ildar Iskhakov — [@iskhakov](https://github.com/iskhakov) ([Grafana Labs](https://grafana.com/)) +- Innokentii Konstantinov — [@Konstantinov-Innokentii](https://github.com/Konstantinov-Innokentii) ([Grafana Labs](https://grafana.com/)) +- Matías Bordese — [@matiasb](https://github.com/matiasb) ([Grafana Labs](https://grafana.com/)) +- Matvey Kukuy — [@Matvey-Kuk](https://github.com/Matvey-Kuk) ([Grafana Labs](https://grafana.com/)) +- Michael Derynck — [@mderynck](https://github.com/mderynck) ([Grafana Labs](https://grafana.com/)) +- Vadim Stepanov — [@vadimkerr](https://github.com/vadimkerr) ([Grafana Labs](https://grafana.com/)) +- Yulia Shanyrova — [@Ukochka](https://github.com/Ukochka) ([Grafana Labs](https://grafana.com/)) +- Maxim Mordasov — [@maskin25](https://github.com/maskin25) ([Grafana Labs](https://grafana.com/)) +- Julia Artyukhina — [@Ferril](https://github.com/Ferril) ([Grafana Labs](https://grafana.com/)) +- Julia Artyukhina — [@Ferril](https://github.com/Ferril) ([Grafana Labs](https://grafana.com/)) + +Previous team members: + +- n/a + +### Maintainers + +Maintainers lead one or more project(s) or parts thereof and serve as a point of conflict resolution amongst the contributors to this project. Ideally, maintainers are also team members, but exceptions are possible for suitable maintainers that, for whatever reason, are not yet team members. + +Changes in maintainership have to be announced on the [announcements](https://github.com/grafana/oncall/discussions/categories/announcements) category at the GitHub Discussions. They are decided by [rough consensus](#consensus) and formalized by changing the [`MAINTAINERS.md`][maintainers] file of the respective repository. + +Maintainers are granted commit rights to all projects covered by this governance. + +A maintainer or committer may resign by notifying the [team mailing list][team]. 
A maintainer with no project activity for a year is considered to have resigned. Maintainers that wish to resign are encouraged to propose another team member to take over the project. + +A project may have multiple maintainers, as long as the responsibilities are clearly agreed upon between them. This includes coordinating who handles which issues and pull requests. + +### Technical decisions + +Technical decisions that only affect a single project are made informally by the maintainer of this project, and [rough consensus](#consensus) is assumed. Technical decisions that span multiple parts of the project should be discussed and made on the [GitHub Discussions](https://github.com/grafana/oncall/discussions). + +Decisions are usually made by [rough consensus](#consensus). If no consensus can be reached, the matter may be resolved by [majority vote](#majority-vote). + +### Governance changes + +Changes to this document are made by Grafana Labs. + +### Other matters + +Any matter that needs a decision may be called to a vote by any member if they deem it necessary. For private or personnel matters, discussion and voting takes place on the [team mailing list][team], otherwise on the [GitHub Discussions](https://github.com/grafana/oncall/discussions). + +## Voting + +The OnCall project usually runs by informal consensus, however sometimes a formal decision must be made. + +Depending on the subject matter, as laid out [above](#decision-making), different methods of voting are used. + +For all votes, voting must be open for at least one week. The end date should be clearly stated in the call to vote. A vote may be called and closed early if enough votes have come in one way so that further votes cannot change the final decision. + +In all cases, all and only [team members](#team-members) are eligible to vote, with the sole exception of the forced removal of a team member, in which said member is not eligible to vote. 
+ +Discussion and votes on personnel matters (including but not limited to team membership and maintainership) are held in private on the [team mailing list][team]. All other discussion and votes are held in public on the [GitHub Discussions](https://github.com/grafana/oncall/discussions). + +For public discussions, anyone interested is encouraged to participate. Formal power to object or vote is limited to [team members](#team-members). + +### Consensus + +The default decision making mechanism for the OnCall project is [rough][rough] consensus. This means that any decision on technical issues is considered supported by the [team][team] as long as nobody objects or the objection has been considered but not necessarily accommodated. + +Silence on any consensus decision is implicit agreement and equivalent to explicit agreement. Explicit agreement may be stated at will. Decisions may, but do not need to be called out and put up for decision on the [GitHub Discussions](https://github.com/grafana/oncall/discussions) at any time and by anyone. + +Consensus decisions can never override or go against the spirit of an earlier explicit vote. + +If any [team member](#team-members) raises objections, the team members work together towards a solution that all involved can accept. This solution is again subject to rough consensus. + +In case no consensus can be found, but a decision one way or the other must be made, any [team member](#team-members) may call a formal [majority vote](#majority-vote). + +### Majority vote + +Majority votes must be called explicitly in a separate thread on the appropriate mailing list. The subject must be prefixed with `[VOTE]`. In the body, the call to vote must state the proposal being voted on. It should reference any discussion leading up to this point. + +Votes may take the form of a single proposal, with the option to vote yes or no, or the form of multiple alternatives. 
+ +A vote on a single proposal is considered successful if more vote in favor than against. + +If there are multiple alternatives, members may vote for one or more alternatives, or vote “no” to object to all alternatives. It is not possible to cast an “abstain” vote. A vote on multiple alternatives is considered decided in favor of one alternative if it has received the most votes in favor, and a vote from more than half of those voting. Should no alternative reach this quorum, another vote on a reduced number of options may be called separately. + +### Supermajority vote + +Supermajority votes must be called explicitly in a separate thread on the appropriate mailing list. The subject must be prefixed with `[VOTE]`. In the body, the call to vote must state the proposal being voted on. It should reference any discussion leading up to this point. + +Votes may take the form of a single proposal, with the option to vote yes or no, or the form of multiple alternatives. + +A vote on a single proposal is considered successful if at least two thirds of those eligible to vote vote in favor. + +If there are multiple alternatives, members may vote for one or more alternatives, or vote “no” to object to all alternatives. A vote on multiple alternatives is considered decided in favor of one alternative if it has received the most votes in favor, and a vote from at least two thirds of those eligible to vote. Should no alternative reach this quorum, another vote on a reduced number of options may be called separately. + +## On- / Offboarding + +### Onboarding + +The new member is + +- added to the list of [team members](#team-members). Ideally by sending a PR of their own, at least approving said PR. +- announced on the [GitHub Discussions](https://github.com/grafana/oncall/discussions) by an existing team member. Ideally, the new member replies in this thread, acknowledging team membership. +- added to the projects with commit rights. +- added to the [team mailing list][team]. 
+ +### Offboarding + +The ex-member is + +- removed from the list of [team members](#team-members). Ideally by sending a PR of their own, at least approving said PR. In case of forced removal, no approval is needed. +- removed from the projects. Optionally, they can retain maintainership of one or more repositories if the [team](#team-members) agrees. +- removed from the team mailing list and demoted to a normal member of the other mailing lists. +- not allowed to call themselves an active team member any more, nor allowed to imply this to be the case. +- added to a list of previous members if they so choose. + +If needed, we reserve the right to publicly announce removal. diff --git a/LICENSING.md b/LICENSING.md index 4e53ac0d..34951583 100644 --- a/LICENSING.md +++ b/LICENSING.md @@ -9,9 +9,11 @@ The default license for this project is [AGPL-3.0-only](LICENSE). The following directories and their subdirectories are licensed under Apache-2.0: ``` +n/a ``` The following directories and their subdirectories are licensed under their original upstream licenses: ``` +n/a ``` diff --git a/MAINTAINERS.md b/MAINTAINERS.md new file mode 100644 index 00000000..bd9b78f3 --- /dev/null +++ b/MAINTAINERS.md @@ -0,0 +1,14 @@ +The following are the main/default maintainers: + +- Ildar Iskhakov — [@iskhakov](https://github.com/iskhakov) ([Grafana Labs](https://grafana.com/)) +- Matvey Kukuy — [@Matvey-Kuk](https://github.com/Matvey-Kuk) ([Grafana Labs](https://grafana.com/)) + +Some parts of the codebase have other maintainers, the package paths also include all sub-packages: + +n/a + +For the sake of brevity, not all subtrees are explicitly listed. Due to the +size of this repository, the natural changes in focus of maintainers over time, +and nuances of where particular features live, this list will always be +incomplete and out of date. However the listed maintainer(s) should be able to +direct a PR/question to the right person. 
diff --git a/README.md b/README.md index aa8300dc..b9569831 100644 --- a/README.md +++ b/README.md @@ -1,76 +1,61 @@ -# Grafana OnCall Incident Response -Grafana OnCall, cloud version of Grafana OnCall: https://grafana.com/products/cloud/ + -Developer-friendly, incident response management with brilliant Slack integration. -- Connect monitoring systems -- Collect and analyze data -- On-call rotation -- Automatic escalation -- Never miss alerts with calls and SMS +Developer-friendly incident response with brilliant Slack integration. -![Grafana OnCall Screenshot](screenshot.png) + + +- Collect and analyze alerts from multiple monitoring systems +- On-call rotations based on schedules +- Automatic escalations +- Phone calls, SMS, Slack, Telegram notifications ## Getting Started -OnCall consists of two parts: -1. OnCall backend -2. "Grafana OnCall" plugin you need to install in your Grafana -### How to run OnCall backend -1. An all-in-one image of OnCall is available on docker hub to run it: +We prepared multiple environments: [production](https://grafana.com/docs/grafana-cloud/oncall/open-source/#production-environment), [developer](DEVELOPER.md) and hobby: + +1. Download docker-compose.yml: ```bash -docker run -it --name oncall-backend -p 8000:8000 grafana/oncall-all-in-one +curl -fsSL https://raw.githubusercontent.com/grafana/oncall/dev/docker-compose.yml -o docker-compose.yml ``` -2. When the image starts up you will see a message like this: +2. Set variables: ```bash -👋 This script will issue an invite token to securely connect the frontend. -Maintainers will be happy to help in the slack channel #grafana-oncall: https://slack.grafana.com/ -Your invite token: , use it in the Grafana OnCall plugin. 
+echo "DOMAIN=http://localhost:8080 +SECRET_KEY=my_random_secret_must_be_more_than_32_characters_long +RABBITMQ_PASSWORD=rabbitmq_secret_pw +MYSQL_PASSWORD=mysql_secret_pw +COMPOSE_PROFILES=with_grafana # Remove this line if you want to use existing grafana +GRAFANA_USER=admin +GRAFANA_PASSWORD=admin" > .env_hobby ``` -3. If you started your container detached with -d check the log: +3. Launch services: ```bash -docker logs oncall-backend +docker-compose --env-file .env_hobby -f docker-compose.yml up --build -d ``` -### How to install "Grafana OnCall" Plugin and connect with a backend -1. Open Grafana in your browser and login as an Admin -2. Navigate to Configuration → Plugins -3. Type Grafana OnCall into the "Search Grafana plugins" field -4. Select the Grafana OnCall plugin and press the "Install" button -5. On the Grafana OnCall Plugin page Enable the plugin and go to the Configuration tab you should see a status field with the message -``` -OnCall has not been setup, configure & initialize below. -``` -6. Fill in configuration fields using the token you got from the backend earlier, then press "Install Configuration" -``` -OnCall API URL: (The URL & port used to access OnCall) -http://host.docker.internal:8000 - -OnCall Invitation Token (Single use token to connect Grafana instance): -Invitation token from docker startup - -Grafana URL (URL OnCall will use to talk to this Grafana instance): -http://localhost:3000 (or http://host.docker.internal:3000 if your grafana is running in Docker locally) +4. Issue one-time invite token: +```bash +docker-compose --env-file .env_hobby -f docker-compose.yml run engine python manage.py issue_invite_for_the_frontend --override ``` -## Getting Help -- `#grafana-oncall` channel at https://slack.grafana.com/ -- Grafana Labs community forum for OnCall: https://community.grafana.com -- File an [issue](https://github.com/grafana/oncall/issues) for bugs, issues and feature suggestions. +5. 
Go to [OnCall Plugin Configuration](http://localhost:3000/plugins/grafana-oncall-app) (or find OnCall plugin in configuration->plugins) and connect OnCall _plugin_ with OnCall _backend_: +``` +Invite token: ^^^ from the previous step. +OnCall backend URL: http://engine:8080 +Grafana Url: http://grafana:3000 +``` -## Production Setup +6. Enjoy! -Looking for the production instructions? We're going to release them soon. Please join our Slack channel to be the first to know about them. + +## Join community + + + + ## Further Reading - *Documentation* - [Grafana OnCall](https://grafana.com/docs/grafana-cloud/oncall/) - *Blog Post* - [Announcing Grafana OnCall, the easiest way to do on-call management](https://grafana.com/blog/2021/11/09/announcing-grafana-oncall/) - *Presentation* - [Deep dive into the Grafana, Prometheus, and Alertmanager stack for alerting and on-call management](https://grafana.com/go/observabilitycon/2021/alerting/?pg=blog) - -## FAQ - -- How do I generate a new invitation token to connect plugin with a backend? 
-```bash -docker exec oncall-backend python manage.py issue_invite_for_the_frontend --override -``` diff --git a/developer-docker-compose.yml b/docker-compose-developer.yml similarity index 78% rename from developer-docker-compose.yml rename to docker-compose-developer.yml index b24312d6..e35c3c70 100644 --- a/developer-docker-compose.yml +++ b/docker-compose-developer.yml @@ -3,16 +3,16 @@ version: '3.2' services: mysql: - image: mariadb:10.2 + image: mysql:5.7 platform: linux/x86_64 mem_limit: 500m cpus: 0.5 - command: --default-authentication-plugin=mysql_native_password + command: --default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci restart: always ports: - 3306:3306 environment: - MYSQL_ROOT_PASSWORD: local_dev_pwd + MYSQL_ROOT_PASSWORD: empty MYSQL_DATABASE: oncall_local_dev healthcheck: test: [ "CMD", "mysqladmin" ,"ping", "-h", "localhost" ] @@ -40,15 +40,15 @@ services: - 5672:5672 mysql-to-create-grafana-db: - image: mariadb:10.2 + image: mysql:5.7 platform: linux/x86_64 - command: bash -c "mysql -h mysql -uroot -plocal_dev_pwd -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'" + command: bash -c "mysql -h mysql -uroot -pempty -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'" depends_on: mysql: condition: service_healthy grafana: - image: "grafana/grafana:8.3.2" + image: "grafana/grafana:9.0.0-beta3" restart: always mem_limit: 500m cpus: 0.5 @@ -56,7 +56,7 @@ services: GF_DATABASE_TYPE: mysql GF_DATABASE_HOST: mysql GF_DATABASE_USER: root - GF_DATABASE_PASSWORD: local_dev_pwd + GF_DATABASE_PASSWORD: empty GF_SECURITY_ADMIN_USER: oncall GF_SECURITY_ADMIN_PASSWORD: oncall GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: grafana-oncall-app diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..c4695fd8 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,174 @@ +services: + 
engine: + # TODO: change to the public image once it's public + # image: ... + build: engine + ports: + - 8080:8080 + command: > + sh -c "uwsgi --ini uwsgi.ini" + environment: + BASE_URL: $DOMAIN + SECRET_KEY: $SECRET_KEY + RABBITMQ_USERNAME: "rabbitmq" + RABBITMQ_PASSWORD: $RABBITMQ_PASSWORD + RABBITMQ_HOST: "rabbitmq" + RABBITMQ_PORT: "5672" + RABBITMQ_DEFAULT_VHOST: "/" + MYSQL_PASSWORD: $MYSQL_PASSWORD + MYSQL_DB_NAME: oncall_hobby + MYSQL_USER: ${MYSQL_USER:-root} + MYSQL_HOST: ${MYSQL_HOST:-mysql} + MYSQL_PORT: 3306 + REDIS_URI: redis://redis:6379/0 + DJANGO_SETTINGS_MODULE: settings.hobby + OSS: "True" + CELERY_WORKER_QUEUE: "default,critical,long,slack,telegram,webhook,retry,celery" + depends_on: + mysql: + condition: service_healthy + oncall_db_migration: + condition: service_completed_successfully + rabbitmq: + condition: service_started + redis: + condition: service_started + + celery: + # TODO: change to the public image once it's public + build: engine + command: sh -c "./celery_with_exporter.sh" + environment: + BASE_URL: $DOMAIN + SECRET_KEY: $SECRET_KEY + RABBITMQ_USERNAME: "rabbitmq" + RABBITMQ_PASSWORD: $RABBITMQ_PASSWORD + RABBITMQ_HOST: "rabbitmq" + RABBITMQ_PORT: "5672" + RABBITMQ_DEFAULT_VHOST: "/" + MYSQL_PASSWORD: $MYSQL_PASSWORD + MYSQL_DB_NAME: oncall_hobby + MYSQL_USER: ${MYSQL_USER:-root} + MYSQL_HOST: ${MYSQL_HOST:-mysql} + MYSQL_PORT: 3306 + REDIS_URI: redis://redis:6379/0 + DJANGO_SETTINGS_MODULE: settings.hobby + OSS: "True" + CELERY_WORKER_QUEUE: "default,critical,long,slack,telegram,webhook,retry,celery" + CELERY_WORKER_CONCURRENCY: "1" + CELERY_WORKER_MAX_TASKS_PER_CHILD: "100" + CELERY_WORKER_SHUTDOWN_INTERVAL: "65m" + CELERY_WORKER_BEAT_ENABLED: "True" + depends_on: + mysql: + condition: service_healthy + oncall_db_migration: + condition: service_completed_successfully + rabbitmq: + condition: service_started + redis: + condition: service_started + + oncall_db_migration: + build: engine + command: python manage.py migrate 
--noinput + environment: + BASE_URL: $DOMAIN + SECRET_KEY: $SECRET_KEY + RABBITMQ_USERNAME: "rabbitmq" + RABBITMQ_PASSWORD: $RABBITMQ_PASSWORD + RABBITMQ_HOST: "rabbitmq" + RABBITMQ_PORT: "5672" + RABBITMQ_DEFAULT_VHOST: "/" + MYSQL_PASSWORD: $MYSQL_PASSWORD + MYSQL_DB_NAME: oncall_hobby + MYSQL_USER: ${MYSQL_USER:-root} + MYSQL_HOST: ${MYSQL_HOST:-mysql} + MYSQL_PORT: 3306 + REDIS_URI: redis://redis:6379/0 + DJANGO_SETTINGS_MODULE: settings.hobby + OSS: "True" + CELERY_WORKER_QUEUE: "default,critical,long,slack,telegram,webhook,retry,celery" + depends_on: + mysql: + condition: service_healthy + rabbitmq: + condition: service_started + + mysql: + image: mysql:5.7 + platform: linux/x86_64 + mem_limit: 500m + cpus: 0.5 + command: --default-authentication-plugin=mysql_native_password --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci + restart: always + ports: + - 3306:3306 + volumes: + - dbdata:/var/lib/mysql + environment: + MYSQL_ROOT_PASSWORD: $MYSQL_PASSWORD + MYSQL_DATABASE: oncall_hobby + healthcheck: + test: "mysql -uroot -p$MYSQL_PASSWORD oncall_hobby -e 'select 1'" + timeout: 20s + retries: 10 + + redis: + image: redis + mem_limit: 100m + cpus: 0.1 + restart: always + ports: + - 6379:6379 + + rabbitmq: + image: "rabbitmq:3.7.15-management" + hostname: rabbitmq + mem_limit: 1000m + cpus: 0.5 + volumes: + - rabbitmqdata:/var/lib/rabbitmq + environment: + RABBITMQ_DEFAULT_USER: "rabbitmq" + RABBITMQ_DEFAULT_PASS: $RABBITMQ_PASSWORD + RABBITMQ_DEFAULT_VHOST: "/" + + mysql_to_create_grafana_db: + image: mysql:5.7 + platform: linux/x86_64 + command: bash -c "mysql -h ${MYSQL_HOST:-mysql} -uroot -p${MYSQL_PASSWORD:?err} -e 'CREATE DATABASE IF NOT EXISTS grafana CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;'" + depends_on: + mysql: + condition: service_healthy + profiles: + - with_grafana + + grafana: + image: "grafana/grafana:9.0.0-beta3" + mem_limit: 500m + ports: + - 3000:3000 + cpus: 0.5 + environment: + GF_DATABASE_TYPE: mysql + 
GF_DATABASE_HOST: ${MYSQL_HOST:-mysql} + GF_DATABASE_USER: ${MYSQL_USER:-root} + GF_DATABASE_PASSWORD: ${MYSQL_PASSWORD:?err} + GF_SECURITY_ADMIN_USER: ${GRAFANA_USER:-admin} + GF_SECURITY_ADMIN_PASSWORD: ${GRAFANA_PASSWORD:?err} + GF_PLUGINS_ALLOW_LOADING_UNSIGNED_PLUGINS: grafana-oncall-app + GF_INSTALL_PLUGINS: grafana-oncall-app + depends_on: + mysql_to_create_grafana_db: + condition: service_completed_successfully + mysql: + condition: service_healthy + profiles: + - with_grafana + +volumes: + dbdata: + rabbitmqdata: + caddy_data: + caddy_config: diff --git a/docs/Makefile b/docs/Makefile index 5ddacacf..e66f1c1c 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -1,5 +1,5 @@ IMAGE = grafana/docs-base:latest -CONTENT_PATH = /hugo/content/docs/amixr/latest +CONTENT_PATH = /hugo/content/docs/oncall/latest PORT = 3002:3002 .PHONY: pull diff --git a/docs/README.md b/docs/README.md index 8d702ceb..b6a557c7 100644 --- a/docs/README.md +++ b/docs/README.md @@ -4,5 +4,5 @@ Source for documentation at https://grafana.com/docs/amixr/ ## Preview the website -Run `make docs`. This launches a preview of the website with the current grafana docs at `http://localhost:3002/docs/amixr/` which will refresh automatically when changes are made to content in the `sources` directory. +Run `make docs`. This launches a preview of the website with the current grafana docs at `http://localhost:3002/docs/oncall/latest/` which will refresh automatically when changes are made to content in the `sources` directory. Make sure Docker is running. 
diff --git a/docs/img/GH_discussions.png b/docs/img/GH_discussions.png new file mode 100644 index 00000000..d3a1798a Binary files /dev/null and b/docs/img/GH_discussions.png differ diff --git a/docs/img/GrafanaOnCall_Stack_Fullcolor_black.png b/docs/img/GrafanaOnCall_Stack_Fullcolor_black.png new file mode 100644 index 00000000..94d3c86b Binary files /dev/null and b/docs/img/GrafanaOnCall_Stack_Fullcolor_black.png differ diff --git a/docs/img/How Grafana OnCall works_diagram.png b/docs/img/How Grafana OnCall works_diagram.png new file mode 100644 index 00000000..6812230f Binary files /dev/null and b/docs/img/How Grafana OnCall works_diagram.png differ diff --git a/docs/img/community_call.png b/docs/img/community_call.png new file mode 100644 index 00000000..22692fad Binary files /dev/null and b/docs/img/community_call.png differ diff --git a/docs/img/logo.png b/docs/img/logo.png new file mode 100644 index 00000000..ec11fc3a Binary files /dev/null and b/docs/img/logo.png differ diff --git a/docs/img/slack.png b/docs/img/slack.png new file mode 100644 index 00000000..d5ec5e6d Binary files /dev/null and b/docs/img/slack.png differ diff --git a/docs/sources/getting-started.md b/docs/sources/getting-started.md index 336058ff..dac5232b 100644 --- a/docs/sources/getting-started.md +++ b/docs/sources/getting-started.md @@ -17,7 +17,7 @@ These procedures introduce you to the configuration of user settings, how to set ## Before you begin -You must have a Grafana Cloud account. +You must have a [Grafana Cloud](https://grafana.com/products/cloud/) account or [Open Source Grafana OnCall]({{< relref " open-source.md" >}}) Each supported integration and the associated monitoring system has a slightly different configuration method. These methods will not be explained in this guide, however, you can follow the online instructions provided when adding an integration. 
diff --git a/docs/sources/oncall-api-reference/_index.md b/docs/sources/oncall-api-reference/_index.md index b696b0fc..000ee452 100644 --- a/docs/sources/oncall-api-reference/_index.md +++ b/docs/sources/oncall-api-reference/_index.md @@ -21,15 +21,12 @@ To authorize, use the **Authorization** header: ```shell # With shell, you can just pass the correct header with each request -curl "api_endpoint_here" --header "Authorization: meowmeowmeow" +curl "api_endpoint_here" --header "Authorization: "api_key_here"" ``` -Note that `meowmeowmeow` is a valid key for test purposes. -Replace `meowmeowmeow` with your API key in production. +Grafana OnCall uses API keys to allow access to the API. You can request a new OnCall API key in OnCall -> Settings page. -Grafana OnCall uses API keys to allow access to the API. You can request a new OnCall API key in the API section. - -An API key is specific to a user and a Grafana stack. If you want to switch to a different team configuration, request a different API key. +An API key is specific to a user and a Grafana stack. If you want to switch to a different stack configuration, request a different API key. ## Pagination diff --git a/docs/sources/open-source.md b/docs/sources/open-source.md new file mode 100644 index 00000000..fc31bf69 --- /dev/null +++ b/docs/sources/open-source.md @@ -0,0 +1,170 @@ +--- +aliases: + - /docs/grafana-cloud/oncall/open-source/ + - /docs/oncall/latest/open-source/ +keywords: + - Open Source +title: Open Source +weight: 100 +--- + +# Open Source + +We prepared three environments for OSS users: +- **Hobby** environment for local usage & playing around: [README.md](https://github.com/grafana/oncall#getting-started). 
+- **Development** environment for contributors: [DEVELOPER.md](https://github.com/grafana/oncall/blob/dev/DEVELOPER.md) +- **Production** environment for reliable cloud installation using Helm: [Production Environment](#production-environment) + +## Production Environment + +We prepared the helm chart for production environment: https://github.com/grafana/oncall/helm + +## Slack Setup + +Grafana OnCall Slack integration use a lot of Slack API features: +- Subscription on Slack events requires OnCall to be externally available and provide https endpoint. +- You will need to register new Slack App. + +1. Make sure your OnCall is up and running. + +2. You need OnCall to be accessible through https. For development purposes we suggest using [localtunnel](https://github.com/localtunnel/localtunnel). For production purposes please consider setting up proper web server with HTTPS termination. For localtunnel: +```bash +# Choose the unique prefix instead of pretty-turkey-83 +# Localtunnel will generate an url, e.g. https://pretty-turkey-83.loca.lt +# it is referred as below +lt --port 8080 -s pretty-turkey-83 --print-requests +``` + +3. If you use localtunnel, open your external URL and click "Continue" to allow requests to bypass the warning page. + +4. [Create a Slack Workspace](https://slack.com/create) for development, or use your company workspace. + +5. Go to https://api.slack.com/apps and click Create New App button + +6. Select `From an app manifest` option and choose the right workspace + +7. 
Copy and paste the following block with the correct and fields + + ```yaml + _metadata: + major_version: 1 + minor_version: 1 + display_information: + name: + features: + app_home: + home_tab_enabled: true + messages_tab_enabled: true + messages_tab_read_only_enabled: false + bot_user: + display_name: + always_online: true + shortcuts: + - name: Create a new incident + type: message + callback_id: incident_create + description: Creates a new OnCall incident + - name: Add to postmortem + type: message + callback_id: add_postmortem + description: Add this message to postmortem + slash_commands: + - command: /oncall + url: /slack/interactive_api_endpoint/ + description: oncall + should_escape: false + oauth_config: + redirect_urls: + - /api/internal/v1/complete/slack-install-free/ + - /api/internal/v1/complete/slack-login/ + scopes: + user: + - channels:read + - chat:write + - identify + - users.profile:read + bot: + - app_mentions:read + - channels:history + - channels:read + - chat:write + - chat:write.customize + - chat:write.public + - commands + - files:write + - groups:history + - groups:read + - im:history + - im:read + - im:write + - mpim:history + - mpim:read + - mpim:write + - reactions:write + - team:read + - usergroups:read + - usergroups:write + - users.profile:read + - users:read + - users:read.email + - users:write + settings: + event_subscriptions: + request_url: /slack/event_api_endpoint/ + bot_events: + - app_home_opened + - app_mention + - channel_archive + - channel_created + - channel_deleted + - channel_rename + - channel_unarchive + - member_joined_channel + - message.channels + - message.im + - subteam_created + - subteam_members_changed + - subteam_updated + - user_change + interactivity: + is_enabled: true + request_url: /slack/interactive_api_endpoint/ + org_deploy_enabled: false + socket_mode_enabled: false + ``` + +6. 
Go to your "OnCall" -> "Env Variables" and set: + ``` + SLACK_CLIENT_OAUTH_ID = Basic Information -> App Credentials -> Client ID + SLACK_CLIENT_OAUTH_SECRET = Basic Information -> App Credentials -> Client Secret + SLACK_SIGNING_SECRET = Basic Information -> App Credentials -> Signing Secret + SLACK_INSTALL_RETURN_REDIRECT_HOST = << OnCall external URL >> + ``` + +7. Go to "OnCall" -> "ChatOps" -> "Slack" and install Slack Integration + +8. All set! + +## Telegram Setup + +- Telegram integrations requires OnCall to be externally available and provide https endpoint. +- Telegram integration in OnCall is designed for collaborative team work. It requires Telegram Group and a Telegram Channel (private) for alerts. + +1. Make sure your OnCall is up and running. + +2. Respectfully ask [BotFather](https://t.me/BotFather) for a key, put it in `TELEGRAM_TOKEN` in "OnCall" -> "Env Variables". + +3. Set `TELEGRAM_WEBHOOK_HOST` with your external url for OnCall. + +4. Go to "OnCall" -> "ChatOps" -> Telegram and enjoy! + +## Grafana OSS-Cloud Setup + +Grafana OSS could be connected to Grafana Cloud for heartbeat and SMS / Phone Calls. We tried our best in making Grafana OSS <-> Cloud self-explanatory. Check "Cloud" page in your OSS OnCall instance. + +Please note that it's possible either to use Grafana Cloud either Twilio for SMS/Phone calls. + +## Twilio Setup + +1. Make sure Grafana OSS <-> Cloud connector is disabled. Set `GRAFANA_CLOUD_NOTIFICATIONS_ENABLED` as False. +2. 
Check "OnCall" -> "Env Variables" and set all variables starting with `TWILIO_` diff --git a/engine/Dockerfile.all-in-one b/engine/Dockerfile.all-in-one deleted file mode 100644 index 6b0e5d43..00000000 --- a/engine/Dockerfile.all-in-one +++ /dev/null @@ -1,38 +0,0 @@ -FROM python:3.9-alpine - -RUN apk add bash -RUN apk add python3-dev -RUN apk add build-base -RUN apk add linux-headers -RUN apk add pcre-dev -RUN apk add mariadb-connector-c-dev -RUN apk add openssl-dev -RUN apk add libffi-dev -RUN apk add git -RUN apk add curl -RUN apk add redis - -RUN pip install uwsgi -RUN pip install regex==2021.11.2 - -WORKDIR /etc/app -COPY ./requirements.txt ./requirements.txt -RUN pip install -r requirements.txt - -COPY ./scripts/start_all_in_one.sh ./start_all_in_one.sh - -COPY ./ ./ -RUN rm db.sqlite3 || true - -RUN DJANGO_SETTINGS_MODULE=settings.prod_without_db \ - SECRET_KEY="ThEmUsTSecretKEYforBUILDstage123" \ - TELEGRAM_TOKEN="0000000000:XXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXX" \ - SLACK_CLIENT_OAUTH_ID=1 python manage.py collectstatic --no-input - -VOLUME /etc/app/sqlite_data -VOLUME /etc/app/secret_data -VOLUME /etc/app/redis_data - -EXPOSE 8000 - -CMD ["bash", "./start_all_in_one.sh"] diff --git a/engine/apps/alerts/escalation_snapshot/snapshot_classes/escalation_policy_snapshot.py b/engine/apps/alerts/escalation_snapshot/snapshot_classes/escalation_policy_snapshot.py index 2ee420e7..a082270e 100644 --- a/engine/apps/alerts/escalation_snapshot/snapshot_classes/escalation_policy_snapshot.py +++ b/engine/apps/alerts/escalation_snapshot/snapshot_classes/escalation_policy_snapshot.py @@ -266,7 +266,7 @@ class EscalationPolicySnapshot: escalation_policy_step=self.step, ) else: - notify_to_users_list = list_users_to_notify_from_ical(on_call_schedule) + notify_to_users_list = list_users_to_notify_from_ical(on_call_schedule, include_viewers=True) if notify_to_users_list is None: log_record = AlertGroupLogRecord( type=AlertGroupLogRecord.TYPE_ESCALATION_FAILED, diff --git 
a/engine/apps/alerts/grafana_alerting_sync_manager/grafana_alerting_sync.py b/engine/apps/alerts/grafana_alerting_sync_manager/grafana_alerting_sync.py index 7bfcbdef..a9ca08fb 100644 --- a/engine/apps/alerts/grafana_alerting_sync_manager/grafana_alerting_sync.py +++ b/engine/apps/alerts/grafana_alerting_sync_manager/grafana_alerting_sync.py @@ -5,7 +5,7 @@ from typing import Optional from django.apps import apps from rest_framework import status -from apps.alerts.tasks import create_contact_points_for_datasource +from apps.alerts.tasks import schedule_create_contact_points_for_datasource from apps.grafana_plugin.helpers import GrafanaAPIClient logger = logging.getLogger(__name__) @@ -77,16 +77,15 @@ class GrafanaAlertingSyncManager: # sync other datasource for datasource in datasources: if datasource["type"] == GrafanaAlertingSyncManager.ALERTING_DATASOURCE: - if self.create_contact_point(datasource) is None: + contact_point = self.create_contact_point(datasource) + if contact_point is None: # Failed to create contact point duo to getting wrong alerting config. It is expected behaviour. 
# Add datasource to list and retry to create contact point for it async datasources_to_create.append(datasource) if datasources_to_create: # create other contact points async - create_contact_points_for_datasource.apply_async( - (self.alert_receive_channel.pk, datasources_to_create), - ) + schedule_create_contact_points_for_datasource(self.alert_receive_channel.pk, datasources_to_create) else: self.alert_receive_channel.is_finished_alerting_setup = True self.alert_receive_channel.save(update_fields=["is_finished_alerting_setup"]) diff --git a/engine/apps/alerts/incident_appearance/templaters/phone_call_templater.py b/engine/apps/alerts/incident_appearance/templaters/phone_call_templater.py index 6f9997d7..3d0127ca 100644 --- a/engine/apps/alerts/incident_appearance/templaters/phone_call_templater.py +++ b/engine/apps/alerts/incident_appearance/templaters/phone_call_templater.py @@ -1,5 +1,5 @@ from apps.alerts.incident_appearance.templaters.alert_templater import AlertTemplater -from common.utils import clean_markup +from common.utils import clean_markup, escape_for_twilio_phone_call class AlertPhoneCallTemplater(AlertTemplater): @@ -24,8 +24,4 @@ class AlertPhoneCallTemplater(AlertTemplater): return sf.format(data) def _escape(self, data): - # https://www.twilio.com/docs/api/errors/12100 - data = data.replace("&", "&") - data = data.replace(">", ">") - data = data.replace("<", "<") - return data + return escape_for_twilio_phone_call(data) diff --git a/engine/apps/alerts/migrations/0001_squashed_initial.py b/engine/apps/alerts/migrations/0001_squashed_initial.py index 40ec3b57..bc66bc5c 100644 --- a/engine/apps/alerts/migrations/0001_squashed_initial.py +++ b/engine/apps/alerts/migrations/0001_squashed_initial.py @@ -16,6 +16,8 @@ from django.db import migrations, models import django.db.models.deletion import django.db.models.manager +from apps.alerts.integration_options_mixin import IntegrationOptionsMixin + class Migration(migrations.Migration): @@ -132,7 +134,7 
@@ class Migration(migrations.Migration): ('public_primary_key', models.CharField(default=apps.alerts.models.alert_receive_channel.generate_public_primary_key_for_alert_receive_channel, max_length=20, unique=True, validators=[django.core.validators.MinLengthValidator(13)])), ('created_at', models.DateTimeField(auto_now_add=True)), ('deleted_at', models.DateTimeField(blank=True, null=True)), - ('integration', models.CharField(choices=[('alertmanager', 'AlertManager'), ('grafana', 'Grafana'), ('grafana_alerting', 'Grafana Alerting'), ('formatted_webhook', 'Formatted Webhook'), ('webhook', 'Webhook'), ('amazon_sns', 'Amazon SNS'), ('heartbeat', 'Heartbeat'), ('inbound_email', 'Inboubd Email'), ('maintenance', 'Maintenance'), ('manual', 'Manual'), ('slack_channel', 'Slack Channel'), ('stackdriver', 'Stackdriver'), ('curler', 'Curler'), ('datadog', 'Datadog'), ('demo', 'Demo'), ('elastalert', 'Elastalert'), ('fabric', 'Fabric'), ('kapacitor', 'Kapacitor'), ('newrelic', 'New Relic'), ('pagerduty', 'Pagerduty'), ('pingdom', 'Pingdom'), ('prtg', 'PRTG'), ('sentry', 'Sentry'), ('uptimerobot', 'UptimeRobot'), ('zabbix', 'Zabbix')], default='grafana', max_length=100)), + ('integration', models.CharField(choices=IntegrationOptionsMixin.INTEGRATION_CHOICES,default=IntegrationOptionsMixin.DEFAULT_INTEGRATION, max_length=100)), ('allow_source_based_resolving', models.BooleanField(default=True)), ('token', models.CharField(db_index=True, default=apps.alerts.models.alert_receive_channel.random_token_generator, max_length=30)), ('smile_code', models.TextField(default=':slightly_smiling_face:')), diff --git a/engine/apps/alerts/migrations/0003_squashed_create_demo_token_instances.py b/engine/apps/alerts/migrations/0003_squashed_create_demo_token_instances.py deleted file mode 100644 index 5729cbd6..00000000 --- a/engine/apps/alerts/migrations/0003_squashed_create_demo_token_instances.py +++ /dev/null @@ -1,178 +0,0 @@ -# Generated by Django 3.2.5 on 2021-08-04 10:42 - -import sys 
-from django.db import migrations -from django.utils import timezone, dateparse -from apps.alerts.models.alert_receive_channel import number_to_smiles_translator -from apps.public_api import constants as public_api_constants - - -TYPE_SINGLE_EVENT = 0 -TYPE_RECURRENT_EVENT = 1 -FREQUENCY_WEEKLY = 1 -SOURCE_TERRAFORM = 3 -STEP_WAIT = 0 -STEP_NOTIFY_USERS_QUEUE = 12 -SOURCE_WEB = 1 - - -def create_demo_token_instances(apps, schema_editor): - if not (len(sys.argv) > 1 and sys.argv[1] == 'test'): - User = apps.get_model('user_management', 'User') - Organization = apps.get_model('user_management', 'Organization') - AlertReceiveChannel = apps.get_model('alerts', 'AlertReceiveChannel') - EscalationChain = apps.get_model('alerts', 'EscalationChain') - ChannelFilter = apps.get_model('alerts', 'ChannelFilter') - EscalationPolicy = apps.get_model('alerts', 'EscalationPolicy') - OnCallScheduleICal = apps.get_model('schedules', 'OnCallScheduleICal') - AlertGroup = apps.get_model('alerts', 'AlertGroup') - Alert = apps.get_model('alerts', 'Alert') - CustomButton = apps.get_model("alerts", "CustomButton") - CustomOnCallShift = apps.get_model('schedules', 'CustomOnCallShift') - - organization = Organization.objects.get(public_primary_key=public_api_constants.DEMO_ORGANIZATION_ID) - user = User.objects.get(public_primary_key=public_api_constants.DEMO_USER_ID) - - alert_receive_channel, _ = AlertReceiveChannel.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_INTEGRATION_ID, - defaults=dict( - integration=0, - author=user, - organization=organization, - smile_code=number_to_smiles_translator(0) - ) - ) - escalation_chain, _ = EscalationChain.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_ESCALATION_CHAIN_ID, - defaults=dict( - name="default", - organization=organization, - ) - ) - - channel_filter_1, _ = ChannelFilter.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_ROUTE_ID_1, - defaults=dict( - 
alert_receive_channel=alert_receive_channel, - slack_channel_id=public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID, - filtering_term='us-(east|west)', - order=0, - escalation_chain=escalation_chain, - ) - ) - ChannelFilter.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_ROUTE_ID_2, - defaults=dict( - alert_receive_channel=alert_receive_channel, - slack_channel_id=public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID, - filtering_term='.*', - order=1, - is_default=True, - escalation_chain=escalation_chain, - ) - ) - - EscalationPolicy.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_1, - defaults=dict( - step=STEP_WAIT, - wait_delay=timezone.timedelta(minutes=1), - order=0, - escalation_chain=escalation_chain, - ) - ) - - escalation_policy_1, _ = EscalationPolicy.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_2, - defaults=dict( - step=STEP_NOTIFY_USERS_QUEUE, - order=1, - escalation_chain=escalation_chain, - ) - ) - escalation_policy_1.notify_to_users_queue.add(user) - - schedule, _ = OnCallScheduleICal.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL, - defaults=dict( - organization=organization, - name=public_api_constants.DEMO_SCHEDULE_NAME_ICAL, - ical_url_overrides=public_api_constants.DEMO_SCHEDULE_ICAL_URL_OVERRIDES, - channel=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID, - ) - ) - - alert_group, _ = AlertGroup.all_objects.get_or_create( - public_primary_key=public_api_constants.DEMO_INCIDENT_ID, - defaults=dict( - channel=alert_receive_channel, - channel_filter=channel_filter_1, - resolved=True, - resolved_at=dateparse.parse_datetime(public_api_constants.DEMO_INCIDENT_RESOLVED_AT), - ) - ) - alert_group.started_at = dateparse.parse_datetime(public_api_constants.DEMO_INCIDENT_CREATED_AT) - alert_group.save(update_fields=['started_at']) - - for id, created_at in public_api_constants.DEMO_ALERT_IDS: - alert, _ = 
Alert.objects.get_or_create( - public_primary_key=id, - defaults=dict( - group=alert_group, - raw_request_data=public_api_constants.DEMO_ALERT_PAYLOAD, - title='Memory above 90% threshold', - ) - ) - alert.created_at = dateparse.parse_datetime(created_at) - alert.save(update_fields=['created_at']) - - CustomButton.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_CUSTOM_ACTION_ID, - defaults=dict( - name=public_api_constants.DEMO_CUSTOM_ACTION_NAME, - organization=organization, - ) - ) - - on_call_shift_1, _ = CustomOnCallShift.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, - defaults=dict( - type=TYPE_SINGLE_EVENT, - organization=organization, - name=public_api_constants.DEMO_ON_CALL_SHIFT_NAME_1, - start=dateparse.parse_datetime(public_api_constants.DEMO_ON_CALL_SHIFT_START_1), - duration=timezone.timedelta(seconds=public_api_constants.DEMO_ON_CALL_SHIFT_DURATION), - ) - ) - - on_call_shift_1.users.add(user) - - on_call_shift_2, _ = CustomOnCallShift.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, - defaults=dict( - type=TYPE_RECURRENT_EVENT, - organization=organization, - name=public_api_constants.DEMO_ON_CALL_SHIFT_NAME_2, - start=dateparse.parse_datetime(public_api_constants.DEMO_ON_CALL_SHIFT_START_2), - duration=timezone.timedelta(seconds=public_api_constants.DEMO_ON_CALL_SHIFT_DURATION), - frequency=FREQUENCY_WEEKLY, - interval=2, - by_day=public_api_constants.DEMO_ON_CALL_SHIFT_BY_DAY, - source=SOURCE_TERRAFORM, - ) - ) - - on_call_shift_2.users.add(user) - - -class Migration(migrations.Migration): - - dependencies = [ - ('alerts', '0002_squashed_initial'), - ('user_management', '0002_squashed_create_demo_token_instances'), - ('schedules', '0002_squashed_initial'), - ] - - operations = [ - migrations.RunPython(create_demo_token_instances, migrations.RunPython.noop) - ] diff --git a/engine/apps/alerts/models/channel_filter.py 
b/engine/apps/alerts/models/channel_filter.py index b1f1dae2..fb369088 100644 --- a/engine/apps/alerts/models/channel_filter.py +++ b/engine/apps/alerts/models/channel_filter.py @@ -113,20 +113,7 @@ class ChannelFilter(OrderedModel): return satisfied_filter def is_satisfying(self, raw_request_data, title, message=None): - AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel") - - return ( - self.is_default - or self.check_filter(json.dumps(raw_request_data)) - or self.check_filter(str(title)) - or - # Special case for Amazon SNS - ( - self.check_filter(str(message)) - if self.alert_receive_channel.integration == AlertReceiveChannel.INTEGRATION_AMAZON_SNS - else False - ) - ) + return self.is_default or self.check_filter(json.dumps(raw_request_data)) or self.check_filter(str(title)) def check_filter(self, value): return re.search(self.filtering_term, value) diff --git a/engine/apps/alerts/tasks/__init__.py b/engine/apps/alerts/tasks/__init__.py index 8e0e994f..3ff8501e 100644 --- a/engine/apps/alerts/tasks/__init__.py +++ b/engine/apps/alerts/tasks/__init__.py @@ -4,6 +4,7 @@ from .calculcate_escalation_finish_time import calculate_escalation_finish_time from .call_ack_url import call_ack_url # noqa: F401 from .check_escalation_finished import check_escalation_finished_task # noqa: F401 from .create_contact_points_for_datasource import create_contact_points_for_datasource # noqa: F401 +from .create_contact_points_for_datasource import schedule_create_contact_points_for_datasource # noqa: F401 from .custom_button_result import custom_button_result # noqa: F401 from .delete_alert_group import delete_alert_group # noqa: F401 from .distribute_alert import distribute_alert # noqa: F401 diff --git a/engine/apps/alerts/tasks/create_contact_points_for_datasource.py b/engine/apps/alerts/tasks/create_contact_points_for_datasource.py index f3dc3f4b..a447a39c 100644 --- a/engine/apps/alerts/tasks/create_contact_points_for_datasource.py +++ 
b/engine/apps/alerts/tasks/create_contact_points_for_datasource.py @@ -1,9 +1,32 @@ +import logging + +from celery.utils.log import get_task_logger from django.apps import apps +from django.core.cache import cache from rest_framework import status from apps.grafana_plugin.helpers import GrafanaAPIClient from common.custom_celery_tasks import shared_dedicated_queue_retry_task +logger = get_task_logger(__name__) +logger.setLevel(logging.DEBUG) + + +def get_cache_key_create_contact_points_for_datasource(alert_receive_channel_id): + CACHE_KEY_PREFIX = "create_contact_points_for_datasource" + return f"{CACHE_KEY_PREFIX}_{alert_receive_channel_id}" + + +@shared_dedicated_queue_retry_task +def schedule_create_contact_points_for_datasource(alert_receive_channel_id, datasource_list): + CACHE_LIFETIME = 600 + START_TASK_DELAY = 3 + task = create_contact_points_for_datasource.apply_async( + args=[alert_receive_channel_id, datasource_list], countdown=START_TASK_DELAY + ) + cache_key = get_cache_key_create_contact_points_for_datasource(alert_receive_channel_id) + cache.set(cache_key, task.id, timeout=CACHE_LIFETIME) + @shared_dedicated_queue_retry_task(autoretry_for=(Exception,), retry_backoff=True, max_retries=10) def create_contact_points_for_datasource(alert_receive_channel_id, datasource_list): @@ -11,6 +34,11 @@ def create_contact_points_for_datasource(alert_receive_channel_id, datasource_li Try to create contact points for other datasource. Restart task for datasource, for which contact point was not created. 
""" + cache_key = get_cache_key_create_contact_points_for_datasource(alert_receive_channel_id) + cached_task_id = cache.get(cache_key) + current_task_id = create_contact_points_for_datasource.request.id + if cached_task_id is not None and current_task_id != cached_task_id: + return AlertReceiveChannel = apps.get_model("alerts", "AlertReceiveChannel") @@ -21,7 +49,7 @@ def create_contact_points_for_datasource(alert_receive_channel_id, datasource_li api_token=alert_receive_channel.organization.api_token, ) # list of datasource for which contact point creation was failed - datasource_to_create = [] + datasources_to_create = [] for datasource in datasource_list: contact_point = None config, response_info = client.get_alerting_config(datasource["id"]) @@ -29,16 +57,22 @@ def create_contact_points_for_datasource(alert_receive_channel_id, datasource_li if response_info.get("status_code") == status.HTTP_404_NOT_FOUND: client.get_alertmanager_status_with_config(datasource["id"]) contact_point = alert_receive_channel.grafana_alerting_sync_manager.create_contact_point(datasource) + elif response_info.get("status_code") == status.HTTP_400_BAD_REQUEST: + logger.warning( + f"Failed to create contact point for integration {alert_receive_channel_id}, " + f"datasource info: {datasource}; response: {response_info}" + ) + continue else: contact_point = alert_receive_channel.grafana_alerting_sync_manager.create_contact_point(datasource) if contact_point is None: # Failed to create contact point duo to getting wrong alerting config. 
# Add datasource to list and retry to create contact point for it again - datasource_to_create.append(datasource) + datasources_to_create.append(datasource) # if some contact points were not created, restart task for them - if datasource_to_create: - create_contact_points_for_datasource.apply_async((alert_receive_channel_id, datasource_to_create), countdown=5) + if datasources_to_create: + schedule_create_contact_points_for_datasource(alert_receive_channel_id, datasources_to_create) else: alert_receive_channel.is_finished_alerting_setup = True alert_receive_channel.save(update_fields=["is_finished_alerting_setup"]) diff --git a/engine/apps/alerts/tasks/notify_user.py b/engine/apps/alerts/tasks/notify_user.py index 05a9456f..b3a998b4 100644 --- a/engine/apps/alerts/tasks/notify_user.py +++ b/engine/apps/alerts/tasks/notify_user.py @@ -12,6 +12,7 @@ from apps.alerts.constants import NEXT_ESCALATION_DELAY from apps.alerts.incident_appearance.renderers.web_renderer import AlertGroupWebRenderer from apps.alerts.signals import user_notification_action_triggered_signal from apps.base.messaging import get_messaging_backend_from_id +from apps.base.utils import live_settings from common.custom_celery_tasks import shared_dedicated_queue_retry_task from .task_logger import task_logger @@ -56,6 +57,13 @@ def notify_user_task( if not user.is_notification_allowed: task_logger.info(f"notify_user_task: user {user.pk} notification is not allowed for role {user.role}") + UserNotificationPolicyLogRecord( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + reason=f"notification is not allowed for user with role {user.role}", + alert_group=alert_group, + notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE, + ).save() return user_has_notification, _ = UserHasNotification.objects.get_or_create( @@ -257,11 +265,31 @@ def perform_notification(log_record_pk): ).save() return + if not 
user.is_notification_allowed: + UserNotificationPolicyLogRecord( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + reason=f"notification is not allowed for user with role {user.role}", + alert_group=alert_group, + notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE, + ).save() + return + if notification_channel == UserNotificationPolicy.NotificationChannel.SMS: - SMSMessage.send_sms(user, alert_group, notification_policy) + SMSMessage.send_sms( + user, + alert_group, + notification_policy, + is_cloud_notification=live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED, + ) elif notification_channel == UserNotificationPolicy.NotificationChannel.PHONE_CALL: - PhoneCall.make_call(user, alert_group, notification_policy) + PhoneCall.make_call( + user, + alert_group, + notification_policy, + is_cloud_notification=live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED, + ) elif notification_channel == UserNotificationPolicy.NotificationChannel.TELEGRAM: if alert_group.notify_in_telegram_enabled is True: diff --git a/engine/apps/alerts/tests/test_alert_group_renderer.py b/engine/apps/alerts/tests/test_alert_group_renderer.py index 5253832e..aa7df113 100644 --- a/engine/apps/alerts/tests/test_alert_group_renderer.py +++ b/engine/apps/alerts/tests/test_alert_group_renderer.py @@ -2,7 +2,7 @@ import pytest from apps.alerts.incident_appearance.templaters import AlertSlackTemplater from apps.alerts.models import AlertGroup -from apps.integrations.metadata.configuration import grafana +from config_integrations import grafana @pytest.mark.django_db diff --git a/engine/apps/alerts/tests/test_default_templates.py b/engine/apps/alerts/tests/test_default_templates.py index 69288fb6..63cfd0b8 100644 --- a/engine/apps/alerts/tests/test_default_templates.py +++ b/engine/apps/alerts/tests/test_default_templates.py @@ -10,9 +10,9 @@ from apps.alerts.incident_appearance.templaters import ( AlertWebTemplater, ) from 
apps.alerts.models import Alert, AlertReceiveChannel -from apps.integrations.metadata.configuration import grafana from common.jinja_templater import jinja_template_env from common.utils import getattrd +from config_integrations import grafana @pytest.mark.django_db diff --git a/engine/apps/alerts/tests/test_escalation_policy_snapshot.py b/engine/apps/alerts/tests/test_escalation_policy_snapshot.py index a3d27f45..9a555c35 100644 --- a/engine/apps/alerts/tests/test_escalation_policy_snapshot.py +++ b/engine/apps/alerts/tests/test_escalation_policy_snapshot.py @@ -10,6 +10,7 @@ from apps.alerts.escalation_snapshot.utils import eta_for_escalation_step_notify from apps.alerts.models import AlertGroupLogRecord, EscalationPolicy from apps.schedules.ical_utils import list_users_to_notify_from_ical from apps.schedules.models import CustomOnCallShift, OnCallScheduleCalendar +from common.constants.role import Role def get_escalation_policy_snapshot_from_model(escalation_policy): @@ -200,6 +201,55 @@ def test_escalation_step_notify_on_call_schedule( assert mocked_execute_tasks.called +@patch("apps.alerts.escalation_snapshot.snapshot_classes.EscalationPolicySnapshot._execute_tasks", return_value=None) +@pytest.mark.django_db +def test_escalation_step_notify_on_call_schedule_viewer_user( + mocked_execute_tasks, + escalation_step_test_setup, + make_user_for_organization, + make_escalation_policy, + make_schedule, + make_on_call_shift, +): + organization, user, _, channel_filter, alert_group, reason = escalation_step_test_setup + viewer = make_user_for_organization(organization=organization, role=Role.VIEWER) + + schedule = make_schedule(organization, schedule_class=OnCallScheduleCalendar) + # create on_call_shift with user to notify + data = { + "start": timezone.datetime.now().replace(microsecond=0), + "duration": timezone.timedelta(seconds=7200), + } + on_call_shift = make_on_call_shift( + organization=organization, shift_type=CustomOnCallShift.TYPE_SINGLE_EVENT, **data + ) + 
on_call_shift.users.add(viewer) + schedule.custom_on_call_shifts.add(on_call_shift) + + notify_schedule_step = make_escalation_policy( + escalation_chain=channel_filter.escalation_chain, + escalation_policy_step=EscalationPolicy.STEP_NOTIFY_SCHEDULE, + notify_schedule=schedule, + ) + escalation_policy_snapshot = get_escalation_policy_snapshot_from_model(notify_schedule_step) + expected_eta = timezone.now() + timezone.timedelta(seconds=NEXT_ESCALATION_DELAY) + result = escalation_policy_snapshot.execute(alert_group, reason) + expected_result = EscalationPolicySnapshot.StepExecutionResultData( + eta=result.eta, + stop_escalation=False, + pause_escalation=False, + start_from_beginning=False, + ) + assert expected_eta + timezone.timedelta(seconds=15) > result.eta > expected_eta - timezone.timedelta(seconds=15) + assert result == expected_result + assert notify_schedule_step.log_records.filter(type=AlertGroupLogRecord.TYPE_ESCALATION_TRIGGERED).exists() + assert list(escalation_policy_snapshot.notify_to_users_queue) == list( + list_users_to_notify_from_ical(schedule, include_viewers=True) + ) + assert list(escalation_policy_snapshot.notify_to_users_queue) == [viewer] + assert mocked_execute_tasks.called + + @patch("apps.alerts.escalation_snapshot.snapshot_classes.EscalationPolicySnapshot._execute_tasks", return_value=None) @pytest.mark.django_db def test_escalation_step_notify_user_group( diff --git a/engine/apps/alerts/tests/test_notify_user.py b/engine/apps/alerts/tests/test_notify_user.py index 06677544..0f43305b 100644 --- a/engine/apps/alerts/tests/test_notify_user.py +++ b/engine/apps/alerts/tests/test_notify_user.py @@ -2,9 +2,10 @@ from unittest.mock import patch import pytest -from apps.alerts.tasks.notify_user import perform_notification +from apps.alerts.tasks.notify_user import notify_user_task, perform_notification from apps.base.models.user_notification_policy import UserNotificationPolicy from apps.base.models.user_notification_policy_log_record import 
UserNotificationPolicyLogRecord +from common.constants.role import Role @pytest.mark.django_db @@ -118,3 +119,62 @@ def test_notify_user_missing_data_errors( assert error_log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED assert error_log_record.reason == "Expected data is missing" assert error_log_record.notification_error_code is None + + +@pytest.mark.django_db +def test_notify_user_perform_notification_error_if_viewer( + make_organization, + make_user, + make_user_notification_policy, + make_alert_receive_channel, + make_alert_group, + make_user_notification_policy_log_record, +): + organization = make_organization() + user_1 = make_user(organization=organization, role=Role.VIEWER, _verified_phone_number="1234567890") + user_notification_policy = make_user_notification_policy( + user=user_1, + step=UserNotificationPolicy.Step.NOTIFY, + notify_by=UserNotificationPolicy.NotificationChannel.SMS, + ) + alert_receive_channel = make_alert_receive_channel(organization=organization) + alert_group = make_alert_group(alert_receive_channel=alert_receive_channel) + log_record = make_user_notification_policy_log_record( + author=user_1, + alert_group=alert_group, + notification_policy=user_notification_policy, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED, + ) + + perform_notification(log_record.pk) + + error_log_record = UserNotificationPolicyLogRecord.objects.last() + assert error_log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED + assert error_log_record.reason == f"notification is not allowed for user with role {user_1.role}" + assert ( + error_log_record.notification_error_code + == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE + ) + + +@pytest.mark.django_db +def test_notify_user_error_if_viewer( + make_organization, + make_user, + make_alert_receive_channel, + make_alert_group, +): + organization = make_organization() + user_1 = 
make_user(organization=organization, role=Role.VIEWER, _verified_phone_number="1234567890") + alert_receive_channel = make_alert_receive_channel(organization=organization) + alert_group = make_alert_group(alert_receive_channel=alert_receive_channel) + + notify_user_task(user_1.pk, alert_group.pk) + + error_log_record = UserNotificationPolicyLogRecord.objects.last() + assert error_log_record.type == UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED + assert error_log_record.reason == f"notification is not allowed for user with role {user_1.role}" + assert ( + error_log_record.notification_error_code + == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE + ) diff --git a/engine/apps/api/serializers/user.py b/engine/apps/api/serializers/user.py index e9ec91b2..db0db0ed 100644 --- a/engine/apps/api/serializers/user.py +++ b/engine/apps/api/serializers/user.py @@ -1,9 +1,12 @@ +from django.conf import settings from rest_framework import serializers from apps.api.serializers.telegram import TelegramToUserConnectorSerializer from apps.base.constants import ADMIN_PERMISSIONS, ALL_ROLES_PERMISSIONS, EDITOR_PERMISSIONS from apps.base.messaging import get_messaging_backends from apps.base.models import UserNotificationPolicy +from apps.base.utils import live_settings +from apps.oss_installation.utils import cloud_user_identity_status from apps.twilioapp.utils import check_phone_number_is_valid from apps.user_management.models import User from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField @@ -30,6 +33,7 @@ class UserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin): permissions = serializers.SerializerMethodField() notification_chain_verbal = serializers.SerializerMethodField() + cloud_connection_status = serializers.SerializerMethodField() SELECT_RELATED = ["telegram_verification_code", "telegram_connection", "organization", "slack_user_identity"] @@ -50,6 +54,7 @@ class 
UserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin): "messaging_backends", "permissions", "notification_chain_verbal", + "cloud_connection_status", ] read_only_fields = [ "email", @@ -88,6 +93,15 @@ class UserSerializer(DynamicFieldsModelSerializer, EagerLoadingMixin): default, important = UserNotificationPolicy.get_short_verbals_for_user(user=obj) return {"default": " - ".join(default), "important": " - ".join(important)} + def get_cloud_connection_status(self, obj): + if settings.OSS_INSTALLATION and live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED: + connector = self.context.get("connector", None) + identities = self.context.get("cloud_identities", {}) + identity = identities.get(obj.email, None) + status, _ = cloud_user_identity_status(connector, identity) + return status + return None + class UserHiddenFieldsSerializer(UserSerializer): available_for_all_roles_fields = [ diff --git a/engine/apps/api/tests/test_features.py b/engine/apps/api/tests/test_features.py index e391b8fb..30b37944 100644 --- a/engine/apps/api/tests/test_features.py +++ b/engine/apps/api/tests/test_features.py @@ -3,7 +3,13 @@ from django.urls import reverse from rest_framework import status from rest_framework.test import APIClient -from apps.api.views.features import FEATURE_LIVE_SETTINGS, FEATURE_SLACK, FEATURE_TELEGRAM +from apps.api.views.features import ( + FEATURE_GRAFANA_CLOUD_CONNECTION, + FEATURE_GRAFANA_CLOUD_NOTIFICATIONS, + FEATURE_LIVE_SETTINGS, + FEATURE_SLACK, + FEATURE_TELEGRAM, +) @pytest.mark.django_db @@ -30,15 +36,24 @@ def test_select_features_all_enabled( make_user_auth_headers, ): organization, user, token = make_organization_and_user_with_plugin_token() + settings.OSS_INSTALLATION = True settings.FEATURE_SLACK_INTEGRATION_ENABLED = True settings.FEATURE_TELEGRAM_INTEGRATION_ENABLED = True settings.FEATURE_LIVE_SETTINGS_ENABLED = True + settings.FEATURE_GRAFANA_CLOUD_CONNECTION = True + settings.FEATURE_GRAFANA_CLOUD_NOTIFICATIONS = True client = 
APIClient() url = reverse("api-internal:features") response = client.get(url, format="json", **make_user_auth_headers(user, token)) assert response.status_code == status.HTTP_200_OK - assert response.json() == [FEATURE_SLACK, FEATURE_TELEGRAM, FEATURE_LIVE_SETTINGS] + assert response.json() == [ + FEATURE_SLACK, + FEATURE_TELEGRAM, + FEATURE_GRAFANA_CLOUD_CONNECTION, + FEATURE_LIVE_SETTINGS, + FEATURE_GRAFANA_CLOUD_NOTIFICATIONS, + ] @pytest.mark.django_db @@ -48,9 +63,12 @@ def test_select_features_all_disabled( make_user_auth_headers, ): organization, user, token = make_organization_and_user_with_plugin_token() + settings.OSS_INSTALLATION = False settings.FEATURE_SLACK_INTEGRATION_ENABLED = False settings.FEATURE_TELEGRAM_INTEGRATION_ENABLED = False settings.FEATURE_LIVE_SETTINGS_ENABLED = False + settings.FEATURE_GRAFANA_CLOUD_CONNECTION = False + settings.FEATURE_GRAFANA_CLOUD_NOTIFICATIONS = FEATURE_GRAFANA_CLOUD_NOTIFICATIONS client = APIClient() url = reverse("api-internal:features") response = client.get(url, format="json", **make_user_auth_headers(user, token)) diff --git a/engine/apps/api/tests/test_organization_log_record.py b/engine/apps/api/tests/test_organization_log_record.py index bf48368b..a27d45ae 100644 --- a/engine/apps/api/tests/test_organization_log_record.py +++ b/engine/apps/api/tests/test_organization_log_record.py @@ -141,7 +141,6 @@ def test_get_filter_created_at_invalid_format( assert response.status_code == status.HTTP_400_BAD_REQUEST -@pytest.mark.skip(reason="SQLITE Incompatibility") @pytest.mark.django_db def test_get_filter_by_labels( make_organization_and_user_with_plugin_token, diff --git a/engine/apps/api/tests/test_user.py b/engine/apps/api/tests/test_user.py index 5731ed17..dd23feb5 100644 --- a/engine/apps/api/tests/test_user.py +++ b/engine/apps/api/tests/test_user.py @@ -75,6 +75,7 @@ def test_update_user_cant_change_email_and_username( "user": admin.username, } }, + "cloud_connection_status": 0, "permissions": 
ADMIN_PERMISSIONS, "notification_chain_verbal": {"default": "", "important": ""}, "slack_user_identity": None, @@ -124,6 +125,7 @@ def test_list_users( "notification_chain_verbal": {"default": "", "important": ""}, "slack_user_identity": None, "avatar": admin.avatar_url, + "cloud_connection_status": 0, }, { "pk": editor.public_primary_key, @@ -144,6 +146,7 @@ def test_list_users( "notification_chain_verbal": {"default": "", "important": ""}, "slack_user_identity": None, "avatar": editor.avatar_url, + "cloud_connection_status": 0, }, ], } diff --git a/engine/apps/api/views/features.py b/engine/apps/api/views/features.py index 6a4285de..805308a9 100644 --- a/engine/apps/api/views/features.py +++ b/engine/apps/api/views/features.py @@ -4,11 +4,14 @@ from rest_framework.response import Response from rest_framework.views import APIView from apps.auth_token.auth import PluginAuthentication +from apps.base.utils import live_settings FEATURE_SLACK = "slack" FEATURE_TELEGRAM = "telegram" FEATURE_LIVE_SETTINGS = "live_settings" MOBILE_APP_PUSH_NOTIFICATIONS = "mobile_app" +FEATURE_GRAFANA_CLOUD_NOTIFICATIONS = "grafana_cloud_notifications" +FEATURE_GRAFANA_CLOUD_CONNECTION = "grafana_cloud_connection" class FeaturesAPIView(APIView): @@ -31,9 +34,6 @@ class FeaturesAPIView(APIView): if settings.FEATURE_TELEGRAM_INTEGRATION_ENABLED: enabled_features.append(FEATURE_TELEGRAM) - if settings.FEATURE_LIVE_SETTINGS_ENABLED: - enabled_features.append(FEATURE_LIVE_SETTINGS) - if settings.MOBILE_APP_PUSH_NOTIFICATIONS_ENABLED: DynamicSetting = apps.get_model("base", "DynamicSetting") mobile_app_settings = DynamicSetting.objects.get_or_create( @@ -48,4 +48,12 @@ class FeaturesAPIView(APIView): if request.auth.organization.pk in mobile_app_settings.json_value["org_ids"]: enabled_features.append(MOBILE_APP_PUSH_NOTIFICATIONS) + if settings.OSS_INSTALLATION: + # Features below should be enabled only in OSS + enabled_features.append(FEATURE_GRAFANA_CLOUD_CONNECTION) + if 
settings.FEATURE_LIVE_SETTINGS_ENABLED: + enabled_features.append(FEATURE_LIVE_SETTINGS) + if live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED: + enabled_features.append(FEATURE_GRAFANA_CLOUD_NOTIFICATIONS) + return enabled_features diff --git a/engine/apps/api/views/live_setting.py b/engine/apps/api/views/live_setting.py index 2ed6d723..1718bd15 100644 --- a/engine/apps/api/views/live_setting.py +++ b/engine/apps/api/views/live_setting.py @@ -12,6 +12,7 @@ from apps.api.serializers.live_setting import LiveSettingSerializer from apps.auth_token.auth import PluginAuthentication from apps.base.models import LiveSetting from apps.base.utils import live_settings +from apps.oss_installation.tasks import sync_users_with_cloud from apps.slack.tasks import unpopulate_slack_user_identities from apps.telegram.client import TelegramClient from apps.telegram.tasks import register_telegram_webhook @@ -32,13 +33,19 @@ class LiveSettingViewSet(PublicPrimaryKeyMixin, viewsets.ModelViewSet): def get_queryset(self): LiveSetting.populate_settings_if_needed() - return LiveSetting.objects.filter(name__in=LiveSetting.AVAILABLE_NAMES).order_by("name") + queryset = LiveSetting.objects.filter(name__in=LiveSetting.AVAILABLE_NAMES).order_by("name") + search = self.request.query_params.get("search", None) + if search: + queryset = queryset.filter(name=search) + return queryset def perform_update(self, serializer): new_value = serializer.validated_data["value"] self._update_hook(new_value) - - super().perform_update(serializer) + instance = serializer.save() + sync_users = self.request.query_params.get("sync_users", "true") == "true" + if instance.name == "GRAFANA_CLOUD_ONCALL_TOKEN" and sync_users: + sync_users_with_cloud.apply_async() def perform_destroy(self, instance): new_value = instance.default_value @@ -66,6 +73,17 @@ class LiveSettingViewSet(PublicPrimaryKeyMixin, viewsets.ModelViewSet): if sti is not None: unpopulate_slack_user_identities.apply_async((sti.pk, True), countdown=0) + 
if instance.name == "GRAFANA_CLOUD_ONCALL_TOKEN": + from apps.oss_installation.models import CloudConnector + + try: + old_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN + except ImproperlyConfigured: + old_token = None + + if old_token != new_value: + CloudConnector.remove_sync() + def _reset_telegram_integration(self, new_token): # tell Telegram to cancel sending events from old bot with suppress(ImproperlyConfigured, error.InvalidToken, error.Unauthorized): diff --git a/engine/apps/api/views/user.py b/engine/apps/api/views/user.py index ee0a75de..e7d20a32 100644 --- a/engine/apps/api/views/user.py +++ b/engine/apps/api/views/user.py @@ -34,6 +34,7 @@ from apps.auth_token.models import UserScheduleExportAuthToken from apps.auth_token.models.mobile_app_auth_token import MobileAppAuthToken from apps.auth_token.models.mobile_app_verification_token import MobileAppVerificationToken from apps.base.messaging import get_messaging_backend_from_id +from apps.base.utils import live_settings from apps.telegram.client import TelegramClient from apps.telegram.models import TelegramVerificationCode from apps.twilioapp.phone_manager import PhoneManager @@ -56,7 +57,19 @@ class CurrentUserView(APIView): permission_classes = (IsAuthenticated,) def get(self, request): - serializer = UserSerializer(request.user, context={"request": self.request}) + context = {"request": self.request, "format": self.format_kwarg, "view": self} + + if settings.OSS_INSTALLATION and live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED: + from apps.oss_installation.models import CloudConnector, CloudUserIdentity + + connector = CloudConnector.objects.first() + if connector is not None: + cloud_identities = list(CloudUserIdentity.objects.filter(email__in=[request.user.email])) + cloud_identities = {cloud_identity.email: cloud_identity for cloud_identity in cloud_identities} + context["cloud_identities"] = cloud_identities + context["connector"] = connector + + serializer = UserSerializer(request.user, 
context=context) return Response(serializer.data) def put(self, request): @@ -179,6 +192,46 @@ class UserView( return queryset.order_by("id") + def list(self, request, *args, **kwargs): + queryset = self.filter_queryset(self.get_queryset()) + + page = self.paginate_queryset(queryset) + if page is not None: + context = {"request": self.request, "format": self.format_kwarg, "view": self} + if settings.OSS_INSTALLATION: + if live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED: + from apps.oss_installation.models import CloudConnector, CloudUserIdentity + + connector = CloudConnector.objects.first() + if connector is not None: + emails = list(queryset.values_list("email", flat=True)) + cloud_identities = list(CloudUserIdentity.objects.filter(email__in=emails)) + cloud_identities = {cloud_identity.email: cloud_identity for cloud_identity in cloud_identities} + context["cloud_identities"] = cloud_identities + context["connector"] = connector + serializer = self.get_serializer(page, many=True, context=context) + return self.get_paginated_response(serializer.data) + + serializer = self.get_serializer(queryset, many=True) + return Response(serializer.data) + + def retrieve(self, request, *args, **kwargs): + context = {"request": self.request, "format": self.format_kwarg, "view": self} + instance = self.get_object() + + if settings.OSS_INSTALLATION and live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED: + from apps.oss_installation.models import CloudConnector, CloudUserIdentity + + connector = CloudConnector.objects.first() + if connector is not None: + cloud_identities = list(CloudUserIdentity.objects.filter(email__in=[instance.email])) + cloud_identities = {cloud_identity.email: cloud_identity for cloud_identity in cloud_identities} + context["cloud_identities"] = cloud_identities + context["connector"] = connector + + serializer = self.get_serializer(instance, context=context) + return Response(serializer.data) + def current(self, request): serializer = 
UserSerializer(self.get_queryset().get(pk=self.request.user.pk)) return Response(serializer.data) diff --git a/engine/apps/auth_token/auth.py b/engine/apps/auth_token/auth.py index aa1a6251..be4a99f3 100644 --- a/engine/apps/auth_token/auth.py +++ b/engine/apps/auth_token/auth.py @@ -9,7 +9,6 @@ from rest_framework.authentication import BaseAuthentication, get_authorization_ from rest_framework.request import Request from apps.grafana_plugin.helpers.gcom import check_token -from apps.public_api import constants as public_api_constants from apps.user_management.models import User from apps.user_management.models.organization import Organization from common.constants.role import Role @@ -29,12 +28,6 @@ class ApiTokenAuthentication(BaseAuthentication): def authenticate(self, request): auth = get_authorization_header(request).decode("utf-8") - - if auth == public_api_constants.DEMO_AUTH_TOKEN: - user = User.objects.get(public_primary_key=public_api_constants.DEMO_USER_ID) - auth_token = user.auth_tokens.first() - return user, auth_token - user, auth_token = self.authenticate_credentials(auth) if user.role != Role.ADMIN: diff --git a/engine/apps/auth_token/migrations/0003_squashed_create_demo_token_instances.py b/engine/apps/auth_token/migrations/0003_squashed_create_demo_token_instances.py deleted file mode 100644 index 225e0fcb..00000000 --- a/engine/apps/auth_token/migrations/0003_squashed_create_demo_token_instances.py +++ /dev/null @@ -1,40 +0,0 @@ -# Generated by Django 3.2.5 on 2021-08-04 13:02 - -import sys -from django.db import migrations - -from apps.auth_token import constants -from apps.auth_token import crypto -from apps.public_api import constants as public_api_constants - - -def create_demo_token_instances(apps, schema_editor): - if not (len(sys.argv) > 1 and sys.argv[1] == 'test'): - User = apps.get_model('user_management', 'User') - Organization = apps.get_model('user_management', 'Organization') - ApiAuthToken = apps.get_model('auth_token', 
'ApiAuthToken') - - organization = Organization.objects.get(public_primary_key=public_api_constants.DEMO_ORGANIZATION_ID) - user = User.objects.get(public_primary_key=public_api_constants.DEMO_USER_ID) - - token_string = crypto.generate_token_string() - digest = crypto.hash_token_string(token_string) - - ApiAuthToken.objects.get_or_create( - name=public_api_constants.DEMO_AUTH_TOKEN, - user=user, - organization=organization, - defaults=dict(token_key=token_string[:constants.TOKEN_KEY_LENGTH], digest=digest) - ) - - -class Migration(migrations.Migration): - - dependencies = [ - ('auth_token', '0002_squashed_initial'), - ('user_management', '0002_squashed_create_demo_token_instances') - ] - - operations = [ - migrations.RunPython(create_demo_token_instances, migrations.RunPython.noop) - ] diff --git a/engine/apps/base/migrations/0003_squashed_create_demo_token_instances.py b/engine/apps/base/migrations/0003_squashed_create_demo_token_instances.py deleted file mode 100644 index a590210a..00000000 --- a/engine/apps/base/migrations/0003_squashed_create_demo_token_instances.py +++ /dev/null @@ -1,74 +0,0 @@ -# Generated by Django 3.2.5 on 2021-08-04 10:45 - -import sys -from django.db import migrations -from django.utils import timezone -from apps.public_api import constants as public_api_constants - - -STEP_WAIT = 0 -STEP_NOTIFY = 1 -NOTIFY_BY_SMS = 1 -NOTIFY_BY_PHONE = 2 -FIVE_MINUTES = timezone.timedelta(minutes=5) - - -def create_demo_token_instances(apps, schema_editor): - if not (len(sys.argv) > 1 and sys.argv[1] == 'test'): - User = apps.get_model('user_management', 'User') - UserNotificationPolicy = apps.get_model("base", "UserNotificationPolicy") - - user = User.objects.get(public_primary_key=public_api_constants.DEMO_USER_ID) - - UserNotificationPolicy.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1, - defaults=dict( - important=False, - user=user, - notify_by=NOTIFY_BY_SMS, - step=STEP_NOTIFY, - order=0, - ) - ) 
- UserNotificationPolicy.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_2, - defaults=dict( - important=False, - user=user, - step=STEP_WAIT, - wait_delay=FIVE_MINUTES, - order=1, - ) - ) - UserNotificationPolicy.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_3, - defaults=dict( - important=False, - user=user, - step=STEP_NOTIFY, - notify_by=NOTIFY_BY_PHONE, - order=2, - ) - ) - - UserNotificationPolicy.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_4, - defaults=dict( - important=True, - user=user, - notify_by=NOTIFY_BY_PHONE, - order=0, - ) - ) - - -class Migration(migrations.Migration): - - dependencies = [ - ('base', '0002_squashed_initial'), - ('user_management', '0002_squashed_create_demo_token_instances') - ] - - operations = [ - migrations.RunPython(create_demo_token_instances, migrations.RunPython.noop) - ] diff --git a/engine/apps/base/models/live_setting.py b/engine/apps/base/models/live_setting.py index c08ab11f..54a5299d 100644 --- a/engine/apps/base/models/live_setting.py +++ b/engine/apps/base/models/live_setting.py @@ -38,35 +38,45 @@ class LiveSetting(models.Model): "TWILIO_NUMBER", "TWILIO_VERIFY_SERVICE_SID", "TELEGRAM_TOKEN", + "TELEGRAM_WEBHOOK_HOST", "SLACK_CLIENT_OAUTH_ID", "SLACK_CLIENT_OAUTH_SECRET", "SLACK_SIGNING_SECRET", + "SLACK_INSTALL_RETURN_REDIRECT_HOST", "SEND_ANONYMOUS_USAGE_STATS", "GRAFANA_CLOUD_ONCALL_TOKEN", "GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED", + "GRAFANA_CLOUD_NOTIFICATIONS_ENABLED", ) DESCRIPTIONS = { "SLACK_SIGNING_SECRET": ( "Check this instruction for details how to set up Slack. " + "https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup" + "'>instruction for details how to set up Slack. " "Slack secrets can't be verified on the backend, please try installing the Slack Bot " - "after you update Slack credentials." + "after you update them." 
), "SLACK_CLIENT_OAUTH_SECRET": ( "Check this instruction for details how to set up Slack. " + "https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup" + "'>instruction for details how to set up Slack. " "Slack secrets can't be verified on the backend, please try installing the Slack Bot " - "after you update Slack credentials." + "after you update them." ), "SLACK_CLIENT_OAUTH_ID": ( "Check this instruction for details how to set up Slack. " + "https://grafana.com/docs/grafana-cloud/oncall/open-source/#slack-setup" + "'>instruction for details how to set up Slack. " "Slack secrets can't be verified on the backend, please try installing the Slack Bot " - "after you update Slack credentials." + "after you update them." + ), + "SLACK_INSTALL_RETURN_REDIRECT_HOST": ( + "Check instruction for details how to set up Slack. " + "Slack secrets can't be verified on the backend, please try installing the Slack Bot " + "after you update them." ), "TWILIO_ACCOUNT_SID": ( "Twilio username to allow amixr send sms and make phone calls, " @@ -99,13 +109,17 @@ class LiveSetting(models.Model): "TELEGRAM_TOKEN": ( "Secret token for Telegram bot, you can get one via " "BotFather." ), + "TELEGRAM_WEBHOOK_HOST": ( + "Externally available URL for Telegram to make requests. Please restart OnCall backend after after update." + ), "SEND_ANONYMOUS_USAGE_STATS": ( "Grafana OnCall will send anonymous, but uniquely-identifiable usage analytics to Grafana Labs." - " These statistics are sent to https://stats.grafana.org/. For more information on what's sent, look at" - "https://github.com/..." # TODO: add url to usage stats code + " These statistics are sent to https://stats.grafana.org/. For more information on what's sent, look at the " + " source code." 
), "GRAFANA_CLOUD_ONCALL_TOKEN": "Secret token for Grafana Cloud OnCall instance.", "GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED": "Enable hearbeat integration with Grafana Cloud OnCall.", + "GRAFANA_CLOUD_NOTIFICATIONS_ENABLED": "Enable SMS/call notifications via Grafana Cloud OnCall", } SECRET_SETTING_NAMES = ( @@ -171,4 +185,5 @@ class LiveSetting(models.Model): ) self.error = LiveSettingValidator(live_setting=self).get_error() + super().save(*args, **kwargs) diff --git a/engine/apps/base/models/user_notification_policy_log_record.py b/engine/apps/base/models/user_notification_policy_log_record.py index 93fd0820..15f86067 100644 --- a/engine/apps/base/models/user_notification_policy_log_record.py +++ b/engine/apps/base/models/user_notification_policy_log_record.py @@ -68,7 +68,8 @@ class UserNotificationPolicyLogRecord(models.Model): ERROR_NOTIFICATION_IN_SLACK_CHANNEL_IS_ARCHIVED, ERROR_NOTIFICATION_IN_SLACK_RATELIMIT, ERROR_NOTIFICATION_MESSAGING_BACKEND_ERROR, - ) = range(25) + ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE, + ) = range(26) # for this errors we want to send message to general log channel ERRORS_TO_SEND_IN_SLACK_CHANNEL = [ @@ -266,6 +267,10 @@ class UserNotificationPolicyLogRecord(models.Model): result += f"failed to notify {user_verbal} in Slack, because channel is archived" elif self.notification_error_code == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_IN_SLACK_RATELIMIT: result += f"failed to notify {user_verbal} in Slack due to Slack rate limit" + elif ( + self.notification_error_code == UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ALLOWED_USER_ROLE + ): + result += f"failed to notify {user_verbal}, not allowed role" else: # TODO: handle specific backend errors try: diff --git a/engine/apps/base/utils.py b/engine/apps/base/utils.py index 7342d00e..8339e295 100644 --- a/engine/apps/base/utils.py +++ b/engine/apps/base/utils.py @@ -94,6 +94,13 @@ class LiveSettingValidator: except Exception as e: return f"Telegram error: {str(e)}" + 
@classmethod + def _check_grafana_cloud_oncall_token(cls, grafana_oncall_token): + from apps.oss_installation.models import CloudConnector + + _, err = CloudConnector.sync_with_cloud(grafana_oncall_token) + return err + @staticmethod def _is_email_valid(email): return re.match(r"^[^@]+@[^@]+\.[^@]+$", email) diff --git a/engine/apps/grafana_plugin/tasks/sync.py b/engine/apps/grafana_plugin/tasks/sync.py index 2d6c37bd..5ee38fe2 100644 --- a/engine/apps/grafana_plugin/tasks/sync.py +++ b/engine/apps/grafana_plugin/tasks/sync.py @@ -6,7 +6,6 @@ from django.utils import timezone from apps.grafana_plugin.helpers import GcomAPIClient from apps.grafana_plugin.helpers.gcom import get_active_instance_ids -from apps.public_api.constants import DEMO_ORGANIZATION_ID from apps.user_management.models import Organization from apps.user_management.sync import sync_organization from common.custom_celery_tasks import shared_dedicated_queue_retry_task @@ -23,9 +22,7 @@ SYNC_PERIOD = timezone.timedelta(minutes=25) def start_sync_organizations(): sync_threshold = timezone.now() - SYNC_PERIOD - organization_qs = Organization.objects.exclude(public_primary_key=DEMO_ORGANIZATION_ID).filter( - last_time_synced__lte=sync_threshold - ) + organization_qs = Organization.objects.filter(last_time_synced__lte=sync_threshold) active_instance_ids, is_cloud_configured = get_active_instance_ids() if is_cloud_configured: diff --git a/engine/apps/integrations/metadata/configuration/amazon_sns.py b/engine/apps/integrations/metadata/configuration/amazon_sns.py deleted file mode 100644 index 954542d0..00000000 --- a/engine/apps/integrations/metadata/configuration/amazon_sns.py +++ /dev/null @@ -1,99 +0,0 @@ -# Main -enabled = True -title = "Amazon SNS" -slug = "amazon_sns" -short_description = None -is_displayed_on_web = True -description = None -is_featured = False -is_able_to_autoresolve = True -is_demo_alert_enabled = True - -description = None - -# Default templates -slack_title = """\ -{% if 
payload|length == 0 -%} -{% set title = payload.get("AlarmName", "Alert") %} -{%- else -%} -{% set title = "Alert" %} -{%- endif %} - -*<{{ grafana_oncall_link }}|#{{ grafana_oncall_incident_id }} {{ title }}>* via {{ integration_name }} -{% if source_link %} - (*<{{ source_link }}|source>*) -{%- endif %}""" - -slack_message = """\ -{% if payload|length == 1 and "message" in payload -%} -{{ payload.get("message", "Non-JSON payload received. Please make sure you publish monitoring Alarms to SNS, not logs: https://docs.amixr.io/#/integrations/amazon_sns") }} -{%- else -%} -*State* {{ payload.get("NewStateValue", "NO") }} -Region: {{ payload.get("Region", "Undefined") }} -_Description_: {{ payload.get("AlarmDescription", "Undefined") }} -{%- endif %} -""" - -slack_image_url = None - -web_title = """\ -{% if payload|length == 0 -%} -{{ payload.get("AlarmName", "Alert")}} -{%- else -%} -Alert -{%- endif %}""" - -web_message = """\ -{% if payload|length == 1 and "message" in payload -%} -{{ payload.get("message", "Non-JSON payload received. Please make sure you publish monitoring Alarms to SNS, not logs: https://docs.amixr.io/#/integrations/amazon_sns") }} -{%- else -%} -**State** {{ payload.get("NewStateValue", "NO") }} -Region: {{ payload.get("Region", "Undefined") }} -*Description*: {{ payload.get("AlarmDescription", "Undefined") }} -{%- endif %} -""" - -web_image_url = slack_image_url - -sms_title = web_title - -phone_call_title = web_title - -email_title = web_title - -email_message = "{{ payload|tojson_pretty }}" - -telegram_title = sms_title - -telegram_message = """\ -{% if payload|length == 1 and "message" in payload -%} -{{ payload.get("message", "Non-JSON payload received. 
Please make sure you publish monitoring Alarms to SNS, not logs: https://docs.amixr.io/#/integrations/amazon_sns") }} -{%- else -%} -State {{ payload.get("NewStateValue", "NO") }} -Region: {{ payload.get("Region", "Undefined") }} -Description: {{ payload.get("AlarmDescription", "Undefined") }} -{%- endif %} -""" - -telegram_image_url = slack_image_url - -source_link = """\ -{% if payload|length == 0 -%} -{% if payload.get("Trigger", {}).get("Namespace") == "AWS/ElasticBeanstalk" -%} -https://console.aws.amazon.com/elasticbeanstalk/home?region={{ payload.get("TopicArn").split(":")[3] }} -{%- else -%} -https://console.aws.amazon.com/cloudwatch//home?region={{ payload.get("TopicArn").split(":")[3] }} -{%- endif %} -{%- endif %}""" - -grouping_id = web_title - -resolve_condition = """\ -{{ payload.get("NewStateValue", "") == "OK" }} -""" - -acknowledge_condition = None - -group_verbose_name = web_title - -example_payload = {"foo": "bar"} diff --git a/engine/apps/integrations/tests/test_ratelimit.py b/engine/apps/integrations/tests/test_ratelimit.py index a713d1ab..75e3d903 100644 --- a/engine/apps/integrations/tests/test_ratelimit.py +++ b/engine/apps/integrations/tests/test_ratelimit.py @@ -43,7 +43,6 @@ def test_ratelimit_alerts_per_integration( assert mocked_task.call_count == 1 -@pytest.mark.skip(reason="SQLITE Incompatibility") @mock.patch("ratelimit.utils._split_rate", return_value=(1, 60)) @mock.patch("apps.integrations.tasks.create_alert.apply_async", return_value=None) @pytest.mark.django_db @@ -55,10 +54,16 @@ def test_ratelimit_alerts_per_team( ): organization = make_organization() integration_1 = make_alert_receive_channel(organization, integration=AlertReceiveChannel.INTEGRATION_WEBHOOK) - url_1 = reverse("integrations:webhook", kwargs={"alert_channel_key": integration_1.token}) + url_1 = reverse( + "integrations:universal", + kwargs={"integration_type": AlertReceiveChannel.INTEGRATION_WEBHOOK, "alert_channel_key": integration_1.token}, + ) integration_2 = 
make_alert_receive_channel(organization, integration=AlertReceiveChannel.INTEGRATION_WEBHOOK) - url_2 = reverse("integrations:webhook", kwargs={"alert_channel_key": integration_2.token}) + url_2 = reverse( + "integrations:universal", + kwargs={"integration_type": AlertReceiveChannel.INTEGRATION_WEBHOOK, "alert_channel_key": integration_2.token}, + ) c = Client() @@ -71,7 +76,6 @@ def test_ratelimit_alerts_per_team( assert mocked_task.call_count == 1 -@pytest.mark.skip(reason="SQLITE Incompatibility") @mock.patch("ratelimit.utils._split_rate", return_value=(1, 60)) @mock.patch("apps.heartbeat.tasks.process_heartbeat_task.apply_async", return_value=None) @pytest.mark.django_db diff --git a/engine/apps/oss_installation/cloud_heartbeat.py b/engine/apps/oss_installation/cloud_heartbeat.py new file mode 100644 index 00000000..8d445e83 --- /dev/null +++ b/engine/apps/oss_installation/cloud_heartbeat.py @@ -0,0 +1,110 @@ +import logging +import random +from urllib.parse import urljoin + +import requests +from django.apps import apps +from django.conf import settings +from rest_framework import status + +from apps.base.utils import live_settings + +logger = logging.getLogger(__name__) + + +def setup_heartbeat_integration(name=None): + """Setup Grafana Cloud OnCall heartbeat integration.""" + CloudHeartbeat = apps.get_model("oss_installation", "CloudHeartbeat") + + cloud_heartbeat = None + api_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN + if not live_settings.GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED or not api_token: + return cloud_heartbeat + # don't specify a team in the data, so heartbeat integration will be created in the General. 
+ name = name or f"OnCall Cloud Heartbeat {settings.BASE_URL}" + data = {"type": "formatted_webhook", "name": name} + url = urljoin(settings.GRAFANA_CLOUD_ONCALL_API_URL, "/api/v1/integrations/") + try: + headers = {"Authorization": api_token} + r = requests.post(url=url, data=data, headers=headers, timeout=5) + if r.status_code == status.HTTP_201_CREATED: + response_data = r.json() + cloud_heartbeat, _ = CloudHeartbeat.objects.update_or_create( + defaults={"integration_id": response_data["id"], "integration_url": response_data["heartbeat"]["link"]} + ) + if r.status_code == status.HTTP_400_BAD_REQUEST: + response_data = r.json() + error = response_data["detail"] + if error == "Integration with this name already exists": + response = requests.get(url=f"{url}?name={name}", headers=headers) + integrations = response.json().get("results", []) + if len(integrations) == 1: + integration = integrations[0] + cloud_heartbeat, _ = CloudHeartbeat.objects.update_or_create( + defaults={ + "integration_id": integration["id"], + "integration_url": integration["heartbeat"]["link"], + } + ) + else: + setup_heartbeat_integration(f"{name} { random.randint(1, 1024)}") + except requests.Timeout: + logger.warning("Unable to create cloud heartbeat integration. Request timeout.") + except requests.exceptions.RequestException as e: + logger.warning(f"Unable to create cloud heartbeat integration. Request exception {str(e)}.") + return cloud_heartbeat + + +def send_cloud_heartbeat(): + CloudHeartbeat = apps.get_model("oss_installation", "CloudHeartbeat") + CloudConnector = apps.get_model("oss_installation", "CloudConnector") + """Send heartbeat to Grafana Cloud OnCall integration.""" + if not live_settings.GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED or not live_settings.GRAFANA_CLOUD_ONCALL_TOKEN: + logger.info( + "Unable to send cloud heartbeat. Check values for GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED and GRAFANA_CLOUD_ONCALL_TOKEN." 
+ ) + return + connector = CloudConnector.objects.first() + if connector is None: + logger.info("Unable to send cloud heartbeat. Cloud is not connected") + return + logger.info("Start send cloud heartbeat") + try: + cloud_heartbeat = CloudHeartbeat.objects.get() + except CloudHeartbeat.DoesNotExist: + cloud_heartbeat = setup_heartbeat_integration() + + if cloud_heartbeat is None: + logger.warning("Unable to setup cloud heartbeat integration.") + return + cloud_heartbeat.success = False + try: + response = requests.get(cloud_heartbeat.integration_url, timeout=5) + logger.info(f"Send cloud heartbeat with response {response.status_code}") + except requests.Timeout: + logger.warning("Unable to send cloud heartbeat. Request timeout.") + except requests.exceptions.RequestException as e: + logger.warning(f"Unable to send cloud heartbeat. Request exception {str(e)}.") + else: + if response.status_code == status.HTTP_200_OK: + cloud_heartbeat.success = True + logger.info("Successfully send cloud heartbeat") + elif response.status_code == status.HTTP_403_FORBIDDEN: + # check for 403 because AlertChannelDefiningMixin returns 403 if no integration was found. + logger.info("Failed to send cloud heartbeat. Integration was not created yet") + # force re-creation on next run + cloud_heartbeat.delete() + else: + logger.info(f"Failed to send cloud heartbeat. 
response {response.status_code}") + # save result of cloud heartbeat if it wasn't deleted + if cloud_heartbeat.pk is not None: + cloud_heartbeat.save() + logger.info("Finish send cloud heartbeat") + + +def get_heartbeat_link(connector, heartbeat): + if connector is None: + return None + if heartbeat is None: + return None + return urljoin(connector.cloud_url, f"a/grafana-oncall-app/?page=integrations&id={heartbeat.integration_id}") diff --git a/engine/apps/oss_installation/constants.py b/engine/apps/oss_installation/constants.py new file mode 100644 index 00000000..11f3dc48 --- /dev/null +++ b/engine/apps/oss_installation/constants.py @@ -0,0 +1,4 @@ +CLOUD_NOT_SYNCED = 0 +CLOUD_SYNCED_USER_NOT_FOUND = 1 +CLOUD_SYNCED_PHONE_NOT_VERIFIED = 2 +CLOUD_SYNCED_PHONE_VERIFIED = 3 diff --git a/engine/apps/oss_installation/migrations/0001_squashed_initial.py b/engine/apps/oss_installation/migrations/0001_squashed_initial.py index dac55f47..b1a34cbd 100644 --- a/engine/apps/oss_installation/migrations/0001_squashed_initial.py +++ b/engine/apps/oss_installation/migrations/0001_squashed_initial.py @@ -30,4 +30,20 @@ class Migration(migrations.Migration): ('report_sent_at', models.DateTimeField(default=None, null=True)), ], ), + migrations.CreateModel( + name='CloudConnector', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('cloud_url', models.URLField()), + ], + ), + migrations.CreateModel( + name='CloudUserIdentity', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('phone_number_verified', models.BooleanField(default=False)), + ('cloud_id', models.CharField(max_length=20)), + ('email', models.EmailField(max_length=254)), + ], + ), ] diff --git a/engine/apps/oss_installation/models/__init__.py b/engine/apps/oss_installation/models/__init__.py index 53dea35e..beab1774 100644 --- a/engine/apps/oss_installation/models/__init__.py +++ 
b/engine/apps/oss_installation/models/__init__.py @@ -1,2 +1,4 @@ -from .heartbeat import CloudHeartbeat # noqa: F401 +from .cloud_connector import CloudConnector # noqa: F401 +from .cloud_heartbeat import CloudHeartbeat # noqa: F401 +from .cloud_user_identity import CloudUserIdentity # noqa: F401 from .oss_installation import OssInstallation # noqa: F401 diff --git a/engine/apps/oss_installation/models/cloud_connector.py b/engine/apps/oss_installation/models/cloud_connector.py new file mode 100644 index 00000000..38541bf5 --- /dev/null +++ b/engine/apps/oss_installation/models/cloud_connector.py @@ -0,0 +1,155 @@ +import logging +from urllib.parse import urljoin + +import requests +from django.db import models, transaction + +from apps.base.utils import live_settings +from apps.oss_installation.models.cloud_user_identity import CloudUserIdentity +from apps.user_management.models import User +from common.constants.role import Role +from settings.base import GRAFANA_CLOUD_ONCALL_API_URL + +logger = logging.getLogger(__name__) + + +class CloudConnector(models.Model): + """ + CloudOrganizationConnector model represents connection between oss organization and cloud organization. + """ + + cloud_url = models.URLField() + + @classmethod + def sync_with_cloud(cls, token=None): + """ + sync_with_cloud sync organization with cloud organization defined by provided GRAFANA_CLOUD_ONCALL_TOKEN. + """ + sync_status = False + error_msg = None + + api_token = token or live_settings.GRAFANA_CLOUD_ONCALL_TOKEN + if api_token is None: + logger.warning("Unable to sync with cloud. 
GRAFANA_CLOUD_ONCALL_TOKEN is not set") + error_msg = "GRAFANA_CLOUD_ONCALL_TOKEN is not set" + else: + info_url = urljoin(GRAFANA_CLOUD_ONCALL_API_URL, "api/v1/info/") + try: + r = requests.get(info_url, headers={"AUTHORIZATION": api_token}, timeout=5) + if r.status_code == 200: + connector, _ = cls.objects.get_or_create() + connector.cloud_url = r.json()["url"] + connector.save() + elif r.status_code == 403: + logger.warning("Unable to sync with cloud. GRAFANA_CLOUD_ONCALL_TOKEN is invalid") + error_msg = "Invalid token" + else: + error_msg = f"Non-200 HTTP code. Got {r.status_code}" + except requests.exceptions.RequestException as e: + logger.warning(f"Unable to sync with cloud. Request exception {str(e)}") + error_msg = f"Unable to sync with cloud" + + return sync_status, error_msg + + def sync_users_with_cloud(self) -> tuple[bool, str]: + sync_status = False + error_msg = None + + api_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN + if api_token is None: + logger.warning("Unable to sync with cloud. GRAFANA_CLOUD_ONCALL_TOKEN is not set") + error_msg = "GRAFANA_CLOUD_ONCALL_TOKEN is not set" + + existing_emails = list(User.objects.filter(role__in=(Role.ADMIN, Role.EDITOR)).values_list("email", flat=True)) + matching_users = [] + users_url = urljoin(GRAFANA_CLOUD_ONCALL_API_URL, "api/v1/users") + + fetch_next_page = True + users_fetched = True + page = 1 + while fetch_next_page: + try: + url = urljoin(users_url, f"?page={page}&?short=true") + r = requests.get(url, headers={"AUTHORIZATION": api_token}, timeout=5) + if r.status_code != 200: + logger.warning( + f"Unable to fetch page {page} while sync_users_with_cloud. Response status code {r.status_code}" + ) + error_msg = f"Non-200 HTTP code. 
Got {r.status_code}" + users_fetched = False + break + data = r.json() + matching_users.extend(list(filter(lambda u: (u["email"] in existing_emails), data["results"]))) + page += 1 + if data["next"] is None: + fetch_next_page = False + except requests.exceptions.RequestException as e: + logger.warning(f"Unable to sync users with cloud. Request exception {str(e)}") + error_msg = f"Unable to sync with cloud" + users_fetched = False + break + + if users_fetched: + with transaction.atomic(): + cloud_users_identities_to_create = [] + for user in matching_users: + cloud_users_identities_to_create.append( + CloudUserIdentity( + cloud_id=user["id"], + email=user["email"], + phone_number_verified=user["is_phone_number_verified"], + ) + ) + + CloudUserIdentity.objects.all().delete() + CloudUserIdentity.objects.bulk_create(cloud_users_identities_to_create, batch_size=1000) + sync_status = True + return sync_status, error_msg + + def sync_user_with_cloud(self, user): + sync_status = False + error_msg = None + + api_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN + if api_token is None: + logger.warning(f"Unable to sync_user_with cloud user_id {user.id}. GRAFANA_CLOUD_ONCALL_TOKEN is not set") + error_msg = "GRAFANA_CLOUD_ONCALL_TOKEN is not set" + else: + url = urljoin(GRAFANA_CLOUD_ONCALL_API_URL, f"api/v1/users/?email={user.email}") + try: + r = requests.get(url, headers={"AUTHORIZATION": api_token}, timeout=5) + if r.status_code != 200: + logger.warning( + f"Unable to sync_user_with_cloud user_id {user.id}. Response status code {r.status_code}" + ) + error_msg = f"Non-200 HTTP code. 
Got {r.status_code}" + else: + data = r.json() + if len(data["results"]) != 0: + cloud_used_data = data["results"][0] + with transaction.atomic(): + CloudUserIdentity.objects.filter(email=user.email).delete() + CloudUserIdentity.objects.create( + email=user.email, + phone_number_verified=cloud_used_data["is_phone_number_verified"], + cloud_id=cloud_used_data["id"], + ) + sync_status = True + else: + logger.warning( + f"Unable to sync_user_with_cloud user_id {user.id}. User with {user.email} not found" + ) + error_msg = f"User with email not found {user.email}" + except requests.exceptions.RequestException as e: + logger.warning(f"Unable to sync_user_with cloud user_id {user.id}. Request exception {str(e)}") + error_msg = f"Unable to sync with cloud" + + return sync_status, error_msg + + @classmethod + def remove_sync(cls): + from apps.oss_installation.models import CloudHeartbeat + + cls.objects.all().delete() + CloudUserIdentity.objects.all().delete() + CloudHeartbeat.objects.all().delete() diff --git a/engine/apps/oss_installation/models/heartbeat.py b/engine/apps/oss_installation/models/cloud_heartbeat.py similarity index 100% rename from engine/apps/oss_installation/models/heartbeat.py rename to engine/apps/oss_installation/models/cloud_heartbeat.py diff --git a/engine/apps/oss_installation/models/cloud_user_identity.py b/engine/apps/oss_installation/models/cloud_user_identity.py new file mode 100644 index 00000000..ec83ac2f --- /dev/null +++ b/engine/apps/oss_installation/models/cloud_user_identity.py @@ -0,0 +1,7 @@ +from django.db import models + + +class CloudUserIdentity(models.Model): + phone_number_verified = models.BooleanField(default=False) + cloud_id = models.CharField(max_length=20) + email = models.EmailField() diff --git a/engine/apps/oss_installation/models/oss_installation.py b/engine/apps/oss_installation/models/oss_installation.py index 9e4dd3dd..2e553fcf 100644 --- a/engine/apps/oss_installation/models/oss_installation.py +++ 
b/engine/apps/oss_installation/models/oss_installation.py @@ -1,9 +1,16 @@ +import logging import uuid from django.db import models +logger = logging.getLogger(__name__) + class OssInstallation(models.Model): + """ + OssInstallation is model to track installation of OSS OnCall version. + """ + installation_id = models.UUIDField(default=uuid.uuid4, editable=False) created_at = models.DateTimeField(auto_now=True) report_sent_at = models.DateTimeField(null=True, default=None) diff --git a/engine/apps/oss_installation/serializers/__init__.py b/engine/apps/oss_installation/serializers/__init__.py new file mode 100644 index 00000000..991cf99b --- /dev/null +++ b/engine/apps/oss_installation/serializers/__init__.py @@ -0,0 +1 @@ +from .cloud_user import CloudUserSerializer # noqa: F401 diff --git a/engine/apps/oss_installation/serializers/cloud_user.py b/engine/apps/oss_installation/serializers/cloud_user.py new file mode 100644 index 00000000..53ccd808 --- /dev/null +++ b/engine/apps/oss_installation/serializers/cloud_user.py @@ -0,0 +1,20 @@ +from rest_framework import serializers + +from apps.oss_installation.models import CloudConnector, CloudUserIdentity +from apps.oss_installation.utils import cloud_user_identity_status +from apps.user_management.models import User + + +class CloudUserSerializer(serializers.ModelSerializer): + cloud_data = serializers.SerializerMethodField() + + class Meta: + model = User + fields = ["cloud_data"] + + def get_cloud_data(self, obj): + connector = CloudConnector.objects.filter().first() + cloud_user_identity = CloudUserIdentity.objects.filter(email=obj.email).first() + status, link = cloud_user_identity_status(connector, cloud_user_identity) + cloud_data = {"status": status, "link": link} + return cloud_data diff --git a/engine/apps/oss_installation/tasks.py b/engine/apps/oss_installation/tasks.py index 2c11a54a..56e3678a 100644 --- a/engine/apps/oss_installation/tasks.py +++ b/engine/apps/oss_installation/tasks.py @@ -1,13 +1,9 @@ 
-from urllib.parse import urljoin - -import requests from celery.utils.log import get_task_logger -from django.conf import settings +from django.apps import apps from django.utils import timezone -from rest_framework import status from apps.base.utils import live_settings -from apps.oss_installation.models import CloudHeartbeat, OssInstallation +from apps.oss_installation.cloud_heartbeat import send_cloud_heartbeat from apps.oss_installation.usage_stats import UsageStatsService from common.custom_celery_tasks import shared_dedicated_queue_retry_task @@ -17,6 +13,8 @@ logger = get_task_logger(__name__) @shared_dedicated_queue_retry_task() def send_usage_stats_report(): logger.info("Start send_usage_stats_report") + OssInstallation = apps.get_model("oss_installation", "OssInstallation") + installation = OssInstallation.objects.get_or_create()[0] enabled = live_settings.SEND_ANONYMOUS_USAGE_STATS if enabled: @@ -30,66 +28,24 @@ def send_usage_stats_report(): logger.info("Finish send_usage_stats_report") -def _setup_heartbeat_integration(): - """Setup Grafana Cloud OnCall heartbeat integration.""" - cloud_heartbeat = None - api_token = live_settings.GRAFANA_CLOUD_ONCALL_TOKEN - # don't specify a team in the data, so heartbeat integration will be created in the General. - data = {"type": "formatted_webhook", "name": f"OnCall {settings.BASE_URL}"} - url = urljoin(settings.GRAFANA_CLOUD_ONCALL_API_URL, "/api/v1/integrations/") - try: - headers = {"Authorization": api_token} - r = requests.post(url=url, data=data, headers=headers, timeout=5) - if r.status_code == status.HTTP_201_CREATED: - response_data = r.json() - cloud_heartbeat, _ = CloudHeartbeat.objects.update_or_create( - defaults={"integration_id": response_data["id"], "integration_url": response_data["heartbeat"]["link"]} - ) - except requests.Timeout: - logger.warning("Unable to create cloud heartbeat integration. 
Request timeout.") - except requests.exceptions.RequestException as e: - logger.warning(f"Unable to create cloud heartbeat integration. Request exception {str(e)}.") - return cloud_heartbeat +@shared_dedicated_queue_retry_task() +def send_cloud_heartbeat_task(): + send_cloud_heartbeat() @shared_dedicated_queue_retry_task() -def send_cloud_heartbeat(): - """Send heartbeat to Grafana Cloud OnCall integration.""" - if not live_settings.GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED or not live_settings.GRAFANA_CLOUD_ONCALL_TOKEN: - logger.info( - "Unable to send cloud heartbeat. Check values for GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED and GRAFANA_CLOUD_ONCALL_TOKEN." - ) - return - - logger.info("Start send cloud heartbeat") - try: - cloud_heartbeat = CloudHeartbeat.objects.get() - except CloudHeartbeat.DoesNotExist: - cloud_heartbeat = _setup_heartbeat_integration() - - if cloud_heartbeat is None: - logger.warning("Unable to setup cloud heartbeat integration.") - return - cloud_heartbeat.success = False - try: - response = requests.get(cloud_heartbeat.integration_url, timeout=5) - logger.info(f"Send cloud heartbeat with response {response.status_code}") - except requests.Timeout: - logger.warning("Unable to send cloud heartbeat. Request timeout.") - except requests.exceptions.RequestException as e: - logger.warning(f"Unable to send cloud heartbeat. Request exception {str(e)}.") - else: - if response.status_code == status.HTTP_200_OK: - cloud_heartbeat.success = True - logger.info("Successfully send cloud heartbeat") - elif response.status_code == status.HTTP_403_FORBIDDEN: - # check for 403 because AlertChannelDefiningMixin returns 403 if no integration was found. - logger.info("Failed to send cloud heartbeat. 
Integration was not created yet") - # force re-creation on next run - cloud_heartbeat.delete() +def sync_users_with_cloud(): + CloudConnector = apps.get_model("oss_installation", "CloudConnector") + logger.info("Start sync_users_with_cloud") + if live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED: + connector = CloudConnector.objects.first() + if connector is not None: + status, error = connector.sync_users_with_cloud() + log_message = "Users synced. Status {status}." + if error: + log_message += f" Error {error}" + logger.info(log_message) else: - logger.info(f"Failed to send cloud heartbeat. response {response.status_code}") - # save result of cloud heartbeat if it wasn't deleted - if cloud_heartbeat.pk is not None: - cloud_heartbeat.save() - logger.info("Finish send cloud heartbeat") + logger.info("Grafana Cloud is not connected") + else: + logger.info("GRAFANA_CLOUD_NOTIFICATIONS_ENABLED is not enabled") diff --git a/engine/apps/oss_installation/urls.py b/engine/apps/oss_installation/urls.py index 956ffe74..ddf04020 100644 --- a/engine/apps/oss_installation/urls.py +++ b/engine/apps/oss_installation/urls.py @@ -1,7 +1,15 @@ -from common.api_helpers.optional_slash_router import optional_slash_path +from django.urls import include, path -from .views import CloudHeartbeatStatusView +from common.api_helpers.optional_slash_router import OptionalSlashRouter, optional_slash_path + +from .views import CloudConnectionView, CloudHeartbeatView, CloudUsersView, CloudUserView + +router = OptionalSlashRouter() +router.register("cloud_users", CloudUserView, basename="cloud-users") urlpatterns = [ - optional_slash_path("cloud_heartbeat_status", CloudHeartbeatStatusView.as_view(), name="cloud_heartbeat_status"), + path("", include(router.urls)), + optional_slash_path("cloud_users", CloudUsersView.as_view(), name="cloud-users-list"), + optional_slash_path("cloud_connection", CloudConnectionView.as_view(), name="cloud-connection-status"), + optional_slash_path("cloud_heartbeat", 
CloudHeartbeatView.as_view(), name="cloud-heartbeat"), ] diff --git a/engine/apps/oss_installation/usage_stats.py b/engine/apps/oss_installation/usage_stats.py index db90cce8..b3a1bd43 100644 --- a/engine/apps/oss_installation/usage_stats.py +++ b/engine/apps/oss_installation/usage_stats.py @@ -3,11 +3,11 @@ import platform from dataclasses import asdict, dataclass import requests +from django.apps import apps from django.conf import settings from django.db.models import Sum from apps.alerts.models import AlertGroupCounter -from apps.oss_installation.models import OssInstallation from apps.oss_installation.utils import active_oss_users_count USAGE_STATS_URL = "https://stats.grafana.org/oncall-usage-report" @@ -27,9 +27,12 @@ class UsageStatsReport: class UsageStatsService: def get_usage_stats_report(self): + OssInstallation = apps.get_model("oss_installation", "OssInstallation") metrics = {} metrics["active_users_count"] = active_oss_users_count() - total_alert_groups = AlertGroupCounter.objects.aggregate(Sum("value")).get("value__sum", 0) + total_alert_groups = AlertGroupCounter.objects.aggregate(Sum("value")).get("value__sum", None) + if total_alert_groups is None: + total_alert_groups = 0 metrics["alert_groups_count"] = total_alert_groups usage_stats_id = OssInstallation.objects.get_or_create()[0].installation_id diff --git a/engine/apps/oss_installation/utils.py b/engine/apps/oss_installation/utils.py index fcfb537c..4aad084a 100644 --- a/engine/apps/oss_installation/utils.py +++ b/engine/apps/oss_installation/utils.py @@ -1,19 +1,23 @@ -from contextlib import suppress +import logging +from urllib.parse import urljoin +from django.apps import apps from django.utils import timezone -from apps.alerts.models import AlertGroupLogRecord, EscalationPolicy -from apps.base.models import UserNotificationPolicyLogRecord -from apps.public_api.constants import DEMO_USER_ID +from apps.oss_installation import constants as oss_constants from apps.schedules.ical_utils import 
list_users_to_notify_from_ical_for_period -from apps.schedules.models import OnCallSchedule -from apps.user_management.models import User + +logger = logging.getLogger(__name__) def active_oss_users_count(): """ active_oss_users_count returns count of active users of oss installation. """ + OnCallSchedule = apps.get_model("schedules", "OnCallSchedule") + AlertGroupLogRecord = apps.get_model("alerts", "AlertGroupLogRecord") + EscalationPolicy = apps.get_model("alerts", "EscalationPolicy") + UserNotificationPolicyLogRecord = apps.get_model("base", "UserNotificationPolicyLogRecord") # Take logs for previous 24 hours start = timezone.now() - timezone.timedelta(hours=24) @@ -62,9 +66,21 @@ def active_oss_users_count(): for user in users_from_schedule: unique_active_users.add(user.pk) - # Remove demo user from active users - with suppress(User.DoesNotExist): - demo_user = User.objects.get(public_primary_key=DEMO_USER_ID) - with suppress(KeyError): - unique_active_users.remove(demo_user.pk) return len(unique_active_users) + + +def cloud_user_identity_status(connector, identity): + link = None + if connector is None: + status = oss_constants.CLOUD_NOT_SYNCED + elif identity is None: + status = oss_constants.CLOUD_SYNCED_USER_NOT_FOUND + link = connector.cloud_url + else: + if identity.phone_number_verified: + status = oss_constants.CLOUD_SYNCED_PHONE_VERIFIED + else: + status = oss_constants.CLOUD_SYNCED_PHONE_NOT_VERIFIED + + link = urljoin(connector.cloud_url, f"a/grafana-oncall-app/?page=users&p=1&id={identity.cloud_id}") + return status, link diff --git a/engine/apps/oss_installation/views/__init__.py b/engine/apps/oss_installation/views/__init__.py index 0716482b..b3c50ba3 100644 --- a/engine/apps/oss_installation/views/__init__.py +++ b/engine/apps/oss_installation/views/__init__.py @@ -1 +1,3 @@ -from .cloud_heartbeat_status import CloudHeartbeatStatusView # noqa: F401 +from .cloud_connection import CloudConnectionView # noqa: F401 +from .cloud_heartbeat import 
CloudHeartbeatView # noqa: F401 +from .cloud_users import CloudUsersView, CloudUserView # noqa: F401 diff --git a/engine/apps/oss_installation/views/cloud_connection.py b/engine/apps/oss_installation/views/cloud_connection.py new file mode 100644 index 00000000..21b6624c --- /dev/null +++ b/engine/apps/oss_installation/views/cloud_connection.py @@ -0,0 +1,39 @@ +from rest_framework import status +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response +from rest_framework.views import APIView + +from apps.api.permissions import IsAdmin +from apps.auth_token.auth import PluginAuthentication +from apps.base.models import LiveSetting +from apps.base.utils import live_settings +from apps.oss_installation.cloud_heartbeat import get_heartbeat_link +from apps.oss_installation.models import CloudConnector, CloudHeartbeat + + +class CloudConnectionView(APIView): + authentication_classes = (PluginAuthentication,) + permission_classes = (IsAuthenticated, IsAdmin) + + def get(self, request): + connector = CloudConnector.objects.first() + heartbeat = CloudHeartbeat.objects.first() + response = { + "cloud_connection_status": connector is not None, + "cloud_notifications_enabled": live_settings.GRAFANA_CLOUD_NOTIFICATIONS_ENABLED, + "cloud_heartbeat_enabled": live_settings.GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED, + "cloud_heartbeat_link": get_heartbeat_link(connector, heartbeat), + "cloud_heartbeat_status": heartbeat is not None and heartbeat.success, + } + return Response(response) + + def delete(self, request): + s = LiveSetting.objects.filter(name="GRAFANA_CLOUD_ONCALL_TOKEN").first() + if s is not None: + s.value = None + s.save() + connector = CloudConnector.objects.first() + if connector is None: + return Response(status=status.HTTP_404_NOT_FOUND) + connector.remove_sync() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/engine/apps/oss_installation/views/cloud_heartbeat.py 
b/engine/apps/oss_installation/views/cloud_heartbeat.py new file mode 100644 index 00000000..932087c3 --- /dev/null +++ b/engine/apps/oss_installation/views/cloud_heartbeat.py @@ -0,0 +1,27 @@ +from rest_framework import status +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response +from rest_framework.views import APIView + +from apps.api.permissions import IsAdmin +from apps.auth_token.auth import PluginAuthentication +from apps.oss_installation.cloud_heartbeat import get_heartbeat_link, setup_heartbeat_integration +from apps.oss_installation.models import CloudConnector, CloudHeartbeat + + +class CloudHeartbeatView(APIView): + authentication_classes = (PluginAuthentication,) + permission_classes = (IsAuthenticated, IsAdmin) + + def post(self, request): + connector = CloudConnector.objects.first() + if connector is not None: + try: + CloudHeartbeat.objects.get() + return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Cloud heartbeat already exists"}) + except CloudHeartbeat.DoesNotExist: + heartbeat = setup_heartbeat_integration() + link = get_heartbeat_link(connector, heartbeat) + return Response(status=status.HTTP_200_OK, data={"link": link}) + else: + return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Grafana Cloud is not connected"}) diff --git a/engine/apps/oss_installation/views/cloud_heartbeat_status.py b/engine/apps/oss_installation/views/cloud_heartbeat_status.py deleted file mode 100644 index be553641..00000000 --- a/engine/apps/oss_installation/views/cloud_heartbeat_status.py +++ /dev/null @@ -1,15 +0,0 @@ -from rest_framework.permissions import IsAuthenticated -from rest_framework.response import Response -from rest_framework.views import APIView - -from apps.auth_token.auth import PluginAuthentication -from apps.oss_installation.models import CloudHeartbeat - - -class CloudHeartbeatStatusView(APIView): - authentication_classes = (PluginAuthentication,) - 
permission_classes = (IsAuthenticated,) - - def get(self, request): - response = {"status": CloudHeartbeat.status()} - return Response(response) diff --git a/engine/apps/oss_installation/views/cloud_users.py b/engine/apps/oss_installation/views/cloud_users.py new file mode 100644 index 00000000..3eb7685b --- /dev/null +++ b/engine/apps/oss_installation/views/cloud_users.py @@ -0,0 +1,107 @@ +from collections import OrderedDict + +from rest_framework import mixins, status, viewsets +from rest_framework.decorators import action +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response +from rest_framework.views import APIView + +from apps.api.permissions import ActionPermission, AnyRole, IsAdmin, IsOwnerOrAdmin +from apps.auth_token.auth import PluginAuthentication +from apps.oss_installation.models import CloudConnector, CloudUserIdentity +from apps.oss_installation.serializers import CloudUserSerializer +from apps.oss_installation.utils import cloud_user_identity_status +from apps.user_management.models import User +from common.api_helpers.mixins import PublicPrimaryKeyMixin +from common.api_helpers.paginators import HundredPageSizePaginator +from common.constants.role import Role + + +class CloudUsersView(HundredPageSizePaginator, APIView): + authentication_classes = (PluginAuthentication,) + permission_classes = (IsAuthenticated, IsAdmin) + + def get(self, request): + organization = request.user.organization + + queryset = User.objects.filter(organization=organization, role__in=[Role.ADMIN, Role.EDITOR]) + + if request.user.current_team is not None: + queryset = queryset.filter(teams=request.user.current_team).distinct() + emails = list(queryset.values_list("email", flat=True)) + + results = self.paginate_queryset(queryset, request, view=self) + + cloud_identities = list(CloudUserIdentity.objects.filter(email__in=emails)) + cloud_identities = {cloud_identity.email: cloud_identity for cloud_identity in cloud_identities} 
+ + response = [] + + connector = CloudConnector.objects.first() + + for user in results: + cloud_identity = cloud_identities.get(user.email, None) + status, link = cloud_user_identity_status(connector, cloud_identity) + response.append( + { + "id": user.public_primary_key, + "email": user.email, + "username": user.username, + "cloud_data": {"status": status, "link": link}, + } + ) + + return self.get_paginated_response_with_matched_users_count(response, len(cloud_identities)) + + def get_paginated_response_with_matched_users_count(self, data, matched_users_count): + return Response( + OrderedDict( + [ + ("count", self.page.paginator.count), + ("matched_users_count", matched_users_count), + ("next", self.get_next_link()), + ("previous", self.get_previous_link()), + ("results", data), + ] + ) + ) + + def post(self, request): + connector = CloudConnector.objects.first() + if connector is not None: + sync_status, err = connector.sync_users_with_cloud() + return Response(status=status.HTTP_200_OK, data={"status": sync_status, "error": err}) + else: + return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Grafana Cloud is not connected"}) + + +class CloudUserView( + PublicPrimaryKeyMixin, + mixins.RetrieveModelMixin, + viewsets.GenericViewSet, +): + authentication_classes = (PluginAuthentication,) + permission_classes = (IsAuthenticated, ActionPermission) + + action_permissions = { + AnyRole: ("retrieve",), + IsAdmin: ("sync",), + } + action_object_permissions = { + IsOwnerOrAdmin: ("retrieve", "sync"), + } + serializer_class = CloudUserSerializer + + def get_queryset(self): + queryset = User.objects.filter(organization=self.request.user.organization) + return queryset + + @action(detail=True, methods=["post"]) + def sync(self, request, pk): + user = self.get_object() + connector = CloudConnector.objects.first() + if connector is not None: + sync_status, err = connector.sync_user_with_cloud(user) + return Response(status=status.HTTP_200_OK, data={"status": 
sync_status, "error": err}) + else: + return Response(status=status.HTTP_400_BAD_REQUEST, data={"detail": "Grafana Cloud is not connected"}) diff --git a/engine/apps/public_api/constants.py b/engine/apps/public_api/constants.py index 4a14df3f..cd2f6e38 100644 --- a/engine/apps/public_api/constants.py +++ b/engine/apps/public_api/constants.py @@ -1,69 +1,3 @@ from django.utils import dateparse -DEMO_USER_ID = "U4DNY931HHJS5" -DEMO_ORGANIZATION_ID = "TCNPY4A1BWUMP" -DEMO_SLACK_USER_ID = "UALEXSLACKDJPK" -DEMO_SLACK_TEAM_ID = "TALEXSLACKDJPK" -DEMO_AUTH_TOKEN = "meowmeowmeow" -DEMO_USER_USERNAME = "Alex" -DEMO_USER_EMAIL = "public-api-demo-user-1@amixr.io" -DEMO_INTEGRATION_ID = "CFRPV98RPR1U8" -DEMO_INTEGRATION_LINK_TOKEN = "mReAoNwDm0eMwKo1mTeTwYo" -DEMO_INTEGRATION_NAME = "Grafana :blush:" -DEMO_ROUTE_ID_1 = "RIYGUJXCPFHXY" -DEMO_ROUTE_ID_2 = "RVBE4RKQSCGJ2" -DEMO_SLACK_CHANNEL_FOR_ROUTE_ID = "CH23212D" -DEMO_ESCALATION_CHAIN_ID = "F5JU6KJET33FE" -DEMO_ESCALATION_POLICY_ID_1 = "E3GA6SJETWWJS" -DEMO_ESCALATION_POLICY_ID_2 = "E5JJTU52M5YM4" -DEMO_SCHEDULE_ID_ICAL = "SBM7DV7BKFUYU" -DEMO_SCHEDULE_ID_CALENDAR = "S3Z477AHDXTMF" -DEMO_SCHEDULE_NAME_ICAL = "Demo schedule iCal" -DEMO_SCHEDULE_NAME_CALENDAR = "Demo schedule Calendar" -DEMO_SCHEDULE_ICAL_URL_PRIMARY = "https://example.com/meow_calendar.ics" -DEMO_SCHEDULE_ICAL_URL_OVERRIDES = "https://example.com/meow_calendar_overrides.ics" -DEMO_INCIDENT_ID = "I68T24C13IFW1" -DEMO_INCIDENT_CREATED_AT = "2020-05-19T12:37:01.430444Z" -DEMO_INCIDENT_RESOLVED_AT = "2020-05-19T13:37:01.429805Z" -DEMO_ALERT_IDS = [ - ("AA74DN7T4JQB6", "2020-05-11T20:07:43Z"), - ("AR9SSYFKE2PV7", "2020-05-11T20:07:54Z"), - ("AWJQSGEYYUFGH", "2020-05-11T20:07:58Z"), -] -DEMO_ALERT_PAYLOAD = { - "evalMatches": [ - {"value": 100, "metric": "High value", "tags": None}, - {"value": 200, "metric": "Higher Value", "tags": None}, - ], - "message": "Someone is testing the alert notification within grafana.", - "ruleId": 0, - "ruleName": "Test 
notification", - "ruleUrl": "https://amixr.io/", - "state": "alerting", - "title": "[Alerting] Test notification", -} VALID_DATE_FOR_DELETE_INCIDENT = dateparse.parse_date("2020-07-04") -DEMO_SLACK_CHANNEL_NAME = "meow_channel" -DEMO_SLACK_CHANNEL_SLACK_ID = "MEOW_SLACK_ID" -DEMO_PERSONAL_NOTIFICATION_ID_1 = "NT79GA9I7E4DJ" -DEMO_PERSONAL_NOTIFICATION_ID_2 = "ND9EHN5LN1DUU" -DEMO_PERSONAL_NOTIFICATION_ID_3 = "NEF49YQ1HNPDD" -DEMO_PERSONAL_NOTIFICATION_ID_4 = "NWAL6WFJNWDD8" -DEMO_RESOLUTION_NOTE_ID = "M4BTQUS3PRHYQ" -DEMO_RESOLUTION_NOTE_TEXT = "Demo resolution note" -DEMO_RESOLUTION_NOTE_CREATED_AT = "2020-06-19T12:40:01.429805Z" -DEMO_RESOLUTION_NOTE_SOURCE = "web" -DEMO_CUSTOM_ACTION_ID = "KGEFG74LU1D8L" -DEMO_CUSTOM_ACTION_NAME = "Publish Incident To Jira" -DEMO_SLACK_USER_GROUP_ID = "GPFAPH7J7BKJB" -DEMO_SLACK_USER_GROUP_SLACK_ID = "MEOW_SLACK_ID" -DEMO_SLACK_USER_GROUP_NAME = "Meow Group" -DEMO_SLACK_USER_GROUP_HANDLE = "meow_group" -DEMO_ON_CALL_SHIFT_ID_1 = "OH3V5FYQEYJ6M" -DEMO_ON_CALL_SHIFT_ID_2 = "O9WTH7CKM3KZW" -DEMO_ON_CALL_SHIFT_NAME_1 = "Demo single event" -DEMO_ON_CALL_SHIFT_NAME_2 = "Demo recurrent event" -DEMO_ON_CALL_SHIFT_START_1 = "2020-09-10T08:00:00" -DEMO_ON_CALL_SHIFT_START_2 = "2020-09-10T16:00:00" -DEMO_ON_CALL_SHIFT_DURATION = 10800 -DEMO_ON_CALL_SHIFT_BY_DAY = ["MO", "WE", "FR"] diff --git a/engine/apps/public_api/helpers.py b/engine/apps/public_api/helpers.py index f684e34a..587445cb 100644 --- a/engine/apps/public_api/helpers.py +++ b/engine/apps/public_api/helpers.py @@ -1,14 +1,8 @@ -from apps.public_api.constants import DEMO_AUTH_TOKEN, VALID_DATE_FOR_DELETE_INCIDENT +from apps.public_api.constants import VALID_DATE_FOR_DELETE_INCIDENT from apps.slack.slack_client import SlackClientWithErrorHandling from apps.slack.slack_client.exceptions import SlackAPITokenException -def is_demo_token_request(request): - if DEMO_AUTH_TOKEN == request.headers.get("Authorization"): - return True - return False - - def 
team_has_slack_token_for_deleting(alert_group): if alert_group.slack_message and alert_group.slack_message.slack_team_identity: sc = SlackClientWithErrorHandling(alert_group.slack_message.slack_team_identity.bot_access_token) diff --git a/engine/apps/public_api/serializers/integrations.py b/engine/apps/public_api/serializers/integrations.py index 82d418c0..090523a2 100644 --- a/engine/apps/public_api/serializers/integrations.py +++ b/engine/apps/public_api/serializers/integrations.py @@ -4,8 +4,6 @@ from rest_framework import fields, serializers from apps.alerts.grafana_alerting_sync_manager.grafana_alerting_sync import GrafanaAlertingSyncManager from apps.alerts.models import AlertReceiveChannel -from apps.public_api.constants import DEMO_INTEGRATION_LINK_TOKEN -from apps.public_api.helpers import is_demo_token_request from common.api_helpers.custom_fields import TeamPrimaryKeyRelatedField from common.api_helpers.exceptions import BadRequest from common.api_helpers.mixins import EagerLoadingMixin @@ -62,12 +60,6 @@ class IntegrationSerializer(EagerLoadingMixin, serializers.ModelSerializer, Main default_route = self._get_default_route_iterative(instance) serializer = DefaultChannelFilterSerializer(default_route, context=self.context) result["default_route"] = serializer.data - if is_demo_token_request(self.context["request"]): - # Replace integration token to not receive alerts on demo integration - link = result["link"] - real_token = instance.token - link = link.replace(real_token, DEMO_INTEGRATION_LINK_TOKEN) - result["link"] = link return result diff --git a/engine/apps/public_api/serializers/schedules_base.py b/engine/apps/public_api/serializers/schedules_base.py index 80cd8bc5..8eed1cf8 100644 --- a/engine/apps/public_api/serializers/schedules_base.py +++ b/engine/apps/public_api/serializers/schedules_base.py @@ -2,8 +2,6 @@ from django.apps import apps from django.utils import timezone from rest_framework import serializers -from apps.public_api import 
constants as public_api_constants -from apps.public_api.helpers import is_demo_token_request from apps.schedules.ical_utils import list_users_to_notify_from_ical from apps.schedules.models import OnCallSchedule from apps.slack.models import SlackUserGroup @@ -36,14 +34,11 @@ class ScheduleBaseSerializer(serializers.ModelSerializer): raise BadRequest(detail="Schedule with this name already exists") def get_on_call_now(self, obj): - if not is_demo_token_request(self.context["request"]): - users_on_call = list_users_to_notify_from_ical(obj, timezone.datetime.now(timezone.utc)) - if users_on_call is not None: - return [user.public_primary_key for user in users_on_call] - else: - return [] + users_on_call = list_users_to_notify_from_ical(obj, timezone.datetime.now(timezone.utc)) + if users_on_call is not None: + return [user.public_primary_key for user in users_on_call] else: - return [public_api_constants.DEMO_USER_ID] + return [] def _correct_validated_data(self, validated_data): slack_field = validated_data.pop("slack", {}) diff --git a/engine/apps/public_api/tests/conftest.py b/engine/apps/public_api/tests/conftest.py index a4d11c26..f8b6f8b0 100644 --- a/engine/apps/public_api/tests/conftest.py +++ b/engine/apps/public_api/tests/conftest.py @@ -1,14 +1,7 @@ import pytest -from django.utils import dateparse, timezone from pytest_factoryboy import register -from apps.alerts.models import EscalationPolicy, ResolutionNote -from apps.auth_token.models import ApiAuthToken -from apps.base.models import UserNotificationPolicy -from apps.public_api import constants as public_api_constants -from apps.schedules.models import CustomOnCallShift, OnCallScheduleCalendar, OnCallScheduleICal from apps.user_management.tests.factories import OrganizationFactory, UserFactory -from common.constants.role import Role register(UserFactory) register(OrganizationFactory) @@ -22,222 +15,3 @@ def make_organization_and_user_with_token(make_organization_and_user, make_publi return organization, 
user, token return _make_organization_and_user_with_token - - -@pytest.fixture() -def make_organization_and_user_with_slack_identities_for_demo_token( - make_slack_team_identity, - make_organization, - make_slack_user_identity, - make_user, -): - def _make_organization_and_user_with_slack_identities_for_demo_token(): - slack_team_identity = make_slack_team_identity(slack_id=public_api_constants.DEMO_SLACK_TEAM_ID) - organization = make_organization( - slack_team_identity=slack_team_identity, public_primary_key=public_api_constants.DEMO_ORGANIZATION_ID - ) - slack_user_identity = make_slack_user_identity( - slack_id=public_api_constants.DEMO_SLACK_USER_ID, - slack_team_identity=slack_team_identity, - ) - user = make_user( - organization=organization, - public_primary_key=public_api_constants.DEMO_USER_ID, - email=public_api_constants.DEMO_USER_EMAIL, - username=public_api_constants.DEMO_USER_USERNAME, - role=Role.ADMIN, - slack_user_identity=slack_user_identity, - ) - ApiAuthToken.create_auth_token(user, organization, public_api_constants.DEMO_AUTH_TOKEN) - token = public_api_constants.DEMO_AUTH_TOKEN - return organization, user, token - - return _make_organization_and_user_with_slack_identities_for_demo_token - - -@pytest.fixture() -def make_data_for_demo_token( - make_alert_receive_channel, - make_channel_filter, - make_escalation_chain, - make_escalation_policy, - make_alert_group, - make_alert, - make_resolution_note, - make_custom_action, - make_slack_user_group, - make_schedule, - make_on_call_shift, - make_slack_channel, - make_user_notification_policy, -): - def _make_data_for_demo_token(organization, user): - alert_receive_channel = make_alert_receive_channel( - organization, - public_primary_key=public_api_constants.DEMO_INTEGRATION_ID, - verbal_name=public_api_constants.DEMO_INTEGRATION_NAME, - ) - route_1 = make_channel_filter( - public_primary_key=public_api_constants.DEMO_ROUTE_ID_1, - alert_receive_channel=alert_receive_channel, - 
slack_channel_id=public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID, - filtering_term="us-(east|west)", - order=0, - ) - make_channel_filter( - public_primary_key=public_api_constants.DEMO_ROUTE_ID_2, - alert_receive_channel=alert_receive_channel, - slack_channel_id=public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID, - filtering_term=".*", - order=1, - is_default=True, - ) - escalation_chain = make_escalation_chain( - organization, public_primary_key=public_api_constants.DEMO_ESCALATION_CHAIN_ID - ) - make_escalation_policy( - escalation_chain, - public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_1, - escalation_policy_step=EscalationPolicy.STEP_WAIT, - order=0, - wait_delay=EscalationPolicy.ONE_MINUTE, - ) - escalation_policy_2 = make_escalation_policy( - escalation_chain, - public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_2, - escalation_policy_step=EscalationPolicy.STEP_NOTIFY_USERS_QUEUE, - order=1, - ) - escalation_policy_2.notify_to_users_queue.add(user) - alert_group = make_alert_group( - alert_receive_channel, - public_primary_key=public_api_constants.DEMO_INCIDENT_ID, - resolved=True, - channel_filter=route_1, - ) - alert_group.started_at = dateparse.parse_datetime(public_api_constants.DEMO_INCIDENT_CREATED_AT) - alert_group.resolved_at = dateparse.parse_datetime(public_api_constants.DEMO_INCIDENT_RESOLVED_AT) - alert_group.save(update_fields=["started_at", "resolved_at"]) - for alert_id, created_at in public_api_constants.DEMO_ALERT_IDS: - alert = make_alert( - public_primary_key=alert_id, - alert_group=alert_group, - raw_request_data=public_api_constants.DEMO_ALERT_PAYLOAD, - ) - alert.created_at = dateparse.parse_datetime(created_at) - alert.save(update_fields=["created_at"]) - - resolution_note = make_resolution_note( - alert_group=alert_group, - source=ResolutionNote.Source.WEB, - author=user, - public_primary_key=public_api_constants.DEMO_RESOLUTION_NOTE_ID, - 
message_text=public_api_constants.DEMO_RESOLUTION_NOTE_TEXT, - ) - resolution_note.created_at = dateparse.parse_datetime(public_api_constants.DEMO_RESOLUTION_NOTE_CREATED_AT) - resolution_note.save(update_fields=["created_at"]) - - make_custom_action( - public_primary_key=public_api_constants.DEMO_CUSTOM_ACTION_ID, - organization=organization, - name=public_api_constants.DEMO_CUSTOM_ACTION_NAME, - ) - - user_group = make_slack_user_group( - public_primary_key=public_api_constants.DEMO_SLACK_USER_GROUP_ID, - name=public_api_constants.DEMO_SLACK_USER_GROUP_NAME, - handle=public_api_constants.DEMO_SLACK_USER_GROUP_HANDLE, - slack_id=public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID, - slack_team_identity=organization.slack_team_identity, - ) - - # ical schedule - make_schedule( - organization=organization, - schedule_class=OnCallScheduleICal, - public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL, - ical_url_primary=public_api_constants.DEMO_SCHEDULE_ICAL_URL_PRIMARY, - ical_url_overrides=public_api_constants.DEMO_SCHEDULE_ICAL_URL_OVERRIDES, - name=public_api_constants.DEMO_SCHEDULE_NAME_ICAL, - channel=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID, - user_group=user_group, - ) - # calendar schedule - schedule_calendar = make_schedule( - organization=organization, - schedule_class=OnCallScheduleCalendar, - public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_CALENDAR, - name=public_api_constants.DEMO_SCHEDULE_NAME_CALENDAR, - channel=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID, - user_group=user_group, - time_zone="America/New_york", - ) - - on_call_shift_1 = make_on_call_shift( - shift_type=CustomOnCallShift.TYPE_SINGLE_EVENT, - organization=organization, - public_primary_key=public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, - name=public_api_constants.DEMO_ON_CALL_SHIFT_NAME_1, - start=dateparse.parse_datetime(public_api_constants.DEMO_ON_CALL_SHIFT_START_1), - duration=timezone.timedelta(seconds=public_api_constants.DEMO_ON_CALL_SHIFT_DURATION), 
- ) - on_call_shift_1.users.add(user) - - on_call_shift_2 = make_on_call_shift( - shift_type=CustomOnCallShift.TYPE_RECURRENT_EVENT, - organization=organization, - public_primary_key=public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, - name=public_api_constants.DEMO_ON_CALL_SHIFT_NAME_2, - start=dateparse.parse_datetime(public_api_constants.DEMO_ON_CALL_SHIFT_START_2), - duration=timezone.timedelta(seconds=public_api_constants.DEMO_ON_CALL_SHIFT_DURATION), - frequency=CustomOnCallShift.FREQUENCY_WEEKLY, - interval=2, - by_day=public_api_constants.DEMO_ON_CALL_SHIFT_BY_DAY, - source=CustomOnCallShift.SOURCE_TERRAFORM, - ) - on_call_shift_2.users.add(user) - - schedule_calendar.custom_on_call_shifts.add(on_call_shift_1) - schedule_calendar.custom_on_call_shifts.add(on_call_shift_2) - - make_slack_channel( - organization.slack_team_identity, - slack_id=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID, - name=public_api_constants.DEMO_SLACK_CHANNEL_NAME, - ) - make_user_notification_policy( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1, - important=False, - user=user, - notify_by=UserNotificationPolicy.NotificationChannel.SMS, - step=UserNotificationPolicy.Step.NOTIFY, - order=0, - ) - make_user_notification_policy( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_2, - important=False, - user=user, - step=UserNotificationPolicy.Step.WAIT, - wait_delay=UserNotificationPolicy.FIVE_MINUTES, - order=1, - ) - make_user_notification_policy( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_3, - important=False, - user=user, - step=UserNotificationPolicy.Step.NOTIFY, - notify_by=UserNotificationPolicy.NotificationChannel.PHONE_CALL, - order=2, - ) - - make_user_notification_policy( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_4, - important=True, - user=user, - step=UserNotificationPolicy.Step.NOTIFY, - notify_by=UserNotificationPolicy.NotificationChannel.PHONE_CALL, - order=0, - ) 
- return - - return _make_data_for_demo_token diff --git a/engine/apps/public_api/tests/test_demo_token/__init__.py b/engine/apps/public_api/tests/test_demo_token/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/engine/apps/public_api/tests/test_demo_token/test_alerts.py b/engine/apps/public_api/tests/test_demo_token/test_alerts.py deleted file mode 100644 index 4153ca2b..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_alerts.py +++ /dev/null @@ -1,110 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.public_api import constants as public_api_constants - -demo_alerts_results = [] -for alert_id, created_at in public_api_constants.DEMO_ALERT_IDS: - demo_alerts_results.append( - { - "id": alert_id, - "alert_group_id": public_api_constants.DEMO_INCIDENT_ID, - "created_at": created_at, - "payload": { - "state": "alerting", - "title": "[Alerting] Test notification", - "ruleId": 0, - "message": "Someone is testing the alert notification within grafana.", - "ruleUrl": "https://amixr.io/", - "ruleName": "Test notification", - "evalMatches": [ - {"tags": None, "value": 100, "metric": "High value"}, - {"tags": None, "value": 200, "metric": "Higher Value"}, - ], - }, - } - ) - -# https://api-docs.amixr.io/#list-alerts -demo_alerts_payload = {"count": 3, "next": None, "previous": None, "results": demo_alerts_results} - - -@pytest.mark.django_db -def test_get_alerts( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:alerts-list") - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == 
demo_alerts_payload - - -@pytest.mark.django_db -def test_get_alerts_filter_by_incident( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:alerts-list") - response = client.get( - url + f"?alert_group_id={public_api_constants.DEMO_INCIDENT_ID}", format="json", HTTP_AUTHORIZATION=token - ) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_alerts_payload - - -@pytest.mark.django_db -def test_get_alerts_filter_by_incident_no_results( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:alerts-list") - response = client.get(url + "?alert_group_id=impossible_alert_group_id", format="json", HTTP_AUTHORIZATION=token) - assert response.status_code == status.HTTP_200_OK - assert response.data["results"] == [] - - -@pytest.mark.django_db -def test_get_alerts_search( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:alerts-list") - response = client.get(url + "?search=evalMatches", format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_alerts_payload - - -@pytest.mark.django_db -def test_get_alerts_search_no_results( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, 
token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:alerts-list") - response = client.get(url + "?search=impossible_payload", format="json", HTTP_AUTHORIZATION=token) - assert response.status_code == status.HTTP_200_OK - assert response.data["results"] == [] diff --git a/engine/apps/public_api/tests/test_demo_token/test_custom_actions.py b/engine/apps/public_api/tests/test_demo_token/test_custom_actions.py deleted file mode 100644 index 6cf21903..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_custom_actions.py +++ /dev/null @@ -1,32 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.public_api import constants as public_api_constants - -demo_custom_action_payload = { - "id": public_api_constants.DEMO_CUSTOM_ACTION_ID, - "name": public_api_constants.DEMO_CUSTOM_ACTION_NAME, - "team_id": None, -} - -demo_custom_action_payload_list = {"count": 1, "next": None, "previous": None, "results": [demo_custom_action_payload]} - - -@pytest.mark.django_db -def test_demo_get_custom_actions_list( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:actions-list") - - response = client.get(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == demo_custom_action_payload_list diff --git a/engine/apps/public_api/tests/test_demo_token/test_escalation_policies.py b/engine/apps/public_api/tests/test_demo_token/test_escalation_policies.py deleted file mode 100644 index 4df862b6..00000000 --- 
a/engine/apps/public_api/tests/test_demo_token/test_escalation_policies.py +++ /dev/null @@ -1,169 +0,0 @@ -import pytest -from django.urls import reverse -from django.utils import timezone -from rest_framework import status -from rest_framework.test import APIClient - -from apps.alerts.models import EscalationPolicy -from apps.public_api import constants as public_api_constants - -# https://api-docs.amixr.io/#get-escalation-policy -demo_escalation_policy_payload = { - "id": public_api_constants.DEMO_ESCALATION_POLICY_ID_1, - "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID, - "position": 0, - "type": "wait", - "duration": timezone.timedelta(seconds=60).seconds, -} - -# https://api-docs.amixr.io/#list-escalation-policies -demo_escalation_policies_payload = { - "count": 2, - "next": None, - "previous": None, - "results": [ - { - "id": public_api_constants.DEMO_ESCALATION_POLICY_ID_1, - "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID, - "position": 0, - "type": "wait", - "duration": timezone.timedelta(seconds=60).seconds, - }, - { - "id": public_api_constants.DEMO_ESCALATION_POLICY_ID_2, - "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID, - "position": 1, - "type": "notify_person_next_each_time", - "persons_to_notify_next_each_time": ["U4DNY931HHJS5"], - }, - ], -} - - -@pytest.mark.django_db -def test_get_escalation_policies( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:escalation_policies-list") - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_escalation_policies_payload - - -@pytest.mark.django_db -def test_get_escalation_policies_filter_by_route( - 
make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:escalation_policies-list") - response = client.get( - url + f"?route_id={public_api_constants.DEMO_ROUTE_ID_1}", format="json", HTTP_AUTHORIZATION=token - ) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_escalation_policies_payload - - -@pytest.mark.django_db -def test_create_escalation_policy( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - data_for_create = { - "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID, - "type": "notify_person_next_each_time", - "position": 0, - "persons_to_notify_next_each_time": [user.public_primary_key], - } - url = reverse("api-public:escalation_policies-list") - response = client.post(url, data=data_for_create, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_201_CREATED - # check on nothing change - assert response.json() == demo_escalation_policy_payload - - -@pytest.mark.django_db -def test_invalid_step_type( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - data_for_create = { - "escalation_chain_id": public_api_constants.DEMO_ESCALATION_CHAIN_ID, - "type": "this_is_invalid_step_type", # invalid step type - "position": 0, - "persons_to_notify_next_each_time": [user.public_primary_key], - } - url = 
reverse("api-public:escalation_policies-list") - response = client.post(url, data=data_for_create, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_201_CREATED - # check on nothing change - assert response.json() == demo_escalation_policy_payload - - -@pytest.mark.django_db -def test_update_escalation_step( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - data_for_update = { - "route_id": public_api_constants.DEMO_ROUTE_ID_1, - "type": "notify_person_next_each_time", - "position": 1, - "persons_to_notify_next_each_time": [user.public_primary_key], - } - url = reverse( - "api-public:escalation_policies-detail", kwargs={"pk": public_api_constants.DEMO_ESCALATION_POLICY_ID_1} - ) - response = client.put(url, data=data_for_update, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - # check on nothing change - assert response.json() == demo_escalation_policy_payload - - -@pytest.mark.django_db -def test_delete_escalation_policy( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - escalation_policy = EscalationPolicy.objects.get( - public_primary_key=public_api_constants.DEMO_ESCALATION_POLICY_ID_1 - ) - - url = reverse("api-public:escalation_policies-detail", args=[escalation_policy.public_primary_key]) - response = client.delete(url, format="json", HTTP_AUTHORIZATION=token) - - escalation_policy.refresh_from_db() - - assert response.status_code == status.HTTP_204_NO_CONTENT - # check on nothing change - escalation_policy.refresh_from_db() - assert 
escalation_policy is not None diff --git a/engine/apps/public_api/tests/test_demo_token/test_incidents.py b/engine/apps/public_api/tests/test_demo_token/test_incidents.py deleted file mode 100644 index 26aa3b1a..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_incidents.py +++ /dev/null @@ -1,82 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.alerts.models import AlertGroup -from apps.public_api import constants as public_api_constants - -demo_incidents_payload = { - "count": 1, - "next": None, - "previous": None, - "results": [ - { - "id": public_api_constants.DEMO_INCIDENT_ID, - "integration_id": public_api_constants.DEMO_INTEGRATION_ID, - "route_id": public_api_constants.DEMO_ROUTE_ID_1, - "alerts_count": 3, - "state": "resolved", - "created_at": public_api_constants.DEMO_INCIDENT_CREATED_AT, - "resolved_at": public_api_constants.DEMO_INCIDENT_RESOLVED_AT, - "acknowledged_at": None, - "title": None, - } - ], -} - - -@pytest.mark.django_db -def test_create_incidents( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:alert_groups-list") - response = client.post(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_405_METHOD_NOT_ALLOWED - - -@pytest.mark.django_db -def test_get_incidents( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:alert_groups-list") - response = client.get(url, format="json", 
HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_incidents_payload - - -@pytest.mark.django_db -def test_delete_incidents( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:alert_groups-list") - incidents = AlertGroup.unarchived_objects.filter(public_primary_key=public_api_constants.DEMO_INCIDENT_ID) - total_count = incidents.count() - incident = incidents[0] - data = { - "mode": "delete", - } - response = client.delete(url + f"/{incident.public_primary_key}/", data, format="json", HTTP_AUTHORIZATION=token) - new_count = AlertGroup.unarchived_objects.filter(public_primary_key=public_api_constants.DEMO_INCIDENT_ID).count() - - assert response.status_code == status.HTTP_204_NO_CONTENT - incident.refresh_from_db() - assert total_count == new_count - assert incident is not None diff --git a/engine/apps/public_api/tests/test_demo_token/test_integrations.py b/engine/apps/public_api/tests/test_demo_token/test_integrations.py deleted file mode 100644 index be06f367..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_integrations.py +++ /dev/null @@ -1,239 +0,0 @@ -from urllib.parse import urljoin - -import pytest -from django.conf import settings -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.alerts.models import AlertReceiveChannel -from apps.public_api import constants as public_api_constants - -# https://api-docs.amixr.io/#post-integration -demo_integration_post_payload = { - "id": public_api_constants.DEMO_INTEGRATION_ID, - "team_id": None, - "name": "Grafana :blush:", - "link": urljoin(settings.BASE_URL, 
f"/integrations/v1/grafana/{public_api_constants.DEMO_INTEGRATION_LINK_TOKEN}/"), - "heartbeat": None, - "default_route": { - "escalation_chain_id": None, - "id": public_api_constants.DEMO_ROUTE_ID_2, - "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID}, - }, - "type": "grafana", - "templates": { - "grouping_key": None, - "resolve_signal": None, - "acknowledge_signal": None, - "slack": {"title": None, "message": None, "image_url": None}, - "web": {"title": None, "message": None, "image_url": None}, - "sms": { - "title": None, - }, - "phone_call": { - "title": None, - }, - "email": { - "title": None, - "message": None, - }, - "telegram": { - "title": None, - "message": None, - "image_url": None, - }, - }, - "maintenance_mode": None, - "maintenance_started_at": None, - "maintenance_end_at": None, -} - -# https://api-docs.amixr.io/#get-integration -demo_integration_payload = { - "id": public_api_constants.DEMO_INTEGRATION_ID, - "team_id": None, - "name": "Grafana :blush:", - "link": urljoin(settings.BASE_URL, f"/integrations/v1/grafana/{public_api_constants.DEMO_INTEGRATION_LINK_TOKEN}/"), - "default_route": { - "escalation_chain_id": None, - "id": public_api_constants.DEMO_ROUTE_ID_2, - "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID}, - }, - "type": "grafana", - "heartbeat": None, - "templates": { - "grouping_key": None, - "resolve_signal": None, - "acknowledge_signal": None, - "slack": {"title": None, "message": None, "image_url": None}, - "web": {"title": None, "message": None, "image_url": None}, - "sms": { - "title": None, - }, - "phone_call": { - "title": None, - }, - "email": { - "title": None, - "message": None, - }, - "telegram": { - "title": None, - "message": None, - "image_url": None, - }, - }, - "maintenance_mode": None, - "maintenance_started_at": None, - "maintenance_end_at": None, -} - -# https://api-docs.amixr.io/#list-integrations -demo_integrations_payload = { - "count": 1, - "next": None, - 
"previous": None, - "results": [ - { - "id": public_api_constants.DEMO_INTEGRATION_ID, - "team_id": None, - "name": "Grafana :blush:", - "link": urljoin( - settings.BASE_URL, f"/integrations/v1/grafana/{public_api_constants.DEMO_INTEGRATION_LINK_TOKEN}/" - ), - "default_route": { - "escalation_chain_id": None, - "id": public_api_constants.DEMO_ROUTE_ID_2, - "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID}, - }, - "type": "grafana", - "heartbeat": None, - "templates": { - "grouping_key": None, - "resolve_signal": None, - "acknowledge_signal": None, - "slack": { - "title": None, - "message": None, - "image_url": None, - }, - "web": {"title": None, "message": None, "image_url": None}, - "sms": { - "title": None, - }, - "phone_call": { - "title": None, - }, - "email": { - "title": None, - "message": None, - }, - "telegram": { - "title": None, - "message": None, - "image_url": None, - }, - }, - "maintenance_mode": None, - "maintenance_started_at": None, - "maintenance_end_at": None, - }, - ], -} - - -@pytest.mark.django_db -def test_get_integrations( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - url = reverse("api-public:integrations-list") - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_integrations_payload - - -@pytest.mark.django_db -def test_create_integration( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - data_for_create = {"type": "grafana"} - url = 
reverse("api-public:integrations-list") - response = client.post(url, data=data_for_create, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_201_CREATED - # check on nothing change - assert response.json() == demo_integration_post_payload - - -@pytest.mark.django_db -def test_update_integration( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - integration = AlertReceiveChannel.objects.get(public_primary_key=public_api_constants.DEMO_INTEGRATION_ID) - data_for_update = {"name": "new_name"} - url = reverse("api-public:integrations-detail", args=[integration.public_primary_key]) - response = client.put(url, data=data_for_update, format="json", HTTP_AUTHORIZATION=token) - - integration.refresh_from_db() - - assert response.status_code == status.HTTP_200_OK - # check on nothing change - assert response.json() == demo_integration_payload - - -@pytest.mark.django_db -def test_invalid_integration_type( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - data_for_create = {"type": "this_is_invalid_integration_type"} - url = reverse("api-public:integrations-list") - response = client.post(url, data=data_for_create, format="json", HTTP_AUTHORIZATION=token) - assert response.status_code == status.HTTP_201_CREATED - # check on nothing change - assert response.json() == demo_integration_post_payload - - -@pytest.mark.django_db -def test_delete_integration( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = 
make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - integration = AlertReceiveChannel.objects.get(public_primary_key=public_api_constants.DEMO_INTEGRATION_ID) - - url = reverse("api-public:integrations-detail", args=[integration.public_primary_key]) - response = client.delete(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_204_NO_CONTENT - # check on nothing change - integration.refresh_from_db() - assert integration is not None diff --git a/engine/apps/public_api/tests/test_demo_token/test_on_call_shift.py b/engine/apps/public_api/tests/test_demo_token/test_on_call_shift.py deleted file mode 100644 index f4c4552d..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_on_call_shift.py +++ /dev/null @@ -1,172 +0,0 @@ -import pytest -from django.urls import reverse -from django.utils import timezone -from rest_framework import status -from rest_framework.test import APIClient - -from apps.public_api import constants as public_api_constants -from apps.schedules.models import CustomOnCallShift - -demo_on_call_shift_payload_1 = { - "id": public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, - "team_id": None, - "name": public_api_constants.DEMO_ON_CALL_SHIFT_NAME_1, - "type": "single_event", - "time_zone": None, - "level": 0, - "start": public_api_constants.DEMO_ON_CALL_SHIFT_START_1, - "duration": public_api_constants.DEMO_ON_CALL_SHIFT_DURATION, - "users": [public_api_constants.DEMO_USER_ID], -} - -demo_on_call_shift_payload_2 = { - "id": public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, - "team_id": None, - "name": public_api_constants.DEMO_ON_CALL_SHIFT_NAME_2, - "type": "recurrent_event", - "time_zone": None, - "level": 0, - "start": public_api_constants.DEMO_ON_CALL_SHIFT_START_2, - "duration": public_api_constants.DEMO_ON_CALL_SHIFT_DURATION, - "frequency": "weekly", - "interval": 2, - "week_start": "SU", - "users": 
[public_api_constants.DEMO_USER_ID], - "by_day": public_api_constants.DEMO_ON_CALL_SHIFT_BY_DAY, - "by_month": None, - "by_monthday": None, -} - -demo_on_call_shift_payload_list = { - "count": 2, - "next": None, - "previous": None, - "results": [demo_on_call_shift_payload_1, demo_on_call_shift_payload_2], -} - - -@pytest.mark.django_db -def test_demo_get_on_call_shift_list( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:on_call_shifts-list") - - response = client.get(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == demo_on_call_shift_payload_list - - -@pytest.mark.django_db -@pytest.mark.parametrize( - "demo_on_call_shift_id,payload", - [ - (public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, demo_on_call_shift_payload_1), - (public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, demo_on_call_shift_payload_2), - ], -) -def test_demo_get_on_call_shift_1( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, - demo_on_call_shift_id, - payload, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:on_call_shifts-detail", kwargs={"pk": demo_on_call_shift_id}) - - response = client.get(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == payload - - -@pytest.mark.django_db -def test_demo_post_on_call_shift( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = 
make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:on_call_shifts-list") - - data = { - "schedule_id": public_api_constants.DEMO_SCHEDULE_ID_CALENDAR, - "name": "New demo shift", - "type": CustomOnCallShift.TYPE_SINGLE_EVENT, - "start": timezone.now().replace(tzinfo=None, microsecond=0).isoformat(), - "duration": 3600, - } - - response = client.post(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_201_CREATED - assert response.data == demo_on_call_shift_payload_1 - - -@pytest.mark.django_db -@pytest.mark.parametrize( - "demo_on_call_shift_id,payload", - [ - (public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, demo_on_call_shift_payload_1), - (public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, demo_on_call_shift_payload_2), - ], -) -def test_demo_update_on_call_shift( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, - demo_on_call_shift_id, - payload, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - data = {"name": "Updated demo name"} - - url = reverse("api-public:on_call_shifts-detail", kwargs={"pk": demo_on_call_shift_id}) - - response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == payload - - -@pytest.mark.django_db -@pytest.mark.parametrize( - "demo_on_call_shift_id", - [ - public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, - public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, - ], -) -def test_demo_delete_on_call_shift( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, - demo_on_call_shift_id, -): - - organization, user, token = 
make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:on_call_shifts-detail", kwargs={"pk": demo_on_call_shift_id}) - - response = client.delete(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_204_NO_CONTENT - assert CustomOnCallShift.objects.filter(public_primary_key=demo_on_call_shift_id).exists() diff --git a/engine/apps/public_api/tests/test_demo_token/test_personal_notification_rules.py b/engine/apps/public_api/tests/test_demo_token/test_personal_notification_rules.py deleted file mode 100644 index d0abf315..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_personal_notification_rules.py +++ /dev/null @@ -1,225 +0,0 @@ -import pytest -from django.urls import reverse -from django.utils import timezone -from rest_framework import status -from rest_framework.test import APIClient - -from apps.base.models import UserNotificationPolicy -from apps.base.models.user_notification_policy import NotificationChannelPublicAPIOptions -from apps.public_api import constants as public_api_constants - -TYPE_WAIT = "wait" - -demo_personal_notification_rule_payload_1 = { - "id": public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1, - "user_id": public_api_constants.DEMO_USER_ID, - "position": 0, - "important": False, - "type": "notify_by_sms", -} - -demo_personal_notification_rule_payload_2 = { - "id": public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_2, - "user_id": public_api_constants.DEMO_USER_ID, - "position": 1, - "duration": timezone.timedelta(seconds=300).seconds, - "important": False, - "type": "wait", -} - -demo_personal_notification_rule_payload_3 = { - "id": public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_3, - "user_id": public_api_constants.DEMO_USER_ID, - "position": 2, - "important": False, - "type": "notify_by_phone_call", -} - -demo_personal_notification_rule_payload_4 = { - "id": 
public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_4, - "user_id": public_api_constants.DEMO_USER_ID, - "position": 0, - "important": True, - "type": "notify_by_phone_call", -} - -demo_personal_notification_rules_payload = { - "count": 4, - "next": None, - "previous": None, - "results": [ - demo_personal_notification_rule_payload_1, - demo_personal_notification_rule_payload_2, - demo_personal_notification_rule_payload_3, - demo_personal_notification_rule_payload_4, - ], -} - -demo_personal_notification_rules_non_important_payload = { - "count": 3, - "next": None, - "previous": None, - "results": [ - demo_personal_notification_rule_payload_1, - demo_personal_notification_rule_payload_2, - demo_personal_notification_rule_payload_3, - ], -} - -demo_personal_notification_rules_important_payload = { - "count": 1, - "next": None, - "previous": None, - "results": [ - demo_personal_notification_rule_payload_4, - ], -} - - -@pytest.mark.django_db -def test_get_personal_notification_rule( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - _ = make_data_for_demo_token(organization, user) - - demo_personal_notification_rule_1 = UserNotificationPolicy.objects.get( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1 - ) - client = APIClient() - - url = reverse( - "api-public:personal_notification_rules-detail", - kwargs={"pk": demo_personal_notification_rule_1.public_primary_key}, - ) - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_personal_notification_rule_payload_1 - - -@pytest.mark.django_db -def test_get_personal_notification_rules_list( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = 
make_organization_and_user_with_slack_identities_for_demo_token() - _ = make_data_for_demo_token(organization, user) - - client = APIClient() - - url = reverse("api-public:personal_notification_rules-list") - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_personal_notification_rules_payload - - -@pytest.mark.django_db -def test_get_personal_notification_rules_list_important( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - _ = make_data_for_demo_token(organization, user) - client = APIClient() - - url = reverse("api-public:personal_notification_rules-list") - response = client.get(url + "?important=true", format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_personal_notification_rules_important_payload - - -@pytest.mark.django_db -def test_get_personal_notification_rules_list_non_important( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - _ = make_data_for_demo_token(organization, user) - - client = APIClient() - - url = reverse("api-public:personal_notification_rules-list") - response = client.get(url + "?important=false", format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_personal_notification_rules_non_important_payload - - -@pytest.mark.django_db -def test_update_personal_notification_rule( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - _ = 
make_data_for_demo_token(organization, user) - demo_personal_notification_rule_1 = UserNotificationPolicy.objects.get( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1 - ) - client = APIClient() - - url = reverse( - "api-public:personal_notification_rules-detail", - kwargs={"pk": demo_personal_notification_rule_1.public_primary_key}, - ) - - data_to_update = { - "type": NotificationChannelPublicAPIOptions.LABELS[UserNotificationPolicy.NotificationChannel.SLACK] - } - response = client.put(url, format="json", HTTP_AUTHORIZATION=token, data=data_to_update) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_personal_notification_rule_payload_1 - # check on nothing change - demo_personal_notification_rule_1.refresh_from_db() - assert demo_personal_notification_rule_1.notify_by != UserNotificationPolicy.NotificationChannel.SLACK - - -@pytest.mark.django_db -def test_create_personal_notification_rule( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - _ = make_data_for_demo_token(organization, user) - client = APIClient() - - url = reverse("api-public:personal_notification_rules-list") - data_for_create = { - "user_id": user.public_primary_key, - "type": TYPE_WAIT, - "position": 1, - "duration": timezone.timedelta(seconds=300).seconds, - } - response = client.post(url, format="json", HTTP_AUTHORIZATION=token, data=data_for_create) - - assert response.status_code == status.HTTP_201_CREATED - assert response.json() == demo_personal_notification_rule_payload_1 - - -@pytest.mark.django_db -def test_delete_personal_notification_rule( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - _ = make_data_for_demo_token(organization, 
user) - demo_personal_notification_rule_1 = UserNotificationPolicy.objects.get( - public_primary_key=public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1 - ) - client = APIClient() - - url = reverse( - "api-public:personal_notification_rules-detail", - kwargs={"pk": demo_personal_notification_rule_1.public_primary_key}, - ) - - response = client.delete(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_204_NO_CONTENT - # check on nothing change - demo_personal_notification_rule_1.refresh_from_db() - assert demo_personal_notification_rule_1 is not None diff --git a/engine/apps/public_api/tests/test_demo_token/test_resolution_notes.py b/engine/apps/public_api/tests/test_demo_token/test_resolution_notes.py deleted file mode 100644 index 888760e9..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_resolution_notes.py +++ /dev/null @@ -1,117 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.alerts.models import ResolutionNote -from apps.public_api import constants as public_api_constants - -demo_resolution_note_payload = { - "id": public_api_constants.DEMO_RESOLUTION_NOTE_ID, - "alert_group_id": public_api_constants.DEMO_INCIDENT_ID, - "author": public_api_constants.DEMO_USER_ID, - "source": public_api_constants.DEMO_RESOLUTION_NOTE_SOURCE, - "created_at": public_api_constants.DEMO_RESOLUTION_NOTE_CREATED_AT, - "text": public_api_constants.DEMO_RESOLUTION_NOTE_TEXT, -} - -demo_resolution_note_payload_list = { - "count": 1, - "next": None, - "previous": None, - "results": [demo_resolution_note_payload], -} - - -@pytest.mark.django_db -def test_demo_get_resolution_note_list( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = 
make_data_for_demo_token(organization, user) - - url = reverse("api-public:resolution_notes-list") - - response = client.get(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == demo_resolution_note_payload_list - - -@pytest.mark.django_db -def test_demo_get_resolution_note( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:resolution_notes-detail", kwargs={"pk": public_api_constants.DEMO_RESOLUTION_NOTE_ID}) - - response = client.get(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == demo_resolution_note_payload - - -@pytest.mark.django_db -def test_demo_post_resolution_note( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:resolution_notes-list") - - data = {"alert_group_id": public_api_constants.DEMO_INCIDENT_ID, "text": "New demo text"} - - response = client.post(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_201_CREATED - assert response.data == demo_resolution_note_payload - - -@pytest.mark.django_db -def test_demo_update_resolution_note( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - data = {"alert_group_id": 
public_api_constants.DEMO_INCIDENT_ID, "text": "Updated demo text"} - - url = reverse("api-public:resolution_notes-detail", kwargs={"pk": public_api_constants.DEMO_RESOLUTION_NOTE_ID}) - - response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == demo_resolution_note_payload - - -@pytest.mark.django_db -def test_demo_delete_resolution_note( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:resolution_notes-detail", kwargs={"pk": public_api_constants.DEMO_RESOLUTION_NOTE_ID}) - - response = client.delete(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_204_NO_CONTENT - assert ResolutionNote.objects.filter(public_primary_key=public_api_constants.DEMO_RESOLUTION_NOTE_ID).exists() diff --git a/engine/apps/public_api/tests/test_demo_token/test_routes.py b/engine/apps/public_api/tests/test_demo_token/test_routes.py deleted file mode 100644 index cd8938db..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_routes.py +++ /dev/null @@ -1,182 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.alerts.models import ChannelFilter -from apps.public_api import constants as public_api_constants - -# https://api-docs.amixr.io/#get-route -demo_route_payload = { - "id": public_api_constants.DEMO_ROUTE_ID_1, - "escalation_chain_id": None, - "integration_id": public_api_constants.DEMO_INTEGRATION_ID, - "routing_regex": "us-(east|west)", - "position": 0, - "is_the_last_route": False, - "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID}, -} - -# 
https://api-docs.amixr.io/#list-routes -demo_routes_payload = { - "count": 2, - "next": None, - "previous": None, - "results": [ - { - "id": public_api_constants.DEMO_ROUTE_ID_1, - "escalation_chain_id": None, - "integration_id": public_api_constants.DEMO_INTEGRATION_ID, - "routing_regex": "us-(east|west)", - "position": 0, - "is_the_last_route": False, - "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID}, - }, - { - "id": public_api_constants.DEMO_ROUTE_ID_2, - "escalation_chain_id": None, - "integration_id": public_api_constants.DEMO_INTEGRATION_ID, - "routing_regex": ".*", - "position": 1, - "is_the_last_route": True, - "slack": {"channel_id": public_api_constants.DEMO_SLACK_CHANNEL_FOR_ROUTE_ID}, - }, - ], -} - - -@pytest.mark.django_db -def test_get_route( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - channel_filter = ChannelFilter.objects.get(public_primary_key=public_api_constants.DEMO_ROUTE_ID_1) - - url = reverse("api-public:routes-detail", kwargs={"pk": channel_filter.public_primary_key}) - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_route_payload - - -@pytest.mark.django_db -def test_get_routes_list( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:routes-list") - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_routes_payload - - 
-@pytest.mark.django_db -def test_get_routes_filter_by_integration_id( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:routes-list") - response = client.get( - url + f"?integration_id={public_api_constants.DEMO_INTEGRATION_ID}", format="json", HTTP_AUTHORIZATION=token - ) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_routes_payload - - -@pytest.mark.django_db -def test_create_route( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:routes-list") - data_for_create = { - "integration_id": public_api_constants.DEMO_INTEGRATION_ID, - "routing_regex": "testreg", - } - response = client.post(url, format="json", HTTP_AUTHORIZATION=token, data=data_for_create) - - assert response.status_code == status.HTTP_201_CREATED - assert response.json() == demo_route_payload - - -@pytest.mark.django_db -def test_invalid_route_data( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:routes-list") - data_for_create = { - "integration_id": public_api_constants.DEMO_INTEGRATION_ID, - "routing_regex": None, # routing_regex cannot be null for non-default filters - } - response = client.post(url, format="json", HTTP_AUTHORIZATION=token, data=data_for_create) - - assert response.status_code == status.HTTP_201_CREATED 
- assert response.json() == demo_route_payload - - -@pytest.mark.django_db -def test_update_route( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - channel_filter = ChannelFilter.objects.get(public_primary_key=public_api_constants.DEMO_ROUTE_ID_1) - - url = reverse("api-public:routes-detail", kwargs={"pk": channel_filter.public_primary_key}) - data_to_update = { - "routing_regex": "testreg_updated", - } - - assert channel_filter.filtering_term != data_to_update["routing_regex"] - - response = client.put(url, format="json", HTTP_AUTHORIZATION=token, data=data_to_update) - - assert response.status_code == status.HTTP_200_OK - # check on nothing change - channel_filter.refresh_from_db() - assert response.json() == demo_route_payload - assert channel_filter.filtering_term != data_to_update["routing_regex"] - - -@pytest.mark.django_db -def test_delete_route( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - channel_filter = ChannelFilter.objects.get(public_primary_key=public_api_constants.DEMO_ROUTE_ID_1) - - url = reverse("api-public:routes-detail", kwargs={"pk": channel_filter.public_primary_key}) - response = client.delete(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_204_NO_CONTENT - # check on nothing change - channel_filter.refresh_from_db() - assert channel_filter is not None diff --git a/engine/apps/public_api/tests/test_demo_token/test_schedules.py b/engine/apps/public_api/tests/test_demo_token/test_schedules.py deleted file mode 100644 index 9a56955b..00000000 --- 
a/engine/apps/public_api/tests/test_demo_token/test_schedules.py +++ /dev/null @@ -1,164 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.public_api import constants as public_api_constants -from apps.schedules.models import OnCallSchedule - -demo_ical_schedule_payload = { - "id": public_api_constants.DEMO_SCHEDULE_ID_ICAL, - "team_id": None, - "name": public_api_constants.DEMO_SCHEDULE_NAME_ICAL, - "type": "ical", - "ical_url_primary": public_api_constants.DEMO_SCHEDULE_ICAL_URL_PRIMARY, - "ical_url_overrides": public_api_constants.DEMO_SCHEDULE_ICAL_URL_OVERRIDES, - "on_call_now": [public_api_constants.DEMO_USER_ID], - "slack": { - "channel_id": public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID, - "user_group_id": public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID, - }, -} - -demo_calendar_schedule_payload = { - "id": public_api_constants.DEMO_SCHEDULE_ID_CALENDAR, - "team_id": None, - "name": public_api_constants.DEMO_SCHEDULE_NAME_CALENDAR, - "type": "calendar", - "time_zone": "America/New_york", - "on_call_now": [public_api_constants.DEMO_USER_ID], - "shifts": [ - public_api_constants.DEMO_ON_CALL_SHIFT_ID_1, - public_api_constants.DEMO_ON_CALL_SHIFT_ID_2, - ], - "slack": { - "channel_id": public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID, - "user_group_id": public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID, - }, - "ical_url_overrides": None, -} - -demo_schedules_payload = { - "count": 2, - "next": None, - "previous": None, - "results": [ - demo_ical_schedule_payload, - demo_calendar_schedule_payload, - ], -} - - -@pytest.mark.django_db -def test_get_schedule( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - schedule = 
OnCallSchedule.objects.get(public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL) - - url = reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key}) - - response = client.get(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == demo_ical_schedule_payload - - -@pytest.mark.django_db -def test_create_schedule( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:schedules-list") - - data = { - "name": "schedule test name", - "type": "ical", - } - - response = client.post(url, data=data, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_201_CREATED - # check that demo instance was returned - assert response.data == demo_ical_schedule_payload - - -@pytest.mark.django_db -def test_update_ical_schedule( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - schedule = OnCallSchedule.objects.get(public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL) - - url = reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key}) - - data = { - "name": "NEW NAME", - } - - response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - # check on nothing change - schedule.refresh_from_db() - assert schedule.name != data["name"] - assert response.data == demo_ical_schedule_payload - - -@pytest.mark.django_db -def test_update_calendar_schedule( - 
make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - schedule = OnCallSchedule.objects.get(public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_CALENDAR) - - url = reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key}) - - data = { - "name": "NEW NAME", - } - - response = client.put(url, data=data, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - # check on nothing change - schedule.refresh_from_db() - assert schedule.name != data["name"] - assert response.data == demo_calendar_schedule_payload - - -@pytest.mark.django_db -def test_delete_schedule( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - schedule = OnCallSchedule.objects.get(public_primary_key=public_api_constants.DEMO_SCHEDULE_ID_ICAL) - - url = reverse("api-public:schedules-detail", kwargs={"pk": schedule.public_primary_key}) - - response = client.delete(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_204_NO_CONTENT - # check on nothing change - schedule.refresh_from_db() - assert schedule is not None diff --git a/engine/apps/public_api/tests/test_demo_token/test_slack_channels.py b/engine/apps/public_api/tests/test_demo_token/test_slack_channels.py deleted file mode 100644 index 80a11bdc..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_slack_channels.py +++ /dev/null @@ -1,34 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from 
apps.public_api import constants as public_api_constants - -demo_slack_channels_payload = { - "count": 1, - "next": None, - "previous": None, - "results": [ - { - "name": public_api_constants.DEMO_SLACK_CHANNEL_NAME, - "slack_id": public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID, - } - ], -} - - -@pytest.mark.django_db -def test_get_slack_channels_list( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:slack_channels-list") - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_slack_channels_payload diff --git a/engine/apps/public_api/tests/test_demo_token/test_user_groups.py b/engine/apps/public_api/tests/test_demo_token/test_user_groups.py deleted file mode 100644 index 08ee995c..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_user_groups.py +++ /dev/null @@ -1,36 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.public_api import constants as public_api_constants - -demo_user_group_payload = { - "id": public_api_constants.DEMO_SLACK_USER_GROUP_ID, - "type": "slack_based", - "slack": { - "id": public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID, - "name": public_api_constants.DEMO_SLACK_USER_GROUP_NAME, - "handle": public_api_constants.DEMO_SLACK_USER_GROUP_HANDLE, - }, -} - -demo_user_group_payload_list = {"count": 1, "next": None, "previous": None, "results": [demo_user_group_payload]} - - -@pytest.mark.django_db -def test_demo_get_user_groups_list( - make_organization_and_user_with_slack_identities_for_demo_token, - make_data_for_demo_token, -): - - organization, user, token = 
make_organization_and_user_with_slack_identities_for_demo_token() - client = APIClient() - _ = make_data_for_demo_token(organization, user) - - url = reverse("api-public:user_groups-list") - - response = client.get(url, format="json", HTTP_AUTHORIZATION=f"{token}") - - assert response.status_code == status.HTTP_200_OK - assert response.data == demo_user_group_payload_list diff --git a/engine/apps/public_api/tests/test_demo_token/test_users.py b/engine/apps/public_api/tests/test_demo_token/test_users.py deleted file mode 100644 index ffa4bfdb..00000000 --- a/engine/apps/public_api/tests/test_demo_token/test_users.py +++ /dev/null @@ -1,91 +0,0 @@ -import pytest -from django.urls import reverse -from rest_framework import status -from rest_framework.test import APIClient - -from apps.public_api import constants as public_api_constants - -# NB can compare with https://api-docs.amixr.io/#get-user - -demo_token_user_payload = { - "id": public_api_constants.DEMO_USER_ID, - "email": public_api_constants.DEMO_USER_EMAIL, - "slack": {"user_id": public_api_constants.DEMO_SLACK_USER_ID, "team_id": public_api_constants.DEMO_SLACK_TEAM_ID}, - "username": public_api_constants.DEMO_USER_USERNAME, - "role": "admin", - "is_phone_number_verified": False, -} - -# https://api-docs.amixr.io/#list-users -demo_token_users_payload = { - "count": 1, - "next": None, - "previous": None, - "results": [ - { - "id": public_api_constants.DEMO_USER_ID, - "email": public_api_constants.DEMO_USER_EMAIL, - "slack": { - "user_id": public_api_constants.DEMO_SLACK_USER_ID, - "team_id": public_api_constants.DEMO_SLACK_TEAM_ID, - }, - "username": public_api_constants.DEMO_USER_USERNAME, - "role": "admin", - "is_phone_number_verified": False, - } - ], -} - - -@pytest.mark.django_db -def test_get_user( - make_organization_and_user_with_slack_identities_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - - url = 
reverse("api-public:users-detail", args=[user.public_primary_key]) - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_token_user_payload - - # get current user - url = reverse("api-public:users-detail", args=["current"]) - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_token_user_payload - - -@pytest.mark.django_db -def test_get_users( - make_organization_and_user_with_slack_identities_for_demo_token, -): - organization, user, token = make_organization_and_user_with_slack_identities_for_demo_token() - - client = APIClient() - - url = reverse("api-public:users-list") - response = client.get(url, format="json", HTTP_AUTHORIZATION=token) - - assert response.status_code == status.HTTP_200_OK - assert response.json() == demo_token_users_payload - - -@pytest.mark.django_db -def test_forbidden_access( - make_organization_and_user_with_slack_identities_for_demo_token, - make_organization_and_user_with_token, -): - _, user, _ = make_organization_and_user_with_slack_identities_for_demo_token() - _, _, another_org_token = make_organization_and_user_with_token() - - client = APIClient() - - url = reverse("api-public:users-detail", args=[user.public_primary_key]) - - response = client.get(url, format="json", HTTP_AUTHORIZATION=another_org_token) - - assert response.status_code == status.HTTP_404_NOT_FOUND diff --git a/engine/apps/public_api/throttlers/__init__.py b/engine/apps/public_api/throttlers/__init__.py index e69de29b..20dc00d7 100644 --- a/engine/apps/public_api/throttlers/__init__.py +++ b/engine/apps/public_api/throttlers/__init__.py @@ -0,0 +1,3 @@ +from .info_throttler import InfoThrottler # noqa: F401 +from .phone_notification_throttler import PhoneNotificationThrottler # noqa: F401 +from .user_throttle import UserThrottle # noqa: F401 diff --git 
a/engine/apps/public_api/throttlers/info_throttler.py b/engine/apps/public_api/throttlers/info_throttler.py new file mode 100644 index 00000000..a48bce22 --- /dev/null +++ b/engine/apps/public_api/throttlers/info_throttler.py @@ -0,0 +1,6 @@ +from rest_framework.throttling import UserRateThrottle + + +class InfoThrottler(UserRateThrottle): + scope = "info" + rate = "100/m" diff --git a/engine/apps/public_api/throttlers/phone_notification_throttler.py b/engine/apps/public_api/throttlers/phone_notification_throttler.py new file mode 100644 index 00000000..a66e19a1 --- /dev/null +++ b/engine/apps/public_api/throttlers/phone_notification_throttler.py @@ -0,0 +1,6 @@ +from rest_framework.throttling import UserRateThrottle + + +class PhoneNotificationThrottler(UserRateThrottle): + scope = "phone_notification" + rate = "60/m" diff --git a/engine/apps/public_api/urls.py b/engine/apps/public_api/urls.py index 95fa447a..a91898df 100644 --- a/engine/apps/public_api/urls.py +++ b/engine/apps/public_api/urls.py @@ -30,4 +30,6 @@ router.register(r"teams", views.TeamView, basename="teams") urlpatterns = [ path("", include(router.urls)), optional_slash_path("info", views.InfoView.as_view(), name="info"), + optional_slash_path("make_call", views.MakeCallView.as_view(), name="make_call"), + optional_slash_path("send_sms", views.SendSMSView.as_view(), name="send_sms"), ] diff --git a/engine/apps/public_api/views/__init__.py b/engine/apps/public_api/views/__init__.py index 1892d123..4ffcec04 100644 --- a/engine/apps/public_api/views/__init__.py +++ b/engine/apps/public_api/views/__init__.py @@ -8,6 +8,7 @@ from .integrations import IntegrationView # noqa: F401 from .on_call_shifts import CustomOnCallShiftView # noqa: F401 from .organizations import OrganizationView # noqa: F401 from .personal_notifications import PersonalNotificationView # noqa: F401 +from .phone_notifications import MakeCallView, SendSMSView # noqa: F401 from .resolution_notes import ResolutionNoteView # noqa: F401 
from .routes import ChannelFilterView # noqa: F401 from .schedules import OnCallScheduleChannelView # noqa: F401 diff --git a/engine/apps/public_api/views/action.py b/engine/apps/public_api/views/action.py index bbb6bc73..60ca1465 100644 --- a/engine/apps/public_api/views/action.py +++ b/engine/apps/public_api/views/action.py @@ -8,11 +8,11 @@ from apps.auth_token.auth import ApiTokenAuthentication from apps.public_api.serializers.action import ActionSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from common.api_helpers.filters import ByTeamFilter -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin +from common.api_helpers.mixins import RateLimitHeadersMixin from common.api_helpers.paginators import FiftyPageSizePaginator -class ActionView(RateLimitHeadersMixin, DemoTokenMixin, mixins.ListModelMixin, GenericViewSet): +class ActionView(RateLimitHeadersMixin, mixins.ListModelMixin, GenericViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) pagination_class = FiftyPageSizePaginator diff --git a/engine/apps/public_api/views/alerts.py b/engine/apps/public_api/views/alerts.py index 56fe651e..da332176 100644 --- a/engine/apps/public_api/views/alerts.py +++ b/engine/apps/public_api/views/alerts.py @@ -6,14 +6,13 @@ from rest_framework.viewsets import GenericViewSet from apps.alerts.models import Alert from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.serializers.alerts import AlertSerializer from apps.public_api.throttlers.user_throttle import UserThrottle -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin +from common.api_helpers.mixins import RateLimitHeadersMixin from common.api_helpers.paginators import FiftyPageSizePaginator -class AlertView(RateLimitHeadersMixin, DemoTokenMixin, mixins.ListModelMixin, GenericViewSet): +class 
AlertView(RateLimitHeadersMixin, mixins.ListModelMixin, GenericViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -23,8 +22,6 @@ class AlertView(RateLimitHeadersMixin, DemoTokenMixin, mixins.ListModelMixin, Ge serializer_class = AlertSerializer pagination_class = FiftyPageSizePaginator - demo_default_id = public_api_constants.DEMO_ALERT_IDS[0] - def get_queryset(self): alert_group_id = self.request.query_params.get("alert_group_id", None) search = self.request.query_params.get("search", None) diff --git a/engine/apps/public_api/views/escalation_policies.py b/engine/apps/public_api/views/escalation_policies.py index fc285588..15203f63 100644 --- a/engine/apps/public_api/views/escalation_policies.py +++ b/engine/apps/public_api/views/escalation_policies.py @@ -5,15 +5,14 @@ from rest_framework.viewsets import ModelViewSet from apps.alerts.models import EscalationPolicy from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.serializers import EscalationPolicySerializer, EscalationPolicyUpdateSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.user_management.organization_log_creator import OrganizationLogType, create_organization_log -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin, UpdateSerializerMixin +from common.api_helpers.mixins import RateLimitHeadersMixin, UpdateSerializerMixin from common.api_helpers.paginators import FiftyPageSizePaginator -class EscalationPolicyView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializerMixin, ModelViewSet): +class EscalationPolicyView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -25,8 +24,6 @@ class EscalationPolicyView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializ pagination_class = FiftyPageSizePaginator - 
demo_default_id = public_api_constants.DEMO_ESCALATION_POLICY_ID_1 - def get_queryset(self): escalation_chain_id = self.request.query_params.get("escalation_chain_id", None) queryset = EscalationPolicy.objects.filter( diff --git a/engine/apps/public_api/views/incidents.py b/engine/apps/public_api/views/incidents.py index 1bfe830e..cd4d6098 100644 --- a/engine/apps/public_api/views/incidents.py +++ b/engine/apps/public_api/views/incidents.py @@ -8,14 +8,13 @@ from rest_framework.viewsets import GenericViewSet from apps.alerts.models import AlertGroup from apps.alerts.tasks import delete_alert_group, wipe from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.constants import VALID_DATE_FOR_DELETE_INCIDENT from apps.public_api.helpers import is_valid_group_creation_date, team_has_slack_token_for_deleting from apps.public_api.serializers import IncidentSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from common.api_helpers.exceptions import BadRequest from common.api_helpers.filters import ByTeamModelFieldFilterMixin, get_team_queryset -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin +from common.api_helpers.mixins import RateLimitHeadersMixin from common.api_helpers.paginators import FiftyPageSizePaginator @@ -30,9 +29,7 @@ class IncidentByTeamFilter(ByTeamModelFieldFilterMixin, filters.FilterSet): ) -class IncidentView( - RateLimitHeadersMixin, DemoTokenMixin, mixins.ListModelMixin, mixins.DestroyModelMixin, GenericViewSet -): +class IncidentView(RateLimitHeadersMixin, mixins.ListModelMixin, mixins.DestroyModelMixin, GenericViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -42,8 +39,6 @@ class IncidentView( serializer_class = IncidentSerializer pagination_class = FiftyPageSizePaginator - demo_default_id = public_api_constants.DEMO_INCIDENT_ID - filter_backends = 
(filters.DjangoFilterBackend,) filterset_class = IncidentByTeamFilter diff --git a/engine/apps/public_api/views/info.py b/engine/apps/public_api/views/info.py index f9649181..f9cc13ca 100644 --- a/engine/apps/public_api/views/info.py +++ b/engine/apps/public_api/views/info.py @@ -3,14 +3,14 @@ from rest_framework.response import Response from rest_framework.views import APIView from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api.throttlers.user_throttle import UserThrottle +from apps.public_api.throttlers import InfoThrottler class InfoView(APIView): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) - throttle_classes = [UserThrottle] + throttle_classes = [InfoThrottler] def get(self, request): response = {"url": self.request.auth.organization.grafana_url} diff --git a/engine/apps/public_api/views/integrations.py b/engine/apps/public_api/views/integrations.py index 8aa4784e..447c5b2b 100644 --- a/engine/apps/public_api/views/integrations.py +++ b/engine/apps/public_api/views/integrations.py @@ -6,17 +6,11 @@ from rest_framework.viewsets import ModelViewSet from apps.alerts.models import AlertReceiveChannel from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.serializers import IntegrationSerializer, IntegrationUpdateSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.user_management.organization_log_creator import OrganizationLogType, create_organization_log from common.api_helpers.filters import ByTeamFilter -from common.api_helpers.mixins import ( - DemoTokenMixin, - FilterSerializerMixin, - RateLimitHeadersMixin, - UpdateSerializerMixin, -) +from common.api_helpers.mixins import FilterSerializerMixin, RateLimitHeadersMixin, UpdateSerializerMixin from common.api_helpers.paginators import FiftyPageSizePaginator from .maintaiable_object_mixin import MaintainableObjectMixin @@ 
-24,7 +18,6 @@ from .maintaiable_object_mixin import MaintainableObjectMixin class IntegrationView( RateLimitHeadersMixin, - DemoTokenMixin, FilterSerializerMixin, UpdateSerializerMixin, MaintainableObjectMixin, @@ -41,8 +34,6 @@ class IntegrationView( pagination_class = FiftyPageSizePaginator - demo_default_id = public_api_constants.DEMO_INTEGRATION_ID - filter_backends = (filters.DjangoFilterBackend,) filterset_class = ByTeamFilter @@ -50,6 +41,10 @@ class IntegrationView( queryset = AlertReceiveChannel.objects.filter(organization=self.request.auth.organization).order_by( "created_at" ) + name = self.request.query_params.get("name", None) + if name is not None: + queryset = queryset.filter(verbal_name=name) + queryset = self.filter_queryset(queryset) queryset = self.serializer_class.setup_eager_loading(queryset) queryset = queryset.annotate(alert_groups_count_annotated=Count("alert_groups", distinct=True)) return queryset diff --git a/engine/apps/public_api/views/on_call_shifts.py b/engine/apps/public_api/views/on_call_shifts.py index 5f366f19..1d0df97a 100644 --- a/engine/apps/public_api/views/on_call_shifts.py +++ b/engine/apps/public_api/views/on_call_shifts.py @@ -4,17 +4,16 @@ from rest_framework.permissions import IsAuthenticated from rest_framework.viewsets import ModelViewSet from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.serializers import CustomOnCallShiftSerializer, CustomOnCallShiftUpdateSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.schedules.models import CustomOnCallShift from apps.user_management.organization_log_creator import OrganizationLogType, create_organization_log from common.api_helpers.filters import ByTeamFilter -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin, UpdateSerializerMixin +from common.api_helpers.mixins import RateLimitHeadersMixin, UpdateSerializerMixin from 
common.api_helpers.paginators import FiftyPageSizePaginator -class CustomOnCallShiftView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializerMixin, ModelViewSet): +class CustomOnCallShiftView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -29,8 +28,6 @@ class CustomOnCallShiftView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSeriali filter_backends = [DjangoFilterBackend] filterset_class = ByTeamFilter - demo_default_id = public_api_constants.DEMO_ON_CALL_SHIFT_ID_1 - def get_queryset(self): name = self.request.query_params.get("name", None) schedule_id = self.request.query_params.get("schedule_id", None) diff --git a/engine/apps/public_api/views/organizations.py b/engine/apps/public_api/views/organizations.py index d3bce01e..f4fd1352 100644 --- a/engine/apps/public_api/views/organizations.py +++ b/engine/apps/public_api/views/organizations.py @@ -3,17 +3,15 @@ from rest_framework.settings import api_settings from rest_framework.viewsets import ReadOnlyModelViewSet from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.serializers import OrganizationSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.user_management.models import Organization -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin +from common.api_helpers.mixins import RateLimitHeadersMixin from common.api_helpers.paginators import TwentyFivePageSizePaginator class OrganizationView( RateLimitHeadersMixin, - DemoTokenMixin, ReadOnlyModelViewSet, ): authentication_classes = (ApiTokenAuthentication,) @@ -26,8 +24,6 @@ class OrganizationView( pagination_class = TwentyFivePageSizePaginator - demo_default_id = public_api_constants.DEMO_ORGANIZATION_ID - def get_queryset(self): # It's a dirty hack to get queryset from the object. 
Just in case we'll return multiple teams in the future. return Organization.objects.filter(pk=self.request.auth.organization.pk) diff --git a/engine/apps/public_api/views/personal_notifications.py b/engine/apps/public_api/views/personal_notifications.py index 0b3e0b0a..3119bea9 100644 --- a/engine/apps/public_api/views/personal_notifications.py +++ b/engine/apps/public_api/views/personal_notifications.py @@ -6,17 +6,16 @@ from rest_framework.viewsets import ModelViewSet from apps.auth_token.auth import ApiTokenAuthentication from apps.base.models import UserNotificationPolicy -from apps.public_api import constants as public_api_constants from apps.public_api.serializers import PersonalNotificationRuleSerializer, PersonalNotificationRuleUpdateSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.user_management.models import User from apps.user_management.organization_log_creator import OrganizationLogType, create_organization_log from common.api_helpers.exceptions import BadRequest -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin, UpdateSerializerMixin +from common.api_helpers.mixins import RateLimitHeadersMixin, UpdateSerializerMixin from common.api_helpers.paginators import FiftyPageSizePaginator -class PersonalNotificationView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializerMixin, ModelViewSet): +class PersonalNotificationView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -28,8 +27,6 @@ class PersonalNotificationView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSeri pagination_class = FiftyPageSizePaginator - demo_default_id = public_api_constants.DEMO_PERSONAL_NOTIFICATION_ID_1 - def get_queryset(self): user_id = self.request.query_params.get("user_id", None) important = self.request.query_params.get("important", None) diff --git a/engine/apps/public_api/views/phone_notifications.py 
b/engine/apps/public_api/views/phone_notifications.py new file mode 100644 index 00000000..f9f96f74 --- /dev/null +++ b/engine/apps/public_api/views/phone_notifications.py @@ -0,0 +1,86 @@ +import logging + +from rest_framework import serializers, status +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response +from rest_framework.views import APIView +from twilio.base.exceptions import TwilioRestException + +from apps.auth_token.auth import ApiTokenAuthentication +from apps.public_api.throttlers.phone_notification_throttler import PhoneNotificationThrottler +from apps.twilioapp.models import PhoneCall, SMSMessage + +logger = logging.getLogger(__name__) + + +class PhoneNotificationDataSerializer(serializers.Serializer): + email = serializers.EmailField() + message = serializers.CharField(max_length=1024) + + +class MakeCallView(APIView): + authentication_classes = (ApiTokenAuthentication,) + permission_classes = (IsAuthenticated,) + + throttle_classes = [ + PhoneNotificationThrottler, + ] + + def post(self, request): + serializer = PhoneNotificationDataSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + response_data = {} + organization = self.request.auth.organization + logger.info(f"Making cloud call. Email {serializer.validated_data['email']}") + user = organization.users.filter( + email=serializer.validated_data["email"], _verified_phone_number__isnull=False + ).first() + if user is None: + response_data = {"error": "user-not-found"} + return Response(status=status.HTTP_404_NOT_FOUND, data=response_data) + + try: + PhoneCall.make_grafana_cloud_call(user, serializer.validated_data["message"]) + except TwilioRestException as e: + logger.info(f"Making cloud call. Twilio exception {str(e)}") + return Response(status=status.HTTP_503_SERVICE_UNAVAILABLE, data=response_data) + except PhoneCall.PhoneCallsLimitExceeded: + logger.info(f"Making cloud call. 
PhoneCallsLimitExceeded") + return Response(status=status.HTTP_400_BAD_REQUEST, data={"error": "limit-exceeded"}) + + return Response(status=status.HTTP_200_OK, data=response_data) + + +class SendSMSView(APIView): + authentication_classes = (ApiTokenAuthentication,) + permission_classes = (IsAuthenticated,) + + throttle_classes = [ + PhoneNotificationThrottler, + ] + + def post(self, request): + serializer = PhoneNotificationDataSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + + response_data = {} + organization = self.request.auth.organization + logger.info(f"Sending cloud sms. Email {serializer.validated_data['email']}") + user = organization.users.filter( + email=serializer.validated_data["email"], _verified_phone_number__isnull=False + ).first() + if user is None: + response_data = {"error": "user-not-found"} + return Response(status=status.HTTP_404_NOT_FOUND, data=response_data) + + try: + SMSMessage.send_grafana_cloud_sms(user, serializer.validated_data["message"]) + except TwilioRestException as e: + logger.info(f"Sending cloud sms. Twilio exception {str(e)}") + return Response(status=status.HTTP_503_SERVICE_UNAVAILABLE, data=response_data) + except SMSMessage.SMSLimitExceeded: + logger.info(f"Sending cloud sms. 
PhoneCallsLimitExceeded") + return Response(status=status.HTTP_400_BAD_REQUEST, data={"error": "limit-exceeded"}) + + return Response(status=status.HTTP_200_OK, data=response_data) diff --git a/engine/apps/public_api/views/resolution_notes.py b/engine/apps/public_api/views/resolution_notes.py index 16e3fa41..7d07ca1f 100644 --- a/engine/apps/public_api/views/resolution_notes.py +++ b/engine/apps/public_api/views/resolution_notes.py @@ -6,14 +6,13 @@ from rest_framework.viewsets import ModelViewSet from apps.alerts.models import ResolutionNote from apps.alerts.tasks import send_update_resolution_note_signal from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.serializers.resolution_notes import ResolutionNoteSerializer, ResolutionNoteUpdateSerializer from apps.public_api.throttlers.user_throttle import UserThrottle -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin, UpdateSerializerMixin +from common.api_helpers.mixins import RateLimitHeadersMixin, UpdateSerializerMixin from common.api_helpers.paginators import FiftyPageSizePaginator -class ResolutionNoteView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializerMixin, ModelViewSet): +class ResolutionNoteView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -28,8 +27,6 @@ class ResolutionNoteView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializer pagination_class = FiftyPageSizePaginator - demo_default_id = public_api_constants.DEMO_RESOLUTION_NOTE_ID - def get_queryset(self): alert_group_id = self.request.query_params.get("alert_group_id", None) queryset = ResolutionNote.objects.filter( diff --git a/engine/apps/public_api/views/routes.py b/engine/apps/public_api/views/routes.py index a353a962..c7afa492 100644 --- a/engine/apps/public_api/views/routes.py +++ 
b/engine/apps/public_api/views/routes.py @@ -7,16 +7,15 @@ from rest_framework.viewsets import ModelViewSet from apps.alerts.models import ChannelFilter from apps.auth_token.auth import ApiTokenAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.serializers import ChannelFilterSerializer, ChannelFilterUpdateSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.user_management.organization_log_creator import OrganizationLogType, create_organization_log from common.api_helpers.exceptions import BadRequest -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin, UpdateSerializerMixin +from common.api_helpers.mixins import RateLimitHeadersMixin, UpdateSerializerMixin from common.api_helpers.paginators import TwentyFivePageSizePaginator -class ChannelFilterView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializerMixin, ModelViewSet): +class ChannelFilterView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -31,8 +30,6 @@ class ChannelFilterView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializerM filter_backends = [DjangoFilterBackend] filterset_fields = ["alert_receive_channel"] - demo_default_id = public_api_constants.DEMO_ROUTE_ID_1 - def get_queryset(self): integration_id = self.request.query_params.get("integration_id", None) routing_regex = self.request.query_params.get("routing_regex", None) diff --git a/engine/apps/public_api/views/schedules.py b/engine/apps/public_api/views/schedules.py index 16f6a17a..946463cb 100644 --- a/engine/apps/public_api/views/schedules.py +++ b/engine/apps/public_api/views/schedules.py @@ -7,7 +7,6 @@ from rest_framework.views import Response from rest_framework.viewsets import ModelViewSet from apps.auth_token.auth import ApiTokenAuthentication, ScheduleExportAuthentication -from apps.public_api import constants as 
public_api_constants from apps.public_api.custom_renderers import CalendarRenderer from apps.public_api.serializers import PolymorphicScheduleSerializer, PolymorphicScheduleUpdateSerializer from apps.public_api.throttlers.user_throttle import UserThrottle @@ -16,11 +15,11 @@ from apps.schedules.models import OnCallSchedule from apps.slack.tasks import update_slack_user_group_for_schedules from apps.user_management.organization_log_creator import OrganizationLogType, create_organization_log from common.api_helpers.filters import ByTeamFilter -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin, UpdateSerializerMixin +from common.api_helpers.mixins import RateLimitHeadersMixin, UpdateSerializerMixin from common.api_helpers.paginators import FiftyPageSizePaginator -class OnCallScheduleChannelView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSerializerMixin, ModelViewSet): +class OnCallScheduleChannelView(RateLimitHeadersMixin, UpdateSerializerMixin, ModelViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -32,8 +31,6 @@ class OnCallScheduleChannelView(RateLimitHeadersMixin, DemoTokenMixin, UpdateSer pagination_class = FiftyPageSizePaginator - demo_default_id = public_api_constants.DEMO_SCHEDULE_ID_ICAL - filter_backends = (filters.DjangoFilterBackend,) filterset_class = ByTeamFilter diff --git a/engine/apps/public_api/views/slack_channels.py b/engine/apps/public_api/views/slack_channels.py index f261f0b6..14d53247 100644 --- a/engine/apps/public_api/views/slack_channels.py +++ b/engine/apps/public_api/views/slack_channels.py @@ -6,11 +6,11 @@ from apps.auth_token.auth import ApiTokenAuthentication from apps.public_api.serializers.slack_channel import SlackChannelSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.slack.models import SlackChannel -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin +from common.api_helpers.mixins import 
RateLimitHeadersMixin from common.api_helpers.paginators import FiftyPageSizePaginator -class SlackChannelView(RateLimitHeadersMixin, DemoTokenMixin, mixins.ListModelMixin, GenericViewSet): +class SlackChannelView(RateLimitHeadersMixin, mixins.ListModelMixin, GenericViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) pagination_class = FiftyPageSizePaginator diff --git a/engine/apps/public_api/views/user_groups.py b/engine/apps/public_api/views/user_groups.py index 4e6bbaf3..2859199d 100644 --- a/engine/apps/public_api/views/user_groups.py +++ b/engine/apps/public_api/views/user_groups.py @@ -6,11 +6,11 @@ from apps.auth_token.auth import ApiTokenAuthentication from apps.public_api.serializers.user_groups import UserGroupSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.slack.models import SlackUserGroup -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin +from common.api_helpers.mixins import RateLimitHeadersMixin from common.api_helpers.paginators import FiftyPageSizePaginator -class UserGroupView(RateLimitHeadersMixin, DemoTokenMixin, mixins.ListModelMixin, GenericViewSet): +class UserGroupView(RateLimitHeadersMixin, mixins.ListModelMixin, GenericViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) pagination_class = FiftyPageSizePaginator diff --git a/engine/apps/public_api/views/users.py b/engine/apps/public_api/views/users.py index 815c6553..54439d6e 100644 --- a/engine/apps/public_api/views/users.py +++ b/engine/apps/public_api/views/users.py @@ -5,19 +5,18 @@ from rest_framework.views import Response from rest_framework.viewsets import ReadOnlyModelViewSet from apps.auth_token.auth import ApiTokenAuthentication, UserScheduleExportAuthentication -from apps.public_api import constants as public_api_constants from apps.public_api.custom_renderers import CalendarRenderer from apps.public_api.serializers 
import FastUserSerializer, UserSerializer from apps.public_api.throttlers.user_throttle import UserThrottle from apps.schedules.ical_utils import user_ical_export from apps.schedules.models import OnCallSchedule from apps.user_management.models import User -from common.api_helpers.mixins import DemoTokenMixin, RateLimitHeadersMixin, ShortSerializerMixin +from common.api_helpers.mixins import RateLimitHeadersMixin, ShortSerializerMixin from common.api_helpers.paginators import HundredPageSizePaginator from common.constants.role import Role -class UserView(RateLimitHeadersMixin, ShortSerializerMixin, DemoTokenMixin, ReadOnlyModelViewSet): +class UserView(RateLimitHeadersMixin, ShortSerializerMixin, ReadOnlyModelViewSet): authentication_classes = (ApiTokenAuthentication,) permission_classes = (IsAuthenticated,) @@ -29,16 +28,18 @@ class UserView(RateLimitHeadersMixin, ShortSerializerMixin, DemoTokenMixin, Read throttle_classes = [UserThrottle] - demo_default_id = public_api_constants.DEMO_USER_ID - def get_queryset(self): username = self.request.query_params.get("username") + email = self.request.query_params.get("email") is_short_request = self.request.query_params.get("short", "false") == "true" queryset = self.request.auth.organization.users.filter(role__in=[Role.ADMIN, Role.EDITOR]).distinct() if username is not None: queryset = queryset.filter(username=username) + if email is not None: + queryset = queryset.filter(email=email) + if not is_short_request: queryset = self.serializer_class.setup_eager_loading(queryset) return queryset.order_by("id") diff --git a/engine/apps/schedules/ical_utils.py b/engine/apps/schedules/ical_utils.py index 309729cd..cb0bc342 100644 --- a/engine/apps/schedules/ical_utils.py +++ b/engine/apps/schedules/ical_utils.py @@ -26,14 +26,18 @@ if TYPE_CHECKING: from apps.user_management.models import User -def users_in_ical(usernames_from_ical, organization): +def users_in_ical(usernames_from_ical, organization, include_viewers=False): """ 
Parse ical file and return list of users found """ # Only grafana username will be used, consider adding grafana email and id - users_found_in_ical = organization.users.filter( - Q(role__in=(Role.ADMIN, Role.EDITOR)) & (Q(username__in=usernames_from_ical) | Q(email__in=usernames_from_ical)) + users_found_in_ical = organization.users + if not include_viewers: + users_found_in_ical = users_found_in_ical.filter(role__in=(Role.ADMIN, Role.EDITOR)) + + users_found_in_ical = users_found_in_ical.filter( + (Q(username__in=usernames_from_ical) | Q(email__in=usernames_from_ical)) ).distinct() # Here is the example how we extracted users previously, using slack fields too @@ -260,15 +264,17 @@ def list_of_empty_shifts_in_schedule(schedule, start_date, end_date): return sorted(empty_shifts, key=lambda dt: dt.start) -def list_users_to_notify_from_ical(schedule, events_datetime=None): +def list_users_to_notify_from_ical(schedule, events_datetime=None, include_viewers=False): """ Retrieve on-call users for the current time """ events_datetime = events_datetime if events_datetime else timezone.datetime.now(timezone.utc) - return list_users_to_notify_from_ical_for_period(schedule, events_datetime, events_datetime) + return list_users_to_notify_from_ical_for_period( + schedule, events_datetime, events_datetime, include_viewers=include_viewers + ) -def list_users_to_notify_from_ical_for_period(schedule, start_datetime, end_datetime): +def list_users_to_notify_from_ical_for_period(schedule, start_datetime, end_datetime, include_viewers=False): # get list of iCalendars from current iCal files. If there is more than one calendar, primary calendar will always # be the first calendars = schedule.get_icalendars() @@ -286,7 +292,7 @@ def list_users_to_notify_from_ical_for_period(schedule, start_datetime, end_date parsed_ical_events.setdefault(current_priority, []).extend(current_usernames) # find users by usernames. 
if users are not found for shift, get users from lower priority for _, usernames in sorted(parsed_ical_events.items(), reverse=True): - users_found_in_ical = users_in_ical(usernames, schedule.organization) + users_found_in_ical = users_in_ical(usernames, schedule.organization, include_viewers=include_viewers) if users_found_in_ical: break if users_found_in_ical: diff --git a/engine/apps/schedules/tasks/notify_about_empty_shifts_in_schedule.py b/engine/apps/schedules/tasks/notify_about_empty_shifts_in_schedule.py index 5d681bb7..82d96a7e 100644 --- a/engine/apps/schedules/tasks/notify_about_empty_shifts_in_schedule.py +++ b/engine/apps/schedules/tasks/notify_about_empty_shifts_in_schedule.py @@ -4,7 +4,6 @@ from django.apps import apps from django.core.cache import cache from django.utils import timezone -from apps.public_api.constants import DEMO_SCHEDULE_ID_CALENDAR, DEMO_SCHEDULE_ID_ICAL from apps.schedules.ical_utils import list_of_empty_shifts_in_schedule from apps.slack.utils import format_datetime_to_slack, post_message_to_channel from common.custom_celery_tasks import shared_dedicated_queue_retry_task @@ -19,9 +18,7 @@ def start_check_empty_shifts_in_schedule(): task_logger.info("Start start_notify_about_empty_shifts_in_schedule") - schedules = OnCallSchedule.objects.exclude( - public_primary_key__in=(DEMO_SCHEDULE_ID_CALENDAR, DEMO_SCHEDULE_ID_ICAL) - ) + schedules = OnCallSchedule.objects.all() for schedule in schedules: check_empty_shifts_in_schedule.apply_async((schedule.pk,)) @@ -58,7 +55,7 @@ def start_notify_about_empty_shifts_in_schedule(): schedules = OnCallSchedule.objects.filter( empty_shifts_report_sent_at__lte=week_ago, channel__isnull=False, - ).exclude(public_primary_key__in=(DEMO_SCHEDULE_ID_CALENDAR, DEMO_SCHEDULE_ID_ICAL)) + ) for schedule in schedules: notify_about_empty_shifts_in_schedule.apply_async((schedule.pk,)) diff --git a/engine/apps/schedules/tasks/notify_about_gaps_in_schedule.py 
b/engine/apps/schedules/tasks/notify_about_gaps_in_schedule.py index 4a4749f6..76d8bfd8 100644 --- a/engine/apps/schedules/tasks/notify_about_gaps_in_schedule.py +++ b/engine/apps/schedules/tasks/notify_about_gaps_in_schedule.py @@ -4,7 +4,6 @@ from django.apps import apps from django.core.cache import cache from django.utils import timezone -from apps.public_api.constants import DEMO_SCHEDULE_ID_CALENDAR, DEMO_SCHEDULE_ID_ICAL from apps.schedules.ical_utils import list_of_gaps_in_schedule from apps.slack.utils import format_datetime_to_slack, post_message_to_channel from common.custom_celery_tasks import shared_dedicated_queue_retry_task @@ -18,9 +17,7 @@ def start_check_gaps_in_schedule(): task_logger.info("Start start_check_gaps_in_schedule") - schedules = OnCallSchedule.objects.exclude( - public_primary_key__in=(DEMO_SCHEDULE_ID_CALENDAR, DEMO_SCHEDULE_ID_ICAL) - ) + schedules = OnCallSchedule.objects.all() for schedule in schedules: check_gaps_in_schedule.apply_async((schedule.pk,)) @@ -57,7 +54,7 @@ def start_notify_about_gaps_in_schedule(): schedules = OnCallSchedule.objects.filter( gaps_report_sent_at__lte=week_ago, channel__isnull=False, - ).exclude(public_primary_key__in=(DEMO_SCHEDULE_ID_CALENDAR, DEMO_SCHEDULE_ID_ICAL)) + ) for schedule in schedules: notify_about_gaps_in_schedule.apply_async((schedule.pk,)) diff --git a/engine/apps/schedules/tasks/refresh_ical_files.py b/engine/apps/schedules/tasks/refresh_ical_files.py index 083e198f..5e446b8c 100644 --- a/engine/apps/schedules/tasks/refresh_ical_files.py +++ b/engine/apps/schedules/tasks/refresh_ical_files.py @@ -2,7 +2,6 @@ from celery.utils.log import get_task_logger from django.apps import apps from apps.alerts.tasks import notify_ical_schedule_shift -from apps.public_api.constants import DEMO_SCHEDULE_ID_CALENDAR, DEMO_SCHEDULE_ID_ICAL from apps.schedules.ical_utils import is_icals_equal from apps.schedules.tasks import notify_about_empty_shifts_in_schedule, notify_about_gaps_in_schedule from 
apps.slack.tasks import start_update_slack_user_group_for_schedules @@ -17,9 +16,7 @@ def start_refresh_ical_files(): task_logger.info("Start refresh ical files") - schedules = OnCallSchedule.objects.all().exclude( - public_primary_key__in=(DEMO_SCHEDULE_ID_CALENDAR, DEMO_SCHEDULE_ID_ICAL) - ) + schedules = OnCallSchedule.objects.all() for schedule in schedules: refresh_ical_file.apply_async((schedule.pk,)) diff --git a/engine/apps/schedules/tests/test_ical_utils.py b/engine/apps/schedules/tests/test_ical_utils.py new file mode 100644 index 00000000..8032334d --- /dev/null +++ b/engine/apps/schedules/tests/test_ical_utils.py @@ -0,0 +1,60 @@ +import pytest +from django.utils import timezone + +from apps.schedules.ical_utils import list_users_to_notify_from_ical, users_in_ical +from apps.schedules.models import CustomOnCallShift, OnCallScheduleCalendar +from common.constants.role import Role + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "include_viewers", + [True, False], +) +def test_users_in_ical_viewers_inclusion(make_organization_and_user, make_user_for_organization, include_viewers): + organization, user = make_organization_and_user() + viewer = make_user_for_organization(organization, Role.VIEWER) + + usernames = [user.username, viewer.username] + result = users_in_ical(usernames, organization, include_viewers=include_viewers) + if include_viewers: + assert set(result) == {user, viewer} + else: + assert set(result) == {user} + + +@pytest.mark.django_db +@pytest.mark.parametrize( + "include_viewers", + [True, False], +) +def test_list_users_to_notify_from_ical_viewers_inclusion( + make_organization_and_user, make_user_for_organization, make_schedule, make_on_call_shift, include_viewers +): + organization, user = make_organization_and_user() + viewer = make_user_for_organization(organization, Role.VIEWER) + + schedule = make_schedule(organization, schedule_class=OnCallScheduleCalendar) + date = timezone.now().replace(tzinfo=None, microsecond=0) + data 
= { + "priority_level": 1, + "start": date, + "duration": timezone.timedelta(seconds=10800), + } + on_call_shift = make_on_call_shift( + organization=organization, shift_type=CustomOnCallShift.TYPE_SINGLE_EVENT, **data + ) + on_call_shift.users.add(user) + on_call_shift.users.add(viewer) + schedule.custom_on_call_shifts.add(on_call_shift) + + # get users on-call + date = date + timezone.timedelta(minutes=5) + users_on_call = list_users_to_notify_from_ical(schedule, date, include_viewers=include_viewers) + + if include_viewers: + assert len(users_on_call) == 2 + assert set(users_on_call) == {user, viewer} + else: + assert len(users_on_call) == 1 + assert set(users_on_call) == {user} diff --git a/engine/apps/slack/migrations/0003_squashed_create_demo_token_instances.py b/engine/apps/slack/migrations/0003_squashed_create_demo_token_instances.py deleted file mode 100644 index ae3368f1..00000000 --- a/engine/apps/slack/migrations/0003_squashed_create_demo_token_instances.py +++ /dev/null @@ -1,47 +0,0 @@ -# Generated by Django 3.2.5 on 2021-08-04 10:51 - -import sys -from django.db import migrations -from apps.public_api import constants as public_api_constants - - -def create_demo_token_instances(apps, schema_editor): - if not (len(sys.argv) > 1 and sys.argv[1] == 'test'): - SlackUserIdentity = apps.get_model('slack', 'SlackUserIdentity') - SlackTeamIdentity = apps.get_model('slack', 'SlackTeamIdentity') - SlackChannel = apps.get_model('slack', 'SlackChannel') - SlackUserGroup = apps.get_model("slack", "SlackUserGroup") - - slack_team_identity, _ = SlackTeamIdentity.objects.get_or_create( - slack_id=public_api_constants.DEMO_SLACK_TEAM_ID, - ) - SlackUserIdentity.objects.get_or_create( - slack_id=public_api_constants.DEMO_SLACK_USER_ID, - slack_team_identity=slack_team_identity, - ) - - SlackChannel.objects.get_or_create( - name=public_api_constants.DEMO_SLACK_CHANNEL_NAME, - slack_id=public_api_constants.DEMO_SLACK_CHANNEL_SLACK_ID, - 
slack_team_identity=slack_team_identity, - ) - - SlackUserGroup.objects.get_or_create( - slack_team_identity=slack_team_identity, - slack_id=public_api_constants.DEMO_SLACK_USER_GROUP_SLACK_ID, - public_primary_key=public_api_constants.DEMO_SLACK_USER_GROUP_ID, - name=public_api_constants.DEMO_SLACK_USER_GROUP_NAME, - handle=public_api_constants.DEMO_SLACK_USER_GROUP_HANDLE, - is_active=True, - ) - - -class Migration(migrations.Migration): - - dependencies = [ - ('slack', '0002_squashed_initial'), - ] - - operations = [ - migrations.RunPython(create_demo_token_instances, migrations.RunPython.noop) - ] diff --git a/engine/apps/slack/scenarios/alertgroup_appearance.py b/engine/apps/slack/scenarios/alertgroup_appearance.py index edf0a704..1ccba05f 100644 --- a/engine/apps/slack/scenarios/alertgroup_appearance.py +++ b/engine/apps/slack/scenarios/alertgroup_appearance.py @@ -56,7 +56,11 @@ class OpenAlertAppearanceDialogStep( raw_request_data = json.dumps(alert_group.alerts.first().raw_request_data, sort_keys=True, indent=4) # This is a special case for amazon sns notifications in str format CHEKED - if alert_group.channel.integration == AlertReceiveChannel.INTEGRATION_AMAZON_SNS and raw_request_data == "{}": + if ( + AlertReceiveChannel.INTEGRATION_AMAZON_SNS is not None + and alert_group.channel.integration == AlertReceiveChannel.INTEGRATION_AMAZON_SNS + and raw_request_data == "{}" + ): raw_request_data = alert_group.alerts.first().message raw_request_data_chunks = [ diff --git a/engine/apps/slack/tasks.py b/engine/apps/slack/tasks.py index 48c688be..e2c250a0 100644 --- a/engine/apps/slack/tasks.py +++ b/engine/apps/slack/tasks.py @@ -9,8 +9,6 @@ from django.core.cache import cache from django.utils import timezone from apps.alerts.tasks.compare_escalations import compare_escalations -from apps.public_api import constants as public_constants -from apps.public_api.constants import DEMO_SLACK_USER_GROUP_ID from apps.slack.constants import 
CACHE_UPDATE_INCIDENT_SLACK_MESSAGE_LIFETIME, SLACK_BOT_ID from apps.slack.scenarios.escalation_delivery import EscalationDeliveryStep from apps.slack.scenarios.scenario_step import ScenarioStep @@ -499,7 +497,7 @@ def populate_slack_usergroups(): slack_team_identities = SlackTeamIdentity.objects.filter( detected_token_revoked__isnull=True, - ).exclude(slack_id=public_constants.DEMO_SLACK_TEAM_ID) + ) delay = 0 counter = 0 @@ -642,10 +640,7 @@ def start_update_slack_user_group_for_schedules(): SlackUserGroup = apps.get_model("slack", "SlackUserGroup") user_group_pks = ( - SlackUserGroup.objects.exclude(public_primary_key=DEMO_SLACK_USER_GROUP_ID) - .filter(oncall_schedules__isnull=False) - .distinct() - .values_list("pk", flat=True) + SlackUserGroup.objects.filter(oncall_schedules__isnull=False).distinct().values_list("pk", flat=True) ) for user_group_pk in user_group_pks: @@ -673,7 +668,7 @@ def populate_slack_channels(): slack_team_identities = SlackTeamIdentity.objects.filter( detected_token_revoked__isnull=True, - ).exclude(slack_id=public_constants.DEMO_SLACK_TEAM_ID) + ) delay = 0 counter = 0 diff --git a/engine/apps/slack/views.py b/engine/apps/slack/views.py index 8988594c..5990b0b6 100644 --- a/engine/apps/slack/views.py +++ b/engine/apps/slack/views.py @@ -119,6 +119,9 @@ class SlackEventApiEndpointView(APIView): return Response(status=403) if not settings.DEBUG: + if live_settings.SLACK_SIGNING_SECRET is None and settings.SLACK_SIGNING_SECRET_LIVE: + raise Exception("Please specify SLACK_SIGNING_SECRET or use DEBUG.") + if not ( SlackEventApiEndpointView.verify_signature( slack_request_timestamp, slack_signature, body, live_settings.SLACK_SIGNING_SECRET diff --git a/engine/apps/social_auth/live_setting_django_strategy.py b/engine/apps/social_auth/live_setting_django_strategy.py index dd913e67..d2cf0fe1 100644 --- a/engine/apps/social_auth/live_setting_django_strategy.py +++ b/engine/apps/social_auth/live_setting_django_strategy.py @@ -34,8 +34,10 @@ 
class LiveSettingDjangoStrategy(DjangoStrategy): def build_absolute_uri(self, path=None): """ - Overriden DjangoStrategy's method to substitute and force the host value from ENV + Overridden DjangoStrategy's method to substitute and force the host value from ENV """ + if live_settings.SLACK_INSTALL_RETURN_REDIRECT_HOST is not None and path is not None: + return live_settings.SLACK_INSTALL_RETURN_REDIRECT_HOST + path if settings.SLACK_INSTALL_RETURN_REDIRECT_HOST is not None and path is not None: return settings.SLACK_INSTALL_RETURN_REDIRECT_HOST + path if self.request: diff --git a/engine/apps/telegram/client.py b/engine/apps/telegram/client.py index 280e26c5..4de25de1 100644 --- a/engine/apps/telegram/client.py +++ b/engine/apps/telegram/client.py @@ -1,6 +1,6 @@ from typing import Optional, Tuple, Union +from urllib.parse import urljoin -from django.conf import settings from telegram import Bot, InlineKeyboardMarkup, Message, ParseMode from telegram.error import InvalidToken, Unauthorized from telegram.utils.request import Request @@ -34,7 +34,10 @@ class TelegramClient: return False def register_webhook(self, webhook_url: Optional[str] = None) -> None: - webhook_url = webhook_url or settings.TELEGRAM_WEBHOOK_URL + webhook_url = webhook_url or urljoin(live_settings.TELEGRAM_WEBHOOK_HOST, "/telegram/") + + if webhook_url is None: + webhook_url = live_settings.TELEGRAM_WEBHOOK_URL webhook_info = self.api_client.get_webhook_info() if webhook_info.url == webhook_url: diff --git a/engine/apps/twilioapp/migrations/0002_auto_20220604_1008.py b/engine/apps/twilioapp/migrations/0002_auto_20220604_1008.py new file mode 100644 index 00000000..cddd898c --- /dev/null +++ b/engine/apps/twilioapp/migrations/0002_auto_20220604_1008.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2.5 on 2022-06-04 10:08 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('twilioapp', '0001_squashed_initial'), + ] + + operations = [ + 
migrations.AddField( + model_name='phonecall', + name='grafana_cloud_notification', + field=models.BooleanField(default=False), + ), + migrations.AddField( + model_name='smsmessage', + name='grafana_cloud_notification', + field=models.BooleanField(default=False), + ), + ] diff --git a/engine/apps/twilioapp/models/phone_call.py b/engine/apps/twilioapp/models/phone_call.py index 7d5ae0f9..64b4304e 100644 --- a/engine/apps/twilioapp/models/phone_call.py +++ b/engine/apps/twilioapp/models/phone_call.py @@ -1,14 +1,20 @@ import logging +from urllib.parse import urljoin +import requests from django.apps import apps +from django.conf import settings from django.db import models +from rest_framework import status from twilio.base.exceptions import TwilioRestException from apps.alerts.constants import ActionSource from apps.alerts.incident_appearance.renderers.phone_call_renderer import AlertGroupPhoneCallRenderer from apps.alerts.signals import user_notification_action_triggered_signal +from apps.base.utils import live_settings from apps.twilioapp.constants import TwilioCallStatuses from apps.twilioapp.twilio_client import twilio_client +from common.utils import clean_markup, escape_for_twilio_phone_call logger = logging.getLogger(__name__) @@ -34,8 +40,10 @@ class PhoneCallManager(models.Manager): if phone_call_qs.exists() and status: phone_call_qs.update(status=status) - phone_call = phone_call_qs.first() + if phone_call.grafana_cloud_notification: + # If the call was made via grafana twilio, there is no need to create logs on its delivery status.
+ return log_record = None if status == TwilioCallStatuses.COMPLETED: log_record = UserNotificationPolicyLogRecord( @@ -115,6 +123,17 @@ class PhoneCall(models.Model): created_at = models.DateTimeField(auto_now_add=True) + grafana_cloud_notification = models.BooleanField(default=False) + + class PhoneCallsLimitExceeded(Exception): + """Phone calls limit exceeded""" + + class PhoneNumberNotVerifiedError(Exception): + """Phone number is not verified""" + + class CloudSendError(Exception): + """Error making call through cloud""" + def process_digit(self, digit): """The function process pressed digit at time of call to user @@ -138,57 +157,59 @@ class PhoneCall(models.Model): return bool(self.represents_alert_group.slack_message) @classmethod - def make_call(cls, user, alert_group, notification_policy): - UserNotificationPolicyLogRecord = apps.get_model("base", "UserNotificationPolicyLogRecord") - - organization = alert_group.channel.organization - - log_record = None - if user.verified_phone_number: - # Create a PhoneCall object in db - phone_call = PhoneCall( - represents_alert_group=alert_group, - receiver=user, - notification_policy=notification_policy, - ) - - phone_calls_left = organization.phone_calls_left(user) - - if phone_calls_left > 0: - phone_call.exceeded_limit = False - renderer = AlertGroupPhoneCallRenderer(alert_group) - message_body = renderer.render() - if phone_calls_left < 3: - message_body += " {} phone calls left. 
Contact your admin.".format(phone_calls_left) - try: - twilio_call = twilio_client.make_call(message_body, user.verified_phone_number) - except TwilioRestException: - log_record = UserNotificationPolicyLogRecord( - author=user, - type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, - notification_policy=notification_policy, - alert_group=alert_group, - notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ABLE_TO_CALL, - notification_step=notification_policy.step if notification_policy else None, - notification_channel=notification_policy.notify_by if notification_policy else None, - ) - else: - if twilio_call.status and twilio_call.sid: - phone_call.status = TwilioCallStatuses.DETERMINANT.get(twilio_call.status, None) - phone_call.sid = twilio_call.sid - else: - log_record = UserNotificationPolicyLogRecord( - author=user, - type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, - notification_policy=notification_policy, - alert_group=alert_group, - notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_PHONE_CALLS_LIMIT_EXCEEDED, - notification_step=notification_policy.step if notification_policy else None, - notification_channel=notification_policy.notify_by if notification_policy else None, - ) - phone_call.exceeded_limit = True - phone_call.save() + def _make_cloud_call(cls, user, message_body): + url = urljoin(settings.GRAFANA_CLOUD_ONCALL_API_URL, "api/v1/make_call") + auth = {"Authorization": live_settings.GRAFANA_CLOUD_ONCALL_TOKEN} + data = { + "email": user.email, + "message": message_body, + } + try: + response = requests.post(url, headers=auth, data=data, timeout=5) + except requests.exceptions.RequestException as e: + logger.warning(f"Unable to make call through cloud. 
Request exception {str(e)}") + raise PhoneCall.CloudSendError("Unable to make call through cloud: request failed") + if response.status_code == status.HTTP_200_OK: + logger.info("Make cloud call successfully") + elif response.status_code == status.HTTP_400_BAD_REQUEST and response.json().get("error") == "limit-exceeded": + raise PhoneCall.PhoneCallsLimitExceeded("Organization calls limit exceeded") + elif response.status_code == status.HTTP_404_NOT_FOUND: + raise PhoneCall.CloudSendError("Unable to make call through cloud: user not found") + else: + raise PhoneCall.CloudSendError("Unable to make call through cloud: server error") + + @classmethod + def make_call(cls, user, alert_group, notification_policy, is_cloud_notification=False): + UserNotificationPolicyLogRecord = apps.get_model("base", "UserNotificationPolicyLogRecord") + log_record = None + renderer = AlertGroupPhoneCallRenderer(alert_group) + message_body = renderer.render() + try: + if is_cloud_notification: + cls._make_cloud_call(user, message_body) + else: + cls._make_call(user, message_body, alert_group=alert_group, notification_policy=notification_policy) + except (TwilioRestException, PhoneCall.CloudSendError): + log_record = UserNotificationPolicyLogRecord( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + notification_policy=notification_policy, + alert_group=alert_group, + notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ABLE_TO_CALL, + notification_step=notification_policy.step if notification_policy else None, + notification_channel=notification_policy.notify_by if notification_policy else None, + ) + except PhoneCall.PhoneCallsLimitExceeded: + log_record = UserNotificationPolicyLogRecord( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + notification_policy=notification_policy, + alert_group=alert_group, +
notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_PHONE_CALLS_LIMIT_EXCEEDED, + notification_step=notification_policy.step if notification_policy else None, + notification_channel=notification_policy.notify_by if notification_policy else None, + ) + except PhoneCall.PhoneNumberNotVerifiedError: log_record = UserNotificationPolicyLogRecord( author=user, type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, @@ -203,6 +224,41 @@ class PhoneCall(models.Model): log_record.save() user_notification_action_triggered_signal.send(sender=PhoneCall.make_call, log_record=log_record) + @classmethod + def make_grafana_cloud_call(cls, user, message_body): + message_body = escape_for_twilio_phone_call(clean_markup(message_body)) + cls._make_call(user, message_body, grafana_cloud=True) + + @classmethod + def _make_call(cls, user, message_body, alert_group=None, notification_policy=None, grafana_cloud=False): + if not user.verified_phone_number: + raise PhoneCall.PhoneNumberNotVerifiedError("User phone number is not verified") + + phone_call = PhoneCall( + represents_alert_group=alert_group, + receiver=user, + notification_policy=notification_policy, + grafana_cloud_notification=grafana_cloud, + ) + phone_calls_left = user.organization.phone_calls_left(user) + + if phone_calls_left <= 0: + phone_call.exceeded_limit = True + phone_call.save() + raise PhoneCall.PhoneCallsLimitExceeded("Organization calls limit exceeded") + + phone_call.exceeded_limit = False + if phone_calls_left < 3: + message_body += " {} phone calls left. 
Contact your admin.".format(phone_calls_left) + + twilio_call = twilio_client.make_call(message_body, user.verified_phone_number) + if twilio_call.status and twilio_call.sid: + phone_call.status = TwilioCallStatuses.DETERMINANT.get(twilio_call.status, None) + phone_call.sid = twilio_call.sid + phone_call.save() + + return phone_call + @staticmethod def get_error_code_by_twilio_status(status): UserNotificationPolicyLogRecord = apps.get_model("base", "UserNotificationPolicyLogRecord") diff --git a/engine/apps/twilioapp/models/sms_message.py b/engine/apps/twilioapp/models/sms_message.py index 09404e56..00e98e4b 100644 --- a/engine/apps/twilioapp/models/sms_message.py +++ b/engine/apps/twilioapp/models/sms_message.py @@ -1,13 +1,19 @@ import logging +from urllib.parse import urljoin +import requests from django.apps import apps +from django.conf import settings from django.db import models +from rest_framework import status from twilio.base.exceptions import TwilioRestException from apps.alerts.incident_appearance.renderers.sms_renderer import AlertGroupSmsRenderer from apps.alerts.signals import user_notification_action_triggered_signal +from apps.base.utils import live_settings from apps.twilioapp.constants import TwilioMessageStatuses from apps.twilioapp.twilio_client import twilio_client +from common.utils import clean_markup logger = logging.getLogger(__name__) @@ -36,7 +42,9 @@ class SMSMessageManager(models.Manager): sms_message_qs.update(status=status) sms_message = sms_message_qs.first() - + if sms_message.grafana_cloud_notification: + # If sms was sent via grafana cloud notifications don't create logs on its delivery status. 
+ return log_record = None if status == TwilioMessageStatuses.DELIVERED: @@ -90,6 +98,7 @@ class SMSMessage(models.Model): null=True, choices=TwilioMessageStatuses.CHOICES, ) + grafana_cloud_notification = models.BooleanField(default=False) # https://www.twilio.com/docs/sms/api/message-resource#message-properties sid = models.CharField( @@ -99,66 +108,77 @@ class SMSMessage(models.Model): created_at = models.DateTimeField(auto_now_add=True) + class SMSLimitExceeded(Exception): + """SMS limit exceeded""" + + class PhoneNumberNotVerifiedError(Exception): + """Phone number is not verified""" + + class CloudSendError(Exception): + """SMS sending through cloud error""" + @property def created_for_slack(self): return bool(self.represents_alert_group.slack_message) @classmethod - def send_sms(cls, user, alert_group, notification_policy): + def _send_cloud_sms(cls, user, message_body): + url = urljoin(settings.GRAFANA_CLOUD_ONCALL_API_URL, "api/v1/send_sms") + auth = {"Authorization": live_settings.GRAFANA_CLOUD_ONCALL_TOKEN} + data = { + "email": user.email, + "message": message_body, + } + try: + response = requests.post(url, headers=auth, data=data, timeout=5) + except requests.exceptions.RequestException as e: + logger.warning(f"Unable to send SMS through cloud. 
Request exception {str(e)}") + raise SMSMessage.CloudSendError("Unable to send SMS through cloud: request failed") + if response.status_code == status.HTTP_200_OK: + logger.info("Sent cloud sms successfully") + elif response.status_code == status.HTTP_400_BAD_REQUEST and response.json().get("error") == "limit-exceeded": + raise SMSMessage.SMSLimitExceeded("Organization sms limit exceeded") + elif response.status_code == status.HTTP_404_NOT_FOUND: + raise SMSMessage.CloudSendError("Unable to send SMS through cloud: user not found") + else: + raise SMSMessage.CloudSendError("Unable to send SMS through cloud: server error") + + @classmethod + def send_sms(cls, user, alert_group, notification_policy, is_cloud_notification=False): UserNotificationPolicyLogRecord = apps.get_model("base", "UserNotificationPolicyLogRecord") - organization = alert_group.channel.organization - log_record = None - if user.verified_phone_number: - # Create an SMS object in db - sms_message = SMSMessage( - represents_alert_group=alert_group, receiver=user, notification_policy=notification_policy - ) - - sms_left = organization.sms_left(user) - if sms_left > 0: - # Mark is as successfully sent - sms_message.exceeded_limit = False - # Render alert message for sms - renderer = AlertGroupSmsRenderer(alert_group) - message_body = renderer.render() - # Notify if close to limit - if sms_left < 3: - message_body += " {} sms left. 
Contact your admin.".format(sms_left) - # Send an sms - try: - twilio_message = twilio_client.send_message(message_body, user.verified_phone_number) - except TwilioRestException: - log_record = UserNotificationPolicyLogRecord( - author=user, - type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, - notification_policy=notification_policy, - alert_group=alert_group, - notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ABLE_TO_SEND_SMS, - notification_step=notification_policy.step if notification_policy else None, - notification_channel=notification_policy.notify_by if notification_policy else None, - ) - else: - if twilio_message.status and twilio_message.sid: - sms_message.status = TwilioMessageStatuses.DETERMINANT.get(twilio_message.status, None) - sms_message.sid = twilio_message.sid + renderer = AlertGroupSmsRenderer(alert_group) + message_body = renderer.render() + try: + if is_cloud_notification: + cls._send_cloud_sms(user, message_body) else: - # If no more sms left, mark as exceeded limit - log_record = UserNotificationPolicyLogRecord( - author=user, - type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, - notification_policy=notification_policy, - alert_group=alert_group, - notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_SMS_LIMIT_EXCEEDED, - notification_step=notification_policy.step if notification_policy else None, - notification_channel=notification_policy.notify_by if notification_policy else None, - ) - sms_message.exceeded_limit = True - - # Save object - sms_message.save() - else: + cls._send_sms(user, message_body, alert_group=alert_group, notification_policy=notification_policy) + except (TwilioRestException, SMSMessage.CloudSendError) as e: + logger.warning(f"Unable to send sms. 
Exception {e}") + log_record = UserNotificationPolicyLogRecord( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + notification_policy=notification_policy, + alert_group=alert_group, + notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_NOT_ABLE_TO_SEND_SMS, + notification_step=notification_policy.step if notification_policy else None, + notification_channel=notification_policy.notify_by if notification_policy else None, + ) + except SMSMessage.SMSLimitExceeded as e: + logger.warning(f"Unable to send sms. Exception {e}") + log_record = UserNotificationPolicyLogRecord( + author=user, + type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, + notification_policy=notification_policy, + alert_group=alert_group, + notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_SMS_LIMIT_EXCEEDED, + notification_step=notification_policy.step if notification_policy else None, + notification_channel=notification_policy.notify_by if notification_policy else None, + ) + except SMSMessage.PhoneNumberNotVerifiedError as e: + logger.warning(f"Unable to send sms. 
Exception {e}") log_record = UserNotificationPolicyLogRecord( author=user, type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED, @@ -173,6 +193,41 @@ class SMSMessage(models.Model): log_record.save() user_notification_action_triggered_signal.send(sender=SMSMessage.send_sms, log_record=log_record) + @classmethod + def send_grafana_cloud_sms(cls, user, message_body): + message_body = clean_markup(message_body) + cls._send_sms(user, message_body, grafana_cloud=True) + + @classmethod + def _send_sms(cls, user, message_body, alert_group=None, notification_policy=None, grafana_cloud=False): + if not user.verified_phone_number: + raise SMSMessage.PhoneNumberNotVerifiedError("User phone number is not verified") + + sms_message = SMSMessage( + represents_alert_group=alert_group, + receiver=user, + notification_policy=notification_policy, + grafana_cloud_notification=grafana_cloud, + ) + sms_left = user.organization.sms_left(user) + + if sms_left <= 0: + sms_message.exceeded_limit = True + sms_message.save() + raise SMSMessage.SMSLimitExceeded("Organization sms limit exceeded") + + sms_message.exceeded_limit = False + if sms_left < 3: + message_body += " {} sms left. 
Contact your admin.".format(sms_left) + + twilio_message = twilio_client.send_message(message_body, user.verified_phone_number) + if twilio_message.status and twilio_message.sid: + sms_message.status = TwilioMessageStatuses.DETERMINANT.get(twilio_message.status, None) + sms_message.sid = twilio_message.sid + sms_message.save() + + return sms_message + @staticmethod def get_error_code_by_twilio_status(status): UserNotificationPolicyLogRecord = apps.get_model("base", "UserNotificationPolicyLogRecord") diff --git a/engine/apps/user_management/migrations/0002_squashed_create_demo_token_instances.py b/engine/apps/user_management/migrations/0002_squashed_create_demo_token_instances.py deleted file mode 100644 index 8b8f932c..00000000 --- a/engine/apps/user_management/migrations/0002_squashed_create_demo_token_instances.py +++ /dev/null @@ -1,51 +0,0 @@ -# Generated by Django 3.2.5 on 2021-08-04 10:46 - -import sys -from django.db import migrations -from apps.public_api import constants as public_api_constants -from common.constants.role import Role - - -def create_demo_token_instances(apps, schema_editor): - if not (len(sys.argv) > 1 and sys.argv[1] == 'test'): - SlackUserIdentity = apps.get_model('slack', 'SlackUserIdentity') - SlackTeamIdentity = apps.get_model('slack', 'SlackTeamIdentity') - User = apps.get_model('user_management', 'User') - Organization = apps.get_model('user_management', 'Organization') - - slack_team_identity = SlackTeamIdentity.objects.get(slack_id=public_api_constants.DEMO_SLACK_TEAM_ID) - slack_user_identity = SlackUserIdentity.objects.get( - slack_id=public_api_constants.DEMO_SLACK_USER_ID, - slack_team_identity=slack_team_identity, - ) - - organization, _ = Organization.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_ORGANIZATION_ID, - defaults=dict( - slack_team_identity=slack_team_identity, - org_id=0, stack_id=0, - ) - ) - User.objects.get_or_create( - public_primary_key=public_api_constants.DEMO_USER_ID, - 
defaults=dict( - username=public_api_constants.DEMO_USER_USERNAME, - email=public_api_constants.DEMO_USER_EMAIL, - organization=organization, - role=Role.ADMIN, - slack_user_identity=slack_user_identity, - user_id=0, - ) - ) - - -class Migration(migrations.Migration): - - dependencies = [ - ('user_management', '0001_squashed_initial'), - ('slack', '0003_squashed_create_demo_token_instances'), - ] - - operations = [ - migrations.RunPython(create_demo_token_instances, migrations.RunPython.noop) - ] diff --git a/engine/common/api_helpers/mixins.py b/engine/common/api_helpers/mixins.py index d121d2fd..503a477e 100644 --- a/engine/common/api_helpers/mixins.py +++ b/engine/common/api_helpers/mixins.py @@ -19,7 +19,6 @@ from apps.alerts.incident_appearance.templaters import ( TemplateLoader, ) from apps.base.messaging import get_messaging_backends -from apps.public_api.helpers import is_demo_token_request from common.api_helpers.exceptions import BadRequest from common.jinja_templater import apply_jinja_template @@ -125,83 +124,6 @@ class EagerLoadingMixin: return queryset -class DemoTokenMixin: - """ - The view mixin for requests to public api with demo token authorization. 
- """ - - def dispatch(self, request, *args, **kwargs): - """ - Overridden dispatch method of APIView - https://github.com/encode/django-rest-framework/blob/master/rest_framework/views.py#L485 - """ - method = request.method.lower() - - if is_demo_token_request(request) and method in ["post", "put", "delete"]: - self.args = args - self.kwargs = kwargs - request = self.initialize_request(request, *args, **kwargs) - self.request = request - - # there is a strange comment about this - # https://github.com/encode/django-rest-framework/blob/master/rest_framework/views.py#L494 - self.headers = self.default_response_headers - - try: - self.initial(request, *args, **kwargs) - - """ - check for allowed request methods - - from APIView: - If `request.method` does not correspond to a handler method, - determine what kind of exception to raise. - - def http_method_not_allowed(self, request, *args, **kwargs): - raise exceptions.MethodNotAllowed(request.method) - """ - - if method in self.http_method_names: - handler = getattr(self, method, self.http_method_not_allowed) - else: - handler = self.http_method_not_allowed - - # function comparison explanation - # https://stackoverflow.com/a/18217024 - if handler == self.http_method_not_allowed: - response = handler(request, *args, **kwargs) - - elif method == "post": - # It excludes a real instance creation. - # It returns the instance with public primary key - # is equal to demo_default_id - instance = self.model._default_manager.get(public_primary_key=self.demo_default_id) - serializer = self.get_serializer(instance) - headers = self.get_success_headers(serializer.data) - response = Response(data=serializer.data, status=status.HTTP_201_CREATED, headers=headers) - - elif method == "put": - # It excludes a instance update. 
- # It returns the instance with public primary key - # is equal to demo_default_id - instance = self.get_object() - serializer = self.get_serializer(instance) - headers = self.get_success_headers(serializer.data) - response = Response(data=serializer.data, status=status.HTTP_200_OK, headers=headers) - - elif method == "delete": - # In this case we return nothing just success response. - response = Response(status=status.HTTP_204_NO_CONTENT) - - except Exception as exc: - response = self.handle_exception(exc) - - self.response = self.finalize_response(request, response, *args, **kwargs) - return self.response - - return super().dispatch(request, *args, **kwargs) - - class RateLimitHeadersMixin: # This mixin add RateLimit-Reset header to RateLimited response def handle_exception(self, exc): diff --git a/engine/common/utils.py b/engine/common/utils.py index 4b9ef9c1..7507bf97 100644 --- a/engine/common/utils.py +++ b/engine/common/utils.py @@ -177,6 +177,14 @@ def clean_markup(text): return cleaned +def escape_for_twilio_phone_call(text): + # https://www.twilio.com/docs/api/errors/12100 + text = text.replace("&", "&") + text = text.replace(">", ">") + text = text.replace("<", "<") + return text + + def escape_html(text): return html.escape(text) diff --git a/engine/apps/integrations/metadata/configuration/alertmanager.py b/engine/config_integrations/alertmanager.py similarity index 100% rename from engine/apps/integrations/metadata/configuration/alertmanager.py rename to engine/config_integrations/alertmanager.py diff --git a/engine/config_integrations/elastalert.py b/engine/config_integrations/elastalert.py new file mode 100644 index 00000000..90e9bfcc --- /dev/null +++ b/engine/config_integrations/elastalert.py @@ -0,0 +1,66 @@ +# Main +enabled = True +title = "Elastalert" +slug = "elastalert" +short_description = "Elastic" +is_displayed_on_web = True +description = None +is_featured = False +is_able_to_autoresolve = True +is_demo_alert_enabled = True + 
+description = None + +# Default templates +slack_title = """\ +*<{{ grafana_oncall_link }}|#{{ grafana_oncall_incident_id }} Incident>* via {{ integration_name }} +{% if source_link %} + (*<{{ source_link }}|source>*) +{%- endif %}""" + +slack_message = "```{{ payload|tojson_pretty }}```" + +slack_image_url = None + +web_title = "Incident" + +web_message = """\ +``` +{{ payload|tojson_pretty }} +``` +""" + +web_image_url = slack_image_url + +sms_title = web_title + +phone_call_title = sms_title + +email_title = web_title + +email_message = "{{ payload|tojson_pretty }}" + +telegram_title = sms_title + +telegram_message = "{{ payload|tojson_pretty }}" + +telegram_image_url = slack_image_url + +source_link = None + +grouping_id = '{{ payload.get("alert_uid", "")}}' + +resolve_condition = """\ +{%- if "is_amixr_heartbeat_restored" in payload -%} +{# We don't know the payload format from your integration. #} +{# The heartbeat alerts will go here so we check for our own key #} +{{ payload["is_amixr_heartbeat_restored"] }} +{%- else -%} +{{ payload.get("state", "").upper() == "OK" }} +{%- endif %}""" + +acknowledge_condition = None + +group_verbose_name = "Incident" + +example_payload = {"message": "This alert was sent by user for the demonstration purposes"} diff --git a/engine/apps/integrations/metadata/configuration/formatted_webhook.py b/engine/config_integrations/formatted_webhook.py similarity index 100% rename from engine/apps/integrations/metadata/configuration/formatted_webhook.py rename to engine/config_integrations/formatted_webhook.py diff --git a/engine/apps/integrations/metadata/configuration/grafana.py b/engine/config_integrations/grafana.py similarity index 100% rename from engine/apps/integrations/metadata/configuration/grafana.py rename to engine/config_integrations/grafana.py diff --git a/engine/apps/integrations/metadata/configuration/grafana_alerting.py b/engine/config_integrations/grafana_alerting.py similarity index 100% rename from 
engine/apps/integrations/metadata/configuration/grafana_alerting.py rename to engine/config_integrations/grafana_alerting.py diff --git a/engine/apps/integrations/metadata/configuration/heartbeat.py b/engine/config_integrations/heartbeat.py similarity index 100% rename from engine/apps/integrations/metadata/configuration/heartbeat.py rename to engine/config_integrations/heartbeat.py diff --git a/engine/apps/integrations/metadata/configuration/inbound_email.py b/engine/config_integrations/inbound_email.py similarity index 100% rename from engine/apps/integrations/metadata/configuration/inbound_email.py rename to engine/config_integrations/inbound_email.py diff --git a/engine/config_integrations/kapacitor.py b/engine/config_integrations/kapacitor.py new file mode 100644 index 00000000..d5f013fe --- /dev/null +++ b/engine/config_integrations/kapacitor.py @@ -0,0 +1,65 @@ +# Main +enabled = True +title = "Kapacitor" +slug = "kapacitor" +short_description = "InfluxDB" +description = None +is_displayed_on_web = True +is_featured = False +is_able_to_autoresolve = True +is_demo_alert_enabled = True + +description = None + +# Default templates +slack_title = """\ +*<{{ grafana_oncall_link }}|#{{ grafana_oncall_incident_id }} {{ payload.get("id", "Title undefined (check Slack Title Template)") }}>* via {{ integration_name }} +{% if source_link %} + (*<{{ source_link }}|source>*) +{%- endif %}""" + +slack_message = """\ +```{{ payload|tojson_pretty }}``` +""" + +slack_image_url = None + +web_title = '{{ payload.get("id", "Title undefined (check Web Title Template)") }}' + +web_message = """\ +``` +{{ payload|tojson_pretty }} +``` +""" + +web_image_url = slack_image_url + +sms_title = web_title + +phone_call_title = web_title + +email_title = web_title + +email_message = slack_message + +telegram_title = sms_title + +telegram_message = "{{ payload|tojson_pretty }}" + +telegram_image_url = slack_image_url + +source_link = None + +grouping_id = '{{ payload.get("id", "") }}' + 
+resolve_condition = '{{ payload.get("level", "").startswith("OK") }}' + +acknowledge_condition = None + +group_verbose_name = '{{ payload.get("id", "") }}' + +example_payload = { + "id": "TestAlert", + "message": "This alert was sent by user for the demonstration purposes", + "data": "{foo: bar}", +} diff --git a/engine/apps/integrations/metadata/configuration/maintenance.py b/engine/config_integrations/maintenance.py similarity index 100% rename from engine/apps/integrations/metadata/configuration/maintenance.py rename to engine/config_integrations/maintenance.py diff --git a/engine/apps/integrations/metadata/configuration/manual.py b/engine/config_integrations/manual.py similarity index 100% rename from engine/apps/integrations/metadata/configuration/manual.py rename to engine/config_integrations/manual.py diff --git a/engine/apps/integrations/metadata/configuration/slack_channel.py b/engine/config_integrations/slack_channel.py similarity index 100% rename from engine/apps/integrations/metadata/configuration/slack_channel.py rename to engine/config_integrations/slack_channel.py diff --git a/engine/apps/integrations/metadata/configuration/webhook.py b/engine/config_integrations/webhook.py similarity index 96% rename from engine/apps/integrations/metadata/configuration/webhook.py rename to engine/config_integrations/webhook.py index ea18fab7..113efc56 100644 --- a/engine/apps/integrations/metadata/configuration/webhook.py +++ b/engine/config_integrations/webhook.py @@ -56,7 +56,7 @@ resolve_condition = """\ {# The heartbeat alerts will go here so we check for our own key #} {{ payload["is_amixr_heartbeat_restored"] }} {%- else -%} -{{ payload.get("state", "").upper() == "OK" }}' +{{ payload.get("state", "").upper() == "OK" }} {%- endif %}""" acknowledge_condition = None diff --git a/engine/engine/urls.py b/engine/engine/urls.py index 9e55241a..518c5608 100644 --- a/engine/engine/urls.py +++ b/engine/engine/urls.py @@ -54,7 +54,7 @@ if 
settings.FEATURE_SLACK_INTEGRATION_ENABLED: path("slack/", include("apps.slack.urls")), ] -if settings.OSS_INSTALLATION_FEATURES_ENABLED: +if settings.OSS_INSTALLATION: urlpatterns += [ path("api/internal/v1/", include("apps.oss_installation.urls")), ] diff --git a/engine/requirements.txt b/engine/requirements.txt index a9dfc03d..2e8e11ef 100644 --- a/engine/requirements.txt +++ b/engine/requirements.txt @@ -1,4 +1,4 @@ -django==3.2.5 +django==3.2.13 djangorestframework==3.12.4 slackclient==1.3.0 whitenoise==5.3.0 @@ -9,7 +9,6 @@ celery==4.3.0 redis==3.2.0 django-celery-results==1.0.4 humanize==0.5.1 -django-mysql==2.4.1 uwsgi==2.0.20 django-cors-headers==3.7.0 django-debug-toolbar==3.2.1 @@ -27,7 +26,7 @@ slack-export-viewer==1.0.0 beautifulsoup4==4.8.1 social-auth-app-django==3.1.0 sendgrid==6.1.2 -cryptography==2.9.2 +cryptography==3.2 pytest==5.4.3 pytest-django==3.9.0 pytest_factoryboy==2.0.3 @@ -39,3 +38,5 @@ django-rest-polymorphic==0.1.9 pre-commit==2.15.0 https://github.com/iskhakov/django-push-notifications/archive/refs/tags/2.0.0-hotfix-4.tar.gz django-mirage-field==1.3.0 +django-mysql==4.6.0 +PyMySQL==1.0.2 diff --git a/engine/scripts/start_all_in_one.sh b/engine/scripts/start_all_in_one.sh deleted file mode 100644 index f4a64e39..00000000 --- a/engine/scripts/start_all_in_one.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash - -export DJANGO_SETTINGS_MODULE=settings.all_in_one - -generate_value_if_not_exist () -{ - if [ ! 
-f /etc/app/secret_data/$1 ]; then - touch /etc/app/secret_data/$1 - base64 /dev/urandom | head -c $2 > /etc/app/secret_data/$1 -fi -export $1=$(cat /etc/app/secret_data/$1) -} - -generate_value_if_not_exist SECRET_KEY 75 - -generate_value_if_not_exist MIRAGE_SECRET_KEY 75 -generate_value_if_not_exist MIRAGE_CIPHER_IV 16 - -export BASE_URL=http://localhost:8000 - -echo "Starting redis in the background" -# Redis will dump the changes to the volume every 60 seconds if at least 1 key changed -redis-server --daemonize yes --save 60 1 --dir /etc/app/redis_data/ -echo "Running migrations" -python manage.py migrate - -echo "Start celery" -python manage.py start_celery & - -# Postponing token issuing to make sure it's the last record in the console. -bash -c 'sleep 10; python manage.py issue_invite_for_the_frontend --override' & - -echo "Starting server" -python manage.py runserver 0.0.0.0:8000 --noreload diff --git a/engine/settings/all_in_one.py b/engine/settings/all_in_one.py deleted file mode 100644 index e2196274..00000000 --- a/engine/settings/all_in_one.py +++ /dev/null @@ -1,58 +0,0 @@ -import sys -from random import randrange - -from .prod_without_db import * # noqa - -DATABASES = { - "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": os.path.join(BASE_DIR, "sqlite_data/db.sqlite3"), # noqa - }, -} - -TESTING = "pytest" in sys.modules or "unittest" in sys.modules - -CACHES = { - "default": { - "BACKEND": "redis_cache.RedisCache", - "LOCATION": [ - "localhost:6379", - ], - "OPTIONS": { - "DB": 1, - "PARSER_CLASS": "redis.connection.HiredisParser", - "CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool", - "CONNECTION_POOL_CLASS_KWARGS": { - "max_connections": 50, - "timeout": 20, - }, - "MAX_CONNECTIONS": 1000, - "PICKLE_VERSION": -1, - }, - }, -} - -CELERY_BROKER_URL = "redis://localhost:6379/0" - -if TESTING: - TELEGRAM_TOKEN = "0000000000:XXXXXXXXXXXXXXXXXXXXXXXXXXXX-XXXXXX" - TWILIO_AUTH_TOKEN = "twilio_auth_token" - -# TODO: OSS: Add these 
setting to oss settings file. Add Version there too. -OSS_INSTALLATION_FEATURES_ENABLED = True - -INSTALLED_APPS += ["apps.oss_installation"] # noqa - -CELERY_BEAT_SCHEDULE["send_usage_stats"] = { # noqa - "task": "apps.oss_installation.tasks.send_usage_stats_report", - "schedule": crontab(hour=0, minute=randrange(0, 59)), # Send stats report at a random minute past midnight # noqa - "args": (), -} # noqa - -CELERY_BEAT_SCHEDULE["send_cloud_heartbeat"] = { # noqa - "task": "apps.oss_installation.tasks.send_cloud_heartbeat", - "schedule": crontab(minute="*/3"), # noqa - "args": (), -} # noqa - -SEND_ANONYMOUS_USAGE_STATS = True diff --git a/engine/settings/base.py b/engine/settings/base.py index b2150a47..baba3861 100644 --- a/engine/settings/base.py +++ b/engine/settings/base.py @@ -1,12 +1,15 @@ import os -from urllib.parse import urljoin +from random import randrange from celery.schedules import crontab from common.utils import getenv_boolean VERSION = "dev-oss" -SEND_ANONYMOUS_USAGE_STATS = False +# Indicates if instance is OSS installation. +# It is needed to plug-in oss application and urls. 
+OSS_INSTALLATION = getenv_boolean("GRAFANA_ONCALL_OSS_INSTALLATION", True) +SEND_ANONYMOUS_USAGE_STATS = getenv_boolean("SEND_ANONYMOUS_USAGE_STATS", default=True) # License is OpenSource or Cloud OPEN_SOURCE_LICENSE_NAME = "OpenSource" @@ -46,17 +49,18 @@ BASE_URL = os.environ.get("BASE_URL") # Root URL of OnCall backend # Feature toggles FEATURE_LIVE_SETTINGS_ENABLED = getenv_boolean("FEATURE_LIVE_SETTINGS_ENABLED", default=True) -FEATURE_TELEGRAM_INTEGRATION_ENABLED = getenv_boolean("FEATURE_TELEGRAM_INTEGRATION_ENABLED", default=False) +FEATURE_TELEGRAM_INTEGRATION_ENABLED = getenv_boolean("FEATURE_TELEGRAM_INTEGRATION_ENABLED", default=True) FEATURE_EMAIL_INTEGRATION_ENABLED = getenv_boolean("FEATURE_EMAIL_INTEGRATION_ENABLED", default=False) -FEATURE_SLACK_INTEGRATION_ENABLED = getenv_boolean("FEATURE_SLACK_INTEGRATION_ENABLED", default=False) -OSS_INSTALLATION_FEATURES_ENABLED = False +FEATURE_SLACK_INTEGRATION_ENABLED = getenv_boolean("FEATURE_SLACK_INTEGRATION_ENABLED", default=True) +GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED = getenv_boolean("GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED", default=True) +GRAFANA_CLOUD_NOTIFICATIONS_ENABLED = getenv_boolean("GRAFANA_CLOUD_NOTIFICATIONS_ENABLED", default=True) TWILIO_ACCOUNT_SID = os.environ.get("TWILIO_ACCOUNT_SID") TWILIO_AUTH_TOKEN = os.environ.get("TWILIO_AUTH_TOKEN") TWILIO_NUMBER = os.environ.get("TWILIO_NUMBER") TWILIO_VERIFY_SERVICE_SID = os.environ.get("TWILIO_VERIFY_SERVICE_SID") -TELEGRAM_WEBHOOK_URL = os.environ.get("TELEGRAM_WEBHOOK_URL", urljoin(BASE_URL, "/telegram/")) +TELEGRAM_WEBHOOK_HOST = os.environ.get("TELEGRAM_WEBHOOK_HOST", BASE_URL) TELEGRAM_TOKEN = os.environ.get("TELEGRAM_TOKEN") os.environ.setdefault("MYSQL_PASSWORD", "empty") @@ -70,6 +74,10 @@ SENDGRID_FROM_EMAIL = os.environ.get("SENDGRID_FROM_EMAIL") SENDGRID_SECRET_KEY = os.environ.get("SENDGRID_SECRET_KEY") SENDGRID_INBOUND_EMAIL_DOMAIN = os.environ.get("SENDGRID_INBOUND_EMAIL_DOMAIN") +# For Grafana Cloud integration 
+GRAFANA_CLOUD_ONCALL_API_URL = os.environ.get("GRAFANA_CLOUD_ONCALL_API_URL", "https://a-prod-us-central-0.grafana.net") +GRAFANA_CLOUD_ONCALL_TOKEN = os.environ.get("GRAFANA_CLOUD_ONCALL_TOKEN", None) + # Application definition INSTALLED_APPS = [ @@ -218,9 +226,7 @@ USE_TZ = True # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = "/static/" -STATICFILES_DIRS = [ - "./static", -] +STATIC_ROOT = "./static/" CELERY_BROKER_URL = "amqp://rabbitmq:rabbitmq@localhost:5672" @@ -409,10 +415,6 @@ SELF_HOSTED_SETTINGS = { "ORG_TITLE": "Self-Hosted Organization", } -GRAFANA_CLOUD_ONCALL_API_URL = os.environ.get("GRAFANA_CLOUD_ONCALL_API_URL", "https://a-prod-us-central-0.grafana.net") -GRAFANA_CLOUD_ONCALL_TOKEN = os.environ.get("GRAFANA_CLOUD_ONCALL_TOKEN", None) -GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED = getenv_boolean("GRAFANA_CLOUD_ONCALL_HEARTBEAT_ENABLED", default=True) - GRAFANA_INCIDENT_STATIC_API_KEY = os.environ.get("GRAFANA_INCIDENT_STATIC_API_KEY", None) DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880 @@ -424,15 +426,39 @@ FEATURE_EXTRA_MESSAGING_BACKENDS_ENABLED = getenv_boolean("FEATURE_EXTRA_MESSAGI EXTRA_MESSAGING_BACKENDS = [] INSTALLED_ONCALL_INTEGRATIONS = [ - "apps.integrations.metadata.configuration.alertmanager", - "apps.integrations.metadata.configuration.grafana", - "apps.integrations.metadata.configuration.grafana_alerting", - "apps.integrations.metadata.configuration.formatted_webhook", - "apps.integrations.metadata.configuration.webhook", - "apps.integrations.metadata.configuration.amazon_sns", - "apps.integrations.metadata.configuration.heartbeat", - "apps.integrations.metadata.configuration.inbound_email", - "apps.integrations.metadata.configuration.maintenance", - "apps.integrations.metadata.configuration.manual", - "apps.integrations.metadata.configuration.slack_channel", + "config_integrations.alertmanager", + "config_integrations.grafana", + "config_integrations.grafana_alerting", + "config_integrations.formatted_webhook", + 
"config_integrations.webhook", + "config_integrations.kapacitor", + "config_integrations.elastalert", + "config_integrations.heartbeat", + "config_integrations.inbound_email", + "config_integrations.maintenance", + "config_integrations.manual", + "config_integrations.slack_channel", ] + +if OSS_INSTALLATION: + INSTALLED_APPS += ["apps.oss_installation"] # noqa + + CELERY_BEAT_SCHEDULE["send_usage_stats"] = { # noqa + "task": "apps.oss_installation.tasks.send_usage_stats_report", + "schedule": crontab( + hour=0, minute=randrange(0, 59) + ), # Send stats report at a random minute past midnight # noqa + "args": (), + } # noqa + + CELERY_BEAT_SCHEDULE["send_cloud_heartbeat"] = { # noqa + "task": "apps.oss_installation.tasks.send_cloud_heartbeat_task", + "schedule": crontab(minute="*/3"), # noqa + "args": (), + } # noqa + + CELERY_BEAT_SCHEDULE["sync_users_with_cloud"] = { # noqa + "task": "apps.oss_installation.tasks.sync_users_with_cloud", + "schedule": crontab(hour="*/12"), # noqa + "args": (), + } # noqa diff --git a/engine/settings/ci-test.py b/engine/settings/ci-test.py index f3c012a0..91b3f7b0 100644 --- a/engine/settings/ci-test.py +++ b/engine/settings/ci-test.py @@ -1,3 +1,8 @@ +# flake8: noqa: F405 + +# Workaround to use pymysql instead of mysqlclient +import pymysql + from .base import * # noqa SECRET_KEY = "u5/IIbuiJR3Y9FQMBActk+btReZ5oOxu+l8MIJQWLfVzESoan5REE6UNSYYEQdjBOcty9CDak2X" @@ -9,11 +14,18 @@ BASE_URL = "http://localhost" CELERY_BROKER_URL = "amqp://rabbitmq:rabbitmq@rabbit_test:5672" +pymysql.install_as_MySQLdb() + # Primary database must have the name "default" DATABASES = { "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": os.path.join(BASE_DIR, "sqlite_data/db.sqlite3"), # noqa + "ENGINE": "django.db.backends.mysql", + "NAME": "oncall_local_dev", + "USER": "root", + "PASSWORD": "local_dev_pwd", + "HOST": "mysql_test", + "PORT": "3306", + "OPTIONS": {"charset": "utf8mb4"}, }, } diff --git a/engine/settings/dev.py 
b/engine/settings/dev.py index aff8ca9d..ef43c7ff 100644 --- a/engine/settings/dev.py +++ b/engine/settings/dev.py @@ -1,6 +1,9 @@ import os import sys +# Workaround to use pymysql instead of mysqlclient +import pymysql + from .base import * # noqa SECRET_KEY = os.environ.get("SECRET_KEY", "osMsNM0PqlRHBlUvqmeJ7+ldU3IUETCrY9TrmiViaSmInBHolr1WUlS0OFS4AHrnnkp1vp9S9z1") @@ -10,11 +13,20 @@ MIRAGE_SECRET_KEY = os.environ.get( ) MIRAGE_CIPHER_IV = os.environ.get("MIRAGE_CIPHER_IV", "tZZa+60zTZO2NRcS") -# Primary database must have the name "default" +pymysql.install_as_MySQLdb() + DATABASES = { "default": { - "ENGINE": "django.db.backends.sqlite3", - "NAME": os.path.join(BASE_DIR, "sqlite_data/db.sqlite3"), # noqa + "ENGINE": "django.db.backends.mysql", + "NAME": os.environ.get("MYSQL_DB_NAME", "oncall_local_dev"), + "USER": os.environ.get("MYSQL_USER", "root"), + "PASSWORD": os.environ.get("MYSQL_PASSWORD"), + "HOST": os.environ.get("MYSQL_HOST", "127.0.0.1"), + "PORT": os.environ.get("MYSQL_PORT", "3306"), + "OPTIONS": { + "charset": "utf8mb4", + "connect_timeout": 1, + }, }, } diff --git a/engine/settings/hobby.py b/engine/settings/hobby.py new file mode 100644 index 00000000..3bd73c13 --- /dev/null +++ b/engine/settings/hobby.py @@ -0,0 +1,38 @@ +# flake8: noqa: F405 + +from random import randrange + +# Workaround to use pymysql instead of mysqlclient +import pymysql + +from .prod_without_db import * # noqa + +pymysql.install_as_MySQLdb() + +DATABASES = { + "default": { + "ENGINE": "django.db.backends.mysql", + "NAME": os.environ.get("MYSQL_DB_NAME"), + "USER": os.environ.get("MYSQL_USER"), + "PASSWORD": os.environ["MYSQL_PASSWORD"], + "HOST": os.environ.get("MYSQL_HOST"), + "PORT": os.environ.get("MYSQL_PORT"), + "OPTIONS": { + "charset": "utf8mb4", + "connect_timeout": 1, + }, + }, +} + +RABBITMQ_USERNAME = os.environ.get("RABBITMQ_USERNAME") +RABBITMQ_PASSWORD = os.environ.get("RABBITMQ_PASSWORD") +RABBITMQ_HOST = os.environ.get("RABBITMQ_HOST") +RABBITMQ_PORT = 
os.environ.get("RABBITMQ_PORT") + +CELERY_BROKER_URL = f"amqp://{RABBITMQ_USERNAME}:{RABBITMQ_PASSWORD}@{RABBITMQ_HOST}:{RABBITMQ_PORT}" + +MIRAGE_SECRET_KEY = SECRET_KEY +MIRAGE_CIPHER_IV = "1234567890abcdef" # use default + +APPEND_SLASH = False +SECURE_SSL_REDIRECT = False diff --git a/engine/uwsgi.ini b/engine/uwsgi.ini index 17a92d60..6f612248 100644 --- a/engine/uwsgi.ini +++ b/engine/uwsgi.ini @@ -18,4 +18,4 @@ post-buffering=1 logger=stdio log-format=source=engine:uwsgi status=%(status) method=%(method) path=%(uri) latency=%(secs) google_trace_id=%(var.HTTP_X_CLOUD_TRACE_CONTEXT) protocol=%(proto) resp_size=%(size) req_body_size=%(cl) -log-encoder=format ${strftime:%%Y-%%m-%%d %%H:%%M:%%S} ${msgnl} \ No newline at end of file +log-encoder=format ${strftime:%%Y-%%m-%%d %%H:%%M:%%S} ${msgnl} diff --git a/grafana-plugin/CHANGELOG.md b/grafana-plugin/CHANGELOG.md deleted file mode 120000 index 04c99a55..00000000 --- a/grafana-plugin/CHANGELOG.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/grafana-plugin/CHANGELOG.md b/grafana-plugin/CHANGELOG.md new file mode 100644 index 00000000..8893332c --- /dev/null +++ b/grafana-plugin/CHANGELOG.md @@ -0,0 +1,5 @@ +# Change Log + +## 0.0.71 (2022-06-06) + +- Initial Release \ No newline at end of file diff --git a/grafana-plugin/package.json b/grafana-plugin/package.json index 70fbfeb2..d0149316 100644 --- a/grafana-plugin/package.json +++ b/grafana-plugin/package.json @@ -68,7 +68,7 @@ "lodash-es": "^4.17.15", "mobx": "^5.13.0", "mobx-react": "^6.1.1", - "moment-timezone": "^0.5.28", + "moment-timezone": "^0.5.34", "rc-table": "^7.17.1", "react-copy-to-clipboard": "^5.0.2", "react-emoji-render": "^1.2.4", diff --git a/grafana-plugin/plopfile.js b/grafana-plugin/plopfile.js index 87a63fed..c1504cb0 100644 --- a/grafana-plugin/plopfile.js +++ b/grafana-plugin/plopfile.js @@ -1,11 +1,9 @@ const createComponentFiles = require('./tools/plop/generators/createComponentFiles'); const 
createContainerFiles = require('./tools/plop/generators/createContainerFiles'); const createModelFiles = require('./tools/plop/generators/createModelFiles'); -const appendReadmeFile = require('./tools/plop/generators/appendReadmeFile'); const componentPrompts = require('./tools/plop/prompts/componentPrompts'); const containerPrompts = require('./tools/plop/prompts/containerPrompts'); const modelPrompts = require('./tools/plop/prompts/modelPrompts'); -const readmePrompts = require('./tools/plop/prompts/readmePrompts'); // const configNeededHelper = require('./tools/plop/helpers/configNeeded'); @@ -32,9 +30,4 @@ module.exports = function plopGenerator(plop) { actions: (answers) => createContainerFiles(answers), }); - plop.setGenerator('Append build info', { - description: 'Add build info to plugin readme', - prompts: readmePrompts, - actions: (answers) => appendReadmeFile(answers), - }); }; diff --git a/grafana-plugin/src/GrafanaPluginRootPage.tsx b/grafana-plugin/src/GrafanaPluginRootPage.tsx index a3276a5f..b6f0cc65 100644 --- a/grafana-plugin/src/GrafanaPluginRootPage.tsx +++ b/grafana-plugin/src/GrafanaPluginRootPage.tsx @@ -48,7 +48,7 @@ const RootWithLoader = observer((props: AppRootProps) => { } else if (store.isUserAnonymous) { text = '😞 Unfortunately Grafana OnCall is available for authorized users only, please sign in to proceed.'; } else if (store.retrySync) { - text = `🚫 OnCall took too many tries to synchronize`; + text = `🚫 OnCall took too many tries to synchronize... 
Are background workers up and running?`; } return ( @@ -100,9 +100,11 @@ export const Root = observer((props: AppRootProps) => { const style = document.createElement('style'); document.head.appendChild(style); const index = style.sheet.insertRule('.page-body {max-width: unset !important}'); + const index2 = style.sheet.insertRule('.page-container {max-width: unset !important}'); return () => { style.sheet.removeRule(index); + style.sheet.removeRule(index2); }; }, []); @@ -116,6 +118,7 @@ export const Root = observer((props: AppRootProps) => { meta, grafanaUser: window.grafanaBootData.user, enableLiveSettings: store.hasFeature(AppFeature.LiveSettings), + enableCloudPage: store.hasFeature(AppFeature.CloudConnection), }), [meta, pathWithoutLeadingSlash, page, store.features] ) diff --git a/grafana-plugin/src/README.md b/grafana-plugin/src/README.md index 560dc756..66e4b915 100644 --- a/grafana-plugin/src/README.md +++ b/grafana-plugin/src/README.md @@ -12,12 +12,3 @@ with Brilliant Slack Integration ## Documentation - [On Github](http://github.com/grafana/oncall) - [Grafana OnCall](https://grafana.com/docs/grafana-cloud/oncall/) -- [Connect to Grafana OnCall](https://grafana.com/docs/grafana-cloud/oncall/oncall-integrations/) - - [Alertmanager](https://grafana.com/docs/grafana-cloud/oncall/oncall-integrations/alertmanager/) - - [Grafana Cloud Alerting](https://grafana.com/docs/grafana-cloud/oncall/oncall-integrations/grafana/) - - [Zabbix](https://grafana.com/docs/grafana-cloud/oncall/oncall-integrations/zabbix/) - - [Custom Integrations](https://grafana.com/docs/grafana-cloud/oncall/oncall-integrations/custom/) -- [HTTP API Reference](https://grafana.com/docs/grafana-cloud/oncall/oncall-api-reference/) - -\ -/* BUILD INFO */ \ No newline at end of file diff --git a/grafana-plugin/src/components/Policy/NotificationPolicy.tsx b/grafana-plugin/src/components/Policy/NotificationPolicy.tsx index 040f4b43..29254a04 100644 --- 
a/grafana-plugin/src/components/Policy/NotificationPolicy.tsx +++ b/grafana-plugin/src/components/Policy/NotificationPolicy.tsx @@ -35,7 +35,7 @@ export interface NotificationPolicyProps { waitDelays?: WaitDelay[]; notifyByOptions?: NotifyBy[]; telegramVerified: boolean; - phoneVerified: boolean; + phoneStatus: number; color: string; number: number; userAction: UserAction; @@ -115,13 +115,21 @@ export class NotificationPolicy extends React.ComponentPhone number is verified - ) : ( - Phone number is not verified - ); + switch (phoneStatus) { + case 0: + return Cloud is not synced; + case 1: + return User is not matched with cloud; + case 2: + return Phone number is not verified; + case 3: + return Phone number is verified; + + default: + return null; + } } _renderTelegramNote() { diff --git a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx index 7d446ef1..7b2c23a6 100644 --- a/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx +++ b/grafana-plugin/src/containers/DefaultPageLayout/DefaultPageLayout.tsx @@ -100,7 +100,9 @@ const DefaultPageLayout: FC = observer((props) => { currentTeam && currentUser && store.isUserActionAllowed(UserAction.UpdateOwnSettings) && - (!currentUser.verified_phone_number || !currentUser.slack_user_identity) && + (!currentUser.verified_phone_number || + !currentUser.slack_user_identity || + currentUser.cloud_connection_status !== 3) && !getItem(AlertID.CONNECTIVITY_WARNING) ) && ( = observer((props) => { {'. '} )} - {!currentUser.verified_phone_number && 'Your phone number is not verified. '} + {currentUser.cloud_connection_status !== 3 && + !currentUser.verified_phone_number && + 'Your phone number is not verified. '} {currentTeam.slack_team_identity && !currentUser.slack_user_identity && 'Your slack account is not connected. 
'} diff --git a/grafana-plugin/src/containers/PersonalNotificationSettings/PersonalNotificationSettings.tsx b/grafana-plugin/src/containers/PersonalNotificationSettings/PersonalNotificationSettings.tsx index 4812b0ae..a9d2205e 100644 --- a/grafana-plugin/src/containers/PersonalNotificationSettings/PersonalNotificationSettings.tsx +++ b/grafana-plugin/src/containers/PersonalNotificationSettings/PersonalNotificationSettings.tsx @@ -12,6 +12,7 @@ import Timeline from 'components/Timeline/Timeline'; import { WithPermissionControl } from 'containers/WithPermissionControl/WithPermissionControl'; import { NotificationPolicyType } from 'models/notification_policy'; import { User as UserType } from 'models/user/user.types'; +import { AppFeature } from 'state/features'; import { useStore } from 'state/useStore'; import { UserAction } from 'state/userAction'; @@ -105,6 +106,12 @@ const PersonalNotificationSettings = observer((props: PersonalNotificationSettin const user = userStore.items[userPk]; const userAction = isCurrent ? UserAction.UpdateOwnSettings : UserAction.UpdateNotificationPolicies; + const getPhoneStatus = () => { + if (store.hasFeature(AppFeature.CloudNotifications)) { + return user.cloud_connection_status; + } + return Number(user.verified_phone_number) + 2; + }; return (
@@ -124,7 +131,7 @@ const PersonalNotificationSettings = observer((props: PersonalNotificationSettin index={index} number={index + 1} telegramVerified={Boolean(user.telegram_configuration)} - phoneVerified={Boolean(user && user.verified_phone_number)} + phoneStatus={getPhoneStatus()} slackTeamIdentity={store.teamStore.currentTeam?.slack_team_identity} slackUserIdentity={user.slack_user_identity} data={notificationPolicy} diff --git a/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.module.css b/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.module.css index 8996189b..d0eafdf8 100644 --- a/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.module.css +++ b/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.module.css @@ -1,7 +1,8 @@ -.delete_configuration_button { - margin-top: 20px; -} - .command-line { width: 100%; } + +.info-block { + margin-bottom: 24px; + margin-top: 24px; +} diff --git a/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.tsx b/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.tsx index e3ccfc4c..d3996c51 100644 --- a/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.tsx +++ b/grafana-plugin/src/containers/PluginConfigPage/PluginConfigPage.tsx @@ -19,6 +19,7 @@ import cn from 'classnames/bind'; import CopyToClipboard from 'react-copy-to-clipboard'; import { OnCallAppSettings } from 'types'; +import Block from 'components/GBlock/Block'; import Text from 'components/Text/Text'; import WithConfirm from 'components/WithConfirm/WithConfirm'; import logo from 'img/logo.svg'; @@ -37,20 +38,16 @@ export const PluginConfigPage = (props: Props) => { const { plugin } = props; const [onCallApiUrl, setOnCallApiUrl] = useState(getItem('onCallApiUrl')); const [onCallInvitationToken, setOnCallInvitationToken] = useState(); - const [grafanaUrl, setGrafanaUrl] = useState(window.location.origin); + const [grafanaUrl, setGrafanaUrl] = useState(getItem('grafanaUrl')); 
const [pluginConfigLoading, setPluginConfigLoading] = useState(true); const [pluginStatusOk, setPluginStatusOk] = useState(); const [pluginStatusMessage, setPluginStatusMessage] = useState(); const [isSelfHostedInstall, setIsSelfHostedInstall] = useState(true); const [retrySync, setRetrySync] = useState(false); - const [showConfirmationModal, setShowConfirmationModal] = useState(false); - const configurePlugin = () => { - setShowConfirmationModal(true); - }; const setupPlugin = useCallback(async () => { setItem('onCallApiUrl', onCallApiUrl); - setShowConfirmationModal(false); + setItem('grafanaUrl', grafanaUrl); await getBackendSrv().post(`/api/plugins/grafana-oncall-app/settings`, { enabled: true, pinned: true, @@ -189,7 +186,9 @@ export const PluginConfigPage = (props: Props) => { if (counter >= 5) { clearInterval(interval); - setPluginStatusMessage(`OnCall took too many tries to synchronize.`); + setPluginStatusMessage( + `OnCall took too many tries to synchronize. Did you launch Celery workers? Background workers should perform synchronization, not web server.` + ); setRetrySync(true); setPluginStatusOk(false); setPluginConfigLoading(false); @@ -212,85 +211,70 @@ export const PluginConfigPage = (props: Props) => { Configure Grafana OnCall {pluginStatusOk && (

- Configuration was sucessfully created. Now you can find Grafana OnCall on right toolbar.{' '} + Plugin and the backend are connected! Check Grafana OnCall 👈👈👈{' '} Grafana OnCall Logo

)} +

{'Plugin <-> backend connection status'}

+
+            {pluginStatusMessage}
+          
- {isSelfHostedInstall ? ( -
-

{'Plugin <-> backend connection status'}

+ + {/*

{'Plugin <-> backend connection status'}

                 {pluginStatusMessage}
-              
+ */} + {retrySync && ( + + )} + {isSelfHostedInstall ? ( - -
- ) : ( - - )} + ) : ( + + )}{' '} + ) : ( Configure Grafana OnCall

This page will help you to connect OnCall backend and OnCall Grafana plugin 👋

-

1. Grafana OnCall is a Grafana plugin and backend. Run backend

+ +

1. Launch backend

- Run production backend using{' '} - - this instructions at our GitHub + Run hobby, dev or production backend:{' '} + + getting started. - , - - Or run the local one: -
-              
-                 {
-                    openNotification('Grafana OnCall command copied');
-                  }}
-                >
-                  
-                {' '}
-                docker build -t grafana/amixr-all-in-one -f Dockerfile.all-in-one .
-              
-            
- - - Need help? -
- 1. Talk to the developers in the #grafana-oncall channel at{' '} - - Slack - -
- 2. Search for issues or create a new one in the{' '} - - GitHub - -
- - } - /> + + + Need help? +
- Talk to the OnCall team in the #grafana-oncall channel at{' '} + + Slack + +
- Ask questions at{' '} + + GitHub Discussions + {' '} + or file bugs at{' '} + + GitHub Issues + +
+

2. Conect the backend and the plugin

-

{'Plugin <-> backend connection status'}

+

{'Plugin <-> backend connection status:'}

             {pluginStatusMessage}
           
@@ -301,7 +285,7 @@ Seek for such a line: “Your invite token: <> , use it in the Graf > <> - + How to re-issue the invite token? @@ -311,42 +295,29 @@ Seek for such a line: “Your invite token: <> , use it in the Graf + It should be reachable from Grafana. Possible options:
+ http://host.docker.internal:8080 (if you run backend in the docker locally) +
+ http://localhost:8080
+ ... + + } >
- + - {/* */} - {/* */} - {showConfirmationModal && ( - setShowConfirmationModal(false)} - > - - - - - - )}
)}
diff --git a/grafana-plugin/src/containers/UserSettings/UserSettings.tsx b/grafana-plugin/src/containers/UserSettings/UserSettings.tsx index ca00871b..20abb1f2 100644 --- a/grafana-plugin/src/containers/UserSettings/UserSettings.tsx +++ b/grafana-plugin/src/containers/UserSettings/UserSettings.tsx @@ -58,7 +58,8 @@ const UserSettings = observer((props: UserFormProps) => { setActiveTab(tab); }, []); - const isModalWide = activeTab === UserSettingsTab.UserInfo && isDesktopOrLaptop; + const isModalWide = + (activeTab === UserSettingsTab.UserInfo && isDesktopOrLaptop) || activeTab === UserSettingsTab.PhoneVerification; const [showNotificationSettingsTab, showSlackConnectionTab, showTelegramConnectionTab, showMobileAppVerificationTab] = [ diff --git a/grafana-plugin/src/containers/UserSettings/parts/connectors/PhoneConnector.tsx b/grafana-plugin/src/containers/UserSettings/parts/connectors/PhoneConnector.tsx index b00cf868..05ec4509 100644 --- a/grafana-plugin/src/containers/UserSettings/parts/connectors/PhoneConnector.tsx +++ b/grafana-plugin/src/containers/UserSettings/parts/connectors/PhoneConnector.tsx @@ -1,11 +1,12 @@ import React, { useCallback } from 'react'; -import { Button, Label } from '@grafana/ui'; +import { Button, Label, VerticalGroup } from '@grafana/ui'; import cn from 'classnames/bind'; import Text from 'components/Text/Text'; import { UserSettingsTab } from 'containers/UserSettings/UserSettings.types'; import { User } from 'models/user/user.types'; +import { AppFeature } from 'state/features'; import { useStore } from 'state/useStore'; import styles from './index.module.css'; @@ -29,31 +30,85 @@ const PhoneConnector = (props: PhoneConnectorProps) => { onTabChange(UserSettingsTab.PhoneVerification); }, [storeUser?.unverified_phone_number]); + const cloudVersionPhone = (user: User) => { + switch (user.cloud_connection_status) { + case 0: + return Cloud is not synced; + + case 1: + return ( + + User is not matched with cloud + + + ); + + case 2: + 
return ( + + Phone number is not verified in Grafana Cloud + + + ); + case 3: + return ( + + Phone number verified + + + ); + default: + return ( + + User is not matched with cloud + + + ); + } + }; + return (
- - {storeUser.verified_phone_number || '—'} - {storeUser.verified_phone_number ? ( -
- Phone number is verified - -
- ) : storeUser.unverified_phone_number ? ( -
- Phone number is not verified - -
+ {store.hasFeature(AppFeature.CloudNotifications) ? ( + <> + + {cloudVersionPhone(storeUser)} + ) : ( -
- Phone number is not added - -
+ <> + + {storeUser.verified_phone_number || '—'} + {storeUser.verified_phone_number ? ( +
+ Phone number is verified + +
+ ) : storeUser.unverified_phone_number ? ( +
+ Phone number is not verified + +
+ ) : ( +
+ Phone number is not added + +
+ )} + )}
); diff --git a/grafana-plugin/src/containers/UserSettings/parts/connectors/index.module.css b/grafana-plugin/src/containers/UserSettings/parts/connectors/index.module.css index 0e32b304..04f4550e 100644 --- a/grafana-plugin/src/containers/UserSettings/parts/connectors/index.module.css +++ b/grafana-plugin/src/containers/UserSettings/parts/connectors/index.module.css @@ -30,3 +30,7 @@ .warning-icon { color: var(--warning-text-color); } + +.error-message { + color: var(--error-text-color); +} diff --git a/grafana-plugin/src/containers/UserSettings/parts/index.tsx b/grafana-plugin/src/containers/UserSettings/parts/index.tsx index 19e598ed..82e57fd6 100644 --- a/grafana-plugin/src/containers/UserSettings/parts/index.tsx +++ b/grafana-plugin/src/containers/UserSettings/parts/index.tsx @@ -1,17 +1,20 @@ -import React, { useCallback } from 'react'; +import React, { useCallback, useEffect } from 'react'; import { Tab, TabContent, TabsBar } from '@grafana/ui'; import cn from 'classnames/bind'; +import { observer } from 'mobx-react'; import Block from 'components/GBlock/Block'; import MobileAppVerification from 'containers/MobileAppVerification/MobileAppVerification'; import { UserSettingsTab } from 'containers/UserSettings/UserSettings.types'; import { SlackTab } from 'containers/UserSettings/parts/tabs//SlackTab/SlackTab'; +import CloudPhoneSettings from 'containers/UserSettings/parts/tabs/CloudPhoneSettings/CloudPhoneSettings'; import { NotificationSettingsTab } from 'containers/UserSettings/parts/tabs/NotificationSettingsTab'; import PhoneVerification from 'containers/UserSettings/parts/tabs/PhoneVerification/PhoneVerification'; import TelegramInfo from 'containers/UserSettings/parts/tabs/TelegramInfo/TelegramInfo'; import { UserInfoTab } from 'containers/UserSettings/parts/tabs/UserInfoTab/UserInfoTab'; import { User } from 'models/user/user.types'; +import { AppFeature } from 'state/features'; import { useStore } from 'state/useStore'; import styles from 
'containers/UserSettings/parts/index.module.css'; @@ -100,8 +103,11 @@ interface TabsContentProps { isDesktopOrLaptop: boolean; } -export const TabsContent = (props: TabsContentProps) => { +export const TabsContent = observer((props: TabsContentProps) => { const { id, activeTab, onTabChange, isDesktopOrLaptop } = props; + useEffect(() => { + store.updateFeatures(); + }, []); const store = useStore(); const { userStore } = store; @@ -124,9 +130,12 @@ export const TabsContent = (props: TabsContentProps) => { ))} {activeTab === UserSettingsTab.NotificationSettings && } - {activeTab === UserSettingsTab.PhoneVerification && ( - - )} + {activeTab === UserSettingsTab.PhoneVerification && + (store.hasFeature(AppFeature.CloudNotifications) ? ( + + ) : ( + + ))} {activeTab === UserSettingsTab.MobileAppVerification && ( )} @@ -134,4 +143,4 @@ export const TabsContent = (props: TabsContentProps) => { {activeTab === UserSettingsTab.TelegramInfo && } ); -}; +}); diff --git a/grafana-plugin/src/containers/UserSettings/parts/tabs/CloudPhoneSettings/CloudPhoneSettings.module.css b/grafana-plugin/src/containers/UserSettings/parts/tabs/CloudPhoneSettings/CloudPhoneSettings.module.css new file mode 100644 index 00000000..ab86c434 --- /dev/null +++ b/grafana-plugin/src/containers/UserSettings/parts/tabs/CloudPhoneSettings/CloudPhoneSettings.module.css @@ -0,0 +1,3 @@ +.test { + color: grey; +} diff --git a/grafana-plugin/src/containers/UserSettings/parts/tabs/CloudPhoneSettings/CloudPhoneSettings.tsx b/grafana-plugin/src/containers/UserSettings/parts/tabs/CloudPhoneSettings/CloudPhoneSettings.tsx new file mode 100644 index 00000000..74438027 --- /dev/null +++ b/grafana-plugin/src/containers/UserSettings/parts/tabs/CloudPhoneSettings/CloudPhoneSettings.tsx @@ -0,0 +1,168 @@ +import React, { useCallback, useEffect, useState } from 'react'; + +import { getLocationSrv, LocationUpdate } from '@grafana/runtime'; +import { + Field, + Input, + Button, + Modal, + HorizontalGroup, + Alert, + 
Icon, + VerticalGroup, + Table, + LoadingPlaceholder, +} from '@grafana/ui'; +import cn from 'classnames/bind'; +import { observer } from 'mobx-react'; + +import Block from 'components/GBlock/Block'; +import GTable from 'components/GTable/GTable'; +import PluginLink from 'components/PluginLink/PluginLink'; +import Text from 'components/Text/Text'; +import WithConfirm from 'components/WithConfirm/WithConfirm'; +import { User } from 'models/user/user.types'; +import { AppFeature } from 'state/features'; +import { WithStoreProps } from 'state/types'; +import { useStore } from 'state/useStore'; +import { UserAction } from 'state/userAction'; +import { withMobXProviderContext } from 'state/withStore'; + +import styles from './CloudPhoneSettings.module.css'; + +const cx = cn.bind(styles); + +interface CloudPhoneSettingsProps extends WithStoreProps { + userPk?: User['pk']; +} + +const CloudPhoneSettings = observer((props: CloudPhoneSettingsProps) => { + const { userPk } = props; + const store = useStore(); + const [syncing, setSyncing] = useState(false); + const [userStatus, setUserStatus] = useState(0); + const [userLink, setUserLink] = useState(null); + + useEffect(() => { + getCloudUserInfo(); + }, []); + + const handleLinkClick = (link: string) => { + window.open(link, '_blank'); + }; + + const syncUser = async () => { + setSyncing(true); + await store.cloudStore.syncCloudUser(userPk); + const cloudUser = await store.cloudStore.getCloudUser(userPk); + setUserStatus(cloudUser?.cloud_data?.status); + setUserLink(cloudUser?.cloud_data?.link); + setSyncing(false); + }; + + const getCloudUserInfo = async () => { + const cloudUser = await store.cloudStore.getCloudUser(userPk); + setUserStatus(cloudUser?.cloud_data?.status); + setUserLink(cloudUser?.cloud_data?.link); + }; + + const UserCloudStatus = () => { + switch (userStatus) { + case 0: + if (store.hasFeature(AppFeature.CloudNotifications)) { + return ( + + Your account successfully matched, but Cloud is not connected. 
+ + + + + ); + } + return ( + + Grafana Cloud is not synced + + ); + case 1: + return ( + + + { + 'We can’t find a matching account in the connected Grafana Cloud instance (matching happens by e-mail). ' + } + + + + ); + case 2: + return ( + + + Your account successfully matched with the Grafana Cloud account. Please verify your phone number.{' '} + + + + ); + case 3: + return ( + + + Your account successfully matched with the Grafana Cloud account. Your phone number is verified.{' '} + + + + ); + default: + return ( + + + { + 'We can’t find a matching account in the connected Grafana Cloud instance (matching happens by e-mail). ' + } + + + + ); + } + }; + + return ( + <> + {store.isUserActionAllowed(UserAction.UpdateOtherUsersSettings) ? ( + + + OnCall use Grafana Cloud for SMS and phone call notifications + {syncing ? ( + + ) : ( + + )} + + {!syncing ? : } + + ) : ( + + OnCall use Grafana Cloud for SMS and phone call notifications + You do not have permission to perform this action. Ask an admin to upgrade your permissions. 
+ + )} + + ); +}); + +export default withMobXProviderContext(CloudPhoneSettings); diff --git a/grafana-plugin/src/icons/cross-circled.svg b/grafana-plugin/src/icons/cross-circled.svg new file mode 100644 index 00000000..f468d638 --- /dev/null +++ b/grafana-plugin/src/icons/cross-circled.svg @@ -0,0 +1,8 @@ + + + diff --git a/grafana-plugin/src/icons/heart-line.svg b/grafana-plugin/src/icons/heart-line.svg new file mode 100644 index 00000000..6c063e81 --- /dev/null +++ b/grafana-plugin/src/icons/heart-line.svg @@ -0,0 +1,24 @@ + + + + + + + diff --git a/grafana-plugin/src/icons/index.tsx b/grafana-plugin/src/icons/index.tsx index fc1b0d3a..7b77d8f6 100644 --- a/grafana-plugin/src/icons/index.tsx +++ b/grafana-plugin/src/icons/index.tsx @@ -168,6 +168,42 @@ export const HeartRedIcon = (props: IconProps) => ( ); +export const HeartIcon = (props: IconProps) => ( + + + + + +); + +export const CrossCircleIcon = (props: IconProps) => ( + + + +); + export const GrafanaIcon = (props: IconProps) => ( - + - + diff --git a/grafana-plugin/src/img/screenshot.png b/grafana-plugin/src/img/screenshot.png index d9adbe25..a0d5d3cf 100644 Binary files a/grafana-plugin/src/img/screenshot.png and b/grafana-plugin/src/img/screenshot.png differ diff --git a/grafana-plugin/src/index.css b/grafana-plugin/src/index.css index 93b9dfe1..bdfece87 100644 --- a/grafana-plugin/src/index.css +++ b/grafana-plugin/src/index.css @@ -30,13 +30,20 @@ background: var(--highlighted-row-bg); } -@media (max-width: 1440px) { +/* This is for Grafana 8, remove later */ +@media (max-width: 1540px) { .page-header__tabs > ul > li > a > div { display: none; } } -@media (max-width: 1200px) { +@media (max-width: 1540px) { + .page-header__tabs > div > div > a > div { + display: none; + } +} + +@media (max-width: 1300px) { .sidemenu { position: fixed !important; height: 100%; diff --git a/grafana-plugin/src/models/alertgroup/alertgroup.ts b/grafana-plugin/src/models/alertgroup/alertgroup.ts index 684948a1..ba284236 
100644 --- a/grafana-plugin/src/models/alertgroup/alertgroup.ts +++ b/grafana-plugin/src/models/alertgroup/alertgroup.ts @@ -68,7 +68,7 @@ export class AlertGroupStore extends BaseStore { constructor(rootStore: RootStore) { super(rootStore); - this.path = '/alertgroups1/'; + this.path = '/alertgroups/'; } async attachAlert(pk: Alert['pk'], rootPk: Alert['pk']) { diff --git a/grafana-plugin/src/models/base_store.ts b/grafana-plugin/src/models/base_store.ts index 9af0c5d4..ab46fe3c 100644 --- a/grafana-plugin/src/models/base_store.ts +++ b/grafana-plugin/src/models/base_store.ts @@ -52,10 +52,11 @@ export default class BaseStore { } @action - async update(id: any, data: any) { + async update(id: any, data: any, params: any = null) { const result = await makeRequest(`${this.path}${id}/`, { method: 'PUT', data, + params: params, }).catch(this.onApiError); // Update env_status field for current team diff --git a/grafana-plugin/src/models/cloud/cloud.ts b/grafana-plugin/src/models/cloud/cloud.ts new file mode 100644 index 00000000..fa19125a --- /dev/null +++ b/grafana-plugin/src/models/cloud/cloud.ts @@ -0,0 +1,83 @@ +import { get } from 'lodash-es'; +import { action, computed, observable } from 'mobx'; + +import BaseStore from 'models/base_store'; +import { NotificationPolicyType } from 'models/notification_policy'; +import { User } from 'models/user/user.types'; +import { makeRequest } from 'network'; +import { Mixpanel } from 'services/mixpanel'; +import { RootStore } from 'state'; +import { move } from 'state/helpers'; + +import { Cloud } from './cloud.types'; + +export class CloudStore extends BaseStore { + @observable.shallow + searchResult: { matched_users_count?: number; results?: Array } = {}; + + @observable.shallow + items: { [id: string]: Cloud } = {}; + + constructor(rootStore: RootStore) { + super(rootStore); + + this.path = '/cloud_users/'; + } + + @action + async updateItems(page = 1) { + const { matched_users_count, results } = await 
makeRequest(this.path, { + params: { page }, + }); + + this.items = { + ...this.items, + ...results.reduce( + (acc: { [key: number]: Cloud }, item: Cloud) => ({ + ...acc, + [item.id]: item, + }), + {} + ), + }; + + this.searchResult = { + matched_users_count, + results: results.map((item: Cloud) => item.id), + }; + } + + getSearchResult() { + return { + matched_users_count: this.searchResult.matched_users_count, + results: this.searchResult.results && this.searchResult.results.map((id: Cloud['id']) => this.items?.[id]), + }; + } + + async syncCloudUsers() { + return await makeRequest(`${this.path}`, { method: 'POST' }); + } + + async syncCloudUser(id: string) { + return await makeRequest(`${this.path}${id}/sync/`, { method: 'POST' }); + } + + async getCloudHeartbeat() { + return await makeRequest(`/cloud_heartbeat/`, { method: 'POST' }).catch((error) => { + console.log(error); + }); + } + + async getCloudUser(id: string) { + return await makeRequest(`${this.path}${id}`, { method: 'GET' }); + } + + async getCloudConnectionStatus() { + return await makeRequest(`/cloud_connection/`, { method: 'GET' }); + } + + @action + async disconnectToCloud() { + return await makeRequest(`/cloud_connection/`, { method: 'DELETE' }); + } +} diff --git a/grafana-plugin/src/models/cloud/cloud.types.ts b/grafana-plugin/src/models/cloud/cloud.types.ts new file mode 100644 index 00000000..15658b3d --- /dev/null +++ b/grafana-plugin/src/models/cloud/cloud.types.ts @@ -0,0 +1,9 @@ +export interface Cloud { + id: string; + username: string; + email: string; + cloud_data?: { + status?: number; + link?: string; + }; +} diff --git a/grafana-plugin/src/models/global_setting/global_setting.ts b/grafana-plugin/src/models/global_setting/global_setting.ts index a7e6deb0..edcb2986 100644 --- a/grafana-plugin/src/models/global_setting/global_setting.ts +++ b/grafana-plugin/src/models/global_setting/global_setting.ts @@ -60,4 +60,9 @@ export class GlobalSettingStore extends BaseStore { return 
this.searchResult[query].map((globalSettingId: GlobalSetting['id']) => this.items[globalSettingId]); } + + async getGlobalSettingItemByName(name: string) { + const results = await this.getAll(); + return results.find((element: { name: string }) => element.name === name); + } } diff --git a/grafana-plugin/src/models/user/user.types.ts b/grafana-plugin/src/models/user/user.types.ts index cb4e03bf..4f1ba2ed 100644 --- a/grafana-plugin/src/models/user/user.types.ts +++ b/grafana-plugin/src/models/user/user.types.ts @@ -50,4 +50,7 @@ export interface User { permissions: UserAction[]; trigger_video_call?: boolean; export_url?: string; + status?: number; + link?: string; + cloud_connection_status?: number; } diff --git a/grafana-plugin/src/pages/cloud/CloudPage.module.css b/grafana-plugin/src/pages/cloud/CloudPage.module.css new file mode 100644 index 00000000..416d2a70 --- /dev/null +++ b/grafana-plugin/src/pages/cloud/CloudPage.module.css @@ -0,0 +1,67 @@ +.info-block { + width: 70%; + min-width: 1100px; + padding: 24px; +} + +.warning-message { + color: var(--warning-text-color); +} + +.success-message { + color: var(--success-text-color); +} + +.error-message { + color: var(--error-text-color); +} + +.user-table { + margin-top: 24px; + width: 100%; +} + +.user-row { + height: 32px; +} + +.cloud-page-title, +.heartbit-button { + margin-top: 24px; +} + +.cloud-oncall-name { + color: #f55f3e; +} + +.block-icon { + color: var(--secondary-text-color); +} + +.error-icon { + display: inline-block; + white-space: break-spaces; + line-height: 20px; + color: var(--error-text-color); +} + +.error-icon svg { + vertical-align: middle; +} + +.heart-icon { + color: var(--secondary-text-color); + margin-right: 8px; +} + +.block-button { + margin-top: 24px; +} + +.table-title { + margin-bottom: 16px; +} + +.table-button { + float: right; +} diff --git a/grafana-plugin/src/pages/cloud/CloudPage.tsx b/grafana-plugin/src/pages/cloud/CloudPage.tsx new file mode 100644 index 
00000000..63df35e0 --- /dev/null +++ b/grafana-plugin/src/pages/cloud/CloudPage.tsx @@ -0,0 +1,390 @@ +import React, { useCallback, useEffect, useState } from 'react'; + +import { getLocationSrv, LocationUpdate } from '@grafana/runtime'; +import { + Field, + Input, + Button, + Modal, + HorizontalGroup, + Alert, + Icon, + VerticalGroup, + Table, + LoadingPlaceholder, +} from '@grafana/ui'; +import cn from 'classnames/bind'; +import { observer } from 'mobx-react'; + +import Block from 'components/GBlock/Block'; +import GTable from 'components/GTable/GTable'; +import PluginLink from 'components/PluginLink/PluginLink'; +import Text from 'components/Text/Text'; +import WithConfirm from 'components/WithConfirm/WithConfirm'; +import { CrossCircleIcon, HeartIcon } from 'icons'; +import { Cloud } from 'models/cloud/cloud.types'; +import { WithStoreProps } from 'state/types'; +import { useStore } from 'state/useStore'; +import { withMobXProviderContext } from 'state/withStore'; +import { openErrorNotification } from 'utils'; + +import styles from './CloudPage.module.css'; + +const cx = cn.bind(styles); + +interface CloudPageProps extends WithStoreProps {} +const ITEMS_PER_PAGE = 50; + +const CloudPage = observer((props: CloudPageProps) => { + const store = useStore(); + const [page, setPage] = useState(1); + const [cloudApiKey, setCloudApiKey] = useState(''); + const [apiKeyError, setApiKeyError] = useState(false); + const [cloudIsConnected, setCloudIsConnected] = useState(undefined); + const [cloudNotificationsEnabled, setCloudNotificationsEnabled] = useState(false); + const [heartbeatLink, setheartbeatLink] = useState(null); + const [heartbeatEnabled, setheartbeatEnabled] = useState(false); + const [showConfirmationModal, setShowConfirmationModal] = useState(false); + const [syncingUsers, setSyncingUsers] = useState(false); + + useEffect(() => { + store.cloudStore.updateItems(page); + store.cloudStore.getCloudConnectionStatus().then((cloudStatus) => { + 
setCloudIsConnected(cloudStatus.cloud_connection_status); + setheartbeatEnabled(cloudStatus.cloud_heartbeat_enabled); + setheartbeatLink(cloudStatus.cloud_heartbeat_link); + setCloudNotificationsEnabled(cloudStatus.cloud_notifications_enabled); + }); + }, [cloudIsConnected]); + + const { matched_users_count, results } = store.cloudStore.getSearchResult(); + + const handleChangePage = (page: number) => { + setPage(page); + store.cloudStore.updateItems(page); + }; + + const handleChangeCloudApiKey = useCallback((e) => { + setCloudApiKey(e.target.value); + setApiKeyError(false); + }, []); + + const disconnectCloudOncall = () => { + setCloudIsConnected(false); + store.cloudStore.disconnectToCloud(); + }; + + const connectToCloud = async () => { + setShowConfirmationModal(false); + const globalSettingItem = await store.globalSettingStore.getGlobalSettingItemByName('GRAFANA_CLOUD_ONCALL_TOKEN'); + store.globalSettingStore + .update(globalSettingItem?.id, { name: 'GRAFANA_CLOUD_ONCALL_TOKEN', value: cloudApiKey }, { sync_users: false }) + .then(async (response) => { + if (response.error) { + setCloudIsConnected(false); + setApiKeyError(true); + openErrorNotification(response.error); + } else { + setCloudIsConnected(true); + syncUsers(); + const heartbeatData: { link: string } = await store.cloudStore.getCloudHeartbeat(); + setheartbeatLink(heartbeatData?.link); + } + }); + }; + + const syncUsers = async () => { + setSyncingUsers(true); + await store.cloudStore.syncCloudUsers(); + await store.cloudStore.updateItems(); + setSyncingUsers(false); + }; + + const handleLinkClick = (link: string) => { + window.open(link, '_blank'); + }; + + const renderButtons = (user: Cloud) => { + switch (user?.cloud_data?.status) { + case 0: + return null; + case 1: + return null; + case 2: + return ( + + ); + case 3: + return ( + + ); + default: + return null; + } + }; + + const renderStatus = (user: Cloud) => { + switch (user?.cloud_data?.status) { + case 0: + return Grafana Cloud is not 
synced; + case 1: + return User not found in Grafana Cloud; + case 2: + return Phone number is not verified in Grafana Cloud; + case 3: + return Phone number verified; + + default: + return User not found in Grafana Cloud; + } + }; + + const renderStatusIcon = (user: Cloud) => { + switch (user?.cloud_data?.status) { + case 0: + return ( +
+ +
+ ); + case 1: + return ( +
+ +
+ ); + + case 2: + return ; + case 3: + return ; + default: + return ( +
+ +
+ ); + } + }; + + const renderEmail = (user: Cloud) => { + return {user.email}; + }; + + const columns = [ + { + width: '2%', + render: renderStatusIcon, + key: 'statusIcon', + }, + { + width: '28%', + render: renderEmail, + key: 'email', + }, + { + width: '50%', + render: renderStatus, + key: 'status', + }, + { + width: '20%', + render: renderButtons, + key: 'buttons', + align: 'actions', + }, + ]; + + const ConnectedBlock = ( + + + + + Cloud OnCall API key + + Cloud OnCall is sucessfully connected. + + + + + + + + + + + + + Monitor cloud instance with heartbeat + + + Once connected, current OnCall instance will send heartbeats every 3 minutes to the cloud Instance. If no + heartbeat will be received in 10 minutes, cloud instance will issue an alert. + +
+ {heartbeatEnabled ? ( + heartbeatLink ? ( + + ) : ( + Heartbeat will be created in a moment automatically + ) + ) : ( + Heartbeat is not enabled. You can go to the Env Variables tab and enable it + )} +
+
+
+ + {cloudNotificationsEnabled ? ( + + + SMS and phone call notifications + + +
+ + { + 'Ask your users to sign up in Grafana Cloud, verify phone number and feel free to set up SMS & phone call notificaitons in personal settings! Only users with Admin or Editor role will be synced.' + } + + + ( +
+ + + {matched_users_count ? matched_users_count : 0} user + {matched_users_count === 1 ? '' : 's'} + {` matched between OSS and Cloud OnCall`} + + {syncingUsers ? ( + + ) : ( + + )} + +
+ )} + rowKey="id" + // @ts-ignore + columns={columns} + data={results} + pagination={{ + page, + total: Math.ceil((matched_users_count || 0) / ITEMS_PER_PAGE), + onChange: handleChangePage, + }} + /> +
+
+ ) : ( + + + SMS and phone call notifications + + + {'Please enable Grafana cloud notification to be able to see list of cloud users'} + + + )} +
+
+ ); + + const DisconnectedBlock = ( + + + + + Cloud OnCall API key + + + + + + + + + + + + + + Monitor cloud instance with heartbeat + + + Once connected, current OnCall instance will send heartbeats every 3 minutes to the cloud Instance. If no + heartbeat will be received in 10 minutes, cloud instance will issue an alert. + + + + + + + SMS and phone call notifications + + + Users matched between OSS and Cloud OnCall currently unavailable. + + + + ); + + return ( +
+ + + Connect Open Source OnCall and Cloud OnCall + + {cloudIsConnected === undefined ? ( + + ) : cloudIsConnected ? ( + ConnectedBlock + ) : ( + DisconnectedBlock + )} + +
+ ); +}); + +export default withMobXProviderContext(CloudPage); diff --git a/grafana-plugin/src/pages/incidents/Incidents.tsx b/grafana-plugin/src/pages/incidents/Incidents.tsx index 9ba1dd71..6e8f3f72 100644 --- a/grafana-plugin/src/pages/incidents/Incidents.tsx +++ b/grafana-plugin/src/pages/incidents/Incidents.tsx @@ -302,12 +302,11 @@ class Incidents extends React.Component (results && results.some((alert: AlertType) => alert.undoAction)) || Object.keys(affectedRows).length ); - console.log('results', results); return (
{this.renderBulkActions()} a { + color: var(--primary-text-link); +} + +.description-style { + word-wrap: break-word; + word-break: break-word; +} diff --git a/grafana-plugin/src/pages/livesettings/LiveSettingsPage.tsx b/grafana-plugin/src/pages/livesettings/LiveSettingsPage.tsx index 71e8b6aa..9542253f 100644 --- a/grafana-plugin/src/pages/livesettings/LiveSettingsPage.tsx +++ b/grafana-plugin/src/pages/livesettings/LiveSettingsPage.tsx @@ -14,7 +14,9 @@ import { } from '@grafana/ui'; import cn from 'classnames/bind'; import { omit } from 'lodash-es'; +import { observe } from 'mobx'; import { observer } from 'mobx-react'; +import { Lambda } from 'mobx/lib/internal'; import { AlignType } from 'rc-table/lib/interface'; import { Redirect } from 'react-router-dom'; @@ -46,6 +48,23 @@ class LiveSettings extends React.Component hideValues: true, }; + disposer: Lambda; + + constructor(props: LiveSettingsProps) { + super(props); + + const { store } = props; + + this.disposer = observe(store.userStore, (change) => { + if (change.name === 'currentUserPk') { + this.update(); + } + }); + } + + componentWillUnmount() { + this.disposer(); + } componentDidMount() { this.update(); } @@ -197,6 +216,7 @@ class LiveSettings extends React.Component dangerouslySetInnerHTML={{ __html: item.description, }} + className={cx('description-style')} /> ); }; diff --git a/grafana-plugin/src/pages/users/Users.module.css b/grafana-plugin/src/pages/users/Users.module.css index c9922e8c..021460b0 100644 --- a/grafana-plugin/src/pages/users/Users.module.css +++ b/grafana-plugin/src/pages/users/Users.module.css @@ -50,3 +50,22 @@ margin-right: 8px; color: var(--warning-text-color); } + +.error-icon { + display: inline-block; + white-space: break-spaces; + line-height: 20px; + color: var(--error-text-color); +} + +.error-icon svg { + vertical-align: middle; +} + +.warning-message { + color: var(--warning-text-color); +} + +.success-message { + color: var(--success-text-color); +} diff --git 
a/grafana-plugin/src/pages/users/Users.tsx b/grafana-plugin/src/pages/users/Users.tsx index 67a0bf59..87abf9cd 100644 --- a/grafana-plugin/src/pages/users/Users.tsx +++ b/grafana-plugin/src/pages/users/Users.tsx @@ -14,8 +14,10 @@ import Text from 'components/Text/Text'; import UsersFilters from 'components/UsersFilters/UsersFilters'; import UserSettings from 'containers/UserSettings/UserSettings'; import { WithPermissionControl } from 'containers/WithPermissionControl/WithPermissionControl'; +import { CrossCircleIcon } from 'icons'; import { getRole } from 'models/user/user.helpers'; import { User, User as UserType, UserRole } from 'models/user/user.types'; +import { AppFeature } from 'state/features'; import { WithStoreProps } from 'state/types'; import { UserAction } from 'state/userAction'; import { withMobXProviderContext } from 'state/withStore'; @@ -290,10 +292,37 @@ class Users extends React.Component { }; renderNote = (user: UserType) => { - if (!user.verified_phone_number || !user.slack_user_identity) { + const { store } = this.props; + let phone_verified; + let phone_verified_message; + if (store.hasFeature(AppFeature.CloudNotifications)) { + // If cloud notifications is enabled show message about its status, not local phone verification. 
+ switch (user.cloud_connection_status) { + case 0: + phone_verified = false; + phone_verified_message = 'Cloud is not synced'; + break; + case 1: + phone_verified = false; + phone_verified_message = 'User not matched with cloud'; + break; + case 2: + phone_verified = false; + phone_verified_message = 'Phone number is not verified in Grafana Cloud'; + break; + case 3: + phone_verified = false; + phone_verified_message = 'Phone number is verified in Grafana Cloud'; + break; + } + } else { + phone_verified = user.verified_phone_number; + phone_verified_message = 'Phone not verified'; + } + if (!phone_verified || !user.slack_user_identity || !user.telegram_configuration) { let texts = []; - if (!user.verified_phone_number) { - texts.push('Phone not verified'); + if (!phone_verified) { + texts.push(phone_verified_message); } if (!user.slack_user_identity) { texts.push('Slack not verified'); diff --git a/grafana-plugin/src/state/features.ts b/grafana-plugin/src/state/features.ts index 8363575c..bf915f19 100644 --- a/grafana-plugin/src/state/features.ts +++ b/grafana-plugin/src/state/features.ts @@ -3,4 +3,6 @@ export enum AppFeature { Telegram = 'telegram', LiveSettings = 'live_settings', MobileApp = 'mobile_app', + CloudNotifications = 'grafana_cloud_notifications', + CloudConnection = 'grafana_cloud_connection', } diff --git a/grafana-plugin/src/state/rootBaseStore.ts b/grafana-plugin/src/state/rootBaseStore.ts index 5900ab1f..331f6ca1 100644 --- a/grafana-plugin/src/state/rootBaseStore.ts +++ b/grafana-plugin/src/state/rootBaseStore.ts @@ -9,6 +9,7 @@ import { AlertReceiveChannel } from 'models/alert_receive_channel/alert_receive_ import { AlertReceiveChannelFiltersStore } from 'models/alert_receive_channel_filters/alert_receive_channel_filters'; import { AlertGroupStore } from 'models/alertgroup/alertgroup'; import { ApiTokenStore } from 'models/api_token/api_token'; +import { CloudStore } from 'models/cloud/cloud'; import { EscalationChainStore } from 
'models/escalation_chain/escalation_chain'; import { EscalationPolicyStore } from 'models/escalation_policy/escalation_policy'; import { GlobalSettingStore } from 'models/global_setting/global_setting'; @@ -81,6 +82,7 @@ export class RootBaseStore { // -------------------------- userStore: UserStore = new UserStore(this); + cloudStore: CloudStore = new CloudStore(this); grafanaTeamStore: GrafanaTeamStore = new GrafanaTeamStore(this); alertReceiveChannelStore: AlertReceiveChannelStore = new AlertReceiveChannelStore(this); outgoingWebhookStore: OutgoingWebhookStore = new OutgoingWebhookStore(this); diff --git a/grafana-plugin/src/utils/consts.ts b/grafana-plugin/src/utils/consts.ts index 7546075e..d2ad118c 100644 --- a/grafana-plugin/src/utils/consts.ts +++ b/grafana-plugin/src/utils/consts.ts @@ -1,4 +1,4 @@ import plugin from '../../package.json'; // eslint-disable-line export const APP_TITLE = 'Grafana OnCall'; -export const APP_SUBTITLE = `Incident Response powered by Amixr (${plugin?.version})`; +export const APP_SUBTITLE = `Developer-friendly incident response (${plugin?.version})`; diff --git a/grafana-plugin/src/utils/hooks.ts b/grafana-plugin/src/utils/hooks.ts index bb456564..b26ff31e 100644 --- a/grafana-plugin/src/utils/hooks.ts +++ b/grafana-plugin/src/utils/hooks.ts @@ -16,6 +16,7 @@ type Args = { orgRole: 'Viewer' | 'Editor' | 'Admin'; }; enableLiveSettings: boolean; + enableCloudPage: boolean; }; export function useForceUpdate() { @@ -23,7 +24,7 @@ export function useForceUpdate() { return () => setValue((value) => value + 1); } -export function useNavModel({ meta, pages, path, page, grafanaUser, enableLiveSettings }: Args) { +export function useNavModel({ meta, pages, path, page, grafanaUser, enableLiveSettings, enableCloudPage }: Args) { return useMemo(() => { const tabs: NavModelItem[] = []; @@ -36,7 +37,8 @@ export function useNavModel({ meta, pages, path, page, grafanaUser, enableLiveSe hideFromTabs: hideFromTabs || (role === 'Admin' && 
grafanaUser.orgRole !== role) || - (id === 'live-settings' && !enableLiveSettings), + (id === 'live-settings' && !enableLiveSettings) || + (id === 'cloud' && !enableCloudPage), }); if (page === id) { @@ -61,7 +63,7 @@ export function useNavModel({ meta, pages, path, page, grafanaUser, enableLiveSe node, main: node, }; - }, [meta.info.logos.large, pages, path, page, enableLiveSettings]); + }, [meta.info.logos.large, pages, path, page, enableLiveSettings, enableCloudPage]); } export function usePrevious(value: any) { diff --git a/grafana-plugin/src/vars.css b/grafana-plugin/src/vars.css index a0af933b..0216e04c 100644 --- a/grafana-plugin/src/vars.css +++ b/grafana-plugin/src/vars.css @@ -22,6 +22,8 @@ --secondary-text-color: rgba(36, 41, 46, 0.75); --disabled-text-color: rgba(36, 41, 46, 0.5); --warning-text-color: #8a6c00; + --success-text-color: rgb(10, 118, 78); + --error-text-color: rgb(207, 14, 91); --primary-text-link: #1f62e0; --timeline-icon-background: rgba(70, 76, 84, 0); --timeline-icon-background-resolution-note: rgba(50, 116, 217, 0); @@ -38,6 +40,8 @@ --secondary-text-color: rgba(204, 204, 220, 0.65); --disabled-text-color: rgba(204, 204, 220, 0.4); --warning-text-color: #f8d06b; + --success-text-color: rgb(108, 207, 142); + --error-text-color: rgb(255, 82, 134); --primary-text-link: #6e9fff; --timeline-icon-background: rgba(70, 76, 84, 1); --timeline-icon-background-resolution-note: rgba(50, 116, 217, 1); diff --git a/grafana-plugin/tools/plop/generators/appendReadmeFile.js b/grafana-plugin/tools/plop/generators/appendReadmeFile.js deleted file mode 100644 index 70f8f850..00000000 --- a/grafana-plugin/tools/plop/generators/appendReadmeFile.js +++ /dev/null @@ -1,16 +0,0 @@ -module.exports = function createReadmeFiles(answers) { - const actions = []; - - const pathToApp = 'src'; - - const pathToReadmeTemplate = 'tools/plop/templates'; - - actions.push({ - type: 'modify', - path: `${pathToApp}/README.md`, - pattern: '/* BUILD INFO */', - templateFile: 
`${pathToReadmeTemplate}/BuildInfo.md.hbs`, - }); - - return actions; -}; diff --git a/grafana-plugin/tools/plop/prompts/readmePrompts.js b/grafana-plugin/tools/plop/prompts/readmePrompts.js deleted file mode 100644 index 5568a762..00000000 --- a/grafana-plugin/tools/plop/prompts/readmePrompts.js +++ /dev/null @@ -1,17 +0,0 @@ -module.exports = [ - { - type: 'input', - name: 'gitTag', - message: 'Git Tag', - }, - { - type: 'input', - name: 'gitBranch', - message: 'Git Branch', - }, - { - type: 'input', - name: 'gitCommit', - message: 'Git Commit', - }, -]; diff --git a/grafana-plugin/tools/plop/templates/BuildInfo.md.hbs b/grafana-plugin/tools/plop/templates/BuildInfo.md.hbs deleted file mode 100644 index 4f43b81d..00000000 --- a/grafana-plugin/tools/plop/templates/BuildInfo.md.hbs +++ /dev/null @@ -1 +0,0 @@ -Build Info - Tag: {{gitTag}} - Branch: {{gitBranch}} - Commit: {{gitCommit}} \ No newline at end of file diff --git a/grafana-plugin/yarn.lock b/grafana-plugin/yarn.lock index 4aa2a4f2..a7e1204d 100644 --- a/grafana-plugin/yarn.lock +++ b/grafana-plugin/yarn.lock @@ -2,6 +2,14 @@ # yarn lockfile v1 +"@ampproject/remapping@^2.1.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== + dependencies: + "@jridgewell/gen-mapping" "^0.1.0" + "@jridgewell/trace-mapping" "^0.3.9" + "@babel/code-frame@7.12.11": version "7.12.11" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.12.11.tgz#f4ad435aa263db935b8f10f2c552d23fb716a63f" @@ -23,13 +31,6 @@ dependencies: "@babel/highlight" "^7.12.13" -"@babel/code-frame@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb" - integrity 
sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw== - dependencies: - "@babel/highlight" "^7.14.5" - "@babel/code-frame@^7.16.7", "@babel/code-frame@^7.8.3": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789" @@ -42,16 +43,16 @@ resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.14.4.tgz#45720fe0cecf3fd42019e1d12cc3d27fadc98d58" integrity sha512-i2wXrWQNkH6JplJQGn3Rd2I4Pij8GdHkXwHMxm+zV5YG/Jci+bCNrWZEWC4o+umiDkRrRs4dVzH3X4GP7vyjQQ== -"@babel/compat-data@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.14.5.tgz#8ef4c18e58e801c5c95d3c1c0f2874a2680fadea" - integrity sha512-kixrYn4JwfAVPa0f2yfzc2AWti6WRRyO3XjWW5PJAvtE11qhSayrrcrEnee05KAtNaPC+EwehE8Qt1UedEVB8w== - "@babel/compat-data@^7.16.4", "@babel/compat-data@^7.9.0": version "7.16.8" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.16.8.tgz#31560f9f29fdf1868de8cb55049538a1b9732a60" integrity sha512-m7OkX0IdKLKPpBlJtF561YJal5y/jyI5fNfWbPxh2D/nbzzGI4qRyrD8xO2jB24u7l+5I2a43scCG2IrfjC50Q== +"@babel/compat-data@^7.17.10": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.18.5.tgz#acac0c839e317038c73137fbb6ef71a1d6238471" + integrity sha512-BxhE40PVCBxVEJsSBhB6UWyAuqJRxGsAw8BdHMJ3AKGydcwuWW4kOO3HmqBQAdcq/OP+/DlTVxLvsCzRTnZuGg== + "@babel/core@7.9.0": version "7.9.0" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.9.0.tgz#ac977b538b77e132ff706f3b8a4dbad09c03c56e" @@ -74,27 +75,6 @@ semver "^5.4.1" source-map "^0.5.0" -"@babel/core@>=7.9.0": - version "7.14.6" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.14.6.tgz#e0814ec1a950032ff16c13a2721de39a8416fcab" - integrity sha512-gJnOEWSqTk96qG5BoIrl5bVtc23DCycmIePPYnamY9RboYdI4nFy5vAQMSl81O5K/W0sLDWfGysnOECC+KUUCA== - dependencies: - "@babel/code-frame" "^7.14.5" - 
"@babel/generator" "^7.14.5" - "@babel/helper-compilation-targets" "^7.14.5" - "@babel/helper-module-transforms" "^7.14.5" - "@babel/helpers" "^7.14.6" - "@babel/parser" "^7.14.6" - "@babel/template" "^7.14.5" - "@babel/traverse" "^7.14.5" - "@babel/types" "^7.14.5" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.2" - json5 "^2.1.2" - semver "^6.3.0" - source-map "^0.5.0" - "@babel/core@^7.1.0", "@babel/core@^7.7.5": version "7.14.3" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.14.3.tgz#5395e30405f0776067fbd9cf0884f15bfb770a38" @@ -116,6 +96,27 @@ semver "^6.3.0" source-map "^0.5.0" +"@babel/core@^7.17.9": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.18.5.tgz#c597fa680e58d571c28dda9827669c78cdd7f000" + integrity sha512-MGY8vg3DxMnctw0LdvSEojOsumc70g0t18gNyUdAZqB1Rpd1Bqo/svHGvt+UJ6JcGX+DIekGFDxxIWofBxLCnQ== + dependencies: + "@ampproject/remapping" "^2.1.0" + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.18.2" + "@babel/helper-compilation-targets" "^7.18.2" + "@babel/helper-module-transforms" "^7.18.0" + "@babel/helpers" "^7.18.2" + "@babel/parser" "^7.18.5" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.5" + "@babel/types" "^7.18.4" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.1" + semver "^6.3.0" + "@babel/generator@^7.14.2", "@babel/generator@^7.14.3", "@babel/generator@^7.4.0": version "7.14.3" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.3.tgz#0c2652d91f7bddab7cccc6ba8157e4f40dcedb91" @@ -125,15 +126,6 @@ jsesc "^2.5.1" source-map "^0.5.0" -"@babel/generator@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.14.5.tgz#848d7b9f031caca9d0cd0af01b063f226f52d785" - integrity sha512-y3rlP+/G25OIX3mYKKIOlQRcqj7YgrvHxOLbVmyLJ9bPmi5ttvUmpydVjcFjZphOktWuA7ovbx91ECloWTfjIA== - dependencies: - "@babel/types" "^7.14.5" - jsesc "^2.5.1" - source-map "^0.5.0" - 
"@babel/generator@^7.16.8", "@babel/generator@^7.9.0": version "7.16.8" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.16.8.tgz#359d44d966b8cd059d543250ce79596f792f2ebe" @@ -143,6 +135,15 @@ jsesc "^2.5.1" source-map "^0.5.0" +"@babel/generator@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.18.2.tgz#33873d6f89b21efe2da63fe554460f3df1c5880d" + integrity sha512-W1lG5vUwFvfMd8HVXqdfbuG7RuaSrTCCD8cl8fP8wOivdbtbIg2Db3IWUcgvfxKbbn6ZBGYRW/Zk1MIwK49mgw== + dependencies: + "@babel/types" "^7.18.2" + "@jridgewell/gen-mapping" "^0.3.0" + jsesc "^2.5.1" + "@babel/helper-annotate-as-pure@^7.12.13": version "7.12.13" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.12.13.tgz#0f58e86dfc4bb3b1fcd7db806570e177d439b6ab" @@ -175,16 +176,6 @@ browserslist "^4.16.6" semver "^6.3.0" -"@babel/helper-compilation-targets@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.14.5.tgz#7a99c5d0967911e972fe2c3411f7d5b498498ecf" - integrity sha512-v+QtZqXEiOnpO6EYvlImB6zCD2Lel06RzOPzmkz/D/XgQiUu3C/Jb1LOqSt/AIA34TYi/Q+KlT8vTQrgdxkbLw== - dependencies: - "@babel/compat-data" "^7.14.5" - "@babel/helper-validator-option" "^7.14.5" - browserslist "^4.16.6" - semver "^6.3.0" - "@babel/helper-compilation-targets@^7.16.7", "@babel/helper-compilation-targets@^7.8.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.16.7.tgz#06e66c5f299601e6c7da350049315e83209d551b" @@ -195,6 +186,16 @@ browserslist "^4.17.5" semver "^6.3.0" +"@babel/helper-compilation-targets@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.18.2.tgz#67a85a10cbd5fc7f1457fec2e7f45441dc6c754b" + integrity 
sha512-s1jnPotJS9uQnzFtiZVBUxe67CuBa679oWFHpxYYnTpRL/1ffhyX44R9uYiXoa/pLXcY9H2moJta0iaanlk/rQ== + dependencies: + "@babel/compat-data" "^7.17.10" + "@babel/helper-validator-option" "^7.16.7" + browserslist "^4.20.2" + semver "^6.3.0" + "@babel/helper-create-regexp-features-plugin@^7.12.13": version "7.14.3" resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.14.3.tgz#149aa6d78c016e318c43e2409a0ae9c136a86688" @@ -218,6 +219,11 @@ dependencies: "@babel/types" "^7.16.7" +"@babel/helper-environment-visitor@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.2.tgz#8a6d2dedb53f6bf248e31b4baf38739ee4a637bd" + integrity sha512-14GQKWkX9oJzPiQQ7/J36FTXcD4kSp8egKjO9nINlSKiHITRA9q/R74qu8S9xlc/b/yjsJItQUeeh3xnGN0voQ== + "@babel/helper-explode-assignable-expression@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.16.7.tgz#12a6d8522fdd834f194e868af6354e8650242b7a" @@ -234,15 +240,6 @@ "@babel/template" "^7.12.13" "@babel/types" "^7.14.2" -"@babel/helper-function-name@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.14.5.tgz#89e2c474972f15d8e233b52ee8c480e2cfcd50c4" - integrity sha512-Gjna0AsXWfFvrAuX+VKcN/aNNWonizBj39yGwUzVDVTlMYJMK2Wp6xdpy72mfArFq5uK+NOuexfzZlzI1z9+AQ== - dependencies: - "@babel/helper-get-function-arity" "^7.14.5" - "@babel/template" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/helper-function-name@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.16.7.tgz#f1ec51551fb1c8956bc8dd95f38523b6cf375f8f" @@ -252,6 +249,14 @@ "@babel/template" "^7.16.7" "@babel/types" "^7.16.7" +"@babel/helper-function-name@^7.17.9": + version "7.17.9" + resolved 
"https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.17.9.tgz#136fcd54bc1da82fcb47565cf16fd8e444b1ff12" + integrity sha512-7cRisGlVtiVqZ0MW0/yFB4atgpGLWEHUVYnb448hZK4x+vih0YO5UoS11XIYtZYqHd0dIPMdUSv8q5K4LdMnIg== + dependencies: + "@babel/template" "^7.16.7" + "@babel/types" "^7.17.0" + "@babel/helper-get-function-arity@^7.12.13": version "7.12.13" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.13.tgz#bc63451d403a3b3082b97e1d8b3fe5bd4091e583" @@ -259,13 +264,6 @@ dependencies: "@babel/types" "^7.12.13" -"@babel/helper-get-function-arity@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.14.5.tgz#25fbfa579b0937eee1f3b805ece4ce398c431815" - integrity sha512-I1Db4Shst5lewOM4V+ZKJzQ0JGGaZ6VY1jYvMghRjqs6DWgxLCIyFt30GlnKkfUeFLpJt2vzbMVEXVSXlIFYUg== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-get-function-arity@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.7.tgz#ea08ac753117a669f1508ba06ebcc49156387419" @@ -273,13 +271,6 @@ dependencies: "@babel/types" "^7.16.7" -"@babel/helper-hoist-variables@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.14.5.tgz#e0dd27c33a78e577d7c8884916a3e7ef1f7c7f8d" - integrity sha512-R1PXiz31Uc0Vxy4OEOm07x0oSjKAdPPCh3tPivn/Eo8cvz6gveAeuyUUPB21Hoiif0uoPQSSdhIPS3352nvdyQ== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-hoist-variables@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz#86bcb19a77a509c7b77d0e22323ef588fa58c246" @@ -294,13 +285,6 @@ dependencies: "@babel/types" "^7.13.12" -"@babel/helper-member-expression-to-functions@^7.14.5": - version "7.14.5" - resolved 
"https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.14.5.tgz#d5c70e4ad13b402c95156c7a53568f504e2fb7b8" - integrity sha512-UxUeEYPrqH1Q/k0yRku1JE7dyfyehNwT6SVkMHvYvPDv4+uu627VXBckVj891BO8ruKBkiDoGnZf4qPDD8abDQ== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-member-expression-to-functions@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.16.7.tgz#42b9ca4b2b200123c3b7e726b0ae5153924905b0" @@ -315,7 +299,7 @@ dependencies: "@babel/types" "^7.16.7" -"@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.14.5": +"@babel/helper-module-imports@^7.12.13": version "7.14.5" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.14.5.tgz#6d1a44df6a38c957aa7c312da076429f11b422f3" integrity sha512-SwrNHu5QWS84XlHwGYPDtCxcA0hrSlL2yhWYLgeOc0w7ccOl2qv4s/nARI0aYZW+bSwAL5CukeXA47B/1NKcnQ== @@ -343,20 +327,6 @@ "@babel/traverse" "^7.14.2" "@babel/types" "^7.14.2" -"@babel/helper-module-transforms@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.14.5.tgz#7de42f10d789b423eb902ebd24031ca77cb1e10e" - integrity sha512-iXpX4KW8LVODuAieD7MzhNjmM6dzYY5tfRqT+R9HDXWl0jPn/djKmA+G9s/2C2T9zggw5tK1QNqZ70USfedOwA== - dependencies: - "@babel/helper-module-imports" "^7.14.5" - "@babel/helper-replace-supers" "^7.14.5" - "@babel/helper-simple-access" "^7.14.5" - "@babel/helper-split-export-declaration" "^7.14.5" - "@babel/helper-validator-identifier" "^7.14.5" - "@babel/template" "^7.14.5" - "@babel/traverse" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/helper-module-transforms@^7.16.7", "@babel/helper-module-transforms@^7.9.0": version "7.16.7" resolved 
"https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.16.7.tgz#7665faeb721a01ca5327ddc6bba15a5cb34b6a41" @@ -371,6 +341,20 @@ "@babel/traverse" "^7.16.7" "@babel/types" "^7.16.7" +"@babel/helper-module-transforms@^7.18.0": + version "7.18.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.18.0.tgz#baf05dec7a5875fb9235bd34ca18bad4e21221cd" + integrity sha512-kclUYSUBIjlvnzN2++K9f2qzYKFgjmnmjwL4zlmU5f8ZtzgWe8s0rUPSTGy2HmK4P8T52MQsS+HTQAgZd3dMEA== + dependencies: + "@babel/helper-environment-visitor" "^7.16.7" + "@babel/helper-module-imports" "^7.16.7" + "@babel/helper-simple-access" "^7.17.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/helper-validator-identifier" "^7.16.7" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.0" + "@babel/types" "^7.18.0" + "@babel/helper-optimise-call-expression@^7.12.13": version "7.12.13" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz#5c02d171b4c8615b1e7163f888c1c81c30a2aaea" @@ -378,13 +362,6 @@ dependencies: "@babel/types" "^7.12.13" -"@babel/helper-optimise-call-expression@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.14.5.tgz#f27395a8619e0665b3f0364cddb41c25d71b499c" - integrity sha512-IqiLIrODUOdnPU9/F8ib1Fx2ohlgDhxnIDU7OEVi+kAbEZcyiF7BLU8W6PfvPi9LzztjS7kcbzbmL7oG8kD6VA== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-optimise-call-expression@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.16.7.tgz#a34e3560605abbd31a18546bd2aad3e6d9a174f2" @@ -426,16 +403,6 @@ "@babel/traverse" "^7.14.2" "@babel/types" "^7.14.4" -"@babel/helper-replace-supers@^7.14.5": - version "7.14.5" - resolved 
"https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.14.5.tgz#0ecc0b03c41cd567b4024ea016134c28414abb94" - integrity sha512-3i1Qe9/8x/hCHINujn+iuHy+mMRLoc77b2nI9TB0zjH1hvn9qGlXjWlggdwUcju36PkPCy/lpM7LLUdcTyH4Ow== - dependencies: - "@babel/helper-member-expression-to-functions" "^7.14.5" - "@babel/helper-optimise-call-expression" "^7.14.5" - "@babel/traverse" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/helper-replace-supers@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.16.7.tgz#e9f5f5f32ac90429c1a4bdec0f231ef0c2838ab1" @@ -454,13 +421,6 @@ dependencies: "@babel/types" "^7.13.12" -"@babel/helper-simple-access@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.14.5.tgz#66ea85cf53ba0b4e588ba77fc813f53abcaa41c4" - integrity sha512-nfBN9xvmCt6nrMZjfhkl7i0oTV3yxR4/FztsbOASyTvVcoYd0TRHh7eMLdlEcCqobydC0LAF3LtC92Iwxo0wyw== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-simple-access@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.16.7.tgz#d656654b9ea08dbb9659b69d61063ccd343ff0f7" @@ -468,6 +428,13 @@ dependencies: "@babel/types" "^7.16.7" +"@babel/helper-simple-access@^7.17.7": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.2.tgz#4dc473c2169ac3a1c9f4a51cfcd091d1c36fcff9" + integrity sha512-7LIrjYzndorDY88MycupkpQLKS1AFfsVRm2k/9PtKScSy5tZq0McZTj+DiMRynboZfIqOKvo03pmhTaUgiD6fQ== + dependencies: + "@babel/types" "^7.18.2" + "@babel/helper-skip-transparent-expression-wrappers@^7.16.0": version "7.16.0" resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.16.0.tgz#0ee3388070147c3ae051e487eca3ebb0e2e8bb09" @@ -482,13 +449,6 @@ dependencies: "@babel/types" "^7.12.13" 
-"@babel/helper-split-export-declaration@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.14.5.tgz#22b23a54ef51c2b7605d851930c1976dd0bc693a" - integrity sha512-hprxVPu6e5Kdp2puZUmvOGjaLv9TCe58E/Fl6hRq4YiVQxIcNvuq6uTM2r1mT/oPskuS9CgR+I94sqAYv0NGKA== - dependencies: - "@babel/types" "^7.14.5" - "@babel/helper-split-export-declaration@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz#0b648c0c42da9d3920d85ad585f2778620b8726b" @@ -516,11 +476,6 @@ resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.12.17.tgz#d1fbf012e1a79b7eebbfdc6d270baaf8d9eb9831" integrity sha512-TopkMDmLzq8ngChwRlyjR6raKD6gMSae4JdYDB8bByKreQgG0RBTuKe9LRxW3wFtUnjxOPRKBDwEH6Mg5KeDfw== -"@babel/helper-validator-option@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.14.5.tgz#6e72a1fff18d5dfcb878e1e62f1a021c4b72d5a3" - integrity sha512-OX8D5eeX4XwcroVW45NMvoYaIuFI+GQpA2a8Gi+X/U/cDUIRsV37qQfF905F0htTRCREQIB4KqPeaveRJUl3Ow== - "@babel/helper-validator-option@^7.16.7": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz#b203ce62ce5fe153899b617c08957de860de4d23" @@ -545,14 +500,14 @@ "@babel/traverse" "^7.14.0" "@babel/types" "^7.14.0" -"@babel/helpers@^7.14.6": - version "7.14.6" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.14.6.tgz#5b58306b95f1b47e2a0199434fa8658fa6c21635" - integrity sha512-yesp1ENQBiLI+iYHSJdoZKUtRpfTlL1grDIX9NRlAVppljLw/4tTyYupIB7uIYmC3stW/imAv8EqaKaS/ibmeA== +"@babel/helpers@^7.18.2": + version "7.18.2" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.18.2.tgz#970d74f0deadc3f5a938bfa250738eb4ac889384" + integrity 
sha512-j+d+u5xT5utcQSzrh9p+PaJX94h++KN+ng9b9WEJq7pkUPAd61FGqhjuUEdfknb3E/uDBb7ruwEeKkIxNJPIrg== dependencies: - "@babel/template" "^7.14.5" - "@babel/traverse" "^7.14.5" - "@babel/types" "^7.14.5" + "@babel/template" "^7.16.7" + "@babel/traverse" "^7.18.2" + "@babel/types" "^7.18.2" "@babel/helpers@^7.9.0": version "7.16.7" @@ -572,15 +527,6 @@ chalk "^2.0.0" js-tokens "^4.0.0" -"@babel/highlight@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.14.5.tgz#6861a52f03966405001f6aa534a01a24d99e8cd9" - integrity sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg== - dependencies: - "@babel/helper-validator-identifier" "^7.14.5" - chalk "^2.0.0" - js-tokens "^4.0.0" - "@babel/highlight@^7.16.7", "@babel/highlight@^7.8.3": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.16.7.tgz#81a01d7d675046f0d96f82450d9d9578bdfd6b0b" @@ -595,21 +541,16 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.4.tgz#a5c560d6db6cd8e6ed342368dea8039232cbab18" integrity sha512-ArliyUsWDUqEGfWcmzpGUzNfLxTdTp6WU4IuP6QFSp9gGfWS6boxFCkJSJ/L4+RG8z/FnIU3WxCk6hPL9SSWeA== -"@babel/parser@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.5.tgz#4cd2f346261061b2518873ffecdf1612cb032829" - integrity sha512-TM8C+xtH/9n1qzX+JNHi7AN2zHMTiPUtspO0ZdHflW8KaskkALhMmuMHb4bCmNdv9VAPzJX3/bXqkVLnAvsPfg== - -"@babel/parser@^7.14.6": - version "7.14.7" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.14.7.tgz#6099720c8839ca865a2637e6c85852ead0bdb595" - integrity sha512-X67Z5y+VBJuHB/RjwECp8kSl5uYi0BvRbNeWqkaJCVh+LiTPl19WBUfG627psSgp9rSf6ojuXghQM3ha6qHHdA== - "@babel/parser@^7.16.7", "@babel/parser@^7.16.8", "@babel/parser@^7.9.0": version "7.16.8" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.16.8.tgz#61c243a3875f7d0b0962b0543a33ece6ff2f1f17" integrity 
sha512-i7jDUfrVBWc+7OKcBzEe5n7fbv3i2fWtxKzzCvOjnzSxMfWMigAhtfJ7qzZNGFNMsCCd67+uz553dYKWXPvCKw== +"@babel/parser@^7.18.5": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.18.5.tgz#337062363436a893a2d22faa60be5bb37091c83c" + integrity sha512-YZWVaglMiplo7v8f1oMQ5ZPQr0vn7HPeZXxXWsxXJRjGVrzUFn9OxFQl1sb5wzfootjA/yChhW84BV+383FSOw== + "@babel/plugin-proposal-async-generator-functions@^7.8.3": version "7.16.8" resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.16.8.tgz#3bdd1ebbe620804ea9416706cd67d60787504bc8" @@ -1174,15 +1115,6 @@ "@babel/parser" "^7.12.13" "@babel/types" "^7.12.13" -"@babel/template@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.14.5.tgz#a9bc9d8b33354ff6e55a9c60d1109200a68974f4" - integrity sha512-6Z3Po85sfxRGachLULUhOmvAaOo7xCvqGQtxINai2mEGPFm6pQ4z5QInFnUrRpfoSV60BnjyF5F3c+15fxFV1g== - dependencies: - "@babel/code-frame" "^7.14.5" - "@babel/parser" "^7.14.5" - "@babel/types" "^7.14.5" - "@babel/template@^7.16.7", "@babel/template@^7.8.6": version "7.16.7" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.16.7.tgz#8d126c8701fde4d66b264b3eba3d96f07666d155" @@ -1206,21 +1138,6 @@ debug "^4.1.0" globals "^11.1.0" -"@babel/traverse@^7.14.5": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.14.5.tgz#c111b0f58afab4fea3d3385a406f692748c59870" - integrity sha512-G3BiS15vevepdmFqmUc9X+64y0viZYygubAMO8SvBmKARuF6CPSZtH4Ng9vi/lrWlZFGe3FWdXNy835akH8Glg== - dependencies: - "@babel/code-frame" "^7.14.5" - "@babel/generator" "^7.14.5" - "@babel/helper-function-name" "^7.14.5" - "@babel/helper-hoist-variables" "^7.14.5" - "@babel/helper-split-export-declaration" "^7.14.5" - "@babel/parser" "^7.14.5" - "@babel/types" "^7.14.5" - debug "^4.1.0" - globals "^11.1.0" - "@babel/traverse@^7.16.7", "@babel/traverse@^7.16.8", "@babel/traverse@^7.9.0": 
version "7.16.8" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.16.8.tgz#bab2f2b09a5fe8a8d9cad22cbfe3ba1d126fef9c" @@ -1237,6 +1154,22 @@ debug "^4.1.0" globals "^11.1.0" +"@babel/traverse@^7.18.0", "@babel/traverse@^7.18.2", "@babel/traverse@^7.18.5": + version "7.18.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.18.5.tgz#94a8195ad9642801837988ab77f36e992d9a20cd" + integrity sha512-aKXj1KT66sBj0vVzk6rEeAO6Z9aiiQ68wfDgge3nHhA/my6xMM/7HGQUNumKZaoa2qUPQ5whJG9aAifsxUKfLA== + dependencies: + "@babel/code-frame" "^7.16.7" + "@babel/generator" "^7.18.2" + "@babel/helper-environment-visitor" "^7.18.2" + "@babel/helper-function-name" "^7.17.9" + "@babel/helper-hoist-variables" "^7.16.7" + "@babel/helper-split-export-declaration" "^7.16.7" + "@babel/parser" "^7.18.5" + "@babel/types" "^7.18.4" + debug "^4.1.0" + globals "^11.1.0" + "@babel/types@^7.0.0", "@babel/types@^7.12.13", "@babel/types@^7.13.12", "@babel/types@^7.14.0", "@babel/types@^7.14.2", "@babel/types@^7.14.4", "@babel/types@^7.2.0", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.0", "@babel/types@^7.4.4": version "7.14.4" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.14.4.tgz#bfd6980108168593b38b3eb48a24aa026b919bc0" @@ -1261,6 +1194,14 @@ "@babel/helper-validator-identifier" "^7.16.7" to-fast-properties "^2.0.0" +"@babel/types@^7.17.0", "@babel/types@^7.18.0", "@babel/types@^7.18.2", "@babel/types@^7.18.4": + version "7.18.4" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.18.4.tgz#27eae9b9fd18e9dccc3f9d6ad051336f307be354" + integrity sha512-ThN1mBcMq5pG/Vm2IcBmPPfyPXbd8S02rS+OBIDENdufvqC7Z/jHPCv9IcP01277aKtDI8g/2XysBN4hA8niiw== + dependencies: + "@babel/helper-validator-identifier" "^7.16.7" + to-fast-properties "^2.0.0" + "@bcoe/v8-coverage@^0.2.3": version "0.2.3" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" @@ -2093,6 +2034,46 @@ 
"@types/yargs" "^15.0.0" chalk "^4.0.0" +"@jridgewell/gen-mapping@^0.1.0": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.1" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz#cf92a983c83466b8c0ce9124fadeaf09f7c66ea9" + integrity sha512-GcHwniMlA2z+WFPWuY8lp3fsza0I8xPFMWL5+n8LYyP6PSvPrXf4+n8stDHZY2DM0zy9sVkRDy1jDI4XGzYVqg== + dependencies: + "@jridgewell/set-array" "^1.0.0" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + +"@jridgewell/resolve-uri@^3.0.3": + version "3.0.7" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz#30cd49820a962aff48c8fffc5cd760151fca61fe" + integrity sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA== + +"@jridgewell/set-array@^1.0.0": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.1.tgz#36a6acc93987adcf0ba50c66908bd0b70de8afea" + integrity sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ== + +"@jridgewell/sourcemap-codec@^1.4.10": + version "1.4.13" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz#b6461fb0c2964356c469e115f504c95ad97ab88c" + integrity sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w== + +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.13" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz#dcfe3e95f224c8fe97a87a5235defec999aa92ea" + integrity 
sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w== + dependencies: + "@jridgewell/resolve-uri" "^3.0.3" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@monaco-editor/loader@^1.1.1": version "1.2.0" resolved "https://registry.yarnpkg.com/@monaco-editor/loader/-/loader-1.2.0.tgz#373fad69973384624e3d9b60eefd786461a76acd" @@ -2219,11 +2200,11 @@ "@sinonjs/commons" "^1.7.0" "@stylelint/postcss-css-in-js@^0.37.2": - version "0.37.2" - resolved "https://registry.yarnpkg.com/@stylelint/postcss-css-in-js/-/postcss-css-in-js-0.37.2.tgz#7e5a84ad181f4234a2480803422a47b8749af3d2" - integrity sha512-nEhsFoJurt8oUmieT8qy4nk81WRHmJynmVwn/Vts08PL9fhgIsMhk1GId5yAN643OzqEEb5S/6At2TZW7pqPDA== + version "0.37.3" + resolved "https://registry.yarnpkg.com/@stylelint/postcss-css-in-js/-/postcss-css-in-js-0.37.3.tgz#d149a385e07ae365b0107314c084cb6c11adbf49" + integrity sha512-scLk3cSH1H9KggSniseb2KNAU5D9FWc3H7BxCSAIdtU9OWIyw0zkEZ9qEKHryRM+SExYXRKNb7tOOVNAsQ3iwg== dependencies: - "@babel/core" ">=7.9.0" + "@babel/core" "^7.17.9" "@stylelint/postcss-markdown@^0.36.2": version "0.36.2" @@ -2607,9 +2588,9 @@ integrity sha512-0d5Wd09ItQWH1qFbEyQ7oTQ3GZrMfth5JkbN3EvTKLXcHLRDSXeLnlvlOn0wvxVIwK5o2M8JzP/OWz7T3NRsbw== "@types/mdast@^3.0.0": - version "3.0.4" - resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.4.tgz#8ee6b5200751b6cadb9a043ca39612693ad6cb9e" - integrity sha512-gIdhbLDFlspL53xzol2hVzrXAbzt71erJHoOwQZWssjaiouOotf03lNtMmFm9VfFkvnLWccSVjUAZGQ5Kqw+jA== + version "3.0.10" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.10.tgz#4724244a82a4598884cbbe9bcfd73dff927ee8af" + integrity sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA== dependencies: "@types/unist" "*" @@ -2955,9 +2936,9 @@ source-map "^0.6.1" "@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2": - version "2.0.5" - resolved 
"https://registry.yarnpkg.com/@types/unist/-/unist-2.0.5.tgz#fdd299f23205c3455af88ce618dd65c14cb73e22" - integrity sha512-wnra4Vw9dopnuybR6HBywJ/URYpYrKLoepBTEtgfJup8Ahoi2zJECPP2cwiXp7btTvOT2CULv87aQRA4eZSP6g== + version "2.0.6" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-2.0.6.tgz#250a7b16c3b91f672a24552ec64678eeb1d3a08d" + integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== "@types/webpack-dev-server@*": version "3.11.4" @@ -3516,6 +3497,11 @@ ansi-regex@^5.0.0: resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + ansi-styles@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" @@ -3785,7 +3771,7 @@ attr-accept@^2.2.1: resolved "https://registry.yarnpkg.com/attr-accept/-/attr-accept-2.2.2.tgz#646613809660110749e92f2c10833b70968d929b" integrity sha512-7prDjvt9HmqiZ0cl5CRjtS84sEyhsHP2coDkaZKRKVfCDo9s7iw7ChVmar78Gu9pC4SoR/28wFu/G5JJhTnqEg== -autoprefixer@^9.6.1, autoprefixer@^9.8.6: +autoprefixer@^9.6.1: version "9.8.6" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.8.6.tgz#3b73594ca1bf9266320c5acf1588d74dea74210f" integrity sha512-XrvP4VVHdRBCdX1S3WXVD8+RyG9qeb1D5Sn1DeLiG2xfSpzellk5k54xbUERJ3M5DggQxes39UGOTP8CFrEGbg== @@ -3798,6 +3784,19 @@ autoprefixer@^9.6.1, autoprefixer@^9.8.6: postcss "^7.0.32" postcss-value-parser "^4.1.0" +autoprefixer@^9.8.6: + version "9.8.8" + resolved 
"https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.8.8.tgz#fd4bd4595385fa6f06599de749a4d5f7a474957a" + integrity sha512-eM9d/swFopRt5gdJ7jrpCwgvEMIayITpojhkkSMRsFHYuH5bkSQ4p/9qTEHtmNudUZh22Tehu7I6CxAW0IXTKA== + dependencies: + browserslist "^4.12.0" + caniuse-lite "^1.0.30001109" + normalize-range "^0.1.2" + num2fraction "^1.2.2" + picocolors "^0.2.1" + postcss "^7.0.32" + postcss-value-parser "^4.1.0" + aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" @@ -4181,6 +4180,17 @@ browserslist@^4.17.5, browserslist@^4.19.1, browserslist@^4.9.1: node-releases "^2.0.1" picocolors "^1.0.0" +browserslist@^4.20.2: + version "4.20.4" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.20.4.tgz#98096c9042af689ee1e0271333dbc564b8ce4477" + integrity sha512-ok1d+1WpnU24XYN7oC3QWgTyMhY/avPJ/r9T00xxvUOIparA/gc+UPUMaod3i+G6s+nI2nUb9xZ5k794uIwShw== + dependencies: + caniuse-lite "^1.0.30001349" + electron-to-chromium "^1.4.147" + escalade "^3.1.1" + node-releases "^2.0.5" + picocolors "^1.0.0" + bs-logger@0.x: version "0.2.6" resolved "https://registry.yarnpkg.com/bs-logger/-/bs-logger-0.2.6.tgz#eb7d365307a72cf974cc6cda76b68354ad336bd8" @@ -4381,6 +4391,11 @@ caniuse-lite@^1.0.30001286: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001299.tgz#d753bf6444ed401eb503cbbe17aa3e1451b5a68c" integrity sha512-iujN4+x7QzqA2NCSrS5VUy+4gLmRd4xv6vbBBsmfVqTx8bLAD8097euLqQgKxSVLvxjSDcvF1T/i9ocgnUFexw== +caniuse-lite@^1.0.30001349: + version "1.0.30001352" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001352.tgz#cc6f5da3f983979ad1e2cdbae0505dccaa7c6a12" + integrity sha512-GUgH8w6YergqPQDGWhJGt8GDRnY0L/iJVQcU3eJ46GYf52R8tk0Wxp0PymuFVZboJYXGiCqwozAYZNRjVj6IcA== + capital-case@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/capital-case/-/capital-case-1.0.4.tgz#9d130292353c9249f6b00fa5852bee38a717e669" @@ 
-4430,7 +4445,7 @@ chalk@^3.0.0: ansi-styles "^4.1.0" supports-color "^7.1.0" -chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1: +chalk@^4.0.0, chalk@^4.1.0: version "4.1.1" resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" integrity sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg== @@ -4438,6 +4453,14 @@ chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.1: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@^4.1.1: + version "4.1.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== + dependencies: + ansi-styles "^4.1.0" + supports-color "^7.1.0" + change-case@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/change-case/-/change-case-3.1.0.tgz#0e611b7edc9952df2e8513b27b42de72647dd17e" @@ -5756,7 +5779,14 @@ debug@^3.1.0, debug@^3.2.7: dependencies: ms "^2.1.1" -debug@^4.0.0, debug@^4.2.0: +debug@^4.0.0: + version "4.3.4" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== + dependencies: + ms "2.1.2" + +debug@^4.2.0: version "4.3.2" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.2.tgz#f0a49c18ac8779e31d4a0c6029dfb76873c7428b" integrity sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw== @@ -6113,6 +6143,11 @@ electron-to-chromium@^1.3.723: resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.750.tgz#7e5ef6f478316b0bd656af5942fe502610e97eaf" integrity sha512-Eqy9eHNepZxJXT+Pc5++zvEi5nQ6AGikwFYDCYwXUFBr+ynJ6pDG7MzZmwGYCIuXShLJM0n4bq+aoKDmvSGJ8A== +electron-to-chromium@^1.4.147: + version "1.4.152" + resolved 
"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.152.tgz#7dedbe8f3dc1c597088982a203f392e60f7ee90a" + integrity sha512-jk4Ju5SGZAQQJ1iI4Rgru7dDlvkQPLpNPWH9gIZmwCD4YteA5Bbk1xPcPDUf5jUYs3e1e80RXdi8XgKQZaigeg== + electron-to-chromium@^1.4.17: version "1.4.45" resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.45.tgz#cf1144091d6683cbd45a231954a745f02fb24598" @@ -6768,7 +6803,7 @@ fast-glob@^2.0.2: merge2 "^1.2.3" micromatch "^3.1.10" -fast-glob@^3.0.3, fast-glob@^3.1.1: +fast-glob@^3.0.3: version "3.2.5" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661" integrity sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg== @@ -6780,18 +6815,7 @@ fast-glob@^3.0.3, fast-glob@^3.1.1: micromatch "^4.0.2" picomatch "^2.2.1" -fast-glob@^3.2.5: - version "3.2.7" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.7.tgz#fd6cb7a2d7e9aa7a7846111e85a196d6b2f766a1" - integrity sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - -fast-glob@^3.2.9: +fast-glob@^3.2.5, fast-glob@^3.2.9: version "3.2.11" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== @@ -7380,7 +7404,7 @@ globby@^10.0.1: merge2 "^1.2.3" slash "^3.0.0" -globby@^11.0.1: +globby@^11.0.1, globby@^11.0.3: version "11.1.0" resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== @@ -7392,18 +7416,6 @@ globby@^11.0.1: merge2 "^1.4.1" slash 
"^3.0.0" -globby@^11.0.3: - version "11.0.4" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5" - integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - globby@^7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/globby/-/globby-7.1.1.tgz#fb2ccff9401f8600945dfada97440cca972b8680" @@ -7709,9 +7721,9 @@ html-minifier@^3.2.3, html-minifier@^3.5.8: uglify-js "3.4.x" html-tags@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.1.0.tgz#7b5e6f7e665e9fb41f30007ed9e0d41e97fb2140" - integrity sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg== + version "3.2.0" + resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.2.0.tgz#dbb3518d20b726524e4dd43de397eb0a95726961" + integrity sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg== html-webpack-plugin@^3.2.0: version "3.2.0" @@ -7851,12 +7863,12 @@ ignore@^4.0.6: resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" integrity sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== -ignore@^5.1.1, ignore@^5.1.4, ignore@^5.1.8: +ignore@^5.1.1: version "5.1.8" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57" integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw== -ignore@^5.2.0: +ignore@^5.1.8, ignore@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== @@ -9316,6 
+9328,11 @@ json5@^1.0.1: dependencies: minimist "^1.2.0" +json5@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== + jsonfile@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" @@ -10146,13 +10163,20 @@ mobx@^5.13.0: resolved "https://registry.yarnpkg.com/mobx/-/mobx-5.15.7.tgz#b9a5f2b6251f5d96980d13c78e9b5d8d4ce22665" integrity sha512-wyM3FghTkhmC+hQjyPGGFdpehrcX1KOXsDuERhfK2YbJemkUhEB+6wzEN639T21onxlfYBmriA1PFnvxTUhcKw== -moment-timezone@*, moment-timezone@^0.5.28: +moment-timezone@*: version "0.5.33" resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.33.tgz#b252fd6bb57f341c9b59a5ab61a8e51a73bbd22c" integrity sha512-PTc2vcT8K9J5/9rDEPe5czSIKgLoGsH8UNpA4qZTVw0Vd/Uz19geE9abbIOQKaAQFcnQ3v5YEXrbSc5BpshH+w== dependencies: moment ">= 2.9.0" +moment-timezone@^0.5.34: + version "0.5.34" + resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.34.tgz#a75938f7476b88f155d3504a9343f7519d9a405c" + integrity sha512-3zAEHh2hKUs3EXLESx/wsgw6IQdusOT8Bxm3D9UrHPQR7zlMmzwybC8zHEM1tQ4LJwP7fcxrWr8tuBg05fFCbg== + dependencies: + moment ">= 2.9.0" + moment@2.24.0: version "2.24.0" resolved "https://registry.yarnpkg.com/moment/-/moment-2.24.0.tgz#0d055d53f5052aa653c9f6eb68bb5d12bf5c2b5b" @@ -10362,6 +10386,11 @@ node-releases@^2.0.1: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.1.tgz#3d1d395f204f1f2f29a54358b9fb678765ad2fc5" integrity sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA== +node-releases@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.5.tgz#280ed5bc3eba0d96ce44897d8aee478bfb3d9666" + integrity 
sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q== + normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" @@ -10402,7 +10431,7 @@ normalize-range@^0.1.2: normalize-selector@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/normalize-selector/-/normalize-selector-0.2.0.tgz#d0b145eb691189c63a78d201dc4fdb1293ef0c03" - integrity sha1-0LFF62kRicY6eNIB3E/bEpPvDAM= + integrity sha512-dxvWdI8gw6eAvk9BlPffgEoGfM7AdijoCwOEJge3e3ulT2XLgmU7KvvxprOaCu05Q1uGRHmOhHe1r6emZoKyFw== normalize-url@1.9.1: version "1.9.1" @@ -11758,7 +11787,7 @@ postcss-selector-parser@^5.0.0-rc.3, postcss-selector-parser@^5.0.0-rc.4: indexes-of "^1.0.1" uniq "^1.0.1" -postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.5: +postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2: version "6.0.6" resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.6.tgz#2c5bba8174ac2f6981ab631a42ab0ee54af332ea" integrity sha512-9LXrvaaX3+mcv5xkg5kFwqSzSH1JIObIx51PrndZwlmznwXRfxMddDvo9gve3gVR8ZTKgoFDdWkbRFmEhT4PMg== @@ -11766,6 +11795,14 @@ postcss-selector-parser@^6.0.0, postcss-selector-parser@^6.0.2, postcss-selector cssesc "^3.0.0" util-deprecate "^1.0.2" +postcss-selector-parser@^6.0.5: + version "6.0.10" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + postcss-svgo@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-4.0.3.tgz#343a2cdbac9505d416243d496f724f38894c941e" @@ -11817,7 +11854,7 @@ postcss@^7.0.0, 
postcss@^7.0.14, postcss@^7.0.17, postcss@^7.0.2, postcss@^7.0.2 source-map "^0.6.1" supports-color "^6.1.0" -postcss@^7.0.1, postcss@^7.0.27: +postcss@^7.0.1, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.35: version "7.0.39" resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.39.tgz#9624375d965630e2e1f2c02a935c82a59cb48309" integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== @@ -11825,15 +11862,6 @@ postcss@^7.0.1, postcss@^7.0.27: picocolors "^0.2.1" source-map "^0.6.1" -postcss@^7.0.21, postcss@^7.0.35: - version "7.0.36" - resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.36.tgz#056f8cffa939662a8f5905950c07d5285644dfcb" - integrity sha512-BebJSIUMwJHRH0HAQoxN4u1CN86glsrwsW0q7T+/m44eXOUAxSNdHRkNZPYz5vVUbg17hFgOQDE7fZk7li3pZw== - dependencies: - chalk "^2.4.2" - source-map "^0.6.1" - supports-color "^6.1.0" - prefix-style@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/prefix-style/-/prefix-style-2.0.1.tgz#66bba9a870cfda308a5dc20e85e9120932c95a06" @@ -12994,7 +13022,7 @@ repeat-element@^1.1.2: repeat-string@^1.0.0, repeat-string@^1.5.4, repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= + integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== replace-in-file-webpack-plugin@^1.0.6: version "1.0.6" @@ -13925,7 +13953,7 @@ string-width@^2.0.0, string-width@^2.1.0, string-width@^2.1.1: is-fullwidth-code-point "^2.0.0" strip-ansi "^4.0.0" -string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2: +string-width@^4.1.0, string-width@^4.2.0: version "4.2.2" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" integrity 
sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== @@ -13934,6 +13962,15 @@ string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.0" +string-width@^4.2.2, string-width@^4.2.3: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + string.prototype.matchall@^4.0.2: version "4.0.5" resolved "https://registry.yarnpkg.com/string.prototype.matchall/-/string.prototype.matchall-4.0.5.tgz#59370644e1db7e4c0c045277690cf7b01203c4da" @@ -14015,6 +14052,13 @@ strip-ansi@^5.1.0, strip-ansi@^5.2.0: dependencies: ansi-regex "^4.1.0" +strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + strip-bom@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" @@ -14251,7 +14295,7 @@ table-layout@^0.4.3: typical "^2.6.1" wordwrapjs "^3.0.0" -table@^6.0.4, table@^6.6.0: +table@^6.0.4: version "6.7.1" resolved "https://registry.yarnpkg.com/table/-/table-6.7.1.tgz#ee05592b7143831a8c94f3cee6aae4c1ccef33e2" integrity sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg== @@ -14263,6 +14307,17 @@ table@^6.0.4, table@^6.6.0: string-width "^4.2.0" strip-ansi "^6.0.0" +table@^6.6.0: + version "6.8.0" + resolved "https://registry.yarnpkg.com/table/-/table-6.8.0.tgz#87e28f14fa4321c3377ba286f07b79b281a3b3ca" + integrity 
sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA== + dependencies: + ajv "^8.0.1" + lodash.truncate "^4.4.2" + slice-ansi "^4.0.0" + string-width "^4.2.3" + strip-ansi "^6.0.1" + tapable@^1.0.0, tapable@^1.1.3: version "1.1.3" resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2" @@ -14794,9 +14849,9 @@ unicode-property-aliases-ecmascript@^1.0.4: integrity sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg== unified@^9.1.0: - version "9.2.1" - resolved "https://registry.yarnpkg.com/unified/-/unified-9.2.1.tgz#ae18d5674c114021bfdbdf73865ca60f410215a3" - integrity sha512-juWjuI8Z4xFg8pJbnEZ41b5xjGUWGHqXALmBZ3FC3WX0PIx1CZBIIJ6mXbYMcf6Yw4Fi0rFUTA1cdz/BglbOhA== + version "9.2.2" + resolved "https://registry.yarnpkg.com/unified/-/unified-9.2.2.tgz#67649a1abfc3ab85d2969502902775eb03146975" + integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== dependencies: bail "^1.0.0" extend "^3.0.0" diff --git a/screenshot.png b/screenshot.png index d9adbe25..a0d5d3cf 100644 Binary files a/screenshot.png and b/screenshot.png differ