diff --git a/docs/sources/manage/notify/phone-calls-sms/index.md b/docs/sources/manage/notify/phone-calls-sms/index.md index 5c11d456..b41de723 100644 --- a/docs/sources/manage/notify/phone-calls-sms/index.md +++ b/docs/sources/manage/notify/phone-calls-sms/index.md @@ -31,7 +31,8 @@ There are no specific limits, but we reserve the right to stop sending sms/calls ## Route incoming calls to the engineer who is on-call -Grafana OnCall does not provide a phone number for routing incoming requests. [GH Issue.](https://github.com/grafana/oncall/issues/1459) +See our [docs][Incoming Call Routing], and [blog post](https://grafana.com/blog/2024/06/10/a-guide-to-grafana-oncall-sms-and-call-routing/), +on Advanced SMS & call routing configuration, for a guide on how to configure incoming call routing. ## Is there a list of pre-defined phone numbers? @@ -50,4 +51,7 @@ is able to notify you. Also we suggest to back up Phone Calls and SMS with other [Grafana OSS-Cloud Setup]: "/docs/oncall/ -> /docs/oncall//set-up/open-source#grafana-oss-cloud-setup" [Grafana OSS-Cloud Setup]: "/docs/grafana-cloud/ -> /docs/grafana-cloud/alerting-and-irm/oncall/set-up/open-source#grafana-oss-cloud-setup" + +[Incoming Call Routing]: "/docs/oncall/ -> /docs/oncall//configure/live-call-routing" +[Incoming Call Routing]: "/docs/grafana-cloud/ -> /docs/grafana-cloud/alerting-and-irm/oncall/configure/live-call-routing" {{% /docs/reference %}} diff --git a/engine/apps/alerts/incident_appearance/renderers/slack_renderer.py b/engine/apps/alerts/incident_appearance/renderers/slack_renderer.py index c99d5957..7f2eded8 100644 --- a/engine/apps/alerts/incident_appearance/renderers/slack_renderer.py +++ b/engine/apps/alerts/incident_appearance/renderers/slack_renderer.py @@ -91,19 +91,22 @@ class AlertGroupSlackRenderer(AlertGroupBaseRenderer): def render_alert_group_attachments(self): attachments = self.alert_renderer.render_alert_attachments() + alert_group = self.alert_group + root_alert_group = 
alert_group.root_alert_group + + if root_alert_group is not None: + slack_message = root_alert_group.slack_message + root_ag_name = root_alert_group.long_verbose_name_without_formatting - if self.alert_group.root_alert_group is not None: - slack_message = self.alert_group.root_alert_group.slack_message - root_ag_name = self.alert_group.root_alert_group.long_verbose_name_without_formatting - if slack_message: - footer_text = f"Attached to *<{slack_message.permalink}|{root_ag_name}>*" - else: - footer_text = (f"Attached to *{root_ag_name}*",) attachments.extend( [ { "fallback": "Subscription...", - "footer": footer_text, + "footer": ( + f"Attached to *<{slack_message.permalink}|{root_ag_name}>*" + if slack_message + else f"Attached to *{root_ag_name}*" + ), "color": "danger", "mrkdwn": True, "callback_id": "subscription notification", @@ -118,42 +121,44 @@ class AlertGroupSlackRenderer(AlertGroupBaseRenderer): } ] ) - if self.alert_group.root_alert_group.acknowledged: + + if root_alert_group.acknowledged: attachments[0]["color"] = "warning" - if self.alert_group.root_alert_group.resolved: + if root_alert_group.resolved: attachments[0]["color"] = "good" attachments[0]["actions"] = [] + return attachments + # Attaching resolve information + if alert_group.resolved: + attachments.append( + { + "fallback": "Resolved...", + "text": alert_group.get_resolve_text(mention_user=False), + "callback_id": "alert", + } + ) + elif alert_group.acknowledged: + attachments.append( + { + "fallback": "Acknowledged...", + "text": alert_group.get_acknowledge_text(mention_user=False), + "callback_id": "alert", + } + ) + # Attaching buttons - if self.alert_group.wiped_at is None: + if alert_group.wiped_at is None: attachment_alert_buttons = self._get_buttons_attachments() if len(attachment_alert_buttons["blocks"][0]["elements"]) > 0: attachments.append(attachment_alert_buttons) - # Attaching resolve information - if self.alert_group.resolved: - resolve_attachment = { - "fallback": 
"Resolved...", - "text": self.alert_group.get_resolve_text(mention_user=False), - "callback_id": "alert", - } - attachments.append(resolve_attachment) - else: - if self.alert_group.acknowledged: - ack_attachment = { - "fallback": "Acknowledged...", - "text": self.alert_group.get_acknowledge_text(mention_user=False), - "callback_id": "alert", - } - attachments.append(ack_attachment) - # Attaching invitation info - if not self.alert_group.resolved: + if not alert_group.resolved: attachments += self._get_invitation_attachment() - attachments = self._set_attachments_color(attachments) - return attachments + return self._set_attachments_color(attachments) def _set_attachments_color(self, attachments): color = "#a30200" # danger @@ -174,155 +179,110 @@ class AlertGroupSlackRenderer(AlertGroupBaseRenderer): def _get_buttons_blocks(self): from apps.alerts.models import AlertGroup - buttons = [] - if not self.alert_group.is_maintenance_incident: - if not self.alert_group.resolved: - if not self.alert_group.acknowledged: - buttons.append( - { - "text": { - "type": "plain_text", - "text": "Acknowledge", - "emoji": True, - }, - "type": "button", - "value": self._alert_group_action_value(), - "action_id": ScenarioStep.get_step( - "distribute_alerts", - "AcknowledgeGroupStep", - ).routing_uid(), - }, - ) - else: - buttons.append( - { - "text": { - "type": "plain_text", - "text": "Unacknowledge", - "emoji": True, - }, - "type": "button", - "value": self._alert_group_action_value(), - "action_id": ScenarioStep.get_step( - "distribute_alerts", - "UnAcknowledgeGroupStep", - ).routing_uid(), - }, - ) - buttons.append( - { - "text": {"type": "plain_text", "text": "Resolve", "emoji": True}, - "type": "button", - "style": "primary", - "value": self._alert_group_action_value(), - "action_id": ScenarioStep.get_step("distribute_alerts", "ResolveGroupStep").routing_uid(), - }, - ) + alert_group = self.alert_group + integration = alert_group.channel + grafana_incident_enabled = 
integration.organization.is_grafana_incident_enabled - if not self.alert_group.silenced: - silence_options = [ - { - "text": {"type": "plain_text", "text": text, "emoji": True}, - "value": self._alert_group_action_value(delay=value), - } - for value, text in AlertGroup.SILENCE_DELAY_OPTIONS - ] - buttons.append( - { - "placeholder": {"type": "plain_text", "text": "Silence", "emoji": True}, - "type": "static_select", - "options": silence_options, - "action_id": ScenarioStep.get_step("distribute_alerts", "SilenceGroupStep").routing_uid(), - } - ) - else: - buttons.append( - { - "text": {"type": "plain_text", "text": "Unsilence", "emoji": True}, - "type": "button", - "value": self._alert_group_action_value(), - "action_id": ScenarioStep.get_step("distribute_alerts", "UnSilenceGroupStep").routing_uid(), - }, - ) - - buttons.append( - { - "text": {"type": "plain_text", "text": "Responders", "emoji": True}, - "type": "button", - "value": self._alert_group_action_value(), - "action_id": ScenarioStep.get_step("manage_responders", "StartManageResponders").routing_uid(), - }, - ) - - attach_button = { - "text": {"type": "plain_text", "text": "Attach to ...", "emoji": True}, - "type": "button", - "action_id": ScenarioStep.get_step("distribute_alerts", "SelectAttachGroupStep").routing_uid(), - "value": self._alert_group_action_value(), - } - buttons.append(attach_button) - else: - buttons.append( - { - "text": {"type": "plain_text", "text": "Unresolve", "emoji": True}, - "type": "button", - "value": self._alert_group_action_value(), - "action_id": ScenarioStep.get_step("distribute_alerts", "UnResolveGroupStep").routing_uid(), - }, - ) - - if self.alert_group.channel.is_available_for_custom_templates: - buttons.append( - { - "text": {"type": "plain_text", "text": ":mag: Format Alert", "emoji": True}, - "type": "button", - "value": self._alert_group_action_value(), - "action_id": ScenarioStep.get_step( - "alertgroup_appearance", "OpenAlertAppearanceDialogStep" - ).routing_uid(), 
- }, - ) - - # Resolution notes button - resolution_notes_count = self.alert_group.resolution_notes.count() - resolution_notes_button = { + def _make_button(text, action_id_step_class_name, action_id_scenario_step="distribute_alerts"): + return { "text": { "type": "plain_text", - "text": "Resolution notes [{}]".format(resolution_notes_count), + "text": text, "emoji": True, }, "type": "button", - "action_id": ScenarioStep.get_step("resolution_note", "ResolutionNoteModalStep").routing_uid(), - "value": self._alert_group_action_value(resolution_note_window_action="edit"), + "value": self._alert_group_action_value(), + "action_id": ScenarioStep.get_step(action_id_scenario_step, action_id_step_class_name).routing_uid(), } - if resolution_notes_count == 0: - resolution_notes_button["style"] = "primary" - resolution_notes_button["text"]["text"] = "Add Resolution notes" + + acknowledge_button = _make_button("Acknowledge", "AcknowledgeGroupStep") + unacknowledge_button = _make_button("Unacknowledge", "UnAcknowledgeGroupStep") + resolve_button = _make_button("Resolve", "ResolveGroupStep") + unresolve_button = _make_button("Unresolve", "UnResolveGroupStep") + unsilence_button = _make_button("Unsilence", "UnSilenceGroupStep") + responders_button = _make_button("Responders", "StartManageResponders", "manage_responders") + attach_button = _make_button("Attach to ...", "SelectAttachGroupStep") + format_alert_button = _make_button( + ":mag: Format Alert", "OpenAlertAppearanceDialogStep", "alertgroup_appearance" + ) + + resolution_notes_count = alert_group.resolution_notes.count() + resolution_notes_button = { + "text": { + "type": "plain_text", + "text": f"Resolution notes [{resolution_notes_count}]", + "emoji": True, + }, + "type": "button", + "action_id": ScenarioStep.get_step("resolution_note", "ResolutionNoteModalStep").routing_uid(), + "value": self._alert_group_action_value(resolution_note_window_action="edit"), + } + if resolution_notes_count == 0: + 
resolution_notes_button["style"] = "primary" + resolution_notes_button["text"]["text"] = "Add Resolution notes" + + silence_button = { + "placeholder": { + "type": "plain_text", + "text": "Silence", + "emoji": True, + }, + "type": "static_select", + "options": [ + { + "text": {"type": "plain_text", "text": text, "emoji": True}, + "value": self._alert_group_action_value(delay=value), + } + for value, text in AlertGroup.SILENCE_DELAY_OPTIONS + ], + "action_id": ScenarioStep.get_step("distribute_alerts", "SilenceGroupStep").routing_uid(), + } + + declare_incident_button = { + "type": "button", + "text": { + "type": "plain_text", + "text": ":fire: Declare incident", + "emoji": True, + }, + "value": "declare_incident", + "url": self.alert_group.declare_incident_link, + "action_id": ScenarioStep.get_step("declare_incident", "DeclareIncidentStep").routing_uid(), + } + + buttons = [] + if not alert_group.is_maintenance_incident: + if not alert_group.resolved: + if not alert_group.acknowledged: + buttons.append(acknowledge_button) + else: + if grafana_incident_enabled: + buttons.append(declare_incident_button) + buttons.append(unacknowledge_button) + + buttons.extend( + [ + resolve_button, + unsilence_button if alert_group.silenced else silence_button, + responders_button, + attach_button, + ] + ) + else: + buttons.append(unresolve_button) + + if integration.is_available_for_custom_templates: + buttons.append(format_alert_button) + buttons.append(resolution_notes_button) - # Declare incident button - if self.alert_group.channel.organization.is_grafana_incident_enabled: - incident_button = { - "type": "button", - "text": {"type": "plain_text", "text": ":fire: Declare incident", "emoji": True}, - "value": "declare_incident", - "url": self.alert_group.declare_incident_link, - "action_id": ScenarioStep.get_step("declare_incident", "DeclareIncidentStep").routing_uid(), - } - buttons.append(incident_button) + if grafana_incident_enabled and not alert_group.acknowledged: + 
buttons.append(declare_incident_button) else: - if not self.alert_group.resolved: - buttons.append( - { - "text": {"type": "plain_text", "text": "Resolve", "emoji": True}, - "type": "button", - "style": "primary", - "value": self._alert_group_action_value(), - "action_id": ScenarioStep.get_step("distribute_alerts", "ResolveGroupStep").routing_uid(), - }, - ) - blocks = [{"type": "actions", "elements": buttons}] - return blocks + if not alert_group.resolved: + buttons.append(resolve_button) + + return [{"type": "actions", "elements": buttons}] def _get_invitation_attachment(self): from apps.alerts.models import Invitation diff --git a/engine/apps/slack/tests/test_slack_renderer.py b/engine/apps/slack/tests/test_slack_renderer.py index 5d78a09e..14774307 100644 --- a/engine/apps/slack/tests/test_slack_renderer.py +++ b/engine/apps/slack/tests/test_slack_renderer.py @@ -38,7 +38,7 @@ def test_slack_renderer_unacknowledge_button( alert_group = make_alert_group(alert_receive_channel, acknowledged=True) make_alert(alert_group=alert_group, raw_request_data={}) - elements = AlertGroupSlackRenderer(alert_group).render_alert_group_attachments()[0]["blocks"][0]["elements"] + elements = AlertGroupSlackRenderer(alert_group).render_alert_group_attachments()[1]["blocks"][0]["elements"] button = elements[0] assert button["text"]["text"] == "Unacknowledge" @@ -82,7 +82,7 @@ def test_slack_renderer_unresolve_button(make_organization, make_alert_receive_c alert_group = make_alert_group(alert_receive_channel, resolved=True) make_alert(alert_group=alert_group, raw_request_data={}) - elements = AlertGroupSlackRenderer(alert_group).render_alert_group_attachments()[0]["blocks"][0]["elements"] + elements = AlertGroupSlackRenderer(alert_group).render_alert_group_attachments()[1]["blocks"][0]["elements"] button = elements[0] assert button["text"]["text"] == "Unresolve" diff --git a/engine/requirements.txt b/engine/requirements.txt index 50b0d193..5ee51fd0 100644 --- a/engine/requirements.txt 
+++ b/engine/requirements.txt @@ -34,7 +34,7 @@ cachetools==4.2.2 # via # google-auth # python-telegram-bot -celery==5.3.1 +celery[redis]==5.3.1 # via -r requirements.in certifi==2024.2.2 # via @@ -157,7 +157,7 @@ firebase-admin==5.4.0 # via fcm-django flask==3.0.2 # via slack-export-viewer -google-api-core==2.17.0 +google-api-core[grpc]==2.17.0 # via # firebase-admin # google-api-python-client @@ -416,11 +416,6 @@ rsa==4.9 # via google-auth s3transfer==0.10.0 # via boto3 -setuptools==70.0.0 - # via - # -r requirements.in - # apscheduler - # opentelemetry-instrumentation six==1.16.0 # via # apscheduler @@ -484,5 +479,5 @@ wrapt==1.16.0 # opentelemetry-instrumentation x-wr-timezone==0.0.6 # via recurring-ical-events -zipp==3.17.0 +zipp==3.19.1 # via importlib-metadata diff --git a/grafana-plugin/src/containers/OutgoingWebhookForm/OutgoingWebhookFormFields.tsx b/grafana-plugin/src/containers/OutgoingWebhookForm/OutgoingWebhookFormFields.tsx index 02042de8..041d6200 100644 --- a/grafana-plugin/src/containers/OutgoingWebhookForm/OutgoingWebhookFormFields.tsx +++ b/grafana-plugin/src/containers/OutgoingWebhookForm/OutgoingWebhookFormFields.tsx @@ -2,6 +2,7 @@ import React from 'react'; import { SelectableValue } from '@grafana/data'; import { Button, Field, Input, RadioButtonList, Select, Switch, useStyles2 } from '@grafana/ui'; +import { observer } from 'mobx-react'; import Emoji from 'react-emoji-render'; import { Controller, useFormContext } from 'react-hook-form'; @@ -45,345 +46,344 @@ const FORWARD_RADIO_OPTIONS = [ }, ]; -export const OutgoingWebhookFormFields = ({ - preset, - hasLabelsFeature, - onTemplateEditClick, -}: OutgoingWebhookFormFieldsProps) => { - const { grafanaTeamStore, alertReceiveChannelStore } = useStore(); - const { - control, - formState: { errors }, - watch, - } = useFormContext(); +export const OutgoingWebhookFormFields: React.FC = observer( + ({ preset, hasLabelsFeature, onTemplateEditClick }) => { + const { grafanaTeamStore, 
alertReceiveChannelStore } = useStore(); + const { items, fetchItems, fetchItemById } = alertReceiveChannelStore; + const { + control, + formState: { errors }, + watch, + } = useFormContext(); - const forwardAll = watch(WebhookFormFieldName.ForwardAll); - const styles = useStyles2(getStyles); + const forwardAll = watch(WebhookFormFieldName.ForwardAll); + const styles = useStyles2(getStyles); - const controls = ( - <> - ( - - - - )} - /> - ( - - - - )} - /> - ( - - - allowClear - items={grafanaTeamStore.items} - fetchItemsFn={grafanaTeamStore.updateItems} - fetchItemFn={grafanaTeamStore.fetchItemById} - getSearchResult={grafanaTeamStore.getSearchResult} - displayField="name" - valueField="id" - placeholder="Choose (Optional)" - value={field.value} - onChange={field.onChange} - /> - - )} - /> - ( - - field.onChange(value)} - /> - - )} - /> - ( - - - isMulti - placeholder="Choose (Optional)" - items={alertReceiveChannelStore.items} - fetchItemsFn={alertReceiveChannelStore.fetchItems} - fetchItemFn={alertReceiveChannelStore.fetchItemById} - getSearchResult={() => AlertReceiveChannelHelper.getSearchResult(alertReceiveChannelStore)} - displayField="verbal_name" - valueField="id" - getOptionLabel={(item: SelectableValue) => } - value={field.value} - onChange={field.onChange} - /> - - )} - /> - {hasLabelsFeature && ( + const controls = ( + <> ( - + + + )} /> - )} - ( - -
-
- -
-
-
- )} - /> - ( - -
-
- -
-
+
)} /> - - - ); - return ( - <> - {React.Children.toArray(controls.props.children).filter( - (child) => !preset?.controlled_fields.includes((child as React.ReactElement).props.name) - )} - - ); -}; + + + ( + opt.boolean === field.value)?.value} + onChange={(value) => field.onChange(value === FORWARD)} + /> + )} + /> + + + ( + ( + +
+
+ +
+
+
+ )} + /> + )} + /> +
+ + ); + + return ( + <> + {React.Children.toArray(controls.props.children).filter( + (child) => !preset?.controlled_fields.includes((child as React.ReactElement).props.name) + )} + + ); + } +); diff --git a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx index bc4f3d6a..b5b63239 100644 --- a/grafana-plugin/src/containers/RotationForm/RotationForm.tsx +++ b/grafana-plugin/src/containers/RotationForm/RotationForm.tsx @@ -351,17 +351,15 @@ export const RotationForm = observer((props: RotationFormProps) => { setRotationStart(value); setShiftStart(value); - setShiftEnd( - isLimitShiftEnabled - ? dayJSAddWithDSTFixed({ - baseDate: value, - addParams: [activePeriod, 'seconds'], - }) - : dayJSAddWithDSTFixed({ - baseDate: value, - addParams: [recurrenceNum, repeatEveryPeriodToUnitName[recurrencePeriod]], - }) - ); + let addParams; + if (isLimitShiftEnabled) { + addParams = [activePeriod, 'seconds']; + } else if (isMaskedByWeekdays) { + addParams = [24, 'hours']; + } else { + addParams = [recurrenceNum, repeatEveryPeriodToUnitName[recurrencePeriod]]; + } + setShiftEnd(dayJSAddWithDSTFixed({ baseDate: value, addParams })); }; const handleActivePeriodChange = useCallback( @@ -422,7 +420,7 @@ export const RotationForm = observer((props: RotationFormProps) => { setIsLimitShiftEnabled(value); if (!value) { - if (isMaskedByWeekdays && shiftEnd.diff(shiftStart, 'hours') > 24) { + if (isMaskedByWeekdays) { setShiftEnd( dayJSAddWithDSTFixed({ baseDate: shiftStart, @@ -477,7 +475,9 @@ export const RotationForm = observer((props: RotationFormProps) => { const isMonthlyRecurrence = shift.frequency === RepeatEveryPeriod.MONTHS; const activeOnSelectedPartOfDay = - repeatEveryInSeconds(shift.frequency, shift.interval) !== shiftEnd.diff(shiftStart, 'seconds') && + ((!isMaskedByWeekdays && + repeatEveryInSeconds(shift.frequency, shift.interval) !== shiftEnd.diff(shiftStart, 'seconds')) || + (isMaskedByWeekdays && 
shiftEnd.diff(shiftStart, 'hour') < 24)) && // Disallow for Monthly view, except if it's masked by week days (!isMonthlyRecurrence || (isMonthlyRecurrence && isMaskedByWeekdays)); diff --git a/grafana-plugin/src/models/alert_receive_channel/alert_receive_channel.ts b/grafana-plugin/src/models/alert_receive_channel/alert_receive_channel.ts index eff96663..4173e8b7 100644 --- a/grafana-plugin/src/models/alert_receive_channel/alert_receive_channel.ts +++ b/grafana-plugin/src/models/alert_receive_channel/alert_receive_channel.ts @@ -121,6 +121,7 @@ export class AlertReceiveChannelStore { }); } + @AutoLoadingState(ActionKey.FETCH_INTEGRATIONS) async fetchItems(query: any = ''): Promise> { const { data: { results }, diff --git a/tools/scripts/discord_webhooks.py b/tools/scripts/discord_webhooks.py new file mode 100644 index 00000000..e7b0eee9 --- /dev/null +++ b/tools/scripts/discord_webhooks.py @@ -0,0 +1,80 @@ +import json +import requests + +# having setup a Discord webhook for a channel, this script will +# setup OnCall webhooks to send and update notifications for alert group created/updated triggers + +# Configuration +ONCALL_API_BASE_URL = "https://oncall-prod-us-central-0.grafana.net/oncall" +ONCALL_TOKEN = "" +ONCALL_WEBHOOK_PREFIX = "discord" # prefix for webhooks naming +DISCORD_WEBHOOK_URL = "" + +NOTIFICATION_TEMPLATE = """ +{% if alert_group.state == 'acknowledged'%}:orange_circle:{% elif alert_group.state == 'resolved'%}:green_circle:{% elif alert_group.state == 'silenced'%}:white_circle:{% else %}:red_circle:{% endif %} **{{ alert_group.title }}** +*{{ alert_group.state }}* +{{ alert_payload.message }} +*{{ integration.name }}* + +{% if event.type == 'acknowledge' %}**Acknowledged by: {{ user.username }}**{% endif %}{% if event.type == 'resolve' %}**Resolved by: {{ user.username }}**{% endif %}{% if event.type == 'silence' %}**Silenced by: {{ user.username }} (until {{ event.until }})**{% endif %} + +[View in Grafana OnCall]({{ alert_group.permalinks.web 
}}) +""" + +# --- Do not edit below this line --- + +def get_oncall_webhook(name): + webhook_uid = None + oncall_url = "{}/api/v1/webhooks/?name={}".format(ONCALL_API_BASE_URL, name) + oncall_api_headers = { + "Authorization": ONCALL_TOKEN + } + r = requests.get(oncall_url, headers=oncall_api_headers) + r.raise_for_status() + results = r.json().get("results", []) + if results: + webhook_uid = results[0]["id"] + return webhook_uid + + +def setup_oncall_webhook(name, trigger, http_method, endpoint, additional_data=None): + url = "{}{}".format(DISCORD_WEBHOOK_URL, endpoint) + data = {"content": NOTIFICATION_TEMPLATE} + if additional_data is not None: + data.update(additional_data) + webhook_name = "{}-{}".format(ONCALL_WEBHOOK_PREFIX, name) + # check if already exists + webhook_uid = get_oncall_webhook(webhook_name) + # create webhook here/ oncall api here + oncall_url = "{}/api/v1/webhooks/".format(ONCALL_API_BASE_URL) + oncall_api_headers = { + "Authorization": ONCALL_TOKEN + } + oncall_http_method = "POST" + webhook_data = { + "name": webhook_name, + "url": url, + "http_method": http_method, + "trigger_type": trigger, + "forward_all": False, + "data": json.dumps(data), + } + if webhook_uid: + webhook_data["id"] = webhook_uid + oncall_url += webhook_uid + oncall_http_method = "PUT" + r = requests.request( + oncall_http_method, oncall_url, headers=oncall_api_headers, json=webhook_data + ) + r.raise_for_status() + return r + + +# setup webhook for new alert group +endpoint = "?wait=true" +new_ag_webhook = setup_oncall_webhook("new", "alert group created", "POST", endpoint) + +# setup webhook for status changes +webhook_create_id = new_ag_webhook.json()["id"] +update_endpoint = "/messages/{{{{ responses.{}.id }}}}".format(webhook_create_id) +update_ag_webhook = setup_oncall_webhook("update", "status change", "PATCH", update_endpoint) diff --git a/tools/scripts/mattermost_webhooks.py b/tools/scripts/mattermost_webhooks.py new file mode 100644 index 00000000..67dd56f3 
--- /dev/null +++ b/tools/scripts/mattermost_webhooks.py @@ -0,0 +1,114 @@ +import json +import requests + +# this script will get the mattermost channel ID using the mattermost API and +# setup OnCall webhooks to send and update notifications for alert group created/updated triggers + +# Configuration +ONCALL_API_BASE_URL = "https://oncall-prod-us-central-0.grafana.net/oncall" +ONCALL_TOKEN = "" +ONCALL_WEBHOOK_PREFIX = "mattermost" # prefix for webhooks naming +MATTERMOST_API_BASE_URL = "http://localhost:8065" +MATTERMOST_BOT_TOKEN = "" +MATTERMOST_TEAM_NAME = "testing" # mattermost team name to which the bot belongs to +MATTERMOST_CHANNEL_NAME = "testing" # mattermost channel the bot user will post notifications (should be a member too) + +NOTIFICATION_TEMPLATE = """ +{% if alert_group.state == 'acknowledged'%}:large_orange_circle:{% elif alert_group.state == 'resolved'%}:large_green_circle:{% elif alert_group.state == 'silenced'%}:white_circle:{% else %}:red_circle:{% endif %} **{{ alert_group.title }}** +*{{ alert_group.state }}* +{{ alert_payload.message }} +*{{ integration.name }}* + +{% if event.type == 'acknowledge' %}**Acknowledged by: {{ user.username }}**{% endif %}{% if event.type == 'resolve' %}**Resolved by: {{ user.username }}**{% endif %}{% if event.type == 'silence' %}**Silenced by: {{ user.username }} (until {{ event.until }})**{% endif %} + +[View in Grafana OnCall]({{ alert_group.permalinks.web }}) +""" + +# --- Do not edit below this line --- + +MATTERMOST_API_HEADERS = { + "Authorization": "Bearer {}".format(MATTERMOST_BOT_TOKEN), +} + +def get_mattermost_channel_id(): + url = "{}/api/v4/teams/name/{}/channels/name/{}".format( + MATTERMOST_API_BASE_URL, MATTERMOST_TEAM_NAME, MATTERMOST_CHANNEL_NAME + ) + r = requests.get(url, headers=MATTERMOST_API_HEADERS) + r.raise_for_status() + return r.json()["id"] + + +def get_oncall_webhook(name): + webhook_uid = None + oncall_url = "{}/api/v1/webhooks/?name={}".format(ONCALL_API_BASE_URL, name) + 
oncall_api_headers = { + "Authorization": ONCALL_TOKEN + } + r = requests.get(oncall_url, headers=oncall_api_headers) + r.raise_for_status() + results = r.json().get("results", []) + if results: + webhook_uid = results[0]["id"] + return webhook_uid + + +def setup_oncall_webhook(name, trigger, http_method, endpoint, additional_data=None): + url = "{}{}".format(MATTERMOST_API_BASE_URL, endpoint) + headers = MATTERMOST_API_HEADERS + data = {"message": NOTIFICATION_TEMPLATE} + if additional_data is not None: + data.update(additional_data) + webhook_name = "{}-{}".format(ONCALL_WEBHOOK_PREFIX, name) + # check if already exists + webhook_uid = get_oncall_webhook(webhook_name) + # create webhook here/ oncall api here + oncall_url = "{}/api/v1/webhooks/".format(ONCALL_API_BASE_URL) + oncall_api_headers = { + "Authorization": ONCALL_TOKEN + } + oncall_http_method = "POST" + webhook_data = { + "name": webhook_name, + "url": url, + "http_method": http_method, + "trigger_type": trigger, + "forward_all": False, + "data": json.dumps(data), + "authorization_header": MATTERMOST_API_HEADERS["Authorization"], + } + if webhook_uid: + webhook_data["id"] = webhook_uid + oncall_url += webhook_uid + oncall_http_method = "PUT" + r = requests.request( + oncall_http_method, oncall_url, headers=oncall_api_headers, json=webhook_data + ) + r.raise_for_status() + return r + + +# get mattermost channel id from name +channel_id = get_mattermost_channel_id() + +# setup webhook for new alert group +endpoint = "/api/v4/posts" +new_ag_webhook = setup_oncall_webhook( + "new", "alert group created", "POST", endpoint, + additional_data={ + "channel_id": channel_id, + "metadata": { + "alert_group_id": "{{ alert_group.id }}" + } + } +) + +# setup webhook for status changes +webhook_create_id = new_ag_webhook.json()["id"] +update_endpoint = "/api/v4/posts/{{{{ responses.{}.id }}}}".format(webhook_create_id) +update_ag_webhook = setup_oncall_webhook( + "update", "status change", "PUT", update_endpoint, + 
additional_data={ + "id": "{{{{ responses.{}.id }}}}".format(webhook_create_id), + } +) diff --git a/tools/scripts/oncall_hours_report.py b/tools/scripts/oncall_hours_report.py new file mode 100644 index 00000000..d52ca5bc --- /dev/null +++ b/tools/scripts/oncall_hours_report.py @@ -0,0 +1,81 @@ +import csv +import requests +from datetime import datetime, timedelta + +# CUSTOMIZE THE FOLLOWING VARIABLES +START_DATE = "2023-09-01" +END_DATE = "2023-09-30" +# time outside this range (or during weekends) will be considered non-working hours +WORKING_HOURS_START_TIME = timedelta(hours=0, minutes=0, seconds=0) +WORKING_HOURS_END_TIME = timedelta(hours=23, minutes=59, seconds=59) + +MY_ONCALL_API_BASE_URL = "https://oncall-prod-us-central-0.grafana.net/oncall/api/v1/schedules" +MY_ONCALL_API_KEY = "" +OUTPUT_FILE_NAME = f"oncall-report-{START_DATE}-to-{END_DATE}.csv" + + +clamp = lambda t, start, end: max(start, min(end, t)) +day_delta = lambda t: t - t.replace(hour = 0, minute = 0, second = 0) + + +def working_hours_between(a, b): + zero = timedelta(0) + start = WORKING_HOURS_START_TIME + end = WORKING_HOURS_END_TIME + assert(zero <= start <= end <= timedelta(1)) + working_day = end - start + days = (b - a).days + 1 + weeks = days // 7 + # exclude weekends + if a.weekday()==0 and (b.weekday()==4 or b.weekday()==5): + extra = 5 + else: + extra = (max(0, 5 - a.weekday()) + min(5, 1 + b.weekday())) % 5 + weekdays = weeks * 5 + extra + total = working_day * weekdays + if a.weekday() < 5: + total -= clamp(day_delta(a) - start, zero, working_day) + if b.weekday() < 5: + total -= clamp(end - day_delta(b), zero, working_day) + return total + + +headers = {"Authorization": MY_ONCALL_API_KEY} +schedule_ids = [schedule["id"] for schedule in requests.get(MY_ONCALL_API_BASE_URL, headers=headers).json()["results"]] +user_on_call_hours = {} + +for schedule_id in schedule_ids: + response = requests.get( + 
f"{MY_ONCALL_API_BASE_URL}/{schedule_id}/final_shifts?start_date={START_DATE}&end_date={END_DATE}", + headers=headers) + + for final_shift in response.json()["results"]: + user_pk = final_shift["user_pk"] + end = datetime.fromisoformat(final_shift["shift_end"]) + start = datetime.fromisoformat(final_shift["shift_start"]) + shift_time_in_seconds = (end - start).total_seconds() + shift_time_in_hours = shift_time_in_seconds / (60 * 60) + working_hours_time = working_hours_between(start, end) + working_hours_time_in_hours = working_hours_time.total_seconds() / (60 * 60) + + if user_pk in user_on_call_hours: + user_on_call_hours[user_pk]["hours_on_call"] += shift_time_in_hours + user_on_call_hours[user_pk]["working_hours_time"] += working_hours_time_in_hours + else: + user_on_call_hours[user_pk] = { + "email": final_shift["user_email"], + "hours_on_call": shift_time_in_hours, + "working_hours_time": working_hours_time_in_hours, + } + +with open(OUTPUT_FILE_NAME, "w") as fp: + csv_writer = csv.DictWriter(fp, ["user_pk", "user_email", "hours_on_call", "non_working_hours_on_call"]) + csv_writer.writeheader() + + for user_pk, user_info in user_on_call_hours.items(): + csv_writer.writerow({ + "user_pk": user_pk, + "user_email": user_info["email"], + "hours_on_call": user_info["hours_on_call"], + "non_working_hours_on_call": user_info["hours_on_call"] - user_info["working_hours_time"], + }) diff --git a/tools/scripts/oncall_reports.py b/tools/scripts/oncall_reports.py new file mode 100644 index 00000000..1c908ce8 --- /dev/null +++ b/tools/scripts/oncall_reports.py @@ -0,0 +1,232 @@ +# requires requests (pip install requests) + +# This script will output 3 .csv files: +# - oncall.escalation_chains.csv: escalation chains names and their respective serialized steps +# - oncall.orphaned_schedules.csv: schedules ID and name for schedules not linked to any escalation chain +# - oncall.users.csv: users information in the speficied period +# (team, notification policies, hours 
on-call, # acknowledged, # resolved)
+
+# You can run it like this:
+# $ ONCALL_API_TOKEN= DAYS=7 python oncall_reports.py
+
+import csv
+import os
+
+from datetime import datetime, timedelta, timezone
+
+import requests
+
+ONCALL_API_BASE_URL = os.environ.get(
+    "ONCALL_API_BASE_URL",
+    "https://oncall-prod-us-central-0.grafana.net/oncall",
+)
+ONCALL_API_TOKEN = os.environ.get("ONCALL_API_TOKEN")
+
+# number of days to consider (default: last 30 days)
+NUM_LAST_DAYS = int(os.environ.get("DAYS", 30))
+
+# output CSV filenames with the data
+ESCALATION_CHAINS_OUTPUT_FILE_NAME = "oncall.escalation_chains.csv"
+ORPHANED_SCHEDULES_OUTPUT_FILE_NAME = "oncall.orphaned_schedules.csv"
+USERS_OUTPUT_FILE_NAME = "oncall.users.csv"
+
+
+headers = {
+    "Authorization": ONCALL_API_TOKEN,
+}
+
+users = {}
+teams = {}
+escalation_chains = {}
+schedules = {}
+
+end_date = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
+start_date = end_date - timedelta(days=NUM_LAST_DAYS)
+hours_field_name = "hours_on_call_last_{}d".format(NUM_LAST_DAYS)
+
+def _serialize_step(p):
+    step = p["type"]
+    if step == "wait":
+        step = "{}({})".format(p["type"], p["duration"])
+    elif step == "trigger_webhook":
+        step = "{}({})".format(p["type"], p["action_to_trigger"])
+    elif step == "notify_user_group":
+        step = "{}({})".format(p["type"], p["group_to_notify"])
+    elif step == "notify_persons":
+        step = "{}({})".format(
+            p["type"],
+            ','.join(users[u_id]["username"] for u_id in p["persons_to_notify"]) if p["persons_to_notify"] else '',
+        )
+    elif step == "notify_on_call_from_schedule":
+        schedule_id = p["notify_on_call_from_schedule"]
+        step = "{}({})".format(
+            p["type"],
+            schedules.get(schedule_id, "missing") if schedule_id else '',
+        )
+    elif step == "notify_if_time_from_to":
+        step = "{}({}-{})".format(p["type"], p["notify_if_time_from"], p["notify_if_time_to"])
+    return step
+
+# fetch teams
+# GET {{API_URL}}/api/v1/teams/
+
+print("Fetching teams data...")
+url = ONCALL_API_BASE_URL + "/api/v1/teams/"
+r = requests.get(url, params={"perpage": 100}, headers=headers) # TODO: handle pagination
+r.raise_for_status()
+results = r.json().get("results")
+for t in results:
+    teams[t["id"]] = t["name"]
+
+
+# fetch users (paginated)
+# https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/users/#list-users
+# GET {{API_URL}}/api/v1/users/
+
+print("Fetching users data...")
+page = 1
+while True:
+    url = ONCALL_API_BASE_URL + "/api/v1/users/"
+    r = requests.get(url, params={"page": page}, headers=headers)
+    r.raise_for_status()
+    response_data = r.json()
+    results = response_data.get("results")
+    for u in results:
+        users[u["id"]] = {
+            "username": u["username"],
+            "email": u["email"],
+            "teams": ",".join([teams[t] for t in u["teams"]]),
+            "acknowledged_count": 0,
+            "resolved_count": 0,
+            hours_field_name: 0,
+        }
+    page += 1
+    total_pages = int(response_data.get("total_pages"))
+    if page > total_pages:
+        break
+
+# fetch policies
+# https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/personal_notification_rules/#list-personal-notification-rules
+# {{API_URL}}/api/v1/personal_notification_rules/ ?user_id= & important=
+
+print("Fetching users notification policies...")
+url = ONCALL_API_BASE_URL + "/api/v1/personal_notification_rules/"
+for u in users:
+    for important in ("true", "false"):
+        r = requests.get(url, params={"user_id": u, "important": important}, headers=headers)
+        r.raise_for_status()
+        results = r.json().get("results")
+        policy = ",".join(_serialize_step(p) for p in results)
+        key = "important" if important == "true" else "default"
+        users[u][key] = policy
+
+
+# get on-call schedule time
+# https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/schedules/#export-a-schedules-final-shifts
+
+print("Fetching schedules/shifts data...")
+url = ONCALL_API_BASE_URL + "/api/v1/schedules"
+r = requests.get(url, headers=headers)
+r.raise_for_status()
+results = r.json().get("results")
+for schedule in results:
+    schedules[schedule["id"]] = schedule["name"]
+    schedule_id = schedule["id"]
+    url = ONCALL_API_BASE_URL + "/api/v1/schedules/{}/final_shifts".format(schedule_id)
+    params = {
+        "start_date": start_date.strftime("%Y-%m-%d"),
+        "end_date": end_date.strftime("%Y-%m-%d"),
+    }
+    r = requests.get(url, params=params, headers=headers)
+    r.raise_for_status()
+    shifts = r.json().get("results")
+    for final_shift in shifts:
+        user_pk = final_shift["user_pk"]
+        end = datetime.fromisoformat(final_shift["shift_end"].replace('Z', '+00:00'))
+        start = datetime.fromisoformat(final_shift["shift_start"].replace('Z', '+00:00'))
+        shift_time_in_seconds = (end - start).total_seconds()
+        shift_time_in_hours = shift_time_in_seconds / (60 * 60)
+        if user_pk in users:  # skip users not returned by the users API (e.g. deleted)
+            users[user_pk][hours_field_name] += shift_time_in_hours
+
+
+# fetch alert groups
+# https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/alertgroups/#list-alert-groups
+# GET {{API_URL}}/api/v1/alert_groups/
+
+print("Fetching alert groups data...")
+page = 1
+in_range = True
+while in_range:
+    url = ONCALL_API_BASE_URL + "/api/v1/alert_groups"
+    r = requests.get(url, params={"page": page}, headers=headers)
+    r.raise_for_status()
+    results = r.json().get("results")
+    for ag in results:
+        created_at = datetime.fromisoformat(ag["created_at"].replace('Z', '+00:00'))
+        if created_at < start_date:
+            in_range = False
+            break
+        ack_by = ag["acknowledged_by"]
+        resolved_by = ag["resolved_by"]
+        if ack_by in users:  # may reference a user no longer in the users map
+            users[ack_by]["acknowledged_count"] += 1
+        if resolved_by in users:  # same as above
+            users[resolved_by]["resolved_count"] += 1
+    page += 1
+
+
+# fetch escalation chains
+# https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/escalation_chains/#list-escalation-chains
+# GET {{API_URL}}/api/v1/escalation_chains/
+
+print("Fetching 
escalation chains data...")
+url = ONCALL_API_BASE_URL + "/api/v1/escalation_chains/"
+r = requests.get(url, params={"perpage": 100}, headers=headers)
+r.raise_for_status()
+results = r.json().get("results")
+orphaned_schedules = set(schedules.keys())
+for chain in results:
+    chain_id = chain["id"]
+    # fetch policies for escalation chain
+    # https://grafana.com/docs/grafana-cloud/alerting-and-irm/oncall/oncall-api-reference/escalation_policies/#list-escalation-policies
+    # GET {{API_URL}}/api/v1/escalation_policies/
+    url = ONCALL_API_BASE_URL + "/api/v1/escalation_policies/"
+    r = requests.get(url, params={"escalation_chain_id": chain_id}, headers=headers)
+    r.raise_for_status()
+    policies = r.json().get("results")
+    steps = ",".join(_serialize_step(p) for p in policies)
+    escalation_chains[chain_id] = {"name": chain["name"], "steps": steps}
+    notify_schedules = [s for s in policies if s["type"] == "notify_on_call_from_schedule"]
+    for s in notify_schedules:
+        # remove schedule from potential orphaned schedules
+        schedule_id = s["notify_on_call_from_schedule"]
+        orphaned_schedules.discard(schedule_id)  # a schedule may be referenced by several chains
+
+
+# write orphaned schedules report
+with open(ORPHANED_SCHEDULES_OUTPUT_FILE_NAME, "w") as fp:
+    fieldnames = ["schedule_id", "name"]
+    csv_writer = csv.DictWriter(fp, fieldnames)
+    csv_writer.writeheader()
+    for s_id in orphaned_schedules:
+        row = {"schedule_id": s_id, "name": schedules[s_id]}
+        csv_writer.writerow(row)
+
+
+# write escalation chains report
+with open(ESCALATION_CHAINS_OUTPUT_FILE_NAME, "w") as fp:
+    fieldnames = ["name", "steps"]
+    csv_writer = csv.DictWriter(fp, fieldnames)
+    csv_writer.writeheader()
+    for chain_info in escalation_chains.values():
+        csv_writer.writerow(chain_info)
+
+
+# write users report
+with open(USERS_OUTPUT_FILE_NAME, "w") as fp:
+    fieldnames = ["username", "email", "teams", "important", "default", hours_field_name, "acknowledged_count", "resolved_count"]
+    csv_writer = csv.DictWriter(fp, fieldnames)
+    csv_writer.writeheader()
+    for user_info in users.values():
+        csv_writer.writerow(user_info)
diff --git a/tools/scripts/readme.md b/tools/scripts/readme.md
new file mode 100644
index 00000000..a2696097
--- /dev/null
+++ b/tools/scripts/readme.md
@@ -0,0 +1,19 @@
+# Sample scripts using public API
+
+- [oncall_hours_reports.py](oncall_hours_reports.py)
+Generate per-user on-call hours report
+
+- [oncall_reports.py](oncall_reports.py)
+Generate CSV user reports using public API
+
+- [shift_shifts.py](shift_shifts.py)
+Shift schedule shifts by a given delta
+
+- [mattermost_webhooks.py](mattermost_webhooks.py)
+Setup Mattermost webhooks for alert group notifications
+
+- [discord_webhooks.py](discord_webhooks.py)
+Setup Discord webhooks for alert group notifications
+
+- [swap_requests_workday.py](swap_requests_workday.py)
+Create shift swap requests using Workday absences information
diff --git a/tools/scripts/shift_shifts.py b/tools/scripts/shift_shifts.py
new file mode 100644
index 00000000..c3705bae
--- /dev/null
+++ b/tools/scripts/shift_shifts.py
@@ -0,0 +1,51 @@
+# requires: requests
+
+import requests
+from datetime import datetime, timedelta
+
+HOURS_DELTA = -1 # delta in hours to shift rotations
+ONCALL_API_BASE_URL = "https://oncall-prod-us-central-0.grafana.net/oncall"
+ONCALL_API_TOKEN = ""
+# update only a specific schedule, by id (e.g. 
"SVVGWD8W1Q38A") +# if set to None, will update all your schedules +SCHEDULE_ID = None + +headers = { + "Authorization": ONCALL_API_TOKEN, +} + +if SCHEDULE_ID is not None: + # filter schedule shifts + url = f"{ONCALL_API_BASE_URL}/api/v1/on_call_shifts/?schedule_id={SCHEDULE_ID}" +else: + # assuming there is up to 100 shifts only (max page size) + url = f"{ONCALL_API_BASE_URL}/api/v1/on_call_shifts/?perpage=100" + +# note: overrides are not included +r = requests.get(url, headers=headers) +if not r.ok: + raise Exception(r.status_code) + +now = datetime.utcnow() +shift_delta = timedelta(hours=HOURS_DELTA) +shifts = r.json()["results"] +for shift in shifts: + # get shift information + shift_id = shift["id"] + shift_start = datetime.strptime(shift["start"], "%Y-%m-%dT%H:%M:%S") + until = shift.get("until") + if until is not None: + until = datetime.strptime(shift["start"], "%Y-%m-%dT%H:%M:%S") + if until < now: + # skip finished rotation + continue + # update shift start by delta + updated_start = shift_start + shift_delta + update_data = {"start": updated_start.isoformat()} + shift_url = f"{ONCALL_API_BASE_URL}/api/v1/on_call_shifts/{shift_id}" + r = requests.put(shift_url, json=update_data, headers=headers) + if not r.ok: + print(f"Failed to update shift {shift_id}") + else: + print(f"Shift {shift_id} updated") + \ No newline at end of file diff --git a/tools/scripts/swap_requests_workday.py b/tools/scripts/swap_requests_workday.py new file mode 100644 index 00000000..6edf413d --- /dev/null +++ b/tools/scripts/swap_requests_workday.py @@ -0,0 +1,104 @@ +# pip install openpyxl pytz requests + +# ONCALL_API_TOKEN="" python swap_requests_workday.py -u -s -t +# e.g. 
ONCALL_API_TOKEN="the-token" python swap_requests_workday.py -u UCGEIXI1MR1NZ -s SF1R2ZQZKJNLK workday.xlsx -t "America/Montevideo" -d
+
+# TODO: handle specific events (public holidays, vacation, sick leave, etc)
+
+import argparse
+import datetime
+import os
+
+import openpyxl
+import pytz
+import requests
+
+
+ONCALL_API_TOKEN = os.environ.get("ONCALL_API_TOKEN", "")
+ONCALL_API_BASE_URL = os.environ.get(
+    "ONCALL_API_BASE_URL", "https://oncall-prod-us-central-0.grafana.net/oncall"
+)
+
+parser = argparse.ArgumentParser(
+    description="Create shift swap requests from a Workday absences exported file"
+)
+parser.add_argument("-d", "--dry-run", action="store_true", help="Dry run")
+parser.add_argument("-u", "--user", required=True, help="User ID, swap beneficiary")
+parser.add_argument("-s", "--schedule", required=True, help="Schedule ID")
+parser.add_argument(
+    "-t", "--timezone", required=False, default="UTC", help="User timezone"
+)
+parser.add_argument("filename", help="Workday export (.xlsx)")
+
+# Read arguments from command line
+args = parser.parse_args()
+
+try:
+    tz = pytz.timezone(args.timezone)
+except pytz.UnknownTimeZoneError:
+    raise SystemExit("Unknown timezone: {}".format(args.timezone))
+
+# shift swaps API
+path = "/api/v1/shift_swaps/"
+url = ONCALL_API_BASE_URL + path
+# required auth
+headers = {
+    "Authorization": ONCALL_API_TOKEN,
+}
+
+now = datetime.datetime.now(datetime.timezone.utc)
+excel = openpyxl.load_workbook(args.filename)
+sheet = excel.active
+for row in list(sheet.rows)[2:]:
+    starting_datetime, _, absence_type, duration, unit, comment, status, _ = [
+        cell.value for cell in row
+    ]
+
+    starting_datetime = tz.localize(starting_datetime).astimezone(pytz.UTC)
+    if starting_datetime <= now:
+        # ignore past absences
+        continue
+
+    if duration <= 0:
+        # ignore corrections
+        continue
+
+    if status != "Approved":
+        # only consider approved requests
+        continue
+
+    # check request already exists
+    params = {
+        "schedule_id": args.schedule,
+        "beneficiary": args.user,
+        "starting_after": starting_datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
+    }
+    r = requests.get(url, params=params, headers=headers)
+    r.raise_for_status()
+    results = r.json().get("results")
+    if results and results[0]["swap_start"] == params["starting_after"]:
+        print("Swap request already exists for {}".format(params["starting_after"]))
+        continue
+
+    # assert unit == "Days"
+    ending_datetime = starting_datetime + datetime.timedelta(hours=24 * duration)
+    description = "{}: {}".format(absence_type, comment or "I will be off")
+    # create swap request
+    data = {
+        "schedule": args.schedule,
+        "swap_start": starting_datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
+        "swap_end": ending_datetime.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
+        "description": description,
+        "beneficiary": args.user,
+    }
+    if args.dry_run:
+        print("Swap request payload would be:")
+        print(data)
+    else:
+        r = requests.post(url, json=data, headers=headers)
+        r.raise_for_status()
+        print(
+            "Swap request created for {} ({})".format(
+                params["starting_after"], absence_type
+            )
+        )