Minor formatting changes (#2641)

# What this PR does

- Updates `black` and `flake8` to latest
- Removes `F541` from the flake8 ignore list (`F541`: f-string is missing placeholders)
- Enables the ["float to top" option](https://pycqa.github.io/isort/docs/configuration/options.html#float-to-top) for `isort`
This commit is contained in:
Vadim Stepanov 2023-07-26 14:45:44 +01:00 committed by GitHub
parent e15d18a604
commit f977f9faee
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
36 changed files with 61 additions and 67 deletions

View file

@ -16,7 +16,7 @@ repos:
args: [--settings-file=dev/scripts/.isort.cfg, --filter-files]
- repo: https://github.com/psf/black
rev: 22.3.0
rev: 23.7.0
hooks:
- id: black
files: ^engine
@ -29,7 +29,7 @@ repos:
files: ^dev/scripts
- repo: https://github.com/pycqa/flake8
rev: 3.9.2
rev: 6.0.0
hooks:
- id: flake8
files: ^engine
@ -40,22 +40,20 @@ repos:
name: flake8 - pd-migrator
files: ^tools/pagerduty-migrator
# Make sure config is compatible with black
# https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-length
# https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#flake8
args:
[
--max-line-length=88,
"--select=C,E,F,W,B,B950",
"--max-line-length=88",
"--extend-ignore=E203,E501",
]
- id: flake8
name: flake8 - dev/scripts
files: ^dev/scripts
# Make sure config is compatible with black
# https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-length
# https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#flake8
args:
[
--max-line-length=88,
"--select=C,E,F,W,B,B950",
"--max-line-length=88",
"--extend-ignore=E203,E501",
]

View file

@ -247,7 +247,7 @@ class EscalationSnapshotMixin:
)
task_id = celery_uuid()
AlertGroup.objects.filter(pk=self.pk,).update(
AlertGroup.objects.filter(pk=self.pk).update(
active_escalation_id=task_id,
is_escalation_finished=False,
raw_escalation_snapshot=raw_escalation_snapshot,

View file

@ -271,7 +271,7 @@ class AlertGroupLogRecord(models.Model):
if escalation_chain is not None:
result += f' with escalation chain "{escalation_chain.name}"'
else:
result += f" with no escalation chain, skipping escalation"
result += " with no escalation chain, skipping escalation"
else:
result += "alert group assigned to deleted route, skipping escalation"
elif self.type == AlertGroupLogRecord.TYPE_ACK:

View file

@ -25,7 +25,7 @@ def send_alert_group_escalation_auditor_task_heartbeat() -> None:
requests.get(heartbeat_url).raise_for_status()
task_logger.info(f"Heartbeat successfully sent to {heartbeat_url}")
else:
task_logger.info(f"Skipping sending heartbeat as no heartbeat URL is configured")
task_logger.info("Skipping sending heartbeat as no heartbeat URL is configured")
def audit_alert_group_escalation(alert_group: "AlertGroup") -> None:

View file

@ -55,7 +55,7 @@ def notify_user_task(
UserNotificationPolicyLogRecord(
author=user,
type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,
reason=f"notification is not allowed for user",
reason="notification is not allowed for user",
alert_group=alert_group,
notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_FORBIDDEN,
).save()
@ -253,7 +253,7 @@ def perform_notification(log_record_pk):
UserNotificationPolicyLogRecord(
author=user,
type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,
reason=f"notification is not allowed for user",
reason="notification is not allowed for user",
alert_group=alert_group,
notification_error_code=UserNotificationPolicyLogRecord.ERROR_NOTIFICATION_FORBIDDEN,
).save()
@ -347,7 +347,7 @@ def perform_notification(log_record_pk):
backend = None
if backend is None:
task_logger.debug(f"notify_user failed because messaging backend is not available")
task_logger.debug("notify_user failed because messaging backend is not available")
UserNotificationPolicyLogRecord(
author=user,
type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_FAILED,

View file

@ -20,7 +20,6 @@ def resolve_alert_group_by_source_if_needed(alert_group_pk):
alert_group.active_resolve_calculation_id
)
else:
is_more_than_max_alerts_in_group = alert_group.alerts_count_gt(
AlertGroupForAlertManager.MAX_ALERTS_IN_GROUP_FOR_AUTO_RESOLVE
)

View file

@ -62,13 +62,13 @@ class ChannelFilterSerializer(EagerLoadingMixin, serializers.ModelSerializer):
try:
valid_jinja_template_for_serializer_method_field({"route_template": filtering_term})
except JinjaTemplateError:
raise serializers.ValidationError([f"Jinja template is incorrect"])
raise serializers.ValidationError(["Jinja template is incorrect"])
elif filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_REGEX or filtering_term_type is None:
if filtering_term is not None:
if not is_regex_valid(filtering_term):
raise serializers.ValidationError(["Regular expression is incorrect"])
else:
raise serializers.ValidationError([f"Expression type is incorrect"])
raise serializers.ValidationError(["Expression type is incorrect"])
return data
def get_slack_channel(self, obj):

View file

@ -77,7 +77,7 @@ def test_get_filter_started_at(alert_group_internal_api_setup, make_user_auth_he
url = reverse("api-internal:alertgroup-list")
response = client.get(
url + f"?started_at=1970-01-01T00:00:00/2099-01-01T23:59:59",
url + "?started_at=1970-01-01T00:00:00/2099-01-01T23:59:59",
format="json",
**make_user_auth_headers(user, token),
)
@ -653,7 +653,7 @@ def test_get_filter_mine(
url = reverse("api-internal:alertgroup-list")
first_response = client.get(
url + f"?mine=true",
url + "?mine=true",
format="json",
**make_user_auth_headers(first_user, token),
)
@ -661,7 +661,7 @@ def test_get_filter_mine(
assert len(first_response.data["results"]) == 1
second_response = client.get(
url + f"?mine=false",
url + "?mine=false",
format="json",
**make_user_auth_headers(first_user, token),
)

View file

@ -269,7 +269,7 @@ def test_move_to_position(
url = reverse(
"api-internal:channel_filter-move-to-position", kwargs={"pk": first_channel_filter.public_primary_key}
)
url += f"?position=1"
url += "?position=1"
response = client.put(url, **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_200_OK
@ -297,7 +297,7 @@ def test_move_to_position_invalid_index(
url = reverse(
"api-internal:channel_filter-move-to-position", kwargs={"pk": first_channel_filter.public_primary_key}
)
url += f"?position=2"
url += "?position=2"
response = client.put(url, **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_400_BAD_REQUEST
@ -320,7 +320,7 @@ def test_move_to_position_cant_move_default(
url = reverse(
"api-internal:channel_filter-move-to-position", kwargs={"pk": default_channel_filter.public_primary_key}
)
url += f"?position=1"
url += "?position=1"
response = client.put(url, **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_400_BAD_REQUEST

View file

@ -255,7 +255,7 @@ def test_team_permissions_not_in_team(
assert response.json() == {"error_code": "wrong_team"}
# Editor cannot retrieve other user information
url = reverse(f"api-internal:user-detail", kwargs={"pk": another_user.public_primary_key})
url = reverse("api-internal:user-detail", kwargs={"pk": another_user.public_primary_key})
response = client.get(url, **make_user_auth_headers(user, token))
assert response.status_code == status.HTTP_403_FORBIDDEN

View file

@ -331,7 +331,7 @@ class UserView(
logger.info("get_verification_code: validating reCAPTCHA code")
valid = check_recaptcha_internal_api(request, "mobile_verification_code")
if not valid:
logger.warning(f"get_verification_code: invalid reCAPTCHA validation")
logger.warning("get_verification_code: invalid reCAPTCHA validation")
return Response("failed reCAPTCHA check", status=status.HTTP_400_BAD_REQUEST)
logger.info('get_verification_code: pass reCAPTCHA validation"')
@ -358,7 +358,7 @@ class UserView(
logger.info("get_verification_code_via_call: validating reCAPTCHA code")
valid = check_recaptcha_internal_api(request, "mobile_verification_code")
if not valid:
logger.warning(f"get_verification_code_via_call: invalid reCAPTCHA validation")
logger.warning("get_verification_code_via_call: invalid reCAPTCHA validation")
return Response("failed reCAPTCHA check", status=status.HTTP_400_BAD_REQUEST)
logger.info('get_verification_code_via_call: pass reCAPTCHA validation"')

View file

@ -87,7 +87,7 @@ class InboundEmailWebhookView(AlertChannelDefiningMixin, APIView):
if domain == live_settings.INBOUND_EMAIL_DOMAIN:
return token
else:
logger.info(f"get_integration_token_from_request: message.envelope_recipient is not present")
logger.info("get_integration_token_from_request: message.envelope_recipient is not present")
"""
TODO: handle case when envelope_recipient is not provided.
Now we can't just compare to/cc domains one by one with INBOUND_EMAIL_DOMAIN
@ -123,7 +123,7 @@ class InboundEmailWebhookView(AlertChannelDefiningMixin, APIView):
"""
# TODO: These settings should be checked before app start.
if not live_settings.INBOUND_EMAIL_ESP:
logger.error(f"InboundEmailWebhookView: INBOUND_EMAIL_ESP env variable must be set.")
logger.error("InboundEmailWebhookView: INBOUND_EMAIL_ESP env variable must be set.")
return HttpResponse(
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)

View file

@ -60,7 +60,7 @@ def notify_user_async(user_pk, alert_group_pk, notification_policy_pk):
notification_step=notification_policy.step,
notification_channel=notification_policy.notify_by,
)
logger.error(f"Error while sending email: empty EMAIL_HOST env variable")
logger.error("Error while sending email: empty EMAIL_HOST env variable")
return
emails_left = user.organization.emails_left(user)

View file

@ -35,7 +35,7 @@ class GrafanaPluginConfig(AppConfig):
# and the GRAFANA_API_URL env var is not specified, exit the application
if has_existing_org is False and settings.SELF_HOSTED_SETTINGS["GRAFANA_API_URL"] is None:
logger.error(
f"For OSS installations, GRAFANA_API_URL is a required environment variable. Please set it and restart the application."
"For OSS installations, GRAFANA_API_URL is a required environment variable. Please set it and restart the application."
)
sys.exit()
except OperationalError:

View file

@ -61,7 +61,7 @@ class TestIsRbacEnabledForStack:
({"config": {"feature_toggles": {"enable": f"foo,bar,{TEST_FEATURE_TOGGLE}baz"}}}, False),
({"config": {"feature_toggles": {"enable": f"foo,bar,{TEST_FEATURE_TOGGLE},baz"}}}, True),
({"config": {"feature_toggles": {"enable": f"foo bar {TEST_FEATURE_TOGGLE} baz"}}}, True),
({"config": {"feature_toggles": {"enable": f"foo bar baz", TEST_FEATURE_TOGGLE: "true"}}}, True),
({"config": {"feature_toggles": {"enable": "foo bar baz", TEST_FEATURE_TOGGLE: "true"}}}, True),
({"config": {"feature_toggles": {TEST_FEATURE_TOGGLE: "true"}}}, True),
# this case will probably never happen, but lets account for it anyways
(

View file

@ -26,7 +26,7 @@ class SelfHostedInstallView(GrafanaHeadersMixin, APIView):
provisioning_info = {"error": None}
if settings.LICENSE != settings.OPEN_SOURCE_LICENSE_NAME:
provisioning_info["error"] = f"License type not authorized"
provisioning_info["error"] = "License type not authorized"
return Response(status=status.HTTP_403_FORBIDDEN)
grafana_api_client = GrafanaAPIClient(api_url=grafana_url, api_token=grafana_api_token)

View file

@ -31,7 +31,7 @@ def create_alertmanager_alerts(alert_receive_channel_pk, alert, is_demo=False, f
alert_receive_channel.deleted_at is not None
or alert_receive_channel.integration == AlertReceiveChannel.INTEGRATION_MAINTENANCE
):
logger.info(f"AlertReceiveChannel alert ignored if deleted/maintenance")
logger.info("AlertReceiveChannel alert ignored if deleted/maintenance")
return
try:

View file

@ -186,7 +186,6 @@ def calculate_and_cache_user_was_notified_metric(organization_id):
metric_user_was_notified: typing.Dict[int, UserWasNotifiedOfAlertGroupsMetricsDict] = {}
for user in users:
counter = (
user.personal_log_records.filter(type=UserNotificationPolicyLogRecord.TYPE_PERSONAL_NOTIFICATION_TRIGGERED)
.values("alert_group")

View file

@ -76,7 +76,7 @@ def _send_push_notification(
if not CloudConnector.objects.exists():
_error_cb()
logger.error(f"Error while sending a mobile push notification: not connected to cloud")
logger.error("Error while sending a mobile push notification: not connected to cloud")
return
try:

View file

@ -185,7 +185,7 @@ def test_get_youre_going_oncall_fcm_message(
):
mock_fcm_message = "mncvmnvcmnvcnmvcmncvmn"
mock_notification_title = "asdfasdf"
mock_notification_subtitle = f"9:06\u202fAM - 9:06\u202fAM\nSchedule XYZ"
mock_notification_subtitle = "9:06\u202fAM - 9:06\u202fAM\nSchedule XYZ"
shift_pk = "mncvmnvc"
seconds_until_going_oncall = 600

View file

@ -48,7 +48,7 @@ class CloudConnector(models.Model):
error_msg = f"Non-200 HTTP code. Got {r.status_code}"
except requests.exceptions.RequestException as e:
logger.warning(f"Unable to sync with cloud. Request exception {str(e)}")
error_msg = f"Unable to sync with cloud"
error_msg = "Unable to sync with cloud"
return sync_status, error_msg
@ -86,7 +86,7 @@ class CloudConnector(models.Model):
fetch_next_page = False
except requests.exceptions.RequestException as e:
logger.warning(f"Unable to sync users with cloud. Request exception {str(e)}")
error_msg = f"Unable to sync with cloud"
error_msg = "Unable to sync with cloud"
users_fetched = False
break
@ -146,7 +146,7 @@ class CloudConnector(models.Model):
error_msg = f"User with email not found {user.email}"
except requests.exceptions.RequestException as e:
logger.warning(f"Unable to sync_user_with cloud user_id {user.id}. Request exception {str(e)}")
error_msg = f"Unable to sync with cloud"
error_msg = "Unable to sync with cloud"
return sync_status, error_msg

View file

@ -120,10 +120,10 @@ class PhoneBackend:
if response.status_code == 200:
logger.info("PhoneBackend._notify_by_cloud_call: OK")
elif response.status_code == 400 and response.json().get("error") == "limit-exceeded":
logger.info(f"PhoneBackend._notify_by_cloud_call: phone calls limit exceeded")
logger.info("PhoneBackend._notify_by_cloud_call: phone calls limit exceeded")
raise CallsLimitExceeded
elif response.status_code == 400 and response.json().get("error") == "number-not-verified":
logger.info(f"PhoneBackend._notify_by_cloud_call: cloud number not verified")
logger.info("PhoneBackend._notify_by_cloud_call: cloud number not verified")
raise NumberNotVerified
elif response.status_code == 404:
logger.info(f"PhoneBackend._notify_by_cloud_call: user not found id={user.id} email={user.email}")

View file

@ -169,13 +169,13 @@ class ChannelFilterSerializer(BaseChannelFilterSerializer):
try:
valid_jinja_template_for_serializer_method_field({"route_template": filtering_term})
except JinjaTemplateError:
raise serializers.ValidationError([f"Jinja template is incorrect"])
raise serializers.ValidationError(["Jinja template is incorrect"])
elif filtering_term_type == ChannelFilter.FILTERING_TERM_TYPE_REGEX or filtering_term_type is None:
if filtering_term is not None:
if not is_regex_valid(filtering_term):
raise serializers.ValidationError(["Regular expression is incorrect"])
else:
raise serializers.ValidationError([f"Expression type is incorrect"])
raise serializers.ValidationError(["Expression type is incorrect"])
return data

View file

@ -133,7 +133,7 @@ def test_get_incidents_filter_by_state_new(
client = APIClient()
url = reverse("api-public:alert_groups-list")
response = client.get(url + f"?state=new", format="json", HTTP_AUTHORIZATION=f"{token}")
response = client.get(url + "?state=new", format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == status.HTTP_200_OK
assert response.json() == expected_response
@ -149,7 +149,7 @@ def test_get_incidents_filter_by_state_acknowledged(
client = APIClient()
url = reverse("api-public:alert_groups-list")
response = client.get(url + f"?state=acknowledged", format="json", HTTP_AUTHORIZATION=f"{token}")
response = client.get(url + "?state=acknowledged", format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == status.HTTP_200_OK
assert response.json() == expected_response
@ -165,7 +165,7 @@ def test_get_incidents_filter_by_state_silenced(
client = APIClient()
url = reverse("api-public:alert_groups-list")
response = client.get(url + f"?state=silenced", format="json", HTTP_AUTHORIZATION=f"{token}")
response = client.get(url + "?state=silenced", format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == status.HTTP_200_OK
assert response.json() == expected_response
@ -181,7 +181,7 @@ def test_get_incidents_filter_by_state_resolved(
client = APIClient()
url = reverse("api-public:alert_groups-list")
response = client.get(url + f"?state=resolved", format="json", HTTP_AUTHORIZATION=f"{token}")
response = client.get(url + "?state=resolved", format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == status.HTTP_200_OK
assert response.json() == expected_response
@ -195,7 +195,7 @@ def test_get_incidents_filter_by_state_unknown(
client = APIClient()
url = reverse("api-public:alert_groups-list")
response = client.get(url + f"?state=unknown", format="json", HTTP_AUTHORIZATION=f"{token}")
response = client.get(url + "?state=unknown", format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == status.HTTP_400_BAD_REQUEST

View file

@ -41,7 +41,6 @@ class ShiftSwapRequestManager(models.Manager):
class ShiftSwapRequest(models.Model):
objects = ShiftSwapRequestManager()
objects_with_deleted = models.Manager()

View file

@ -364,7 +364,7 @@ def _get_team_select(slack_user_identity, organization, value, input_id_prefix):
{
"text": {
"type": "plain_text",
"text": f"General",
"text": "General",
"emoji": True,
},
"value": DEFAULT_TEAM_VALUE,

View file

@ -496,7 +496,7 @@ def _get_team_select_blocks(slack_user_identity, organization, is_selected, valu
{
"text": {
"type": "plain_text",
"text": f"No team",
"text": "No team",
"emoji": True,
},
"value": DEFAULT_TEAM_VALUE,

View file

@ -198,7 +198,7 @@ class SlackEventApiEndpointView(APIView):
# Means that slack_team_identity unpopulated
if not slack_team_identity.organizations.exists():
logger.warning(f"OnCall Team for SlackTeamIdentity is not detected, stop it!")
logger.warning("OnCall Team for SlackTeamIdentity is not detected, stop it!")
# Open pop-up to inform user why OnCall bot doesn't work if any action was triggered
warning_text = (
"OnCall is not able to process this action because this Slack workspace was "

View file

@ -45,7 +45,7 @@ class TwilioPhoneProvider(PhoneProvider):
# If status callback is not valid and not accessible from public url then trying to send message without it
# https://www.twilio.com/docs/api/errors/21609
if e.code == 21609:
logger.info(f"TwilioPhoneProvider.make_notification_call: error 21609, calling without callback_url")
logger.info("TwilioPhoneProvider.make_notification_call: error 21609, calling without callback_url")
try_without_callback = True
else:
logger.error(f"TwilioPhoneProvider.make_notification_call: failed {e}")
@ -75,7 +75,7 @@ class TwilioPhoneProvider(PhoneProvider):
# If status callback is not valid and not accessible from public url then trying to send message without it
# https://www.twilio.com/docs/api/errors/21609
if e.code == 21609:
logger.info(f"TwilioPhoneProvider.send_notification_sms: error 21609, sending without callback_url")
logger.info("TwilioPhoneProvider.send_notification_sms: error 21609, sending without callback_url")
try_without_callback = True
else:
logger.error(f"TwilioPhoneProvider.send_notification_sms: failed {e}")

View file

@ -4,7 +4,6 @@ from apps.phone_notifications.phone_provider import ProviderPhoneCall
class ZvonokCallStatuses:
ATTEMPTS_EXC = 10
COMPL_FINISHED = 20
COMPL_NOFINISHED = 30

View file

@ -34,12 +34,12 @@ class ZvonokPhoneProvider(PhoneProvider):
response.raise_for_status()
body = response.json()
if not body:
logger.error(f"ZvonokPhoneProvider.make_notification_call: failed, empty body")
logger.error("ZvonokPhoneProvider.make_notification_call: failed, empty body")
raise FailedToMakeCall(graceful_msg=f"Failed make notification call to {number}, empty body")
call_id = body.get("call_id")
if not call_id:
logger.error(f"ZvonokPhoneProvider.make_notification_call: failed, missing call id")
logger.error("ZvonokPhoneProvider.make_notification_call: failed, missing call id")
raise FailedToMakeCall(graceful_msg=self._get_graceful_msg(body, number))
logger.info(f"ZvonokPhoneProvider.make_notification_call: success, call_id {call_id}")
@ -66,7 +66,7 @@ class ZvonokPhoneProvider(PhoneProvider):
response.raise_for_status()
body = response.json()
if not body:
logger.error(f"ZvonokPhoneProvider.make_call: failed, empty body")
logger.error("ZvonokPhoneProvider.make_call: failed, empty body")
raise FailedToMakeCall(graceful_msg=f"Failed make call to {number}, empty body")
call_id = body.get("call_id")
@ -116,7 +116,7 @@ class ZvonokPhoneProvider(PhoneProvider):
response.raise_for_status()
body = response.json()
if not body:
logger.error(f"ZvonokPhoneProvider.make_verification_call: failed, empty body")
logger.error("ZvonokPhoneProvider.make_verification_call: failed, empty body")
raise FailedToMakeCall(graceful_msg=f"Failed make verification call to {number}, empty body")
call_id = body.get("call_id")

View file

@ -28,7 +28,7 @@ def write_maintenance_insight_log(instance, user, event: MaintenanceEvent):
if team:
log_line += f" team={json.dumps(team.name)} team_id={team.public_primary_key}"
else:
log_line += f' team="General"'
log_line += ' team="General"'
if user:
username = json.dumps(user.username)
user_id = user.public_primary_key

View file

@ -3,6 +3,7 @@ import os
import time
import celery
from celery import Celery
from celery.app.log import TaskFormatter
from celery.utils.debug import memdump, sample_mem
from celery.utils.log import get_task_logger
@ -19,7 +20,6 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.prod")
logger = get_task_logger(__name__)
logger.setLevel(logging.DEBUG)
from celery import Celery # noqa: E402
app = Celery("proj")

View file

@ -1,14 +1,13 @@
[tool.isort]
profile = "black"
line_length=120
# TODO: upgrade this to python 3.11
py_version=39
float_to_top=true
py_version=311
extend_skip_glob = "**/migrations/**"
[tool.black]
line-length = 120
# TODO: upgrade this to python 3.11
target-version = ["py39"]
target-version = ["py311"]
force-exclude = "migrations"
[tool.mypy]

View file

@ -1,5 +1,7 @@
import os
from .base import * # noqa: F401, F403
try:
import uwsgi
from prometheus_client import multiprocess
@ -13,7 +15,6 @@ except ModuleNotFoundError:
# Only works under uwsgi web server environment
pass
from .base import * # noqa
SLACK_SIGNING_SECRET = os.environ.get("SLACK_SIGNING_SECRET")
SLACK_SIGNING_SECRET_LIVE = os.environ.get("SLACK_SIGNING_SECRET_LIVE", "")

View file

@ -1,6 +1,6 @@
[flake8]
max-line-length = 180
extend-ignore = F541, E203
extend-ignore = E203
extend-exclude = */migrations/*
ban-relative-imports = parents
banned-modules =