Enable flake8-bugbear, fix issues (#3454)

Enables [flake8-bugbear](https://github.com/PyCQA/flake8-bugbear),
checking for bugs/design problems, and [fixes the issues
found](https://pastebin.com/fEDBz6Ta) (some interesting ones,
particularly with mutable args).

Related to https://github.com/grafana/oncall/pull/3448
This commit is contained in:
Matias Bordese 2023-11-29 12:04:48 -03:00 committed by GitHub
parent 2fdd885abe
commit 7aa78f5f73
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
29 changed files with 64 additions and 51 deletions

View file

@@ -35,6 +35,7 @@ repos:
files: ^engine
args: [--config=engine/tox.ini]
additional_dependencies:
- flake8-bugbear
- flake8-tidy-imports
- id: flake8
name: flake8 - pd-migrator

View file

@@ -21,10 +21,11 @@ class AlertGroupForAlertManager(AlertGroup):  # type: ignore[django-manager-miss
non_resolved_hashes = set()
hash = alert.get_integration_optimization_hash()
if alert.calculated_is_resolve_signal:
# Calculate leftover hashes
for alert in AlertForAlertManager.objects.filter(group=self).exclude(pk=alert.pk)[
other_alerts = AlertForAlertManager.objects.filter(group=self).exclude(pk=alert.pk)[
: AlertGroupForAlertManager.MAX_ALERTS_IN_GROUP_FOR_AUTO_RESOLVE
]:
]
# Calculate leftover hashes
for alert in other_alerts:
if alert.calculated_is_resolve_signal:
try:
non_resolved_hashes.remove(alert.get_integration_optimization_hash())

View file

@@ -532,27 +532,27 @@ class AlertReceiveChannel(IntegrationOptionsMixin, MaintainableObject):
@property
def heartbeat_restored_title(self):
return getattr(self.heartbeat_module, "heartbeat_restored_title")
return self.heartbeat_module.heartbeat_restored_title
@property
def heartbeat_restored_message(self):
return getattr(self.heartbeat_module, "heartbeat_restored_message")
return self.heartbeat_module.heartbeat_restored_message
@property
def heartbeat_restored_payload(self):
return getattr(self.heartbeat_module, "heartbeat_restored_payload")
return self.heartbeat_module.heartbeat_restored_payload
@property
def heartbeat_expired_title(self):
return getattr(self.heartbeat_module, "heartbeat_expired_title")
return self.heartbeat_module.heartbeat_expired_title
@property
def heartbeat_expired_message(self):
return getattr(self.heartbeat_module, "heartbeat_expired_message")
return self.heartbeat_module.heartbeat_expired_message
@property
def heartbeat_expired_payload(self):
return getattr(self.heartbeat_module, "heartbeat_expired_payload")
return self.heartbeat_module.heartbeat_expired_payload
@property
def heartbeat_module(self):

View file

@@ -46,7 +46,7 @@ def test_start_maintenance_integration_multiple_previous_instances(
organization, integration=AlertReceiveChannel.INTEGRATION_GRAFANA, author=user
)
# 2 maintenance integrations were created in the past
for i in range(2):
for _ in range(2):
AlertReceiveChannel.create(
organization=organization, integration=AlertReceiveChannel.INTEGRATION_MAINTENANCE, author=user
)

View file

@@ -44,7 +44,7 @@ def render_curl_command(webhook_url, http_request_type, post_kwargs):
# TODO: remove this function when we remove CustomButton model
def request_outgoing_webhook(webhook_url, http_request_type, post_kwargs={}) -> Tuple[bool, str]:
def request_outgoing_webhook(webhook_url, http_request_type, post_kwargs=None) -> Tuple[bool, str]:
OUTGOING_WEBHOOK_TIMEOUT = 10
if http_request_type not in ["POST", "GET"]:
raise Exception(f"Wrong http_method parameter: {http_request_type}")
@@ -65,6 +65,9 @@ def request_outgoing_webhook(webhook_url, http_request_type, post_kwargs={}) ->
if ipaddress.ip_address(socket.gethostbyname(webhook_url_ip_address)).is_private:
return False, "This url is not supported for outgoing webhooks"
if post_kwargs is None:
post_kwargs = {}
try:
if http_request_type == "POST":
r = requests.post(webhook_url, timeout=OUTGOING_WEBHOOK_TIMEOUT, **post_kwargs)

View file

@@ -24,11 +24,11 @@ class EscalationChainListSerializer(EscalationChainSerializer):
def get_number_of_integrations(self, obj):
# num_integrations param added in queryset via annotate. Check EscalationChainViewSet.get_queryset
return getattr(obj, "num_integrations")
return getattr(obj, "num_integrations", 0)
def get_number_of_routes(self, obj):
# num_routes param added in queryset via annotate. Check EscalationChainViewSet.get_queryset
return getattr(obj, "num_routes")
return getattr(obj, "num_routes", 0)
class FilterEscalationChainSerializer(serializers.ModelSerializer):

View file

@@ -190,7 +190,7 @@ def test_labels_feature_false(
make_user_auth_headers,
settings,
):
setattr(settings, "FEATURE_LABELS_ENABLED_FOR_ALL", False)
settings.FEATURE_LABELS_ENABLED_FOR_ALL = False
organization, user, token = make_organization_and_user_with_plugin_token()
client = APIClient()

View file

@@ -1610,7 +1610,7 @@ def test_related_escalation_chains(
)
# setup escalation chains linked to web schedule
escalation_chains = []
for i in range(3):
for _ in range(3):
chain = make_escalation_chain(user.organization)
make_escalation_policy(
escalation_chain=chain,

View file

@@ -169,7 +169,9 @@ def test_teams_number_of_users_currently_oncall_attribute_works_properly(
team2.users.set([user1])
team3.users.set([user3])
def _make_schedule(team=None, oncall_users=[]):
def _make_schedule(team=None, oncall_users=None):
if oncall_users is None:
oncall_users = []
schedule = make_schedule(organization, schedule_class=OnCallScheduleWeb)
if team:

View file

@@ -49,12 +49,14 @@ def overridden_complete_slack_auth(request, backend, *args, **kwargs):
# if this was a user login/linking account, redirect to profile
redirect_to = "/a/grafana-oncall-app/users/me"
result = do_complete(
request.backend,
_do_login,
kwargs.update(
user=request.user,
redirect_name=REDIRECT_FIELD_NAME,
request=request,
)
result = do_complete(
request.backend,
_do_login,
*args,
**kwargs,
)

View file

@@ -98,7 +98,7 @@ def build_paged_responses(page_size, pages, total_items):
remaining -= page_size
items = []
for j in range(page_item_count):
for _ in range(page_item_count):
items.append({"id": str(uuid.uuid4())})
next_cursor = None if i == pages - 1 else i * page_size
response.append(({"items": items, "nextCursor": next_cursor}, {}))

View file

@@ -82,7 +82,7 @@ class ApplicationMetricsCollector:
alert_groups_total_keys
)
for org_key, ag_states in org_ag_states.items():
for integration, integration_data in ag_states.items():
for _, integration_data in ag_states.items():
# Labels values should have the same order as _integration_labels_with_state
labels_values = [
integration_data["integration_name"], # integration
@@ -111,7 +111,7 @@ class ApplicationMetricsCollector:
alert_groups_response_time_keys
)
for org_key, ag_response_time in org_ag_response_times.items():
for integration, integration_data in ag_response_time.items():
for _, integration_data in ag_response_time.items():
# Labels values should have the same order as _integration_labels
labels_values = [
integration_data["integration_name"], # integration
@@ -143,7 +143,7 @@ class ApplicationMetricsCollector:
user_was_notified_keys
)
for org_key, users in org_users.items():
for user, user_data in users.items():
for _, user_data in users.items():
# Labels values should have the same order as _user_labels
labels_values = [
user_data["user_username"], # username

View file

@@ -53,7 +53,7 @@ TEMPLATE_PUBLIC_API_NAME_TO_DB_FIELD = {
TEMPLATES_WITH_SEPARATE_DB_FIELD = [SLACK, WEB, PHONE_CALL, SMS, TELEGRAM] + PUBLIC_BEHAVIOUR_TEMPLATES_FIELDS
PUBLIC_API_CUSTOMIZABLE_NOTIFICATION_CHANNEL_TEMPLATES = [SLACK, WEB, PHONE_CALL, SMS, TELEGRAM]
for backend_id, backend in get_messaging_backends():
for _, backend in get_messaging_backends():
if backend.customizable_templates:
PUBLIC_API_CUSTOMIZABLE_NOTIFICATION_CHANNEL_TEMPLATES.append(backend.slug)

View file

@@ -17,7 +17,7 @@ class BaseChannelFilterSerializer(OrderedModelSerializer):
"""Update existing fields of the serializer with messaging backends fields"""
super().__init__(*args, **kwargs)
for backend_id, backend in get_messaging_backends():
for _, backend in get_messaging_backends():
if backend is None:
continue
field = backend.slug

View file

@@ -331,7 +331,7 @@ def test_create_schedules_same_name(make_organization_and_user_with_token):
"time_zone": "UTC",
}
for i in range(2):
for _ in range(2):
response = client.post(url, data=data, format="json", HTTP_AUTHORIZATION=f"{token}")
assert response.status_code == status.HTTP_201_CREATED

View file

@@ -297,7 +297,7 @@ def test_get_webhook_responses(
webhook.refresh_from_db()
response_count = 20
for i in range(0, response_count):
for _ in range(0, response_count):
make_webhook_response(
webhook=webhook,
trigger_type=webhook.trigger_type,

View file

@@ -488,7 +488,7 @@ def parse_username_from_string(string: str) -> str:
Example output:
bob@company.com
"""
return re.sub(RE_PRIORITY, "", string.strip(), 1).strip()
return re.sub(RE_PRIORITY, "", string.strip(), count=1).strip()
def parse_priority_from_string(string: str) -> int:

View file

@@ -1762,7 +1762,7 @@ def test_refresh_ical_final_schedule_ok(
),
}
for i in range(2):
for _ in range(2):
# running multiple times keeps the same events in place
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_AFTER", 1):
with patch("apps.schedules.models.on_call_schedule.EXPORT_WINDOW_DAYS_BEFORE", 0):

View file

@@ -63,13 +63,16 @@ class AlertGroupSlackService:
pass
def publish_message_to_alert_group_thread(
self, alert_group: "AlertGroup", attachments=[], mrkdwn=True, unfurl_links=True, text=None
self, alert_group: "AlertGroup", attachments=None, mrkdwn=True, unfurl_links=True, text=None
) -> None:
# TODO: refactor checking the possibility of sending message to slack
# do not try to post message to slack if integration is rate limited
if alert_group.channel.is_rate_limited_in_slack:
return
if attachments is None:
attachments = []
try:
result = self._slack_client.chat_postMessage(
channel=alert_group.slack_message.channel_id,

View file

@@ -85,7 +85,8 @@ class SlackOAuth2V2(SlackOAuth2):
)
self.process_error(response)
access_token = response["authed_user"]["access_token"]
return self.do_auth(access_token, response=response, *args, **kwargs)
kwargs.update(response=response)
return self.do_auth(access_token, *args, **kwargs)
@handle_http_errors
def do_auth(self, access_token, *args, **kwargs):

View file

@@ -52,9 +52,9 @@ def test_integration_does_not_raise_exception_organization_moved(
try:
integration_view.dispatch(alert_channel_key=alert_receive_channel.token)
assert False
raise AssertionError()
except OrganizationMovedException:
assert False
raise AssertionError()
except Exception:
assert True
@@ -83,7 +83,7 @@ def test_integration_raises_exception_organization_moved(
try:
integration_view.dispatch(alert_channel_key=alert_receive_channel.token)
assert False
raise AssertionError()
except OrganizationMovedException as e:
assert e.organization == organization
@@ -137,7 +137,7 @@ def test_api_token_does_not_raise_exception_organization_moved(
api_auth.authenticate_credentials(token)
assert True
except OrganizationMovedException:
assert False
raise AssertionError()
@pytest.mark.django_db
@@ -155,7 +155,7 @@ def test_api_token_raises_exception_organization_moved(
try:
api_auth = ApiTokenAuthentication()
api_auth.authenticate_credentials(token)
assert False
raise AssertionError()
except OrganizationMovedException as e:
assert e.organization == organization
@@ -178,7 +178,7 @@ def test_schedule_export_token_does_not_raise_exception_organization_moved(
schedule_auth.authenticate_credentials(token, schedule.public_primary_key)
assert True
except OrganizationMovedException:
assert False
raise AssertionError()
@pytest.mark.django_db
@@ -198,7 +198,7 @@ def test_schedule_export_token_raises_exception_organization_moved(
try:
schedule_auth = ScheduleExportAuthentication()
schedule_auth.authenticate_credentials(token, schedule.public_primary_key)
assert False
raise AssertionError()
except OrganizationMovedException as e:
assert e.organization == organization
@@ -218,7 +218,7 @@ def test_user_schedule_export_token_does_not_raise_exception_organization_moved(
user_schedule_auth.authenticate_credentials(token, admin.public_primary_key)
assert True
except OrganizationMovedException:
assert False
raise AssertionError()
@pytest.mark.django_db
@@ -236,7 +236,7 @@ def test_user_schedule_export_token_raises_exception_organization_moved(
try:
user_schedule_auth = UserScheduleExportAuthentication()
user_schedule_auth.authenticate_credentials(token, admin.public_primary_key)
assert False
raise AssertionError()
except OrganizationMovedException as e:
assert e.organization == organization

View file

@@ -256,7 +256,7 @@ class Webhook(models.Model):
elif self.http_method == "OPTIONS":
r = requests.options(url, timeout=OUTGOING_WEBHOOK_TIMEOUT, **request_kwargs)
else:
raise Exception(f"Unsupported http method: {self.http_method}")
raise ValueError(f"Unsupported http method: {self.http_method}")
return r
# Insight logs

View file

@@ -260,7 +260,7 @@ def test_execute_webhook_ok_forward_all(
alert_group = make_alert_group(
alert_receive_channel, acknowledged_at=timezone.now(), acknowledged=True, acknowledged_by=user.pk
)
for i in range(3):
for _ in range(3):
make_user_notification_policy_log_record(
author=notified_user,
alert_group=alert_group,

View file

@@ -239,8 +239,8 @@ def test_make_request(make_organization, make_custom_webhook):
assert expected_call.call_args == call("url", timeout=OUTGOING_WEBHOOK_TIMEOUT, foo="bar")
# invalid
with pytest.raises(Exception):
webhook = make_custom_webhook(organization=organization, http_method="NOT")
webhook = make_custom_webhook(organization=organization, http_method="NOT")
with pytest.raises(ValueError):
webhook.make_request("url", {"foo": "bar"})

View file

@@ -259,7 +259,7 @@ NOTIFICATION_CHANNEL_TO_TEMPLATER_MAP = {
}
# add additionally supported messaging backends
for backend_id, backend in get_messaging_backends():
for _, backend in get_messaging_backends():
if backend.templater is not None:
NOTIFICATION_CHANNEL_OPTIONS.append(backend.slug)
NOTIFICATION_CHANNEL_TO_TEMPLATER_MAP[backend.slug] = backend.get_templater_class()

View file

@@ -123,7 +123,7 @@ def format_state_for_insight_log(diff_dict):
"""
fields_to_prune = ()
fields_to_hide = ("verified_phone_number", "unverified_phone_number")
for k, v in diff_dict.items():
for k, _ in diff_dict.items():
if k in fields_to_prune:
diff_dict[k] = "Diff not supported"
if k in fields_to_hide:

View file

@@ -245,7 +245,7 @@ def test_ordered_model_create_concurrent():
exceptions = []
def create():
for loop in range(LOOPS):
for _ in range(LOOPS):
try:
TestOrderedModel.objects.create(test_field="test")
except Exception as e:

View file

@@ -223,7 +223,7 @@ def urlize_with_respect_to_a(html):
soup = BeautifulSoup(html, features="html.parser")
textNodes = soup.find_all(string=True)
for textNode in textNodes:
if textNode.parent and getattr(textNode.parent, "name") == "a":
if textNode.parent and getattr(textNode.parent, "name", None) == "a":
continue
urlizedText = urlize(textNode)
textNode.replaceWith(BeautifulSoup(urlizedText, features="html.parser"))

View file

@@ -187,7 +187,7 @@ def mock_apply_async(monkeypatch):
@pytest.fixture(autouse=True)
def mock_is_labels_feature_enabled(settings):
setattr(settings, "FEATURE_LABELS_ENABLED_FOR_ALL", True)
settings.FEATURE_LABELS_ENABLED_FOR_ALL = True
@pytest.fixture(autouse=True)
@@ -199,8 +199,8 @@ def clear_ical_users_cache():
@pytest.fixture
def mock_is_labels_feature_enabled_for_org(settings):
def _mock_is_labels_feature_enabled_for_org(org_id):
setattr(settings, "FEATURE_LABELS_ENABLED_FOR_ALL", False)
setattr(settings, "FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS", [org_id])
settings.FEATURE_LABELS_ENABLED_FOR_ALL = False
settings.FEATURE_LABELS_ENABLED_FOR_GRAFANA_ORGS = [org_id]
return _mock_is_labels_feature_enabled_for_org