diff --git a/src/sentry/digests/backends/redis.py b/src/sentry/digests/backends/redis.py
index 04cb6169477404..2db5c24cd54b43 100644
--- a/src/sentry/digests/backends/redis.py
+++ b/src/sentry/digests/backends/redis.py
@@ -161,9 +161,7 @@ def schedule(
                     yield ScheduleEntry(key.decode("utf-8"), float(timestamp))
             except Exception as error:
                 logger.error(
-                    "Failed to perform scheduling for partition %r due to error: %r",
-                    host,
-                    error,
+                    f"Failed to perform scheduling for partition {host} due to error: {error}",
                     exc_info=True,
                 )
 
@@ -183,9 +181,7 @@ def maintenance(self, deadline: float, timestamp: Optional[float] = None) -> Non
                 self.__maintenance_partition(host, deadline, timestamp)
             except Exception as error:
                 logger.error(
-                    "Failed to perform maintenance on digest partition %r due to error: %r",
-                    host,
-                    error,
+                    f"Failed to perform maintenance on digest partition {host} due to error: {error}",
                     exc_info=True,
                 )
 
diff --git a/src/sentry/digests/notifications.py b/src/sentry/digests/notifications.py
index e69c027f78dfa7..1174866fe95d03 100644
--- a/src/sentry/digests/notifications.py
+++ b/src/sentry/digests/notifications.py
@@ -44,14 +44,13 @@ def split_key(key: str) -> Tuple["Project", "ActionTargetType", Optional[str]]:
 def unsplit_key(
     project: "Project", target_type: ActionTargetType, target_identifier: Optional[str]
 ) -> str:
-    return "mail:p:{}:{}:{}".format(
-        project.id, target_type.value, target_identifier if target_identifier is not None else ""
-    )
+    target_str = target_identifier if target_identifier is not None else ""
+    return f"mail:p:{project.id}:{target_type.value}:{target_str}"
 
 
 def event_to_record(event: Event, rules: Sequence[Rule]) -> Record:
     if not rules:
-        logger.warning("Creating record for %r that does not contain any rules!", event)
+        logger.warning(f"Creating record for {event} that does not contain any rules!")
 
     return Record(
         event.event_id,
@@ -174,7 +173,7 @@ def rewrite_record(
     if group is not None:
         event.group = group
     else:
-        logger.debug("%r could not be associated with a group.", record)
+        logger.debug(f"{record} could not be associated with a group.")
         return None
 
     return Record(
@@ -190,7 +189,7 @@ def group_records(
         group = record.value.event.group
         rules = record.value.rules
         if not rules:
-            logger.debug("%r has no associated rules, and will not be added to any groups.", record)
+            logger.debug(f"{record} has no associated rules, and will not be added to any groups.")
 
         for rule in rules:
             groups[rule][group].append(record)
diff --git a/src/sentry/digests/utilities.py b/src/sentry/digests/utilities.py
index e5ceaf1331fa0f..6bfb9b7900200d 100644
--- a/src/sentry/digests/utilities.py
+++ b/src/sentry/digests/utilities.py
@@ -123,7 +123,7 @@ def convert_actors_to_users(
         elif actor.type == User:
            events_by_user[actor.id].update(events)
         else:
-            raise ValueError("Unknown Actor type: %s" % actor.type)
+            raise ValueError(f"Unknown Actor type: {actor.type}")
     return events_by_user
 
 
diff --git a/src/sentry/integrations/bitbucket/integration.py b/src/sentry/integrations/bitbucket/integration.py
index 625b6add43b66c..9316ff76bbd1ff 100644
--- a/src/sentry/integrations/bitbucket/integration.py
+++ b/src/sentry/integrations/bitbucket/integration.py
@@ -96,8 +96,8 @@ def get_repositories(self, query=None):
                 for repo in resp.get("values", [])
             ]
 
-        exact_query = ('name="%s"' % (query)).encode("utf-8")
-        fuzzy_query = ('name~"%s"' % (query)).encode("utf-8")
+        exact_query = f'name="{query}"'.encode()
+        fuzzy_query = f'name~"{query}"'.encode()
         exact_search_resp = self.get_client().search_repositories(username, exact_query)
         fuzzy_search_resp = self.get_client().search_repositories(username, fuzzy_query)
 
diff --git a/src/sentry/integrations/bitbucket/search.py b/src/sentry/integrations/bitbucket/search.py
index 53e01fce25fcab..7443ae17a55242 100644
--- a/src/sentry/integrations/bitbucket/search.py
+++ b/src/sentry/integrations/bitbucket/search.py
@@ -32,7 +32,7 @@ def get(self, request, organization, integration_id):
             if not repo:
                 return Response({"detail": "repo is a required parameter"}, status=400)
 
-            full_query = ('title~"%s"' % (query)).encode("utf-8")
+            full_query = f'title~"{query}"'.encode()
             try:
                 resp = installation.get_client().search_issues(repo, full_query)
             except ApiError as e:
diff --git a/src/sentry/integrations/bitbucket_server/client.py b/src/sentry/integrations/bitbucket_server/client.py
index 132d3ea21770e5..1de5490c8b3bb4 100644
--- a/src/sentry/integrations/bitbucket_server/client.py
+++ b/src/sentry/integrations/bitbucket_server/client.py
@@ -207,13 +207,12 @@ def _get_values(self, uri, params, max_pages=1000000):
             new_params = dict.copy(params)
             new_params["start"] = start
             logger.debug(
-                "Loading values for paginated uri starting from %s",
-                start,
+                f"Loading values for paginated uri starting from {start}",
                 extra={"uri": uri, "params": new_params},
             )
             data = self.get(uri, auth=self.get_auth(), params=new_params)
             logger.debug(
-                "%s values loaded", len(data["values"]), extra={"uri": uri, "params": new_params}
+                f'{len(data["values"])} values loaded', extra={"uri": uri, "params": new_params}
             )
 
             values += data["values"]
diff --git a/src/sentry/integrations/gitlab/integration.py b/src/sentry/integrations/gitlab/integration.py
index 6a855ed8dcbedb..3aa6464b4b4778 100644
--- a/src/sentry/integrations/gitlab/integration.py
+++ b/src/sentry/integrations/gitlab/integration.py
@@ -248,8 +248,7 @@ def dispatch(self, request, pipeline):
         return render_to_response(
             template="sentry/integrations/gitlab-config.html",
             context={
-                "next_url": "%s%s"
-                % (absolute_uri("extensions/gitlab/setup/"), "?completed_installation_guide"),
+                "next_url": f'{absolute_uri("extensions/gitlab/setup/")}?completed_installation_guide',
                 "setup_values": [
                     {"label": "Name", "value": "Sentry"},
                     {"label": "Redirect URI", "value": absolute_uri("/extensions/gitlab/setup/")},
diff --git a/src/sentry/integrations/jira/client.py b/src/sentry/integrations/jira/client.py
index de02e9d107049f..c274f4b9763194 100644
--- a/src/sentry/integrations/jira/client.py
+++ b/src/sentry/integrations/jira/client.py
@@ -135,11 +135,12 @@ def get_issue(self, issue_id):
         return self.get(self.ISSUE_URL % (issue_id,))
 
     def search_issues(self, query):
+        q = query.replace('"', '\\"')
         # check if it looks like an issue id
         if ISSUE_KEY_RE.match(query):
-            jql = 'id="%s"' % query.replace('"', '\\"')
+            jql = f'id="{q}"'
         else:
-            jql = 'text ~ "%s"' % query.replace('"', '\\"')
+            jql = f'text ~ "{q}"'
         return self.get(self.SEARCH_URL, params={"jql": jql})
 
     def create_comment(self, issue_key, comment):
diff --git a/src/sentry/integrations/pagerduty/integration.py b/src/sentry/integrations/pagerduty/integration.py
index afa947a74bdcd3..ee50b71d09176f 100644
--- a/src/sentry/integrations/pagerduty/integration.py
+++ b/src/sentry/integrations/pagerduty/integration.py
@@ -176,10 +176,7 @@ def get_app_url(self, account_name=None):
         app_id = options.get("pagerduty.app-id")
         setup_url = absolute_uri("/extensions/pagerduty/setup/")
 
-        return (
-            "https://%s.pagerduty.com/install/integration?app_id=%s&redirect_url=%s&version=2"
-            % (account_name, app_id, setup_url)
-        )
+        return f"https://{account_name}.pagerduty.com/install/integration?app_id={app_id}&redirect_url={setup_url}&version=2"
 
     def dispatch(self, request, pipeline):
         if "config" in request.GET:
diff --git a/src/sentry/integrations/slack/unfurl/discover.py b/src/sentry/integrations/slack/unfurl/discover.py
index 80d42642a61dad..ea57e954d005dd 100644
--- a/src/sentry/integrations/slack/unfurl/discover.py
+++ b/src/sentry/integrations/slack/unfurl/discover.py
@@ -119,8 +119,7 @@ def unfurl_discover(
 
             except Exception as exc:
                 logger.error(
-                    "Failed to load saved query for unfurl: %s",
-                    str(exc),
+                    f"Failed to load saved query for unfurl: {exc}",
                     exc_info=True,
                 )
             else:
@@ -210,8 +209,7 @@ def unfurl_discover(
             url = generate_chart(style, chart_data)
         except RuntimeError as exc:
             logger.error(
-                "Failed to generate chart for discover unfurl: %s",
-                str(exc),
+                f"Failed to generate chart for discover unfurl: {exc}",
                 exc_info=True,
             )
             continue
diff --git a/src/sentry/integrations/vercel/integration.py b/src/sentry/integrations/vercel/integration.py
index a52be85cc7aa52..32ac132f6be7ce 100644
--- a/src/sentry/integrations/vercel/integration.py
+++ b/src/sentry/integrations/vercel/integration.py
@@ -94,10 +94,8 @@ def metadata(self):
 
     def get_dynamic_display_information(self):
         organization = Organization.objects.get_from_cache(id=self.organization_id)
-        source_code_link = absolute_uri(
-            "/settings/%s/integrations/?%s"
-            % (organization.slug, urlencode({"category": "source code management"}))
-        )
+        qs = urlencode({"category": "source code management"})
+        source_code_link = absolute_uri(f"/settings/{organization.slug}/integrations/?{qs}")
         add_project_link = absolute_uri(f"/organizations/{organization.slug}/projects/new/")
         return {
             "configure_integration": {
diff --git a/src/sentry/integrations/vsts/search.py b/src/sentry/integrations/vsts/search.py
index 82587eb398711f..9d82d3733784ab 100644
--- a/src/sentry/integrations/vsts/search.py
+++ b/src/sentry/integrations/vsts/search.py
@@ -31,8 +31,7 @@ def get(self, request: Request, organization: Organization, integration_id: int)
             return Response(
                 [
                     {
-                        "label": "(%s) %s"
-                        % (i["fields"]["system.id"], i["fields"]["system.title"]),
+                        "label": f'({i["fields"]["system.id"]}) {i["fields"]["system.title"]}',
                         "value": i["fields"]["system.id"],
                     }
                     for i in resp.get("results", [])
diff --git a/src/sentry/tasks/digests.py b/src/sentry/tasks/digests.py
index 35a3271121ef3f..90e8bab7717304 100644
--- a/src/sentry/tasks/digests.py
+++ b/src/sentry/tasks/digests.py
@@ -40,7 +40,7 @@ def deliver_digest(key, schedule_timestamp=None):
     try:
         project, target_type, target_identifier = split_key(key)
     except Project.DoesNotExist as error:
-        logger.info("Cannot deliver digest %r due to error: %s", key, error)
+        logger.info(f"Cannot deliver digest {key} due to error: {error}")
         digests.delete(key)
         return
 
@@ -53,7 +53,7 @@ def deliver_digest(key, schedule_timestamp=None):
         with digests.digest(key, minimum_delay=minimum_delay) as records:
             digest, logs = build_digest(project, records)
     except InvalidState as error:
-        logger.info("Skipped digest delivery: %s", error, exc_info=True)
+        logger.info(f"Skipped digest delivery: {error}", exc_info=True)
         return
 
     if digest: