3 changes: 1 addition & 2 deletions src/sentry/hybridcloud/tasks/deliver_webhooks.py
@@ -1,7 +1,6 @@
import datetime
import logging
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import Never

import orjson
import sentry_sdk
@@ -85,7 +84,7 @@ class DeliveryFailed(Exception):
namespace=hybridcloud_control_tasks,
),
)
def schedule_webhook_delivery(**kwargs: Never) -> None:
def schedule_webhook_delivery() -> None:
"""
Find mailboxes that contain undelivered webhooks that were scheduled
to be delivered now or in the past.
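A note on the removed annotation: `**kwargs: Never` is a typing idiom that makes every keyword argument a type error while keeping the runtime signature permissive. A minimal standalone sketch of the idiom (not Sentry code):

```python
from typing import Never

def takes_no_kwargs(**kwargs: Never) -> None:
    """Accepts no keyword arguments as far as the type checker is concerned."""
    # Every kwarg value would need type Never, which has no values,
    # so any call that passes a keyword argument fails type checking.

takes_no_kwargs()            # OK
takes_no_kwargs(retry=True)  # mypy: incompatible type "bool"; expected "Never"
```

Dropping the annotation in favor of a bare signature goes one step further: the task now declares that it takes no parameters at all.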
8 changes: 2 additions & 6 deletions src/sentry/incidents/tasks.py
@@ -5,11 +5,7 @@

from django.db import router, transaction

from sentry.incidents.models.alert_rule import (
AlertRuleStatus,
AlertRuleTriggerAction,
AlertRuleTriggerActionMethod,
)
from sentry.incidents.models.alert_rule import AlertRuleStatus, AlertRuleTriggerAction
from sentry.incidents.models.incident import (
Incident,
IncidentActivity,
@@ -98,7 +94,7 @@ def handle_trigger_action(
action_id: int,
incident_id: int,
project_id: int,
method: AlertRuleTriggerActionMethod,
method: str,
new_status: int,
metric_value: int | None = None,
**kwargs: Any,
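The `method` argument now crosses the task boundary as a plain string, which keeps the payload JSON-serializable. A hedged sketch of the implied pattern follows; the enum members below are assumptions, not copied from the codebase:

```python
from enum import Enum

class AlertRuleTriggerActionMethod(str, Enum):
    # Hypothetical stand-in for the enum in sentry.incidents.models.alert_rule;
    # the member names here are assumed for illustration.
    FIRE = "fire"
    RESOLVE = "resolve"

# Producer side: put the raw string value in the task payload.
method_arg = AlertRuleTriggerActionMethod.FIRE.value  # "fire"

# Consumer side: re-validate inside the task body if enum semantics are needed.
def handle_method(method: str) -> None:
    parsed = AlertRuleTriggerActionMethod(method)  # raises ValueError on bad input
    print(parsed)
```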
38 changes: 17 additions & 21 deletions src/sentry/relocation/tasks/process.py
@@ -172,9 +172,7 @@
),
),
)
def uploading_start(
uuid: UUID | str, replying_region_name: str | None, org_slug: str | None
) -> None:
def uploading_start(uuid: str, replying_region_name: str | None, org_slug: str | None) -> None:
"""
The very first action in the relocation pipeline. In the case of a `SAAS_TO_SAAS` relocation, it
will trigger the export of the requested organization from the region it currently lives in. If
@@ -459,9 +457,7 @@ def fulfill_cross_region_export_request(
),
),
)
def cross_region_export_timeout_check(
uuid: UUID | str,
) -> None:
def cross_region_export_timeout_check(uuid: str) -> None:
"""
Not part of the primary `OrderedTask` queue. This task is only used to ensure that cross-region
export requests don't hang indefinitely.
@@ -524,7 +520,7 @@ def cross_region_export_timeout_check(
),
),
)
def uploading_complete(uuid: UUID | str) -> None:
def uploading_complete(uuid: str) -> None:
"""
Just check to ensure that uploading the (potentially very large!) backup file has completed
before we try to do all sorts of fun stuff with it.
@@ -578,7 +574,7 @@ def uploading_complete(uuid: UUID | str) -> None:
),
),
)
def preprocessing_scan(uuid: UUID | str) -> None:
def preprocessing_scan(uuid: str) -> None:
"""
Performs the very first part of the `PREPROCESSING` step of a `Relocation`, which involves
decrypting the user-supplied tarball and picking out some useful information from it. This lets
@@ -759,7 +755,7 @@ def preprocessing_scan(uuid: UUID | str) -> None:
),
),
)
def preprocessing_transfer(uuid: UUID | str) -> None:
def preprocessing_transfer(uuid: str) -> None:
"""
We currently have the user's relocation data stored in the main filestore bucket, but we need to
move it to the relocation bucket. This task handles that transfer.
@@ -854,7 +850,7 @@ def preprocessing_transfer(uuid: UUID | str) -> None:
),
),
)
def preprocessing_baseline_config(uuid: UUID | str) -> None:
def preprocessing_baseline_config(uuid: str) -> None:
"""
Pulls down the global config data we'll need to check for collisions and global data integrity.

@@ -912,7 +908,7 @@ def preprocessing_baseline_config(uuid: UUID | str) -> None:
),
),
)
def preprocessing_colliding_users(uuid: UUID | str) -> None:
def preprocessing_colliding_users(uuid: str) -> None:
"""
Pulls down any already existing users whose usernames match those found in the import - we'll
need to validate that none of these are mutated during import.
@@ -968,7 +964,7 @@ def preprocessing_colliding_users(uuid: UUID | str) -> None:
),
),
)
def preprocessing_complete(uuid: UUID | str) -> None:
def preprocessing_complete(uuid: str) -> None:
"""
This task ensures that every file CloudBuild will need to do its work is actually present and
available. Even if we've "finished" our uploads from the previous step, they may still not (yet)
@@ -1192,7 +1188,7 @@ def _update_relocation_validation_attempt(
),
),
)
def validating_start(uuid: UUID | str) -> None:
def validating_start(uuid: str) -> None:
"""
Calls into Google CloudBuild and kicks off a validation run.

@@ -1276,7 +1272,7 @@ def camel_to_snake_keep_underscores(value):
retry=Retry(times=MAX_VALIDATION_POLLS, on=(Exception,), times_exceeded=LastAction.Discard),
),
)
def validating_poll(uuid: UUID | str, build_id: str) -> None:
def validating_poll(uuid: str, build_id: str) -> None:
"""
Checks the progress of a Google CloudBuild validation run.

@@ -1381,7 +1377,7 @@ def validating_poll(uuid: UUID | str, build_id: str) -> None:
),
),
)
def validating_complete(uuid: UUID | str, build_id: str) -> None:
def validating_complete(uuid: str, build_id: str) -> None:
"""
Wraps up a validation run, and reports on what we found. If this task is being called, the
CloudBuild run has completed successfully, so we just need to figure out if there were any
@@ -1476,7 +1472,7 @@ def validating_complete(uuid: UUID | str, build_id: str) -> None:
),
),
)
def importing(uuid: UUID | str) -> None:
def importing(uuid: str) -> None:
"""
Perform the import on the actual live instance we are targeting.

@@ -1545,7 +1541,7 @@ def importing(uuid: UUID | str) -> None:
),
),
)
def postprocessing(uuid: UUID | str) -> None:
def postprocessing(uuid: str) -> None:
"""
Make the owner of this relocation an owner of all of the organizations we just imported.
"""
@@ -1643,7 +1639,7 @@ def postprocessing(uuid: UUID | str) -> None:
),
),
)
def notifying_unhide(uuid: UUID | str) -> None:
def notifying_unhide(uuid: str) -> None:
"""
Un-hide the just-imported organizations, making them visible to users in the UI.
"""
@@ -1696,7 +1692,7 @@ def notifying_unhide(uuid: UUID | str) -> None:
),
),
)
def notifying_users(uuid: UUID | str) -> None:
def notifying_users(uuid: str) -> None:
"""
Send an email to all users that have been imported, telling them to claim their accounts.
"""
@@ -1776,7 +1772,7 @@ def notifying_users(uuid: UUID | str) -> None:
),
),
)
def notifying_owner(uuid: UUID | str) -> None:
def notifying_owner(uuid: str) -> None:
"""
Send an email to the creator and owner, telling them that their relocation was successful.
"""
@@ -1830,7 +1826,7 @@ def notifying_owner(uuid: UUID | str) -> None:
),
),
)
def completed(uuid: UUID | str) -> None:
def completed(uuid: str) -> None:
"""
Finish up a relocation by marking it a success.
"""
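Every signature in this file narrows `uuid: UUID | str` to `uuid: str`, moving stringification to the caller so the payload stays primitive. A sketch of the resulting convention, assuming the Celery-style `.delay()` dispatch and a hypothetical call site:

```python
import uuid

from sentry.relocation.tasks.process import uploading_start

relocation_uuid = uuid.uuid4()

# str() on a UUID yields the canonical hyphenated form, which
# round-trips through a JSON task payload cleanly.
uploading_start.delay(
    uuid=str(relocation_uuid),  # e.g. "1b4e28ba-2fa1-11d2-883f-0016d3cca427"
    replying_region_name=None,
    org_slug=None,
)
```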
2 changes: 1 addition & 1 deletion src/sentry/tasks/beacon.py
@@ -214,7 +214,7 @@ def send_beacon() -> None:
queue="update",
taskworker_config=TaskworkerConfig(namespace=selfhosted_tasks),
)
def send_beacon_metric(metrics: list[dict[str, Any]], **kwargs: object) -> None:
def send_beacon_metric(metrics: list[dict[str, Any]], **kwargs: Any) -> None:
install_id = get_install_id()

if should_skip_beacon(install_id):
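For the `**kwargs: object` to `**kwargs: Any` swap: both accept the same calls; they differ only in what the function body may do with the values. A small self-contained illustration:

```python
from typing import Any

def strict(**kwargs: object) -> None:
    for value in kwargs.values():
        value.upper()  # mypy error: "object" has no attribute "upper"

def loose(**kwargs: Any) -> None:
    for value in kwargs.values():
        value.upper()  # accepted: Any disables checking on the values
```

Either spelling works for kwargs that are simply ignored; the change just aligns this task with the `Any` convention used in the other signatures in this PR.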
30 changes: 9 additions & 21 deletions src/sentry/tasks/codeowners/update_code_owners_schema.py
@@ -4,10 +4,7 @@
from typing import Any

from sentry import features
from sentry.integrations.models.integration import Integration
from sentry.integrations.services.integration import RpcIntegration
from sentry.models.organization import Organization
from sentry.models.project import Project
from sentry.silo.base import SiloMode
from sentry.tasks.base import instrumented_task, load_model_from_db, retry
from sentry.taskworker.config import TaskworkerConfig
@@ -31,47 +28,38 @@
)
@retry
def update_code_owners_schema(
organization: Organization | int,
integration: Integration | RpcIntegration | int | None = None,
projects: Iterable[Project | int] | None = None,
organization: int,
integration: int | None = None,
projects: list[int] | None = None,
**kwargs: Any,
) -> None:
from sentry.integrations.models.repository_project_path_config import (
RepositoryProjectPathConfig,
)
from sentry.models.projectcodeowners import ProjectCodeOwners

organization = load_model_from_db(Organization, organization)
org = load_model_from_db(Organization, organization)

if not features.has("organizations:integrations-codeowners", organization):
if not features.has("organizations:integrations-codeowners", org):
return
try:
code_owners: Iterable[ProjectCodeOwners] = []

if projects:
projects = [load_model_from_db(Project, project) for project in projects]
code_owners = ProjectCodeOwners.objects.filter(project__in=projects)

integration_id = _unpack_integration_id(integration)
if integration_id is not None:
if integration is not None:
code_mapping_ids = RepositoryProjectPathConfig.objects.filter(
organization_id=organization.id,
integration_id=integration_id,
organization_id=org.id,
integration_id=integration,
).values_list("id", flat=True)

code_owners = ProjectCodeOwners.objects.filter(
repository_project_path_config__in=code_mapping_ids
)

for code_owner in code_owners:
code_owner.update_schema(organization=organization)
code_owner.update_schema(organization=org)

# TODO(nisanthan): May need to add logging for the cases where we might want to have more information if something fails
except (RepositoryProjectPathConfig.DoesNotExist, ProjectCodeOwners.DoesNotExist):
return


def _unpack_integration_id(integration: Integration | RpcIntegration | int | None) -> int | None:
if isinstance(integration, (Integration, RpcIntegration)):
return integration.id
return integration
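With the `Integration | RpcIntegration | int` union gone, unpacking now happens at the call site, which is what made the deleted `_unpack_integration_id` helper redundant. A hypothetical dispatch under the new signature (variable names assumed):

```python
from sentry.tasks.codeowners.update_code_owners_schema import update_code_owners_schema

# Hypothetical call site: all arguments are primitives now, so the
# payload serializes without any model/RPC handling inside the task.
update_code_owners_schema.delay(
    organization=organization.id,
    integration=integration.id if integration is not None else None,
    projects=[project.id for project in projects] if projects else None,
)
```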
3 changes: 1 addition & 2 deletions src/sentry/tasks/digests.py
@@ -1,6 +1,5 @@
import logging
import time
from datetime import datetime

from sentry.digests import get_option_key
from sentry.digests.backends.base import InvalidState
@@ -54,7 +53,7 @@ def schedule_digests() -> None:
)
def deliver_digest(
key: str,
schedule_timestamp: datetime | None = None,
schedule_timestamp: float | None = None,
notification_uuid: str | None = None,
) -> None:
from sentry import digests
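`schedule_timestamp` likewise becomes a wire-friendly primitive: a POSIX timestamp instead of a `datetime`. A sketch of the conversion each side would perform, with a hypothetical call site:

```python
from datetime import datetime, timezone

from sentry.tasks.digests import deliver_digest

scheduled_at = datetime(2024, 6, 1, 12, 0, tzinfo=timezone.utc)

# Producer: send seconds-since-epoch, which JSON handles natively.
deliver_digest.delay(key="mail:p:123", schedule_timestamp=scheduled_at.timestamp())

# Consumer: rebuild an aware datetime if one is needed.
restored = datetime.fromtimestamp(scheduled_at.timestamp(), tz=timezone.utc)
assert restored == scheduled_at
```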
16 changes: 6 additions & 10 deletions src/sentry/tasks/email.py
@@ -1,8 +1,6 @@
import logging
from typing import Any

from django.core.mail import EmailMultiAlternatives

from sentry.auth import access
from sentry.models.group import Group
from sentry.silo.base import SiloMode
@@ -63,10 +61,9 @@ def process_inbound_email(mailfrom: str, group_id: int, payload: str) -> None:
),
),
)
def send_email(message: EmailMultiAlternatives | dict[str, Any]) -> None:
if not isinstance(message, EmailMultiAlternatives):
message = message_from_dict(message)
send_messages([message])
def send_email(message: dict[str, Any]) -> None:
django_message = message_from_dict(message)
send_messages([django_message])


@instrumented_task(
@@ -82,7 +79,6 @@ def send_email(message: EmailMultiAlternatives | dict[str, Any]) -> None:
),
),
)
def send_email_control(message: EmailMultiAlternatives | dict[str, Any]) -> None:
if not isinstance(message, EmailMultiAlternatives):
message = message_from_dict(message)
send_messages([message])
def send_email_control(message: dict[str, Any]) -> None:
django_message = message_from_dict(message)
send_messages([django_message])
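Both email tasks now accept only the dict form and always rebuild the Django message with `message_from_dict`. The payload below is illustrative only; the real keys are whatever Sentry's serializer emits when the message is enqueued, and are assumed here:

```python
from sentry.tasks.email import send_email

# Hypothetical payload; keys mirror common EmailMessage fields but are
# not copied from Sentry's actual serializer.
payload = {
    "subject": "[Sentry] New issue in my-project",
    "body": "Plain-text body",
    "from_email": "noreply@example.com",
    "to": ["owner@example.com"],
}
send_email.delay(message=payload)
```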
3 changes: 2 additions & 1 deletion src/sentry/tasks/on_demand_metrics.py
@@ -2,6 +2,7 @@

import logging
from collections.abc import Sequence
from typing import Any

import sentry_sdk
from celery.exceptions import SoftTimeLimitExceeded
@@ -196,7 +197,7 @@ def schedule_on_demand_check() -> None:
processing_deadline_duration=120,
),
)
def process_widget_specs(widget_query_ids: list[int], *args: object, **kwargs: object) -> None:
def process_widget_specs(widget_query_ids: list[int], **kwargs: Any) -> None:
"""
Child task spawned from :func:`schedule_on_demand_check`.
"""
15 changes: 4 additions & 11 deletions src/sentry/tasks/summaries/weekly_reports.py
@@ -44,8 +44,6 @@
from sentry.taskworker.namespaces import reports_tasks
from sentry.taskworker.retry import Retry
from sentry.types.group import GroupSubStatus
from sentry.users.models.user import User
from sentry.users.services.user import RpcUser
from sentry.utils import json, redis
from sentry.utils.dates import floor_to_utc_day, to_datetime
from sentry.utils.email import MessageBuilder
@@ -130,13 +128,13 @@ def prepare_organization_report(
timestamp: float,
duration: int,
organization_id: int,
batch_id: uuid.UUID | str,
batch_id: str,
dry_run: bool = False,
target_user: User | int | None = None,
target_user: int | None = None,
email_override: str | None = None,
):
batch_id = str(batch_id)
if target_user and not isinstance(target_user, int) and not hasattr(target_user, "id"):
if email_override and not isinstance(target_user, int):
logger.error(
"Target user must have an ID",
extra={
@@ -147,11 +145,6 @@
},
)
return
target_user_id: int | None = None
if isinstance(target_user, (User, RpcUser)):
target_user_id = target_user.id
elif isinstance(target_user, int):
target_user_id = target_user
organization = Organization.objects.get(id=organization_id)
set_tag("org.slug", organization.slug)
set_tag("org.id", organization_id)
@@ -257,7 +250,7 @@ def prepare_organization_report(
return

# Finally, deliver the reports
batch = OrganizationReportBatch(ctx, batch_id, dry_run, target_user_id, email_override)
batch = OrganizationReportBatch(ctx, batch_id, dry_run, target_user, email_override)
with sentry_sdk.start_span(op="weekly_reports.deliver_reports"):
logger.info(
"weekly_reports.deliver_reports",
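`batch_id` and `target_user` get the same primitive treatment, which let the `User`/`RpcUser` unpacking block be deleted. A hypothetical dispatch under the new signature (`org` and `user` are assumed names):

```python
import uuid

from sentry.tasks.summaries.weekly_reports import prepare_organization_report

# Hypothetical call site: the batch ID travels as a string and the
# target user as a bare ID.
prepare_organization_report.delay(
    timestamp=1717243200.0,     # period end as a POSIX timestamp
    duration=7 * 24 * 60 * 60,  # one week, in seconds
    organization_id=org.id,
    batch_id=str(uuid.uuid4()),
    target_user=user.id,
    email_override=None,
)
```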