
Commit

enable most E rules
ajay-sentry committed May 22, 2024
1 parent ad629f2 commit 3ab3ddc
Showing 12 changed files with 21 additions and 18 deletions.
2 changes: 1 addition & 1 deletion codecov_auth/helpers.py
@@ -52,7 +52,7 @@ def log(objects, message, user, action_flag=None, add_traceback=False):
     if action_flag is None:
         action_flag = CHANGE
 
-    if type(objects) is not list:
+    if not isinstance(objects, list):
         objects = [objects]
 
     if add_traceback:
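
Beyond satisfying the linter, `isinstance` differs from an exact `type(...)` comparison in that it also accepts subclasses. A minimal standalone illustration (not part of this commit; the subclass is hypothetical):

class TaggedList(list):
    """Hypothetical list subclass, used only for illustration."""

objects = TaggedList(["a", "b"])

print(type(objects) is list)      # False: the exact type check rejects the subclass
print(isinstance(objects, list))  # True: isinstance accepts list and its subclasses
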
2 changes: 1 addition & 1 deletion core/commands/commit/interactors/get_file_content.py
@@ -16,7 +16,7 @@ async def get_file_from_service(self, commit, path):
 
             # When a file received from GH that is larger than 1MB the result will be
             # pre-decoded and of string type; no need to decode again in that case
-            if type(content.get("content")) == str:
+            if isinstance(content.get("content"), str):
                 return content.get("content")
             return content.get("content").decode("utf-8")
         # TODO raise this to the API so we can handle it.
4 changes: 2 additions & 2 deletions graphql_api/tests/test_branch.py
@@ -210,7 +210,7 @@ def test_fetch_branches(self):
         query = query_branches % (self.org.username, self.repo.name)
         data = self.gql_request(query, variables=variables)
         branches = data["owner"]["repository"]["branches"]["edges"]
-        assert type(branches) == list
+        assert isinstance(branches, list)
         assert len(branches) == 3
         assert branches == [
             {"node": {"name": "test2"}},
@@ -239,7 +239,7 @@ def test_fetch_branches_with_filters(self):
         query = query_branches % (self.org.username, self.repo.name, "test2")
         data = self.gql_request(query, variables=variables)
         branches = data["owner"]["repository"]["branches"]["edges"]
-        assert type(branches) == list
+        assert isinstance(branches, list)
         assert len(branches) == 1
         assert branches == [
             {"node": {"name": "test2"}},
7 changes: 5 additions & 2 deletions graphql_api/types/impacted_file/impacted_file.py
@@ -9,11 +9,14 @@
 from graphql_api.types.errors import ProviderError, UnknownPath
 from graphql_api.types.errors.errors import UnknownFlags
 from graphql_api.types.segment_comparison.segment_comparison import SegmentComparisons
-from services.comparison import Comparison, MissingComparisonReport, Segment
+from services.comparison import (
+    Comparison,
+    ImpactedFile,
+    MissingComparisonReport,
+)
 from services.profiling import ProfilingSummary
 
 impacted_file_bindable = ObjectType("ImpactedFile")
-from services.comparison import ImpactedFile
 
 
 @impacted_file_bindable.field("fileName")
2 changes: 1 addition & 1 deletion graphql_api/views.py
@@ -142,7 +142,7 @@ async def post(self, request, *args, **kwargs):
                     return HttpResponseBadRequest(
                         JsonResponse("Your query is too costly.")
                     )
-        except:
+        except Exception:
             pass
         return response
 
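
The bare-except change is a behavioral tightening as well as an E722 fix: a bare `except:` catches every `BaseException`, including `KeyboardInterrupt` and `SystemExit`, whereas `except Exception` lets those control-flow exceptions propagate. A small sketch of the difference (illustrative only, not from this repository):

def swallow_everything():
    try:
        raise KeyboardInterrupt
    except:            # bare clause: catches BaseException, so the interrupt vanishes
        pass

def swallow_errors_only():
    try:
        raise KeyboardInterrupt
    except Exception:  # KeyboardInterrupt is not an Exception subclass,
        pass           # so it propagates to the caller

swallow_everything()      # returns silently
# swallow_errors_only()   # would raise KeyboardInterrupt if uncommented
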
2 changes: 1 addition & 1 deletion graphs/mixins.py
@@ -6,7 +6,7 @@
 class GraphBadgeAPIMixin(object):
     def get(self, request, *args, **kwargs):
         ext = self.kwargs.get("ext")
-        if not ext in self.extensions:
+        if ext not in self.extensions:
             return Response(
                 {
                     "detail": f"File extension should be one of [ {' || '.join(self.extensions)} ]"
2 changes: 1 addition & 1 deletion graphs/views.py
@@ -54,7 +54,7 @@ class BadgeHandler(APIView, RepoPropertyMixin, GraphBadgeAPIMixin):
     def get_object(self, request, *args, **kwargs):
         # Validate coverage precision
         precision = self.request.query_params.get("precision", "0")
-        if not precision in self.precisions:
+        if precision not in self.precisions:
             raise NotFound("Coverage precision should be one of [ 0 || 1 || 2 ]")
 
         coverage, coverage_range = self.get_coverage()
6 changes: 3 additions & 3 deletions ruff.toml
@@ -36,9 +36,9 @@ indent-width = 4
 target-version = "py312"
 
 [lint]
-# Currently only enabled for F541 and I: https://docs.astral.sh/ruff/rules/
-select = ["F", "I"]
-ignore = ["F841", "F401", "F405", "F403"]
+# Currently only enabled for F, I, and E rules, with a few exclusions: https://docs.astral.sh/ruff/rules/
+select = ["F", "I", "E"]
+ignore = ["F841", "F401", "F405", "F403", "E501", "E712", "E711"]
 
 # Allow fix for all enabled rules (when `--fix`) is provided.
 # The preferred method (for now) w.r.t. fixable rules is to manually update the makefile
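
For reference, the three pycodestyle codes added to `ignore` keep line-length checks (E501) and equality comparisons against `True`/`False` (E712) and `None` (E711) unflagged, so code like the following still passes under this configuration (illustrative snippet, not from this repository):

flag, value = True, None

if flag == True:    # E712 would otherwise suggest "if flag:" or "if flag is True:"
    pass

if value == None:   # E711 would otherwise suggest "if value is None:"
    pass
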
6 changes: 3 additions & 3 deletions services/comparison.py
@@ -226,7 +226,7 @@ def _get_line(self, report_file, ln):
 
         # copied from ReportFile._line, minus dataclass instantiation
         if line:
-            if type(line) is list:
+            if isinstance(line, list):
                 return line
             else:
                 # these are old versions
@@ -675,7 +675,7 @@ def get_file_comparison(self, file_name, with_src=False, bypass_max_diff=False):
                 file_name, self.head_commit.commitid
             )["content"]
             # make sure the file is str utf-8
-            if type(file_content) is not str:
+            if not isinstance(file_content, str):
                 file_content = str(file_content, "utf-8")
             src = file_content.splitlines()
         else:
@@ -1009,7 +1009,7 @@ def _fetch_raw_comparison_data(self) -> dict:
         try:
             data = archive_service.read_file(self.commit_comparison.report_storage_path)
             return json.loads(data)
-        except:
+        except Exception:
             log.error(
                 "ComparisonReport - couldn't fetch data from storage", exc_info=True
             )
2 changes: 1 addition & 1 deletion services/profiling.py
@@ -53,7 +53,7 @@ def summary_data(
         try:
             data = archive_service.read_file(profiling_commit.summarized_location)
             return ProfilingSummaryDataAnalyzer(json.loads(data))
-        except:
+        except Exception:
             log.error(
                 "failed to read summarized profiling data from storage", exc_info=True
             )
2 changes: 1 addition & 1 deletion services/yaml.py
@@ -30,7 +30,7 @@ def fetch_commit_yaml(commit: Commit, owner: Owner) -> Optional[Dict]:
         )
         yaml_dict = safe_load(yaml_str)
         return validate_yaml(yaml_dict, show_secrets_for=None)
-    except:
+    except Exception:
         # fetching, parsing, validating the yaml inside the commit can
         # have various exceptions, which we do not care about to get the final
         # yaml used for a commit, as any error here, the codecov.yaml would not
2 changes: 1 addition & 1 deletion webhook_handlers/views/github.py
@@ -72,7 +72,7 @@ def validate_signature(self, request):
             "webhook_secret",
             default=b"testixik8qdauiab1yiffydimvi72ekq",
         )
-        if type(key) is str:
+        if isinstance(key, str):
             # If "key" comes from k8s secret, it is of type str, so
             # must convert to bytearray for use with hmac
             key = bytes(key, "utf-8")
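
The str-to-bytes conversion matters because `hmac.new` requires a bytes-like key and raises `TypeError` when given a `str`, and a secret mounted from Kubernetes arrives as `str`. A minimal sketch of the pattern, with an illustrative payload (not the repository's actual verification code):

import hashlib
import hmac

key = "testixik8qdauiab1yiffydimvi72ekq"  # e.g. loaded from a k8s secret as str
if isinstance(key, str):
    key = bytes(key, "utf-8")             # hmac needs a bytes-like key

expected = hmac.new(key, b'{"action": "opened"}', hashlib.sha256).hexdigest()
print(expected)
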
