Skip to content

Commit

Permalink
Replace flake8, isort, pyupgrade with ruff (#1578)
Browse files Browse the repository at this point in the history
https://github.com/charliermarsh/ruff/ is a faster replacement for most
of the linting tools we use, and is starting to be picked up by several
other projects. Even projects like Pandas have adopted Ruff
(pandas-dev/pandas#50160)

This PR replaces flake8, isort, and pyupgrade.

```
❯ time pre-commit run flake8 --all-files
flake8...................................................................Passed
pre-commit run flake8 --all-files  3.48s user 0.55s system 372% cpu 1.084 total

❯ time pre-commit run isort --all-files
Run isort................................................................Passed
pre-commit run isort --all-files  0.31s user 0.18s system 50% cpu 0.973 total

❯ time pre-commit run pyupgrade --all-files
pyupgrade................................................................Passed
pre-commit run pyupgrade --all-files  1.26s user 0.23s system 280% cpu 0.530 total
```
 vs
```
❯ time pre-commit run ruff --all-files
ruff.....................................................................Passed
pre-commit run ruff --all-files  0.37s user 0.16s system 142% cpu 0.373 total
```

**_Drops from 5s to 0.3s_**


Some other popular tools that have adopted it:

- [FastAPI](https://github.com/tiangolo/fastapi)
- [Bokeh](https://github.com/bokeh/bokeh)
- [Zulip](https://github.com/zulip/zulip)
- [Pydantic](https://github.com/pydantic/pydantic)
- [Sphinx](https://github.com/sphinx-doc/sphinx)
- [Hatch](https://github.com/pypa/hatch)
- [Jupyter](https://github.com/jupyter-server/jupyter_server)
- [Synapse](https://github.com/matrix-org/synapse)
- [Saleor](https://github.com/saleor/saleor)
- [Polars](https://github.com/pola-rs/polars)
- [Ibis](https://github.com/ibis-project/ibis)
- [OpenBB](https://github.com/OpenBB-finance/OpenBBTerminal)

It will also be used by Apache Airflow :) and should be used in
Astronomer-providers too.
  • Loading branch information
kaxil authored and utkarsharma2 committed Jan 17, 2023
1 parent d32dd03 commit dbf088f
Show file tree
Hide file tree
Showing 13 changed files with 58 additions and 59 deletions.
10 changes: 0 additions & 10 deletions .flake8

This file was deleted.

8 changes: 0 additions & 8 deletions .isort.cfg

This file was deleted.

31 changes: 5 additions & 26 deletions .pre-commit-config.yaml
Expand Up @@ -70,34 +70,13 @@ repos:
alias: black
additional_dependencies: [black>=22.10.0]

- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.0.219'
hooks:
- id: flake8
additional_dependencies:
- flake8-builtins
- flake8-comprehensions
- flake8-colors
- flake8-assertive
- flake8-typing-imports
- flake8-logging-format

- repo: https://github.com/PyCQA/isort
rev: 5.11.4
hooks:
- id: isort
name: Run isort
# Exclude auto-generated example files from being changed
exclude: ^sql-cli/include/base/.airflow/dags
- id: ruff
args:
# These options are duplicated to known_first_party in .isort.cfg,
# Please keep these in sync for now. (See comments there for details.)
- --profile=black
- -l=110
- --combine-as
- -p=astro
- -p=tests
- -p=sql_cli
- --config=./ruff.toml

- repo: https://github.com/codespell-project/codespell
rev: v2.2.2
hooks:
Expand Down
Expand Up @@ -28,7 +28,7 @@
log = logging.getLogger(__file__)


def load_file_to_delta(
def load_file_to_delta( # noqa: C901
input_file: File,
delta_table: BaseTable,
databricks_job_name: str,
Expand Down
Expand Up @@ -138,9 +138,9 @@ def process_checks(self):
passed_tests.extend(_get_success_checks(checks, column))

if len(failed_tests) > 0:
raise AirflowException(f"The following tests have failed:" f"\n{''.join(failed_tests)}")
raise AirflowException(f"The following tests have failed: \n{''.join(failed_tests)}")
if len(passed_tests) > 0:
print(f"The following tests have passed:" f"\n{''.join(passed_tests)}")
print(f"The following tests have passed: \n{''.join(passed_tests)}")


def _get_failed_checks(checks, col=None):
Expand Down
2 changes: 1 addition & 1 deletion python-sdk/src/astro/sql/operators/export_to_file.py
Expand Up @@ -105,7 +105,7 @@ def get_openlineage_facets_on_complete(self, task_instance): # skipcq: PYL-W061
)
]
output_uri = (
f"{self.output_file.openlineage_dataset_namespace}" f"{self.output_file.openlineage_dataset_name}"
f"{self.output_file.openlineage_dataset_namespace}{self.output_file.openlineage_dataset_name}"
)
output_dataset = [
OpenlineageDataset(
Expand Down
4 changes: 2 additions & 2 deletions python-sdk/src/astro/sql/operators/raw_sql.py
Expand Up @@ -5,12 +5,12 @@
from typing import Any, Callable

try:
from airflow.decorators.base import TaskDecorator, task_decorator_factory
from airflow.decorators.base import TaskDecorator
except ImportError:
from airflow.decorators.base import task_decorator_factory
from airflow.decorators import _TaskDecorator as TaskDecorator

import airflow
from airflow.decorators.base import task_decorator_factory

if airflow.__version__ >= "2.3":
from sqlalchemy.engine.row import LegacyRow as SQLAlcRow
Expand Down
5 changes: 2 additions & 3 deletions python-sdk/src/astro/sql/operators/transform.py
Expand Up @@ -4,12 +4,11 @@
from typing import Any, Callable

try:
from airflow.decorators.base import TaskDecorator, task_decorator_factory
from airflow.decorators.base import TaskDecorator
except ImportError:
from airflow.decorators.base import task_decorator_factory
from airflow.decorators import _TaskDecorator as TaskDecorator

from airflow.decorators.base import get_unique_task_id
from airflow.decorators.base import get_unique_task_id, task_decorator_factory
from airflow.models.xcom_arg import XComArg
from sqlalchemy.sql.functions import Function

Expand Down
2 changes: 1 addition & 1 deletion python-sdk/src/astro/sql/operators/upstream_task_mixin.py
Expand Up @@ -23,5 +23,5 @@ def __init__(self, **kwargs):
self.set_upstream(task)
else:
raise AirflowException(
"Cannot upstream a non-task, please only use XcomArg or operators for this" " parameter"
"Cannot upstream a non-task, please only use XcomArg or operators for this parameter"
)
4 changes: 2 additions & 2 deletions python-sdk/tests/benchmark/dags/benchmark_gcs_to_big_query.py
Expand Up @@ -83,9 +83,9 @@
task_id="load_five_gb",
bucket="astro-sdk",
source_objects=[
("benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000" + str(i) + ".ndjson")
f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000{str(i)}.ndjson"
if i >= 10
else ("benchmark/trimmed/pypi/pypi-downloads-2021-03-28-0000000000" + "0" + str(i) + ".ndjson")
else f"benchmark/trimmed/pypi/pypi-downloads-2021-03-28-00000000000{str(i)}.ndjson"
for i in range(20)
],
destination_project_dataset_table=f"{DATASET_NAME}.{TABLE_NAME}",
Expand Down
4 changes: 2 additions & 2 deletions python-sdk/tests/files/locations/test_location_base.py
Expand Up @@ -40,7 +40,7 @@ def test_get_class_name_method_valid_name():
"""Test valid case of implicit naming dependency among the module name and class name for dynamic imports"""

class Test: # skipcq: PY-D0002
__name__ = "test.some"
__name__ = "test.some" # noqa: A003

class TestLocation: # skipcq: PY-D0002
pass
Expand Down Expand Up @@ -96,7 +96,7 @@ def test_get_class_name_method_invalid_name():
"""Test invalid case of implicit naming dependency among the module name and class name for dynamic imports"""

class Test: # skipcq: PY-D0002
__name__ = "test.some"
__name__ = "test.some" # noqa: A003

class SomethingElseLocation: # skipcq: PY-D0002
pass
Expand Down
Expand Up @@ -141,7 +141,8 @@ def test_is_valid_snow_identifier(self): # skipcq PYL-R0201
]
invalid_strings = [
"$invalid",
"Infvalid\x00" "Invalid Name",
"Infvalid\x00",
"Invalid Name",
'"Invalid " Name"',
'"Also Invalid Name""',
]
Expand Down
38 changes: 38 additions & 0 deletions ruff.toml
@@ -0,0 +1,38 @@
line-length = 120

# Enable Pyflakes `E` and `F` codes by default.
extend-select = [
"W", # pycodestyle warnings
"I", # isort
"C90", # Complexity
# "B", # flake8-bugbear
"C", # flake8-comprehensions
# "ANN", # flake8-comprehensions
"ISC", # flake8-implicit-str-concat
"T10", # flake8-debugger
"A", # flake8-builtins
"UP", # pyupgrade
]
extend-ignore = ["A002"]

# Exclude a variety of commonly ignored directories.
extend-exclude = [
"__pycache__",
"docs/source/conf.py",
]

target-version = "py37"
fix = true

[per-file-ignores]
"python-sdk/src/astro/sql/__init__.py" = ["F401"]
"python-sdk/src/astro/lineage/__init__.py" = ["F401"]
"python-sdk/src/astro/sql/table.py" = ["F401"]


[mccabe]
max-complexity = 6

[isort]
combine-as-imports = true
known-first-party = ["astro", "tests", "sql_cli"]

0 comments on commit dbf088f

Please sign in to comment.