From e0d51ec89292f470e7bc2e92e5b73204c766cd48 Mon Sep 17 00:00:00 2001 From: Alexander Frenzel Date: Mon, 11 Jan 2021 13:19:42 -0800 Subject: [PATCH] chore: update project structure to 78c5496a * chore: update project structure to 78c5496a * fix: refresh package registry first --- .cruft.json | 2 +- .github/workflows/test.yml | 14 ++++ .github/workflows/update.yml | 5 +- poetry.lock | 83 ++++++++++++++++++- pyproject.toml | 4 + test_proj/conftest.py | 15 ++-- .../media_library/tests/test_encoding.py | 2 +- test_proj/media_library/tests/test_signals.py | 6 +- video_encoding/models.py | 2 +- video_encoding/tasks.py | 4 +- 10 files changed, 118 insertions(+), 19 deletions(-) diff --git a/.cruft.json b/.cruft.json index e245b3f..22d62b1 100644 --- a/.cruft.json +++ b/.cruft.json @@ -1,6 +1,6 @@ { "template": "https://github.com/escaped/cookiecutter-pypackage.git", - "commit": "46edce6ba837a29c6a7b6867ab259ce93391fd13", + "commit": "78c5496a422a0047307d337f970c73f01dd9e392", "context": { "cookiecutter": { "author": "Alexander Frenzel", diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 94412c3..f927b0d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,8 +8,20 @@ on: - main jobs: + lint_cruft: + name: Check if automatic project update was successful + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: Fail if .rej files exist as structure update was not successful + run: test -z "$(find . -iname '*.rej')" + lint: name: Lint + needs: [lint_cruft] runs-on: ubuntu-latest steps: - name: Checkout code @@ -30,6 +42,7 @@ jobs: test: name: Test + needs: [lint_cruft] runs-on: ${{ matrix.platform }} strategy: max-parallel: 4 @@ -48,6 +61,7 @@ jobs: run: | python -m pip install --upgrade pip pip install tox tox-gh-actions coveralls + sudo apt-get update sudo apt-get install -y ffmpeg - name: Test with tox run: tox diff --git a/.github/workflows/update.yml b/.github/workflows/update.yml index fa5e8f6..671eb02 100644 --- a/.github/workflows/update.yml +++ b/.github/workflows/update.yml @@ -20,7 +20,8 @@ jobs: run: | cruft update -y poetry lock --no-update # add new dependencies - poetry run pre-commit run -a + poetry install + poetry run pre-commit run -a || true # we have to fix other issue manually - name: Get new template version # extract new cooiecutter template version @@ -34,7 +35,7 @@ jobs: token: ${{ secrets.AUTO_UPDATE_GITHUB_TOKEN }} commit-message: >- chore: update project structure to ${{ env.TEMPLATE_COMMIT }} - title: "[Cruft] Auto-Update project structure" + title: "[Actions] Auto-Sync cookiecutter template" body: "" branch: chore/cookiecutter-pypackage delete-branch: true diff --git a/poetry.lock b/poetry.lock index 40d62ae..11087bc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -189,6 +189,56 @@ flake8 = ">=3.0.0" [package.extras] dev = ["coverage", "black", "hypothesis", "hypothesmith"] +[[package]] +name = "flake8-builtins" +version = "1.5.3" +description = "Check for python builtins being used as variables or parameters." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = "*" + +[package.extras] +test = ["coverage", "coveralls", "mock", "pytest", "pytest-cov"] + +[[package]] +name = "flake8-comprehensions" +version = "3.3.1" +description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "flake8-debugger" +version = "4.0.0" +description = "ipdb/pdb statement checker plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +flake8 = ">=3.0" +pycodestyle = "*" +six = "*" + +[[package]] +name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = "*" + [[package]] name = "identify" version = "1.5.6" @@ -318,6 +368,17 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pep8-naming" +version = "0.11.1" +description = "Check PEP-8 naming conventions, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8-polyfill = ">=1.0.2,<2" + [[package]] name = "pillow" version = "8.0.0" @@ -593,7 +654,7 @@ testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake [metadata] lock-version = "1.1" python-versions = ">=3.6.1, <4.0" -content-hash = "d60e819d39f0b441d816a6ab7390b61f081d44870cff2693c0e2e4a1ccd23712" +content-hash = "da66810f3af7c97a01a722c6bd6c883cca7e9d2d538a343eccb546653df873df" [metadata.files] appdirs = [ @@ -695,6 +756,22 @@ flake8-bugbear = [ {file = "flake8-bugbear-20.11.1.tar.gz", hash = "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538"}, {file = "flake8_bugbear-20.11.1-py36.py37.py38-none-any.whl", hash = "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703"}, ] +flake8-builtins = [ + {file = "flake8-builtins-1.5.3.tar.gz", hash = "sha256:09998853b2405e98e61d2ff3027c47033adbdc17f9fe44ca58443d876eb00f3b"}, + {file = "flake8_builtins-1.5.3-py2.py3-none-any.whl", hash = "sha256:7706babee43879320376861897e5d1468e396a40b8918ed7bccf70e5f90b8687"}, +] +flake8-comprehensions = [ + {file = "flake8-comprehensions-3.3.1.tar.gz", hash = "sha256:e734bf03806bb562886d9bf635d23a65a1a995c251b67d7e007a7b608af9bd22"}, + {file = "flake8_comprehensions-3.3.1-py3-none-any.whl", hash = "sha256:6d80dfafda0d85633f88ea5bc7de949485f71f1e28db7af7719563fe5f62dcb1"}, +] +flake8-debugger = [ + {file = "flake8-debugger-4.0.0.tar.gz", hash = "sha256:e43dc777f7db1481db473210101ec2df2bd39a45b149d7218a618e954177eda6"}, + {file = "flake8_debugger-4.0.0-py3-none-any.whl", hash = "sha256:82e64faa72e18d1bdd0000407502ebb8ecffa7bc027c62b9d4110ce27c091032"}, +] +flake8-polyfill = [ + {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, + {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, +] identify = [ {file = "identify-1.5.6-py2.py3-none-any.whl", hash = "sha256:3139bf72d81dfd785b0a464e2776bd59bdc725b4cc10e6cf46b56a0db931c82e"}, {file = "identify-1.5.6.tar.gz", hash = "sha256:969d844b7a85d32a5f9ac4e163df6e846d73c87c8b75847494ee8f4bd2186421"}, @@ -755,6 +832,10 @@ pathspec = [ {file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"}, {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, ] +pep8-naming = [ + {file = 
"pep8-naming-0.11.1.tar.gz", hash = "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724"}, + {file = "pep8_naming-0.11.1-py2.py3-none-any.whl", hash = "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"}, +] pillow = [ {file = "Pillow-8.0.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:b04569ff215b85ce3e2954979d2d5e0bf84007e43ddcf84b632fc6bc18e07909"}, {file = "Pillow-8.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:594f2f25b7bcfd9542c41b9df156fb5104f19f5fcefa51b1447f1d9f64c9cc14"}, diff --git a/pyproject.toml b/pyproject.toml index 64b7893..3c1465e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,8 +42,12 @@ autoflake = "^1.4" black = "^20.8b1" flake8 = "^3.8.3" flake8-bugbear = "^20.11.1" +flake8-builtins = "^1.5.3" +flake8-comprehensions = "^3.3.1" +flake8-debugger = "^4.0.0" isort = "^5.5.2" mypy = "^0.782" +pep8-naming = "^0.11.1" pre-commit = "^2.7.1" pytest = "^6.0.1" pytest-cov = "^2.10.1" diff --git a/test_proj/conftest.py b/test_proj/conftest.py index 01b86d9..5f318a3 100644 --- a/test_proj/conftest.py +++ b/test_proj/conftest.py @@ -45,24 +45,23 @@ def local_video(video_path) -> Generator[Video, None, None]: # file has already been deleted pass - for format in video.format_set.all(): - format.file.delete() + for format_ in video.format_set.all(): + format_.file.delete() video.delete() @pytest.fixture -def format(video_path, local_video) -> Generator[Format, None, None]: - format = Format.objects.create( +def video_format(video_path, local_video) -> Generator[Format, None, None]: + format_ = Format.objects.create( object_id=local_video.pk, content_type=ContentType.objects.get_for_model(local_video), field_name='file', format='mp4_hd', progress=100, ) - # - format.file.save('test.MTS', File(open(video_path, 'rb')), save=True) - yield format + format_.file.save('test.MTS', File(open(video_path, 'rb')), save=True) + yield format_ @pytest.fixture @@ -110,7 +109,7 @@ def delete(self, name: str) -> None: def exists(self, name: str) -> bool: return self.__path(name).exists() - def open(self, name: str, mode: str) -> IO[Any]: + def open(self, name: str, mode: str) -> IO[Any]: # noqa: A003 return open(self.__path(name), mode) def path(self, *args, **kwargs): diff --git a/test_proj/media_library/tests/test_encoding.py b/test_proj/media_library/tests/test_encoding.py index c082403..d018c01 100644 --- a/test_proj/media_library/tests/test_encoding.py +++ b/test_proj/media_library/tests/test_encoding.py @@ -12,7 +12,7 @@ def test_encoding(video): assert video.format_set.count() == 4 - formats = dict([(o['name'], o) for o in settings.VIDEO_ENCODING_FORMATS['FFmpeg']]) + formats = {o['name']: o for o in settings.VIDEO_ENCODING_FORMATS['FFmpeg']} assert set(video.format_set.values_list('format', flat=True)) == set( formats.keys() ) # NOQA diff --git a/test_proj/media_library/tests/test_signals.py b/test_proj/media_library/tests/test_signals.py index d81ea67..3eb53d7 100644 --- a/test_proj/media_library/tests/test_signals.py +++ b/test_proj/media_library/tests/test_signals.py @@ -130,7 +130,7 @@ def test_signals__encoding_failed( @pytest.mark.django_db def test_signals__encoding_skipped( - monkeypatch, mocker, local_video: models.Video, format: models.Format + monkeypatch, mocker, local_video: models.Video, video_format: models.Format ) -> None: """ Make sure encoding signal reports skipped, if file had been encoded before. 
@@ -143,8 +143,8 @@ def test_signals__encoding_skipped( mocker.patch.object(tasks, '_encode') # don't encode anything # encoding has already been done for the given format - format.format = encoding_format["name"] - format.save() + video_format.format = encoding_format["name"] + video_format.save() listener = mocker.MagicMock() signals.format_started.connect(listener) diff --git a/video_encoding/models.py b/video_encoding/models.py index ee88f16..c7848a7 100644 --- a/video_encoding/models.py +++ b/video_encoding/models.py @@ -34,7 +34,7 @@ class Format(models.Model): editable=False, verbose_name=_("Progress"), ) - format = models.CharField( + format = models.CharField( # noqa: A003 max_length=255, editable=False, verbose_name=_("Format"), diff --git a/video_encoding/tasks.py b/video_encoding/tasks.py index 96d88b2..5642609 100644 --- a/video_encoding/tasks.py +++ b/video_encoding/tasks.py @@ -20,8 +20,8 @@ def convert_all_videos(app_label, model_name, object_pk): Automatically converts all videos of a given instance. """ # get instance - Model = apps.get_model(app_label=app_label, model_name=model_name) - instance = Model.objects.get(pk=object_pk) + model_class = apps.get_model(app_label=app_label, model_name=model_name) + instance = model_class.objects.get(pk=object_pk) # search for `VideoFields` fields = instance._meta.fields
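Note on the tasks.py hunk above: the rename from `Model` to `model_class` is presumably driven by the pep8-naming plugin this patch adds (alongside flake8-builtins, flake8-comprehensions and flake8-debugger) and does not change behaviour — the task still resolves the model lazily through Django's app registry so it only needs serializable arguments. A minimal sketch of that pattern follows, assuming the `VideoField` import path shown in the package's README; the loop body is illustrative, not the library's actual encoding call:

    from django.apps import apps

    from video_encoding.fields import VideoField


    def convert_all_videos(app_label, model_name, object_pk):
        """Convert every VideoField on the given instance."""
        # Resolving the model via the app registry (instead of importing it at
        # module level) keeps the task decoupled from any concrete project model
        # and lets callers pass plain strings plus a primary key.
        model_class = apps.get_model(app_label=app_label, model_name=model_name)
        instance = model_class.objects.get(pk=object_pk)

        # Only fields declared as VideoField are candidates for encoding.
        for field in instance._meta.fields:
            if isinstance(field, VideoField):
                ...  # hand the field's file over to the encoding backend

With this shape, a task queue wrapper can enqueue the call using nothing but strings and an integer, e.g. ('media_library', 'Video', video.pk).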