47 changes: 47 additions & 0 deletions .github/workflows/check-pull-request.yml
@@ -0,0 +1,47 @@
name: Check-pull-request

on: [pull_request]

jobs:
  Execute:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.5, 3.6, 3.7, 3.8]

    steps:
      - uses: actions/checkout@master

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          python3 -m pip install --upgrade pip
          pip3 install --upgrade coveralls
          pip3 install -e .[dev]

      - name: Run checks
        run: ./precommit.py

      - name: Upload Coverage
        run: coveralls
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
          COVERALLS_PARALLEL: true

  Finish-Coveralls:
    name: Finish Coveralls
    needs: Execute
    runs-on: ubuntu-latest
    container: python:3-slim
    steps:
      - name: Finish Coveralls
        run: |
          pip3 install --upgrade coveralls
          coveralls --finish
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
35 changes: 35 additions & 0 deletions .github/workflows/check-push.yml
@@ -0,0 +1,35 @@
name: Check-push

on:
  push:
    branches:
      - master

jobs:
  Execute:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.5, 3.6, 3.7, 3.8]

    steps:
      - uses: actions/checkout@master

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install dependencies
        run: |
          python3 -m pip install --upgrade pip
          pip3 install -e .[dev]
          pip3 install coveralls

      - name: Run checks
        run: ./precommit.py

      - name: Upload coverage to coveralls
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: coveralls
2 changes: 2 additions & 0 deletions .gitignore
@@ -6,3 +6,5 @@ venv3
*.egg-info
.tox
dist/
venv/
.coverage
18 changes: 17 additions & 1 deletion README.rst
@@ -1,7 +1,23 @@
temppathlib
===========

temppathlib provides wrappers around ``tempfile`` so that you can directly use them together with ``pathlib`` module.
.. image:: https://github.com/Parquery/temppathlib/workflows/Check-push/badge.svg
    :target: https://github.com/Parquery/temppathlib/actions?query=workflow%3ACheck-push
    :alt: Check status

.. image:: https://coveralls.io/repos/github/Parquery/temppathlib/badge.svg?branch=master
    :target: https://coveralls.io/github/Parquery/temppathlib
    :alt: Test coverage

.. image:: https://badge.fury.io/py/temppathlib.svg
    :target: https://pypi.org/project/temppathlib/
    :alt: PyPI - version

.. image:: https://img.shields.io/pypi/pyversions/temppathlib.svg
    :target: https://pypi.org/project/temppathlib/
    :alt: PyPI - Python Version

Temppathlib provides wrappers around ``tempfile`` so that you can use them directly together with the ``pathlib``
module. We found it cumbersome to manually convert ``tempfile`` objects to ``pathlib.Path`` whenever we needed a
temporary file.
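
For illustration, here is a minimal usage sketch (not part of this diff) assuming the wrappers expose the temporary
location as a ``path`` attribute of type ``pathlib.Path``; the class and attribute names below are assumptions, not
taken from the changes shown here:

.. code-block:: python

    import temppathlib

    # Assumed wrapper around tempfile.TemporaryDirectory exposing a ``path`` attribute.
    with temppathlib.TemporaryDirectory() as tmp_dir:
        config_pth = tmp_dir.path / "config.json"  # use it directly as a pathlib.Path
        config_pth.write_text('{"key": "value"}')

    # Assumed wrapper around tempfile.NamedTemporaryFile, again exposing ``path``.
    with temppathlib.NamedTemporaryFile() as tmp_file:
        tmp_file.path.write_text("hello")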

249 changes: 76 additions & 173 deletions precommit.py
@@ -1,202 +1,105 @@
#!/usr/bin/env python3
"""
Runs precommit checks on the repository.
"""
"""Run precommit checks on the repository."""
import argparse
import concurrent.futures
import hashlib
import os
import pathlib
import re
import subprocess
import sys
from typing import List, Union, Tuple # pylint: disable=unused-import

import yapf.yapflib.yapf_api


def compute_hash(text: str) -> str:
"""
:param text: to hash
:return: hash digest
"""
md5 = hashlib.md5()
md5.update(text.encode())
return md5.hexdigest()


class Hasher:
"""
Hashes the source code files and reports if they differed to one of the previous hashings.
"""

def __init__(self, source_dir: pathlib.Path, hash_dir: pathlib.Path) -> None:
self.source_dir = source_dir
self.hash_dir = hash_dir

def __hash_dir(self, path: pathlib.Path) -> pathlib.Path:
"""
:param path: to a source file
:return: path to the file holding the hash of the source text
"""
if self.source_dir not in path.parents:
raise ValueError("Expected the path to be beneath the source directory {!r}, got: {!r}".format(
str(self.source_dir), str(path)))

return self.hash_dir / path.relative_to(self.source_dir).parent / path.name

def hash_differs(self, path: pathlib.Path) -> bool:
"""
:param path: to the source file
:return: True if the hash of the content differs to one of the previous hashings.
"""
hash_dir = self.__hash_dir(path=path)

if not hash_dir.exists():
return True

prev_hashes = set([pth.name for pth in hash_dir.iterdir()])

new_hsh = compute_hash(text=path.read_text())

return not new_hsh in prev_hashes

def update_hash(self, path: pathlib.Path) -> None:
"""
Hashes the file content and stores it on disk.

:param path: to the source file
:return:
"""
hash_dir = self.__hash_dir(path=path)
hash_dir.mkdir(exist_ok=True, parents=True)

new_hsh = compute_hash(text=path.read_text())

pth = hash_dir / new_hsh
pth.write_text('passed')

def main() -> int:
"""Execute the main routine."""
parser = argparse.ArgumentParser()
parser.add_argument(
"--overwrite",
help="Overwrites the unformatted source files with the "
"well-formatted code in place. If not set, "
"an exception is raised if any of the files do not conform "
"to the style guide.",
action='store_true')

def check(path: pathlib.Path, py_dir: pathlib.Path, overwrite: bool) -> Union[None, str]:
"""
Runs all the checks on the given file.
args = parser.parse_args()

:param path: to the source file
:param py_dir: path to the source files
:param overwrite: if True, overwrites the source file in place instead of reporting that it was not well-formatted.
:return: None if all checks passed. Otherwise, an error message.
"""
style_config = py_dir / 'style.yapf'
overwrite = bool(args.overwrite)

report = []
repo_root = pathlib.Path(__file__).parent

# yapf
if not overwrite:
formatted, _, changed = yapf.yapflib.yapf_api.FormatFile(
filename=str(path), style_config=str(style_config), print_diff=True)
# yapf: disable
source_files = (
sorted((repo_root / "temppathlib").glob("**/*.py")) +
sorted((repo_root / "tests").glob("**/*.py")))
# yapf: enable

if changed:
report.append("Failed to yapf {}:\n{}".format(path, formatted))
if overwrite:
print('Removing trailing whitespace...')
for pth in source_files:
pth.write_text(re.sub(r'[ \t]+$', '', pth.read_text(), flags=re.MULTILINE))

print("YAPF'ing...")
yapf_targets = ["tests", "temppathlib", "setup.py", "precommit.py"]
if overwrite:
# yapf: disable
subprocess.check_call(
["yapf", "--in-place", "--style=style.yapf", "--recursive"] +
yapf_targets,
cwd=str(repo_root))
# yapf: enable
else:
yapf.yapflib.yapf_api.FormatFile(filename=str(path), style_config=str(style_config), in_place=True)

# mypy
env = os.environ.copy()
env['PYTHONPATH'] = ":".join([py_dir.as_posix(), env.get("PYTHONPATH", "")])

proc = subprocess.Popen(
['mypy', str(path), '--ignore-missing-imports'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=env,
universal_newlines=True)
stdout, stderr = proc.communicate()
if proc.returncode != 0:
report.append("Failed to mypy {}:\nOutput:\n{}\n\nError:\n{}".format(path, stdout, stderr))

# pylint
proc = subprocess.Popen(
['pylint', str(path), '--rcfile={}'.format(py_dir / 'pylint.rc')],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True)

stdout, stderr = proc.communicate()
if proc.returncode != 0:
report.append("Failed to pylint {}:\nOutput:\n{}\n\nError:\n{}".format(path, stdout, stderr))
# yapf: disable
subprocess.check_call(
["yapf", "--diff", "--style=style.yapf", "--recursive"] +
yapf_targets,
cwd=str(repo_root))
# yapf: enable

if len(report) > 0:
return "\n".join(report)

return None
print("Mypy'ing...")
subprocess.check_call(["mypy", "--strict", "temppathlib", "tests"], cwd=str(repo_root))

print("Isort'ing...")
# yapf: disable
isort_files = map(str, source_files)
# yapf: enable

def main() -> int:
""""
Main routine
"""
# pylint: disable=too-many-locals
parser = argparse.ArgumentParser()
parser.add_argument(
"--overwrite",
help="Overwrites the unformatted source files with the well-formatted code in place. "
"If not set, an exception is raised if any of the files do not conform to the style guide.",
action='store_true')
# yapf: disable
subprocess.check_call(
["isort", "--project", "temppathlib", '--line-width', '120'] +
([] if overwrite else ['--check-only']) +
[str(pth) for pth in source_files])
# yapf: enable

parser.add_argument("--all", help="checks all the files even if they didn't change", action='store_true')
print("Pydocstyle'ing...")
subprocess.check_call(["pydocstyle", "temppathlib"], cwd=str(repo_root))

args = parser.parse_args()
print("Pylint'ing...")
subprocess.check_call(["pylint", "--rcfile=pylint.rc", "tests", "temppathlib"], cwd=str(repo_root))

overwrite = bool(args.overwrite)
check_all = bool(args.all)
print("Testing...")
env = os.environ.copy()
env['ICONTRACT_SLOW'] = 'true'

py_dir = pathlib.Path(__file__).parent
# yapf: disable
subprocess.check_call(
["coverage", "run",
"--source", "temppathlib",
"-m", "unittest", "discover", "tests"],
cwd=str(repo_root),
env=env)
# yapf: enable

hash_dir = py_dir / '.precommit_hashes'
hash_dir.mkdir(exist_ok=True)
subprocess.check_call(["coverage", "report"])

hasher = Hasher(source_dir=py_dir, hash_dir=hash_dir)
print("Doctesting...")
doctest_files = ([repo_root / "README.rst"] + sorted((repo_root / "temppathlib").glob("**/*.py")))

# yapf: disable
pths = sorted(
list(py_dir.glob("*.py")) +
list((py_dir / 'tests').glob("*.py"))
)
# yapf: enable
for pth in doctest_files:
subprocess.check_call([sys.executable, "-m", "doctest", str(pth)])

# see which files changed:
pending_pths = [] # type: List[pathlib.Path]
print("Checking setup.py sdist ...")
subprocess.check_call([sys.executable, "setup.py", "sdist"], cwd=str(repo_root))

if check_all:
pending_pths = pths
else:
for pth in pths:
if hasher.hash_differs(path=pth):
pending_pths.append(pth)

print("There are {} file(s) that need to be individually checked...".format(len(pending_pths)))

success = True

futures_paths = [] # type: List[Tuple[concurrent.futures.Future, pathlib.Path]]
with concurrent.futures.ThreadPoolExecutor() as executor:
for pth in pending_pths:
future = executor.submit(fn=check, path=pth, py_dir=py_dir, overwrite=overwrite)
futures_paths.append((future, pth))

for future, pth in futures_paths:
report = future.result()
if report is None:
print("Passed all checks: {}".format(pth))
hasher.update_hash(path=pth)
else:
print("One or more checks failed for {}:\n{}".format(pth, report))
success = False

success = subprocess.call(['python3', '-m', 'unittest', 'discover', (py_dir / 'tests').as_posix()]) == 0 and success

if not success:
print("One or more checks failed.")
return 1
print("Checking with twine...")
subprocess.check_call(["twine", "check", "dist/*"], cwd=str(repo_root))

return 0
