Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs:
matrix:
python: ["3.7", "3.8", "3.9", "3.10"]
os: ["ubuntu-latest"]
pytest_ops: [""]
pytest_opts: ["--workers 4 --tests-per-worker 1"]
requirements: [""]
include:
- os: "ubuntu-latest"
Expand All @@ -32,13 +32,18 @@ jobs:
python: "3.10"
# ignore doctests, as they involve calls to github, and all mac machines
# use the same IP address
pytest_opts: "--workers 4 --tests-per-worker 1 -k pins/tests"
- os: "windows-latest"
python: "3.10"
# ignore doctests
pytest_opts: "-k pins/tests"
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python }}
- name: Install dependencies
shell: bash
run: |
python -m pip install --upgrade pip

Expand All @@ -57,14 +62,16 @@ jobs:
export_default_credentials: true

- name: Run tests
shell: bash
run: |
pytest pins -m 'not fs_rsc and not skip_on_github' --workers 4 --tests-per-worker 1 $PYTEST_OPTS
pytest pins -m 'not fs_rsc and not skip_on_github' $PYTEST_OPTS
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "us-east-1"
PYTEST_OPTS: ${{ matrix.pytest_opts }}
REQUIREMENTS: ${{ matrix.requirements }}
ACTION_OS: ${{ matrix.os }}
# fixes error on macosx virtual machine with pytest-parallel
# https://github.com/browsertron/pytest-parallel/issues/93
no_proxy: "*"
Expand Down
12 changes: 5 additions & 7 deletions pins/rsconnect/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -371,15 +371,13 @@ def post_content_bundle(self, guid, fname, gzip=True) -> Bundle:
if p.is_dir() and gzip:
import tarfile

with tempfile.NamedTemporaryFile(mode="wb", suffix=".tar.gz") as tmp:
with tarfile.open(fileobj=tmp.file, mode="w:gz") as tar:
tar.add(str(p.absolute()), arcname="")
with tempfile.TemporaryDirectory() as tmp_dir:
p_archive = Path(tmp_dir) / "bundle.tar.gz"

# close the underlying file. note we don't call the top-level
# close method, since that would delete the temporary file
tmp.file.close()
with tarfile.open(p_archive, mode="w:gz") as tar:
tar.add(str(p.absolute()), arcname="")

with open(tmp.name, "rb") as f:
with open(p_archive, "rb") as f:
result = f_request(data=f)
else:
with open(str(p.absolute()), "rb") as f:
Expand Down
33 changes: 30 additions & 3 deletions pins/tests/test_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,23 @@
)

from fsspec import filesystem
from pathlib import Path

# NOTE: windows time.time() implementation appears to have 16 millisecond precision, so
# we need to add a small delay, in order to avoid prune checks appearing to happen at the
# exact same moment something earlier was created / accessed.
# see: https://stackoverflow.com/a/1938096/1144523


# Utilities ===================================================================


def _sleep():
# time-based issues keep arising erratically in windows checks, so try to shoot
# well past
time.sleep(0.3)


@pytest.fixture
def some_file(tmp_dir2):
p = tmp_dir2 / "some_file.txt"
Expand All @@ -34,7 +46,7 @@ def test_touch_access_time_manual(some_file):
def test_touch_access_time_auto(some_file):
orig_access = some_file.stat().st_atime

time.sleep(0.2)
_sleep()
new_time = touch_access_time(some_file)

assert some_file.stat().st_atime == new_time
Expand All @@ -55,9 +67,14 @@ def test_pins_cache_url_hash_name():
cache = PinsUrlCache(fs=filesystem("file"))
hashed = cache.hash_name("http://example.com/a.txt", True)

p_hash = Path(hashed)

# should have form <url_hash>/<version_placeholder>/<filename>
assert hashed.endswith("/a.txt")
assert hashed.count("/") == 2
assert p_hash.name == "a.txt"

# count parent dirs, excluding root (e.g. "." or "/")
n_parents = len(p_hash.parents) - 1
assert n_parents == 2


@pytest.mark.skip("TODO")
Expand Down Expand Up @@ -106,6 +123,8 @@ def pin2_v3(a_cache):


def test_cache_pruner_old_versions_none(a_cache, pin1_v1):
_sleep()

pruner = CachePruner(a_cache)

old = pruner.old_versions(days=1)
Expand All @@ -114,6 +133,8 @@ def test_cache_pruner_old_versions_none(a_cache, pin1_v1):


def test_cache_pruner_old_versions_days0(a_cache, pin1_v1):
_sleep()

pruner = CachePruner(a_cache)
old = pruner.old_versions(days=0)

Expand All @@ -122,6 +143,8 @@ def test_cache_pruner_old_versions_days0(a_cache, pin1_v1):


def test_cache_pruner_old_versions_some(a_cache, pin1_v1, pin1_v2):
_sleep()

# create: tmp_dir/pin1/version1

pruner = CachePruner(a_cache)
Expand All @@ -133,6 +156,8 @@ def test_cache_pruner_old_versions_some(a_cache, pin1_v1, pin1_v2):


def test_cache_pruner_old_versions_multi_pins(a_cache, pin1_v2, pin2_v3):
_sleep()

pruner = CachePruner(a_cache)
old = pruner.old_versions(days=1)

Expand All @@ -141,6 +166,8 @@ def test_cache_pruner_old_versions_multi_pins(a_cache, pin1_v2, pin2_v3):


def test_cache_prune_prompt(a_cache, pin1_v1, pin2_v3, monkeypatch):
_sleep()

cache_prune(days=1, cache_root=a_cache.parent, prompt=False)

versions = list(a_cache.glob("*/*"))
Expand Down
9 changes: 8 additions & 1 deletion pins/tests/test_constructors.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,14 @@ def check_dir_writable(p_dir):


def check_cache_file_path(p_file, p_cache):
    """Assert that p_file lies exactly two directory levels below p_cache."""
    relative = p_file.relative_to(p_cache)

    # Path.parents lists every ancestor up to and including the relative
    # root (e.g. "."), so subtracting 1 yields the number of real parent
    # directories.  This effectively counts separators in an inter-OS
    # friendly way, unlike counting "/" in the string form.
    depth = len(relative.parents) - 1
    assert depth == 2


def construct_from_board(board):
Expand Down