Update test_main_clean.py to use zlib instead of bzip2 (#13065)
kenodegard committed Sep 9, 2023
1 parent e9870cc commit 0a2244f
Showing 2 changed files with 71 additions and 72 deletions.
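At a glance, the change swaps the test package from bzip2 to zlib and rewrites the os.walk-based cache-inspection helpers as pathlib equivalents, collapsing the assert_any_pkg/assert_not_pkg pair into a single has_pkg predicate. Below is a minimal, self-contained sketch of that pattern, not the module itself: the helper names mirror the diff that follows, while the extension tuple and demo directory are stand-ins for illustration only.

from __future__ import annotations

from pathlib import Path
from typing import Iterable

# Stand-in value for illustration; conda defines the real CONDA_PACKAGE_EXTENSIONS.
CONDA_PACKAGE_EXTENSIONS = (".conda", ".tar.bz2")


def _get_tars(pkgs_dir: str | Path) -> list[Path]:
    # Non-recursive scan of a package cache for tarballs, as in the rewritten helper.
    return [
        file
        for file in Path(pkgs_dir).iterdir()
        if file.is_file() and file.name.endswith(CONDA_PACKAGE_EXTENSIONS)
    ]


def has_pkg(name: str, contents: Iterable[str | Path]) -> bool:
    # True if any listed path belongs to the named package,
    # e.g. a file named "zlib-1.2.13-h5eee18b_0.conda" matches name="zlib".
    return any(Path(content).name.startswith(f"{name}-") for content in contents)


if __name__ == "__main__":
    demo_cache = Path("demo-pkgs-cache")  # hypothetical empty cache directory
    demo_cache.mkdir(exist_ok=True)
    assert not has_pkg("zlib", _get_tars(demo_cache))
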
.github/workflows/tests.yml (2 changes: 1 addition & 1 deletion)
@@ -347,7 +347,7 @@ jobs:

- name: Setup environment
shell: bash -l {0}
timeout-minutes: 10
timeout-minutes: 15
run: |
./dev/macos/setup.sh
tests/cli/test_main_clean.py (141 changes: 70 additions & 71 deletions)
@@ -4,11 +4,11 @@

import json
from datetime import datetime
from os import PathLike, walk
from os.path import basename, exists, isdir, join
from shutil import copy
from pathlib import Path
from typing import Iterable

import pytest
from pytest_mock import MockerFixture

from conda.base.constants import (
CONDA_LOGS_DIR,
@@ -17,54 +17,53 @@
)
from conda.cli.main_clean import _get_size
from conda.core.subdir_data import create_cache_dir
from conda.gateways.disk.create import mkdir_p
from conda.gateways.logging import set_verbosity
from conda.testing import CondaCLIFixture, TmpEnvFixture
from conda.testing.integration import make_temp_package_cache


def _get_pkgs(pkgs_dir):
_, dirs, _ = next(walk(pkgs_dir))
return [join(pkgs_dir, pkg) for pkg in dirs]
def _get_pkgs(pkgs_dir: str | Path) -> list[Path]:
return [package for package in Path(pkgs_dir).iterdir() if package.is_dir()]


def _get_tars(pkgs_dir):
_, _, files = next(walk(pkgs_dir))
def _get_tars(pkgs_dir: str | Path) -> list[Path]:
return [
join(pkgs_dir, file)
for file in files
if file.endswith(CONDA_PACKAGE_EXTENSIONS)
file
for file in Path(pkgs_dir).iterdir()
if file.is_file() and file.name.endswith(CONDA_PACKAGE_EXTENSIONS)
]


def _get_index_cache():
cache_dir = create_cache_dir()
_, _, files = next(walk(cache_dir))
return [join(cache_dir, file) for file in files if file.endswith(".json")]
def _get_index_cache() -> list[Path]:
return [
file
for file in Path(create_cache_dir()).iterdir()
if file.is_file() and file.name.endswith(".json")
]


def _get_tempfiles(pkgs_dir):
_, _, files = next(walk(pkgs_dir))
def _get_tempfiles(pkgs_dir: str | Path) -> list[Path]:
return [
join(pkgs_dir, file) for file in files if file.endswith(CONDA_TEMP_EXTENSIONS)
file
for file in Path(pkgs_dir).iterdir()
if file.is_file() and file.name.endswith(CONDA_TEMP_EXTENSIONS)
]


def _get_logfiles(pkgs_dir):
root, _, files = next(walk(join(pkgs_dir, CONDA_LOGS_DIR)), [None, None, []])
return [join(root, file) for file in files]
def _get_logfiles(pkgs_dir: str | Path) -> list[Path]:
try:
return [file for file in Path(pkgs_dir, CONDA_LOGS_DIR).iterdir()]
except FileNotFoundError:
# FileNotFoundError: CONDA_LOGS_DIR doesn't exist
return []


def _get_all(pkgs_dir):
def _get_all(pkgs_dir: str | Path) -> tuple[list[Path], list[Path], list[Path]]:
return _get_pkgs(pkgs_dir), _get_tars(pkgs_dir), _get_index_cache()


def assert_any_pkg(name, contents):
assert any(basename(content).startswith(f"{name}-") for content in contents)


def assert_not_pkg(name, contents):
assert not any(basename(content).startswith(f"{name}-") for content in contents)
def has_pkg(name: str, contents: Iterable[str | Path]) -> bool:
return any(Path(content).name.startswith(f"{name}-") for content in contents)


# conda clean --force-pkgs-dirs
@@ -73,21 +72,22 @@ def test_clean_force_pkgs_dirs(
conda_cli: CondaCLIFixture,
tmp_env: TmpEnvFixture,
):
pkg = "bzip2"
pkg = "zlib"

with make_temp_package_cache() as pkgs_dir:
# pkgs_dir is a directory
assert isdir(pkgs_dir)
pkgs_dir = Path(pkgs_dir)
assert pkgs_dir.is_dir()

with tmp_env(pkg):
stdout, _, _ = conda_cli("clean", "--force-pkgs-dirs", "--yes", "--json")
json.loads(stdout) # assert valid json

# pkgs_dir is removed
assert not exists(pkgs_dir)
assert not pkgs_dir.exists()

# pkgs_dir is still removed
assert not exists(pkgs_dir)
assert not pkgs_dir.exists()


# conda clean --packages
@@ -96,32 +96,32 @@ def test_clean_and_packages(
conda_cli: CondaCLIFixture,
tmp_env: TmpEnvFixture,
):
pkg = "bzip2"
pkg = "zlib"

with make_temp_package_cache() as pkgs_dir:
# pkg doesn't exist ahead of time
assert_not_pkg(pkg, _get_pkgs(pkgs_dir))
assert not has_pkg(pkg, _get_pkgs(pkgs_dir))

with tmp_env(pkg) as prefix:
# pkg exists
assert_any_pkg(pkg, _get_pkgs(pkgs_dir))
assert has_pkg(pkg, _get_pkgs(pkgs_dir))

# --json flag is regression test for #5451
stdout, _, _ = conda_cli("clean", "--packages", "--yes", "--json")
json.loads(stdout) # assert valid json

# pkg still exists since its in use by temp env
assert_any_pkg(pkg, _get_pkgs(pkgs_dir))
assert has_pkg(pkg, _get_pkgs(pkgs_dir))

conda_cli("remove", "--prefix", prefix, pkg, "--yes", "--json")
stdout, _, _ = conda_cli("clean", "--packages", "--yes", "--json")
json.loads(stdout) # assert valid json

# pkg is removed
assert_not_pkg(pkg, _get_pkgs(pkgs_dir))
assert not has_pkg(pkg, _get_pkgs(pkgs_dir))

# pkg is still removed
assert_not_pkg(pkg, _get_pkgs(pkgs_dir))
assert not has_pkg(pkg, _get_pkgs(pkgs_dir))


# conda clean --tarballs
@@ -130,25 +130,25 @@ def test_clean_tarballs(
conda_cli: CondaCLIFixture,
tmp_env: TmpEnvFixture,
):
pkg = "bzip2"
pkg = "zlib"

with make_temp_package_cache() as pkgs_dir:
# tarball doesn't exist ahead of time
assert_not_pkg(pkg, _get_tars(pkgs_dir))
assert not has_pkg(pkg, _get_tars(pkgs_dir))

with tmp_env(pkg):
# tarball exists
assert_any_pkg(pkg, _get_tars(pkgs_dir))
assert has_pkg(pkg, _get_tars(pkgs_dir))

# --json flag is regression test for #5451
stdout, _, _ = conda_cli("clean", "--tarballs", "--yes", "--json")
json.loads(stdout) # assert valid json

# tarball is removed
assert_not_pkg(pkg, _get_tars(pkgs_dir))
assert not has_pkg(pkg, _get_tars(pkgs_dir))

# tarball is still removed
assert_not_pkg(pkg, _get_tars(pkgs_dir))
assert not has_pkg(pkg, _get_tars(pkgs_dir))


# conda clean --index-cache
@@ -157,7 +157,7 @@ def test_clean_index_cache(
conda_cli: CondaCLIFixture,
tmp_env: TmpEnvFixture,
):
pkg = "bzip2"
pkg = "zlib"

with make_temp_package_cache():
# index cache doesn't exist ahead of time
@@ -193,7 +193,7 @@ def test_clean_tempfiles(
Since the presence of .c~ and .trash files are dependent upon irregular termination we create
our own temporary files to confirm they get cleaned up.
"""
pkg = "bzip2"
pkg = "zlib"

with make_temp_package_cache() as pkgs_dir:
# tempfiles don't exist ahead of time
@@ -203,7 +203,7 @@
# mimic tempfiles being created
path = _get_tars(pkgs_dir)[0] # grab any tarball
for ext in CONDA_TEMP_EXTENSIONS:
copy(path, f"{path}{ext}")
(path.parent / f"{path.name}{ext}").touch()

# tempfiles exist
assert len(_get_tempfiles(pkgs_dir)) == len(CONDA_TEMP_EXTENSIONS)
@@ -232,19 +232,18 @@ def test_clean_logfiles(
Since these log files were uniquely created during the experimental
phase of the conda-libmamba-solver.
"""
pkg = "bzip2"
pkg = "zlib"

with make_temp_package_cache() as pkgs_dir:
# logfiles don't exist ahead of time
assert not _get_logfiles(pkgs_dir)

with tmp_env(pkg):
# mimic logfiles being created
logs = join(pkgs_dir, CONDA_LOGS_DIR)
mkdir_p(logs)
path = join(logs, f"{datetime.utcnow():%Y%m%d-%H%M%S-%f}.log")
with open(path, "w"):
pass
logs_dir = Path(pkgs_dir, CONDA_LOGS_DIR)
logs_dir.mkdir(parents=True, exist_ok=True)
path = logs_dir / f"{datetime.utcnow():%Y%m%d-%H%M%S-%f}.log"
path.touch()

# logfiles exist
assert path in _get_logfiles(pkgs_dir)
@@ -268,23 +267,23 @@ def test_clean_all(
conda_cli: CondaCLIFixture,
tmp_env: TmpEnvFixture,
):
pkg = "bzip2"
pkg = "zlib"
args = ("--yes", "--json")
if verbose:
args = (*args, "--verbose")

with make_temp_package_cache() as pkgs_dir:
# pkg, tarball, & index cache doesn't exist ahead of time
pkgs, tars, cache = _get_all(pkgs_dir)
assert_not_pkg(pkg, pkgs)
assert_not_pkg(pkg, tars)
assert not has_pkg(pkg, pkgs)
assert not has_pkg(pkg, tars)
assert not cache

with tmp_env(pkg) as prefix:
# pkg, tarball, & index cache exists
pkgs, tars, cache = _get_all(pkgs_dir)
assert_any_pkg(pkg, pkgs)
assert_any_pkg(pkg, tars)
assert has_pkg(pkg, pkgs)
assert has_pkg(pkg, tars)
assert cache

stdout, _, _ = conda_cli("clean", "--all", *args)
@@ -294,8 +293,8 @@
# tarball is removed
# index cache is cleared
pkgs, tars, cache = _get_all(pkgs_dir)
assert_any_pkg(pkg, pkgs)
assert_not_pkg(pkg, tars)
assert has_pkg(pkg, pkgs)
assert not has_pkg(pkg, tars)
assert not cache

conda_cli("remove", "--prefix", prefix, pkg, *args)
@@ -306,16 +305,16 @@
# tarball is still removed
# index cache is still cleared
pkgs, tars, index_cache = _get_all(pkgs_dir)
assert_not_pkg(pkg, pkgs)
assert_not_pkg(pkg, tars)
assert not has_pkg(pkg, pkgs)
assert not has_pkg(pkg, tars)
assert not cache

# pkg is still removed
# tarball is still removed
# index cache is still cleared
pkgs, tars, index_cache = _get_all(pkgs_dir)
assert_not_pkg(pkg, pkgs)
assert_not_pkg(pkg, tars)
assert not has_pkg(pkg, pkgs)
assert not has_pkg(pkg, tars)
assert not cache

set_verbosity(0) # reset verbosity
@@ -330,16 +329,16 @@ def test_clean_all_mock_lstat(
conda_cli: CondaCLIFixture,
tmp_env: TmpEnvFixture,
):
pkg = "bzip2"
pkg = "zlib"
args = ("--yes", "--verbose")
if as_json:
args = (*args, "--json")

with make_temp_package_cache() as pkgs_dir, tmp_env(pkg) as prefix:
# pkg, tarball, & index cache exists
pkgs, tars, cache = _get_all(pkgs_dir)
assert_any_pkg(pkg, pkgs)
assert_any_pkg(pkg, tars)
assert has_pkg(pkg, pkgs)
assert has_pkg(pkg, tars)
assert cache

mocker.patch("os.lstat", side_effect=OSError)
@@ -352,16 +351,16 @@

# pkg, tarball, & index cache still exists
pkgs, tars, index_cache = _get_all(pkgs_dir)
assert_any_pkg(pkg, pkgs)
assert_any_pkg(pkg, tars)
assert has_pkg(pkg, pkgs)
assert has_pkg(pkg, tars)
assert cache

set_verbosity(0) # reset verbosity


# _get_size unittest, valid file
def test_get_size(tmp_path: Path):
warnings = []
warnings: list[str] = []
path = tmp_path / "file"
path.write_text("hello")
assert _get_size(path, warnings=warnings)
@@ -376,7 +375,7 @@ def test_get_size_None():

# _get_size unittest, invalid file and collect warnings
def test_get_size_list():
warnings = []
warnings: list[str] = []
with pytest.raises(NotImplementedError):
_get_size("not-a-file", warnings=warnings)
assert warnings
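
A behavioral detail behind the _get_logfiles rewrite in the diff above: next(walk(...)) with a default silently returns that default when the directory is missing, while Path.iterdir() raises FileNotFoundError, hence the new try/except. A small sketch of the difference, using a made-up path:

from os import walk
from pathlib import Path

missing = Path("this-directory-does-not-exist")

# Old approach: os.walk ignores the missing directory, so next() falls back
# to the supplied default and files ends up as an empty list.
root, _, files = next(walk(missing), (None, None, []))
assert root is None and files == []

# New approach: Path.iterdir() raises, so the helper catches FileNotFoundError
# and returns an empty list instead.
try:
    logfiles = list(missing.iterdir())
except FileNotFoundError:
    logfiles = []
assert logfiles == []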
