4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -118,7 +118,7 @@ jobs:
- name: Install PEST++ suite using get-pestpp
shell: bash -l {0}
run: |
get-pestpp :home
get-pestpp --owner pestpp --repo pestpp-nightly-builds :home
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -129,7 +129,7 @@ jobs:
-n=auto \
-rA -vv --tb=native \
--durations=20 \
--cov=pyemu --cov-report=lcov
--cov=pyemu --cov-report=lcov \
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
MPLBACKEND: Agg # non-interactive backend for matplotlib
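
The CI step now pulls PEST++ binaries from the pestpp-nightly-builds repository. For reference, a minimal sketch of the equivalent call through the Python API exercised in the tests below, assuming get_pestpp is importable from pyemu.utils.get_pestpp as in the test module; the ":home" expansion follows the bindir_options mapping, and the snippet is illustrative rather than part of the change:

from pathlib import Path

from pyemu.utils.get_pestpp import get_pestpp

# equivalent of: get-pestpp --owner pestpp --repo pestpp-nightly-builds :home
bindir = Path.home() / ".local" / "bin"  # the ":home" shorthand used by the script
get_pestpp(str(bindir), owner="pestpp", repo="pestpp-nightly-builds")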
63 changes: 39 additions & 24 deletions autotest/get_pestpp_tests.py
@@ -27,7 +27,7 @@
"home": Path.home() / ".local" / "bin",
}
owner_options = [
"usgs",
"usgs", "pestpp"
]
repo_options = {
"pestpp": [
@@ -40,6 +40,16 @@
"pestpp-sqp",
"pestpp-swp",
],
"pestpp-nightly-builds": [
"pestpp-da",
"pestpp-glm",
"pestpp-ies",
"pestpp-mou",
"pestpp-opt",
"pestpp-sen",
"pestpp-sqp",
"pestpp-swp"
]
}

if system() == "Windows":
@@ -97,27 +107,23 @@ def test_get_release(repo):
tag = "latest"
release = get_release(repo=repo, tag=tag)
assets = release["assets"]
release_tag_name = release["tag_name"]
if len(release["body"]) > 0:
# if nightly build tag is in body, use that
release_tag_name = release["body"].split()[-1]
else:
release_tag_name = release["tag_name"]

expected_assets = [
f"pestpp-{release_tag_name}-linux.tar.gz",
f"pestpp-{release_tag_name}-mac.tar.gz",
f"pestpp-{release_tag_name}-win.zip",
f"pestpp-{release_tag_name}-linux",
f"pestpp-{release_tag_name}-mac",
f"pestpp-{release_tag_name}-win",
]
expected_ostags = [a.replace(".zip", "") for a in expected_assets]
expected_ostags = [a.replace("tar.gz", "") for a in expected_assets]
actual_assets = [asset["name"] for asset in assets]

if repo == "pestpp":
# can remove if modflow6 releases follow asset name conventions followed in executables and nightly build repos
assert {a.rpartition("_")[2] for a in actual_assets} >= {
a for a in expected_assets if not a.startswith("win")
}
else:
for ostag in expected_ostags:
assert any(
ostag in a for a in actual_assets
), f"dist not found for {ostag}"
actual_assets = [asset["name"].replace("tar.gz", "").replace(".zip", "") for asset in assets]

for ostag in expected_assets:
assert any(
ostag in a for a in actual_assets
), f"dist not found for {ostag}"


@pytest.mark.parametrize("bindir", bindir_options.keys())
@@ -238,7 +244,10 @@ def test_script_valid_options(function_tmpdir, downloads_dir):
@requires_github
@pytest.mark.parametrize("owner", owner_options)
@pytest.mark.parametrize("repo", repo_options.keys())
def test_script(function_tmpdir, owner, repo, downloads_dir):
def test_script(request, function_tmpdir, owner, repo, downloads_dir):
if ((repo == "pestpp-nightly-builds" and owner != "pestpp") or
(owner == "pestpp" and repo != "pestpp-nightly-builds")):
request.applymarker(pytest.mark.xfail)
bindir = str(function_tmpdir)
stdout, stderr, returncode = run_get_pestpp_script(
bindir,
@@ -251,7 +260,8 @@ def test_script(function_tmpdir, owner, repo, downloads_dir):
)
if rate_limit_msg in stderr:
pytest.skip(f"GitHub {rate_limit_msg}")

elif returncode != 0:
raise RuntimeError(stderr)
paths = list(function_tmpdir.glob("*"))
names = [p.name for p in paths]
expected_names = [append_ext(p) for p in repo_options[repo]]
@@ -262,13 +272,18 @@ def test_script(function_tmpdir, owner, repo, downloads_dir):
@requires_github
@pytest.mark.parametrize("owner", owner_options)
@pytest.mark.parametrize("repo", repo_options.keys())
def test_python_api(function_tmpdir, owner, repo, downloads_dir):
def test_python_api(request, function_tmpdir, owner, repo, downloads_dir):
if ((repo == "pestpp-nightly-builds" and owner != "pestpp") or
(owner == "pestpp" and repo != "pestpp-nightly-builds")):
request.applymarker(pytest.mark.xfail)
bindir = str(function_tmpdir)
try:
get_pestpp(bindir, owner=owner, repo=repo, downloads_dir=downloads_dir)
except HTTPError as err:
if err.code == 403:
except (HTTPError, IOError) as err:
if '403' in str(err):
pytest.skip(f"GitHub {rate_limit_msg}")
else:
raise err

paths = list(function_tmpdir.glob("*"))
names = [p.name for p in paths]
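
With both owners and both repositories parametrized, the xfail condition added above leaves only two owner/repo pairings expected to pass: usgs with pestpp, and pestpp with pestpp-nightly-builds. A minimal sketch of that rule, restated standalone rather than copied from the test module:

def expected_to_fail(owner, repo):
    # mirrors the condition added to test_script and test_python_api
    return ((repo == "pestpp-nightly-builds" and owner != "pestpp") or
            (owner == "pestpp" and repo != "pestpp-nightly-builds"))

for owner in ["usgs", "pestpp"]:
    for repo in ["pestpp", "pestpp-nightly-builds"]:
        marker = "xfail" if expected_to_fail(owner, repo) else "expected to pass"
        print(owner, repo, marker)
# usgs pestpp expected to pass
# usgs pestpp-nightly-builds xfail
# pestpp pestpp xfail
# pestpp pestpp-nightly-builds expected to pass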
41 changes: 30 additions & 11 deletions autotest/utils_tests.py
@@ -2706,12 +2706,13 @@ def ppw_worker(id_num,case,t_d,host,port,frun):


@pytest.mark.timeout(method="thread")
def pypestworker_test(tmp_path):
def test_pypestworker(tmp_path):
from datetime import datetime
import numpy as np
import subprocess as sp
import multiprocessing as mp
import sys
import time

host = "localhost"
port = 4111
@@ -2733,18 +2734,18 @@ def pypestworker_test(tmp_path):
sys.path.insert(1, t_d.as_posix())
from forward_run import helper as frun

m_d = "{0}_ppw_master".format(case)
m_d = tmp_path / "{0}_ppw_master".format(case)

if os.path.exists(m_d):
shutil.rmtree(m_d)
shutil.copytree(t_d,m_d)

# start the master
start = datetime.now()
b_d = os.getcwd()
os.chdir(m_d)
try:
p = sp.Popen([mou_exe_path, "{0}.pst".format(case), "/h", ":{0}".format(port)])
p = sp.Popen([mou_exe_path, "{0}.pst".format(case), "/h", ":{0}".format(port)], stderr=sp.PIPE)
except Exception as e:
print("failed to start master process")
os.chdir(b_d)
@@ -2754,19 +2755,37 @@ def pypestworker_test(tmp_path):
#return

num_workers=5

# loop over and start the workers - in this
# case they don't need unique dirs since they aren't writing
# anything
# little pause to let master get going (and possibly fail)
time.sleep(5)
procs = []
for i in range(num_workers):
pp = mp.Process(target=ppw_worker,args=(i,case,t_d,host,port,frun))
pp.start()
procs.append(pp)
# check master still running before deploying worker
if p.poll() is not None:
err = p.stderr.read()
raise RuntimeError("master process failed before all workers started:\n\n"+
err.decode())
try: # make sure we kill the master if worker startup returns an error
pp = mp.Process(target=ppw_worker,args=(i,case,t_d,host,port,frun))
# procs.append(pp)
pp.start()
procs.append(pp)
except Exception as e:
print("failed to start worker {0}".format(i))
p.terminate()
raise e
# if everything worked, the workers should receive the
# shutdown signal from the master and exit gracefully...
for pp in procs:
pp.join()
for i, pp in enumerate(procs):
try: # make sure we kill the master if a worker raises an error
pp.join()
except Exception as e:
print(f"exception thrown by worker {i}")
p.terminate()
raise e

# wait for the master to finish...but should already be finished
p.wait()
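
The reworked test above now pauses to let the master start, polls it before launching each worker, and terminates it if a worker fails to start or raises during join. A stripped-down sketch of that pattern with placeholder processes standing in for pestpp-mou and ppw_worker; the command, worker body, and timings are illustrative:

import multiprocessing as mp
import subprocess as sp
import sys
import time

def worker(i):
    # placeholder for ppw_worker(...): connect to the master and run the model
    print(f"worker {i} running")

if __name__ == "__main__":
    # placeholder master process standing in for pestpp-mou
    master = sp.Popen([sys.executable, "-c", "import time; time.sleep(30)"], stderr=sp.PIPE)
    time.sleep(5)  # little pause to let the master get going (and possibly fail)
    procs = []
    for i in range(3):
        if master.poll() is not None:  # master died early: surface its stderr
            raise RuntimeError(master.stderr.read().decode())
        try:
            pp = mp.Process(target=worker, args=(i,))
            pp.start()
            procs.append(pp)
        except Exception:
            master.terminate()  # don't leave the master running if a worker can't start
            raise
    for pp in procs:
        pp.join()
    master.terminate()  # the real test instead waits for the master to finish on its own
    master.wait()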
44 changes: 30 additions & 14 deletions pyemu/utils/get_pestpp.py
@@ -30,6 +30,7 @@
# key is the repo name, value is the renamed file prefix for the download
renamed_prefix = {
"pestpp": "pestpp",
"pestpp-nightly-builds": "pestpp",
}
available_repos = list(renamed_prefix.keys())
available_ostags = ["linux", "mac", "win"]
@@ -97,8 +98,7 @@ def get_releases(
owner=None, repo=None, quiet=False, per_page=None
) -> List[str]:
"""Get list of available releases."""
owner = default_owner if owner is None else owner
repo = default_repo if repo is None else repo
owner, repo = _get_defaults(owner, repo)
req_url = f"https://api.github.com/repos/{owner}/{repo}/releases"

params = {}
@@ -137,10 +137,24 @@ def get_releases(
return avail_releases


def _get_defaults(owner=None, repo=None):
"""Get default owner and repo if not provided."""
default_owner_dict = {'pestpp': "usgs",
'pestpp-nightly-builds': "pestpp"}
default_repo_dict = {o: r for r, o in default_owner_dict.items()}
# if nothing passed
if owner is None and repo is None:
owner = default_owner

if repo is None:
repo = default_repo_dict.get(owner, default_repo)
elif owner is None:
owner = default_owner_dict.get(repo, default_owner)
return owner, repo

def get_release(owner=None, repo=None, tag="latest", quiet=False) -> dict:
"""Get info about a particular release."""
owner = default_owner if owner is None else owner
repo = default_repo if repo is None else repo
owner, repo = _get_defaults(owner, repo)
api_url = f"https://api.github.com/repos/{owner}/{repo}"
req_url = (
f"{api_url}/releases/latest"
@@ -165,9 +179,9 @@ def get_release(owner=None, repo=None, tag="latest", quiet=False) -> dict:
break
except urllib.error.HTTPError as err:
if err.code == 401 and os.environ.get("GITHUB_TOKEN"):
raise ValueError("GITHUB_TOKEN env is invalid") from err
raise IOError("GITHUB_TOKEN env is invalid") from err
elif err.code == 403 and "rate limit exceeded" in err.reason:
raise ValueError(
raise IOError(
f"use GITHUB_TOKEN env to bypass rate limit ({err})"
) from err
elif err.code == 404:
Expand Down Expand Up @@ -416,7 +430,7 @@ def run_main(
}

for asset in assets:
if inconsistent_ostag_dict[ostag] in asset["name"]:
if ostag in asset["name"] or inconsistent_ostag_dict[ostag] in asset["name"]:
break
else:
raise ValueError(
@@ -425,13 +439,13 @@ def run_main(
)
asset_name = asset["name"]
download_url = asset["browser_download_url"]
asset_pth = Path(asset_name)
asset_stem = asset_pth.stem
if str(asset_pth).endswith("tar.gz"):
asset_suffix = ".tar.gz"
else:
asset_suffix = asset_pth.suffix
if repo == "pestpp":
asset_pth = Path(asset_name)
asset_stem = asset_pth.stem
if str(asset_pth).endswith("tar.gz"):
asset_suffix = ".tar.gz"
else:
asset_suffix = asset_pth.suffix
dst_fname = "-".join([repo, release["tag_name"], ostag]) + asset_suffix
else:
# change local download name so it is more unique
@@ -529,11 +543,13 @@ def run_main(
download_pth = zip_path

with zipfile.ZipFile(download_pth, "r") as zipf:
# First gather files within internal directories named "bin"
# First gather files within internal directories named "bin" or "dist/*/"
for pth in zipf.namelist():
p = Path(pth)
if p.parent.name == "bin":
full_path[p.name] = pth
elif p.parent.parent.name == "dist":
full_path[p.name] = pth
files = set(full_path.keys())

if not files:
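
The new _get_defaults helper lets either argument imply the other, so passing only owner="pestpp" or only repo="pestpp-nightly-builds" selects the nightly-builds pairing. A quick sketch of the resolution, assuming the helper is importable from the module and the module-level defaults remain default_owner = "usgs" and default_repo = "pestpp":

from pyemu.utils.get_pestpp import _get_defaults

assert _get_defaults() == ("usgs", "pestpp")
assert _get_defaults(owner="pestpp") == ("pestpp", "pestpp-nightly-builds")
assert _get_defaults(repo="pestpp-nightly-builds") == ("pestpp", "pestpp-nightly-builds")
assert _get_defaults(owner="usgs", repo="pestpp") == ("usgs", "pestpp")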