Skip to content

Commit

Permalink
Merge pull request #126 from TRI-AMDD/development
Browse files Browse the repository at this point in the history
v1.0.0
  • Loading branch information
d-cogswell committed Apr 4, 2024
2 parents 5fdfd82 + aca1a00 commit be680cb
Show file tree
Hide file tree
Showing 99 changed files with 17,353 additions and 205 deletions.
73 changes: 73 additions & 0 deletions .github/workflows/mpet-regression-test-sourceforge-daetools.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
# Regression test of MPET against daetools 2.2.0 installed from sourceforge
# (rather than from conda), running on Python 3.10.
name: MPET regression test with daetools 2.2.0 from sourceforge on python 3.10

on: [push, workflow_dispatch]

jobs:
  test:

    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10"]
    defaults:
      run:
        # Login shell so the conda/mamba environment is active in every step.
        shell: bash -l {0}
    steps:

    - uses: actions/checkout@v3
      with:
        fetch-depth: 1
        path: mpet

    - uses: conda-incubator/setup-miniconda@v2
      with:
        python-version: ${{ matrix.python-version }}
        mamba-version: "*"
        channels: conda-forge,defaults
        activate-environment: mpet-env

    - name: Install dependencies for daetools
      run: |
        mamba install numpy scipy matplotlib pyqt lxml pandas h5py openpyxl
    - name: Install daetools from sourceforge
      run: |
        curl -L 'https://master.dl.sourceforge.net/project/daetools/daetools/2.2.0/daetools-2.2.0-gnu_linux-x86_64.zip' -o dae.zip
        unzip dae.zip
        cd daetools*
        python setup.py install
    - name: Install additional dependencies using mpet's setup.py
      run: |
        cd mpet
        pip install .[test]
    - name: Set up test for modified branch
      run: |
        cd mpet/bin
        rm -rf workdir
        mkdir workdir
        cd workdir
        cp ../run_tests.py .
        ln -s ../../mpet .
        ln -s ../../tests .
    - name: run tests for modified branch and get coverage
      run: |
        cd mpet/bin/workdir
        coverage run --source=../../mpet/ run_tests.py --test_dir ./tests --output_dir ../../bin/workdir/modified > /dev/null
    - name: upload Coveralls
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: |
        cd mpet/bin/workdir
        coveralls --service=github || : # Don't fret if it fails
    - name: Checks test results
      run: |
        cd mpet/tests
        pytest --baseDir=ref_outputs --modDir=../bin/workdir/modified compare_tests.py
9 changes: 4 additions & 5 deletions .github/workflows/mpet-regression-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,14 +22,14 @@ jobs:

- uses: conda-incubator/setup-miniconda@v2
with:
auto-update-conda: true
python-version: ${{ matrix.python-version }}
mamba-version: "*"
channels: conda-forge,defaults
activate-environment: mpet-env

- name: Install daetools via conda
- name: Install daetools via mamba
run: |
conda activate mpet-env
conda install -c conda-forge daetools python=${{ matrix.python-version }} pip
mamba install daetools
- name: Install additional dependencies using mpet's setup.py
run: |
Expand All @@ -49,7 +49,6 @@ jobs:
- name: run tests for modified branch and get coverage
run: |
conda activate mpet-env
cd mpet/bin/workdir
coverage run --source=../../mpet/ run_tests.py --test_dir ./tests --output_dir ../../bin/workdir/modified > /dev/null
Expand Down
5 changes: 5 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -44,3 +44,8 @@ venv/
# documentation
docs/_build
docs/apidocs

# ignore python build files
build/
# ignore daetools
daetools*
52 changes: 52 additions & 0 deletions bin/create_ensemble.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
#!/usr/bin/env python3

import configparser
import sys
import itertools
import os


def ensemble_definitions():
    """Return the default parameter sweep as parallel key/value lists.

    Returns:
        (keys, val): ``keys`` is a list of (section, option) tuples and
        ``val`` the matching list of value-string lists to sweep over.
    """
    # Default (section, option) -> candidate values for the ensemble.
    ensemble = [
        [("Geometry","L_c"), ["2","43"]],
        [("Geometry","L_a"), ["3","5","43"]],
    ]

    # Split the pairs into two parallel lists.
    keys = [entry[0] for entry in ensemble]
    val = [entry[1] for entry in ensemble]
    return keys, val


def create_ensemble(cff, keys=None, val=None):
    """Generate one .cfg file per combination of the ensemble values.

    Each combination of the value lists in ``val`` is written as a config
    file next to ``cff`` (named "<opt>=<value>-....cfg"), and the path of
    every generated file is recorded, one per line, in
    'ensemble_parallel_configs.txt' in the current working directory.

    Args:
        cff: path to the base config file to vary.
        keys: list of (section, option) tuples to override; together with
            ``val`` defaults to ensemble_definitions().
        val: list of value-string lists, parallel to ``keys``.
    """
    with open('ensemble_parallel_configs.txt', "w") as ff:
        if keys is None and val is None:
            keys, val = ensemble_definitions()
        cfg = configparser.ConfigParser()
        # Preserve option-name case (configparser lower-cases by default).
        cfg.optionxform = str
        cfg.read(cff)
        cfg_dir = os.path.dirname(cff)
        # One config per element of the cartesian product of the value lists.
        # NOTE: the overrides are applied directly to cfg; since every key is
        # rewritten on each iteration, no copy of the parser is needed (the
        # previous "new_cfg = cfg" was only an alias, not a copy).
        for combination in itertools.product(*val):
            nicename = []
            # Renamed loop variable: previously it shadowed the parameter `val`.
            for (section, option), value in zip(keys, combination):
                cfg[section][option] = value
                nicename.append(option + "=" + value)

            # Write the varied config next to the base config file.
            out_path = cfg_dir + "/" + "-".join(nicename) + ".cfg"
            with open(out_path, "w") as f:
                cfg.write(f)
            ff.write(out_path + "\n")


if __name__ == '__main__':
    # CLI entry point: the single positional argument is the base config file.
    if len(sys.argv) < 2:
        # Fixed typo in the usage message ("create_enamble.py").
        print("need the config file [python create_ensemble.py <baseconfig>]")
        # sys.exit instead of exit: the latter is injected by the site module
        # and is not guaranteed outside interactive sessions.
        sys.exit(1)
    create_ensemble(sys.argv[1])
101 changes: 101 additions & 0 deletions bin/mpet_create_runjobs_dashboard.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
from create_ensemble import create_ensemble
from run_jobs import create_slurm_cluster, create_pbs_cluster, create_local_cluster, run_mpet
# import mpet_plot_app
import os
import subprocess
import shutil
from dask.distributed import Client


# --------------------------------------------------------------
# Fill all these out
# --------------------------------------------------------------
''' Ensemble settings/ '''
# The default config file that you want to adjust in the ensemble
# (path relative to the main folder this script is launched from)
cff = 'configs/params_system.cfg'
# (section, option) pairs and the list of values each should sweep over;
# create_ensemble() writes one .cfg file per combination of these values.
ensemble = [
    [("Sim Params","Nvol_c"), ["4", "5"]]
]


class ClusterSettings():
    '''Per-job resource settings forwarded to the dask cluster constructors.'''
    # time = 00:00:00 # Max walltime per job (hh:mm:ss format). Argument is not used with a local.
    nproc = 1  # type=int, Number of CPU cores per job. Argument is not used with a local cluster.
    mem = 1  # Max memory usage per job. For a local cluster it sets the memory limit per worker.
    queue = 1  # Queue to use. Argument is not used with a local cluster.
    dashboard_port = 4096  # Port for the dask dashboard


class MainSettings():
    '''Scheduler choice and job-count limits for the cluster run script.'''
    scheduler = 'local'  # choices=('slurm', 'pbs', 'local'); Scheduling system to use
    min_jobs = 1  # int; Minimum number of jobs to launch. Argument is not used with local cluster.
    max_jobs = 1  # int; Maximum number of jobs to launch. Argument is not used with local cluster.
    # Text file containing the path to each MPET config file to run;
    # 'ensemble_parallel_configs.txt' is what create_ensemble saves to
    # automatically (so don't change it)
    mpet_configs = 'ensemble_parallel_configs.txt'


# --------------------------------------------------------------
'''
End of parameters to fill out.
To run: execute run_mpet_create_run_dashboard.sh from the terminal;
. ./run_mpet_create_run_dashboard.sh
To see the resulting dashboard open the http link presented in the terminal:
" Dash is running on http://127.0.0.1:8050/ "
'''
# --------------------------------------------------------------


# helpers for ensemble, do not change: split the ensemble definition above
# into parallel lists of (section, option) keys and candidate-value lists,
# as expected by create_ensemble()
keys = [vals[0] for vals in ensemble]
val = [vals[1] for vals in ensemble]


def call_run_cluster(output_folder):
    """Create a dask cluster per MainSettings and run MPET on it.

    Args:
        output_folder: directory in which run_mpet stores simulation output.
    """
    args = ClusterSettings()
    main_settings = MainSettings()
    # Collect the public class attributes of ClusterSettings as keyword
    # arguments. BUG FIX: vars(args) on the *instance* returned an empty
    # dict, because the settings are class attributes — the slurm/pbs
    # constructors received no settings at all.
    # NOTE(review): assumes create_slurm_cluster/create_pbs_cluster accept
    # nproc/mem/queue/dashboard_port keywords — confirm against run_jobs.
    cluster_settings = {name: value for name, value in vars(ClusterSettings).items()
                        if not name.startswith('_')}

    # create cluster
    if main_settings.scheduler == 'slurm':
        cluster = create_slurm_cluster(**cluster_settings)
    elif main_settings.scheduler == 'pbs':
        cluster = create_pbs_cluster(**cluster_settings)
    elif main_settings.scheduler == 'local':
        cluster = create_local_cluster(args.mem, args.dashboard_port)

    # Scale Dask cluster automatically based on scheduler activity (only if not local cluster)
    if main_settings.scheduler != 'local':
        # BUG FIX: MainSettings instances are not subscriptable; the old
        # main_settings['min_jobs'] raised TypeError. Use attribute access.
        cluster.adapt(minimum_jobs=main_settings.min_jobs,
                      maximum_jobs=main_settings.max_jobs)
    client = Client(cluster)

    run_mpet(client, output_folder, os.path.abspath(main_settings.mpet_configs))


if __name__ == '__main__':
    # Generate the ensemble of config files from the settings at the top of
    # this script; also writes 'ensemble_parallel_configs.txt'.
    create_ensemble(cff, keys, val)

    # Define output folder
    # Store output in folder this script was called from
    output_folder = './runjobs_dashboard'
    # remove sim output if it already exists to only keep newest output
    if os.path.exists(os.path.join(output_folder, 'sim_output')):
        shutil.rmtree(os.path.join(output_folder, 'sim_output'))
    # create output folder if it does not exist yet
    if not os.path.exists(output_folder):
        os.mkdir(output_folder)
    call_run_cluster(output_folder)
    # Launch the plotting dashboard on the freshly generated sim_output
    # (blocks until the dashboard process exits).
    subprocess.call(["python", "./bin/mpet_plot_app.py", "-d",
                     str(os.path.join(output_folder, 'sim_output'))])

0 comments on commit be680cb

Please sign in to comment.