Testbook notebooks testing (#114)
carldlaird committed Jun 16, 2023
1 parent 973b041 commit dcca13c
Showing 13 changed files with 75 additions and 26 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/main.yml
@@ -42,10 +42,10 @@ jobs:
- name: "Run tox targets for ${{ matrix.python-version }}"
run: "tox"

- name: "Run tox notebooks targets for ${{ matrix.python-version }}"
run: |
shopt -s globstar
tox -e notebooks docs/**/*.ipynb
# - name: "Run tox notebooks targets for ${{ matrix.python-version }}"
# run: |
# shopt -s globstar
# tox -e notebooks docs/**/*.ipynb

- name: "Convert coverage"
run: "python -m coverage xml"
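Note: this commit comments out the tox-based notebook step in CI; the notebooks are instead exercised by the new pytest/testbook suite added below in tests/notebooks/test_run_notebooks.py. A hypothetical local equivalent, not part of this commit, would be a plain pytest run such as: python -m pytest tests/notebooks -v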
4 changes: 4 additions & 0 deletions setup.cfg
@@ -50,6 +50,7 @@ install_requires =
networkx
pyomo
numpy
protobuf==3.20.3


[options.packages.find]
@@ -67,6 +68,7 @@ testing =
setuptools
pytest
pytest-cov
testbook
nbmake
tox
flake8
@@ -84,11 +86,13 @@ testing =
torch
torchvision
tqdm
protobuf==3.20.3

testing_lean =
setuptools
pytest
pytest-cov
testbook
nbmake
tox
flake8
3 changes: 1 addition & 2 deletions src/omlt/dependencies.py
@@ -1,6 +1,5 @@
from pyomo.common.dependencies import attempt_import

# check for onnx and create shortcut if available
# check for dependencies and create shortcut if available
onnx, onnx_available = attempt_import("onnx")

keras, keras_available = attempt_import("tensorflow.keras")
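
For context, a minimal sketch of the attempt_import pattern used here, illustrative only and not code from this commit: attempt_import returns a (module, available) pair, and the boolean flag is what omlt.io and the test suite use to guard optional imports.

    from pyomo.common.dependencies import attempt_import

    # attempt_import returns a deferred module object plus an availability flag;
    # the module only resolves if the package is actually installed.
    onnx, onnx_available = attempt_import("onnx")

    if onnx_available:
        print(onnx.__version__)  # safe: the real onnx module is importable
    # otherwise, touching the deferred module raises DeferredImportError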
14 changes: 5 additions & 9 deletions src/omlt/formulation.py
@@ -58,7 +58,6 @@ class _PyomoFormulation(_PyomoFormulationInterface):
"""

def __init__(self):

super(_PyomoFormulation, self).__init__()
self.__block = None

@@ -79,14 +78,11 @@ def scalar_or_tuple(x):

def _setup_scaled_inputs_outputs(block, scaler=None, scaled_input_bounds=None):
if scaled_input_bounds is not None:

def bounds_rule(m, *k):
return scaled_input_bounds.__getitem__(scalar_or_tuple(k))

# bounds_rule = lambda m, *k : scaled_input_bounds.__getitem__(scalar_or_tuple(k))
block.scaled_inputs = pyo.Var(
block.inputs_set, initialize=0, bounds=bounds_rule
)
bnds = {
k: (float(scaled_input_bounds[k][0]), float(scaled_input_bounds[k][1]))
for k in block.inputs_set
}
block.scaled_inputs = pyo.Var(block.inputs_set, initialize=0, bounds=bnds)
else:
block.scaled_inputs = pyo.Var(block.inputs_set, initialize=0)

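The change above replaces the bounds_rule closure with a plain dict mapping each input index to a (lower, upper) tuple, passed directly to the bounds argument of pyo.Var. A minimal standalone sketch of the same idea, illustrative only and using made-up bounds:

    import pyomo.environ as pyo

    m = pyo.ConcreteModel()
    m.inputs_set = pyo.Set(initialize=[0, 1, 2])

    # hypothetical scaled input bounds keyed by input index
    scaled_input_bounds = {0: (-1.0, 1.0), 1: (0.0, 2.0), 2: (-0.5, 0.5)}

    bnds = {
        k: (float(scaled_input_bounds[k][0]), float(scaled_input_bounds[k][1]))
        for k in m.inputs_set
    }
    m.scaled_inputs = pyo.Var(m.inputs_set, initialize=0, bounds=bnds)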
2 changes: 1 addition & 1 deletion src/omlt/io/__init__.py
@@ -1,4 +1,4 @@
from omlt.dependencies import onnx_available, keras_available
from omlt.dependencies import keras_available, onnx_available

if onnx_available:
from omlt.io.onnx import (
2 changes: 1 addition & 1 deletion src/omlt/neuralnet/layer.py
@@ -471,7 +471,7 @@ def __str__(self):

def _eval_at_index(self, x, out_d, out_r, out_c):
acc = 0.0
for (k, index) in self.kernel_with_input_indexes(out_d, out_r, out_c):
for k, index in self.kernel_with_input_indexes(out_d, out_r, out_c):
acc += k * x[index]
return acc

1 change: 0 additions & 1 deletion src/omlt/neuralnet/network_definition.py
@@ -32,7 +32,6 @@ def __init__(

# Process input bounds to insure scaled input bounds exist for formulations
if scaled_input_bounds is None:

if unscaled_input_bounds is not None and scaling_object is not None:
lbs = scaling_object.get_scaled_input_expressions(
{k: t[0] for k, t in unscaled_input_bounds.items()}
1 change: 0 additions & 1 deletion src/omlt/scaling.py
@@ -51,7 +51,6 @@ class OffsetScaling(ScalingInterface):
"""

def __init__(self, offset_inputs, factor_inputs, offset_outputs, factor_outputs):

super(OffsetScaling, self).__init__()
self.__x_offset = convert_to_dict(offset_inputs)
self.__x_factor = convert_to_dict(factor_inputs)
3 changes: 1 addition & 2 deletions tests/gbt/test_gbt_formulation.py
@@ -1,11 +1,10 @@
from pathlib import Path

from omlt.dependencies import onnx, onnx_available

import pyomo.environ as pe
import pytest

from omlt import OmltBlock
from omlt.dependencies import onnx, onnx_available
from omlt.gbt.gbt_formulation import GBTBigMFormulation
from omlt.gbt.model import GradientBoostedTreeModel

6 changes: 3 additions & 3 deletions tests/neuralnet/test_keras.py
@@ -1,20 +1,20 @@
import numpy as np
import pyomo.environ as pyo
import pytest

from pyomo.common.dependencies import DeferredImportError

from omlt.dependencies import keras, keras_available

if keras_available:
from omlt.io import load_keras_sequential

from conftest import get_neural_network_data

from omlt.block import OmltBlock
from omlt.neuralnet import FullSpaceNNFormulation, ReducedSpaceNNFormulation
from omlt.neuralnet.activations import ComplementarityReLUActivation
from omlt.scaling import OffsetScaling

from conftest import get_neural_network_data


@pytest.mark.skipif(keras_available, reason="Test only valid when keras not available")
def test_keras_not_available_exception(datadir):
1 change: 0 additions & 1 deletion tests/neuralnet/test_network_definition.py
@@ -76,7 +76,6 @@ def test_input_bounds_no_scaler():


def test_input_bound_scaling_1D():

xoffset = {i: float(i) for i in range(3)}
xfactor = {i: 0.5 * (i + 1) for i in range(3)}
yoffset = {i: -0.25 * i for i in range(2)}
2 changes: 1 addition & 1 deletion tests/neuralnet/test_onnx.py
@@ -2,9 +2,9 @@

import numpy as np
import pytest
from pyomo.common.dependencies import DeferredImportError

from omlt.dependencies import onnx, onnx_available
from pyomo.common.dependencies import DeferredImportError

if onnx_available:
import onnxruntime as ort
54 changes: 54 additions & 0 deletions tests/notebooks/test_run_notebooks.py
@@ -0,0 +1,54 @@
import os

import pytest
from pyomo.common.fileutils import this_file_dir
from testbook import testbook

from omlt.dependencies import keras_available, onnx_available


# TODO: These will be replaced with stronger tests using testbook soon
def _test_run_notebook(folder, notebook_fname, n_cells):
# change to notebook directory to allow testing
cwd = os.getcwd()
os.chdir(os.path.join(this_file_dir(), "..", "..", "docs", "notebooks", folder))
with testbook(notebook_fname, timeout=300, execute=True) as tb:
assert tb.code_cells_executed == n_cells
os.chdir(cwd)


@pytest.mark.skipif(not keras_available, reason="keras needed for this notebook")
def test_autothermal_relu_notebook():
_test_run_notebook("neuralnet", "auto-thermal-reformer-relu.ipynb", 13)


@pytest.mark.skipif(not keras_available, reason="keras needed for this notebook")
def test_autothermal_reformer():
_test_run_notebook("neuralnet", "auto-thermal-reformer.ipynb", 13)


def test_build_network():
_test_run_notebook("neuralnet", "build_network.ipynb", 37)


@pytest.mark.skipif(
(not onnx_available) or (not keras_available),
reason="onnx and keras needed for this notebook",
)
def test_import_network():
_test_run_notebook("neuralnet", "import_network.ipynb", 16)


@pytest.mark.skipif(not onnx_available, reason="onnx needed for this notebook")
def test_mnist_example_convolutional():
_test_run_notebook("neuralnet", "mnist_example_convolutional.ipynb", 13)


@pytest.mark.skipif(not onnx_available, reason="onnx needed for this notebook")
def test_mnist_example_dense():
_test_run_notebook("neuralnet", "mnist_example_dense.ipynb", 13)


@pytest.mark.skipif(not keras_available, reason="keras needed for this notebook")
def test_neural_network_formulations():
_test_run_notebook("neuralnet", "neural_network_formulations.ipynb", 21)
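
The TODO above notes that these cell-count smoke tests are intended to be replaced with stronger testbook-based checks. A hedged sketch of what such a test could look like, using testbook's decorator and reference API with a hypothetical notebook path and variable name (not part of this commit):

    from testbook import testbook

    @testbook("docs/notebooks/neuralnet/build_network.ipynb", execute=True)
    def test_build_network_objects(tb):
        # tb.ref returns a reference to an object defined in the executed
        # notebook; "network_definition" is an illustrative variable name.
        net = tb.ref("network_definition")
        assert net is not None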
