Remove duplicated tests in zero code change tests for TF, and make them pytest compatible #110

Merged 5 commits on Dec 11, 2019
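The change follows one pattern throughout: every test parameter gets a default, so pytest can call the function with no arguments, and each variant that used to be reachable only from `__main__` gets a thin zero-argument wrapper that pytest can collect on its own. The TF file also switches from `from tf_utils import (...)` to a relative `from .tf_utils import (...)`, presumably so the module resolves when imported by pytest rather than run as a script. A minimal sketch of the pattern, using the PyTorch test below as the example:

```python
# Minimal sketch of the pattern this PR applies (not the full test bodies):
# give every parameter a pytest-callable default, and expose each variant
# as a zero-argument wrapper that pytest collects separately.

def test_pytorch(script_mode: bool = False, use_loss_module=False):
    ...  # unchanged body; defaults to the SageMaker-simulator path


def test_pytorch_loss_module(script_mode: bool = False):
    # New wrapper: collected by pytest directly, still overridable from __main__.
    test_pytorch(script_mode=script_mode, use_loss_module=True)
```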
6 changes: 5 additions & 1 deletion tests/zero_code_change/pytorch_integration_tests.py
@@ -21,7 +21,7 @@
from smdebug.core.utils import SagemakerSimulator, ScriptSimulator


def test_pytorch(script_mode: bool, use_loss_module=False):
def test_pytorch(script_mode: bool = False, use_loss_module=False):
smd.del_hook()

sim_class = ScriptSimulator if script_mode else SagemakerSimulator
@@ -82,6 +82,10 @@ def test_pytorch(script_mode: bool, use_loss_module=False):
)


def test_pytorch_loss_module(script_mode: bool = False):
test_pytorch(script_mode=script_mode, use_loss_module=True)


if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
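The `__main__` runner below this hunk is collapsed in the diff, so the exact argparse flag is not visible. The sketch below only illustrates how the manual entry point coexists with pytest collection, with a hypothetical `--script-mode` flag standing in for the real one:

```python
# Hypothetical sketch of the collapsed __main__ runner; the real flag name is
# not shown in this diff. test_pytorch / test_pytorch_loss_module are the
# functions defined earlier in this file.
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--script-mode", action="store_true")  # assumed flag name
    args = parser.parse_args()
    # Manual runs can still opt into script mode; pytest relies on the defaults.
    test_pytorch(script_mode=args.script_mode, use_loss_module=False)
    test_pytorch_loss_module(script_mode=args.script_mode)
```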
64 changes: 47 additions & 17 deletions tests/zero_code_change/tensorflow_integration_tests.py
@@ -22,7 +22,13 @@
import tensorflow_datasets as tfds
from tests.tensorflow.hooks.test_mirrored_strategy import test_basic
from tests.tensorflow.keras.test_keras_mirrored import test_tf_keras
from tf_utils import (

# First Party
import smdebug.tensorflow as smd
from smdebug.core.utils import SagemakerSimulator

# Local
from .tf_utils import (
get_data,
get_estimator,
get_input_fns,
@@ -31,12 +37,8 @@
get_train_op_and_placeholders,
)

# First Party
import smdebug.tensorflow as smd
from smdebug.core.utils import SagemakerSimulator


def test_estimator(script_mode: bool):
def test_estimator(script_mode: bool = False):
""" Works as intended. """
smd.del_hook()
tf.reset_default_graph()
@@ -134,7 +136,15 @@ def test_estimator_gradients_zcc(nested=False, mirrored=False):
assert len(trial.modes()) == 2


def test_linear_classifier(script_mode: bool):
def test_estimator_gradients_zcc_nested():
test_estimator_gradients_zcc(nested=True)


def test_estimator_gradients_zcc_mirrored():
test_estimator_gradients_zcc(nested=False, mirrored=True)


def test_linear_classifier(script_mode: bool = False):
""" Works as intended. """
smd.del_hook()
tf.reset_default_graph()
@@ -160,11 +170,20 @@ def test_linear_classifier(script_mode: bool):
assert len(trial.tensor_names()) > 0, "Tensors were not saved."


def test_monitored_session(script_mode: bool):
def test_monitored_session(script_mode: bool = False):
""" Works as intended. """
smd.del_hook()
tf.reset_default_graph()
with SagemakerSimulator() as sim:
json_file_contents = """
{
"S3OutputPath": "s3://sagemaker-test",
"LocalPath": "/opt/ml/output/tensors",
"HookParameters" : {
"save_interval": "100"
}
}
"""
with SagemakerSimulator(json_file_contents=json_file_contents) as sim:
train_op, X, Y = get_train_op_and_placeholders()
init = tf.compat.v1.global_variables_initializer()
mnist = get_data()
@@ -195,6 +214,9 @@ def test_monitored_session_gradients_zcc():
{
"S3OutputPath": "s3://sagemaker-test",
"LocalPath": "/opt/ml/output/tensors",
"HookParameters" : {
"save_interval": "100"
},
"CollectionConfigurations": [
{
"CollectionName": "gradients"
@@ -227,7 +249,7 @@ def test_monitored_session_gradients_zcc():
assert len(trial.tensor_names(collection="gradients")) > 0


def test_keras_v1(script_mode: bool):
def test_keras_v1(script_mode: bool = False):
""" Works as intended. """
smd.del_hook()
tf.reset_default_graph()
@@ -258,7 +280,7 @@ def test_keras_v1(script_mode: bool):
assert len(trial.tensor_names()) > 0, "Tensors were not saved."


def test_keras_gradients(script_mode: bool, tf_optimizer: bool = False):
def test_keras_gradients(script_mode: bool = False, tf_optimizer: bool = False):
""" Works as intended. """
smd.del_hook()
tf.reset_default_graph()
@@ -320,6 +342,10 @@ def test_keras_gradients(script_mode: bool, tf_optimizer: bool = False):
assert len(trial.tensor_names(collection="optimizer_variables")) > 0


def test_keras_gradients_tf_opt(script_mode: bool = False):
test_keras_gradients(script_mode=script_mode, tf_optimizer=True)


def test_keras_gradients_mirrored(include_workers="one"):
""" Works as intended. """
smd.del_hook()
@@ -366,7 +392,11 @@ def test_keras_gradients_mirrored(include_workers="one"):
test_tf_keras("/opt/ml/output/tensors", zcc=True, include_workers=include_workers)


def test_keras_to_estimator(script_mode: bool):
def test_keras_gradients_mirrored_all_workers():
test_keras_gradients_mirrored(include_workers="all")


def test_keras_to_estimator(script_mode: bool = False):
""" Works as intended. """
import tensorflow.compat.v1.keras as keras

@@ -426,14 +456,14 @@ def input_fn():
test_monitored_session_gradients_zcc()
test_estimator(script_mode=script_mode)
if not script_mode:
test_estimator_gradients_zcc(nested=True)
test_estimator_gradients_zcc(nested=False)
test_estimator_gradients_zcc(nested=False, mirrored=True)
test_estimator_gradients_zcc()
test_estimator_gradients_zcc_nested()
test_estimator_gradients_zcc_mirrored()
test_linear_classifier(script_mode=script_mode)
test_keras_v1(script_mode=script_mode)
test_keras_gradients(script_mode=script_mode)
test_keras_gradients(script_mode=script_mode, tf_optimizer=True)
test_keras_gradients_tf_opt(script_mode=script_mode)
test_keras_to_estimator(script_mode=script_mode)
if not script_mode:
test_keras_gradients_mirrored(include_workers="all")
test_keras_gradients_mirrored_all_workers()
test_keras_gradients_mirrored()
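With defaults on every test parameter and the variant wrappers in place, pytest can collect both modules without going through `__main__`. A hypothetical way to verify that (not part of the PR):

```python
# Hypothetical check, not part of this PR: ask pytest to collect (but not run)
# the zero-code-change test modules, now that their test functions take no
# required arguments.
import pytest

exit_code = pytest.main(
    [
        "tests/zero_code_change/pytorch_integration_tests.py",
        "tests/zero_code_change/tensorflow_integration_tests.py",
        "--collect-only",
    ]
)
print("pytest collection exit code:", exit_code)
```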