Navigation Menu

Skip to content

Commit

Permalink
PR #79: Fix TravisCI
Browse files Browse the repository at this point in the history
 * Disable distributed tests which cause timeouts.
 * Fix failing logging in distributed placement tests.
 * Clean up install script.
 * Add TF 1.13.
 * Add requirements.txt for installing dependencies.

PiperOrigin-RevId: 238671430
  • Loading branch information
cweill committed Mar 15, 2019
1 parent 46be352 commit e858929
Show file tree
Hide file tree
Showing 9 changed files with 41 additions and 73 deletions.
1 change: 1 addition & 0 deletions .gitignore
Expand Up @@ -6,6 +6,7 @@ docs/build*

# Bazel outputs.
bazel-*
*_pb2.py

# Byte-compiled
__pycache__/
Expand Down
21 changes: 17 additions & 4 deletions .travis.yml
@@ -1,4 +1,5 @@
sudo: required
dist: xenial
language: python
git:
depth: 10
Expand All @@ -8,8 +9,8 @@ python:
- "3.6"
env:
global:
- TF_LATEST="1.12.*"
- CODECOV_TOKEN="0d2c482b-f42c-4d4c-b092-cb628ad20857"
- BAZEL_VERSION=0.20.0
matrix:
# We test against recent versions of TensorFlow and tf-nightly.
# If updating, also update TF_LATEST above.
Expand All @@ -18,12 +19,24 @@ env:
- TF_VERSION="1.10.*"
- TF_VERSION="1.11.*"
- TF_VERSION="1.12.*"
- TF_VERSION="1.13.*"
- TF_VERSION="tf-nightly"
before_install:
- sudo apt-get -y install pkg-config zip g++ zlib1g-dev unzip
- wget https://github.com/bazelbuild/bazel/releases/download/"$BAZEL_VERSION"/bazel-"$BAZEL_VERSION"-installer-linux-x86_64.sh
- chmod +x bazel-"$BAZEL_VERSION"-installer-linux-x86_64.sh
- ./bazel-"$BAZEL_VERSION"-installer-linux-x86_64.sh --user
addons:
apt:
update: true
install:
# Install codecov for tracking coverage over time.
- pip install codecov
- pip install -r requirements.txt
- pip install numpy --upgrade
- ./oss_scripts/oss_pip_install.sh
script:
- ./oss_scripts/oss_test.sh
- nosetests
- cat /tmp/absl_testing/**/*.txt || true # Don't error if file does not exist.
after_success:
# Install codecov for tracking coverage over time.
- pip install codecov
- codecov
4 changes: 2 additions & 2 deletions adanet/core/distributed/placement.py
Expand Up @@ -232,8 +232,8 @@ def _worker_task(self, num_subnetworks):
self._num_workers):
tf.logging.log_first_n(
tf.logging.WARN,
"With drop_remainer=True, {} workers and {} subnetworks, the last {} "
"subnetworks will be dropped and will not be trained",
"With drop_remainer=True, %s workers and %s subnetworks, the last %s "
"subnetworks will be dropped and will not be trained", 1,
self._num_workers, num_subnetworks,
num_subnetworks - self._num_workers - 1)
# The first worker will always build the ensemble so we add 1.
Expand Down
10 changes: 5 additions & 5 deletions adanet/core/iteration.py
Expand Up @@ -187,18 +187,18 @@ def _check_numerics(self, features, labels):
if not _is_numeric(features[name]):
continue
tf.logging.info("DEBUG: Checking numerics of float feature '%s'.", name)
checked_features[name] = tf.debugging.check_numerics(
features[name], "features '{}'".format(name))
checked_features[name] = tf.check_numerics(features[name],
"features '{}'".format(name))
if isinstance(labels, dict):
for name in sorted(labels):
if not _is_numeric(labels[name]):
continue
tf.logging.info("DEBUG: Checking numerics of float label '%s'.", name)
checked_labels[name] = tf.debugging.check_numerics(
labels[name], "labels '{}'".format(name))
checked_labels[name] = tf.check_numerics(labels[name],
"labels '{}'".format(name))
elif labels is not None and _is_numeric(labels):
tf.logging.info("DEBUG: Checking numerics of labels.")
checked_labels = tf.debugging.check_numerics(labels, "'labels'")
checked_labels = tf.check_numerics(labels, "'labels'")
return checked_features, checked_labels

def build_iteration(self,
Expand Down
7 changes: 5 additions & 2 deletions adanet/pip_package/setup.py
Expand Up @@ -26,8 +26,11 @@
exec(in_file.read())

REQUIRED_PACKAGES = [
'numpy >= 1.12.0',
'six >= 1.10.0',
'six==1.11.*',
'numpy==1.15.*',

This comment has been minimized.

Copy link
@martinobertoni

martinobertoni Apr 9, 2019

@cweill I've issues in using adanet 0.6 because of this since other packages are working with numpy 1.16.*
Is this requirement strict?

This comment has been minimized.

Copy link
@cweill

cweill Apr 10, 2019

Author Contributor

I don't believe it's a strict requirement. >=1.12.0 should be good.

'nose==1.3.*',
'rednose==1.3.*',
'coverage==4.5.*',
'protobuf >= 3.6.0',
]

Expand Down
29 changes: 3 additions & 26 deletions oss_scripts/oss_pip_install.sh
Expand Up @@ -28,31 +28,8 @@ else
pip install -q "tensorflow==$TF_VERSION"
fi

# Make sure we have the latest version of numpy - avoid problems we were
# seeing with Python 3
pip install -q -U numpy

# Install Bazel for tests.
# Step 1: Install required packages
sudo apt-get install pkg-config zip g++ zlib1g-dev unzip python

# Step 2: Download Bazel binary installer
wget https://github.com/bazelbuild/bazel/releases/download/"$BAZEL_VERSION"/bazel-"$BAZEL_VERSION"-installer-linux-x86_64.sh

# Step 3: Install Bazel
chmod +x bazel-"$BAZEL_VERSION"-installer-linux-x86_64.sh
./bazel-"$BAZEL_VERSION"-installer-linux-x86_64.sh --user
export PATH="$PATH:$HOME/bin"

# Build adanet pip packaging script
bazel build //adanet/pip_package:build_pip_package

# Create the adanet pip package
bazel-bin/adanet/pip_package/build_pip_package /tmp/adanet_pkg

# Install and test the pip package
pip install /tmp/adanet_pkg/*.whl
bazel build -c opt //... --local_resources 2048,.5,1.0

# Finally try importing `adanet` in Python outside the cloned directory:
cd ..
python -c "import adanet"
# Copy generated proto files.
cp bazel-genfiles/adanet/core/report_pb2.py adanet/core
33 changes: 0 additions & 33 deletions oss_scripts/oss_test.sh

This file was deleted.

6 changes: 6 additions & 0 deletions requirements.txt
@@ -0,0 +1,6 @@
six==1.11.*
numpy==1.15.*
nose==1.3.*
rednose==1.3.*
coverage==4.5.*
protobuf==3.6.*
3 changes: 2 additions & 1 deletion setup.cfg
@@ -1,5 +1,6 @@
[nosetests]
verbosity=3
rednose=1
with-coverage=1
cover-package=adanet
exclude=.*nasnet.*.py
exclude=.*nasnet.*.py|.*estimator_distributed_test.py

0 comments on commit e858929

Please sign in to comment.