Timeouts and scala 2.12 for deeplearning4j-nlp-korean workaround (#380)
* Increase default timeout on Spark tests

Signed-off-by: Alex Black <blacka101@gmail.com>

* #8840 disable deeplearning4j-nlp-korean module for scala 2.12

Signed-off-by: Alex Black <blacka101@gmail.com>

* Fix for change-scala-versions.sh

Signed-off-by: Alex Black <blacka101@gmail.com>

* CUDA test fixes + more timeout issues

Signed-off-by: Alex Black <blacka101@gmail.com>

* More CUDA

Signed-off-by: Alex Black <blacka101@gmail.com>

* Small fix for cuDNN subsampling + same mode

Signed-off-by: Alex Black <blacka101@gmail.com>

* Flaky test fix

Signed-off-by: Alex Black <blacka101@gmail.com>

* Reduce memory requirements for ValidateCuDNN BN test

Signed-off-by: Alex Black <blacka101@gmail.com>

* Fix slow/inefficient ScalNet tests

Signed-off-by: Alex Black <blacka101@gmail.com>

* Increase timeouts to avoid failures if CI machines are slower than expected

Signed-off-by: Alex Black <blacka101@gmail.com>

* Ignore flaky test (issue #8849) and increase timeout for slow CI downloads

Signed-off-by: Alex Black <blacka101@gmail.com>
AlexDBlack committed Apr 20, 2020
1 parent 163222e commit 73aa760
Showing 41 changed files with 247 additions and 61 deletions.
10 changes: 10 additions & 0 deletions change-scala-versions.sh
@@ -88,5 +88,15 @@ find "$BASEDIR" -name 'pom.xml' -not -path '*target*' \
#Scala maven plugin, <scalaVersion>2.11</scalaVersion>
find "$BASEDIR" -name 'pom.xml' -not -path '*target*' \
-exec bash -c "sed_i 's/\(scalaVersion>\)'$FROM_VERSION'<\/scalaVersion>/\1'$TO_VERSION'<\/scalaVersion>/g' {}" \;

# Disable deeplearning4j-nlp-korean for scala 2.12 - see https://github.com/eclipse/deeplearning4j/issues/8840
if [ $TO_VERSION = $SCALA_211_VERSION ]; then
#Enable
sed -i 's/ <!--<module>deeplearning4j-nlp-korean<\/module>-->/ <module>deeplearning4j-nlp-korean<\/module>/g' deeplearning4j/deeplearning4j-nlp-parent/pom.xml
else
#Disable
sed -i 's/ <module>deeplearning4j-nlp-korean<\/module>/ <!--<module>deeplearning4j-nlp-korean<\/module>-->/g' deeplearning4j/deeplearning4j-nlp-parent/pom.xml
fi


echo "Done updating Scala versions.";
@@ -31,6 +31,7 @@
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.util.ModelSerializer;
import org.nd4j.base.Preconditions;
import org.nd4j.linalg.api.buffer.DataType;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.random.impl.BernoulliDistribution;
import org.nd4j.linalg.factory.Nd4j;
@@ -124,12 +125,20 @@ public static INDArray randomOneHot(long examples, long nOut){
return randomOneHot(examples, nOut, new Random(12345));
}

public static INDArray randomOneHot(DataType dataType, long examples, long nOut){
return randomOneHot(dataType, examples, nOut, new Random(12345));
}

public static INDArray randomOneHot(long examples, long nOut, long rngSeed){
return randomOneHot(examples, nOut, new Random(rngSeed));
}

public static INDArray randomOneHot(long examples, long nOut, Random rng){
INDArray arr = Nd4j.create(examples, nOut);
public static INDArray randomOneHot(long examples, long nOut, Random rng) {
return randomOneHot(Nd4j.defaultFloatingPointType(), examples,nOut, rng);
}

public static INDArray randomOneHot(DataType dataType, long examples, long nOut, Random rng){
INDArray arr = Nd4j.create(dataType, examples, nOut);
for( int i=0; i<examples; i++ ){
arr.putScalar(i, rng.nextInt((int) nOut), 1.0);
}
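The new DataType-aware overloads above let a test choose the dtype of the generated one-hot labels instead of relying on Nd4j's default floating-point type. A minimal usage sketch (shapes and variable names are illustrative, not taken from this diff):

// Hypothetical usage: FLOAT one-hot labels, 10 examples over 3 classes
INDArray labels = TestUtils.randomOneHot(DataType.FLOAT, 10, 3);                  // fixed internal seed (12345)
INDArray labels2 = TestUtils.randomOneHot(DataType.FLOAT, 10, 3, new Random(42)); // caller-supplied RNG
// Each row holds exactly one 1.0; the array dtype matches the requested DataType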
@@ -59,7 +59,7 @@ public class DataSetIteratorTest extends BaseDL4JTest {

@Override
public long getTimeoutMilliseconds() {
return 90000;
return 360000; //Should run quickly; increased to a large timeout due to occasional slow CI downloads
}

@Test
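The same getTimeoutMilliseconds() override is repeated across the gradient-check classes below, raising the per-class limit to 90 seconds. For context, a minimal sketch of how such a hook is typically enforced with JUnit 4's Timeout rule; this is an assumed illustration, not the actual BaseDL4JTest source:

import org.junit.Rule;
import org.junit.rules.Timeout;

// Hypothetical stand-in for BaseDL4JTest's timeout wiring
public abstract class TimeoutAwareTest {
    @Rule
    public Timeout timeout = Timeout.millis(getTimeoutMilliseconds());

    // Subclasses override this to allow more time on slow CI machines
    public long getTimeoutMilliseconds() {
        return 90000L;
    }
}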
@@ -46,6 +46,11 @@ public class AttentionLayerTest extends BaseDL4JTest {
@Rule
public ExpectedException exceptionRule = ExpectedException.none();

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testSelfAttentionLayer() {
int nIn = 3;
@@ -61,6 +61,11 @@ public class BNGradientCheckTest extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testGradient2dSimple() {
DataNormalization scaler = new NormalizerMinMaxScaler();
@@ -54,6 +54,11 @@ public class CNN1DGradientCheckTest extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testCnn1DWithLocallyConnected1D() {
Nd4j.getRandom().setSeed(1337);
@@ -55,6 +55,11 @@ public class CNN3DGradientCheckTest extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testCnn3DPlain() {
Nd4j.getRandom().setSeed(1337);
@@ -62,6 +62,11 @@ public class CNNGradientCheckTest extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testGradientCNNMLN() {
//Parameterized test, testing combinations of:
@@ -43,6 +43,11 @@

public class CapsnetGradientCheckTest extends BaseDL4JTest {

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testCapsNet() {

@@ -58,6 +58,11 @@ public class DropoutGradientCheck extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testDropoutGradient() {
int minibatch = 3;
@@ -53,6 +53,11 @@ public class GlobalPoolingGradientCheckTests extends BaseDL4JTest {
private static final double DEFAULT_MAX_REL_ERROR = 1e-3;
private static final double DEFAULT_MIN_ABS_ERROR = 1e-8;

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testRNNGlobalPoolingBasicMultiLayer() {
//Basic test of global pooling w/ LSTM
@@ -68,6 +68,11 @@ public class GradientCheckTests extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testMinibatchApplication() {
IrisDataSetIterator iter = new IrisDataSetIterator(30, 150);
@@ -69,6 +69,11 @@ public class GradientCheckTestsComputationGraph extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testBasicIris() {
Nd4j.getRandom().setSeed(12345);
@@ -53,6 +53,11 @@ public class LRNGradientCheckTests extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}


@Test
public void testGradientLRNSimple() {
@@ -54,6 +54,11 @@ public class LSTMGradientCheckTests extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testLSTMBasicMultiLayer() {
//Basic test of GravesLSTM layer
@@ -73,6 +73,11 @@ public class LossFunctionGradientCheck extends BaseDL4JTest {
private static final double DEFAULT_MAX_REL_ERROR = 1e-5;
private static final double DEFAULT_MIN_ABS_ERROR = 1e-8;

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void lossFunctionGradientCheck() {
ILossFunction[] lossFunctions = new ILossFunction[] {new LossBinaryXENT(), new LossBinaryXENT(),
@@ -48,6 +48,11 @@ public class NoBiasGradientCheckTests extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testGradientNoBiasDenseOutput() {

@@ -48,6 +48,11 @@ public class OutputLayerGradientChecks extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testRnnLossLayer() {
Nd4j.getRandom().setSeed(12345L);
@@ -52,6 +52,11 @@ public class RnnGradientChecks extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
@Ignore("AB 2019/06/24 - Ignored to get to all passing baseline to prevent regressions via CI - see issue #7912")
public void testBidirectionalWrapper() {
@@ -52,6 +52,11 @@ public class UtilLayerGradientChecks extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testMaskLayer() {
Nd4j.getRandom().setSeed(12345);
@@ -56,6 +56,11 @@ public class VaeGradientCheckTests extends BaseDL4JTest {
Nd4j.setDataType(DataType.DOUBLE);
}

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testVaeAsMLP() {
//Post pre-training: a VAE can be used as a MLP, by taking the mean value from p(z|x) as the output
@@ -64,6 +64,11 @@ public class YoloGradientCheckTests extends BaseDL4JTest {
@Rule
public TemporaryFolder testDir = new TemporaryFolder();

@Override
public long getTimeoutMilliseconds() {
return 90000L;
}

@Test
public void testYoloOutputLayer() {
int depthIn = 2;
@@ -63,7 +63,8 @@ public class TransferLearningMLNTest extends BaseDL4JTest {
public void simpleFineTune() {

long rng = 12345L;
DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));
Nd4j.getRandom().setSeed(rng);
DataSet randomData = new DataSet(Nd4j.rand(DataType.FLOAT, 10, 4), TestUtils.randomOneHot(DataType.FLOAT, 10, 3));
//original conf
NeuralNetConfiguration.Builder confToChange =
new NeuralNetConfiguration.Builder().seed(rng).optimizationAlgo(OptimizationAlgorithm.LBFGS)
@@ -123,7 +124,8 @@ public void simpleFineTune() {

@Test
public void testNoutChanges() {
DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 2));
Nd4j.getRandom().setSeed(12345);
DataSet randomData = new DataSet(Nd4j.rand(DataType.FLOAT, 10, 4), TestUtils.randomOneHot(DataType.FLOAT,10, 2));

NeuralNetConfiguration.Builder equivalentConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1));
FineTuneConfiguration overallConf = new FineTuneConfiguration.Builder().updater(new Sgd(0.1))
@@ -185,7 +187,8 @@ public void testNoutChanges() {

@Test
public void testRemoveAndAdd() {
DataSet randomData = new DataSet(Nd4j.rand(10, 4), Nd4j.rand(10, 3));
Nd4j.getRandom().setSeed(12345);
DataSet randomData = new DataSet(Nd4j.rand(DataType.FLOAT,10, 4), TestUtils.randomOneHot(DataType.FLOAT, 10, 3));

NeuralNetConfiguration.Builder equivalentConf = new NeuralNetConfiguration.Builder().updater(new Sgd(0.1));
FineTuneConfiguration overallConf = new FineTuneConfiguration.Builder().updater(new Sgd(0.1)).build();
@@ -377,8 +380,9 @@ public void testRemoveAndProcessing() {

@Test
public void testAllWithCNN() {
Nd4j.getRandom().setSeed(12345);

DataSet randomData = new DataSet(Nd4j.rand(10, 28 * 28 * 3).reshape(10, 3, 28, 28), Nd4j.rand(10, 10));
DataSet randomData = new DataSet(Nd4j.rand(DataType.FLOAT, 10, 28 * 28 * 3).reshape(10, 3, 28, 28), TestUtils.randomOneHot(DataType.FLOAT,10, 10));
MultiLayerNetwork modelToFineTune =
new MultiLayerNetwork(
new NeuralNetConfiguration.Builder().seed(123)
@@ -528,8 +532,9 @@ public void testFineTuneOverride() {

@Test
public void testAllWithCNNNew() {
Nd4j.getRandom().setSeed(12345);

DataSet randomData = new DataSet(Nd4j.rand(10, 28 * 28 * 3).reshape(10, 3, 28, 28), Nd4j.rand(10, 10));
DataSet randomData = new DataSet(Nd4j.rand(DataType.FLOAT,10, 28 * 28 * 3).reshape(10, 3, 28, 28), TestUtils.randomOneHot(10, 10));
MultiLayerNetwork modelToFineTune =
new MultiLayerNetwork(
new NeuralNetConfiguration.Builder().seed(123)
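The TransferLearningMLNTest changes above all follow one pattern: seed the RNG and replace Nd4j.rand(...) labels with TestUtils.randomOneHot(...), so the targets are valid one-hot vectors and each run is reproducible. A condensed sketch of that pattern (shapes illustrative):

Nd4j.getRandom().setSeed(12345);                                   // reproducible features
INDArray features = Nd4j.rand(DataType.FLOAT, 10, 4);              // 10 examples, 4 features
INDArray labels = TestUtils.randomOneHot(DataType.FLOAT, 10, 3);   // valid one-hot targets
DataSet randomData = new DataSet(features, labels);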
