From 0c1d9288e435494925d4aa4d9acadb2c9415bac5 Mon Sep 17 00:00:00 2001 From: Adam Gibson Date: Tue, 5 May 2015 15:07:45 -0700 Subject: [PATCH 1/2] update convolution layer conf --- .../org/deeplearning4j/optimize/solvers/LBFGS.java | 4 ++-- .../stepfunctions/DefaultStepFunction.java | 2 +- .../layers/ConvolutionDownSampleLayerTest.java | 6 +++--- .../nn/multilayer/MultiLayerTest.java | 2 +- .../org/deeplearning4j/plot/BarnesHutTsneTest.java | 2 +- .../java/org/deeplearning4j/plot/TsneTest.java | 3 +-- .../embeddings/inmemory/InMemoryLookupTable.java | 12 ++++++------ .../java/org/deeplearning4j/models/rntn/RNTN.java | 14 +++++++------- .../perform/models/word2vec/Word2VecPerformer.java | 8 ++++---- .../models/word2vec/WordVectorSerializerTest.java | 2 +- .../scaleout/api/ir/ParameterVectorUpdateable.java | 2 +- .../models/word2vec/Word2VecPerformer.java | 8 ++++---- .../spark/models/embeddings/common/SaxpyMap.java | 2 +- .../models/embeddings/word2vec/SentenceBatch.java | 8 ++++---- .../embeddings/word2vec/Word2VecPerformer.java | 8 ++++---- .../embeddings/word2vec/Word2VecPerformerVoid.java | 8 ++++---- 16 files changed, 45 insertions(+), 46 deletions(-) diff --git a/deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/solvers/LBFGS.java b/deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/solvers/LBFGS.java index 226162a96faa..9df59e966164 100644 --- a/deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/solvers/LBFGS.java +++ b/deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/solvers/LBFGS.java @@ -118,7 +118,7 @@ public void preProcessLine(INDArray line) { if(i > rho.size()) throw new IllegalStateException("I > rho size"); alpha.putScalar(i, rho.get(i) * Nd4j.getBlasWrapper().dot(gradient, s.get(i))); - if(alpha.data().dataType() == DataBuffer.DOUBLE) + if(alpha.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(-1.0 * alpha.getDouble(i), gradient, y.get(i)); else Nd4j.getBlasWrapper().axpy(-1.0f * alpha.getFloat(i), gradient, y.get(i)); @@ -133,7 +133,7 @@ public void preProcessLine(INDArray line) { if(i >= alpha.length()) break; double beta = rho.get(i) * Nd4j.getBlasWrapper().dot(y.get(i),gradient); - if(alpha.data().dataType() == DataBuffer.DOUBLE) + if(alpha.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(alpha.getDouble(i) * beta, gradient, s.get(i)); else Nd4j.getBlasWrapper().axpy(alpha.getFloat(i) * (float) beta, gradient, s.get(i)); diff --git a/deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/stepfunctions/DefaultStepFunction.java b/deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/stepfunctions/DefaultStepFunction.java index 74e0be16833e..228b5e009f5e 100644 --- a/deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/stepfunctions/DefaultStepFunction.java +++ b/deeplearning4j-core/src/main/java/org/deeplearning4j/optimize/stepfunctions/DefaultStepFunction.java @@ -32,7 +32,7 @@ public class DefaultStepFunction implements StepFunction { public void step(INDArray x, INDArray line, Object[] params) { double alam = (double) params[0]; double oldAlam = (double) params[1]; - if(x.data().dataType() == DataBuffer.DOUBLE) { + if(x.data().dataType() == DataBuffer.Type.DOUBLE) { Nd4j.getBlasWrapper().axpy(alam - oldAlam, line, x); } else { diff --git a/deeplearning4j-core/src/test/java/org/deeplearning4j/models/layers/ConvolutionDownSampleLayerTest.java b/deeplearning4j-core/src/test/java/org/deeplearning4j/models/layers/ConvolutionDownSampleLayerTest.java index 
79724e5fe2d8..0d22eaebd9d6 100755 --- a/deeplearning4j-core/src/test/java/org/deeplearning4j/models/layers/ConvolutionDownSampleLayerTest.java +++ b/deeplearning4j-core/src/test/java/org/deeplearning4j/models/layers/ConvolutionDownSampleLayerTest.java @@ -55,8 +55,8 @@ public class ConvolutionDownSampleLayerTest { @Test public void testConvolution() throws Exception { boolean switched = false; - if(Nd4j.dtype == DataBuffer.FLOAT) { - Nd4j.dtype = DataBuffer.DOUBLE; + if(Nd4j.dtype == DataBuffer.Type.FLOAT) { + Nd4j.dtype = DataBuffer.Type.DOUBLE; switched = true; } MnistDataFetcher data = new MnistDataFetcher(true); @@ -70,7 +70,7 @@ public void testConvolution() throws Exception { ConvolutionDownSampleLayer c = LayerFactories.getFactory(n.getLayer()).create(n); if(switched) { - Nd4j.dtype = DataBuffer.FLOAT; + Nd4j.dtype = DataBuffer.Type.FLOAT; } } diff --git a/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java b/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java index 12493a463f5f..6fc3749931f2 100644 --- a/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java +++ b/deeplearning4j-core/src/test/java/org/deeplearning4j/nn/multilayer/MultiLayerTest.java @@ -58,7 +58,7 @@ public class MultiLayerTest { @Test public void testDbnFaces() { - Nd4j.dtype = DataBuffer.DOUBLE; + Nd4j.dtype = DataBuffer.Type.DOUBLE; DataSetIterator iter = new LFWDataSetIterator(28,28); DataSet next = iter.next(); diff --git a/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/BarnesHutTsneTest.java b/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/BarnesHutTsneTest.java index 93ef0fa91798..7dca95fa97cb 100755 --- a/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/BarnesHutTsneTest.java +++ b/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/BarnesHutTsneTest.java @@ -38,7 +38,7 @@ public class BarnesHutTsneTest { @Test public void testTsne() throws Exception { Nd4j.ENFORCE_NUMERICAL_STABILITY = true; - Nd4j.factory().setDType(DataBuffer.DOUBLE); + Nd4j.factory().setDType(DataBuffer.Type.DOUBLE); Nd4j.getRandom().setSeed(123); BarnesHutTsne b = new BarnesHutTsne.Builder().stopLyingIteration(250) .theta(0.5).learningRate(500).useAdaGrad(false) diff --git a/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/TsneTest.java b/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/TsneTest.java index 822319e18adc..db1dd7aadd08 100755 --- a/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/TsneTest.java +++ b/deeplearning4j-core/src/test/java/org/deeplearning4j/plot/TsneTest.java @@ -35,11 +35,10 @@ public class TsneTest { @Test public void testTsne() throws Exception { - Nd4j.dtype = DataBuffer.DOUBLE; + Nd4j.dtype = DataBuffer.Type.DOUBLE; Tsne calculation = new Tsne.Builder().setMaxIter(1).usePca(false).setSwitchMomentumIteration(20) .normalize(true).useAdaGrad(false).learningRate(500).perplexity(20).minGain(1e-1f) .build(); - Nd4j.getResourceManager().disable(); ClassPathResource resource = new ClassPathResource("/mnist2500_X.txt"); File f = resource.getFile(); INDArray data = Nd4j.readTxt(f.getAbsolutePath()," "); diff --git a/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTable.java b/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTable.java index 43bfc64e0c1f..5cb8a7279313 100644 --- 
a/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTable.java +++ b/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/embeddings/inmemory/InMemoryLookupTable.java @@ -231,7 +231,7 @@ public void iterateSample(VocabWord w1, VocabWord w2,AtomicLong nextRandom,doub //gradient double g = useAdaGrad ? w1.getGradient(i, (1 - code - f)) : (1 - code - f) * alpha; - if(neu1e.data().dataType() == DataBuffer.FLOAT) { + if(neu1e.data().dataType() == DataBuffer.Type.FLOAT) { Nd4j.getBlasWrapper().axpy((float) g, syn1, neu1e); Nd4j.getBlasWrapper().axpy((float) g, l1, syn1); @@ -281,18 +281,18 @@ else if (f < -MAX_EXP) g = label * (useAdaGrad ? w1.getGradient(target, alpha) : alpha); else g = useAdaGrad ? w1.getGradient(target, label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) : (label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) * alpha; - if(syn0.data().dataType() == DataBuffer.DOUBLE) + if(syn0.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,neu1e,l1); else Nd4j.getBlasWrapper().axpy((float) g,neu1e,l1); - if(syn0.data().dataType() == DataBuffer.DOUBLE) + if(syn0.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,syn1Neg.slice(target),l1); else Nd4j.getBlasWrapper().axpy((float) g,syn1Neg.slice(target),l1); } - if(syn0.data().dataType() == DataBuffer.DOUBLE) + if(syn0.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(1.0,neu1e,l1); else @@ -366,7 +366,7 @@ public void iterate(VocabWord w1, VocabWord w2) { //gradient double g = (1 - code - f) * (useAdaGrad ? w1.getGradient(i, alpha) : alpha); - if(syn0.data().dataType() == DataBuffer.DOUBLE) { + if(syn0.data().dataType() == DataBuffer.Type.DOUBLE) { Nd4j.getBlasWrapper().axpy(g, syn1, neu1e); Nd4j.getBlasWrapper().axpy(g, l1, syn1); } @@ -380,7 +380,7 @@ public void iterate(VocabWord w1, VocabWord w2) { - if(syn0.data().dataType() == DataBuffer.DOUBLE) + if(syn0.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(1.0,neu1e,l1); else diff --git a/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/rntn/RNTN.java b/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/rntn/RNTN.java index 67d342b311ee..4f7337d8119b 100755 --- a/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/rntn/RNTN.java +++ b/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/models/rntn/RNTN.java @@ -332,7 +332,7 @@ public INDArray randomTransformMatrix() { binary.put(indices,block); NDArrayIndex[] indices2 = new NDArrayIndex[]{interval(0,block.rows()),interval(numHidden,numHidden + block.columns())}; binary.put(indices2,randomTransformBlock()); - if(binary.data().dataType() == DataBuffer.DOUBLE) + if(binary.data().dataType() == DataBuffer.Type.DOUBLE) return Nd4j.getBlasWrapper().scal(scalingForInit,binary); return Nd4j.getBlasWrapper().scal((float) scalingForInit,binary); } @@ -352,7 +352,7 @@ INDArray randomClassificationMatrix() { INDArray ret = Nd4j.zeros(numOuts,numHidden + 1); INDArray insert = Nd4j.rand(numOuts,numHidden,-range,range,rng); ret.put(new NDArrayIndex[] {interval(0,numOuts),interval(0,numHidden)},insert); - if(ret.data().dataType() == (DataBuffer.DOUBLE)) + if(ret.data().dataType() == (DataBuffer.Type.DOUBLE)) return Nd4j.getBlasWrapper().scal(scalingForInit,ret); return Nd4j.getBlasWrapper().scal((float) 
scalingForInit, ret); @@ -546,7 +546,7 @@ public INDArray getBinaryINDArray(String left, String right) { double cost = 0.0f; // the regularization cost for (MultiDimensionalMap.Entry entry : currentMatrices.entrySet()) { INDArray D = derivatives.get(entry.getFirstKey(), entry.getSecondKey()); - if(D.data().dataType() == DataBuffer.DOUBLE) + if(D.data().dataType() == DataBuffer.Type.DOUBLE) D = Nd4j.getBlasWrapper().scal(scale,D).addi(Nd4j.getBlasWrapper().scal(regCost, entry.getValue())); else D = Nd4j.getBlasWrapper().scal((float) scale,D).addi(Nd4j.getBlasWrapper().scal((float) regCost, entry.getValue())); @@ -566,7 +566,7 @@ public INDArray getBinaryINDArray(String left, String right) { for (String s : currentMatrices.keySet()) { INDArray D = derivatives.get(s); INDArray vector = currentMatrices.get(s); - if(D.data().dataType() == DataBuffer.DOUBLE) + if(D.data().dataType() == DataBuffer.Type.DOUBLE) D = Nd4j.getBlasWrapper().scal(scale,D).addi(Nd4j.getBlasWrapper().scal(regCost,vector)); else D = Nd4j.getBlasWrapper().scal((float) scale,D).addi(Nd4j.getBlasWrapper().scal((float) regCost,vector)); @@ -587,7 +587,7 @@ public INDArray getBinaryINDArray(String left, String right) { for (String s : vocabCache.words()) { INDArray D = derivatives.get(s); INDArray vector = currentMatrices.vector(s); - if(D.data().dataType() == DataBuffer.DOUBLE) + if(D.data().dataType() == DataBuffer.Type.DOUBLE) D = Nd4j.getBlasWrapper().scal(scale,D).addi(Nd4j.getBlasWrapper().scal(regCost,vector)); else D = Nd4j.getBlasWrapper().scal((float) scale,D).addi(Nd4j.getBlasWrapper().scal((float) regCost,vector)); @@ -732,7 +732,7 @@ private INDArray computeINDArrayDeltaDown(INDArray deltaFull, INDArray leftVecto INDArray deltaINDArray = Nd4j.create(size * 2, 1); INDArray fullVector = Nd4j.concat(0, leftVector, rightVector); for (int slice = 0; slice < size; ++slice) { - if(deltaFull.data().dataType() == DataBuffer.DOUBLE) { + if(deltaFull.data().dataType() == DataBuffer.Type.DOUBLE) { INDArray scaledFullVector = Nd4j.getBlasWrapper().scal(deltaFull.getScalar(slice).getDouble(0),fullVector); deltaINDArray = deltaINDArray.add(Wt.slice(slice).add(Wt.slice(slice).transpose()).mmul(scaledFullVector)); } @@ -824,7 +824,7 @@ private INDArray getDoubleTensorGradient(INDArray deltaFull, INDArray leftVector INDArray Wt_df = Nd4j.create(size * 2, size * 2, size); INDArray fullVector = Nd4j.concat(0,leftVector, rightVector); for (int slice = 0; slice < size; ++slice) { - if(Wt_df.data().dataType() == DataBuffer.DOUBLE) + if(Wt_df.data().dataType() == DataBuffer.Type.DOUBLE) Wt_df.putSlice(slice, Nd4j.getBlasWrapper().scal(deltaFull.getDouble(slice),fullVector).mmul(fullVector.transpose())); else Wt_df.putSlice(slice, Nd4j.getBlasWrapper().scal((float) deltaFull.getDouble(slice),fullVector).mmul(fullVector.transpose())); diff --git a/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/scaleout/perform/models/word2vec/Word2VecPerformer.java b/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/scaleout/perform/models/word2vec/Word2VecPerformer.java index 423095b04424..78d81a21a129 100644 --- a/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/scaleout/perform/models/word2vec/Word2VecPerformer.java +++ b/deeplearning4j-scaleout/deeplearning4j-nlp/src/main/java/org/deeplearning4j/scaleout/perform/models/word2vec/Word2VecPerformer.java @@ -339,7 +339,7 @@ public void iterateSample(Word2VecWork work,VocabWord w1, VocabWord w2,double a double g = (1 - code - 
f) * (useAdaGrad ? w1.getGradient(i, alpha) : alpha); - if(neu1e.data().dataType() == DataBuffer.DOUBLE) { + if(neu1e.data().dataType() == DataBuffer.Type.DOUBLE) { Nd4j.getBlasWrapper().axpy(g, syn1, neu1e); Nd4j.getBlasWrapper().axpy(g, l1, syn1); } @@ -380,12 +380,12 @@ else if (f < -MAX_EXP) g = (label - 0) * (useAdaGrad ? w1.getGradient(target, alpha) : alpha); else g = useAdaGrad ? w1.getGradient(target, label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) : (label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) * alpha; - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,neu1e,l1); else Nd4j.getBlasWrapper().axpy((float) g,neu1e,l1); - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,syn1Neg,l1); else Nd4j.getBlasWrapper().axpy((float) g,syn1Neg,l1); @@ -395,7 +395,7 @@ else if (f < -MAX_EXP) - if(neu1e.data().dataType() == DataBuffer.DOUBLE) + if(neu1e.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(1.0,neu1e,l1); else diff --git a/deeplearning4j-scaleout/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/word2vec/WordVectorSerializerTest.java b/deeplearning4j-scaleout/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/word2vec/WordVectorSerializerTest.java index 44f185ea8826..93c7769955b6 100755 --- a/deeplearning4j-scaleout/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/word2vec/WordVectorSerializerTest.java +++ b/deeplearning4j-scaleout/deeplearning4j-nlp/src/test/java/org/deeplearning4j/models/word2vec/WordVectorSerializerTest.java @@ -66,7 +66,7 @@ public void testLoaderBinary() throws IOException { @Test public void testCurrentFile() throws Exception { - Nd4j.dtype = DataBuffer.FLOAT; + Nd4j.dtype = DataBuffer.Type.FLOAT; String url = "https://s3.amazonaws.com/dl4j-distribution/GoogleNews-vectors-negative300.bin.gz"; String path = "GoogleNews-vectors-negative300.bin.gz"; File toDl = new File(path); diff --git a/deeplearning4j-scaleout/deeplearning4j-scaleout-api/src/main/java/org/deeplearning4j/scaleout/api/ir/ParameterVectorUpdateable.java b/deeplearning4j-scaleout/deeplearning4j-scaleout-api/src/main/java/org/deeplearning4j/scaleout/api/ir/ParameterVectorUpdateable.java index dabccbb37884..9d1c0c6df8c6 100644 --- a/deeplearning4j-scaleout/deeplearning4j-scaleout-api/src/main/java/org/deeplearning4j/scaleout/api/ir/ParameterVectorUpdateable.java +++ b/deeplearning4j-scaleout/deeplearning4j-scaleout-api/src/main/java/org/deeplearning4j/scaleout/api/ir/ParameterVectorUpdateable.java @@ -79,7 +79,7 @@ public ByteBuffer toBytes() { public void fromString(String s) { String[] split = s.split(" "); paramMessage = Nd4j.create(split.length); - if(Nd4j.dataType() == DataBuffer.DOUBLE) { + if(Nd4j.dataType() == DataBuffer.Type.DOUBLE) { for(int i = 0 ;i < split.length; i++) { paramMessage.putScalar(i,Double.valueOf(split[i])); } diff --git a/deeplearning4j-scaleout/hadoop-yarn/deeplearning4j-nlp-yarn/src/main/java/org/deeplearning4j/models/word2vec/Word2VecPerformer.java b/deeplearning4j-scaleout/hadoop-yarn/deeplearning4j-nlp-yarn/src/main/java/org/deeplearning4j/models/word2vec/Word2VecPerformer.java index 78e1b18f2824..c6039f7f61f9 100644 --- a/deeplearning4j-scaleout/hadoop-yarn/deeplearning4j-nlp-yarn/src/main/java/org/deeplearning4j/models/word2vec/Word2VecPerformer.java +++ 
b/deeplearning4j-scaleout/hadoop-yarn/deeplearning4j-nlp-yarn/src/main/java/org/deeplearning4j/models/word2vec/Word2VecPerformer.java @@ -338,7 +338,7 @@ public void iterateSample(Word2VecWork work,VocabWord w1, VocabWord w2,double a double g = (1 - code - f) * (useAdaGrad ? w1.getGradient(i, alpha) : alpha); - if(neu1e.data().dataType() == DataBuffer.DOUBLE) { + if(neu1e.data().dataType() == DataBuffer.Type.DOUBLE) { Nd4j.getBlasWrapper().axpy(g, syn1, neu1e); Nd4j.getBlasWrapper().axpy(g, l1, syn1); } @@ -379,12 +379,12 @@ else if (f < -MAX_EXP) g = (label - 0) * (useAdaGrad ? w1.getGradient(target, alpha) : alpha); else g = useAdaGrad ? w1.getGradient(target, label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) : (label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) * alpha; - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,neu1e,l1); else Nd4j.getBlasWrapper().axpy((float) g,neu1e,l1); - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,syn1Neg,l1); else Nd4j.getBlasWrapper().axpy((float) g,syn1Neg,l1); @@ -394,7 +394,7 @@ else if (f < -MAX_EXP) - if(neu1e.data().dataType() == DataBuffer.DOUBLE) + if(neu1e.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(1.0,neu1e,l1); else diff --git a/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/common/SaxpyMap.java b/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/common/SaxpyMap.java index 625343ba1549..fe39a040c22e 100644 --- a/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/common/SaxpyMap.java +++ b/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/common/SaxpyMap.java @@ -37,7 +37,7 @@ public SaxpyMap(Number alpha, int fromIndex, int toIndex, String from, String to @Override public InMemoryLookupTable apply(InMemoryLookupTable inMemoryLookupTable) { - if(inMemoryLookupTable.getSyn0().data().dataType() == DataBuffer.DOUBLE) + if(inMemoryLookupTable.getSyn0().data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(alpha.doubleValue(),getFrom(inMemoryLookupTable),getTo(inMemoryLookupTable)); else Nd4j.getBlasWrapper().axpy(alpha.floatValue(),getFrom(inMemoryLookupTable),getTo(inMemoryLookupTable)); diff --git a/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java b/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java index eab2ac5a45f8..2b216f5cd17b 100644 --- a/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java +++ b/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java @@ -146,7 +146,7 @@ public void iterateSample(Word2VecParam param,VocabWord w1, VocabWord w2,double double g = (1 - code - f) * (useAdaGrad ? 
w1.getGradient(i, alpha) : alpha); - if (neu1e.data().dataType() == DataBuffer.DOUBLE) { + if (neu1e.data().dataType() == DataBuffer.Type.DOUBLE) { Nd4j.getBlasWrapper().axpy(g, syn1, neu1e); Nd4j.getBlasWrapper().axpy(g, l1, syn1); } else { @@ -189,12 +189,12 @@ else if (f < -MAX_EXP) g = label * (useAdaGrad ? w1.getGradient(target, alpha) : alpha); else g = useAdaGrad ? w1.getGradient(target, label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) : (label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) * alpha; - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,neu1e,l1); else Nd4j.getBlasWrapper().axpy((float) g,neu1e,l1); - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,syn1Neg,l1); else Nd4j.getBlasWrapper().axpy((float) g,syn1Neg,l1); @@ -203,7 +203,7 @@ else if (f < -MAX_EXP) } } - if(neu1e.data().dataType() == DataBuffer.DOUBLE) + if(neu1e.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(1.0,neu1e,l1); else diff --git a/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java b/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java index 6a423ec8b44b..26398174223a 100644 --- a/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java +++ b/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformer.java @@ -185,7 +185,7 @@ public void iterateSample(VocabWord w1, VocabWord w2,double alpha) { double g = (1 - code - f) * (useAdaGrad ? w1.getGradient(i, alpha) : alpha); - if (neu1e.data().dataType() == DataBuffer.DOUBLE) { + if (neu1e.data().dataType() == DataBuffer.Type.DOUBLE) { Nd4j.getBlasWrapper().axpy(g, syn1, neu1e); Nd4j.getBlasWrapper().axpy(g, l1, syn1); } else { @@ -226,19 +226,19 @@ else if (f < -MAX_EXP) g = label * (useAdaGrad ? w1.getGradient(target, alpha) : alpha); else g = useAdaGrad ? 
w1.getGradient(target, label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) : (label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) * alpha; - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,neu1e,l1); else Nd4j.getBlasWrapper().axpy((float) g,neu1e,l1); - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,syn1Neg,l1); else Nd4j.getBlasWrapper().axpy((float) g,syn1Neg,l1); } } - if(neu1e.data().dataType() == DataBuffer.DOUBLE) + if(neu1e.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(1.0,neu1e,l1); else diff --git a/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java b/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java index cbf675d07098..9be9c919c9dc 100644 --- a/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java +++ b/deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/Word2VecPerformerVoid.java @@ -316,7 +316,7 @@ public void iterateSample(VocabWord w1, VocabWord w2,double alpha) { double g = (1 - code - f) * (useAdaGrad ? w1.getGradient(i, alpha) : alpha); - if (neu1e.data().dataType() == DataBuffer.DOUBLE) { + if (neu1e.data().dataType() == DataBuffer.Type.DOUBLE) { Nd4j.getBlasWrapper().axpy(g, syn1, neu1e); Nd4j.getBlasWrapper().axpy(g, l1, syn1); } else { @@ -357,19 +357,19 @@ else if (f < -MAX_EXP) g = label * (useAdaGrad ? w1.getGradient(target, alpha) : alpha); else g = useAdaGrad ? 
w1.getGradient(target, label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) : (label - expTable[(int)((f + MAX_EXP) * (expTable.length / MAX_EXP / 2))]) * alpha; - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,neu1e,l1); else Nd4j.getBlasWrapper().axpy((float) g,neu1e,l1); - if(syn1Neg.data().dataType() == DataBuffer.DOUBLE) + if(syn1Neg.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(g,syn1Neg,l1); else Nd4j.getBlasWrapper().axpy((float) g,syn1Neg,l1); } } - if(neu1e.data().dataType() == DataBuffer.DOUBLE) + if(neu1e.data().dataType() == DataBuffer.Type.DOUBLE) Nd4j.getBlasWrapper().axpy(1.0,neu1e,l1); else From eb73b4eef873c4157fd19cad569dc3034cbce155 Mon Sep 17 00:00:00 2001 From: Adam Gibson Date: Tue, 5 May 2015 16:26:32 -0700 Subject: [PATCH 2/2] added gui --- .../java/org/deeplearning4j/ui/UiServer.java | 1 + .../word2vec/NearestNeighborsQuery.java | 72 ++++++++++++ .../word2vec/NearestNeighborsResource.java | 104 ++++++++++++++++++ .../word2vec/NearestNeighborsView.java | 31 ++++++ .../ui/nearestneighbors/index.ftl | 14 +-- .../ui/nearestneighbors/word2vec/index.ftl | 57 ++++++++++ .../ui/nearestneighbors/word2vec/package.json | 9 ++ .../ui/nearestneighbors/word2vec/readme.md | 38 +++++++ 8 files changed, 319 insertions(+), 7 deletions(-) create mode 100644 deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsQuery.java create mode 100755 deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsResource.java create mode 100755 deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsView.java create mode 100644 deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/index.ftl create mode 100644 deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/package.json create mode 100644 deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/readme.md diff --git a/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/UiServer.java b/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/UiServer.java index 5ed66d472a5b..c66bc4886dff 100644 --- a/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/UiServer.java +++ b/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/UiServer.java @@ -57,6 +57,7 @@ public void run(UIConfiguration uiConfiguration, Environment environment) throws environment.jersey().register(new NearestNeighborsResource(conf.getUploadPath())); environment.jersey().register(new WeightResource()); environment.jersey().register(new RendersResource()); + environment.jersey().register(new org.deeplearning4j.ui.nearestneighbors.word2vec.NearestNeighborsResource(conf.getUploadPath())); configureCors(environment); } diff --git a/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsQuery.java b/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsQuery.java new file mode 100644 index 000000000000..c8129bfe3298 --- /dev/null +++ b/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsQuery.java @@ -0,0 +1,72 @@ +/* + * + * * Copyright 2015 Skymind,Inc. + * * + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. 
+ * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. + * + */ + +package org.deeplearning4j.ui.nearestneighbors.word2vec; + +import java.io.Serializable; + +/** + * @author Adam Gibson + */ +public class NearestNeighborsQuery implements Serializable { + private String word; + private int numWords; + + public NearestNeighborsQuery(String word, int numWords) { + this.word = word; + this.numWords = numWords; + } + + public NearestNeighborsQuery() { + } + + public String getWord() { + return word; + } + + public void setWord(String word) { + this.word = word; + } + + public int getNumWords() { + return numWords; + } + + public void setNumWords(int numWords) { + this.numWords = numWords; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + NearestNeighborsQuery that = (NearestNeighborsQuery) o; + + if (numWords != that.numWords) return false; + return !(word != null ? !word.equals(that.word) : that.word != null); + + } + + @Override + public int hashCode() { + int result = word != null ? word.hashCode() : 0; + result = 31 * result + numWords; + return result; + } +} diff --git a/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsResource.java b/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsResource.java new file mode 100755 index 000000000000..3a4d99d2edec --- /dev/null +++ b/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsResource.java @@ -0,0 +1,104 @@ +/* + * + * * Copyright 2015 Skymind,Inc. + * * + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. 
+ * + */ + +package org.deeplearning4j.ui.nearestneighbors.word2vec; + +import io.dropwizard.views.View; +import org.apache.commons.collections.map.HashedMap; +import org.deeplearning4j.berkeley.Pair; +import org.deeplearning4j.clustering.sptree.DataPoint; +import org.deeplearning4j.clustering.vptree.VPTree; +import org.deeplearning4j.models.embeddings.WeightLookupTable; +import org.deeplearning4j.models.embeddings.inmemory.InMemoryLookupTable; +import org.deeplearning4j.models.embeddings.loader.WordVectorSerializer; +import org.deeplearning4j.models.embeddings.wordvectors.WordVectors; +import org.deeplearning4j.models.word2vec.VocabWord; +import org.deeplearning4j.models.word2vec.wordstore.VocabCache; +import org.deeplearning4j.ui.uploads.FileResource; + +import javax.ws.rs.GET; +import javax.ws.rs.POST; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import java.io.File; +import java.util.*; + +/** + * Nearest neighbors + * + * @author Adam Gibson + */ +@Path("/word2vec") +public class NearestNeighborsResource extends FileResource { + private WordVectors vectors; + private List words; + private Map theVocab; + private VocabCache vocab; + /** + * The file path for uploads + *y + * @param filePath the file path for uploads + */ + public NearestNeighborsResource(String filePath) { + super(filePath); + } + + @GET + public View get() { + return new NearestNeighborsView(); + } + + @POST + @Path("/vocab") + @Produces(MediaType.APPLICATION_JSON) + public Response getVocab() { + List words = new ArrayList<>(); + for(VocabWord word : this.words) + words.add(word.getWord()); + return Response.ok((new ArrayList<>(words))).build(); + } + + @POST + @Produces(MediaType.APPLICATION_JSON) + @Path("/words") + public Response getWords(NearestNeighborsQuery query) { + Collection nearestNeighors = vectors.wordsNearest(query.getWord(),query.getNumWords()); + Map map = new HashedMap(); + for(String s : nearestNeighors) + map.put(s,0.0); + return Response.ok(map).build(); + } + + + @Override + public void handleUpload(File path) { + try { + Pair vocab = WordVectorSerializer.loadTxt(path); + vectors = WordVectorSerializer.fromPair(vocab); + words = new ArrayList<>(vocab.getSecond().vocabWords()); + theVocab = new HashMap<>(); + for(VocabWord word : words) + theVocab.put(word.getIndex(),word); + this.vocab = vocab.getSecond(); + } catch (Exception e) { + e.printStackTrace(); + } + } +} diff --git a/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsView.java b/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsView.java new file mode 100755 index 000000000000..c4e661dca7c7 --- /dev/null +++ b/deeplearning4j-ui/src/main/java/org/deeplearning4j/ui/nearestneighbors/word2vec/NearestNeighborsView.java @@ -0,0 +1,31 @@ +/* + * + * * Copyright 2015 Skymind,Inc. + * * + * * Licensed under the Apache License, Version 2.0 (the "License"); + * * you may not use this file except in compliance with the License. + * * You may obtain a copy of the License at + * * + * * http://www.apache.org/licenses/LICENSE-2.0 + * * + * * Unless required by applicable law or agreed to in writing, software + * * distributed under the License is distributed on an "AS IS" BASIS, + * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * * See the License for the specific language governing permissions and + * * limitations under the License. 
+ * + */ + +package org.deeplearning4j.ui.nearestneighbors.word2vec; + +import io.dropwizard.views.View; + +/** + * Created by agibsonccc on 10/8/14. + */ +public class NearestNeighborsView extends View { + public NearestNeighborsView() { + super("index.ftl"); + + } +} diff --git a/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/index.ftl b/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/index.ftl index 9f2beb3ac0bb..6792288b6838 100644 --- a/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/index.ftl +++ b/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/index.ftl @@ -3,14 +3,14 @@ Nearest Neighbors - - - - + + + + - - - + + + diff --git a/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/index.ftl b/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/index.ftl new file mode 100644 index 000000000000..6792288b6838 --- /dev/null +++ b/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/index.ftl @@ -0,0 +1,57 @@ + + + + + Nearest Neighbors + + + + + + + + + + + +
+
+ <#-- k Nearest Neighbors page: markup lost in extraction; visible page text follows -->
+ Deeplearning4j
+ k Nearest Neighbors
+ 1. Upload a vectorized text file.
+      • The text file should be space-delimited.
+      • Each row should be a feature vector separated by spaces.
+      • If an individual feature has multiple words, use underscore to separate the words.
+ 2. Enter an integer value for k (number of nearest neighbors).
+ 3. Then select a word on the left panel.
+ 4. A list of k nearest neighbors will appear on this page.
+ 5. Optional: Select a new word to update nearest neighbors.
+ Enter an integer value for k:
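The page above talks to the NearestNeighborsResource added in this patch, which is mounted at /word2vec and exposes POST /vocab (lists the uploaded vocabulary) and POST /words (returns neighbors for a NearestNeighborsQuery, i.e. a JSON body with "word" and "numWords"). As a rough sketch of calling it outside the browser once a vectors file has been uploaded through the page; the host, port, example word, and use of the standard JAX-RS 2.0 client below are assumptions for illustration, not part of the patch:

```java
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.MediaType;

public class NearestNeighborsQueryExample {
    public static void main(String[] args) {
        // Assumes the DL4J UI server is running locally; adjust host/port to match your UiServer setup.
        Client client = ClientBuilder.newClient();
        String base = "http://localhost:8080/word2vec";

        // JSON body mirroring NearestNeighborsQuery: an arbitrary vocabulary word plus the neighbor count.
        String query = "{\"word\":\"day\",\"numWords\":5}";

        // The resource responds with a JSON map of neighbor -> score (scores are currently all 0.0).
        String neighbors = client.target(base).path("words")
                .request(MediaType.APPLICATION_JSON)
                .post(Entity.json(query), String.class);

        System.out.println(neighbors);
        client.close();
    }
}
```

A JSON-aware client (for example Jersey with Jackson registered) could map the response straight onto a Map<String, Double> instead of reading it as a raw string.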
+ + diff --git a/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/package.json b/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/package.json new file mode 100644 index 000000000000..75c1b65a9106 --- /dev/null +++ b/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/package.json @@ -0,0 +1,9 @@ +{ + "private": true, + "dependencies": { + "director": "^1.2.0", + "react": "^0.12.0", + "todomvc-app-css": "^1.0.0", + "todomvc-common": "^1.0.1" + } +} diff --git a/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/readme.md b/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/readme.md new file mode 100644 index 000000000000..82eba44ea7eb --- /dev/null +++ b/deeplearning4j-ui/src/main/resources/org/deeplearning4j/ui/nearestneighbors/word2vec/readme.md @@ -0,0 +1,38 @@ +# React TodoMVC Example + +> React is a JavaScript library for creating user interfaces. Its core principles are declarative code, efficiency, and flexibility. Simply specify what your component looks like and React will keep it up-to-date when the underlying data changes. + +> _[React - facebook.github.io/react](http://facebook.github.io/react)_ + + +## Learning React + +The [React getting started documentation](http://facebook.github.io/react/docs/getting-started.html) is a great way to get started. + +Here are some links you may find helpful: + +* [Documentation](http://facebook.github.io/react/docs/getting-started.html) +* [API Reference](http://facebook.github.io/react/docs/reference.html) +* [Blog](http://facebook.github.io/react/blog/) +* [React on GitHub](https://github.com/facebook/react) +* [Support](http://facebook.github.io/react/support.html) + +Articles and guides from the community: + +* [Philosophy](http://www.quora.com/Pete-Hunt/Posts/React-Under-the-Hood) +* [How is Facebook's React JavaScript library](http://www.quora.com/React-JS-Library/How-is-Facebooks-React-JavaScript-library) +* [React: Under the hood](http://www.quora.com/Pete-Hunt/Posts/React-Under-the-Hood) + +Get help from other React users: + +* [React on StackOverflow](http://stackoverflow.com/questions/tagged/reactjs) +* [Mailing list on Google Groups](https://groups.google.com/forum/#!forum/reactjs) +* +_If you have other helpful links to share, or find any of the links above no longer work, please [let us know](https://github.com/tastejs/todomvc/issues)._ + + +## Running + +The app is built with [JSX](http://facebook.github.io/react/docs/jsx-in-depth.html) and compiled at runtime for a lighter and more fun code reading experience. As stated in the link, JSX is not mandatory. + +To run the app, spin up an HTTP server (e.g. `python -m SimpleHTTPServer`) and visit http://localhost/.../myexample/.
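
The first patch in this series applies one recurring change: each call site checks the element type of an array's backing buffer before picking the double or float overload of axpy (or scal), and the old flat constants such as DataBuffer.DOUBLE become the nested enum values DataBuffer.Type.DOUBLE. A minimal sketch of that pattern in isolation (the helper class name and the ND4J import paths are assumptions for illustration; the patch itself keeps these checks inline at each call site):

```java
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

/** Illustrative helper showing the dtype-dispatch pattern the patch updates. */
public class DtypeAwareAxpy {

    /** y += g * x, using the BLAS overload that matches the buffer's element type. */
    public static void axpy(double g, INDArray x, INDArray y) {
        // The patch replaces the old flat constant DataBuffer.DOUBLE with the nested enum DataBuffer.Type.DOUBLE.
        if (x.data().dataType() == DataBuffer.Type.DOUBLE) {
            Nd4j.getBlasWrapper().axpy(g, x, y);         // double-precision path
        } else {
            Nd4j.getBlasWrapper().axpy((float) g, x, y); // float path: narrow the scalar before the call
        }
    }
}
```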