From 24f11aeb7f7f5ba6a2c9e96c959ffba87fff885a Mon Sep 17 00:00:00 2001 From: "Michael A. Alcorn" Date: Thu, 2 Nov 2017 12:38:09 -0500 Subject: [PATCH 01/22] Implement RankNet. --- .../org/apache/solr/ltr/model/RankNet.java | 169 ++++++++++++++++++ 1 file changed, 169 insertions(+) create mode 100644 solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java new file mode 100644 index 000000000000..5ddb953d8c48 --- /dev/null +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java @@ -0,0 +1,169 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.ltr.model; + +import java.lang.Math; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Explanation; +import org.apache.solr.ltr.feature.Feature; +import org.apache.solr.ltr.norm.Normalizer; + +/** + * A scoring model that computes scores using a neural network. + *

+ * Example configuration: +

{
+    "class" : "org.apache.solr.ltr.model.RankNet",
+    "name" : "rankNetModel",
+    "features" : [
+        { "name" : "documentRecency" },
+        { "name" : "isBook" },
+        { "name" : "originalScore" }
+    ],
+    "params" : {
+        "weights" : [
+            "1,2,3\n4,5,6\n7,8,9\n10,11,12",
+            "13,14,15,16\n17,18,19,20",
+            "21,22"
+        ],
+        "nonlinearity": "relu"
+    }
+}
+ *

+ * Training libraries: + *

+ *

+ * Background reading: + *

+ */ +public class RankNet extends LTRScoringModel { + + protected ArrayList weightMatrices; + protected String nonlinearity; + + public void setWeights(Object weights) { + + final List weightStrings = (List) weights; + weightMatrices = new ArrayList(); + + for (String matrixString : weightStrings) { + + String[] rows = matrixString.split("\n"); + int numRows = rows.length; + int numCols = rows[0].split(",").length; + + float[][] weightMatrix = new float[numRows][numCols]; + for (int i = 0; i < numRows; i++) { + String[] vals = rows[i].split(","); + for (int j = 0; j < numCols; j++) { + weightMatrix[i][j] = Float.parseFloat(vals[j]); + } + } + + weightMatrices.add(weightMatrix); + } + } + + public void setNonlinearity(Object nonlinearityStr) { + nonlinearity = (String) nonlinearityStr; + } + + private float[] dot(float[][] matrix, float[] inputVec) { + + int matrixRows = matrix.length; + int matrixCols = matrix[0].length; + float[] outputVec = new float[matrixRows]; + + for (int i = 0; i < matrixRows; i++) { + float outputVal = matrix[i][matrixCols - 1]; // Bias. + for (int j = 0; j < matrixCols - 1; j++) { + outputVal += matrix[i][j] * inputVec[j]; + } + outputVec[i] = outputVal; + } + + return outputVec; + } + + private float doNonlinearity(float x) { + if (nonlinearity.equals("relu")) { + return x < 0 ? 
0 : x; + } else if (nonlinearity.equals("sigmoid")) { + return (float)(1 / (1 + Math.exp(-x))); + } else { + return x; + } + } + + public RankNet(String name, List features, + List norms, + String featureStoreName, List allFeatures, + Map params) { + super(name, features, norms, featureStoreName, allFeatures, params); + } + + @Override + public float score(float[] modelFeatureValuesNormalized) { + + float[] outputVec = modelFeatureValuesNormalized; + float[][] weightMatrix; + int layers = weightMatrices.size(); + + for (int layer = 0; layer < layers; layer++) { + + weightMatrix = weightMatrices.get(layer); + outputVec = dot(weightMatrix, outputVec); + + if (layer < layers - 1) { + for (int i = 0; i < outputVec.length; i++) { + outputVec[i] = doNonlinearity(outputVec[i]); + } + } + } + + return outputVec[0]; + } + + @Override + public Explanation explain(LeafReaderContext context, int doc, + float finalScore, List featureExplanations) { + + String modelDescription = ""; + for (int layer = 0; layer < weightMatrices.size(); layer++) { + float[][] weightMatrix = weightMatrices.get(layer); + int numRows = weightMatrix.length; + int numCols = weightMatrix[layer].length; + if (layer == 0) { + modelDescription += String.format("Input has %1$d features.", numCols - 1); + } else { + modelDescription += String.format("%nHidden layer #%1$d has %2$d units.", layer, numCols); + } + } + return Explanation.match(finalScore, modelDescription); + } + +} From fe1c9c99a455ca6928c637dc6b2c20341668b9ab Mon Sep 17 00:00:00 2001 From: "Michael A. Alcorn" Date: Thu, 2 Nov 2017 12:39:45 -0500 Subject: [PATCH 02/22] Wording. 
--- .../contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java index 5ddb953d8c48..a59a887e8c5b 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java @@ -27,7 +27,7 @@ import org.apache.solr.ltr.norm.Normalizer; /** - * A scoring model that computes scores using a neural network. + * A scoring model that computes document scores using a neural network. *

* Example configuration:

{

From d752b243e4dad968e862d71d3f3b1bf34ab94aa2 Mon Sep 17 00:00:00 2001
From: "Michael A. Alcorn" 
Date: Thu, 2 Nov 2017 13:15:07 -0500
Subject: [PATCH 03/22] Fix explain.

---
 .../contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
index a59a887e8c5b..2de3d0c9ad11 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
@@ -156,7 +156,7 @@ public Explanation explain(LeafReaderContext context, int doc,
     for (int layer = 0; layer < weightMatrices.size(); layer++) {
       float[][] weightMatrix = weightMatrices.get(layer);
       int numRows = weightMatrix.length;
-      int numCols = weightMatrix[layer].length;
+      int numCols = weightMatrix[0].length;
       if (layer == 0) {
         modelDescription += String.format("Input has %1$d features.", numCols - 1);
       } else {

From 3ab54db66e4305dc66a5069694d62ff4b6c2dc85 Mon Sep 17 00:00:00 2001
From: "Michael A. Alcorn" 
Date: Wed, 15 Nov 2017 15:58:48 -0600
Subject: [PATCH 04/22] Address Christine Poerschke's comments.

---
 .../{RankNet.java => NeuralNetworkModel.java} |  82 ++++++---
 .../neuralnetworkmodel_features.json          |  30 ++++
 .../neuralnetworkmodel_bad_nonlinearity.json  |  17 ++
 .../neuralnetworkmodel_mismatch_input.json    |  17 ++
 .../neuralnetworkmodel_mismatch_layers.json   |  17 ++
 .../neuralnetworkmodel_too_many_rows.json     |  17 ++
 .../ltr/model/TestNeuralNetworkModel.java     | 156 ++++++++++++++++++
 solr/solr-ref-guide/src/learning-to-rank.adoc |   1 +
 8 files changed, 315 insertions(+), 22 deletions(-)
 rename solr/contrib/ltr/src/java/org/apache/solr/ltr/model/{RankNet.java => NeuralNetworkModel.java} (61%)
 create mode 100644 solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
 create mode 100644 solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java

diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
similarity index 61%
rename from solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
rename to solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
index 2de3d0c9ad11..b7f25fa6bbf3 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
@@ -31,7 +31,7 @@
  * 

* Example configuration:

{
-    "class" : "org.apache.solr.ltr.model.RankNet",
+    "class" : "org.apache.solr.ltr.model.NeuralNetworkModel",
     "name" : "rankNetModel",
     "features" : [
         { "name" : "documentRecency" },
@@ -40,9 +40,9 @@
     ],
     "params" : {
         "weights" : [
-            "1,2,3\n4,5,6\n7,8,9\n10,11,12",
-            "13,14,15,16\n17,18,19,20",
-            "21,22"
+            [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ], [ 13.0, 14.0, 15.0, 16.0 ] ],
+            [ [ 13.0, 14.0, 15.0, 16.0, 17.0 ], [ 18.0, 19.0, 20.0, 21.0, 22.0 ] ],
+            [ [ 23.0, 24.0, 25.0 ] ]
         ],
         "nonlinearity": "relu"
     }
@@ -60,27 +60,25 @@
  * Proceedings of the 22nd International Conference on Machine Learning (ICML), ACM, 2005.
  * 
  */
-public class RankNet extends LTRScoringModel {
+public class NeuralNetworkModel extends LTRScoringModel {
 
   protected ArrayList weightMatrices;
   protected String nonlinearity;
 
   public void setWeights(Object weights) {
+    final List>> matrixList = (List>>) weights;
 
-    final List weightStrings = (List) weights;
     weightMatrices = new ArrayList();
 
-    for (String matrixString : weightStrings) {
-
-      String[] rows = matrixString.split("\n");
-      int numRows = rows.length;
-      int numCols = rows[0].split(",").length;
+    for (List> matrix : matrixList) {
+      int numRows = matrix.size();
+      int numCols = matrix.get(0).size();
 
       float[][] weightMatrix = new float[numRows][numCols];
+
       for (int i = 0; i < numRows; i++) {
-        String[] vals = rows[i].split(",");
         for (int j = 0; j < numCols; j++) {
-          weightMatrix[i][j] = Float.parseFloat(vals[j]);
+          weightMatrix[i][j] = matrix.get(i).get(j).floatValue();
         }
       }
 
@@ -112,14 +110,12 @@ private float[] dot(float[][] matrix, float[] inputVec) {
   private float doNonlinearity(float x) {
     if (nonlinearity.equals("relu")) {
       return x < 0 ? 0 : x;
-    } else if (nonlinearity.equals("sigmoid")) {
-      return (float)(1 / (1 + Math.exp(-x)));
     } else {
-      return x;
+      return (float) (1 / (1 + Math.exp(-x)));
     }
   }
 
-  public RankNet(String name, List features,
+  public NeuralNetworkModel(String name, List features,
                  List norms,
                  String featureStoreName, List allFeatures,
                  Map params) {
@@ -127,9 +123,47 @@ public RankNet(String name, List features,
   }
 
   @Override
-  public float score(float[] modelFeatureValuesNormalized) {
+  protected void validate() throws ModelException {
+    super.validate();
+
+    if (!nonlinearity.matches("relu|sigmoid")) {
+      throw new ModelException("Invalid nonlinearity for model " + name + ". " +
+                               "\"" + nonlinearity + "\" is not \"relu\" or \"sigmoid\".");
+    }
+
+    int inputDim = features.size();
+
+    for (int i = 0; i < weightMatrices.size(); i++) {
+      float[][] weightMatrix = weightMatrices.get(i);
 
-    float[] outputVec = modelFeatureValuesNormalized;
+      int numRows = weightMatrix.length;
+      int numCols = weightMatrix[0].length;
+
+      if (inputDim != numCols - 1) {
+        if (i == 0) {
+          throw new ModelException("Dimension mismatch. Input for model " + name + " has " + Integer.toString(inputDim)
+                                   + " features, but matrix #0 has " + Integer.toString(numCols - 1) +
+                                   " non-bias columns.");
+        } else {
+          throw new ModelException("Dimension mismatch. Matrix #" + Integer.toString(i - 1) + " for model " + name +
+                                   " has " + Integer.toString(inputDim) + " rows, but matrix #" + Integer.toString(i) +
+                                   " has " + Integer.toString(numCols - 1) + " non-bias columns.");
+        }
+      }
+      
+      if (i == weightMatrices.size() - 1 && numRows != 1) {
+        throw new ModelException("Final matrix for model " + name + " has " + Integer.toString(numRows) +
+                                 " rows, but should have 1 row.");
+      }
+      
+      inputDim = numRows;
+    }
+  }
+
+  @Override
+  public float score(float[] inputFeatures) {
+
+    float[] outputVec = inputFeatures;
     float[][] weightMatrix;
     int layers = weightMatrices.size();
 
@@ -153,14 +187,18 @@ public Explanation explain(LeafReaderContext context, int doc,
                              float finalScore, List featureExplanations) {
 
     String modelDescription = "";
+
     for (int layer = 0; layer < weightMatrices.size(); layer++) {
+
       float[][] weightMatrix = weightMatrices.get(layer);
-      int numRows = weightMatrix.length;
       int numCols = weightMatrix[0].length;
+
       if (layer == 0) {
-        modelDescription += String.format("Input has %1$d features.", numCols - 1);
+        modelDescription += "Input has " + Integer.toString(numCols - 1) + " features.";
       } else {
-        modelDescription += String.format("%nHidden layer #%1$d has %2$d units.", layer, numCols);
+        modelDescription += System.lineSeparator();
+        modelDescription += "Hidden layer #" + Integer.toString(layer) + " has " + Integer.toString(numCols - 1);
+        modelDescription += " fully connected units.";
       }
     }
     return Explanation.match(finalScore, modelDescription);
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json b/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json
new file mode 100644
index 000000000000..36973f439556
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json
@@ -0,0 +1,30 @@
+[
+    {
+        "name": "constantOne",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1.0
+        }
+    },
+    {
+        "name": "constantTwo",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1.0
+        }
+    },
+    {
+        "name": "constantThree",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1.0
+        }
+    },
+    {
+        "name": "constantFour",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1.0
+        }
+    }
+]
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json
new file mode 100644
index 000000000000..f8059764589a
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.NeuralNetworkModel",
+    "name":"neuralnetworkmodel_bad_nonlinearity",
+    "features":[
+        { "name": "constantOne"},
+        { "name": "constantTwo"},
+        { "name": "constantThree"},
+        { "name": "constantFour"}
+    ],
+    "params":{
+        "weights": [
+            [ [ 1.0, 2.0, 3.0, 4.0, 5.0], [ 6.0, 7.0, 8.0, 9.0, 10.0 ], [ 11.0, 12.0, 13.0, 14.0, 15.0 ] ],
+            [ [ 1.0, 2.0, 3.0, 4.0 ] ]
+        ],
+        "nonlinearity": "sig"
+    }
+}
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
new file mode 100644
index 000000000000..42ea18200db9
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.NeuralNetworkModel",
+    "name":"neuralnetworkmodel_mismatch_input",
+    "features":[
+        { "name": "constantOne"},
+        { "name": "constantTwo"},
+        { "name": "constantThree"},
+        { "name": "constantFour"}
+    ],
+    "params":{
+        "weights": [
+            [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ],
+            [ [ 1.0, 2.0, 3.0, 4.0 ] ]
+        ],
+        "nonlinearity": "relu"
+    }
+}
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
new file mode 100644
index 000000000000..99ff89d44a7c
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.NeuralNetworkModel",
+    "name":"neuralnetworkmodel_mismatch_layers",
+    "features":[
+        { "name": "constantOne"},
+        { "name": "constantTwo"},
+        { "name": "constantThree"},
+        { "name": "constantFour"}
+    ],
+    "params":{
+        "weights": [
+            [ [ 1.0, 2.0, 3.0, 4.0, 5.0 ], [ 6.0, 7.0, 8.0, 9.0, 10.0 ] ],
+            [ [ 1.0, 2.0, 3.0, 4.0 ] ]
+        ],
+        "nonlinearity": "relu"
+    }
+}
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
new file mode 100644
index 000000000000..312191ffecf8
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.NeuralNetworkModel",
+    "name":"neuralnetworkmodel_too_many_rows",
+    "features":[
+        { "name": "constantOne"},
+        { "name": "constantTwo"},
+        { "name": "constantThree"},
+        { "name": "constantFour"}
+    ],
+    "params":{
+        "weights": [
+            [ [ 1.0, 2.0, 3.0, 4.0, 5.0], [ 6.0, 7.0, 8.0, 9.0, 10.0 ], [ 11.0, 12.0, 13.0, 14.0, 15.0 ] ],
+            [ [ 1.0, 2.0, 3.0, 4.0 ], [ 6.0, 7.0, 8.0, 9.0 ] ]
+        ],
+        "nonlinearity": "relu"
+    }
+}
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
new file mode 100644
index 000000000000..2b0eff728edc
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.norm.IdentityNormalizer;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestNeuralNetworkModel extends TestRerankBase {
+
+  public static LTRScoringModel createNeuralNetworkModel(String name, List features,
+      List norms,
+      String featureStoreName, List allFeatures,
+      Map params) throws ModelException {
+    final LTRScoringModel model = LTRScoringModel.getInstance(solrResourceLoader,
+        NeuralNetworkModel.class.getCanonicalName(),
+        name,
+        features, norms, featureStoreName, allFeatures, params);
+    return model;
+  }
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    setuptest(false);
+  }
+  
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testLinearAlgebra() {
+    final ArrayList rawMatrices = new ArrayList();
+    double[][] matrixOne = { { 1.0, 2.0, 3.0, 4.0, 5.0 },
+                             { 6.0, 7.0, 8.0, 9.0, 10.0 },
+                            { 11.0, 12.0, 13.0, 14.0, 15.0 } };
+    double[][] matrixTwo = { { 1.0, 2.0, 3.0, 4.0 } };
+    rawMatrices.add(matrixOne);
+    rawMatrices.add(matrixTwo);
+    
+    final ArrayList>> weights = new ArrayList>>();
+    for (int matrixNum = 0; matrixNum < rawMatrices.size(); matrixNum++) {
+      double[][] matrix = rawMatrices.get(matrixNum);
+      weights.add(new ArrayList>());
+      for (int row = 0; row < matrix.length; row++) {
+        weights.get(matrixNum).add(new ArrayList());
+        for (int col = 0; col < matrix[row].length; col++) {
+          weights.get(matrixNum).get(row).add(matrix[row][col]);
+        }
+      }
+    }
+
+    Map params = new HashMap();
+    final List features = getFeatures(new String[] {"constantOne", "constantTwo",
+                                                             "constantThree", "constantFour"});
+    final List norms =
+        new ArrayList(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    
+    params.put("weights", weights);
+    String nonlinearity = "relu";
+    params.put("nonlinearity", nonlinearity);
+    
+    final LTRScoringModel ltrScoringModel = createNeuralNetworkModel("test_score",
+        features, norms, "test_score", features, params);
+
+    float[] testVec = {1.0f, 1.0f, 1.0f, 1.0f};
+    ltrScoringModel.score(testVec);
+    assertEquals(294, ltrScoringModel.score(testVec), 0.001);
+  }
+
+  @Test
+  public void badNonlinearityTest() throws Exception {
+    final ModelException expectedException =
+        new ModelException("Invalid nonlinearity for model neuralnetworkmodel_bad_nonlinearity. " +
+                           "\"sig\" is not \"relu\" or \"sigmoid\".");
+    try {
+        createModelFromFiles("neuralnetworkmodel_bad_nonlinearity.json",
+              "neuralnetworkmodel_features.json");
+        fail("badNonlinearityTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void inputDimensionMismatchTest() throws Exception {
+    final ModelException expectedException =
+        new ModelException("Dimension mismatch. Input for model neuralnetworkmodel_mismatch_input has " + 
+                           "4 features, but matrix #0 has 3 non-bias columns.");
+    try {
+        createModelFromFiles("neuralnetworkmodel_mismatch_input.json",
+              "neuralnetworkmodel_features.json");
+        fail("inputDimensionMismatchTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void layerDimensionMismatchTest() throws Exception {
+    final ModelException expectedException =
+        new ModelException("Dimension mismatch. Matrix #0 for model neuralnetworkmodel_mismatch_layers has " + 
+                           "2 rows, but matrix #1 has 3 non-bias columns.");
+    try {
+        createModelFromFiles("neuralnetworkmodel_mismatch_layers.json",
+              "neuralnetworkmodel_features.json");
+        fail("layerDimensionMismatchTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+  
+  @Test
+  public void tooManyRowsTest() throws Exception {
+    final ModelException expectedException =
+        new ModelException("Final matrix for model neuralnetworkmodel_too_many_rows has 2 rows, " +
+                           "but should have 1 row.");
+    try {
+        createModelFromFiles("neuralnetworkmodel_too_many_rows.json",
+              "neuralnetworkmodel_features.json");
+        fail("layerDimensionMismatchTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+}
diff --git a/solr/solr-ref-guide/src/learning-to-rank.adoc b/solr/solr-ref-guide/src/learning-to-rank.adoc
index 02475c6b4fd2..0c0f651e9a7d 100644
--- a/solr/solr-ref-guide/src/learning-to-rank.adoc
+++ b/solr/solr-ref-guide/src/learning-to-rank.adoc
@@ -87,6 +87,7 @@ Feature selection and model training take place offline and outside Solr. The lt
 |General form |Class |Specific examples
 |Linear |{solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/LinearModel.html[LinearModel] |RankSVM, Pranking
 |Multiple Additive Trees |{solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.html[MultipleAdditiveTreesModel] |LambdaMART, Gradient Boosted Regression Trees (GBRT)
+|Neural Network |{solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/NeuralNetworkModel.html[NeuralNetworkModel] |RankNet
 |(custom) |(custom class extending {solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/LTRScoringModel.html[LTRScoringModel]) |(not applicable)
 |===
 

From 92a17db3457df7e941c6344a00f0a5f07129aa5e Mon Sep 17 00:00:00 2001
From: Christine Poerschke 
Date: Fri, 22 Dec 2017 12:50:38 +0000
Subject: [PATCH 05/22] extend TestNeuralNetworkModel coverage

* a model need not use all the features in a feature store
* model and store feature order can vary
* additional algebra tests, show workings for expected outputs
---
 .../ltr/model/TestNeuralNetworkModel.java     | 114 +++++++++++++++---
 1 file changed, 100 insertions(+), 14 deletions(-)

diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
index 2b0eff728edc..93ded132c7ea 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
@@ -56,10 +56,29 @@ public static void after() throws Exception {
   @Test
   public void testLinearAlgebra() {
     final ArrayList rawMatrices = new ArrayList();
-    double[][] matrixOne = { { 1.0, 2.0, 3.0, 4.0, 5.0 },
-                             { 6.0, 7.0, 8.0, 9.0, 10.0 },
-                            { 11.0, 12.0, 13.0, 14.0, 15.0 } };
-    double[][] matrixTwo = { { 1.0, 2.0, 3.0, 4.0 } };
+    final double layer1Node1Weight1 = 1.0;
+    final double layer1Node1Weight2 = 2.0;
+    final double layer1Node1Weight3 = 3.0;
+    final double layer1Node1Weight4 = 4.0;
+    final double layer1Node1Bias    = 5.0;
+    final double layer1Node2Weight1 = 6.0;
+    final double layer1Node2Weight2 = 7.0;
+    final double layer1Node2Weight3 = 8.0;
+    final double layer1Node2Weight4 = 9.0;
+    final double layer1Node2Bias    = 10.0;
+    final double layer1Node3Weight1 = 11.0;
+    final double layer1Node3Weight2 = 12.0;
+    final double layer1Node3Weight3 = 13.0;
+    final double layer1Node3Weight4 = 14.0;
+    final double layer1Node3Bias    = 15.0;
+    double[][] matrixOne = { { layer1Node1Weight1, layer1Node1Weight2, layer1Node1Weight3, layer1Node1Weight4, layer1Node1Bias },
+                             { layer1Node2Weight1, layer1Node2Weight2, layer1Node2Weight3, layer1Node2Weight4, layer1Node2Bias },
+                             { layer1Node3Weight1, layer1Node3Weight2, layer1Node3Weight3, layer1Node3Weight4, layer1Node3Bias } };
+    final double outputNodeWeight1 = 1.0;
+    final double outputNodeWeight2 = 2.0;
+    final double outputNodeWeight3 = 3.0;
+    final double outputNodeBias = 4.0;
+    double[][] matrixTwo = { { outputNodeWeight1, outputNodeWeight2, outputNodeWeight3, outputNodeBias } };
     rawMatrices.add(matrixOne);
     rawMatrices.add(matrixTwo);
     
@@ -76,22 +95,89 @@ public void testLinearAlgebra() {
     }
 
     Map params = new HashMap();
-    final List features = getFeatures(new String[] {"constantOne", "constantTwo",
-                                                             "constantThree", "constantFour"});
-    final List norms =
-        new ArrayList(
-            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
-    
     params.put("weights", weights);
     String nonlinearity = "relu";
     params.put("nonlinearity", nonlinearity);
+
+    final List allFeaturesInStore
+       = getFeatures(new String[] {"constantOne", "constantTwo",
+          "constantThree", "constantFour", "constantFive"});
     
+    final List featuresInModel = new ArrayList<>(allFeaturesInStore);
+    Collections.shuffle(featuresInModel, random()); // store and model order of features can vary
+    featuresInModel.remove(0); // models need not use all the store's features
+    assertEquals(4, featuresInModel.size()); // the test model uses four features
+
+    final List norms =
+        new ArrayList(
+            Collections.nCopies(featuresInModel.size(),IdentityNormalizer.INSTANCE));
     final LTRScoringModel ltrScoringModel = createNeuralNetworkModel("test_score",
-        features, norms, "test_score", features, params);
+        featuresInModel, norms, "test_score", allFeaturesInStore, params);
+
+    {
+      // pretend all features scored zero
+      float[] testVec = {0.0f, 0.0f, 0.0f, 0.0f};
+      // with all zero inputs the layer1 node outputs are layer1 node biases only
+      final double layer1Node1Output = layer1Node1Bias;
+      final double layer1Node2Output = layer1Node2Bias;
+      final double layer1Node3Output = layer1Node3Bias;
+      // with just one layer the output node calculation is easy
+      final double outputNodeOutput =
+          (layer1Node1Output*outputNodeWeight1) +
+          (layer1Node2Output*outputNodeWeight2) +
+          (layer1Node3Output*outputNodeWeight3) +
+          outputNodeBias;
+      assertEquals(74.0, outputNodeOutput, 0.001);
+      // and the expected score is that of the output node
+      final double expectedScore = outputNodeOutput;
+      float score = ltrScoringModel.score(testVec);
+      assertEquals(expectedScore, score, 0.001);
+    }
 
-    float[] testVec = {1.0f, 1.0f, 1.0f, 1.0f};
-    ltrScoringModel.score(testVec);
-    assertEquals(294, ltrScoringModel.score(testVec), 0.001);
+    {
+      // pretend all features scored one
+      float[] testVec = {1.0f, 1.0f, 1.0f, 1.0f};
+      // with all one inputs the layer1 node outputs are simply sum of weights and biases
+      final double layer1Node1Output = layer1Node1Weight1 + layer1Node1Weight2 + layer1Node1Weight3 + layer1Node1Weight4 + layer1Node1Bias;
+      final double layer1Node2Output = layer1Node2Weight1 + layer1Node2Weight2 + layer1Node2Weight3 + layer1Node2Weight4 + layer1Node2Bias;
+      final double layer1Node3Output = layer1Node3Weight1 + layer1Node3Weight2 + layer1Node3Weight3 + layer1Node3Weight4 + layer1Node3Bias;
+      // with just one layer the output node calculation is easy
+      final double outputNodeOutput =
+          (layer1Node1Output*outputNodeWeight1) +
+          (layer1Node2Output*outputNodeWeight2) +
+          (layer1Node3Output*outputNodeWeight3) +
+          outputNodeBias;
+      assertEquals(294.0, outputNodeOutput, 0.001);
+      // and the expected score is that of the output node
+      final double expectedScore = outputNodeOutput;
+      float score = ltrScoringModel.score(testVec);
+      assertEquals(expectedScore, score, 0.001);
+    }
+
+    {
+      // pretend all features scored random numbers in 0.0 to 1.0 range
+      final float input1 = random().nextFloat();
+      final float input2 = random().nextFloat();
+      final float input3 = random().nextFloat();
+      final float input4 = random().nextFloat();
+      float[] testVec = {input1, input2, input3, input4};
+      // the layer1 node outputs are sum of input-times-weight plus bias
+      final double layer1Node1Output = input1*layer1Node1Weight1 + input2*layer1Node1Weight2 + input3*layer1Node1Weight3 + input4*layer1Node1Weight4 + layer1Node1Bias;
+      final double layer1Node2Output = input1*layer1Node2Weight1 + input2*layer1Node2Weight2 + input3*layer1Node2Weight3 + input4*layer1Node2Weight4 + layer1Node2Bias;
+      final double layer1Node3Output = input1*layer1Node3Weight1 + input2*layer1Node3Weight2 + input3*layer1Node3Weight3 + input4*layer1Node3Weight4 + layer1Node3Bias;
+      // with just one layer the output node calculation is easy
+      final double outputNodeOutput =
+          (layer1Node1Output*outputNodeWeight1) +
+          (layer1Node2Output*outputNodeWeight2) +
+          (layer1Node3Output*outputNodeWeight3) +
+          outputNodeBias;
+      assertTrue("outputNodeOutput="+outputNodeOutput, 74.0 <= outputNodeOutput); // all zero inputs produced output 74
+      assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 294.0); // all one inputs produced output 294
+      // and the expected score is that of the output node
+      final double expectedScore = outputNodeOutput;
+      float score = ltrScoringModel.score(testVec);
+      assertEquals(expectedScore, score, 0.001);
+    }
   }
 
   @Test

From d0bdf43e29e9a621a3c4b2a0c1c9dd4fcfff0faa Mon Sep 17 00:00:00 2001
From: "Michael A. Alcorn" 
Date: Tue, 2 Jan 2018 08:23:14 -0600
Subject: [PATCH 06/22] Change comment to reflect test.

---
 .../org/apache/solr/ltr/model/TestNeuralNetworkModel.java     | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
index 93ded132c7ea..d6582f231803 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
@@ -171,8 +171,8 @@ public void testLinearAlgebra() {
           (layer1Node2Output*outputNodeWeight2) +
           (layer1Node3Output*outputNodeWeight3) +
           outputNodeBias;
-      assertTrue("outputNodeOutput="+outputNodeOutput, 74.0 <= outputNodeOutput); // all zero inputs produced output 74
-      assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 294.0); // all one inputs produced output 294
+      assertTrue("outputNodeOutput="+outputNodeOutput, 74.0 <= outputNodeOutput); // inputs between zero and one produced output less than 74
+      assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 294.0); // inputs between zero and one produced output less than 294
       // and the expected score is that of the output node
       final double expectedScore = outputNodeOutput;
       float score = ltrScoringModel.score(testVec);

From 421a662297605a9c53ccd69ad7eae13bbe284e13 Mon Sep 17 00:00:00 2001
From: Christine Poerschke 
Date: Fri, 2 Feb 2018 19:43:01 +0000
Subject: [PATCH 07/22] s/less/more in test comment

---
 .../test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
index d6582f231803..8ee9ea24fa5a 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
@@ -172,7 +172,7 @@ public void testLinearAlgebra() {
           (layer1Node3Output*outputNodeWeight3) +
           outputNodeBias;
       assertTrue("outputNodeOutput="+outputNodeOutput, 74.0 <= outputNodeOutput); // inputs between zero and one produced output less than 74
-      assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 294.0); // inputs between zero and one produced output less than 294
+      assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 294.0); // inputs between zero and one produced output more than 294
       // and the expected score is that of the output node
       final double expectedScore = outputNodeOutput;
       float score = ltrScoringModel.score(testVec);

From 4ff8bd8dad7455be5320f961fff0b4f44d5d9524 Mon Sep 17 00:00:00 2001
From: "Michael A. Alcorn" 
Date: Mon, 5 Feb 2018 08:23:45 -0600
Subject: [PATCH 08/22] Change comment wording to reflect assertion.

---
 .../org/apache/solr/ltr/model/TestNeuralNetworkModel.java     | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
index 8ee9ea24fa5a..9fe8d9dbdeb3 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
@@ -171,8 +171,8 @@ public void testLinearAlgebra() {
           (layer1Node2Output*outputNodeWeight2) +
           (layer1Node3Output*outputNodeWeight3) +
           outputNodeBias;
-      assertTrue("outputNodeOutput="+outputNodeOutput, 74.0 <= outputNodeOutput); // inputs between zero and one produced output less than 74
-      assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 294.0); // inputs between zero and one produced output more than 294
+      assertTrue("outputNodeOutput="+outputNodeOutput, 74.0 <= outputNodeOutput); // inputs between zero and one produced output greater than 74
+      assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 294.0); // inputs between zero and one produced output less than 294
       // and the expected score is that of the output node
       final double expectedScore = outputNodeOutput;
       float score = ltrScoringModel.score(testVec);

From 80a88b727f04101791b6599452087f340bb3c75d Mon Sep 17 00:00:00 2001
From: "Michael A. Alcorn" 
Date: Thu, 2 Nov 2017 12:38:09 -0500
Subject: [PATCH 09/22] Implement RankNet.

---
 .../org/apache/solr/ltr/model/RankNet.java    | 169 ++++++++++++++++++
 1 file changed, 169 insertions(+)
 create mode 100644 solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java

diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
new file mode 100644
index 000000000000..5ddb953d8c48
--- /dev/null
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+import java.lang.Math;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.Explanation;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.norm.Normalizer;
+
+/**
+ * A scoring model that computes scores using a neural network.
+ * 

+ * Example configuration: +

{
+    "class" : "org.apache.solr.ltr.model.RankNet",
+    "name" : "rankNetModel",
+    "features" : [
+        { "name" : "documentRecency" },
+        { "name" : "isBook" },
+        { "name" : "originalScore" }
+    ],
+    "params" : {
+        "weights" : [
+            "1,2,3\n4,5,6\n7,8,9\n10,11,12",
+            "13,14,15,16\n17,18,19,20",
+            "21,22"
+        ],
+        "nonlinearity": "relu"
+    }
+}
+ *

+ * Training libraries: + *

+ *

+ * Background reading: + *

+ */ +public class RankNet extends LTRScoringModel { + + protected ArrayList weightMatrices; + protected String nonlinearity; + + public void setWeights(Object weights) { + + final List weightStrings = (List) weights; + weightMatrices = new ArrayList(); + + for (String matrixString : weightStrings) { + + String[] rows = matrixString.split("\n"); + int numRows = rows.length; + int numCols = rows[0].split(",").length; + + float[][] weightMatrix = new float[numRows][numCols]; + for (int i = 0; i < numRows; i++) { + String[] vals = rows[i].split(","); + for (int j = 0; j < numCols; j++) { + weightMatrix[i][j] = Float.parseFloat(vals[j]); + } + } + + weightMatrices.add(weightMatrix); + } + } + + public void setNonlinearity(Object nonlinearityStr) { + nonlinearity = (String) nonlinearityStr; + } + + private float[] dot(float[][] matrix, float[] inputVec) { + + int matrixRows = matrix.length; + int matrixCols = matrix[0].length; + float[] outputVec = new float[matrixRows]; + + for (int i = 0; i < matrixRows; i++) { + float outputVal = matrix[i][matrixCols - 1]; // Bias. + for (int j = 0; j < matrixCols - 1; j++) { + outputVal += matrix[i][j] * inputVec[j]; + } + outputVec[i] = outputVal; + } + + return outputVec; + } + + private float doNonlinearity(float x) { + if (nonlinearity.equals("relu")) { + return x < 0 ? 
0 : x; + } else if (nonlinearity.equals("sigmoid")) { + return (float)(1 / (1 + Math.exp(-x))); + } else { + return x; + } + } + + public RankNet(String name, List features, + List norms, + String featureStoreName, List allFeatures, + Map params) { + super(name, features, norms, featureStoreName, allFeatures, params); + } + + @Override + public float score(float[] modelFeatureValuesNormalized) { + + float[] outputVec = modelFeatureValuesNormalized; + float[][] weightMatrix; + int layers = weightMatrices.size(); + + for (int layer = 0; layer < layers; layer++) { + + weightMatrix = weightMatrices.get(layer); + outputVec = dot(weightMatrix, outputVec); + + if (layer < layers - 1) { + for (int i = 0; i < outputVec.length; i++) { + outputVec[i] = doNonlinearity(outputVec[i]); + } + } + } + + return outputVec[0]; + } + + @Override + public Explanation explain(LeafReaderContext context, int doc, + float finalScore, List featureExplanations) { + + String modelDescription = ""; + for (int layer = 0; layer < weightMatrices.size(); layer++) { + float[][] weightMatrix = weightMatrices.get(layer); + int numRows = weightMatrix.length; + int numCols = weightMatrix[layer].length; + if (layer == 0) { + modelDescription += String.format("Input has %1$d features.", numCols - 1); + } else { + modelDescription += String.format("%nHidden layer #%1$d has %2$d units.", layer, numCols); + } + } + return Explanation.match(finalScore, modelDescription); + } + +} From 7be5cdd824f4474d892fb7c26205190a578b4c45 Mon Sep 17 00:00:00 2001 From: "Michael A. Alcorn" Date: Thu, 2 Nov 2017 12:39:45 -0500 Subject: [PATCH 10/22] Wording. 
--- .../contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java index 5ddb953d8c48..a59a887e8c5b 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java @@ -27,7 +27,7 @@ import org.apache.solr.ltr.norm.Normalizer; /** - * A scoring model that computes scores using a neural network. + * A scoring model that computes document scores using a neural network. *

* Example configuration:

{

From 093a3e931410e69525d9c23f109b14a754f34b89 Mon Sep 17 00:00:00 2001
From: "Michael A. Alcorn" 
Date: Thu, 2 Nov 2017 13:15:07 -0500
Subject: [PATCH 11/22] Fix explain.

---
 .../contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
index a59a887e8c5b..2de3d0c9ad11 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
@@ -156,7 +156,7 @@ public Explanation explain(LeafReaderContext context, int doc,
     for (int layer = 0; layer < weightMatrices.size(); layer++) {
       float[][] weightMatrix = weightMatrices.get(layer);
       int numRows = weightMatrix.length;
-      int numCols = weightMatrix[layer].length;
+      int numCols = weightMatrix[0].length;
       if (layer == 0) {
         modelDescription += String.format("Input has %1$d features.", numCols - 1);
       } else {

From 25efdd04195d3d9be8d34753deee24bcae6f9f18 Mon Sep 17 00:00:00 2001
From: "Michael A. Alcorn" 
Date: Wed, 15 Nov 2017 15:58:48 -0600
Subject: [PATCH 12/22] Address Christine Poerschke's comments.

---
 .../{RankNet.java => NeuralNetworkModel.java} |  82 ++++++---
 .../neuralnetworkmodel_features.json          |  30 ++++
 .../neuralnetworkmodel_bad_nonlinearity.json  |  17 ++
 .../neuralnetworkmodel_mismatch_input.json    |  17 ++
 .../neuralnetworkmodel_mismatch_layers.json   |  17 ++
 .../neuralnetworkmodel_too_many_rows.json     |  17 ++
 .../ltr/model/TestNeuralNetworkModel.java     | 156 ++++++++++++++++++
 solr/solr-ref-guide/src/learning-to-rank.adoc |   1 +
 8 files changed, 315 insertions(+), 22 deletions(-)
 rename solr/contrib/ltr/src/java/org/apache/solr/ltr/model/{RankNet.java => NeuralNetworkModel.java} (61%)
 create mode 100644 solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
 create mode 100644 solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java

diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
similarity index 61%
rename from solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
rename to solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
index 2de3d0c9ad11..b7f25fa6bbf3 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/RankNet.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
@@ -31,7 +31,7 @@
  * 

* Example configuration:

{
-    "class" : "org.apache.solr.ltr.model.RankNet",
+    "class" : "org.apache.solr.ltr.model.NeuralNetworkModel",
     "name" : "rankNetModel",
     "features" : [
         { "name" : "documentRecency" },
@@ -40,9 +40,9 @@
     ],
     "params" : {
         "weights" : [
-            "1,2,3\n4,5,6\n7,8,9\n10,11,12",
-            "13,14,15,16\n17,18,19,20",
-            "21,22"
+            [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ], [ 13.0, 14.0, 15.0, 16.0 ] ],
+            [ [ 13.0, 14.0, 15.0, 16.0, 17.0 ], [ 18.0, 19.0, 20.0, 21.0, 22.0 ] ],
+            [ [ 23.0, 24.0, 25.0 ] ]
         ],
         "nonlinearity": "relu"
     }
@@ -60,27 +60,25 @@
  * Proceedings of the 22nd International Conference on Machine Learning (ICML), ACM, 2005.
  * 
  */
-public class RankNet extends LTRScoringModel {
+public class NeuralNetworkModel extends LTRScoringModel {
 
   protected ArrayList weightMatrices;
   protected String nonlinearity;
 
   public void setWeights(Object weights) {
+    final List>> matrixList = (List>>) weights;
 
-    final List weightStrings = (List) weights;
     weightMatrices = new ArrayList();
 
-    for (String matrixString : weightStrings) {
-
-      String[] rows = matrixString.split("\n");
-      int numRows = rows.length;
-      int numCols = rows[0].split(",").length;
+    for (List> matrix : matrixList) {
+      int numRows = matrix.size();
+      int numCols = matrix.get(0).size();
 
       float[][] weightMatrix = new float[numRows][numCols];
+
       for (int i = 0; i < numRows; i++) {
-        String[] vals = rows[i].split(",");
         for (int j = 0; j < numCols; j++) {
-          weightMatrix[i][j] = Float.parseFloat(vals[j]);
+          weightMatrix[i][j] = matrix.get(i).get(j).floatValue();
         }
       }
 
@@ -112,14 +110,12 @@ private float[] dot(float[][] matrix, float[] inputVec) {
   private float doNonlinearity(float x) {
     if (nonlinearity.equals("relu")) {
       return x < 0 ? 0 : x;
-    } else if (nonlinearity.equals("sigmoid")) {
-      return (float)(1 / (1 + Math.exp(-x)));
     } else {
-      return x;
+      return (float) (1 / (1 + Math.exp(-x)));
     }
   }
 
-  public RankNet(String name, List features,
+  public NeuralNetworkModel(String name, List features,
                  List norms,
                  String featureStoreName, List allFeatures,
                  Map params) {
@@ -127,9 +123,47 @@ public RankNet(String name, List features,
   }
 
   @Override
-  public float score(float[] modelFeatureValuesNormalized) {
+  protected void validate() throws ModelException {
+    super.validate();
+
+    if (!nonlinearity.matches("relu|sigmoid")) {
+      throw new ModelException("Invalid nonlinearity for model " + name + ". " +
+                               "\"" + nonlinearity + "\" is not \"relu\" or \"sigmoid\".");
+    }
+
+    int inputDim = features.size();
+
+    for (int i = 0; i < weightMatrices.size(); i++) {
+      float[][] weightMatrix = weightMatrices.get(i);
 
-    float[] outputVec = modelFeatureValuesNormalized;
+      int numRows = weightMatrix.length;
+      int numCols = weightMatrix[0].length;
+
+      if (inputDim != numCols - 1) {
+        if (i == 0) {
+          throw new ModelException("Dimension mismatch. Input for model " + name + " has " + Integer.toString(inputDim)
+                                   + " features, but matrix #0 has " + Integer.toString(numCols - 1) +
+                                   " non-bias columns.");
+        } else {
+          throw new ModelException("Dimension mismatch. Matrix #" + Integer.toString(i - 1) + " for model " + name +
+                                   " has " + Integer.toString(inputDim) + " rows, but matrix #" + Integer.toString(i) +
+                                   " has " + Integer.toString(numCols - 1) + " non-bias columns.");
+        }
+      }
+      
+      if (i == weightMatrices.size() - 1 && numRows != 1) {
+        throw new ModelException("Final matrix for model " + name + " has " + Integer.toString(numRows) +
+                                 " rows, but should have 1 row.");
+      }
+      
+      inputDim = numRows;
+    }
+  }
+
+  @Override
+  public float score(float[] inputFeatures) {
+
+    float[] outputVec = inputFeatures;
     float[][] weightMatrix;
     int layers = weightMatrices.size();
 
@@ -153,14 +187,18 @@ public Explanation explain(LeafReaderContext context, int doc,
                              float finalScore, List featureExplanations) {
 
     String modelDescription = "";
+
     for (int layer = 0; layer < weightMatrices.size(); layer++) {
+
       float[][] weightMatrix = weightMatrices.get(layer);
-      int numRows = weightMatrix.length;
       int numCols = weightMatrix[0].length;
+
       if (layer == 0) {
-        modelDescription += String.format("Input has %1$d features.", numCols - 1);
+        modelDescription += "Input has " + Integer.toString(numCols - 1) + " features.";
       } else {
-        modelDescription += String.format("%nHidden layer #%1$d has %2$d units.", layer, numCols);
+        modelDescription += System.lineSeparator();
+        modelDescription += "Hidden layer #" + Integer.toString(layer) + " has " + Integer.toString(numCols - 1);
+        modelDescription += " fully connected units.";
       }
     }
     return Explanation.match(finalScore, modelDescription);
diff --git a/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json b/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json
new file mode 100644
index 000000000000..36973f439556
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json
@@ -0,0 +1,30 @@
+[
+    {
+        "name": "constantOne",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1.0
+        }
+    },
+    {
+        "name": "constantTwo",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1.0
+        }
+    },
+    {
+        "name": "constantThree",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1.0
+        }
+    },
+    {
+        "name": "constantFour",
+        "class": "org.apache.solr.ltr.feature.ValueFeature",
+        "params": {
+            "value": 1.0
+        }
+    }
+]
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json
new file mode 100644
index 000000000000..f8059764589a
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.NeuralNetworkModel",
+    "name":"neuralnetworkmodel_bad_nonlinearity",
+    "features":[
+        { "name": "constantOne"},
+        { "name": "constantTwo"},
+        { "name": "constantThree"},
+        { "name": "constantFour"}
+    ],
+    "params":{
+        "weights": [
+            [ [ 1.0, 2.0, 3.0, 4.0, 5.0], [ 6.0, 7.0, 8.0, 9.0, 10.0 ], [ 11.0, 12.0, 13.0, 14.0, 15.0 ] ],
+            [ [ 1.0, 2.0, 3.0, 4.0 ] ]
+        ],
+        "nonlinearity": "sig"
+    }
+}
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
new file mode 100644
index 000000000000..42ea18200db9
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.NeuralNetworkModel",
+    "name":"neuralnetworkmodel_mismatch_input",
+    "features":[
+        { "name": "constantOne"},
+        { "name": "constantTwo"},
+        { "name": "constantThree"},
+        { "name": "constantFour"}
+    ],
+    "params":{
+        "weights": [
+            [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ],
+            [ [ 1.0, 2.0, 3.0, 4.0 ] ]
+        ],
+        "nonlinearity": "relu"
+    }
+}
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
new file mode 100644
index 000000000000..99ff89d44a7c
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.NeuralNetworkModel",
+    "name":"neuralnetworkmodel_mismatch_layers",
+    "features":[
+        { "name": "constantOne"},
+        { "name": "constantTwo"},
+        { "name": "constantThree"},
+        { "name": "constantFour"}
+    ],
+    "params":{
+        "weights": [
+            [ [ 1.0, 2.0, 3.0, 4.0, 5.0 ], [ 6.0, 7.0, 8.0, 9.0, 10.0 ] ],
+            [ [ 1.0, 2.0, 3.0, 4.0 ] ]
+        ],
+        "nonlinearity": "relu"
+    }
+}
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
new file mode 100644
index 000000000000..312191ffecf8
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.NeuralNetworkModel",
+    "name":"neuralnetworkmodel_too_many_rows",
+    "features":[
+        { "name": "constantOne"},
+        { "name": "constantTwo"},
+        { "name": "constantThree"},
+        { "name": "constantFour"}
+    ],
+    "params":{
+        "weights": [
+            [ [ 1.0, 2.0, 3.0, 4.0, 5.0], [ 6.0, 7.0, 8.0, 9.0, 10.0 ], [ 11.0, 12.0, 13.0, 14.0, 15.0 ] ],
+            [ [ 1.0, 2.0, 3.0, 4.0 ], [ 6.0, 7.0, 8.0, 9.0 ] ]
+        ],
+        "nonlinearity": "relu"
+    }
+}
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
new file mode 100644
index 000000000000..2b0eff728edc
--- /dev/null
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.ltr.model;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.ltr.TestRerankBase;
+import org.apache.solr.ltr.feature.Feature;
+import org.apache.solr.ltr.norm.IdentityNormalizer;
+import org.apache.solr.ltr.norm.Normalizer;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestNeuralNetworkModel extends TestRerankBase {
+
+  public static LTRScoringModel createNeuralNetworkModel(String name, List<Feature> features,
+      List<Normalizer> norms,
+      String featureStoreName, List<Feature> allFeatures,
+      Map<String,Object> params) throws ModelException {
+    final LTRScoringModel model = LTRScoringModel.getInstance(solrResourceLoader,
+        NeuralNetworkModel.class.getCanonicalName(),
+        name,
+        features, norms, featureStoreName, allFeatures, params);
+    return model;
+  }
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    setuptest(false);
+  }
+  
+  @AfterClass
+  public static void after() throws Exception {
+    aftertest();
+  }
+
+  @Test
+  public void testLinearAlgebra() {
+    final ArrayList<double[][]> rawMatrices = new ArrayList<double[][]>();
+    double[][] matrixOne = { { 1.0, 2.0, 3.0, 4.0, 5.0 },
+                             { 6.0, 7.0, 8.0, 9.0, 10.0 },
+                            { 11.0, 12.0, 13.0, 14.0, 15.0 } };
+    double[][] matrixTwo = { { 1.0, 2.0, 3.0, 4.0 } };
+    rawMatrices.add(matrixOne);
+    rawMatrices.add(matrixTwo);
+    
+    final ArrayList<ArrayList<ArrayList<Double>>> weights = new ArrayList<ArrayList<ArrayList<Double>>>();
+    for (int matrixNum = 0; matrixNum < rawMatrices.size(); matrixNum++) {
+      double[][] matrix = rawMatrices.get(matrixNum);
+      weights.add(new ArrayList<ArrayList<Double>>());
+      for (int row = 0; row < matrix.length; row++) {
+        weights.get(matrixNum).add(new ArrayList<Double>());
+        for (int col = 0; col < matrix[row].length; col++) {
+          weights.get(matrixNum).get(row).add(matrix[row][col]);
+        }
+      }
+    }
+
+    Map<String,Object> params = new HashMap<String,Object>();
+    final List<Feature> features = getFeatures(new String[] {"constantOne", "constantTwo",
+                                                             "constantThree", "constantFour"});
+    final List<Normalizer> norms =
+        new ArrayList<Normalizer>(
+            Collections.nCopies(features.size(),IdentityNormalizer.INSTANCE));
+    
+    params.put("weights", weights);
+    String nonlinearity = "relu";
+    params.put("nonlinearity", nonlinearity);
+    
+    final LTRScoringModel ltrScoringModel = createNeuralNetworkModel("test_score",
+        features, norms, "test_score", features, params);
+
+    float[] testVec = {1.0f, 1.0f, 1.0f, 1.0f};
+    ltrScoringModel.score(testVec);
+    assertEquals(294, ltrScoringModel.score(testVec), 0.001);
+  }
+
+  @Test
+  public void badNonlinearityTest() throws Exception {
+    final ModelException expectedException =
+        new ModelException("Invalid nonlinearity for model neuralnetworkmodel_bad_nonlinearity. " +
+                           "\"sig\" is not \"relu\" or \"sigmoid\".");
+    try {
+        createModelFromFiles("neuralnetworkmodel_bad_nonlinearity.json",
+              "neuralnetworkmodel_features.json");
+        fail("badNonlinearityTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void inputDimensionMismatchTest() throws Exception {
+    final ModelException expectedException =
+        new ModelException("Dimension mismatch. Input for model neuralnetworkmodel_mismatch_input has " + 
+                           "4 features, but matrix #0 has 3 non-bias columns.");
+    try {
+        createModelFromFiles("neuralnetworkmodel_mismatch_input.json",
+              "neuralnetworkmodel_features.json");
+        fail("inputDimensionMismatchTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+
+  @Test
+  public void layerDimensionMismatchTest() throws Exception {
+    final ModelException expectedException =
+        new ModelException("Dimension mismatch. Matrix #0 for model neuralnetworkmodel_mismatch_layers has " + 
+                           "2 rows, but matrix #1 has 3 non-bias columns.");
+    try {
+        createModelFromFiles("neuralnetworkmodel_mismatch_layers.json",
+              "neuralnetworkmodel_features.json");
+        fail("layerDimensionMismatchTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+  
+  @Test
+  public void tooManyRowsTest() throws Exception {
+    final ModelException expectedException =
+        new ModelException("Final matrix for model neuralnetworkmodel_too_many_rows has 2 rows, " +
+                           "but should have 1 row.");
+    try {
+        createModelFromFiles("neuralnetworkmodel_too_many_rows.json",
+              "neuralnetworkmodel_features.json");
+        fail("tooManyRowsTest failed to throw exception: "+expectedException);
+    } catch (Exception actualException) {
+      Throwable rootError = getRootCause(actualException);
+      assertEquals(expectedException.toString(), rootError.toString());
+    }
+  }
+}
diff --git a/solr/solr-ref-guide/src/learning-to-rank.adoc b/solr/solr-ref-guide/src/learning-to-rank.adoc
index c165a3679d69..173d9d2f379c 100644
--- a/solr/solr-ref-guide/src/learning-to-rank.adoc
+++ b/solr/solr-ref-guide/src/learning-to-rank.adoc
@@ -87,6 +87,7 @@ Feature selection and model training take place offline and outside Solr. The lt
 |General form |Class |Specific examples
 |Linear |{solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/LinearModel.html[LinearModel] |RankSVM, Pranking
 |Multiple Additive Trees |{solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/MultipleAdditiveTreesModel.html[MultipleAdditiveTreesModel] |LambdaMART, Gradient Boosted Regression Trees (GBRT)
+|Neural Network |{solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/NeuralNetworkModel.html[NeuralNetworkModel] |RankNet
 |(wrapper) |{solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/DefaultWrapperModel.html[DefaultWrapperModel] |(not applicable)
 |(custom) |(custom class extending {solr-javadocs}/solr-ltr/org/apache/solr/ltr/model/LTRScoringModel.html[LTRScoringModel]) |(not applicable)
 |===

From 96746d1e97380848d06eaea03d52a892af6f3794 Mon Sep 17 00:00:00 2001
From: "Michael A. Alcorn" 
Date: Tue, 6 Feb 2018 08:12:57 -0600
Subject: [PATCH 13/22] Changes to RankNet based on Jira feedback.

---
 .../solr/ltr/model/NeuralNetworkModel.java    | 246 ++++++++++++------
 .../neuralnetworkmodel_bad_activation.json    |  24 ++
 .../neuralnetworkmodel_bad_nonlinearity.json  |  17 --
 .../neuralnetworkmodel_mismatch_bias.json     |  24 ++
 .../neuralnetworkmodel_mismatch_input.json    |  17 +-
 .../neuralnetworkmodel_mismatch_layers.json   |  17 +-
 .../neuralnetworkmodel_too_many_rows.json     |  17 +-
 .../ltr/model/TestNeuralNetworkModel.java     | 168 +++++++-----
 8 files changed, 353 insertions(+), 177 deletions(-)
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json
 delete mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json
 create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json

diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
index b7f25fa6bbf3..fe9c76258279 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
@@ -25,6 +25,7 @@
 import org.apache.lucene.search.Explanation;
 import org.apache.solr.ltr.feature.Feature;
 import org.apache.solr.ltr.norm.Normalizer;
+import org.apache.solr.util.SolrPluginUtils;
 
 /**
  * A scoring model that computes document scores using a neural network.
@@ -39,12 +40,27 @@
         { "name" : "originalScore" }
     ],
     "params" : {
-        "weights" : [
-            [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ], [ 13.0, 14.0, 15.0, 16.0 ] ],
-            [ [ 13.0, 14.0, 15.0, 16.0, 17.0 ], [ 18.0, 19.0, 20.0, 21.0, 22.0 ] ],
-            [ [ 23.0, 24.0, 25.0 ] ]
-        ],
-        "nonlinearity": "relu"
+        "layers" : [
+            {
+                "matrix" : [ [ 1.0, 2.0, 3.0 ],
+                             [ 4.0, 5.0, 6.0 ],
+                             [ 7.0, 8.0, 9.0 ],
+                             [ 10.0, 11.0, 12.0 ] ],
+                "bias" : [ 13.0, 14.0, 15.0, 16.0 ],
+                "activation" : "relu"
+            },
+            {
+                "matrix" : [ [ 17.0, 18.0, 19.0, 20.0 ],
+                             [ 21.0, 22.0, 23.0, 24.0 ] ],
+                "bias" : [ 25.0, 26.0 ],
+                "activation" : "relu"
+            },
+            {
+                "matrix" : [ [ 27.0, 28.0 ] ],
+                "bias" : [ 29.0 ],
+                "activation" : "none"
+            }
+        ]
     }
 }
*

@@ -62,56 +78,126 @@ */ public class NeuralNetworkModel extends LTRScoringModel { - protected ArrayList weightMatrices; - protected String nonlinearity; + private List layers; - public void setWeights(Object weights) { - final List>> matrixList = (List>>) weights; - - weightMatrices = new ArrayList(); + protected interface Activation { + // similar to UnaryOperator + float apply(float in); + } - for (List> matrix : matrixList) { - int numRows = matrix.size(); - int numCols = matrix.get(0).size();; + public class Layer { + private int layerID; + private float[][] weightMatrix; + private int matrixRows; + private int matrixCols; + private float[] biasVector; + private int numUnits; + private String activationStr; + private Activation activation; + + public Layer() { + layerID = layers.size(); + } - float[][] weightMatrix = new float[numRows][numCols]; + public void setMatrix(Object matrixObj) { + final List> matrix = (List>) matrixObj; + this.matrixRows = matrix.size(); + this.matrixCols = matrix.get(0).size(); + this.weightMatrix = new float[this.matrixRows][this.matrixCols]; - for (int i = 0; i < numRows; i++) { - for (int j = 0; j < numCols; j++) { - weightMatrix[i][j] = matrix.get(i).get(j).floatValue(); + for (int i = 0; i < this.matrixRows; i++) { + for (int j = 0; j < this.matrixCols; j++) { + this.weightMatrix[i][j] = matrix.get(i).get(j).floatValue(); } } + } - weightMatrices.add(weightMatrix); + public void setBias(Object biasObj) { + final List vector = (List) biasObj; + this.numUnits = vector.size(); + this.biasVector = new float[numUnits]; + + for (int i = 0; i < this.numUnits; i++) { + this.biasVector[i] = vector.get(i).floatValue(); + } } - } - public void setNonlinearity(Object nonlinearityStr) { - nonlinearity = (String) nonlinearityStr; - } + public void setActivation(Object activationStr) { + this.activationStr = (String) activationStr; + switch (this.activationStr) { + case "relu": + + this.activation = new Activation() { + @Override + public 
float apply(float in) { + return in < 0 ? 0 : in; + } + }; + + break; + case "sigmoid": + + this.activation = new Activation() { + @Override + public float apply(float in) { + return (float) (1 / (1 + Math.exp(-in))); + } + }; + + break; + default: + + this.activation = new Activation() { + @Override + public float apply(float in) { + return in; + } + }; + break; + } + } - private float[] dot(float[][] matrix, float[] inputVec) { + private float[] calculateOutput(float[] inputVec) { - int matrixRows = matrix.length; - int matrixCols = matrix[0].length; - float[] outputVec = new float[matrixRows]; + float[] outputVec = new float[this.matrixRows]; - for (int i = 0; i < matrixRows; i++) { - float outputVal = matrix[i][matrixCols - 1]; // Bias. - for (int j = 0; j < matrixCols - 1; j++) { - outputVal += matrix[i][j] * inputVec[j]; + for (int i = 0; i < this.matrixRows; i++) { + float outputVal = this.biasVector[i]; + for (int j = 0; j < this.matrixCols; j++) { + outputVal += this.weightMatrix[i][j] * inputVec[j]; + } + outputVec[i] = this.activation.apply(outputVal); } - outputVec[i] = outputVal; + + return outputVec; } - return outputVec; + public void validate() throws ModelException { + if (this.numUnits != this.matrixRows) { + throw new ModelException("Dimension mismatch in model \"" + name + "\". Layer " + + Integer.toString(this.layerID) + " has " + Integer.toString(this.numUnits) + + " bias weights but " + Integer.toString(this.matrixRows) + " weight matrix rows."); + } + if (!this.activationStr.matches("relu|sigmoid|none")) { + throw new ModelException("Invalid activation function in model \"" + name + "\". " + + "\"" + activationStr + "\" is not \"relu\", \"sigmoid\", or \"none\"."); + } + } } - private float doNonlinearity(float x) { - if (nonlinearity.equals("relu")) { - return x < 0 ? 
0 : x; - } else { - return (float) (1 / (1 + Math.exp(-x))); + private Layer createLayer(Map map) { + final Layer layer = new Layer(); + if (map != null) { + SolrPluginUtils.invokeSetters(layer, map.entrySet()); + } + return layer; + } + + public void setLayers(Object layers) { + this.layers = new ArrayList(); + for (final Object o : (List) layers) { + final Layer layer = createLayer((Map) o); + this.layers.add(layer); } } @@ -126,37 +212,30 @@ public NeuralNetworkModel(String name, List features, protected void validate() throws ModelException { super.validate(); - if (!nonlinearity.matches("relu|sigmoid")) { - throw new ModelException("Invalid nonlinearity for model " + name + ". " + - "\"" + nonlinearity + "\" is not \"relu\" or \"sigmoid\"."); - } - int inputDim = features.size(); - for (int i = 0; i < weightMatrices.size(); i++) { - float[][] weightMatrix = weightMatrices.get(i); - - int numRows = weightMatrix.length; - int numCols = weightMatrix[0].length; + for (int i = 0; i < layers.size(); i++) { - if (inputDim != numCols - 1) { + Layer layer = layers.get(i); + if (inputDim != layer.matrixCols) { if (i == 0) { - throw new ModelException("Dimension mismatch. Input for model " + name + " has " + Integer.toString(inputDim) - + " features, but matrix #0 has " + Integer.toString(numCols - 1) + - " non-bias columns."); + throw new ModelException("Dimension mismatch in model \"" + name + "\". The input has " + + Integer.toString(inputDim) + " features, but the weight matrix for layer 0 has " + + Integer.toString(layer.matrixCols) + " columns."); } else { - throw new ModelException("Dimension mismatch. Matrix #" + Integer.toString(i - 1) + " for model " + name + - " has " + Integer.toString(inputDim) + " rows, but matrix #" + Integer.toString(i) + - " has " + Integer.toString(numCols - 1) + " non-bias columns."); + throw new ModelException("Dimension mismatch in model \"" + name + "\". 
The weight matrix for layer " + + Integer.toString(i - 1) + " has " + Integer.toString(inputDim) + " rows, but the " + + "weight matrix for layer " + Integer.toString(i) + " has " + + Integer.toString(layer.matrixCols) + " columns."); } } - if (i == weightMatrices.size() - 1 & numRows != 1) { - throw new ModelException("Final matrix for model " + name + " has " + Integer.toString(numRows) + - " rows, but should have 1 row."); + if (i == layers.size() - 1 & layer.matrixRows != 1) { + throw new ModelException("The output matrix for model \"" + name + "\" has " + Integer.toString(layer.matrixRows) + + " rows, but should only have one."); } - inputDim = numRows; + inputDim = layer.matrixRows; } } @@ -164,19 +243,9 @@ protected void validate() throws ModelException { public float score(float[] inputFeatures) { float[] outputVec = inputFeatures; - float[][] weightMatrix; - int layers = weightMatrices.size(); - - for (int layer = 0; layer < layers; layer++) { - weightMatrix = weightMatrices.get(layer); - outputVec = dot(weightMatrix, outputVec); - - if (layer < layers - 1) { - for (int i = 0; i < outputVec.length; i++) { - outputVec[i] = doNonlinearity(outputVec[i]); - } - } + for (Layer layer : layers) { + outputVec = layer.calculateOutput(outputVec); } return outputVec[0]; @@ -186,22 +255,31 @@ public float score(float[] inputFeatures) { public Explanation explain(LeafReaderContext context, int doc, float finalScore, List featureExplanations) { - String modelDescription = ""; - - for (int layer = 0; layer < weightMatrices.size(); layer++) { + final StringBuilder modelDescription = new StringBuilder(); - float[][] weightMatrix = weightMatrices.get(layer); - int numCols = weightMatrix[0].length; + modelDescription.append("(name=").append(getName()); + modelDescription.append(",featureValues=["); - if (layer == 0) { - modelDescription += "Input has " + Integer.toString(numCols - 1) + " features."; - } else { - modelDescription += System.lineSeparator(); - modelDescription 
+= "Hidden layer #" + Integer.toString(layer) + " has " + Integer.toString(numCols - 1); - modelDescription += " fully connected units."; + for (int i = 0; i < featureExplanations.size(); i++) { + Explanation featureExplain = featureExplanations.get(i); + if (i > 0) { + modelDescription.append(","); } + final String key = features.get(i).getName(); + modelDescription.append(key).append("=").append(featureExplain.getValue()); + } + + modelDescription.append("])"); + + for (int i = 0; i < layers.size(); i++) { + Layer layer = layers.get(i); + modelDescription.append(System.lineSeparator()); + modelDescription.append("Hidden layer ").append(Integer.toString(i)).append(" has a "); + modelDescription.append(Integer.toString(layer.matrixRows)).append("x").append(Integer.toString(layer.matrixCols)); + modelDescription.append(" weight matrix, ").append(Integer.toString(layer.numUnits)).append(" bias weights, "); + modelDescription.append(" and a \"").append(layer.activationStr).append("\" activation function."); } - return Explanation.match(finalScore, modelDescription); + return Explanation.match(finalScore, modelDescription.toString()); } } diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json new file mode 100644 index 000000000000..6694ad3a3741 --- /dev/null +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json @@ -0,0 +1,24 @@ +{ + "class":"org.apache.solr.ltr.model.NeuralNetworkModel", + "name":"neuralnetworkmodel_bad_activation", + "features":[ + { "name": "constantOne"}, + { "name": "constantTwo"}, + { "name": "constantThree"}, + { "name": "constantFour"} + ], + "params":{ + "layers": [ + { + "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ], + "bias" : [ 13.0, 14.0, 15.0 ], + "activation": "sig" + }, + { + "matrix": [ [ 16.0, 17.0, 18.0 ] ], + "bias" : [ 19.0 ], 
+ "activation": "sigmoid" + } + ] + } +} diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json deleted file mode 100644 index f8059764589a..000000000000 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_nonlinearity.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "class":"org.apache.solr.ltr.model.NeuralNetworkModel", - "name":"neuralnetworkmodel_bad_nonlinearity", - "features":[ - { "name": "constantOne"}, - { "name": "constantTwo"}, - { "name": "constantThree"}, - { "name": "constantFour"} - ], - "params":{ - "weights": [ - [ [ 1.0, 2.0, 3.0, 4.0, 5.0], [ 6.0, 7.0, 8.0, 9.0, 10.0 ], [ 11.0, 12.0, 13.0, 14.0, 15.0 ] ], - [ [ 1.0, 2.0, 3.0, 4.0 ] ] - ], - "nonlinearity": "sig" - } -} diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json new file mode 100644 index 000000000000..324600098450 --- /dev/null +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json @@ -0,0 +1,24 @@ +{ + "class":"org.apache.solr.ltr.model.NeuralNetworkModel", + "name":"neuralnetworkmodel_mismatch_bias", + "features":[ + { "name": "constantOne"}, + { "name": "constantTwo"}, + { "name": "constantThree"}, + { "name": "constantFour"} + ], + "params":{ + "layers": [ + { + "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ], + "bias" : [ 13.0, 14.0 ], + "activation": "relu" + }, + { + "matrix": [ [ 16.0, 17.0, 18.0 ] ], + "bias" : [ 19.0 ], + "activation": "relu" + } + ] + } +} diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json index 42ea18200db9..2a377583fb41 100644 --- 
a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json @@ -8,10 +8,17 @@ { "name": "constantFour"} ], "params":{ - "weights": [ - [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ], - [ [ 1.0, 2.0, 3.0, 4.0 ] ] - ], - "nonlinearity": "relu" + "layers": [ + { + "matrix": [ [ 1.0, 2.0, 3.0 ], [ 5.0, 6.0, 7.0 ], [ 9.0, 10.0, 11.0 ] ], + "bias" : [ 13.0, 14.0, 15.0 ], + "activation": "relu" + }, + { + "matrix": [ [ 16.0, 17.0, 18.0 ] ], + "bias" : [ 19.0 ], + "activation": "relu" + } + ] } } diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json index 99ff89d44a7c..0dd30f6ed3b6 100644 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json @@ -8,10 +8,17 @@ { "name": "constantFour"} ], "params":{ - "weights": [ - [ [ 1.0, 2.0, 3.0, 4.0, 5.0 ], [ 6.0, 7.0, 8.0, 9.0, 10.0 ] ], - [ [ 1.0, 2.0, 3.0, 4.0 ] ] - ], - "nonlinearity": "relu" + "layers": [ + { + "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ] ], + "bias" : [ 13.0, 14.0 ], + "activation": "relu" + }, + { + "matrix": [ [ 16.0, 17.0, 18.0 ] ], + "bias" : [ 19.0 ], + "activation": "relu" + } + ] } } diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json index 312191ffecf8..bedb2adef502 100644 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json @@ -8,10 +8,17 @@ { "name": "constantFour"} ], "params":{ - "weights": [ - [ [ 1.0, 2.0, 3.0, 4.0, 5.0], [ 
6.0, 7.0, 8.0, 9.0, 10.0 ], [ 11.0, 12.0, 13.0, 14.0, 15.0 ] ], - [ [ 1.0, 2.0, 3.0, 4.0 ], [ 6.0, 7.0, 8.0, 9.0 ] ] - ], - "nonlinearity": "relu" + "layers": [ + { + "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ], + "bias" : [ 13.0, 14.0, 15.0 ], + "activation": "relu" + }, + { + "matrix": [ [ 16.0, 17.0, 18.0 ], [ 19.0, 20.0, 21.0 ], + "bias" : [ 19.0 ], + "activation": "relu" + } + ] } } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java index 9fe8d9dbdeb3..3d33f862a367 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java @@ -36,11 +36,10 @@ public static LTRScoringModel createNeuralNetworkModel(String name, List norms, String featureStoreName, List allFeatures, Map params) throws ModelException { - final LTRScoringModel model = LTRScoringModel.getInstance(solrResourceLoader, + return LTRScoringModel.getInstance(solrResourceLoader, NeuralNetworkModel.class.getCanonicalName(), name, features, norms, featureStoreName, allFeatures, params); - return model; } @BeforeClass @@ -55,54 +54,86 @@ public static void after() throws Exception { @Test public void testLinearAlgebra() { - final ArrayList rawMatrices = new ArrayList(); + final double layer1Node1Weight1 = 1.0; final double layer1Node1Weight2 = 2.0; final double layer1Node1Weight3 = 3.0; final double layer1Node1Weight4 = 4.0; - final double layer1Node1Bias = 5.0; - final double layer1Node2Weight1 = 6.0; - final double layer1Node2Weight2 = 7.0; - final double layer1Node2Weight3 = 8.0; - final double layer1Node2Weight4 = 9.0; - final double layer1Node2Bias = 10.0; - final double layer1Node3Weight1 = 11.0; - final double layer1Node3Weight2 = 12.0; - final double layer1Node3Weight3 = 13.0; - final double layer1Node3Weight4 = 14.0; 
- final double layer1Node3Bias = 15.0; - double[][] matrixOne = { { layer1Node1Weight1, layer1Node1Weight2, layer1Node1Weight3, layer1Node1Weight4, layer1Node1Bias }, - { layer1Node2Weight1, layer1Node2Weight2, layer1Node2Weight3, layer1Node2Weight4, layer1Node2Bias }, - { layer1Node3Weight1, layer1Node3Weight2, layer1Node3Weight3, layer1Node3Weight4, layer1Node3Bias } }; - final double outputNodeWeight1 = 1.0; - final double outputNodeWeight2 = 2.0; - final double outputNodeWeight3 = 3.0; - final double outputNodeBias = 4.0; - double[][] matrixTwo = { { outputNodeWeight1, outputNodeWeight2, outputNodeWeight3, outputNodeBias } }; - rawMatrices.add(matrixOne); - rawMatrices.add(matrixTwo); - - final ArrayList>> weights = new ArrayList>>(); - for (int matrixNum = 0; matrixNum < rawMatrices.size(); matrixNum++) { - double[][] matrix = rawMatrices.get(matrixNum); - weights.add(new ArrayList>()); - for (int row = 0; row < matrix.length; row++) { - weights.get(matrixNum).add(new ArrayList()); - for (int col = 0; col < matrix[row].length; col++) { - weights.get(matrixNum).get(row).add(matrix[row][col]); - } + final double layer1Node2Weight1 = 5.0; + final double layer1Node2Weight2 = 6.0; + final double layer1Node2Weight3 = 7.0; + final double layer1Node2Weight4 = 8.0; + final double layer1Node3Weight1 = 9.0; + final double layer1Node3Weight2 = 10.0; + final double layer1Node3Weight3 = 11.0; + final double layer1Node3Weight4 = 12.0; + + double[][] matrixOne = { { layer1Node1Weight1, layer1Node1Weight2, layer1Node1Weight3, layer1Node1Weight4 }, + { layer1Node2Weight1, layer1Node2Weight2, layer1Node2Weight3, layer1Node2Weight4 }, + { layer1Node3Weight1, layer1Node3Weight2, layer1Node3Weight3, layer1Node3Weight4 } }; + + final ArrayList> matrixOneList = new ArrayList>(); + for (int row = 0; row < matrixOne.length; row++) { + matrixOneList.add(new ArrayList()); + for (int col = 0; col < matrixOne[row].length; col++) { + matrixOneList.get(row).add(matrixOne[row][col]); } } + 
final double layer1Node1Bias = 13.0; + final double layer1Node2Bias = 14.0; + final double layer1Node3Bias = 15.0; + + double[] biasOne = { layer1Node1Bias, layer1Node2Bias, layer1Node3Bias }; + + final ArrayList biasOneList = new ArrayList(); + for (int i = 0; i < biasOne.length; i++) { + biasOneList.add(biasOne[i]); + } + + final double outputNodeWeight1 = 16.0; + final double outputNodeWeight2 = 17.0; + final double outputNodeWeight3 = 18.0; + + double[][] matrixTwo = { { outputNodeWeight1, outputNodeWeight2, outputNodeWeight3 } }; + + final ArrayList> matrixTwoList = new ArrayList>(); + for (int row = 0; row < matrixTwo.length; row++) { + matrixTwoList.add(new ArrayList()); + for (int col = 0; col < matrixTwo[row].length; col++) { + matrixTwoList.get(row).add(matrixTwo[row][col]); + } + } + + final double outputNodeBias = 19.0; + + double[] biasTwo = { outputNodeBias }; + + final ArrayList biasTwoList = new ArrayList(); + for (int i = 0; i < biasTwo.length; i++) { + biasTwoList.add(biasTwo[i]); + } + Map params = new HashMap(); - params.put("weights", weights); - String nonlinearity = "relu"; - params.put("nonlinearity", nonlinearity); + ArrayList> layers = new ArrayList>(); + + HashMap layerOne = new HashMap(); + layerOne.put("matrix", matrixOne); + layerOne.put("bias", biasOne); + layerOne.put("activation", "relu"); + layers.add(layerOne); + + HashMap layerTwo = new HashMap(); + layerTwo.put("matrix", matrixTwo); + layerTwo.put("bias", biasTwo); + layerTwo.put("activation", "relu"); + layers.add(layerTwo); + + params.put("layers", layers); final List allFeaturesInStore - = getFeatures(new String[] {"constantOne", "constantTwo", - "constantThree", "constantFour", "constantFive"}); - + = getFeatures(new String[] {"constantOne", "constantTwo", "constantThree", "constantFour", "constantFive"}); + final List featuresInModel = new ArrayList<>(allFeaturesInStore); Collections.shuffle(featuresInModel, random()); // store and model order of features can vary 
featuresInModel.remove(0); // models need not use all the store's features @@ -116,7 +147,7 @@ public void testLinearAlgebra() { { // pretend all features scored zero - float[] testVec = {0.0f, 0.0f, 0.0f, 0.0f}; + float[] testVec = { 0.0f, 0.0f, 0.0f, 0.0f }; // with all zero inputs the layer1 node outputs are layer1 node biases only final double layer1Node1Output = layer1Node1Bias; final double layer1Node2Output = layer1Node2Bias; @@ -127,7 +158,7 @@ public void testLinearAlgebra() { (layer1Node2Output*outputNodeWeight2) + (layer1Node3Output*outputNodeWeight3) + outputNodeBias; - assertEquals(74.0, outputNodeOutput, 0.001); + assertEquals(735.0, outputNodeOutput, 0.001); // and the expected score is that of the output node final double expectedScore = outputNodeOutput; float score = ltrScoringModel.score(testVec); @@ -136,7 +167,7 @@ public void testLinearAlgebra() { { // pretend all features scored one - float[] testVec = {1.0f, 1.0f, 1.0f, 1.0f}; + float[] testVec = { 1.0f, 1.0f, 1.0f, 1.0f }; // with all one inputs the layer1 node outputs are simply sum of weights and biases final double layer1Node1Output = layer1Node1Weight1 + layer1Node1Weight2 + layer1Node1Weight3 + layer1Node1Weight4 + layer1Node1Bias; final double layer1Node2Output = layer1Node2Weight1 + layer1Node2Weight2 + layer1Node2Weight3 + layer1Node2Weight4 + layer1Node2Bias; @@ -147,7 +178,7 @@ public void testLinearAlgebra() { (layer1Node2Output*outputNodeWeight2) + (layer1Node3Output*outputNodeWeight3) + outputNodeBias; - assertEquals(294.0, outputNodeOutput, 0.001); + assertEquals(2093.0, outputNodeOutput, 0.001); // and the expected score is that of the output node final double expectedScore = outputNodeOutput; float score = ltrScoringModel.score(testVec); @@ -171,8 +202,8 @@ public void testLinearAlgebra() { (layer1Node2Output*outputNodeWeight2) + (layer1Node3Output*outputNodeWeight3) + outputNodeBias; - assertTrue("outputNodeOutput="+outputNodeOutput, 74.0 <= outputNodeOutput); // inputs 
between zero and one produced output greater than 74 - assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 294.0); // inputs between zero and one produced output less than 294 + assertTrue("outputNodeOutput="+outputNodeOutput, 735.0 <= outputNodeOutput); // inputs between zero and one produced output greater than 74 + assertTrue("outputNodeOutput="+outputNodeOutput, outputNodeOutput <= 2093.0); // inputs between zero and one produced output less than 294 // and the expected score is that of the output node final double expectedScore = outputNodeOutput; float score = ltrScoringModel.score(testVec); @@ -181,14 +212,29 @@ public void testLinearAlgebra() { } @Test - public void badNonlinearityTest() throws Exception { + public void badActivationTest() throws Exception { + final ModelException expectedException = + new ModelException("Invalid activation function in model \"neuralnetworkmodel_bad_activation\". " + + "\"sig\" is not \"relu\", \"sigmoid\", or \"none\"."); + try { + createModelFromFiles("neuralnetworkmodel_bad_activation.json", + "neuralnetworkmodel_features.json"); + fail("badActivationTest failed to throw exception: "+expectedException); + } catch (Exception actualException) { + Throwable rootError = getRootCause(actualException); + assertEquals(expectedException.toString(), rootError.toString()); + } + } + + @Test + public void biasDimensionMismatchTest() throws Exception { final ModelException expectedException = - new ModelException("Invalid nonlinearity for model neuralnetworkmodel_bad_nonlinearity. " + - "\"sig\" is not \"relu\" or \"sigmoid\"."); + new ModelException("Dimension mismatch in model \"neuralnetworkmodel_mismatch_bias\". 
" + + "Layer 0 has 2 bias weights but 3 weight matrix rows."); try { - createModelFromFiles("neuralnetworkmodel_bad_nonlinearity.json", - "neuralnetworkmodel_features.json"); - fail("badNonlinearityTest failed to throw exception: "+expectedException); + createModelFromFiles("neuralnetworkmodel_mismatch_bias.json", + "neuralnetworkmodel_features.json"); + fail("biasDimensionMismatchTest failed to throw exception: "+expectedException); } catch (Exception actualException) { Throwable rootError = getRootCause(actualException); assertEquals(expectedException.toString(), rootError.toString()); @@ -198,11 +244,11 @@ public void badNonlinearityTest() throws Exception { @Test public void inputDimensionMismatchTest() throws Exception { final ModelException expectedException = - new ModelException("Dimension mismatch. Input for model neuralnetworkmodel_mismatch_input has " + - "4 features, but matrix #0 has 3 non-bias columns."); + new ModelException("Dimension mismatch in model \"neuralnetworkmodel_mismatch_input\". The input has " + + "4 features, but the weight matrix for layer 0 has 3 columns."); try { createModelFromFiles("neuralnetworkmodel_mismatch_input.json", - "neuralnetworkmodel_features.json"); + "neuralnetworkmodel_features.json"); fail("inputDimensionMismatchTest failed to throw exception: "+expectedException); } catch (Exception actualException) { Throwable rootError = getRootCause(actualException); @@ -213,11 +259,11 @@ public void inputDimensionMismatchTest() throws Exception { @Test public void layerDimensionMismatchTest() throws Exception { final ModelException expectedException = - new ModelException("Dimension mismatch. Matrix #0 for model neuralnetworkmodel_mismatch_layers has " + - "2 rows, but matrix #1 has 3 non-bias columns."); + new ModelException("Dimension mismatch in model \"neuralnetworkmodel_mismatch_layers\". 
The weight matrix " + + "for layer 0 has 2 rows, but the weight matrix for layer 1 has 3 columns."); try { createModelFromFiles("neuralnetworkmodel_mismatch_layers.json", - "neuralnetworkmodel_features.json"); + "neuralnetworkmodel_features.json"); fail("layerDimensionMismatchTest failed to throw exception: "+expectedException); } catch (Exception actualException) { Throwable rootError = getRootCause(actualException); @@ -228,11 +274,11 @@ public void layerDimensionMismatchTest() throws Exception { @Test public void tooManyRowsTest() throws Exception { final ModelException expectedException = - new ModelException("Final matrix for model neuralnetworkmodel_too_many_rows has 2 rows, " + - "but should have 1 row."); + new ModelException("The output matrix for model \"neuralnetworkmodel_too_many_rows\" has 2 rows, " + + "but should only have one."); try { createModelFromFiles("neuralnetworkmodel_too_many_rows.json", - "neuralnetworkmodel_features.json"); + "neuralnetworkmodel_features.json"); fail("layerDimensionMismatchTest failed to throw exception: "+expectedException); } catch (Exception actualException) { Throwable rootError = getRootCause(actualException); From 6d836d6563f3200ac3865e137a980861e836a776 Mon Sep 17 00:00:00 2001 From: "Michael A. Alcorn" Date: Tue, 6 Feb 2018 08:37:57 -0600 Subject: [PATCH 14/22] Model formatting. 
--- .../modelExamples/neuralnetworkmodel_bad_activation.json | 6 ++++-- .../modelExamples/neuralnetworkmodel_mismatch_bias.json | 6 ++++-- .../modelExamples/neuralnetworkmodel_mismatch_input.json | 6 ++++-- .../neuralnetworkmodel_mismatch_layers.json | 5 +++-- .../modelExamples/neuralnetworkmodel_too_many_rows.json | 9 ++++++--- 5 files changed, 21 insertions(+), 11 deletions(-) diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json index 6694ad3a3741..fda90c5233bf 100644 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json @@ -10,14 +10,16 @@ "params":{ "layers": [ { - "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ], + "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], + [ 5.0, 6.0, 7.0, 8.0 ], + [ 9.0, 10.0, 11.0, 12.0 ] ], "bias" : [ 13.0, 14.0, 15.0 ], "activation": "sig" }, { "matrix": [ [ 16.0, 17.0, 18.0 ] ], "bias" : [ 19.0 ], - "activation": "sigmoid" + "activation": "none" } ] } diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json index 324600098450..bbbae2d89c9e 100644 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json @@ -10,14 +10,16 @@ "params":{ "layers": [ { - "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ], + "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], + [ 5.0, 6.0, 7.0, 8.0 ], + [ 9.0, 10.0, 11.0, 12.0 ] ], "bias" : [ 13.0, 14.0 ], "activation": "relu" }, { "matrix": [ [ 16.0, 17.0, 18.0 ] ], "bias" : [ 19.0 ], - "activation": "relu" + "activation": "none" } ] } diff --git 
a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json index 2a377583fb41..4cd8904d8cd5 100644 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json @@ -10,14 +10,16 @@ "params":{ "layers": [ { - "matrix": [ [ 1.0, 2.0, 3.0 ], [ 5.0, 6.0, 7.0 ], [ 9.0, 10.0, 11.0 ] ], + "matrix": [ [ 1.0, 2.0, 3.0 ], + [ 5.0, 6.0, 7.0 ], + [ 9.0, 10.0, 11.0 ] ], "bias" : [ 13.0, 14.0, 15.0 ], "activation": "relu" }, { "matrix": [ [ 16.0, 17.0, 18.0 ] ], "bias" : [ 19.0 ], - "activation": "relu" + "activation": "none" } ] } diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json index 0dd30f6ed3b6..3cd5bc837fd0 100644 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json @@ -10,14 +10,15 @@ "params":{ "layers": [ { - "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 6.0, 7.0, 8.0 ] ], + "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], + [ 5.0, 6.0, 7.0, 8.0 ] ], "bias" : [ 13.0, 14.0 ], "activation": "relu" }, { "matrix": [ [ 16.0, 17.0, 18.0 ] ], "bias" : [ 19.0 ], - "activation": "relu" + "activation": "none" } ] } diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json index bedb2adef502..1a80b8394c70 100644 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json @@ -10,14 +10,17 @@ "params":{ "layers": [ { - "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], [ 5.0, 
6.0, 7.0, 8.0 ], [ 9.0, 10.0, 11.0, 12.0 ] ], + "matrix": [ [ 1.0, 2.0, 3.0, 4.0 ], + [ 5.0, 6.0, 7.0, 8.0 ], + [ 9.0, 10.0, 11.0, 12.0 ] ], "bias" : [ 13.0, 14.0, 15.0 ], "activation": "relu" }, { - "matrix": [ [ 16.0, 17.0, 18.0 ], [ 19.0, 20.0, 21.0 ], + "matrix": [ [ 16.0, 17.0, 18.0 ], + [ 19.0, 20.0, 21.0 ], "bias" : [ 19.0 ], - "activation": "relu" + "activation": "none" } ] } From 40dcb9fb18776a0fc8b46ec8e0fbd873f49da9a1 Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Wed, 7 Feb 2018 20:37:27 +0000 Subject: [PATCH 15/22] factor out NeuralNetworkModel.Layer interface --- .../solr/ltr/model/NeuralNetworkModel.java | 81 ++++++++++--------- 1 file changed, 45 insertions(+), 36 deletions(-) diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java index fe9c76258279..9b188d5b66f1 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java @@ -85,7 +85,13 @@ protected interface Activation { float apply(float in); } - public class Layer { + public interface Layer { + public float[] calculateOutput(float[] inputVec); + public int validate(int inputDim) throws ModelException; + public String describe(); + } + + public class DefaultLayer implements Layer { private int layerID; private float[][] weightMatrix; private int matrixRows; @@ -95,7 +101,7 @@ public class Layer { private String activationStr; private Activation activation; - public Layer() { + public DefaultLayer() { layerID = layers.size(); } @@ -157,7 +163,7 @@ public float apply(float in) { } } - private float[] calculateOutput(float[] inputVec) { + public float[] calculateOutput(float[] inputVec) { float[] outputVec = new float[this.matrixRows]; @@ -172,7 +178,7 @@ private float[] calculateOutput(float[] inputVec) { return outputVec; } - public void validate() throws 
ModelException { + public int validate(int inputDim) throws ModelException { if (this.numUnits != this.matrixRows) { throw new ModelException("Dimension mismatch in model \"" + name + "\". Layer " + Integer.toString(this.layerID) + " has " + Integer.toString(this.numUnits) + @@ -182,13 +188,35 @@ public void validate() throws ModelException { throw new ModelException("Invalid activation function in model \"" + name + "\". " + "\"" + activationStr + "\" is not \"relu\", \"sigmoid\", or \"none\"."); } + if (inputDim != this.matrixCols) { + if (this.layerID == 0) { + throw new ModelException("Dimension mismatch in model \"" + name + "\". The input has " + + Integer.toString(inputDim) + " features, but the weight matrix for layer 0 has " + + Integer.toString(this.matrixCols) + " columns."); + } else { + throw new ModelException("Dimension mismatch in model \"" + name + "\". The weight matrix for layer " + + Integer.toString(this.layerID - 1) + " has " + Integer.toString(inputDim) + " rows, but the " + + "weight matrix for layer " + Integer.toString(this.layerID) + " has " + + Integer.toString(this.matrixCols) + " columns."); + } + } + return this.matrixRows; + } + + public String describe() { + final StringBuilder sb = new StringBuilder(); + sb.append("Hidden layer ").append(Integer.toString(this.layerID)).append(" has a "); + sb.append(Integer.toString(this.matrixRows)).append("x").append(Integer.toString(this.matrixCols)); + sb.append(" weight matrix, ").append(Integer.toString(this.numUnits)).append(" bias weights, "); + sb.append(" and a \"").append(this.activationStr).append("\" activation function."); + return sb.toString(); } } - private Layer createLayer(Map map) { - final Layer layer = new Layer(); - if (map != null) { - SolrPluginUtils.invokeSetters(layer, map.entrySet()); + protected Layer createLayer(Object o) { + final DefaultLayer layer = new DefaultLayer(); + if (o != null) { + SolrPluginUtils.invokeSetters(layer, ((Map) o).entrySet()); } return layer; } 
@@ -196,7 +224,7 @@ private Layer createLayer(Map map) { public void setLayers(Object layers) { this.layers = new ArrayList(); for (final Object o : (List) layers) { - final Layer layer = createLayer((Map) o); + final Layer layer = createLayer(o); this.layers.add(layer); } } @@ -214,28 +242,13 @@ protected void validate() throws ModelException { int inputDim = features.size(); - for (int i = 0; i < layers.size(); i++) { + for (Layer layer : layers) { + inputDim = layer.validate(inputDim); + } - Layer layer = layers.get(i); - if (inputDim != layer.matrixCols) { - if (i == 0) { - throw new ModelException("Dimension mismatch in model \"" + name + "\". The input has " + - Integer.toString(inputDim) + " features, but the weight matrix for layer 0 has " + - Integer.toString(layer.matrixCols) + " columns."); - } else { - throw new ModelException("Dimension mismatch in model \"" + name + "\". The weight matrix for layer " + - Integer.toString(i - 1) + " has " + Integer.toString(inputDim) + " rows, but the " + - "weight matrix for layer " + Integer.toString(i) + " has " + - Integer.toString(layer.matrixCols) + " columns."); - } - } - - if (i == layers.size() - 1 & layer.matrixRows != 1) { - throw new ModelException("The output matrix for model \"" + name + "\" has " + Integer.toString(layer.matrixRows) + - " rows, but should only have one."); - } - - inputDim = layer.matrixRows; + if (inputDim != 1) { + throw new ModelException("The output matrix for model \"" + name + "\" has " + Integer.toString(inputDim) + + " rows, but should only have one."); } } @@ -271,13 +284,9 @@ public Explanation explain(LeafReaderContext context, int doc, modelDescription.append("])"); - for (int i = 0; i < layers.size(); i++) { - Layer layer = layers.get(i); + for (Layer layer : layers) { modelDescription.append(System.lineSeparator()); - modelDescription.append("Hidden layer ").append(Integer.toString(i)).append(" has a "); - 
modelDescription.append(Integer.toString(layer.matrixRows)).append("x").append(Integer.toString(layer.matrixCols)); - modelDescription.append(" weight matrix, ").append(Integer.toString(layer.numUnits)).append(" bias weights, "); - modelDescription.append(" and a \"").append(layer.activationStr).append("\" activation function."); + modelDescription.append(layer.describe()); } return Explanation.match(finalScore, modelDescription.toString()); } From 813a2dad895fba2495b963328fb046be0231a6f7 Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Thu, 8 Feb 2018 20:14:23 +0000 Subject: [PATCH 16/22] tweaks to fix two test failures --- .../neuralnetworkmodel_too_many_rows.json | 2 +- .../solr/ltr/model/TestNeuralNetworkModel.java | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json index 1a80b8394c70..7b6da229a437 100644 --- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json @@ -18,7 +18,7 @@ }, { "matrix": [ [ 16.0, 17.0, 18.0 ], - [ 19.0, 20.0, 21.0 ], + [ 19.0, 20.0, 21.0 ] ], "bias" : [ 19.0 ], "activation": "none" } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java index 3d33f862a367..f6165e0d09d1 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java @@ -118,14 +118,14 @@ public void testLinearAlgebra() { ArrayList> layers = new ArrayList>(); HashMap layerOne = new HashMap(); - layerOne.put("matrix", matrixOne); - layerOne.put("bias", biasOne); + layerOne.put("matrix", matrixOneList); + 
layerOne.put("bias", biasOneList); layerOne.put("activation", "relu"); layers.add(layerOne); HashMap layerTwo = new HashMap(); - layerTwo.put("matrix", matrixTwo); - layerTwo.put("bias", biasTwo); + layerTwo.put("matrix", matrixTwoList); + layerTwo.put("bias", biasTwoList); layerTwo.put("activation", "relu"); layers.add(layerTwo); @@ -274,8 +274,8 @@ public void layerDimensionMismatchTest() throws Exception { @Test public void tooManyRowsTest() throws Exception { final ModelException expectedException = - new ModelException("The output matrix for model \"neuralnetworkmodel_too_many_rows\" has 2 rows, " + - "but should only have one."); + new ModelException("Dimension mismatch in model \"neuralnetworkmodel_too_many_rows\". " + + "Layer 1 has 1 bias weights but 2 weight matrix rows."); try { createModelFromFiles("neuralnetworkmodel_too_many_rows.json", "neuralnetworkmodel_features.json"); From 239bb459e4a2d465b69ef2d0126f2c4f3d28d89d Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Fri, 9 Feb 2018 18:32:08 +0000 Subject: [PATCH 17/22] s/getCanonicalName/getName (based on SOLR-11931) --- .../test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java index f6165e0d09d1..1f832abc592d 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java @@ -37,7 +37,7 @@ public static LTRScoringModel createNeuralNetworkModel(String name, List allFeatures, Map params) throws ModelException { return LTRScoringModel.getInstance(solrResourceLoader, - NeuralNetworkModel.class.getCanonicalName(), + NeuralNetworkModel.class.getName(), name, features, norms, featureStoreName, allFeatures, params); } From 
54e5bd8db972311fce8d29542d25c66c51ed327c Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Fri, 9 Feb 2018 18:34:09 +0000 Subject: [PATCH 18/22] Add Keras link and sentence in javadocs. --- .../src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java index 9b188d5b66f1..cdb28ab4bc0f 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java @@ -66,7 +66,8 @@ *

* Training libraries: *

*

* Background reading: From ac5e76efd9a70eb58df137b086798bc438a426bf Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Fri, 9 Feb 2018 18:46:11 +0000 Subject: [PATCH 19/22] factor out TestNeuralNetworkModel.createLayerParams method --- .../ltr/model/TestNeuralNetworkModel.java | 62 ++++++++----------- 1 file changed, 25 insertions(+), 37 deletions(-) diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java index 1f832abc592d..18a0f13a6c7c 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java @@ -52,6 +52,29 @@ public static void after() throws Exception { aftertest(); } + protected static Map createLayerParams(double[][] matrix, double[] bias, String activation) { + + final ArrayList> matrixList = new ArrayList>(); + for (int row = 0; row < matrix.length; row++) { + matrixList.add(new ArrayList()); + for (int col = 0; col < matrix[row].length; col++) { + matrixList.get(row).add(matrix[row][col]); + } + } + + final ArrayList biasList = new ArrayList(); + for (int i = 0; i < bias.length; i++) { + biasList.add(bias[i]); + } + + final Map layer = new HashMap(); + layer.put("matrix", matrixList); + layer.put("bias", biasList); + layer.put("activation", activation); + + return layer; + } + @Test public void testLinearAlgebra() { @@ -72,62 +95,27 @@ public void testLinearAlgebra() { { layer1Node2Weight1, layer1Node2Weight2, layer1Node2Weight3, layer1Node2Weight4 }, { layer1Node3Weight1, layer1Node3Weight2, layer1Node3Weight3, layer1Node3Weight4 } }; - final ArrayList> matrixOneList = new ArrayList>(); - for (int row = 0; row < matrixOne.length; row++) { - matrixOneList.add(new ArrayList()); - for (int col = 0; col < matrixOne[row].length; col++) { - matrixOneList.get(row).add(matrixOne[row][col]); - } - } - final double 
layer1Node1Bias = 13.0; final double layer1Node2Bias = 14.0; final double layer1Node3Bias = 15.0; double[] biasOne = { layer1Node1Bias, layer1Node2Bias, layer1Node3Bias }; - final ArrayList biasOneList = new ArrayList(); - for (int i = 0; i < biasOne.length; i++) { - biasOneList.add(biasOne[i]); - } - final double outputNodeWeight1 = 16.0; final double outputNodeWeight2 = 17.0; final double outputNodeWeight3 = 18.0; double[][] matrixTwo = { { outputNodeWeight1, outputNodeWeight2, outputNodeWeight3 } }; - final ArrayList> matrixTwoList = new ArrayList>(); - for (int row = 0; row < matrixTwo.length; row++) { - matrixTwoList.add(new ArrayList()); - for (int col = 0; col < matrixTwo[row].length; col++) { - matrixTwoList.get(row).add(matrixTwo[row][col]); - } - } - final double outputNodeBias = 19.0; double[] biasTwo = { outputNodeBias }; - final ArrayList biasTwoList = new ArrayList(); - for (int i = 0; i < biasTwo.length; i++) { - biasTwoList.add(biasTwo[i]); - } - Map params = new HashMap(); ArrayList> layers = new ArrayList>(); - HashMap layerOne = new HashMap(); - layerOne.put("matrix", matrixOneList); - layerOne.put("bias", biasOneList); - layerOne.put("activation", "relu"); - layers.add(layerOne); - - HashMap layerTwo = new HashMap(); - layerTwo.put("matrix", matrixTwoList); - layerTwo.put("bias", biasTwoList); - layerTwo.put("activation", "relu"); - layers.add(layerTwo); + layers.add(createLayerParams(matrixOne, biasOne, "relu")); + layers.add(createLayerParams(matrixTwo, biasTwo, "relu")); params.put("layers", layers); From 3fcb905707fa126224938fcf4f26db0fa4d522dc Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Mon, 12 Feb 2018 19:29:42 +0000 Subject: [PATCH 20/22] add TestNeuralNetworkModel.testExplain() method --- .../solr/ltr/model/NeuralNetworkModel.java | 21 ++++++++------- .../neuralnetworkmodel_features.json | 6 ++--- .../neuralnetworkmodel_explainable.json | 26 +++++++++++++++++++ .../ltr/model/TestNeuralNetworkModel.java | 25 
++++++++++++++++++ 4 files changed, 65 insertions(+), 13 deletions(-) create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_explainable.json diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java index cdb28ab4bc0f..7e0b4b23d7d7 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java @@ -206,10 +206,9 @@ public int validate(int inputDim) throws ModelException { public String describe() { final StringBuilder sb = new StringBuilder(); - sb.append("Hidden layer ").append(Integer.toString(this.layerID)).append(" has a "); - sb.append(Integer.toString(this.matrixRows)).append("x").append(Integer.toString(this.matrixCols)); - sb.append(" weight matrix, ").append(Integer.toString(this.numUnits)).append(" bias weights, "); - sb.append(" and a \"").append(this.activationStr).append("\" activation function."); + sb + .append("(matrix=").append(Integer.toString(this.matrixCols)).append('x').append(Integer.toString(this.matrixRows)) + .append(",activation=").append(this.activationStr).append(")"); return sb.toString(); } } @@ -277,18 +276,20 @@ public Explanation explain(LeafReaderContext context, int doc, for (int i = 0; i < featureExplanations.size(); i++) { Explanation featureExplain = featureExplanations.get(i); if (i > 0) { - modelDescription.append(","); + modelDescription.append(','); } final String key = features.get(i).getName(); - modelDescription.append(key).append("=").append(featureExplain.getValue()); + modelDescription.append(key).append('=').append(featureExplain.getValue()); } - modelDescription.append("])"); + modelDescription.append("],layers=["); - for (Layer layer : layers) { - modelDescription.append(System.lineSeparator()); - modelDescription.append(layer.describe()); + for (int i = 0; i < 
layers.size(); i++) { + if (i > 0) modelDescription.append(','); + modelDescription.append(layers.get(i).describe()); } + modelDescription.append("])"); + return Explanation.match(finalScore, modelDescription.toString()); } diff --git a/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json b/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json index 36973f439556..2f0bea50cde1 100644 --- a/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json +++ b/solr/contrib/ltr/src/test-files/featureExamples/neuralnetworkmodel_features.json @@ -10,21 +10,21 @@ "name": "constantTwo", "class": "org.apache.solr.ltr.feature.ValueFeature", "params": { - "value": 1.0 + "value": 2.0 } }, { "name": "constantThree", "class": "org.apache.solr.ltr.feature.ValueFeature", "params": { - "value": 1.0 + "value": 3.0 } }, { "name": "constantFour", "class": "org.apache.solr.ltr.feature.ValueFeature", "params": { - "value": 1.0 + "value": 4.0 } } ] diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_explainable.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_explainable.json new file mode 100644 index 000000000000..134f09a918cf --- /dev/null +++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_explainable.json @@ -0,0 +1,26 @@ +{ + "class":"org.apache.solr.ltr.model.NeuralNetworkModel", + "name":"neuralnetworkmodel_explainable", + "features":[ + { "name": "constantOne"}, + { "name": "constantTwo"}, + { "name": "constantThree"}, + { "name": "constantFour"} + ], + "params":{ + "layers": [ + { + "matrix": [ + [ 11.0, 2.0, 33.0, 4.0 ], + [ 1.0, 22.0, 3.0, 44.0 ] ], + "bias" : [ 55.0, 66.0 ], + "activation": "relu" + }, + { + "matrix": [ [ 11.0, 22.0 ] ], + "bias" : [ 77.0 ], + "activation": "none" + } + ] + } +} diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java 
b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java index 18a0f13a6c7c..ff09015f96ef 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java @@ -22,6 +22,7 @@ import java.util.List; import java.util.Map; +import org.apache.lucene.search.Explanation; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.feature.Feature; import org.apache.solr.ltr.norm.IdentityNormalizer; @@ -273,4 +274,28 @@ public void tooManyRowsTest() throws Exception { assertEquals(expectedException.toString(), rootError.toString()); } } + + @Test + public void testExplain() throws Exception { + + final LTRScoringModel model = createModelFromFiles("neuralnetworkmodel_explainable.json", + "neuralnetworkmodel_features.json"); + + final float[] featureValues = { 1.2f, 3.4f, 5.6f, 7.8f }; + + final List explanations = new ArrayList(); + for (int ii=0; ii Date: Tue, 13 Feb 2018 18:17:19 +0000 Subject: [PATCH 21/22] three activation function related changes: * javadocs to mention which are supported * 'identity' instead of 'none' * test to show (and test) how derived class can support additional activation functions --- .../solr/ltr/model/NeuralNetworkModel.java | 27 +++---- .../neuralnetworkmodel_bad_activation.json | 2 +- .../neuralnetworkmodel_custom.json | 17 +++++ .../neuralnetworkmodel_explainable.json | 2 +- .../neuralnetworkmodel_mismatch_bias.json | 2 +- .../neuralnetworkmodel_mismatch_input.json | 2 +- .../neuralnetworkmodel_mismatch_layers.json | 2 +- .../neuralnetworkmodel_too_many_rows.json | 2 +- .../ltr/model/TestNeuralNetworkModel.java | 70 ++++++++++++++++++- 9 files changed, 104 insertions(+), 22 deletions(-) create mode 100644 solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_custom.json diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java 
b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java index 7e0b4b23d7d7..02f38c69d832 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java @@ -30,6 +30,10 @@ /** * A scoring model that computes document scores using a neural network. *

+ * Supported activation functions are: + * identity, relu, sigmoid and + * contributions to support additional activation functions are welcome. + *

* Example configuration:

{
     "class" : "org.apache.solr.ltr.model.NeuralNetworkModel",
@@ -47,7 +51,7 @@
                              [ 7.0, 8.0, 9.0 ],
                              [ 10.0, 11.0, 12.0 ] ],
                 "bias" : [ 13.0, 14.0, 15.0, 16.0 ],
-                "activation" : "relu"
+                "activation" : "sigmoid"
             },
             {
                 "matrix" : [ [ 17.0, 18.0, 19.0, 20.0 ],
@@ -58,7 +62,7 @@
             {
                 "matrix" : [ [ 27.0, 28.0 ] ],
                 "bias" : [ 29.0 ],
-                "activation" : "none"
+                "activation" : "identity"
             }
         ]
     }
@@ -99,8 +103,8 @@ public class DefaultLayer implements Layer {
     private int matrixCols;
     private float[] biasVector;
     private int numUnits;
-    private String activationStr;
-    private Activation activation;
+    protected String activationStr;
+    protected Activation activation;
 
     public DefaultLayer() {
       layerID = layers.size();
@@ -133,27 +137,22 @@ public void setActivation(Object activationStr) {
       this.activationStr = (String) activationStr;
       switch (this.activationStr) {
         case "relu":
-
           this.activation = new Activation() {
             @Override
             public float apply(float in) {
               return in < 0 ? 0 : in;
             }
           };
-
           break;
         case "sigmoid":
-
           this.activation = new Activation() {
             @Override
             public float apply(float in) {
               return (float) (1 / (1 + Math.exp(-in)));
             }
           };
-
           break;
-        default:
-
+        case "identity":
           this.activation = new Activation() {
             @Override
             public float apply(float in) {
@@ -161,6 +160,9 @@ public float apply(float in) {
             }
           };
           break;
+        default:
+          this.activation = null;
+          break;
       }
     }
 
@@ -185,9 +187,8 @@ public int validate(int inputDim) throws ModelException {
                                  Integer.toString(this.layerID) + " has " + Integer.toString(this.numUnits) +
                                  " bias weights but " + Integer.toString(this.matrixRows) + " weight matrix rows.");
       }
-      if (!this.activationStr.matches("relu|sigmoid|none")) {
-        throw new ModelException("Invalid activation function in model \"" + name + "\". " +
-                                 "\"" + activationStr + "\" is not \"relu\", \"sigmoid\", or \"none\".");
+      if (this.activation == null) {
+        throw new ModelException("Invalid activation function (\""+this.activationStr+"\") in layer "+Integer.toString(this.layerID)+" of model \"" + name + "\".");
       }
       if (inputDim != this.matrixCols) {
         if (this.layerID == 0) {
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json
index fda90c5233bf..482d99f2d23e 100644
--- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_bad_activation.json
@@ -19,7 +19,7 @@
             {
                 "matrix": [ [ 16.0, 17.0, 18.0 ] ],
                 "bias" : [ 19.0 ],
-                "activation": "none"
+                "activation": "identity"
             }
         ]
     }
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_custom.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_custom.json
new file mode 100644
index 000000000000..78b32e96a75a
--- /dev/null
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_custom.json
@@ -0,0 +1,17 @@
+{
+    "class":"org.apache.solr.ltr.model.TestNeuralNetworkModel$CustomNeuralNetworkModel",
+    "name":"neuralnetworkmodel_custom",
+    "features":[
+        { "name": "constantFour"},
+        { "name": "constantTwo"}
+    ],
+    "params":{
+        "layers": [
+            {
+                "matrix": [ [ 1.0, 1.0 ] ],
+                "bias" : [ 0.0 ],
+                "activation": "answer"
+            }
+        ]
+    }
+}
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_explainable.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_explainable.json
index 134f09a918cf..7602f89c7e7d 100644
--- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_explainable.json
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_explainable.json
@@ -19,7 +19,7 @@
             {
                 "matrix": [ [ 11.0, 22.0 ] ],
                 "bias" : [ 77.0 ],
-                "activation": "none"
+                "activation": "identity"
             }
         ]
     }
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json
index bbbae2d89c9e..05d6b429d3eb 100644
--- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_bias.json
@@ -19,7 +19,7 @@
             {
                 "matrix": [ [ 16.0, 17.0, 18.0 ] ],
                 "bias" : [ 19.0 ],
-                "activation": "none"
+                "activation": "identity"
             }
         ]
     }
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
index 4cd8904d8cd5..4b1c07817d98 100644
--- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_input.json
@@ -19,7 +19,7 @@
             {
                 "matrix": [ [ 16.0, 17.0, 18.0 ] ],
                 "bias" : [ 19.0 ],
-                "activation": "none"
+                "activation": "identity"
             }
         ]
     }
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
index 3cd5bc837fd0..e8d1dde5b356 100644
--- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_mismatch_layers.json
@@ -18,7 +18,7 @@
             {
                 "matrix": [ [ 16.0, 17.0, 18.0 ] ],
                 "bias" : [ 19.0 ],
-                "activation": "none"
+                "activation": "identity"
             }
         ]
     }
diff --git a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
index 7b6da229a437..b850506a64c5 100644
--- a/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
+++ b/solr/contrib/ltr/src/test-files/modelExamples/neuralnetworkmodel_too_many_rows.json
@@ -20,7 +20,7 @@
                 "matrix": [ [ 16.0, 17.0, 18.0 ],
                             [ 19.0, 20.0, 21.0 ] ],
                 "bias" : [ 19.0 ],
-                "activation": "none"
+                "activation": "identity"
             }
         ]
     }
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
index ff09015f96ef..cfa5ad5be2ab 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
@@ -27,6 +27,7 @@
 import org.apache.solr.ltr.feature.Feature;
 import org.apache.solr.ltr.norm.IdentityNormalizer;
 import org.apache.solr.ltr.norm.Normalizer;
+import org.apache.solr.util.SolrPluginUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -203,8 +204,7 @@ public void testLinearAlgebra() {
   @Test
   public void badActivationTest() throws Exception {
     final ModelException expectedException =
-            new ModelException("Invalid activation function in model \"neuralnetworkmodel_bad_activation\". " +
-                               "\"sig\" is not \"relu\", \"sigmoid\", or \"none\".");
+            new ModelException("Invalid activation function (\"sig\") in layer 0 of model \"neuralnetworkmodel_bad_activation\".");
     try {
       createModelFromFiles("neuralnetworkmodel_bad_activation.json",
              "neuralnetworkmodel_features.json");
@@ -293,7 +293,71 @@ public void testExplain() throws Exception {
     final Explanation explanation = model.explain(null, 0, finalScore, explanations);
     assertEquals(finalScore+" = (name=neuralnetworkmodel_explainable"+
         ",featureValues=[constantOne=1.2,constantTwo=3.4,constantThree=5.6,constantFour=7.8]"+
-        ",layers=[(matrix=4x2,activation=relu),(matrix=2x1,activation=none)]"+
+        ",layers=[(matrix=4x2,activation=relu),(matrix=2x1,activation=identity)]"+
+        ")\n",
+        explanation.toString());
+  }
+
+  public static class CustomNeuralNetworkModel extends NeuralNetworkModel {
+
+    public CustomNeuralNetworkModel(String name, List features, List norms,
+        String featureStoreName, List allFeatures, Map params) {
+      super(name, features, norms, featureStoreName, allFeatures, params);
+    }
+
+    public class DefaultLayer extends org.apache.solr.ltr.model.NeuralNetworkModel.DefaultLayer {
+      @Override
+      public void setActivation(Object o) {
+        super.setActivation(o);
+        switch (this.activationStr) {
+          case "answer":
+            this.activation = new Activation() {
+              @Override
+              public float apply(float in) {
+                return in * 42f;
+              }
+            };
+            break;
+          default:
+            break;
+        }
+      }
+    }
+
+    @Override
+    protected Layer createLayer(Object o) {
+      final DefaultLayer layer = new DefaultLayer();
+      if (o != null) {
+        SolrPluginUtils.invokeSetters(layer, ((Map) o).entrySet());
+      }
+      return layer;
+    }
+
+  }
+
+  @Test
+  public void testCustom() throws Exception {
+
+    final LTRScoringModel model = createModelFromFiles("neuralnetworkmodel_custom.json",
+        "neuralnetworkmodel_features.json");
+
+    final float featureValue1 = 4f;
+    final float featureValue2 = 2f;
+    final float[] featureValues = { featureValue1, featureValue2 };
+
+    final double expectedScore = (featureValue1+featureValue2) * 42f;
+    float actualScore = model.score(featureValues);
+    assertEquals(expectedScore, actualScore, 0.001);
+
+    final List explanations = new ArrayList();
+    for (int ii=0; ii
Date: Tue, 13 Feb 2018 20:03:14 +0000
Subject: [PATCH 22/22] =?UTF-8?q?matrix=3D=3Fx=3F=20per=20conventions?=

---
 .../java/org/apache/solr/ltr/model/NeuralNetworkModel.java    | 2 +-
 .../org/apache/solr/ltr/model/TestNeuralNetworkModel.java     | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
index 02f38c69d832..798b81c2916e 100644
--- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/model/NeuralNetworkModel.java
@@ -208,7 +208,7 @@ public int validate(int inputDim) throws ModelException {
     public String describe() {
       final StringBuilder sb = new StringBuilder();
       sb
-      .append("(matrix=").append(Integer.toString(this.matrixCols)).append('x').append(Integer.toString(this.matrixRows))
+      .append("(matrix=").append(Integer.toString(this.matrixRows)).append('x').append(Integer.toString(this.matrixCols))
       .append(",activation=").append(this.activationStr).append(")");
       return sb.toString();
     }
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
index cfa5ad5be2ab..712249f9d038 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/model/TestNeuralNetworkModel.java
@@ -293,7 +293,7 @@ public void testExplain() throws Exception {
     final Explanation explanation = model.explain(null, 0, finalScore, explanations);
     assertEquals(finalScore+" = (name=neuralnetworkmodel_explainable"+
         ",featureValues=[constantOne=1.2,constantTwo=3.4,constantThree=5.6,constantFour=7.8]"+
-        ",layers=[(matrix=4x2,activation=relu),(matrix=2x1,activation=identity)]"+
+        ",layers=[(matrix=2x4,activation=relu),(matrix=1x2,activation=identity)]"+
         ")\n",
         explanation.toString());
   }
@@ -357,7 +357,7 @@ public void testCustom() throws Exception {
     final Explanation explanation = model.explain(null, 0, actualScore, explanations);
     assertEquals(actualScore+" = (name=neuralnetworkmodel_custom"+
         ",featureValues=[constantFour=4.0,constantTwo=2.0]"+
-        ",layers=[(matrix=2x1,activation=answer)]"+
+        ",layers=[(matrix=1x2,activation=answer)]"+
         ")\n",
         explanation.toString());
   }