Skip to content
Browse files

Halfway done with being able to clone a neural network with a new layer.

The Encog framework does not provide this functionality out of the box, so it has to be implemented manually here.
  • Loading branch information...
1 parent 97ef0b3 commit 671f2ee3347133f6769eec7cc3ad561261f0d990 @Ccook committed Oct 14, 2012
View
14 src/main/java/edu/american/student/mnemosyne/core/BaseNetworkBuilderProcess.java
@@ -4,6 +4,7 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
+import java.util.Random;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat;
@@ -78,6 +79,8 @@ public void map(Key ik, Value iv, Context context)
conf.setOutputNeuronCount(num);
conf.setNumberOfCategories(num);
+ conf.setBasicMLInput(getRandomArray(inputNeuronCount));
+ conf.setBasicIdealMLOutput(getRandomArray(inputNeuronCount));
NNProcessor processor = NNProcessorFactory.getProcessorBean(conf);
try
{
@@ -100,11 +103,16 @@ public void map(Key ik, Value iv, Context context)
}
}
- private int calculateNeuronCount(int size)
+ private double[][] getRandomArray(int inputNeuronCount)
{
- // TODO Auto-generated method stub
- return 0;
+ double[][] toReturn = new double[1][inputNeuronCount];
+ for(int i=0;i<inputNeuronCount;i++)
+ {
+ toReturn[0][i]=new Random().nextDouble();
+ }
+ return toReturn;
}
+
}
}
View
70 src/main/java/edu/american/student/mnemosyne/core/TrainProcess.java
@@ -16,14 +16,14 @@
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
-import org.encog.neural.networks.layers.Layer;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
import edu.american.student.mnemosyne.conf.ClassificationNetworkConf;
import edu.american.student.mnemosyne.conf.HadoopJobConfiguration;
import edu.american.student.mnemosyne.core.framework.MnemosyneProcess;
import edu.american.student.mnemosyne.core.model.Artifact;
import edu.american.student.mnemosyne.core.util.AccumuloForeman;
+import edu.american.student.mnemosyne.core.util.ClassificationNetwork;
import edu.american.student.mnemosyne.core.util.HadoopForeman;
import edu.american.student.mnemosyne.core.util.NNInput;
import edu.american.student.mnemosyne.core.util.NNOutput;
@@ -35,25 +35,26 @@ public void process() throws Exception
{
artifactForeman.connect();
List<Artifact> artifacts = artifactForeman.returnArtifacts();
- for(Artifact artifact: artifacts)
+ for (Artifact artifact : artifacts)
{
HadoopForeman hForeman = new HadoopForeman();
HadoopJobConfiguration conf = new HadoopJobConfiguration();
conf.setJobName(HadoopJobConfiguration.buildJobName(this.getClass()));
conf.setMapperClass(NNTrainMapper.class);
conf.overrideDefaultTable(AccumuloForeman.getArtifactRepositoryName());
- Collection<Pair<Text,Text>> cfPairs = new ArrayList<Pair<Text,Text>>();
- cfPairs.add(new Pair<Text,Text>(new Text(artifact.getArtifactId()+":FIELD"),null));
+ Collection<Pair<Text, Text>> cfPairs = new ArrayList<Pair<Text, Text>>();
+ cfPairs.add(new Pair<Text, Text>(new Text(artifact.getArtifactId() + ":FIELD"), null));
conf.fetchColumns(cfPairs);
conf.setInputFormatClass(AccumuloInputFormat.class);
conf.setOutputFormatClass(AccumuloOutputFormat.class);
hForeman.runJob(conf);
}
}
-
+
public static class NNTrainMapper extends Mapper<Key, Value, Writable, Writable>
{
private AccumuloForeman aForeman = new AccumuloForeman();
+
@Override
public void map(Key ik, Value iv, Context context)
{
@@ -68,53 +69,44 @@ public void map(Key ik, Value iv, Context context)
baseConf = aForeman.getBaseNetworkConf(ik.getRow().toString());
error = aForeman.getBaseNetworkError(ik.getRow().toString());
}
- catch (Exception e){}
- System.out.println("base? "+base ==null);
- System.out.println("conf? "+baseConf == null);
- System.out.println("error?"+ error);
- if(base != null)
+ catch (Exception e)
+ {
+ }
+ if (base != null)
{
- //train shit
+ // train shit
System.out.println("Training ...");
double[] input = NNInput.inflate(iv.toString());
double[] output = NNOutput.inflate(iv.toString());
-
- //base.addLayer(new BasicLayer(baseConf.getHiddenActivation(),baseConf.getHiddenBias(),baseConf.getHiddenNeuronCount()*2));
- MLDataSet trainingSet = new BasicMLDataSet(new double[][]{input},new double[][]{output});
- List<Layer> layers = base.getStructure().getLayers();
- BasicNetwork newNetwork = new BasicNetwork();
- for(Layer layer:layers)
+ MLDataSet trainingSet = new BasicMLDataSet(new double[][]{ input }, new double[][]{ output });
+ BasicNetwork newNetwork = ClassificationNetwork.addLayerToNetwork(base, new BasicLayer(baseConf.getHiddenActivation(), baseConf.getHiddenBias(), baseConf.getHiddenNeuronCount() * 2));
+ final ResilientPropagation train = new ResilientPropagation(newNetwork, trainingSet);
+ int epoch = 1;
+ try
{
- System.out.println("adding layer");
- newNetwork.addLayer(layer);
+
+ do
+ {
+ train.iteration();
+ System.out.println("Epoch #" + epoch + " Error:" + train.getError());
+ epoch++;
+ }
+ while (train.getError() > error * .000000000000000000001);
}
- //newNetwork.addLayer(new BasicLayer(baseConf.getHiddenActivation(),baseConf.getHiddenBias(),baseConf.getHiddenNeuronCount()*2));
- newNetwork.getStructure().finalizeStructure();
-
- final ResilientPropagation train = new ResilientPropagation(newNetwork, trainingSet);
- int epoch =1;
- try{
-
- do {
- train.iteration();
- System.out.println("Epoch #" + epoch + " Error:" + train.getError());
- epoch++;
- } while(train.getError()>error*.000000000000000000001);
- }catch(Exception e)
- {
- e.printStackTrace();
- }
-
+ catch (Exception e)
+ {
+ e.printStackTrace();
+ }
+
}
-
+
}
}
public void setup() throws Exception
{
// TODO Auto-generated method stub
-
+
}
-
}
View
1 src/main/java/edu/american/student/mnemosyne/core/util/AccumuloForeman.java
@@ -25,6 +25,7 @@
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.io.Text;
import org.encog.neural.networks.BasicNetwork;
+import org.encog.util.obj.SerializeObject;
import edu.american.student.mnemosyne.conf.ClassificationNetworkConf;
import edu.american.student.mnemosyne.core.framework.ArtifactRepository;
View
99 src/main/java/edu/american/student/mnemosyne/core/util/ClassificationNetwork.java
@@ -1,5 +1,7 @@
package edu.american.student.mnemosyne.core.util;
+import org.encog.engine.network.activation.ActivationFunction;
+import org.encog.neural.flat.FlatNetwork;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
@@ -23,4 +25,101 @@ public static BasicNetwork constructNetworks(ClassificationNetworkConf conf)
return network;
}
+ public static BasicNetwork addLayerToNetwork(BasicNetwork network, BasicLayer layer)
+ {
+ FlatNetwork toCopy = network.getStructure().getFlat();
+ FlatNetwork toInstall = new FlatNetwork();
+
+ //these properties stay the same
+ toInstall.setConnectionLimit(toCopy.getConnectionLimit());
+ toInstall.setHasContext(toCopy.getHasContext());
+ toInstall.setInputCount(toCopy.getInputCount());
+ toInstall.setOutputCount(toCopy.getOutputCount());
+ toInstall.setBeginTraining(toCopy.getBeginTraining());
+
+
+ //Add new stuff before the output layer
+ ActivationFunction[] functions =toCopy.getActivationFunctions();
+ ActivationFunction[] toInstallFunc = new ActivationFunction[functions.length+1];
+
+ double[] toCopyBiasActivations = toCopy.getBiasActivation();
+ double[] toInstallBiasActivations = new double[toCopyBiasActivations.length+1];
+
+ int[] toCopyTargetOffset = toCopy.getContextTargetOffset();
+ int[] toInstallTargetOffset = new int[toCopyTargetOffset.length+1];
+
+ int[] toCopyTargetSize = toCopy.getContextTargetSize();
+ int[] toInstallTargetSize = new int[toCopyTargetSize.length+1];
+
+ int toCopyEndTraining = toCopy.getEndTraining();
+ int toInstallEndTraining = toCopyEndTraining+1;
+
+ int[] toCopyLayerContextCount = toCopy.getLayerContextCount();
+ int[] toInstallLayerContextCount = new int[toCopyLayerContextCount.length+1];
+
+ int[] toCopyLayerCounts = toCopy.getLayerCounts();
+ int[] toInstallLayerCounts = new int[toCopyLayerCounts.length+1];
+
+ //TODO
+ toCopy.getLayerFeedCounts();
+ toCopy.getLayerIndex();
+ toCopy.getLayerSums();
+ toCopy.getLayerOutput();
+ toCopy.getNeuronCount();
+ toCopy.getWeightIndex();
+ toCopy.getWeights();
+
+ for(int i=0;i<functions.length-1;i++)
+ {
+ toInstallFunc[i] = functions[i];
+ toInstallBiasActivations[i] = toCopyBiasActivations[i];
+ toInstallTargetOffset[i] = toCopyTargetOffset[i];
+ toInstallTargetSize[i] = toCopyTargetSize[i];
+ toInstallLayerContextCount[i] = toCopyLayerContextCount[i];
+ toInstallLayerCounts[i] = toCopyLayerCounts[i];
+ }
+
+ toInstallFunc[functions.length-1] = layer.getActivation();
+ toInstallFunc[functions.length] = functions[functions.length-1];
+
+ toInstallBiasActivations[toCopyBiasActivations.length-1] = layer.getBiasActivation();
+ toInstallBiasActivations[toCopyBiasActivations.length] = toCopyBiasActivations[toCopyBiasActivations.length-1];
+
+ toInstallTargetOffset[toCopyTargetOffset.length-1] = toCopyTargetOffset[toCopyTargetOffset.length-2];
+ toInstallTargetOffset[toCopyTargetOffset.length] = toCopyTargetOffset[toCopyTargetOffset.length-1];
+
+ toInstallTargetSize[toCopyTargetSize.length-1] = toCopyTargetSize[toCopyTargetSize.length-2];
+ toInstallTargetSize[toCopyTargetSize.length] = toCopyTargetSize[toCopyTargetSize.length-1];
+
+ toInstallLayerContextCount[toCopyLayerContextCount.length-1] = toCopyLayerContextCount[toCopyLayerContextCount.length-2];
+ toInstallLayerContextCount[toCopyLayerContextCount.length] = toCopyLayerContextCount[toCopyLayerContextCount.length-1];
+
+
+ toInstallLayerCounts[toCopyBiasActivations.length-1] =layer.getContextCount();
+ toInstallLayerCounts[toCopyBiasActivations.length] = toCopyLayerContextCount[toCopyLayerContextCount.length-1];
+
+
+ toInstall.setBiasActivation(toInstallBiasActivations);
+ toInstall.setActivationFunctions(toInstallFunc);
+ toInstall.setContextTargetOffset(toInstallTargetOffset);
+ toInstall.setContextTargetSize(toInstallTargetSize);
+ toInstall.setEndTraining(toInstallEndTraining);
+ toInstall.setLayerContextCount(toInstallLayerContextCount);
+
+
+ compare(toCopy,toInstall);
+ return network;
+
+ }
+
+ /**
+ * XXX: for debug
+ * @param toCopy
+ * @param toInstall
+ */
+ private static void compare(FlatNetwork toCopy, FlatNetwork toInstall)
+ {
+ // TODO Auto-generated method stub
+
+ }
}
View
26 src/test/java/edu/american/student/mnemosyne/core/BaseNetworkBuilderProcessTest.java
@@ -1,19 +1,25 @@
package edu.american.student.mnemosyne.core;
-import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.*;
+import java.io.File;
import java.util.List;
import org.encog.engine.network.activation.ActivationSigmoid;
+import org.encog.ml.data.MLDataSet;
+import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.layers.Layer;
+import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
+import org.encog.util.obj.SerializeObject;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
+import edu.american.student.mnemosyne.conf.ClassificationNetworkConf;
import edu.american.student.mnemosyne.core.model.Artifact;
import edu.american.student.mnemosyne.core.util.AccumuloForeman;
import edu.american.student.mnemosyne.core.util.ArtifactForeman;
@@ -59,20 +65,14 @@ public void test() throws Exception
{
System.out.println(artifact.getArtifactId());
BasicNetwork network =aForeman.getBaseNetwork(artifact.getArtifactId());
- BasicNetwork newNetwork = new BasicNetwork();
- List<Layer> layers= network.getStructure().getLayers();
- for(Layer layer:layers)
- {
- System.out.println("adding layer");
- newNetwork.addLayer(layer);
- }
- //newNetwork.addLayer(new BasicLayer(new ActivationSigmoid(),true,3));
- network.getStructure().finalizeStructure();
- System.out.println("LC "+network.getLayerCount());
+ // train the neural network
+ ClassificationNetworkConf conf= aForeman.getBaseNetworkConf(artifact.getArtifactId());
+ assertNotNull("conf is null",conf);
assertNotNull(network);
-
+
}
- // assertNotNull(aForeman.getBaseNetwork(artifactId));
}
+
+
}
View
67 src/test/java/edu/american/student/mnemosyne/util/ClassificationNetworkUtilTest.java
@@ -0,0 +1,67 @@
+package edu.american.student.mnemosyne.util;
+
+import java.util.List;
+
+import org.encog.engine.network.activation.ActivationSigmoid;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.layers.BasicLayer;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import edu.american.student.mnemosyne.conf.ClassificationNetworkConf;
+import edu.american.student.mnemosyne.core.model.Artifact;
+import edu.american.student.mnemosyne.core.util.AccumuloForeman;
+import edu.american.student.mnemosyne.core.util.ArtifactForeman;
+import edu.american.student.mnemosyne.core.util.ClassificationNetwork;
+import edu.american.student.mnemosyne.core.util.MnemosyneAccumuloAdministrator;
+
+public class ClassificationNetworkUtilTest
+{
+
+ static AccumuloForeman aForeman = new AccumuloForeman();
+ static ArtifactForeman artifactForeman = new ArtifactForeman();
+
+ @BeforeClass
+ public static void setUpBeforeClass() throws Exception
+ {
+ MnemosyneAccumuloAdministrator.setup();
+ TestHelper.ingestTestArtifacts();
+ TestHelper.buildArtifacts();
+ TestHelper.constructBaseClassificationNetwork();
+ aForeman.connect();
+ artifactForeman.connect();
+ }
+
+ @AfterClass
+ public static void tearDownAfterClass() throws Exception
+ {
+ }
+
+ @Before
+ public void setUp() throws Exception
+ {
+ }
+
+ @After
+ public void tearDown() throws Exception
+ {
+ }
+
+ @Test
+ public void test() throws Exception
+ {
+ List<Artifact> artifacts = artifactForeman.returnArtifacts();
+ for(Artifact artifact:artifacts)
+ {
+ System.out.println(artifact.getArtifactId());
+ BasicNetwork network =aForeman.getBaseNetwork(artifact.getArtifactId());
+ ClassificationNetworkConf conf= aForeman.getBaseNetworkConf(artifact.getArtifactId());
+ ClassificationNetwork.addLayerToNetwork(network,new BasicLayer(new ActivationSigmoid(),true,conf.getNumberOfCategories()));
+
+ }
+ }
+
+}

0 comments on commit 671f2ee

Please sign in to comment.
Something went wrong with that request. Please try again.