Permalink
Browse files

Updated examples to use new structure.

git-svn-id: http://encog-java.googlecode.com/svn/trunk/encog-examples@3200 f90f6e9a-ac51-0410-b353-d1b83c6f6923
  • Loading branch information...
1 parent a5d07a1 commit d8e4e7850d895ed8d19a9970a36f84a63c524d6e @jeffheaton jeffheaton committed Apr 17, 2011
Showing with 12,632 additions and 0 deletions.
  1. +10 −0 .classpath
  2. +13 −0 .project
  3. +5 −0 .settings/org.eclipse.jdt.core.prefs
  4. 0 LICENSE.txt
  5. 0 NOTICE.txt
  6. 0 README.txt
  7. +168 −0 pom.xml
  8. +49 −0 src/main/java/org/encog/examples/clustering/kmeans/SimpleKMeans.java
  9. +138 −0 src/main/java/org/encog/examples/neural/activation/ActivationSigmoidPosNeg.java
  10. +80 −0 src/main/java/org/encog/examples/neural/activation/CustomActivation.java
  11. +205 −0 src/main/java/org/encog/examples/neural/adaline/AdalineDigits.java
  12. +90 −0 src/main/java/org/encog/examples/neural/analyst/AnalystExample.java
  13. +98 −0 src/main/java/org/encog/examples/neural/art/art1/NeuralART1.java
  14. +182 −0 src/main/java/org/encog/examples/neural/bam/BidirectionalAssociativeMemory.java
  15. +54 −0 src/main/java/org/encog/examples/neural/benchmark/Benchmark.java
  16. +102 −0 src/main/java/org/encog/examples/neural/benchmark/BinaryVsMemory.java
  17. +59 −0 src/main/java/org/encog/examples/neural/benchmark/MatrixBenchmark.java
  18. +108 −0 src/main/java/org/encog/examples/neural/benchmark/MultiBench.java
  19. +103 −0 src/main/java/org/encog/examples/neural/benchmark/SimpleBenchmark.java
  20. +72 −0 src/main/java/org/encog/examples/neural/benchmark/ThreadCount.java
  21. +96 −0 src/main/java/org/encog/examples/neural/benchmark/WeightInitialization.java
  22. +175 −0 src/main/java/org/encog/examples/neural/boltzmann/BoltzTSP.java
  23. +392 −0 src/main/java/org/encog/examples/neural/cpn/RocketCPN.java
  24. +190 −0 src/main/java/org/encog/examples/neural/cross/CrossValidateSunspot.java
  25. +69 −0 src/main/java/org/encog/examples/neural/csv/XORCSV.java
  26. +114 −0 src/main/java/org/encog/examples/neural/forest/feedforward/Constant.java
  27. +141 −0 src/main/java/org/encog/examples/neural/forest/feedforward/Evaluate.java
  28. +176 −0 src/main/java/org/encog/examples/neural/forest/feedforward/ForestCover.java
  29. +128 −0 src/main/java/org/encog/examples/neural/forest/feedforward/GenerateData.java
  30. +71 −0 src/main/java/org/encog/examples/neural/forest/feedforward/TrainNetwork.java
  31. +148 −0 src/main/java/org/encog/examples/neural/gui/hopfield/HopfieldPanel.java
  32. +69 −0 src/main/java/org/encog/examples/neural/gui/hopfield/HopfieldPattern.java
  33. +352 −0 src/main/java/org/encog/examples/neural/gui/ocr/Entry.java
  34. +677 −0 src/main/java/org/encog/examples/neural/gui/ocr/OCR.java
  35. +115 −0 src/main/java/org/encog/examples/neural/gui/ocr/Sample.java
  36. +178 −0 src/main/java/org/encog/examples/neural/gui/ocr/SampleData.java
  37. +168 −0 src/main/java/org/encog/examples/neural/gui/predict/GraphPanel.java
  38. +108 −0 src/main/java/org/encog/examples/neural/gui/predict/PredictSIN.java
  39. +76 −0 src/main/java/org/encog/examples/neural/gui/som/MapPanel.java
  40. +104 −0 src/main/java/org/encog/examples/neural/gui/som/SomColors.java
  41. +253 −0 src/main/java/org/encog/examples/neural/hopfield/HopfieldAssociate.java
  42. +294 −0 src/main/java/org/encog/examples/neural/image/ImageNeuralNetwork.java
  43. +103 −0 src/main/java/org/encog/examples/neural/lunar/LanderSimulator.java
  44. +84 −0 src/main/java/org/encog/examples/neural/lunar/LunarLander.java
  45. +79 −0 src/main/java/org/encog/examples/neural/lunar/NeuralPilot.java
  46. +41 −0 src/main/java/org/encog/examples/neural/lunar/PilotScore.java
  47. +95 −0 src/main/java/org/encog/examples/neural/persist/EncogPersistence.java
  48. +94 −0 src/main/java/org/encog/examples/neural/persist/Serial.java
  49. +65 −0 src/main/java/org/encog/examples/neural/predict/market/Config.java
  50. +165 −0 src/main/java/org/encog/examples/neural/predict/market/MarketBuildTraining.java
  51. +107 −0 src/main/java/org/encog/examples/neural/predict/market/MarketEvaluate.java
  52. +56 −0 src/main/java/org/encog/examples/neural/predict/market/MarketPredict.java
  53. +66 −0 src/main/java/org/encog/examples/neural/predict/market/MarketTrain.java
  54. +207 −0 src/main/java/org/encog/examples/neural/predict/sunspot/PredictSunspot.java
  55. +221 −0 src/main/java/org/encog/examples/neural/predict/sunspot/PredictSunspotSVM.java
  56. +527 −0 src/main/java/org/encog/examples/neural/radial/MultiRadial.java
  57. +69 −0 src/main/java/org/encog/examples/neural/radial/XorSVD.java
  58. +171 −0 src/main/java/org/encog/examples/neural/recurrent/TemporalString.java
  59. +128 −0 src/main/java/org/encog/examples/neural/recurrent/elman/ElmanXOR.java
  60. +126 −0 src/main/java/org/encog/examples/neural/recurrent/jordan/JordanXOR.java
  61. +75 −0 src/main/java/org/encog/examples/neural/resume/TrainResume.java
  62. +76 −0 src/main/java/org/encog/examples/neural/som/SimpleSOM.java
  63. +61 −0 src/main/java/org/encog/examples/neural/util/TemporalXOR.java
  64. +37 −0 src/main/java/org/encog/examples/neural/util/XOR.java
  65. +91 −0 src/main/java/org/encog/examples/neural/xoranneal/XorAnneal.java
  66. +86 −0 src/main/java/org/encog/examples/neural/xorbackprop/XorBackprop.java
  67. +104 −0 src/main/java/org/encog/examples/neural/xordisplay/XORDisplay.java
  68. +79 −0 src/main/java/org/encog/examples/neural/xorflat/XORFlat.java
  69. +88 −0 src/main/java/org/encog/examples/neural/xorgaussian/XorGaussian.java
  70. +94 −0 src/main/java/org/encog/examples/neural/xorgenetic/XorGenetic.java
  71. +68 −0 src/main/java/org/encog/examples/neural/xorlma/XorLMA.java
  72. +89 −0 src/main/java/org/encog/examples/neural/xormanhattan/XORManhattan.java
  73. +87 −0 src/main/java/org/encog/examples/neural/xormulti/XORMulti.java
  74. +84 −0 src/main/java/org/encog/examples/neural/xorneat/XorNEAT.java
  75. +83 −0 src/main/java/org/encog/examples/neural/xorpartial/XORPartial.java
  76. +69 −0 src/main/java/org/encog/examples/neural/xorpartial/XORPartialAuto.java
  77. +86 −0 src/main/java/org/encog/examples/neural/xorpnn/XorPNN.java
  78. +68 −0 src/main/java/org/encog/examples/neural/xorradial/XorRadial.java
  79. +92 −0 src/main/java/org/encog/examples/neural/xorresilient/XORResilient.java
  80. +91 −0 src/main/java/org/encog/examples/neural/xorscg/XorSCG.java
  81. +113 −0 src/main/java/org/encog/examples/neural/xorsql/XORSQL.java
  82. +88 −0 src/main/java/org/encog/examples/neural/xorunbiased/XorUnBiased.java
  83. +103 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/Play21.java
  84. +162 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/Player.java
  85. +67 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/PlayerGenome.java
  86. +55 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/ScorePlayer.java
  87. +41 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/blackjack/Dealer.java
  88. +165 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/blackjack/Hand.java
  89. +86 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/blackjack/Participant.java
  90. +161 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/blackjack/Shoe.java
  91. +235 −0 src/main/java/org/encog/examples/nonlinear/basicstrategy/blackjack/Table.java
  92. +101 −0 src/main/java/org/encog/examples/nonlinear/tsp/City.java
  93. +157 −0 src/main/java/org/encog/examples/nonlinear/tsp/anneal/SolveTSP.java
  94. +133 −0 src/main/java/org/encog/examples/nonlinear/tsp/anneal/TSPSimulatedAnnealing.java
  95. +164 −0 src/main/java/org/encog/examples/nonlinear/tsp/genetic/SolveTSP.java
  96. +108 −0 src/main/java/org/encog/examples/nonlinear/tsp/genetic/TSPGenome.java
  97. +60 −0 src/main/java/org/encog/examples/nonlinear/tsp/genetic/TSPScore.java
  98. +54 −0 src/main/java/org/encog/examples/unfinished/maze/Constants.java
  99. +108 −0 src/main/java/org/encog/examples/unfinished/maze/EvaluateMouse.java
  100. +204 −0 src/main/java/org/encog/examples/unfinished/maze/Maze.java
  101. +36 −0 src/main/java/org/encog/examples/unfinished/maze/MazeCell.java
  102. +116 −0 src/main/java/org/encog/examples/unfinished/maze/MazePanel.java
  103. +62 −0 src/main/java/org/encog/examples/unfinished/maze/MouseFactory.java
  104. +193 −0 src/main/java/org/encog/examples/unfinished/maze/MouseMaze.java
  105. +218 −0 src/main/java/org/encog/examples/unfinished/maze/NeuralMouse.java
View
10 .classpath
@@ -0,0 +1,10 @@
+<classpath>
+ <classpathentry kind="src" path="src/test/java" output="target/test-classes" including="**/*.java"/>
+ <classpathentry kind="src" path="src/test/resources" output="target/test-classes" excluding="**/*.java"/>
+ <classpathentry kind="src" path="src/main/java" including="**/*.java"/>
+ <classpathentry kind="src" path="src/main/resources" excluding="**/*.java"/>
+ <classpathentry kind="output" path="target/classes"/>
+ <classpathentry kind="var" path="M2_REPO/org/encog/encog-core/3.0.0-SNAPSHOT/encog-core-3.0.0-SNAPSHOT.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/hsqldb/hsqldb/2.0.0/hsqldb-2.0.0.jar"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+</classpath>
View
13 .project
@@ -0,0 +1,13 @@
+<projectDescription>
+ <name>encog-release</name>
+ <comment>Examples and general release for Encog. NO_M2ECLIPSE_SUPPORT: Project files created with the maven-eclipse-plugin are not supported in M2Eclipse.</comment>
+ <projects/>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+</projectDescription>
View
5 .settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,5 @@
+#Sun Apr 17 14:56:35 CDT 2011
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.source=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
View
0 LICENSE.txt
No changes.
View
0 NOTICE.txt
No changes.
View
0 README.txt
No changes.
View
168 pom.xml
@@ -0,0 +1,168 @@
+<!--
+ Encog Artificial Intelligence Framework v3.0
+ Java Version
+ http://www.heatonresearch.com/encog/
+ http://code.google.com/p/encog-java/
+
+ Copyright 2008-2011, Heaton Research Inc., and individual contributors.
+ See the copyright.txt in the distribution for a full listing of
+ individual contributors.
+
+ This is free software; you can redistribute it and/or modify it
+ under the terms of the GNU Lesser General Public License as
+ published by the Free Software Foundation; either version 2.1 of
+ the License, or (at your option) any later version.
+
+ This software is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this software; if not, write to the Free
+ Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+ 02110-1301 USA, or see the FSF site: http://www.fsf.org.
+-->
+<!-- This POM makes Encog a Maven artifact, accessible from other programs
+ using Maven.
+
+Authors of this POM file: iirekm, jheaton
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.encog</groupId>
+ <artifactId>encog-release</artifactId>
+ <version>3.0.0-SNAPSHOT</version>
+ <packaging>jar</packaging>
+ <name>Encog Examples Release</name>
+ <description>Examples and general release for Encog.</description>
+ <url>http://www.heatonresearch.com/encog/</url>
+ <licenses>
+ <license>
+ <name>The Apache Software License, Version 2.0</name>
+ <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://encog-java.googlecode.com/svn/trunk/encog-examples/</connection>
+ <developerConnection>scm:svn:https://encog-java.googlecode.com/svn/trunk/encog-examples/</developerConnection>
+ <url>http://code.google.com/p/encog-java/source/browse/</url>
+ </scm>
+ <developers>
+ <developer>
+ <id>jeffheatondotcom</id>
+ <name>Jeff Heaton</name>
+ <email>support@heatonresearch.com</email>
+ </developer>
+ </developers>
+
+ <parent>
+ <groupId>org.sonatype.oss</groupId>
+ <artifactId>oss-parent</artifactId>
+ <version>5</version>
+ </parent>
+
+
+ <build>
+
+ <plugins>
+ <!-- use javac 1.6 -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+
+ <!-- build also source jar - useful for working in Eclipse -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-source-plugin</artifactId>
+ <version>2.1.1</version>
+ <executions>
+ <execution>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+
+ <!-- Generate Javadoc, required to push to repo -->
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <version>2.7</version>
+ <executions>
+ <execution>
+ <id>attach-javadocs</id>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ </plugin>
+
+
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2.1</version>
+ <configuration>
+ <descriptorRefs>
+ <descriptorRef>jar-with-dependencies</descriptorRef>
+ </descriptorRefs>
+ <archive>
+ <manifest>
+ <mainClass>org.encog.workbench.EncogWorkBench</mainClass>
+ </manifest>
+ </archive>
+ </configuration>
+ <executions>
+ <execution>
+ <id>make-assembly</id> <!-- this is used for inheritance merges -->
+ <phase>package</phase> <!-- bind to the packaging phase -->
+ <goals>
+ <goal>single</goal>
+ </goals>
+ </execution>
+ </executions>
+
+ </plugin>
+
+
+
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-gpg-plugin</artifactId>
+ <version>1.1</version>
+ <executions>
+ <execution>
+ <id>sign-artifacts</id>
+ <phase>verify</phase>
+ <goals>
+ <goal>sign</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.encog</groupId>
+ <artifactId>encog-core</artifactId>
+ <version>3.0.0-SNAPSHOT</version>
+ </dependency>
+ </dependencies>
+</project>
View
49 src/main/java/org/encog/examples/clustering/kmeans/SimpleKMeans.java
@@ -0,0 +1,49 @@
+package org.encog.examples.clustering.kmeans;
+
+import java.util.Arrays;
+
+import org.encog.engine.data.BasicEngineData;
+import org.encog.engine.data.EngineData;
+import org.encog.engine.data.EngineDataSet;
+import org.encog.ml.MLCluster;
+import org.encog.ml.kmeans.KMeansClustering;
+import org.encog.neural.data.basic.BasicNeuralData;
+import org.encog.neural.data.basic.BasicNeuralDataSet;
+
+public class SimpleKMeans {
+ public static final double[][] DATA = {
+ {28,15,22},
+ {16,15,32},
+ {32,20,44},
+ {1,2,3},
+ {3,2,1} };
+
+ public static void main (String args[]){
+
+ BasicNeuralDataSet set = new BasicNeuralDataSet();
+
+ for(int i=0;i<DATA.length;i++)
+ {
+ set.add(new BasicNeuralData(DATA[i]));
+ }
+
+ KMeansClustering kmeans = new KMeansClustering(2,set);
+
+ kmeans.iteration(100);
+ System.out.println("Final WCSS: " + kmeans.getWCSS());
+
+ int i = 1;
+ for(MLCluster cluster: kmeans.getClusters())
+ {
+ System.out.println("*** Cluster " + (i++) + " ***");
+ EngineDataSet ds = cluster.createDataSet();
+ EngineData pair = BasicEngineData.createPair(ds.getInputSize(), ds.getIdealSize());
+ for(int j=0;j<ds.getRecordCount();j++)
+ {
+ ds.getRecord(j, pair);
+ System.out.println(Arrays.toString(pair.getInputArray()));
+
+ }
+ }
+ }
+}
View
138 src/main/java/org/encog/examples/neural/activation/ActivationSigmoidPosNeg.java
@@ -0,0 +1,138 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.activation;
+
+import org.encog.mathutil.BoundMath;
+import org.encog.neural.activation.ActivationFunction;
+
+/**
+ * The sigmoid activation function takes on a sigmoidal shape. Only positive
+ * numbers are generated. Do not use this activation function if negative number
+ * output is desired.
+ */
+public class ActivationSigmoidPosNeg implements ActivationFunction {
+
+ /**
+ * The offset to the parameter that holds the sigmoid slope.
+ */
+ public static final int PARAM_SIGMOID_POS_NEG_SLOPE = 0;
+
+ /**
+ * Serial id for this class.
+ */
+ private static final long serialVersionUID = 5622349801036468572L;
+
+ /**
+ * The parameters.
+ */
+ private double[] params;
+
+ /**
+ * Construct a basic sigmoid function, with a slope of 1.
+ */
+ public ActivationSigmoidPosNeg() {
+ this.params = new double[1];
+ this.params[ActivationSigmoidPosNeg.PARAM_SIGMOID_POS_NEG_SLOPE] = 1;
+ }
+
+ /**
+ * @return The object cloned;
+ */
+ @Override
+ public ActivationFunction clone() {
+ return new ActivationSigmoidPosNeg();
+ }
+
+ /**
+ * @return Get the slope of the activation function.
+ */
+ public double getSlope() {
+ return this.params[ActivationSigmoidPosNeg.PARAM_SIGMOID_POS_NEG_SLOPE];
+ }
+
+ /**
+ * @return True, sigmoid has a derivative.
+ */
+ @Override
+ public boolean hasDerivative() {
+ return true;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void activationFunction(final double[] x, final int start,
+ final int size) {
+ for (int i = start; i < start + size; i++) {
+ x[i] = 2.0*(1.0 / (1.0 + BoundMath.exp(-params[0] * x[i])))-1.0;
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public double derivativeFunction(final double x) {
+ return Math.pow( params[0] * x * (1.0 - x),2);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String[] getParamNames() {
+ final String[] results = { "slope" };
+ return results;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public double[] getParams() {
+ // TODO Auto-generated method stub
+ return this.params;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void setParam(final int index, final double value) {
+ this.params[index] = value;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String getOpenCLExpression(final boolean derivative) {
+ if (derivative) {
+ return "(1.0f / (1.0f + exp(-slope * x)))";
+ } else {
+ return "(slope * x * (1.0f - x))";
+ }
+ }
+}
View
80 src/main/java/org/encog/examples/neural/activation/CustomActivation.java
@@ -0,0 +1,80 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.activation;
+
+import org.encog.neural.data.NeuralData;
+import org.encog.neural.data.NeuralDataPair;
+import org.encog.neural.data.NeuralDataSet;
+import org.encog.neural.data.basic.BasicNeuralDataSet;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.layers.BasicLayer;
+import org.encog.neural.networks.training.Train;
+import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
+import org.encog.neural.networks.training.strategy.RequiredImprovementStrategy;
+import org.encog.util.logging.Logging;
+
+
+public class CustomActivation {
+
+ public static double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 },
+ { 0.0, 1.0 }, { 1.0, 1.0 } };
+
+ public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };
+
+ public static void main(final String args[]) {
+
+ Logging.stopConsoleLogging();
+
+ BasicNetwork network = new BasicNetwork();
+ network.addLayer(new BasicLayer(null, true,2));
+ network.addLayer(new BasicLayer(new ActivationSigmoidPosNeg(), true,4));
+ network.addLayer(new BasicLayer(new ActivationSigmoidPosNeg(), true,1));
+ network.getStructure().finalizeStructure();
+ network.reset();
+
+ NeuralDataSet trainingSet = new BasicNeuralDataSet(XOR_INPUT, XOR_IDEAL);
+
+ // train the neural network
+ final Train train = new ResilientPropagation(network, trainingSet);
+ // reset if improve is less than 1% over 5 cycles
+ train.addStrategy(new RequiredImprovementStrategy(5));
+
+ int epoch = 1;
+
+ do {
+ train.iteration();
+ System.out
+ .println("Epoch #" + epoch + " Error:" + train.getError());
+ epoch++;
+ } while(train.getError() > 0.01);
+
+ // test the neural network
+ System.out.println("Neural Network Results:");
+ for(NeuralDataPair pair: trainingSet ) {
+ final NeuralData output = network.compute(pair.getInput());
+ System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
+ + ", actual=" + output.getData(0) + ",ideal=" + pair.getIdeal().getData(0));
+ }
+ }
+}
View
205 src/main/java/org/encog/examples/neural/adaline/AdalineDigits.java
@@ -0,0 +1,205 @@
/*
 * Encog(tm) Examples v2.6
 * http://www.heatonresearch.com/encog/
 * http://code.google.com/p/encog-java/

 * Copyright 2008-2010 Heaton Research, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * For more information on Heaton Research copyrights, licenses
 * and trademarks visit:
 * http://www.heatonresearch.com/copyright
 */
package org.encog.examples.neural.adaline;

import org.encog.neural.data.NeuralData;
import org.encog.neural.data.NeuralDataSet;
import org.encog.neural.data.basic.BasicNeuralData;
import org.encog.neural.data.basic.BasicNeuralDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.training.Train;
import org.encog.neural.networks.training.simple.TrainAdaline;
import org.encog.neural.pattern.ADALINEPattern;

/**
 * Trains an ADALINE network to recognize the digits 0-9, each represented
 * as a CHAR_WIDTH x CHAR_HEIGHT character bitmap ('O' = on pixel). The
 * network has one output neuron per digit; the winning output neuron is
 * the recognized digit.
 */
public class AdalineDigits {

	// Dimensions, in characters, of each digit bitmap in DIGITS.
	public final static int CHAR_WIDTH = 5;
	public final static int CHAR_HEIGHT = 7;

	// Character bitmaps for the digits 0-9: one array of CHAR_HEIGHT row
	// strings per digit.
	// NOTE(review): each row should be exactly CHAR_WIDTH characters wide;
	// several rows below appear whitespace-collapsed (likely an artifact of
	// how this listing was captured) — verify against the original source,
	// since image2data indexes columns 0..CHAR_WIDTH-1 of every row.
	public static String[][] DIGITS = {
			{ " OOO ",
			"O O",
			"O O",
			"O O",
			"O O",
			"O O",
			" OOO " },

			{ " O ",
			" OO ",
			"O O ",
			" O ",
			" O ",
			" O ",
			" O " },

			{ " OOO ",
			"O O",
			" O",
			" O ",
			" O ",
			" O ",
			"OOOOO" },

			{ " OOO ",
			"O O",
			" O",
			" OOO ",
			" O",
			"O O",
			" OOO " },

			{ " O ",
			" OO ",
			" O O ",
			"O O ",
			"OOOOO",
			" O ",
			" O " },

			{ "OOOOO",
			"O ",
			"O ",
			"OOOO ",
			" O",
			"O O",
			" OOO " },

			{ " OOO ",
			"O O",
			"O ",
			"OOOO ",
			"O O",
			"O O",
			" OOO " },

			{ "OOOOO",
			" O",
			" O",
			" O ",
			" O ",
			" O ",
			"O " },

			{ " OOO ",
			"O O",
			"O O",
			" OOO ",
			"O O",
			"O O",
			" OOO " },

			{ " OOO ",
			"O O",
			"O O",
			" OOOO",
			" O",
			"O O",
			" OOO " } };

	/**
	 * Build the training set: for each digit, the input is the flattened
	 * bitmap (see image2data) and the ideal output is +1 at that digit's
	 * index and -1 everywhere else.
	 *
	 * @return The training set, one input/ideal pair per digit.
	 */
	public static NeuralDataSet generateTraining()
	{
		NeuralDataSet result = new BasicNeuralDataSet();
		for(int i=0;i<DIGITS.length;i++)
		{
			BasicNeuralData ideal = new BasicNeuralData(DIGITS.length);

			// setup input
			NeuralData input = image2data(DIGITS[i]);

			// setup ideal: +1 for this digit's output neuron, -1 for the rest
			for(int j=0;j<DIGITS.length;j++)
			{
				if( j==i )
					ideal.setData(j,1);
				else
					ideal.setData(j,-1);
			}

			// add training element
			result.add(input,ideal);
		}
		return result;
	}

	/**
	 * Flatten a character bitmap into a bipolar input vector in row-major
	 * order: 'O' maps to +1, any other character maps to -1.
	 *
	 * @param image CHAR_HEIGHT rows of CHAR_WIDTH characters each.
	 * @return A CHAR_WIDTH*CHAR_HEIGHT element input vector.
	 */
	public static NeuralData image2data(String[] image)
	{
		NeuralData result = new BasicNeuralData(CHAR_WIDTH*CHAR_HEIGHT);

		for(int row = 0; row<CHAR_HEIGHT; row++)
		{
			for(int col = 0; col<CHAR_WIDTH; col++)
			{
				int index = (row*CHAR_WIDTH) + col;
				char ch = image[row].charAt(col);
				result.setData(index,ch=='O'?1:-1 );
			}
		}

		return result;
	}

	/**
	 * Build an ADALINE network sized to the digit bitmaps, train it until
	 * the error drops to 1% or below, then print each digit bitmap next to
	 * the output neuron it activates.
	 */
	public static void main(String args[])
	{
		int inputNeurons = CHAR_WIDTH * CHAR_HEIGHT;
		int outputNeurons = DIGITS.length;

		ADALINEPattern pattern = new ADALINEPattern();
		pattern.setInputNeurons(inputNeurons);
		pattern.setOutputNeurons(outputNeurons);
		BasicNetwork network = (BasicNetwork)pattern.generate();

		// train it with the delta rule, learning rate 0.01
		NeuralDataSet training = generateTraining();
		Train train = new TrainAdaline(network,training,0.01);

		int epoch = 1;
		do {
			train.iteration();
			System.out
					.println("Epoch #" + epoch + " Error:" + train.getError());
			epoch++;
		} while(train.getError() > 0.01);

		// report the final error over the whole training set
		System.out.println("Error:" + network.calculateError(training));

		// test it: print each digit with the output neuron it wins
		for(int i=0;i<DIGITS.length;i++)
		{
			int output = network.winner(image2data(DIGITS[i]));

			for(int j=0;j<CHAR_HEIGHT;j++)
			{
				if( j==CHAR_HEIGHT-1 )
					System.out.println(DIGITS[i][j]+" -> "+output);
				else
					System.out.println(DIGITS[i][j]);

			}

			System.out.println();
		}
	}
}
View
90 src/main/java/org/encog/examples/neural/analyst/AnalystExample.java
@@ -0,0 +1,90 @@
+package org.encog.examples.neural.analyst;
+
+import java.io.File;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+import org.encog.ConsoleStatusReportable;
+import org.encog.app.analyst.AnalystFileFormat;
+import org.encog.app.analyst.ConsoleAnalystListener;
+import org.encog.app.analyst.EncogAnalyst;
+import org.encog.app.analyst.report.AnalystReport;
+import org.encog.app.analyst.wizard.AnalystWizard;
+import org.encog.util.csv.CSVFormat;
+import org.encog.util.logging.Logging;
+
+
+public class AnalystExample {
+
+ public static final String IRIS_SOURCE = "http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data";
+ public static final String FOREST_SOURCE = "http://archive.ics.uci.edu/ml/machine-learning-databases/covtype/covtype.data.gz";
+
+ public void irisExample(File dir) throws MalformedURLException {
+ System.out.println("Starting Iris dataset example.");
+ URL url = new URL(IRIS_SOURCE);
+ File analystFile = new File(dir,"iris.ega");
+ File rawFile = new File(dir,"iris_raw.csv");
+
+ EncogAnalyst encog = new EncogAnalyst();
+ encog.addAnalystListener(new ConsoleAnalystListener());
+ AnalystWizard wiz = new AnalystWizard(encog);
+
+ wiz.wizard(url, analystFile, rawFile, false, AnalystFileFormat.DECPNT_COMMA);
+
+ encog.executeTask("task-full");
+
+ encog.save(analystFile);
+
+ AnalystReport report = new AnalystReport(encog);
+ report.produceReport(new File(dir,"report.html"));
+ }
+
+ public void forestExample(File dir) throws MalformedURLException {
+ System.out.println("Starting forest cover dataset example.");
+ URL url = new URL(FOREST_SOURCE);
+ File analystFile = new File(dir,"forest.ega");
+ File rawFile = new File(dir,"forest_raw.csv");
+
+ EncogAnalyst encog = new EncogAnalyst();
+ encog.addAnalystListener(new ConsoleAnalystListener());
+ AnalystWizard wiz = new AnalystWizard(encog);
+
+ wiz.wizard(url, analystFile, rawFile, false, AnalystFileFormat.DECPNT_COMMA);
+
+ encog.executeTask("task-full");
+
+ encog.save(analystFile);
+
+ //AnalystReport report = new AnalystReport(encog);
+ //report.produceReport(new File(dir,"report.html"));
+ }
+
+ public static void main(String[] args) {
+ if (args.length != 2) {
+ System.out
+ .println("Usage: AnalystExample [iris/forest] [data directory]");
+ System.out
+ .println("Data directory can be any empty directory. Raw files will be downloaded to here.");
+ System.exit(1);
+ }
+
+ Logging.stopConsoleLogging();
+
+ String command = args[0].trim().toLowerCase();
+ File dir = new File(args[1].trim());
+
+ AnalystExample example = new AnalystExample();
+
+ try {
+ if (command.equals("forest")) {
+ example.forestExample(dir);
+ } else if (command.equals("iris")) {
+ example.irisExample(dir);
+ } else {
+ System.out.println("Unknown command: " + command);
+ }
+ } catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+}
View
98 src/main/java/org/encog/examples/neural/art/art1/NeuralART1.java
@@ -0,0 +1,98 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.art.art1;
+
+import org.encog.neural.art.ART1;
+import org.encog.neural.data.bipolar.BiPolarNeuralData;
+
+public class NeuralART1 {
+
+ public static final int INPUT_NEURONS = 5;
+ public static final int OUTPUT_NEURONS = 10;
+
+ public static final String[] PATTERN = {
+ " O ",
+ " O O",
+ " O",
+ " O O",
+ " O",
+ " O O",
+ " O",
+ " OO O",
+ " OO ",
+ " OO O",
+ " OO ",
+ "OOO ",
+ "OO ",
+ "O ",
+ "OO ",
+ "OOO ",
+ "OOOO ",
+ "OOOOO",
+ "O ",
+ " O ",
+ " O ",
+ " O ",
+ " O",
+ " O O",
+ " OO O",
+ " OO ",
+ "OOO ",
+ "OO ",
+ "OOOO ",
+ "OOOOO" };
+
+ private boolean[][] input;
+
+ public void setupInput() {
+ this.input = new boolean[PATTERN.length][INPUT_NEURONS];
+ for (int n = 0; n < PATTERN.length; n++) {
+ for (int i = 0; i < INPUT_NEURONS; i++) {
+ this.input[n][i] = (PATTERN[n].charAt(i) == 'O');
+ }
+ }
+ }
+
+ public void run() {
+ this.setupInput();
+ ART1 logic = new ART1(INPUT_NEURONS,OUTPUT_NEURONS);
+
+ for (int i = 0; i < PATTERN.length; i++) {
+ BiPolarNeuralData in = new BiPolarNeuralData(this.input[i]);
+ BiPolarNeuralData out = new BiPolarNeuralData(OUTPUT_NEURONS);
+ logic.compute(in, out);
+ if (logic.hasWinner()) {
+ System.out.println(PATTERN[i] + " - " + logic.getWinner());
+ } else {
+ System.out.println(PATTERN[i]
+ + " - new Input and all Classes exhausted");
+ }
+ }
+ }
+
+ public static void main(String[] args) {
+ NeuralART1 art = new NeuralART1();
+ art.run();
+ }
+}
View
182 src/main/java/org/encog/examples/neural/bam/BidirectionalAssociativeMemory.java
@@ -0,0 +1,182 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.bam;
+
+import org.encog.neural.bam.BAM;
+import org.encog.neural.data.bipolar.BiPolarNeuralData;
+import org.encog.neural.networks.NeuralDataMapping;
+
+/**
+ * Simple class to recognize some patterns with a Bidirectional
+ * Associative Memory (BAM) Neural Network.
+ * This is very loosely based on a an example by Karsten Kutza,
+ * written in C on 1996-01-24.
+ * http://www.neural-networks-at-your-fingertips.com/bam.html
+ *
+ * I translated it to Java and adapted it to use Encog for neural
+ * network processing. I mainly kept the patterns from the
+ * original example.
+ *
+ */
public class BidirectionalAssociativeMemory {

	public static final String[] NAMES = { "TINA ", "ANTJE", "LISA " };

	// Deliberately corrupted variants of NAMES, used at the end of run()
	// to show the BAM recalling from noisy input.
	public static final String[] NAMES2 = { "TINE ", "ANNJE", "RITA " };

	public static final String[] PHONES = { "6843726", "8034673", "7260915" };

	public static final int IN_CHARS = 5;
	public static final int OUT_CHARS = 7;

	// Each character is encoded as BITS_PER_CHAR bipolar neurons holding the
	// little-endian binary offset of the character from FIRST_CHAR (' ').
	public static final int BITS_PER_CHAR = 6;
	public static final char FIRST_CHAR = ' ';

	public static final int INPUT_NEURONS = (IN_CHARS * BITS_PER_CHAR);
	public static final int OUTPUT_NEURONS = (OUT_CHARS * BITS_PER_CHAR);

	/**
	 * Encode a string as bipolar data, BITS_PER_CHAR bits per character.
	 * Characters are upper-cased first; each bit is the corresponding
	 * binary digit (low bit first) of (char - FIRST_CHAR).
	 *
	 * @param str The string to encode.
	 * @return Bipolar data of length str.length() * BITS_PER_CHAR.
	 */
	public BiPolarNeuralData stringToBipolar(String str)
	{
		BiPolarNeuralData result = new BiPolarNeuralData(str.length()*BITS_PER_CHAR);
		int currentIndex = 0;
		for(int i=0;i<str.length();i++)
		{
			char ch = Character.toUpperCase(str.charAt(i));
			int idx = ch-FIRST_CHAR;

			int place = 1;   // current bit mask: 1, 2, 4, ...
			for( int j=0;j<BITS_PER_CHAR;j++)
			{
				boolean value = (idx&place)>0;
				result.setData(currentIndex++,value);
				place*=2;
			}

		}
		return result;
	}

	/**
	 * Decode bipolar data back to a string; the inverse of
	 * {@link #stringToBipolar(String)}.
	 *
	 * NOTE(review): method name contains a typo ("bipolal"); kept as-is
	 * because it is public and may have external callers.
	 *
	 * @param data The bipolar data to decode.
	 * @return The decoded string.
	 */
	public String bipolalToString(BiPolarNeuralData data)
	{
		StringBuilder result = new StringBuilder();

		int j,a,p;

		// a accumulates the character offset; p is the bit weight (1,2,4,...)
		for (int i=0; i<(data.size() / BITS_PER_CHAR); i++) {
			a = 0;
			p = 1;
			for (j=0; j<BITS_PER_CHAR; j++) {
				if( data.getBoolean(i*BITS_PER_CHAR+j) )
					a+=p;

				p *= 2;
			}
			result.append((char)(a + FIRST_CHAR));
		}


		return result.toString();
	}

	/**
	 * Create random bipolar data (-1/+1 with equal probability), used as
	 * the "unknown" side of a BAM recall.
	 *
	 * @param size The number of elements.
	 * @return The random bipolar data.
	 */
	public BiPolarNeuralData randomBiPolar(int size)
	{
		BiPolarNeuralData result = new BiPolarNeuralData(size);
		for(int i=0;i<size;i++)
		{
			if(Math.random()>0.5)
				result.setData(i,-1);
			else
				result.setData(i,1);
		}
		return result;
	}

	/**
	 * Render a mapping as "from -> to" using the character decoding.
	 */
	public String mappingToString(NeuralDataMapping mapping)
	{
		StringBuilder result = new StringBuilder();
		result.append( bipolalToString((BiPolarNeuralData)mapping.getFrom()) );
		result.append(" -> ");
		result.append( bipolalToString((BiPolarNeuralData)mapping.getTo()) );
		return result.toString();
	}

	/**
	 * Run one BAM recall and print the mapping before and after; compute()
	 * updates the mapping in place.
	 */
	public void runBAM(BAM logic, NeuralDataMapping data )
	{
		StringBuilder line = new StringBuilder();
		line.append(mappingToString(data));
		logic.compute(data);
		line.append(" | ");
		line.append(mappingToString(data));
		System.out.println(line.toString());
	}

	/**
	 * Train the BAM on name/phone pairs, then recall phones from names,
	 * names from phones, and finally phones from corrupted names.
	 */
	public void run()
	{
		BAM logic = new BAM(INPUT_NEURONS, OUTPUT_NEURONS);

		// train
		for(int i=0;i<NAMES.length;i++)
		{
			logic.addPattern(
					stringToBipolar(NAMES[i]),
					stringToBipolar(PHONES[i]));
		}

		// test: recall phone numbers from the exact names
		for(int i=0;i<NAMES.length;i++)
		{
			NeuralDataMapping data = new NeuralDataMapping(
					stringToBipolar(NAMES[i]),
					randomBiPolar(OUT_CHARS*BITS_PER_CHAR));
			runBAM(logic, data);
		}

		System.out.println();

		// recall names from phone numbers (the reverse direction)
		for(int i=0;i<PHONES.length;i++)
		{
			NeuralDataMapping data = new NeuralDataMapping(
					stringToBipolar(PHONES[i]),
					randomBiPolar(IN_CHARS*BITS_PER_CHAR) );
			runBAM(logic, data);
		}

		System.out.println();

		// recall phone numbers from the corrupted names
		for(int i=0;i<NAMES.length;i++)
		{
			NeuralDataMapping data = new NeuralDataMapping(
					stringToBipolar(NAMES2[i]),
					randomBiPolar(OUT_CHARS*BITS_PER_CHAR));
			runBAM(logic, data);
		}


	}

	public static void main(String[] args) {
		BidirectionalAssociativeMemory program = new BidirectionalAssociativeMemory();
		program.run();
	}
}
View
54 src/main/java/org/encog/examples/neural/benchmark/Benchmark.java
@@ -0,0 +1,54 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.benchmark;
+
+import org.encog.ConsoleStatusReportable;
+import org.encog.Encog;
+import org.encog.util.benchmark.EncogBenchmark;
+import org.encog.util.logging.Logging;
+
+/**
+ * Simple console app that uses the Encog benchmarking class.
+ * This will print out a number that shows how fast your computer is
+ * with Encog. The lower the better.
+ * @author jeff
+ *
+ */
+public class Benchmark {
+
+ public static void main(final String args[]) {
+ Logging.stopConsoleLogging();
+ final Benchmark b = new Benchmark();
+ System.out.println("Benchmark result: " + b.run());
+
+ Encog.getInstance().shutdown();
+ }
+
+ public String run() {
+ final EncogBenchmark mark = new EncogBenchmark(new ConsoleStatusReportable());
+ String result = mark.process();
+
+ return result;
+ }
+}
View
102 src/main/java/org/encog/examples/neural/benchmark/BinaryVsMemory.java
@@ -0,0 +1,102 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.benchmark;
+
+import java.io.File;
+
+import org.encog.neural.data.NeuralDataPair;
+import org.encog.neural.data.basic.BasicNeuralDataPair;
+import org.encog.neural.data.basic.BasicNeuralDataSet;
+import org.encog.neural.data.buffer.BufferedNeuralDataSet;
+import org.encog.util.Format;
+import org.encog.util.benchmark.Evaluate;
+import org.encog.util.benchmark.RandomTrainingFactory;
+
+public class BinaryVsMemory {
+ private static int evalMemory()
+ {
+ final BasicNeuralDataSet training = RandomTrainingFactory.generate(1000,
+ 10000, 10, 10, -1, 1);
+
+ final long start = System.currentTimeMillis();
+ final long stop = start + (10*Evaluate.MILIS);
+ int record = 0;
+
+ NeuralDataPair pair = BasicNeuralDataPair.createPair(10, 10);
+
+ int iterations = 0;
+ while( System.currentTimeMillis()<stop ) {
+ iterations++;
+ training.getRecord(record++, pair);
+ if( record>=training.getRecordCount() )
+ record = 0;
+ }
+
+ System.out.println("In 10 seconds, the memory dataset read " +
+ Format.formatInteger( iterations) + " records.");
+
+ return iterations;
+ }
+
+ private static int evalBinary()
+ {
+ File file = new File("temp.egb");
+
+ final BasicNeuralDataSet training = RandomTrainingFactory.generate(1000,
+ 10000, 10, 10, -1, 1);
+
+ // create the binary file
+
+ file.delete();
+ BufferedNeuralDataSet training2 = new BufferedNeuralDataSet(file);
+ training2.load(training);
+
+ final long start = System.currentTimeMillis();
+ final long stop = start + (10*Evaluate.MILIS);
+ int record = 0;
+
+ NeuralDataPair pair = BasicNeuralDataPair.createPair(10, 10);
+
+ int iterations = 0;
+ while( System.currentTimeMillis()<stop ) {
+ iterations++;
+ training2.getRecord(record++, pair);
+ if( record>=training2.getRecordCount() )
+ record = 0;
+ }
+
+ System.out.println("In 10 seconds, the disk(binary) dataset read " +
+ Format.formatInteger( iterations) + " records.");
+ file.delete();
+ return iterations;
+ }
+
+ public static void main(String[] args)
+ {
+ int memory = evalMemory();
+ int binary = evalBinary();
+ System.out.println( "Memory is " + Format.formatInteger(memory/binary) + " times the speed of disk.");
+ }
+
+}
View
59 src/main/java/org/encog/examples/neural/benchmark/MatrixBenchmark.java
@@ -0,0 +1,59 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.benchmark;
+
+import org.encog.mathutil.matrices.Matrix;
+import org.encog.mathutil.matrices.MatrixMath;
+
+public class MatrixBenchmark {
+
+ public static Matrix generateRandomMatrix(int size)
+ {
+ Matrix result = new Matrix(size,size);
+ for(int row=0;row<size;row++)
+ {
+ for(int col=0;col<size;col++)
+ {
+ result.set(row, col, Math.random()*100);
+ }
+ }
+ return result;
+ }
+
+ public static void main(String args[])
+ {
+ long start,stop;
+
+ start = System.currentTimeMillis();
+ Matrix a = generateRandomMatrix(500);
+ Matrix b = generateRandomMatrix(500);
+ stop = System.currentTimeMillis();
+ System.out.println("Setup matrix: " + ((double)(stop-start))/1000.0 );
+
+ start = System.currentTimeMillis();
+ MatrixMath.multiply(a, b);
+ stop = System.currentTimeMillis();
+ System.out.println("Multiply matrix: " + ((double)(stop-start))/1000.0 );
+ }
+}
View
108 src/main/java/org/encog/examples/neural/benchmark/MultiBench.java
@@ -0,0 +1,108 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.benchmark;
+
+import org.encog.neural.data.NeuralDataSet;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.layers.BasicLayer;
+import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
+import org.encog.util.benchmark.RandomTrainingFactory;
+import org.encog.util.logging.Logging;
+
+public class MultiBench {
+
+ public static final int INPUT_COUNT = 40;
+ public static final int HIDDEN_COUNT = 60;
+ public static final int OUTPUT_COUNT = 20;
+
+ public static BasicNetwork generateNetwork()
+ {
+ final BasicNetwork network = new BasicNetwork();
+ network.addLayer(new BasicLayer(MultiBench.INPUT_COUNT));
+ network.addLayer(new BasicLayer(MultiBench.HIDDEN_COUNT));
+ network.addLayer(new BasicLayer(MultiBench.OUTPUT_COUNT));
+ network.getStructure().finalizeStructure();
+ network.reset();
+ return network;
+ }
+
+ public static NeuralDataSet generateTraining()
+ {
+ final NeuralDataSet training = RandomTrainingFactory.generate(1000,50000,
+ INPUT_COUNT, OUTPUT_COUNT, -1, 1);
+ return training;
+ }
+
+ public static double evaluateRPROP(BasicNetwork network,NeuralDataSet data)
+ {
+
+ ResilientPropagation train = new ResilientPropagation(network,data);
+ train.setNumThreads(1);
+ long start = System.currentTimeMillis();
+ System.out.println("Training 20 Iterations with RPROP");
+ for(int i=1;i<=20;i++)
+ {
+ train.iteration();
+ System.out.println("Iteration #" + i + " Error:" + train.getError());
+ }
+ train.finishTraining();
+ long stop = System.currentTimeMillis();
+ double diff = ((double)(stop - start))/1000.0;
+ System.out.println("RPROP Result:" + diff + " seconds." );
+ System.out.println("Final RPROP error: " + network.calculateError(data));
+ return diff;
+ }
+
+ public static double evaluateMPROP(BasicNetwork network,NeuralDataSet data)
+ {
+
+ ResilientPropagation train = new ResilientPropagation(network,data);
+ train.setNumThreads(0);
+ long start = System.currentTimeMillis();
+ System.out.println("Training 20 Iterations with MPROP");
+ for(int i=1;i<=20;i++)
+ {
+ train.iteration();
+ System.out.println("Iteration #" + i + " Error:" + train.getError());
+ }
+ train.finishTraining();
+ long stop = System.currentTimeMillis();
+ double diff = ((double)(stop - start))/1000.0;
+ System.out.println("MPROP Result:" + diff + " seconds." );
+ System.out.println("Final MPROP error: " + network.calculateError(data));
+ return diff;
+ }
+
+ public static void main(String args[])
+ {
+ Logging.stopConsoleLogging();
+ BasicNetwork network = generateNetwork();
+ NeuralDataSet data = generateTraining();
+
+ double rprop = evaluateRPROP(network,data);
+ double mprop = evaluateMPROP(network,data);
+ double factor = rprop/mprop;
+ System.out.println("Factor improvement:" + factor);
+ }
+}
View
103 src/main/java/org/encog/examples/neural/benchmark/SimpleBenchmark.java
@@ -0,0 +1,103 @@
+package org.encog.examples.neural.benchmark;
+
+import org.encog.neural.activation.ActivationSigmoid;
+import org.encog.neural.data.NeuralDataSet;
+import org.encog.neural.data.basic.BasicNeuralDataSet;
+import org.encog.neural.flat.FlatNetwork;
+import org.encog.neural.flat.train.prop.TrainFlatNetworkBackPropagation;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.layers.BasicLayer;
+import org.encog.neural.networks.training.Train;
+import org.encog.neural.networks.training.propagation.back.Backpropagation;
+import org.encog.util.Format;
+import org.encog.util.Stopwatch;
+import org.encog.util.logging.Logging;
+
+public class SimpleBenchmark {
+
+
+ public static final int ROW_COUNT = 100000;
+ public static final int INPUT_COUNT = 10;
+ public static final int OUTPUT_COUNT = 1;
+ public static final int HIDDEN_COUNT = 20;
+ public static final int ITERATIONS = 10;
+
+ public static void BenchmarkEncog(double[][] input, double[][] output)
+ {
+ BasicNetwork network = new BasicNetwork();
+ network.addLayer(new BasicLayer(new ActivationSigmoid(), true, input[0].length));
+ network.addLayer(new BasicLayer(new ActivationSigmoid(), true, HIDDEN_COUNT));
+ network.addLayer(new BasicLayer(new ActivationSigmoid(), false, output[0].length));
+ network.getStructure().finalizeStructure();
+ network.reset();
+
+ NeuralDataSet trainingSet = new BasicNeuralDataSet(input, output);
+
+ // train the neural network
+ Train train = new Backpropagation(network, trainingSet,
+ 0.7, 0.7);
+
+ Stopwatch sw = new Stopwatch();
+ sw.start();
+ // run epoch of learning procedure
+ for (int i = 0; i < ITERATIONS; i++)
+ {
+ train.iteration();
+ }
+ sw.stop();
+
+ System.out.println("Encog:" + Format.formatInteger((int)sw.getElapsedMilliseconds()) + "ms" );
+ }
+
+ public static void BenchmarkEncogFlat(double[][] input, double[][] output)
+ {
+ FlatNetwork network = new FlatNetwork(input[0].length, HIDDEN_COUNT, 0, output[0].length, false);
+ network.randomize();
+ BasicNeuralDataSet trainingSet = new BasicNeuralDataSet(input, output);
+
+ TrainFlatNetworkBackPropagation train = new TrainFlatNetworkBackPropagation(network, trainingSet, 0.7, 0.7);
+
+ double[] a = new double[2];
+ double[] b = new double[1];
+
+ Stopwatch sw = new Stopwatch();
+ sw.start();
+ // run epoch of learning procedure
+ for (int i = 0; i < ITERATIONS; i++)
+ {
+ train.iteration();
+ }
+ sw.stop();
+
+ System.out.println("EncogFlat:" + Format.formatInteger((int)sw.getElapsedMilliseconds()) + "ms" );
+ }
+
+
+ static double[][] Generate(int rows, int columns)
+ {
+ double[][] result = new double[rows][columns];
+
+ for (int i = 0; i < rows; i++)
+ {
+ for (int j = 0; j < columns; j++)
+ {
+ result[i][j] = Math.random();
+ }
+ }
+
+ return result;
+ }
+
+
+
+ public static void main(String[] args)
+ {
+ Logging.stopConsoleLogging();
+ // initialize input and output values
+ double[][] input = Generate(ROW_COUNT, INPUT_COUNT);
+ double[][] output = Generate(ROW_COUNT, OUTPUT_COUNT);
+
+ BenchmarkEncog(input, output);
+ BenchmarkEncogFlat(input, output);
+ }
+}
View
72 src/main/java/org/encog/examples/neural/benchmark/ThreadCount.java
@@ -0,0 +1,72 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.benchmark;
+
+import org.encog.neural.data.NeuralDataSet;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.layers.BasicLayer;
+import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
+import org.encog.util.benchmark.RandomTrainingFactory;
+import org.encog.util.logging.Logging;
+
+public class ThreadCount {
+
+ public static final int INPUT_COUNT = 40;
+ public static final int HIDDEN_COUNT = 60;
+ public static final int OUTPUT_COUNT = 20;
+
+ public static void perform(int thread)
+ {
+ long start = System.currentTimeMillis();
+ final BasicNetwork network = new BasicNetwork();
+ network.addLayer(new BasicLayer(MultiBench.INPUT_COUNT));
+ network.addLayer(new BasicLayer(MultiBench.HIDDEN_COUNT));
+ network.addLayer(new BasicLayer(MultiBench.OUTPUT_COUNT));
+ network.getStructure().finalizeStructure();
+ network.reset();
+
+ final NeuralDataSet training = RandomTrainingFactory.generate(1000,50000,
+ INPUT_COUNT, OUTPUT_COUNT, -1, 1);
+
+ ResilientPropagation rprop = new ResilientPropagation(network,training);
+ rprop.setNumThreads(thread);
+ for(int i=0;i<5;i++)
+ {
+ rprop.iteration();
+ }
+ long stop = System.currentTimeMillis();
+ System.out.println("Result with " + thread + " was " + (stop-start));
+ }
+
+
+
+ public static void main(String[] args)
+ {
+ Logging.stopConsoleLogging();
+ for(int i=1;i<16;i++)
+ {
+ perform(i);
+ }
+ }
+}
View
96 src/main/java/org/encog/examples/neural/benchmark/WeightInitialization.java
@@ -0,0 +1,96 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.benchmark;
+
+import org.encog.mathutil.randomize.FanInRandomizer;
+import org.encog.mathutil.randomize.GaussianRandomizer;
+import org.encog.mathutil.randomize.NguyenWidrowRandomizer;
+import org.encog.mathutil.randomize.Randomizer;
+import org.encog.mathutil.randomize.RangeRandomizer;
+import org.encog.neural.data.NeuralDataSet;
+import org.encog.neural.data.basic.BasicNeuralDataSet;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;
+import org.encog.util.logging.Logging;
+import org.encog.util.simple.EncogUtility;
+
+/**
+ * There are several ways to init the weights in an Encog neural network. This
+ * example benhmarks each of the methods that Encog offers. A simple neural
+ * network is created for the XOR operator and is trained a number of times with
+ * each of the randomizers. The score for each randomizer is display, the score
+ * is the average amount of error improvement, higher is better.
+ */
+public class WeightInitialization {
+
+ public static final int SAMPLE_SIZE = 1000;
+ public static final int ITERATIONS = 50;
+
+ public static double XOR_INPUT[][] = { { 0.0, 0.0 }, { 1.0, 0.0 },
+ { 0.0, 1.0 }, { 1.0, 1.0 } };
+
+ public static double XOR_IDEAL[][] = { { 0.0 }, { 1.0 }, { 1.0 }, { 0.0 } };
+
+ public static double evaluate(BasicNetwork network, NeuralDataSet training) {
+ ResilientPropagation rprop = new ResilientPropagation(network, training);
+ double startingError = network.calculateError(training);
+ for (int i = 0; i < ITERATIONS; i++) {
+ rprop.iteration();
+ }
+ double finalError = network.calculateError(training);
+ return startingError - finalError;
+ }
+
+ public static double evaluateRandomizer(Randomizer randomizer,
+ BasicNetwork network, NeuralDataSet training) {
+ double total = 0;
+ for (int i = 0; i < SAMPLE_SIZE; i++) {
+ randomizer.randomize(network);
+ total += evaluate(network, training);
+ }
+ return total / SAMPLE_SIZE;
+ }
+
+ public static void main(final String args[]) {
+
+ Logging.stopConsoleLogging();
+ RangeRandomizer rangeRandom = new RangeRandomizer(-1, 1);
+ NguyenWidrowRandomizer nwrRandom = new NguyenWidrowRandomizer(-1, 1);
+ FanInRandomizer fanRandom = new FanInRandomizer();
+ GaussianRandomizer gaussianRandom = new GaussianRandomizer(0, 1);
+
+ BasicNeuralDataSet training = new BasicNeuralDataSet(XOR_INPUT,
+ XOR_IDEAL);
+ BasicNetwork network = EncogUtility.simpleFeedForward(2, 10, 0, 1, true);
+
+ System.out.println("Range random: "
+ + evaluateRandomizer(rangeRandom, network, training));
+ System.out.println("Nguyen-Widrow: "
+ + evaluateRandomizer(nwrRandom, network, training));
+ System.out.println("Fan-In: "
+ + evaluateRandomizer(fanRandom, network, training));
+ System.out.println("Gaussian: "
+ + evaluateRandomizer(gaussianRandom, network, training));
+ }
+}
View
175 src/main/java/org/encog/examples/neural/boltzmann/BoltzTSP.java
@@ -0,0 +1,175 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.boltzmann;
+
+import org.encog.neural.data.bipolar.BiPolarNeuralData;
+import org.encog.neural.thermal.BoltzmannMachine;
+
/**
 * Solves a small traveling-salesman problem with a Boltzmann machine.
 * Neuron (tour, city) = tour*NUM_CITIES + city is active when the given
 * city is visited at the given tour position.
 */
public class BoltzTSP {

	public static final int NUM_CITIES = 10;
	public static final int NEURON_COUNT = NUM_CITIES * NUM_CITIES;

	// penalty weight used to discourage invalid tours (two cities at one
	// stop, or one city at two stops)
	private double gamma = 7;
	// distance[i][j] = Euclidean distance between cities i and j
	private double[][] distance;

	/** Square a value. */
	public double sqr(double x) {
		return x * x;
	}

	/**
	 * Place NUM_CITIES cities evenly on a unit circle and precompute the
	 * pairwise distance matrix.
	 */
	public void createCities() {
		double x1, x2, y1, y2;
		double alpha1, alpha2;

		this.distance = new double[NUM_CITIES][NUM_CITIES];

		for (int n1 = 0; n1 < NUM_CITIES; n1++) {
			for (int n2 = 0; n2 < NUM_CITIES; n2++) {
				alpha1 = ((double) n1 / NUM_CITIES) * 2 * Math.PI;
				alpha2 = ((double) n2 / NUM_CITIES) * 2 * Math.PI;
				x1 = Math.cos(alpha1);
				y1 = Math.sin(alpha1);
				x2 = Math.cos(alpha2);
				y2 = Math.sin(alpha2);
				distance[n1][n2] = Math.sqrt(sqr(x1 - x2) + sqr(y1 - y2));
			}
		}
	}

	/**
	 * Check that the network state is a valid tour: exactly one city per
	 * stop and exactly one stop per city.
	 *
	 * @param data The current bipolar state of the network.
	 * @return True if the state encodes a valid tour.
	 */
	public boolean isValidTour(BiPolarNeuralData data) {
		int cities, stops;

		for (int n1 = 0; n1 < NUM_CITIES; n1++) {
			cities = 0;   // cities visited at stop n1 (row n1)
			stops = 0;    // stops at which city n1 is visited (column n1)
			for (int n2 = 0; n2 < NUM_CITIES; n2++) {
				if (data.getBoolean(n1 * NUM_CITIES + n2)) {
					if (++cities > 1)
						return false;
				}
				if (data.getBoolean(n2 * NUM_CITIES + n1)) {
					if (++stops > 1)
						return false;
				}
			}
			if ((cities != 1) || (stops != 1))
				return false;
		}
		return true;
	}

	/**
	 * Sum the distances between consecutive stops of the encoded tour
	 * (including the wrap-around from the last stop to the first).
	 * Assumes the state is a valid tour (see isValidTour); otherwise the
	 * inner loops may not find an active city for a stop.
	 *
	 * @param data The current bipolar state of the network.
	 * @return The total tour length.
	 */
	public double lengthOfTour(BiPolarNeuralData data) {
		double result;
		int n1, n2, n3;

		result = 0;
		for (n1 = 0; n1 < NUM_CITIES; n1++) {
			// n2 = city visited at stop n1
			for (n2 = 0; n2 < NUM_CITIES; n2++) {
				if (data.getBoolean(((n1) % NUM_CITIES) * NUM_CITIES + n2))
					break;
			}
			// n3 = city visited at the next stop (wrapping around)
			for (n3 = 0; n3 < NUM_CITIES; n3++) {
				if (data.getBoolean(((n1 + 1) % NUM_CITIES) * NUM_CITIES + n3))
					break;
			}
			result += distance[n2][n3];
		}
		return result;
	}

	/**
	 * Format the network state as a human-readable tour, e.g.
	 * "[3] -> [7] -> ..." — one bracketed group of cities per stop.
	 */
	String displayTour(BiPolarNeuralData data) {
		StringBuilder result = new StringBuilder();

		int n1, n2;
		boolean first;

		for (n1 = 0; n1 < NUM_CITIES; n1++) {
			first = true;
			result.append("[");
			for (n2 = 0; n2 < NUM_CITIES; n2++) {
				if (data.getBoolean(n1 * NUM_CITIES + n2)) {
					if (first) {
						first = false;
						result.append(n2);
					} else {
						result.append(", " + n2);
					}
				}
			}
			result.append("]");
			if (n1 != NUM_CITIES - 1) {
				result.append(" -> ");
			}
		}
		return result.toString();
	}

	/**
	 * Encode the TSP into the Boltzmann machine's weights and thresholds:
	 * -gamma between neurons sharing a stop or a city (constraint
	 * penalty), minus the city distance between adjacent stops, and a
	 * threshold of -gamma/2 on every neuron.
	 */
	public void calculateWeights(BoltzmannMachine logic) {

		for (int sourceTour = 0; sourceTour < NUM_CITIES; sourceTour++) {
			for (int sourceCity = 0; sourceCity < NUM_CITIES; sourceCity++) {
				int sourceIndex = sourceTour * NUM_CITIES + sourceCity;
				for (int targetTour = 0; targetTour < NUM_CITIES; targetTour++) {
					for (int targetCity = 0; targetCity < NUM_CITIES; targetCity++) {
						int targetIndex = targetTour * NUM_CITIES + targetCity;
						double weight = 0;
						if (sourceIndex != targetIndex) {
							// stops immediately before/after the target stop
							int predTargetTour = (targetTour == 0 ? NUM_CITIES - 1 : targetTour - 1);
							int succTargetTour = (targetTour == NUM_CITIES - 1 ? 0 : targetTour + 1);
							if ((sourceTour == targetTour) || (sourceCity == targetCity))
								weight = -gamma;
							else if ((sourceTour == predTargetTour) || (sourceTour == succTargetTour))
								weight = -distance[sourceCity][targetCity];
						}
						logic.setWeight(sourceIndex, targetIndex, weight);
					}
				}
				logic.getThreshold()[sourceIndex] = -gamma / 2;
			}
		}
	}


	/**
	 * Run simulated annealing: start hot (temperature 100), cool by 1%
	 * per step, and stop at the first valid tour.
	 */
	public void run() {
		BoltzmannMachine boltz = new BoltzmannMachine(NEURON_COUNT);

		createCities();
		calculateWeights(boltz);

		boltz.setTemperature(100);
		do {
			boltz.establishEquilibrium();
			System.out.println(boltz.getTemperature()+" : "+displayTour(boltz.getCurrentState()));
			boltz.decreaseTemperature(0.99);
		} while (!isValidTour(boltz.getCurrentState()));

		System.out.println("Final Length: " + this.lengthOfTour(boltz.getCurrentState()) );
	}

	public static void main(String[] args) {
		BoltzTSP program = new BoltzTSP();
		program.run();
	}

}
View
392 src/main/java/org/encog/examples/neural/cpn/RocketCPN.java
@@ -0,0 +1,392 @@
+/*
+ * Encog(tm) Examples v2.6
+ * http://www.heatonresearch.com/encog/
+ * http://code.google.com/p/encog-java/
+
+ * Copyright 2008-2010 Heaton Research, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * For more information on Heaton Research copyrights, licenses
+ * and trademarks visit:
+ * http://www.heatonresearch.com/copyright
+ */
+package org.encog.examples.neural.cpn;
+
+import org.encog.neural.cpn.CPN;
+import org.encog.neural.cpn.training.TrainInstar;
+import org.encog.neural.cpn.training.TrainOutstar;
+import org.encog.neural.data.NeuralData;
+import org.encog.neural.data.NeuralDataSet;
+import org.encog.neural.data.basic.BasicNeuralData;
+import org.encog.neural.data.basic.BasicNeuralDataSet;
+import org.encog.neural.networks.BasicNetwork;
+import org.encog.neural.networks.training.Train;
+import org.encog.neural.pattern.CPNPattern;
+
+public class RocketCPN {
+
+ // Dimensions of each ASCII pattern grid (characters per row / rows per pattern).
+ public static final int WIDTH = 11;
+ public static final int HEIGHT = 11;
+
+ // Training patterns: eight 11x11 ASCII images of a rocket ('O' = active
+ // pixel, ' ' = inactive). Pattern index n is paired by prepareInput()
+ // with the ideal output (sin, cos) of n * 45 degrees — presumably each
+ // entry is the rocket rotated by that angle; confirm against the images.
+ public static final String[][] PATTERN1 = { {
+ " ",
+ " ",
+ " O ",
+ " O ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " OOOOO ",
+ " OOOOO ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " O ",
+ " O ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " OOOOO ",
+ "OOOOO ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " ",
+ " OO ",
+ " OOOOO ",
+ " OOOOOOO ",
+ " OOOOO ",
+ " OO ",
+ " ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ "OOOOO ",
+ " OOOOO ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " O ",
+ " O ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " OOOOO ",
+ " OOOOO ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " O ",
+ " O ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " OOOOO",
+ " OOOOO ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " O ",
+ " O ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " ",
+ " OO ",
+ " OOOOO ",
+ " OOOOOOO ",
+ " OOOOO ",
+ " OO ",
+ " ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " O ",
+ " O ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " OOOOO ",
+ " OOOOO",
+ " ",
+ " " } };
+
+ // Second pattern set: distorted/shifted variants of the rocket images,
+ // flattened into input2 by prepareInput() alongside PATTERN1.
+ // NOTE(review): unlike PATTERN1 this is a package-private instance field,
+ // not "public static final" — looks like an oversight; confirm before
+ // changing, since it is only referenced from instance methods here.
+ String[][] PATTERN2 = { {
+ " ",
+ " ",
+ " O ",
+ " O ",
+ " O ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " OOOOO ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " O ",
+ " O ",
+ " O O ",
+ " O O ",
+ " O O ",
+ " O O ",
+ " O O ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " ",
+ " O ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " OOOOO ",
+ " ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " ",
+ " ",
+ " O ",
+ " O ",
+ " O ",
+ " OOO ",
+ " ",
+ " ",
+ " " },
+
+ { " ",
+ " O ",
+ " O ",
+ " O ",
+ " OOO ",
+ " OO ",
+ " OOO O",
+ " OOOO ",
+ " OOOOO ",
+ " ",
+ " O " },
+
+ { " ",
+ " ",
+ " O ",
+ " O ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " OOOOO ",
+ " OOOOO ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " O ",
+ " O ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " OOOOO ",
+ " OOOOO ",
+ " ",
+ " " },
+
+ { " ",
+ " ",
+ " O ",
+ " O ",
+ " OOO ",
+ " OOO ",
+ " OOO ",
+ " OOOOO ",
+ "OOOOO ",
+ " ",
+ " " } };
+ // Activation values for an 'O' pixel and a blank pixel respectively.
+ public static final double HI = 1;
+ public static final double LO = 0;
+
+
+ // Flattened, unit-normalized activation vectors built from PATTERN1/PATTERN2,
+ // and the (sin, cos) ideal outputs for each PATTERN1 entry (see prepareInput).
+ private double[][] input1;
+ private double[][] input2;
+ private double[][] ideal1;
+
+ // Layer sizes: WIDTH*HEIGHT inputs, one instar per training pattern,
+ // two outstars (the sin and cos outputs).
+ private int inputNeurons;
+ private int instarNeurons;
+ private int outstarNeurons;
+
+ /**
+ * Builds the training arrays from the ASCII patterns. Each HEIGHT x WIDTH
+ * grid is flattened to a vector of HI/LO activations ('O' maps to HI),
+ * both vector sets are normalized to unit length, and pattern n is paired
+ * with the ideal output (sin, cos) of n * 45 degrees.
+ */
+ public void prepareInput()
+ {
+ this.inputNeurons = WIDTH * HEIGHT;
+ this.instarNeurons = PATTERN1.length;
+ this.outstarNeurons = 2;
+
+ this.input1 = new double[PATTERN1.length][this.inputNeurons];
+ this.input2 = new double[PATTERN2.length][this.inputNeurons];
+ this.ideal1 = new double[PATTERN1.length][this.outstarNeurons];
+
+ // Flatten each 2-D ASCII grid into a 1-D activation vector.
+ for (int n = 0; n < PATTERN1.length; n++) {
+ for (int row = 0; row < HEIGHT; row++) {
+ for (int col = 0; col < WIDTH; col++) {
+ final int idx = row * WIDTH + col;
+ this.input1[n][idx] = (PATTERN1[n][row].charAt(col) == 'O') ? HI : LO;
+ this.input2[n][idx] = (PATTERN2[n][row].charAt(col) == 'O') ? HI : LO;
+ }
+ }
+ }
+ normalizeInput();
+ // Encode pattern index n as an angle in 45-degree (0.25 * PI) steps.
+ for (int n = 0; n < PATTERN1.length; n++) {
+ this.ideal1[n][0] = Math.sin(n * 0.25 * Math.PI);
+ this.ideal1[n][1] = Math.cos(n * 0.25 * Math.PI);
+ }
+ }
+
+ /**
+ * Squares a value.
+ *
+ * @param value the number to square
+ * @return value multiplied by itself
+ */
+ public double sqr(double value)
+ {
+ return value * value;
+ }
+
+
+ void normalizeInput()
+ {
+ int n,i;
+ double length1, length2;
+
+ for (n=0; n<PATTERN1.length; n++) {
+ length1 = 0;
+ length2 = 0;
+ for (i=0; i<this.inputNeurons; i++) {
+ length1 += sqr(this.input1[n][i]);
+ length2 += sqr(this.input2[n][i]);
+ }
+ length1 = Math.sqrt(length1);
+ length2 = Math.sqrt(length2);
+
+ for (i=0; i<this.inputNeurons; i++) {
+ input1[n][i] /= length1;
+ input2[n][i] /= length2;
+ }
+ }
+ }
+
+ /**
+ * Constructs the counter-propagation network sized to the prepared data:
+ * one input per pixel, one instar per training pattern, two outstars,
+ * with a single winner. NOTE(review): relies on the layer-size fields —
+ * presumably prepareInput() must run first; confirm against callers.
+ *
+ * @return a new, untrained CPN instance
+ */
+ public CPN createNetwork()
+ {
+ return new CPN(this.inputNeurons, this.instarNeurons, this.outstarNeurons, 1);
+ }
+
+ public void trainInstar(CPN network,NeuralDataSet training)
+ {