Package org.encog.neural.networks

Examples of org.encog.neural.networks.BasicNetwork
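
All of the training snippets below follow the same pattern: build an XOR training set, create an untrained BasicNetwork, construct a trainer, and hand it to a helper that runs a few iterations and asserts that the error improves. The NetworkUtil helper itself is not shown on this page; a minimal sketch of what it could look like (the layer sizes, iteration count, and improvement check are assumptions, not Encog's actual test code):

public class NetworkUtil {

  /** An untrained 2-4-1 feedforward network suitable for the XOR problem. */
  public static BasicNetwork createXORNetworkUntrained() {
    BasicNetwork network = EncogUtility.simpleFeedForward(2, 4, 0, 1, false);
    network.reset(); // randomize the weights
    return network;
  }

  /** Run a few iterations and assert the error improved by at least minImprove. */
  public static void testTraining(MLTrain train, double minImprove) {
    train.iteration();
    double error1 = train.getError();
    for (int i = 0; i < 10; i++) {
      train.iteration();
    }
    double error2 = train.getError();
    Assert.assertTrue("error did not improve enough", (error1 - error2) / error1 >= minImprove);
  }
}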


  @Test
  public void testRPROP() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain rprop = new ResilientPropagation(network, trainingData);
    NetworkUtil.testTraining(rprop,0.03);
  }


  @Test
  public void testLMA() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain lma = new LevenbergMarquardtTraining(network, trainingData);
    NetworkUtil.testTraining(lma,0.03);
  }

  @Test
  public void testBPROP() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();

    // learning rate 0.7, momentum 0.9
    MLTrain bprop = new Backpropagation(network, trainingData, 0.7, 0.9);
    NetworkUtil.testTraining(bprop,0.01);
  }

  @Test
  public void testManhattan() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain manhattan = new ManhattanPropagation(network, trainingData, 0.01);  // fixed update step of 0.01
    NetworkUtil.testTraining(manhattan,0.01);
  }

  @Test
  public void testSCG() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    MLTrain scg = new ScaledConjugateGradient(network, trainingData);
    NetworkUtil.testTraining(scg,0.04);
  }

 
  @Test
  public void testAnneal() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    CalculateScore score = new TrainingSetScore(trainingData);
    // start temperature 10, stop temperature 2, 100 cycles per iteration
    NeuralSimulatedAnnealing anneal = new NeuralSimulatedAnnealing(network,score,10,2,100);
    NetworkUtil.testTraining(anneal,0.01);
  }

 
  @Test
  public void testGenetic() throws Throwable
  {
    MLDataSet trainingData = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);   
    BasicNetwork network = NetworkUtil.createXORNetworkUntrained();
    CalculateScore score = new TrainingSetScore(trainingData);
    // population of 500, 10% mutation, 25% of the population mated each generation
    NeuralGeneticAlgorithm genetic = new NeuralGeneticAlgorithm(network, new RangeRandomizer(-1,1), score, 500,0.1,0.25);
    NetworkUtil.testTraining(genetic,0.00001);
  }
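
Both this test and testAnneal optimize a CalculateScore rather than following a gradient: TrainingSetScore simply scores a candidate network by its error on the training data. Outside the testTraining() helper, either trainer can be driven with the ordinary MLTrain loop, for example (the error target and iteration cap are arbitrary):

    int epoch = 0;
    do {
      genetic.iteration();
      epoch++;
    } while (genetic.getError() > 0.01 && epoch < 5000);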

 
  public void testFactoryFeedforward() {
    String architecture = "?:B->TANH->3->LINEAR->?:B";
    MLMethodFactory factory = new MLMethodFactory();
    BasicNetwork network = (BasicNetwork)factory.create(MLMethodFactory.TYPE_FEEDFORWARD, architecture, 1, 4);
    Assert.assertTrue(network.isLayerBiased(0));
    Assert.assertFalse(network.isLayerBiased(1));
    Assert.assertTrue(network.isLayerBiased(2));
    Assert.assertEquals(3, network.getLayerCount());
    Assert.assertTrue(network.getActivation(0) instanceof ActivationLinear );
    Assert.assertTrue(network.getActivation(1) instanceof ActivationTANH );
    Assert.assertTrue(network.getActivation(2) instanceof ActivationLinear );
    Assert.assertEquals(18,network.encodedArrayLength());
    Assert.assertEquals(1,network.getLayerNeuronCount(0));
    Assert.assertEquals(3,network.getLayerNeuronCount(1));
    Assert.assertEquals(4,network.getLayerNeuronCount(2));
  }
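
The architecture string is read left to right: each "?" is replaced by the input or output count passed to create() (here 1 and 4), ":B" marks the layer as biased, and the TANH/LINEAR tokens set the activation of the layer that follows them, which is what the getActivation() assertions above verify. A short usage sketch (the input value is arbitrary):

    MLMethodFactory factory = new MLMethodFactory();
    BasicNetwork network = (BasicNetwork)factory.create(MLMethodFactory.TYPE_FEEDFORWARD, "?:B->TANH->3->LINEAR->?:B", 1, 4);
    MLData output = network.compute(new BasicMLData(new double[] { 0.5 }));
    for (int i = 0; i < output.size(); i++) {
      System.out.println("output[" + i + "] = " + output.getData(i));
    }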

public class TestPruneSelective extends TestCase {
 
  private BasicNetwork obtainNetwork()
  {
    BasicNetwork network = EncogUtility.simpleFeedForward(2,3,0,4,false);
    double[] weights = { 1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25 };
    NetworkCODEC.arrayToNetwork(weights, network);
   
    Assert.assertEquals(1.0, network.getWeight(1, 0, 0),0.01);   
    Assert.assertEquals(2.0, network.getWeight(1, 1, 0),0.01);
    Assert.assertEquals(3.0, network.getWeight(1, 2, 0),0.01);
    Assert.assertEquals(4.0, network.getWeight(1, 3, 0),0.01);
   
    Assert.assertEquals(5.0, network.getWeight(1, 0, 1),0.01);
    Assert.assertEquals(6.0, network.getWeight(1, 1, 1),0.01);
    Assert.assertEquals(7.0, network.getWeight(1, 2, 1),0.01);
    Assert.assertEquals(8.0, network.getWeight(1, 3, 1),0.01);
   
    Assert.assertEquals(9.0, network.getWeight(1, 0, 2),0.01);
    Assert.assertEquals(10.0, network.getWeight(1, 1, 2),0.01);
    Assert.assertEquals(11.0, network.getWeight(1, 2, 2),0.01);
    Assert.assertEquals(12.0, network.getWeight(1, 3, 2),0.01);
   
    Assert.assertEquals(13.0, network.getWeight(1, 0, 3),0.01);
    Assert.assertEquals(14.0, network.getWeight(1, 1, 3),0.01);
    Assert.assertEquals(15.0, network.getWeight(1, 2, 3),0.01);
    Assert.assertEquals(16.0, network.getWeight(1, 3, 3),0.01);
   
    Assert.assertEquals(17.0, network.getWeight(0, 0, 0),0.01);
    Assert.assertEquals(18.0, network.getWeight(0, 1, 0),0.01);
    Assert.assertEquals(19.0, network.getWeight(0, 2, 0),0.01);
    Assert.assertEquals(20.0, network.getWeight(0, 0, 1),0.01);
    Assert.assertEquals(21.0, network.getWeight(0, 1, 1),0.01);
    Assert.assertEquals(22.0, network.getWeight(0, 2, 1),0.01);

    Assert.assertEquals(23.0, network.getWeight(0, 0, 2),0.01);
    Assert.assertEquals(24.0, network.getWeight(0, 1, 2),0.01);
    Assert.assertEquals(25.0, network.getWeight(0, 2, 2),0.01);

   
    return network;
  }
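
The getWeight(fromLayer, fromNeuron, toNeuron) assertions above confirm the order in which NetworkCODEC.arrayToNetwork() laid down the 25 weights: the (3+1)*4 = 16 hidden-to-output weights first, then the (2+1)*3 = 9 input-to-hidden weights, the extra "from" neuron in each layer being the bias. A round-trip sketch, purely illustrative and not part of the original test:

    BasicNetwork network = obtainNetwork();
    double[] encoded = NetworkCODEC.networkToArray(network);
    Assert.assertEquals(25, encoded.length);
    Assert.assertEquals(1.0, encoded[0], 0.01);  // same layout arrayToNetwork consumed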

  public void testAdalineNet() throws Throwable
  {
    ADALINEPattern pattern = new ADALINEPattern();
    pattern.setInputNeurons(2);
    pattern.setOutputNeurons(1);
    BasicNetwork network = (BasicNetwork)pattern.generate();
   
    // train it
    MLDataSet training = new BasicMLDataSet(XOR.XOR_INPUT,XOR.XOR_IDEAL);
    MLTrain train = new TrainAdaline(network,training,0.01);  // learning rate 0.01
    NetworkUtil.testTraining(train,0.01);
  }
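After training, the ADALINE network could be checked against the same data set; a brief sketch (not part of the snippet above):

    for (MLDataPair pair : training) {
      MLData output = network.compute(pair.getInput());
      System.out.println(pair.getInput().getData(0) + "," + pair.getInput().getData(1)
          + " -> " + output.getData(0) + " (ideal " + pair.getIdeal().getData(0) + ")");
    }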
