Package com.github.neuralnetworks.architecture

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl


    @Test
    public void testValuesProvider() {
        ValuesProvider vp = new ValuesProvider();

        NeuralNetworkImpl nn = new NeuralNetworkImpl();
        Layer i = new Layer();
        Layer h = new Layer();
        Layer o = new Layer();

        nn.addLayer(i);

        NNFactory.addFullyConnectedLayer(nn, h, 2, 3, true);
        NNFactory.addFullyConnectedLayer(nn, o, 4, 1, true);

        Matrix im = new Matrix(2, 2);
        vp.addValues(i, im);
        Matrix hm1 = vp.getValues(h, 3);
        Matrix hm2 = new Matrix(4, 2);
        vp.addValues(h, hm2);

        Matrix om = vp.getValues(o);

        assertTrue(im == vp.getValues(i, 2));
        assertTrue(im == vp.getValues(i));
        assertTrue(hm1 == vp.getValues(h, 3));
        assertTrue(hm2 == vp.getValues(h, 4));
        assertTrue(hm1 == vp.getValues(h, nn.getConnection(i, h)));
        assertTrue(hm2 == vp.getValues(h, nn.getConnection(h, o)));
        assertTrue(om == vp.getValues(o, 1));
        assertTrue(om == vp.getValues(o));
        assertTrue(2 == vp.getColumns());
    }
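The test above shows that ValuesProvider hands back the same Matrix instance for a layer however it is looked up (by layer, by row count, or by connection), and that it reports the column count of the first matrix registered via getColumns(). A minimal sketch of that usage, reusing only the calls exercised above; the layer sizes are illustrative assumptions:

    // Sketch only: same ValuesProvider calls as in testValuesProvider.
    ValuesProvider provider = new ValuesProvider();
    NeuralNetworkImpl net = new NeuralNetworkImpl();
    Layer in = new Layer();
    Layer hidden = new Layer();
    net.addLayer(in);
    NNFactory.addFullyConnectedLayer(net, hidden, 2, 3, true);

    // Register a 2x2 matrix for the input layer; getColumns() then reports 2.
    provider.addValues(in, new Matrix(2, 2));

    // Values for other layers are created on demand and cached, so repeated
    // lookups return the same Matrix instance.
    Matrix hiddenValues = provider.getValues(hidden, 3);
    assertTrue(hiddenValues == provider.getValues(hidden, 3));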

    @Test
    public void testRandomInitializer() {
        NeuralNetworkImpl nn = NNFactory.mlp(new int[] { 3, 2 }, true);
        NNRandomInitializer rand = new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.1f, 0.1f), 0.5f);
        rand.initialize(nn);

        // Bias layers get the fixed value 0.5; all other weights fall in [-0.1, 0.1], excluding 0.
        for (Layer l : nn.getLayers()) {
            if (Util.isBias(l)) {
                GraphConnections gc = (GraphConnections) l.getConnections().get(0);
                for (float v : gc.getConnectionGraph().getElements()) {
                    assertEquals(0.5, v, 0f);
                }
            } else {
                GraphConnections gc = (GraphConnections) l.getConnections().get(0);
                for (float v : gc.getConnectionGraph().getElements()) {
                    assertTrue(v >= -0.1f && v <= 0.1f && v != 0);
                }
            }
        }

        // Second initializer: weights drawn from [2, 3], biases from [-2, -1].
        rand = new NNRandomInitializer(new MersenneTwisterRandomInitializer(2f, 3f), new MersenneTwisterRandomInitializer(-2f, -1f));
        rand.initialize(nn);

        for (Layer l : nn.getLayers()) {
            if (Util.isBias(l)) {
                GraphConnections gc = (GraphConnections) l.getConnections().get(0);
                for (float v : gc.getConnectionGraph().getElements()) {
                    assertTrue(v >= -2f && v <= -1f);
                }
            } else {
                GraphConnections gc = (GraphConnections) l.getConnections().get(0);
                for (float v : gc.getConnectionGraph().getElements()) {
                    assertTrue(v >= 2f && v <= 3f);
                }
            }
        }
    }
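As a usage sketch, NNRandomInitializer pairs a weight initializer with either a fixed bias value or a second random initializer, exactly as the test exercises. The ranges and network shape below are illustrative assumptions, not values from the test:

    // Sketch only: weights uniform in [-0.01, 0.01], biases fixed at 0.
    NeuralNetworkImpl net = NNFactory.mlp(new int[] { 4, 3, 2 }, true);
    NNRandomInitializer init = new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f), 0f);
    init.initialize(net);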

    /**
     * Simple backpropagation test with specific values
     */
    @Test
    public void testSigmoidBP() {
        NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 2, 2, 1 }, false);

        FullyConnected c1 = (FullyConnected) mlp.getInputLayer().getConnections().iterator().next();
        Matrix cg1 = c1.getConnectionGraph();
        cg1.set(0.1f, 0, 0);
        cg1.set(0.8f, 0, 1);
        cg1.set(0.4f, 1, 0);
        cg1.set(0.6f, 1, 1);

        FullyConnected c2 = (FullyConnected) mlp.getOutputLayer().getConnections().iterator().next();
        Matrix cg2 = c2.getConnectionGraph();
        cg2.set(0.3f, 0, 0);
        cg2.set(0.9f, 0, 1);

        BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(
                mlp,
                new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }, 1, 1),
                new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }, 1, 1),
                null, null, 1f, 0f, 0f, 0f);
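The hand-set weights and the single training pair (input (0.35, 0.9), target 0.5) make the forward pass easy to check by hand. A sketch of that computation, assuming each row of a connection graph holds the weights feeding one target unit:

    // Forward pass for the weights set above (sigmoid activations, no bias).
    double h1 = 1.0 / (1.0 + Math.exp(-(0.35 * 0.1 + 0.9 * 0.8)));   // ~0.680
    double h2 = 1.0 / (1.0 + Math.exp(-(0.35 * 0.4 + 0.9 * 0.6)));   // ~0.664
    double out = 1.0 / (1.0 + Math.exp(-(h1 * 0.3 + h2 * 0.9)));     // ~0.690
    double error = out - 0.5;                                        // ~0.190, the gap backpropagation reduces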

    /**
     * Simple backpropagation test with specific values
     */
    @Test
    public void testSigmoidBP2() {
        NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 3, 2, 1 }, true);

        List<Connections> c = mlp.getConnections();
        FullyConnected c1 = (FullyConnected) c.get(0);
        Matrix cg1 = c1.getConnectionGraph();
        cg1.set(0.2f, 0, 0);
        cg1.set(0.4f, 0, 1);
        cg1.set(-0.5f, 0, 2);

    @Test
    public void testParallelNetworks() {
        NeuralNetworkImpl mlp = new NeuralNetworkImpl();
        Layer input = new Layer();
        mlp.addLayer(input);

        Layer leaf1 = new Layer();
        FullyConnected fc1 = new FullyConnected(input, leaf1, 2, 3);
        Util.fillArray(fc1.getConnectionGraph().getElements(), 0.1f);
        mlp.addConnection(fc1);

        Layer leaf2 = new Layer();
        FullyConnected fc2 = new FullyConnected(input, leaf2, 2, 3);
        Util.fillArray(fc2.getConnectionGraph().getElements(), 0.2f);
        mlp.addConnection(fc2);

        Layer output = new Layer();
        FullyConnected fc3 = new FullyConnected(leaf1, output, 3, 1);
        Util.fillArray(fc3.getConnectionGraph().getElements(), 0.3f);
        mlp.addConnection(fc3);
        FullyConnected fc4 = new FullyConnected(leaf2, output, 3, 1);
        Util.fillArray(fc4.getConnectionGraph().getElements(), 0.4f);
        mlp.addConnection(fc4);

        mlp.setLayerCalculator(NNFactory.lcWeightedSum(mlp, null));

        Matrix i = new Matrix(new float[] { 2, 2 }, 1);
        Set<Layer> calculated = new HashSet<>();
        calculated.add(mlp.getInputLayer());

        ValuesProvider results = new ValuesProvider();
        results.addValues(input, i);

        Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

        mlp.getLayerCalculator().calculate(mlp, output, calculated, results);

        assertEquals(1.32, results.getValues(output).get(0, 0), 0.000001);
    }
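The expected value 1.32 can be checked by hand: the layer calculator built by lcWeightedSum(mlp, null) computes plain weighted sums, and both parallel branches feed the single output unit:

    // Hand check of the 1.32 assertion; both input units carry the value 2.
    double leaf1Unit = 0.1 * 2 + 0.1 * 2;                       // 0.4 in each of leaf1's 3 units
    double leaf2Unit = 0.2 * 2 + 0.2 * 2;                       // 0.8 in each of leaf2's 3 units
    double out = 3 * 0.3 * leaf1Unit + 3 * 0.4 * leaf2Unit;     // 0.36 + 0.96 = 1.32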

    @Test
    public void testRemoveLayer() {
        NeuralNetworkImpl mlp = NNFactory.mlp(new int[] { 3, 4, 5 }, true);
        // 5 layers: input, hidden and output, plus one bias layer for each non-input layer.
        assertEquals(5, mlp.getLayers().size(), 0);
        Layer currentOutput = mlp.getOutputLayer();
        mlp.removeLayer(mlp.getOutputLayer());
        // Removing the output layer also drops its bias layer, so the count falls from 5 to 3.
        assertEquals(3, mlp.getLayers().size(), 0);
        assertEquals(true, currentOutput != mlp.getOutputLayer());
    }

    @Test
    public void testLayerOrderStrategy() {
        // MLP
        NeuralNetworkImpl mlp = NNFactory.mlp(new int[] { 3, 4, 5 }, true);

        Set<Layer> calculated = new HashSet<Layer>();
        calculated.add(mlp.getInputLayer());
        List<ConnectionCandidate> ccc = new TargetLayerOrderStrategy(mlp, mlp.getOutputLayer(), calculated).order();
        assertEquals(4, ccc.size(), 0);
        Layer l = mlp.getInputLayer();
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        l = l.getConnections().get(0).getOutputLayer();
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));
        assertTrue(ccc.get(2).connection == l.getConnections().get(2));
        l = l.getConnections().get(2).getOutputLayer();
        assertTrue(ccc.get(3).connection == l.getConnections().get(1));

        ccc = new BreadthFirstOrderStrategy(mlp, mlp.getOutputLayer()).order();
        assertEquals(4, ccc.size(), 0);
        l = mlp.getOutputLayer();
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));

        l = l.getConnections().get(0).getInputLayer();
        assertTrue(ccc.get(2).connection == l.getConnections().get(0));
        assertTrue(ccc.get(3).connection == l.getConnections().get(1));

        // Simple MLP
        mlp = NNFactory.mlp(new int[] { 3, 4 }, true);

        calculated = new HashSet<Layer>();
        calculated.add(mlp.getInputLayer());
        ccc = new TargetLayerOrderStrategy(mlp, mlp.getOutputLayer(), calculated).order();
        assertEquals(2, ccc.size(), 0);
        l = mlp.getOutputLayer();
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));

        ccc = new BreadthFirstOrderStrategy(mlp, mlp.getOutputLayer()).order();
        assertEquals(2, ccc.size(), 0);
        l = mlp.getOutputLayer();
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));

        // CNN
        NeuralNetworkImpl cnn = NNFactory.convNN(new int[][] { { 3, 3, 2 }, { 2, 2, 1, 1 } }, true);

        calculated = new HashSet<Layer>();
        calculated.add(cnn.getInputLayer());
        ccc = new TargetLayerOrderStrategy(cnn, cnn.getOutputLayer(), calculated).order();
        l = cnn.getOutputLayer();
        assertEquals(2, ccc.size(), 0);
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));

        ccc = new BreadthFirstOrderStrategy(cnn, cnn.getOutputLayer()).order();
        l = cnn.getOutputLayer();
        assertEquals(2, ccc.size(), 0);
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));
    }
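Both strategies return a List<ConnectionCandidate> describing the order in which connections are to be evaluated. A minimal sketch of consuming that list, using only the order() call and the public connection field shown in the assertions above (typing it as Connections is assumed, consistent with the comparisons against getConnections()):

    // Sketch only: iterate the calculation order for the output layer.
    NeuralNetworkImpl net = NNFactory.mlp(new int[] { 3, 4, 5 }, true);
    List<ConnectionCandidate> order = new BreadthFirstOrderStrategy(net, net.getOutputLayer()).order();
    for (ConnectionCandidate candidate : order) {
        // Each candidate wraps one connection to be calculated.
        Connections connection = candidate.connection;
        System.out.println(connection);
    }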

    @Test
    public void testCNNConstruction() {
        NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 32, 32, 1 }, { 5, 5, 6, 1 }, { 2, 2 }, { 5, 5, 16, 1 }, { 2, 2 }, { 5, 5, 120, 1 }, { 84 }, { 10 } }, true);
        assertEquals(13, nn.getLayers().size(), 0);

        Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
        Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
        assertEquals(28, cc.getOutputFeatureMapRows(), 0);
        assertEquals(28, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(6, cc.getOutputFilters(), 0);

        Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);
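The feature map assertions in this and the two construction tests below follow the usual valid-convolution sizing, output = (input - kernel) / stride + 1: a 32x32 input with a 5x5 kernel at stride 1 gives 28x28 here, 28x28 with a 5x5 kernel gives 24x24, and 6x6 with a 3x3 kernel at stride 2 gives 2x2. A small sketch of that arithmetic; the helper below is illustrative, not part of the library:

    // Illustrative helper, not a library method: valid-convolution output size.
    static int featureMapSize(int inputSize, int kernelSize, int stride) {
        return (inputSize - kernelSize) / stride + 1;
    }

    // featureMapSize(32, 5, 1) == 28  -> testCNNConstruction
    // featureMapSize(28, 5, 1) == 24  -> testCNNConstruction2
    // featureMapSize(6, 3, 2)  == 2   -> testCNNConstruction3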

    @Test
    public void testCNNConstruction2() {
        NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 5, 5, 50, 1 }, { 2, 2 }, { 500 }, { 10 } }, true);
        assertEquals(11, nn.getLayers().size(), 0);

        Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
        Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
        assertEquals(24, cc.getOutputFeatureMapRows(), 0);
        assertEquals(24, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(20, cc.getOutputFilters(), 0);

        Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);

    @Test
    public void testCNNConstruction3() {
        NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 6, 6, 1 }, { 3, 3, 2, 2 }, { 2, 2 } }, true);
        assertEquals(4, nn.getLayers().size(), 0);

        Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
        Layer l = nn.getInputLayer().getConnections().get(0).getOutputLayer();
        assertEquals(2, cc.getOutputFeatureMapRows(), 0);
        assertEquals(2, cc.getOutputFeatureMapColumns(), 0);
        assertEquals(2, cc.getOutputFilters(), 0);

        Subsampling2DConnection sc = (Subsampling2DConnection) l.getConnections().get(2);