Examples of NeuralNetworkImpl


Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

  assertEquals(1, sc.getOutputFeatureMapColumns(), 0);
  assertEquals(2, sc.getFilters(), 0);
    }

    @Test
    public void testCNNLayerCalculatorConstruction() {
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 5, 5, 50, 1 }, { 2, 2 }, {500}, {10} }, true);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  // feedforward cc
  LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  Layer l = nn.getInputLayer();

  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  // backpropagation cc
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, null, null, null, null, 0.01f, 0.5f, 0f, 0f);
  BackPropagationLayerCalculatorImpl bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);   // bias

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) == null);

  // simple convolutional network
  nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 1, 1 }, {10} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSubsampling2D);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSigmoid);

  bpt = TrainerFactory.backPropagation(nn, null, null, new MultipleNeuronsOutputError(), null, 0.02f, 0.5f, 0f, 0f);
  bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);
    }
View Full Code Here
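
The int[][] argument to NNFactory.convNN above encodes the whole architecture, one row per layer. An annotated reading of the first call (the meaning of each row is inferred from how the test inspects the resulting network, so treat it as an assumption rather than a documented contract):

  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] {
    { 28, 28, 1 },   // input: 28x28 image, 1 channel
    { 5, 5, 20, 1 }, // convolution: 5x5 kernel, 20 feature maps, stride 1
    { 2, 2 },        // 2x2 subsampling (turned into max pooling by NNFactory.lcMaxPooling)
    { 5, 5, 50, 1 }, // convolution: 5x5 kernel, 50 feature maps, stride 1
    { 2, 2 },        // 2x2 subsampling
    { 500 },         // fully connected layer, 500 units
    { 10 }           // fully connected output layer, 10 units
  }, true);          // true adds bias connections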

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

  assertTrue(lc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);
    }

    @Test
    public void testConvolutions() {
  NeuralNetworkImpl nn = new NeuralNetworkImpl();
  Conv2DConnection c = new Conv2DConnection(new Layer(), new Layer(), 3, 3, 2, 2, 2, 1, 1);
  nn.addConnection(c);
  c.getWeights()[0] = 1;
  c.getWeights()[1] = 2;
  c.getWeights()[2] = 3;
  c.getWeights()[3] = 4;
  c.getWeights()[4] = 1;
View Full Code Here
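
The nine-argument Conv2DConnection constructor in testConvolutions packs the connection geometry directly into the call. A hedged reading of the arguments (the parameter meanings are assumptions inferred from the weight count and the surrounding tests, not taken from the library's documentation):

  Conv2DConnection c = new Conv2DConnection(
      new Layer(), new Layer(), // input and output layers
      3, 3, 2,                  // input feature maps: 3x3, 2 channels
      2, 2,                     // 2x2 kernel
      1,                        // 1 output filter
      1);                       // stride 1
  // 2x2 kernel * 2 input channels * 1 output filter = 8 weights, which the test fills starting at index 0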

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

  Util.fillArray(o.getElements(), 0);
    }

    @Test
    public void testSimpleCNN() {
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] {{3, 3, 2}, {2, 2, 2, 1}, {2, 2}}, false);
  nn.setLayerCalculator(NNFactory.lcWeightedSum(nn, null));
  NNFactory.lcMaxPooling(nn);

  Conv2DConnection c = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  c.getWeights()[0] = 1;
  c.getWeights()[1] = 2;
  c.getWeights()[2] = 3;
  c.getWeights()[3] = 4;
  c.getWeights()[4] = 1;
  c.getWeights()[5] = 2;
  c.getWeights()[6] = 3;
  c.getWeights()[7] = 4;
  c.getWeights()[8] = 1;
  c.getWeights()[9] = 2;
  c.getWeights()[10] = 3;
  c.getWeights()[11] = 4;
  c.getWeights()[12] = 1;
  c.getWeights()[13] = 2;
  c.getWeights()[14] = 3;
  c.getWeights()[15] = 4;

  Matrix i1 = new Matrix(new float[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18 }, 1);

  ValuesProvider vp = new ValuesProvider();
  vp.addValues(nn.getInputLayer(), i1);

  Set<Layer> calculatedLayers = new HashSet<>();
  calculatedLayers.add(nn.getInputLayer());
  nn.getLayerCalculator().calculate(nn, nn.getOutputLayer(), calculatedLayers, vp);

  Matrix o = vp.getValues(nn.getOutputLayer());

  assertEquals(244, o.get(0, 0), 0);
  assertEquals(244, o.get(1, 0), 0);
    }
View Full Code Here
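
The expected value 244 in testSimpleCNN can be checked by hand. The weight pattern {1, 2, 3, 4} is repeated for every channel/filter pair, so both filters produce the same output, the largest 2x2 convolution response sits at the bottom-right patch, and max pooling simply forwards it:

  // channel 1 patch {5, 6, 8, 9}:     1*5  + 2*6  + 3*8  + 4*9  = 77
  // channel 2 patch {14, 15, 17, 18}: 1*14 + 2*15 + 3*17 + 4*18 = 167
  // filter response = 77 + 167 = 244, passed through unchanged by 2x2 max pooling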

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

    @Test
    public void testCNNBackpropagation() {
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 3, 3, 2 }, { 2, 2, 1, 1 } }, true);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));

  Conv2DConnection c = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  c.setWeights(new float [] {0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f});

  Conv2DConnection b = (Conv2DConnection) nn.getOutputLayer().getConnections().get(1);
  b.setWeights(new float [] {-3f});
 
  SimpleInputProvider ts = new SimpleInputProvider(new float[][] { { 0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f, 0.7f, 0.8f, 0.9f, 1, 1.1f, 1.2f, 1.3f, 1.4f, 1.5f, 1.6f, 1.7f, 1.8f } }, new float[][] { { 1, 1, 1, 1 } }, 1, 1);
  BackPropagationTrainer<?> t = TrainerFactory.backPropagation(nn, ts, null, null, null, 0.5f, 0f, 0f, 0f);
  t.train();
View Full Code Here

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

  assertEquals(-2.911599, b.getWeights()[0], 0.00001);
    }

    @Test
    public void testCNNBackpropagation2() {
  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { {2, 1, 1}, {1, 1}, {2}, {2}, {1} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  FullyConnected c1 = (FullyConnected) nn.getInputLayer().getConnections().get(0).getOutputLayer().getConnections().get(1).getOutputLayer().getConnections().get(1);
  Matrix cg1 = c1.getConnectionGraph();
  cg1.set(0.1f, 0, 0);
  cg1.set(0.8f, 0, 1);
  cg1.set(0.4f, 1, 0);
  cg1.set(0.6f, 1, 1);

  FullyConnected c2 = (FullyConnected) nn.getOutputLayer().getConnections().iterator().next();
  Matrix cg2 = c2.getConnectionGraph();
  cg2.set(0.3f, 0, 0);
  cg2.set(0.9f, 0, 1);

  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }, 1, 1), new SimpleInputProvider(new float[][] { { 0.35f, 0.9f } }, new float[][] { { 0.5f } }, 1, 1), null, null, 1f, 0f, 0f, 0f);
View Full Code Here
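
The constants in testCNNBackpropagation2 (inputs 0.35/0.9, weights 0.1/0.8/0.4/0.6 and 0.3/0.9, target 0.5) match a widely used hand-worked 2-2-1 sigmoid backpropagation exercise. In that reference setup the first forward pass works out as follows (a sketch of the reference arithmetic, not a trace through this exact network, which places a pooling layer in front):

  // hidden unit 1: sigmoid(0.1*0.35 + 0.8*0.9)  = sigmoid(0.755) ~ 0.68
  // hidden unit 2: sigmoid(0.4*0.35 + 0.6*0.9)  = sigmoid(0.68)  ~ 0.66
  // output:        sigmoid(0.3*0.68 + 0.9*0.66) = sigmoid(0.80)  ~ 0.69
  // with target 0.5 and learning rate 1, backpropagation then pulls the output back toward 0.5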

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

    @Test
    public void testCNNStride() {
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 5, 5, 1 }, { 2, 2, 1, 2 } }, false);
  nn.setLayerCalculator(NNFactory.lcWeightedSum(nn, null));

  Conv2DConnection cc = (Conv2DConnection) nn.getInputLayer().getConnections().get(0);
  Util.fillArray(cc.getWeights(), 1);

  ValuesProvider vp = new ValuesProvider();
  vp.addValues(nn.getInputLayer(), new Matrix(new float[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25}, 1));

  Set<Layer> calculatedLayers = new HashSet<>();
  calculatedLayers.add(nn.getInputLayer());
  nn.getLayerCalculator().calculate(nn, nn.getOutputLayer(), calculatedLayers, vp);

  Matrix o = vp.getValues(nn.getOutputLayer());
  assertEquals(16, o.get(0, 0), 0.00001);
  assertEquals(24, o.get(1, 0), 0.00001);
  assertEquals(56, o.get(2, 0), 0.00001);
  assertEquals(64, o.get(3, 0), 0.00001);
    }
View Full Code Here
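
The four expected values in testCNNStride follow directly from the all-ones 2x2 kernel applied to the 5x5 input with stride 2; each output is simply the sum of one input patch:

  // stride 2 starts patches at rows/columns 0 and 2:
  //   {1, 2, 6, 7}     -> 16
  //   {3, 4, 8, 9}     -> 24
  //   {11, 12, 16, 17} -> 56
  //   {13, 14, 18, 19} -> 64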

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

public class IrisTest {

    @Test
    public void testMLPSigmoidBP() {
  // create the network
  NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 4, 2, 3 }, true);

  // training and testing data providers
  IrisInputProvider trainInputProvider = new IrisInputProvider(150, 300000, new IrisTargetMultiNeuronOutputConverter(), false, true, false);
  IrisInputProvider testInputProvider = new IrisInputProvider(1, 150, new IrisTargetMultiNeuronOutputConverter(), false, true, false);
  OutputError outputError = new MultipleNeuronsOutputError();

  // trainer
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, trainInputProvider, testInputProvider, outputError, new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f), 0.5f), 0.02f, 0.7f, 0f, 0f);

  // log data
  bpt.addEventListener(new LogTrainingListener(Thread.currentThread().getStackTrace()[1].getMethodName()));

  // early stopping
  //bpt.addEventListener(new EarlyStoppingListener(testInputProvider, 100, 0.015f));

  // execution mode
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);

  // train
  bpt.train();

  // add softmax function
  LayerCalculatorImpl lc = (LayerCalculatorImpl) mlp.getLayerCalculator();
  ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getConnectionCalculator(mlp.getOutputLayer());
  cc.addActivationFunction(new SoftmaxFunction());

  // test
  bpt.test();
View Full Code Here
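
The Iris snippet ends right after bpt.test(). A typical follow-up, sketched here under the assumption that the trainer exposes its output error the way other tests in this project do (the method names and the 0.1 bound are illustrative, not guaranteed):

  // inspect the classification error accumulated during bpt.test()
  assertEquals(0, bpt.getOutputError().getTotalNetworkError(), 0.1);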

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

  // execution mode
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);
  Environment.getInstance().setUseWeightsSharedMemory(false);

  // create the network
  NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 4, 2, 3 }, true);

  // training and testing data providers
  IrisInputProvider trainInputProvider = new IrisInputProvider(new IrisTargetMultiNeuronOutputConverter(), false);
  trainInputProvider.addInputModifier(new ScalingInputFunction(trainInputProvider));
  IrisInputProvider testInputProvider = new IrisInputProvider(new IrisTargetMultiNeuronOutputConverter(), false);
View Full Code Here

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

  // execution mode
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.CPU);
  Environment.getInstance().setUseWeightsSharedMemory(true);

  // create the network
  NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 4, 2, 3 }, true);

  // training and testing data providers
  String inputPath = Thread.currentThread().getContextClassLoader().getResource("IRISinput.txt").getPath();
  String targetPath = Thread.currentThread().getContextClassLoader().getResource("IRIStarget.txt").getPath();
View Full Code Here

Examples of com.github.neuralnetworks.architecture.NeuralNetworkImpl

*/
public class MnistTest {

    @Test
    public void testSigmoidBP() {
  NeuralNetworkImpl mlp = NNFactory.mlpSigmoid(new int[] { 784, 10 }, true);

  MnistInputProvider trainInputProvider = new MnistInputProvider("train-images.idx3-ubyte", "train-labels.idx1-ubyte", 1, 1, new MnistTargetMultiNeuronOutputConverter());
  trainInputProvider.addInputModifier(new ScalingInputFunction(255));
  MnistInputProvider testInputProvider = new MnistInputProvider("t10k-images.idx3-ubyte", "t10k-labels.idx1-ubyte", 1000, 1, new MnistTargetMultiNeuronOutputConverter());
  testInputProvider.addInputModifier(new ScalingInputFunction(255));
View Full Code Here
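
The MNIST snippet stops once the input providers are configured. A hedged sketch of how such a test typically continues, mirroring the Iris example above (the learning rate and momentum values are illustrative assumptions):

  // wire the providers into a backpropagation trainer and run it
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, trainInputProvider, testInputProvider,
      new MultipleNeuronsOutputError(),
      new NNRandomInitializer(new MersenneTwisterRandomInitializer(-0.01f, 0.01f), 0.5f),
      0.02f, 0.5f, 0f, 0f);
  bpt.addEventListener(new LogTrainingListener(Thread.currentThread().getStackTrace()[1].getMethodName()));
  bpt.train();
  bpt.test();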