Package com.github.neuralnetworks.calculation

Examples of com.github.neuralnetworks.calculation.BreadthFirstOrderStrategy
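
All of the snippets on this page follow the same pattern: construct a BreadthFirstOrderStrategy with a network and a starting layer, then call order() to obtain the network's connections as a breadth-first-ordered List<ConnectionCandidate>. The sketch below is not taken from any of the examples; it is a minimal illustration assembled from the calls they use (imports are omitted, as in the snippets, and NNFactory.mlp is assumed to return a NeuralNetworkImpl, as NNFactory.convNN does in the test further down).

    // Minimal usage sketch (illustrative, not from this page's examples).
    NeuralNetworkImpl nn = NNFactory.mlp(new int[] { 3, 4 }, true);

    // Order every connection breadth-first, starting from the output layer.
    List<ConnectionCandidate> candidates = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

    for (ConnectionCandidate cc : candidates) {
        Connections c = cc.connection;
        System.out.println(c.getInputLayer() + " -> " + c.getOutputLayer());
    }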


    private static BackPropagationLayerCalculatorImpl bplc(NeuralNetworkImpl nn, Properties p) {
        BackPropagationLayerCalculatorImpl blc = new BackPropagationLayerCalculatorImpl();
        LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

        List<ConnectionCandidate> connections = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

        if (connections.size() > 0) {
            Layer current = null;
            List<Connections> chunk = new ArrayList<>();
            Set<Layer> convCalculatedLayers = new HashSet<>(); // tracks
View Full Code Here
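
The bplc helper is cut off just as it begins grouping the ordered connections into per-layer chunks. The loop below is only a rough sketch of that grouping idea: it assumes the chunk boundary is each connection's output layer, and it leaves out the part where finished chunks are wired into the backpropagation calculator, since none of that is visible in the snippet.

    // Illustrative sketch only, continuing from the locals declared above.
    for (ConnectionCandidate cc : connections) {
        Layer layer = cc.connection.getOutputLayer(); // assumed grouping key
        if (current != null && layer != current) {
            // a finished chunk for "current" would be handed off here
            chunk = new ArrayList<>();
        }
        chunk.add(cc.connection);
        current = layer;
    }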


        assertTrue(ccc.get(1).connection == l.getConnections().get(1));
        assertTrue(ccc.get(2).connection == l.getConnections().get(2));
        l = l.getConnections().get(2).getOutputLayer();
        assertTrue(ccc.get(3).connection == l.getConnections().get(1));

        ccc = new BreadthFirstOrderStrategy(mlp, mlp.getOutputLayer()).order();
        assertEquals(4, ccc.size(), 0);
        l = mlp.getOutputLayer();
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));

        l = l.getConnections().get(0).getInputLayer();
        assertTrue(ccc.get(2).connection == l.getConnections().get(0));
        assertTrue(ccc.get(3).connection == l.getConnections().get(1));

        // Simple MLP
        mlp = NNFactory.mlp(new int[] {3, 4}, true);

        calculated = new HashSet<Layer>();
        calculated.add(mlp.getInputLayer());
        ccc = new TargetLayerOrderStrategy(mlp, mlp.getOutputLayer(), calculated).order();
        assertEquals(2, ccc.size(), 0);
        l = mlp.getOutputLayer();
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));

        ccc = new BreadthFirstOrderStrategy(mlp, mlp.getOutputLayer()).order();
        assertEquals(2, ccc.size(), 0);
        l = mlp.getOutputLayer();
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));

        // CNN
        NeuralNetworkImpl cnn = NNFactory.convNN(new int[][] { { 3, 3, 2 }, { 2, 2, 1, 1 } }, true);

        calculated = new HashSet<Layer>();
        calculated.add(cnn.getInputLayer());
        ccc = new TargetLayerOrderStrategy(cnn, cnn.getOutputLayer(), calculated).order();
        l = cnn.getOutputLayer();
        assertEquals(2, ccc.size(), 0);
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));

        ccc = new BreadthFirstOrderStrategy(cnn, cnn.getOutputLayer()).order();
        l = cnn.getOutputLayer();
        assertEquals(2, ccc.size(), 0);
        assertTrue(ccc.get(0).connection == l.getConnections().get(0));
        assertTrue(ccc.get(1).connection == l.getConnections().get(1));
    }
View Full Code Here
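
For the single-weight-layer networks above, the test expects BreadthFirstOrderStrategy and TargetLayerOrderStrategy to return the same two candidates (presumably the weight connection and the bias connection feeding the output layer). The stand-alone sketch below condenses that comparison using the same factory calls as the test; it is illustrative, not code from this page.

    NeuralNetworkImpl mlp = NNFactory.mlp(new int[] { 3, 4 }, true);
    Layer output = mlp.getOutputLayer();

    // BreadthFirstOrderStrategy needs only the network and a starting layer...
    List<ConnectionCandidate> bfs = new BreadthFirstOrderStrategy(mlp, output).order();

    // ...while TargetLayerOrderStrategy is also told which layers are already calculated.
    Set<Layer> calculated = new HashSet<>();
    calculated.add(mlp.getInputLayer());
    List<ConnectionCandidate> targeted = new TargetLayerOrderStrategy(mlp, output, calculated).order();

    System.out.println(bfs.size() + " " + targeted.size()); // the test expects 2 and 2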

        this.randomInitializer = randomInitializer;
        this.biasDefaultValue = biasDefaultValue;
    }

    public void initialize(NeuralNetwork nn) {
        List<ConnectionCandidate> ccs = new BreadthFirstOrderStrategy(nn, nn.getInputLayer()).order();
        for (ConnectionCandidate cc : ccs) {
            if (cc.connection instanceof GraphConnections) {
                GraphConnections fc = (GraphConnections) cc.connection;
                if (Util.isBias(fc.getInputLayer())) {
                    if (biasDefaultValue != null) {
View Full Code Here
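
The initializer walks the connections breadth-first from the input layer and treats bias connections specially. The sketch below isolates that bias/weight split using only the calls visible above (GraphConnections, Util.isBias); the actual value-filling code of the full method is not shown on this page, so it is only hinted at in comments.

    // Illustrative sketch of the bias/weight split used by the initializer.
    List<ConnectionCandidate> ccs = new BreadthFirstOrderStrategy(nn, nn.getInputLayer()).order();
    for (ConnectionCandidate cc : ccs) {
        if (cc.connection instanceof GraphConnections) {
            GraphConnections gc = (GraphConnections) cc.connection;
            if (Util.isBias(gc.getInputLayer())) {
                // bias connection: would receive biasDefaultValue (or a random value)
            } else {
                // weight connection: would be filled by randomInitializer
            }
        }
    }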

    @Override
    public void backpropagate(NeuralNetwork nn, Set<Layer> calculatedLayers, ValuesProvider activations, ValuesProvider results) {
        this.activations = activations;

        List<ConnectionCandidate> connections = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

        calculate(results, connections, nn);
    }
View Full Code Here
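
Note the direction in these examples: the initializer above orders from nn.getInputLayer() for a forward-style walk, while backpropagate orders from nn.getOutputLayer() so connections are visited from the output backwards.

    // The starting layer determines the direction of the breadth-first walk.
    new BreadthFirstOrderStrategy(nn, nn.getInputLayer()).order();  // forward-style (initializer)
    new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order(); // backward (backpropagation)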

        this.randomInitializer = randomInitializer;
        this.biasDefaultValue = biasDefaultValue;
    }

    public void initialize(NeuralNetwork nn) {
        List<ConnectionCandidate> ccs = new BreadthFirstOrderStrategy(nn, nn.getInputLayer()).order();
        for (ConnectionCandidate cc : ccs) {
            if (cc.connection instanceof FullyConnected) {
                FullyConnected fc = (FullyConnected) cc.connection;
                if (Util.isBias(fc.getInputLayer())) {
                    if (biasDefaultValue != null) {
View Full Code Here
