Package: com.github.neuralnetworks.architecture

Usage examples of com.github.neuralnetworks.architecture.Layer


    @Test
    public void testValuesProvider() {
  ValuesProvider vp = new ValuesProvider();

  NeuralNetworkImpl nn = new NeuralNetworkImpl();
  Layer i = new Layer();
  Layer h = new Layer();
  Layer o = new Layer();

  nn.addLayer(i);

  NNFactory.addFullyConnectedLayer(nn, h, 2, 3, true);
  NNFactory.addFullyConnectedLayer(nn, o, 4, 1, true);
View Full Code Here


  LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  List<ConnectionCandidate> connections = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

  if (connections.size() > 0) {
      Layer current = null;
      List<Connections> chunk = new ArrayList<>();
      Set<Layer> convCalculatedLayers = new HashSet<>(); // tracks
                     // convolutional
                     // layers
                     // (because their
                     // calculations
                     // are
                     // interlinked)
      convCalculatedLayers.add(nn.getOutputLayer());

      for (int i = 0; i < connections.size(); i++) {
    ConnectionCandidate c = connections.get(i);
    chunk.add(c.connection);

    if (i == connections.size() - 1 || connections.get(i + 1).target != c.target) {
        current = c.target;

        ConnectionCalculator result = null;
        ConnectionCalculator ffcc = null;
        if (Util.isBias(current)) {
      ffcc = lc.getConnectionCalculator(current.getConnections().get(0).getOutputLayer());
        } else if (Util.isConvolutional(current) || Util.isSubsampling(current)) {
      if (chunk.size() != 1) {
          throw new IllegalArgumentException("Convolutional layer with more than one connection");
      }

      ffcc = lc.getConnectionCalculator(Util.getOppositeLayer(chunk.iterator().next(), current));
        } else {
      ffcc = lc.getConnectionCalculator(current);
        }

        if (ffcc instanceof AparapiSigmoid) {
      result = new BackPropagationSigmoid(p);
        } else if (ffcc instanceof AparapiTanh) {
      result = new BackPropagationTanh(p);
        } else if (ffcc instanceof AparapiSoftReLU) {
      result = new BackPropagationSoftReLU(p);
        } else if (ffcc instanceof AparapiReLU) {
      result = new BackPropagationReLU(p);
        } else if (ffcc instanceof AparapiMaxPooling2D || ffcc instanceof AparapiStochasticPooling2D) {
      result = new BackpropagationMaxPooling2D();
        } else if (ffcc instanceof AparapiAveragePooling2D) {
      result = new BackpropagationAveragePooling2D();
        } else if (ffcc instanceof ConnectionCalculatorConv) {
      Layer opposite = Util.getOppositeLayer(chunk.iterator().next(), current);
      if (!convCalculatedLayers.contains(opposite)) {
          convCalculatedLayers.add(opposite);

          if (ffcc instanceof AparapiConv2DSigmoid) {
        result = new BackPropagationConv2DSigmoid(p);
View Full Code Here

      calculateHiddenLayer(rbm, visible, negPhaseHidden);
  }
    }

    public void calculateVisibleLayer(RBM rbm, Matrix visibleLayerResults, Matrix hiddenLayerResults) {
  Layer visibleLayer = rbm.getVisibleLayer();
  Layer hiddenLayer = rbm.getHiddenLayer();

  calculatedLayers.clear();
  calculatedLayers.add(hiddenLayer);

  results.addValues(visibleLayer, visibleLayerResults);
View Full Code Here

  super.calculate(rbm, visibleLayer, calculatedLayers, results);
    }

    public void calculateHiddenLayer(RBM rbm, Matrix visibleLayerResults, Matrix hiddenLayerResults) {
  Layer visibleLayer = rbm.getVisibleLayer();
  Layer hiddenLayer = rbm.getHiddenLayer();

  calculatedLayers.clear();
  calculatedLayers.add(visibleLayer);

  results.addValues(visibleLayer, visibleLayerResults);
View Full Code Here

      inProgressLayers.add(currentLayer);
      List<ConnectionCandidate> currentCandidates = new ArrayList<ConnectionCandidate>();

      boolean hasNoBiasConnections = false;
      for (Connections c : currentLayer.getConnections(neuralNetwork)) {
    Layer opposite = Util.getOppositeLayer(c, currentLayer);
    if (orderConnections(neuralNetwork, opposite, calculatedLayers, inProgressLayers, calculateCandidates)) {
        currentCandidates.add(new ConnectionCandidate(c, currentLayer));

        if (!Util.isBias(opposite)) {
      hasNoBiasConnections = true;
View Full Code Here

    @Override
    public List<ConnectionCandidate> order() {
  List<ConnectionCandidate> result = new ArrayList<>();

  Layer currentLayer = startLayer;

  Queue<Layer> layersQueue = new LinkedList<>();
  layersQueue.add(currentLayer);
  Set<Connections> visitedConnections = new HashSet<>();

  while (layersQueue.size() > 0) {
      Layer l = layersQueue.poll();

      l.getConnections(neuralNetwork).stream().filter(c -> !visitedConnections.contains(c)).forEach(c -> {
    Layer opposite = Util.getOppositeLayer(c, l);
    result.add(new ConnectionCandidate(c, opposite));
    layersQueue.add(opposite);
    visitedConnections.add(c);
      });
  }
View Full Code Here

    public void testWeightedSumFF() {
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.GPU);

  Matrix o = new Matrix(2, 2);

  Layer il1 = new Layer();
  Layer ol = new Layer();
  Layer il2 = new Layer();
  FullyConnected c1 = new FullyConnected(il1, ol, 3, 2);
  FullyConnected c2 = new FullyConnected(il2, ol, 3, 2);
  FullyConnected bc = new FullyConnected(new Layer(), ol, 1, 2);

  Matrix cg = c1.getConnectionGraph();
  cg.set(1, 0, 0);
  cg.set(2, 0, 1);
  cg.set(3, 0, 2);
View Full Code Here

    public void testWeightedSumBP() {
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.GPU);

  Matrix o = new Matrix(2, 2);

  Layer il1 = new Layer();
  Layer ol = new Layer();
  Layer il2 = new Layer();
  FullyConnected c1 = new FullyConnected(ol, il1, 2, 3);
  FullyConnected c2 = new FullyConnected(ol, il2, 2, 3);
  FullyConnected bc = new FullyConnected(new Layer(), ol, 1, 2);

  Matrix cg = c1.getConnectionGraph();
  cg.set(1, 0, 0);
  cg.set(2, 1, 0);
  cg.set(3, 2, 0);
View Full Code Here

    }

    @Test
    public void testParallelNetworks() {
  NeuralNetworkImpl mlp = new NeuralNetworkImpl();
  Layer input = new Layer();
  mlp.addLayer(input);

  Layer leaf1 = new Layer();
  FullyConnected fc1 = new FullyConnected(input, leaf1, 2, 3);
  Util.fillArray(fc1.getConnectionGraph().getElements(), 0.1f);
  mlp.addConnection(fc1);

  Layer leaf2 = new Layer();
  FullyConnected fc2 = new FullyConnected(input, leaf2, 2, 3);
  Util.fillArray(fc2.getConnectionGraph().getElements(), 0.2f);
  mlp.addConnection(fc2);

  Layer output = new Layer();
  FullyConnected fc3 = new FullyConnected(leaf1, output, 3, 1);
  Util.fillArray(fc3.getConnectionGraph().getElements(), 0.3f);
  mlp.addConnection(fc3);
  FullyConnected fc4 = new FullyConnected(leaf2, output, 3, 1);
  Util.fillArray(fc4.getConnectionGraph().getElements(), 0.4f);
View Full Code Here

    @Test
    public void testRemoveLayer() {
  NeuralNetworkImpl mlp = NNFactory.mlp(new int[] {3, 4, 5}, true);
  assertEquals(5, mlp.getLayers().size(), 0);
  Layer currentOutput = mlp.getOutputLayer();
  mlp.removeLayer(mlp.getOutputLayer());
  assertEquals(3, mlp.getLayers().size(), 0);
  assertEquals(true, currentOutput != mlp.getOutputLayer());
    }
View Full Code Here

TOP

Related Classes of com.github.neuralnetworks.architecture.Layer

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.