Examples of LayerCalculatorImpl


Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

        return t;
    }

    private static BackPropagationLayerCalculatorImpl bplc(NeuralNetworkImpl nn, Properties p) {
        BackPropagationLayerCalculatorImpl blc = new BackPropagationLayerCalculatorImpl();
        LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

        // order the connections breadth-first, starting from the output layer
        List<ConnectionCandidate> connections = new BreadthFirstOrderStrategy(nn, nn.getOutputLayer()).order();

        if (connections.size() > 0) {
            Layer current = null;
            List<Connections> chunk = new ArrayList<>();
            // tracks convolutional layers (because their calculations are interlinked)
            Set<Layer> convCalculatedLayers = new HashSet<>();
            convCalculatedLayers.add(nn.getOutputLayer());

            for (int i = 0; i < connections.size(); i++) {
                ConnectionCandidate c = connections.get(i);
                chunk.add(c.connection);

                // process the accumulated chunk once the target layer changes
                if (i == connections.size() - 1 || connections.get(i + 1).target != c.target) {
                    current = c.target;

                    ConnectionCalculator result = null;
                    ConnectionCalculator ffcc = null;
                    if (Util.isBias(current)) {
                        ffcc = lc.getConnectionCalculator(current.getConnections().get(0).getOutputLayer());
                    } else if (Util.isConvolutional(current) || Util.isSubsampling(current)) {
                        if (chunk.size() != 1) {
                            throw new IllegalArgumentException("Convolutional layer with more than one connection");
                        }

                        ffcc = lc.getConnectionCalculator(Util.getOppositeLayer(chunk.iterator().next(), current));
                    } else {
                        ffcc = lc.getConnectionCalculator(current);
                    }

                    // pick the backpropagation counterpart of the feedforward calculator
                    if (ffcc instanceof AparapiSigmoid) {
                        result = new BackPropagationSigmoid(p);
                    } else if (ffcc instanceof AparapiTanh) {
View Full Code Here
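
The subtle part of this excerpt is the chunking loop: consecutive ConnectionCandidates that share a target layer are accumulated into chunk and processed together as soon as the target changes (or the list ends). A minimal, self-contained sketch of that grouping idiom, using a hypothetical Candidate class in place of the library's ConnectionCandidate:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ChunkByTargetSketch {

    // Hypothetical stand-in for the library's ConnectionCandidate.
    static class Candidate {
        final String target;     // layer being calculated
        final String connection; // incoming connection

        Candidate(String target, String connection) {
            this.target = target;
            this.connection = connection;
        }
    }

    public static void main(String[] args) {
        List<Candidate> ordered = Arrays.asList(
                new Candidate("hidden", "input->hidden"),
                new Candidate("hidden", "bias->hidden"),
                new Candidate("output", "hidden->output"));

        List<String> chunk = new ArrayList<>();
        for (int i = 0; i < ordered.size(); i++) {
            Candidate c = ordered.get(i);
            chunk.add(c.connection);

            // flush when the next candidate targets a different layer
            // (the original compares layer references; strings need equals())
            if (i == ordered.size() - 1 || !ordered.get(i + 1).target.equals(c.target)) {
                System.out.println(c.target + " <- " + chunk);
                chunk = new ArrayList<>();
            }
        }
        // Output:
        // hidden <- [input->hidden, bias->hidden]
        // output <- [hidden->output]
    }
}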

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

  // train
  bpt.train();

  // add softmax function
  LayerCalculatorImpl lc = (LayerCalculatorImpl) mlp.getLayerCalculator();
  ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getConnectionCalculator(mlp.getOutputLayer());
  cc.addActivationFunction(new SoftmaxFunction());

  // test
  bpt.test();
View Full Code Here
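
For context, here is roughly how such a snippet is set up end to end. The NNFactory.mlp call and the meanings of the float parameters are assumptions inferred from this library's conventions, not confirmed by this page; the TrainerFactory.backPropagation argument shape matches the one used in the test example below.

// Sketch only: mlp(...) and the float parameter meanings are assumed.
NeuralNetworkImpl mlp = NNFactory.mlp(new int[] { 784, 300, 10 }, true);
mlp.setLayerCalculator(NNFactory.lcSigmoid(mlp, null));

BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(mlp, null, null, null, null,
        0.01f, 0.5f, 0f, 0f); // same argument shape as in the tests below

// train first, without softmax on the output layer
bpt.train();

// then add the softmax function, exactly as in the snippet above
LayerCalculatorImpl lc = (LayerCalculatorImpl) mlp.getLayerCalculator();
ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getConnectionCalculator(mlp.getOutputLayer());
cc.addActivationFunction(new SoftmaxFunction());

// test with softmax applied
bpt.test();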

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

  NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 5, 5, 50, 1 }, { 2, 2 }, {500}, {10} }, true);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);

  // feedforward cc
  LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();

  Layer l = nn.getInputLayer();

  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiMaxPooling2D);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorFullyConnected);

  // backpropagation cc
  BackPropagationTrainer<?> bpt = TrainerFactory.backPropagation(nn, null, null, null, null, 0.01f, 0.5f, 0f, 0f);
  BackPropagationLayerCalculatorImpl bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);   // bias

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationConv2DSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationConv2D);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(1).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);

  l = l.getConnections().get(2).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l.getConnections().get(1).getInputLayer()) instanceof BackPropagationSigmoid);
  assertTrue(bplc.getConnectionCalculator(l) == null);

  // simple convolutional network
  nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 1, 1 }, {10} }, false);
  nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
  NNFactory.lcMaxPooling(nn);
  lc = (LayerCalculatorImpl) nn.getLayerCalculator(); // refresh the reference for the new network

  l = nn.getInputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof ConnectionCalculatorConv);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSubsampling2D);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(lc.getConnectionCalculator(l) instanceof AparapiSigmoid);

  bpt = TrainerFactory.backPropagation(nn, null, null, new MultipleNeuronsOutputError(), null, 0.02f, 0.5f, 0f, 0f);
  bplc = (BackPropagationLayerCalculatorImpl) bpt.getBPLayerCalculator();

  l = nn.getInputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackpropagationMaxPooling2D);

  l = l.getConnections().get(0).getOutputLayer();
  assertTrue(bplc.getConnectionCalculator(l) instanceof BackPropagationSigmoid);
    }
View Full Code Here
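
Instead of chaining getConnections().get(i).getOutputLayer() hops, the same wiring can be inspected in bulk. A sketch using only calls that already appear on this page:

// Print the calculator assigned to every layer of the network.
LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();
for (Layer l : nn.getLayers()) {
    ConnectionCalculator cc = lc.getConnectionCalculator(l);
    System.out.println(l + " -> " + (cc != null ? cc.getClass().getSimpleName() : "none"));
}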

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

        return result;
    }

    public static LayerCalculatorImpl lcWeightedSum(NeuralNetworkImpl nn, ConnectionCalculator outputCC) {
        LayerCalculatorImpl lc = new LayerCalculatorImpl();
        for (Layer l : nn.getLayers()) {
            if (!Util.isBias(l)) {
                if (outputCC != null && nn.getOutputLayer() == l) {
                    lc.addConnectionCalculator(l, outputCC);
                } else if (Util.isConvolutional(l)) {
                    lc.addConnectionCalculator(l, new ConnectionCalculatorConv());
                } else {
                    lc.addConnectionCalculator(l, new ConnectionCalculatorFullyConnected());
                }
            } else {
                lc.addConnectionCalculator(l, new ConstantConnectionCalculator());
            }
        }

        return lc;
    }
View Full Code Here
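
Typical usage: with outputCC set to null, the output layer falls through to the ConnectionCalculatorFullyConnected branch, giving a plain weighted-sum (linear) output. A sketch reusing convNN from the test above; the layer sizes are illustrative:

NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 10 } }, true);
nn.setLayerCalculator(NNFactory.lcWeightedSum(nn, null));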

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

        return lc;
    }

    public static LayerCalculatorImpl lcSigmoid(NeuralNetworkImpl nn, ConnectionCalculator outputCC) {
        LayerCalculatorImpl lc = new LayerCalculatorImpl();
        for (Layer l : nn.getLayers()) {
            if (!Util.isBias(l)) {
                if (outputCC != null && nn.getOutputLayer() == l) {
                    lc.addConnectionCalculator(l, outputCC);
                } else if (Util.isConvolutional(l)) {
                    lc.addConnectionCalculator(l, new AparapiConv2DSigmoid());
                } else if (!Util.isSubsampling(l)) {
                    lc.addConnectionCalculator(l, new AparapiSigmoid());
                }
            } else {
                lc.addConnectionCalculator(l, new ConstantConnectionCalculator());
            }
        }

        return lc;
    }
View Full Code Here
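
Note the !Util.isSubsampling(l) guard: subsampling layers are deliberately left without a calculator, which is why the tests above always follow lcSigmoid with lcMaxPooling. The combined pattern:

NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 10 } }, true);
nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null));
NNFactory.lcMaxPooling(nn); // assigns AparapiMaxPooling2D to the { 2, 2 } subsampling layer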

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

        return lc;
    }

    public static LayerCalculatorImpl lcSoftRelu(NeuralNetworkImpl nn, ConnectionCalculator outputCC) {
        LayerCalculatorImpl lc = new LayerCalculatorImpl();
        for (Layer l : nn.getLayers()) {
            if (!Util.isBias(l)) {
                if (nn.getOutputLayer() == l) {
                    if (outputCC != null) {
                        lc.addConnectionCalculator(l, outputCC);
                    } else {
                        AparapiSoftReLU c = new AparapiSoftReLU();
                        c.addActivationFunction(new SoftmaxFunction());
                        lc.addConnectionCalculator(l, c);
                    }
                } else if (Util.isConvolutional(l)) {
                    lc.addConnectionCalculator(l, new AparapiConv2DSoftReLU());
                } else {
                    lc.addConnectionCalculator(l, new AparapiSoftReLU());
                }
            } else {
                lc.addConnectionCalculator(l, new ConstantConnectionCalculator());
            }
        }

        return lc;
    }
View Full Code Here
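
Unlike lcWeightedSum and lcSigmoid, this method checks nn.getOutputLayer() == l before looking at outputCC, so a null outputCC does not fall through: the output layer gets an AparapiSoftReLU with a SoftmaxFunction attached. Minimal usage:

// null outputCC: the output layer becomes AparapiSoftReLU + SoftmaxFunction
// (the else-branch above), i.e. softmax comes built in.
nn.setLayerCalculator(NNFactory.lcSoftRelu(nn, null));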

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

        return lc;
    }

    public static LayerCalculatorImpl lcRelu(NeuralNetworkImpl nn, ConnectionCalculator outputCC) {
        LayerCalculatorImpl lc = new LayerCalculatorImpl();
        for (Layer l : nn.getLayers()) {
            if (!Util.isBias(l)) {
                if (nn.getOutputLayer() == l) {
                    if (outputCC != null) {
                        lc.addConnectionCalculator(l, outputCC);
                    } else {
                        AparapiReLU c = new AparapiReLU();
                        c.addActivationFunction(new SoftmaxFunction());
                        lc.addConnectionCalculator(l, c);
                    }
                } else if (Util.isConvolutional(l)) {
                    lc.addConnectionCalculator(l, new AparapiConv2DReLU());
                } else {
                    lc.addConnectionCalculator(l, new AparapiReLU());
                }
            } else {
                lc.addConnectionCalculator(l, new ConstantConnectionCalculator());
            }
        }

        return lc;
    }
View Full Code Here
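
To override the built-in softmax default, pass an explicit output calculator. A sketch that reuses the ConnectionCalculatorFullyConnected plus SoftmaxFunction combination shown in the earlier training example:

ConnectionCalculatorFullyConnected outputCC = new ConnectionCalculatorFullyConnected();
outputCC.addActivationFunction(new SoftmaxFunction());
nn.setLayerCalculator(NNFactory.lcRelu(nn, outputCC));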

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

        return lc;
    }

    public static LayerCalculatorImpl lcTanh(NeuralNetworkImpl nn, ConnectionCalculator outputCC) {
        LayerCalculatorImpl lc = new LayerCalculatorImpl();
        for (Layer l : nn.getLayers()) {
            if (!Util.isBias(l)) {
                if (outputCC != null && nn.getOutputLayer() == l) {
                    lc.addConnectionCalculator(l, outputCC);
                } else if (Util.isConvolutional(l)) {
                    lc.addConnectionCalculator(l, new AparapiConv2DTanh());
                } else {
                    lc.addConnectionCalculator(l, new AparapiTanh());
                }
            } else {
                lc.addConnectionCalculator(l, new ConstantConnectionCalculator());
            }
        }

        return lc;
    }
View Full Code Here
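
Usage mirrors the other factories; the first excerpt on this page shows the backpropagation side, where bplc(...) tests the feedforward calculator with instanceof AparapiTanh to pick the matching backpropagation calculator. A sketch:

nn.setLayerCalculator(NNFactory.lcTanh(nn, null)); // AparapiTanh, or AparapiConv2DTanh for convolutional layers
NNFactory.lcMaxPooling(nn); // replaces the subsampling layers' calculators with AparapiMaxPooling2D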

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

        return lc;
    }

    public static void lcMaxPooling(NeuralNetworkImpl nn) {
        if (nn.getLayerCalculator() instanceof LayerCalculatorImpl) {
            LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();
            nn.getLayers().stream().filter(l -> Util.isSubsampling(l)).forEach(l -> lc.addConnectionCalculator(l, new AparapiMaxPooling2D()));
        } else {
            throw new IllegalArgumentException("LayerCalculator type not supported");
        }
    }
View Full Code Here

Examples of com.github.neuralnetworks.calculation.LayerCalculatorImpl

        }
    }

    public static void lcAveragePooling(NeuralNetworkImpl nn) {
        if (nn.getLayerCalculator() instanceof LayerCalculatorImpl) {
            LayerCalculatorImpl lc = (LayerCalculatorImpl) nn.getLayerCalculator();
            nn.getLayers().stream().filter(l -> Util.isSubsampling(l)).forEach(l -> lc.addConnectionCalculator(l, new AparapiAveragePooling2D()));
        } else {
            throw new IllegalArgumentException("LayerCalculator type not supported");
        }
    }
View Full Code Here
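
Both pooling helpers require that a LayerCalculatorImpl is already installed on the network, otherwise they throw IllegalArgumentException; choosing between max and average pooling is just a matter of which helper you call. A sketch:

NeuralNetworkImpl nn = NNFactory.convNN(new int[][] { { 28, 28, 1 }, { 5, 5, 20, 1 }, { 2, 2 }, { 10 } }, true);
nn.setLayerCalculator(NNFactory.lcSigmoid(nn, null)); // must come first
NNFactory.lcAveragePooling(nn); // or NNFactory.lcMaxPooling(nn)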