Package: com.github.neuralnetworks.calculation.neuronfunctions

Examples of com.github.neuralnetworks.calculation.neuronfunctions.SoftmaxFunction


  m.set(3, 2, 0);
  m.set(4, 0, 1);
  m.set(5, 1, 1);
  m.set(6, 2, 1);

  SoftmaxFunction sf = new SoftmaxFunction();
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);
  sf.value(m);

  assertEquals(1 / 6f, m.get(0, 0), 0);
  assertEquals(2 / 6f, m.get(1, 0), 0);
  assertEquals(3 / 6f, m.get(2, 0), 0);
  assertEquals(4 / 15f, m.get(0, 1), 0);
View Full Code Here


  bpt.train();

  // add softmax function
  LayerCalculatorImpl lc = (LayerCalculatorImpl) mlp.getLayerCalculator();
  ConnectionCalculatorFullyConnected cc = (ConnectionCalculatorFullyConnected) lc.getConnectionCalculator(mlp.getOutputLayer());
  cc.addActivationFunction(new SoftmaxFunction());

  // test
  bpt.test();

  assertEquals(0, bpt.getOutputError().getTotalNetworkError(), 0.1);
View Full Code Here

    if (nn.getOutputLayer() == l) {
        if (outputCC != null) {
      lc.addConnectionCalculator(l, outputCC);
        } else {
      AparapiSoftReLU c = new AparapiSoftReLU();
      c.addActivationFunction(new SoftmaxFunction());
      lc.addConnectionCalculator(l, c);
        }
    } else if (Util.isConvolutional(l)) {
        lc.addConnectionCalculator(l, new AparapiConv2DSoftReLU());
    } else {
View Full Code Here

    if (nn.getOutputLayer() == l) {
        if (outputCC != null) {
      lc.addConnectionCalculator(l, outputCC);
        } else {
      AparapiReLU c = new AparapiReLU();
      c.addActivationFunction(new SoftmaxFunction());
      lc.addConnectionCalculator(l, c);
        }
    } else if (Util.isConvolutional(l)) {
        lc.addConnectionCalculator(l, new AparapiConv2DReLU());
    } else {
View Full Code Here

    /**
     * Builds an {@link RBMLayerCalculator} for the given RBM whose visible and
     * hidden layers both use soft-ReLU connection calculators.
     *
     * NOTE(review): a {@code SoftmaxFunction} is attached as an extra activation
     * on top of each {@code AparapiSoftReLU} — presumably to normalize the layer
     * outputs into a probability distribution; confirm this is intended for both
     * the visible and the hidden layer, not just the output side.
     *
     * @param rbm the restricted Boltzmann machine to build the calculator for
     * @return a layer calculator with soft-ReLU (+softmax) on both layers and
     *         bias layers populated
     */
    public static RBMLayerCalculator rbmSoftReluSoftRelu(RBM rbm) {
  RBMLayerCalculator lc = new RBMLayerCalculator();

  // Visible layer: soft-ReLU with a softmax applied to its activations.
  AparapiSoftReLU c1 = new AparapiSoftReLU();
  c1.addActivationFunction(new SoftmaxFunction());
  lc.addConnectionCalculator(rbm.getVisibleLayer(), c1);


  // Hidden layer: same configuration as the visible layer.
  AparapiSoftReLU c2 = new AparapiSoftReLU();
  c2.addActivationFunction(new SoftmaxFunction());
  lc.addConnectionCalculator(rbm.getHiddenLayer(), c2);

  // Attach calculators for the RBM's bias layers as well.
  populateBiasLayers(lc, rbm);

  return lc;
View Full Code Here

   
    /**
     * Builds an {@link RBMLayerCalculator} for the given RBM whose visible and
     * hidden layers both use (plain) ReLU connection calculators.
     *
     * NOTE(review): mirrors {@code rbmSoftReluSoftRelu} but with
     * {@code AparapiReLU}; a {@code SoftmaxFunction} is likewise stacked on each
     * ReLU — verify that softmax-after-ReLU on both layers is the intended
     * configuration rather than a copy/paste carry-over.
     *
     * @param rbm the restricted Boltzmann machine to build the calculator for
     * @return a layer calculator with ReLU (+softmax) on both layers and bias
     *         layers populated
     */
    public static RBMLayerCalculator rbmReluRelu(RBM rbm) {
  RBMLayerCalculator lc = new RBMLayerCalculator();

  // Visible layer: ReLU with a softmax applied to its activations.
  AparapiReLU c1 = new AparapiReLU();
  c1.addActivationFunction(new SoftmaxFunction());
  lc.addConnectionCalculator(rbm.getVisibleLayer(), c1);

  // Hidden layer: same configuration as the visible layer.
  AparapiReLU c2 = new AparapiReLU();
  c2.addActivationFunction(new SoftmaxFunction());
  lc.addConnectionCalculator(rbm.getHiddenLayer(), c2);

  // Attach calculators for the RBM's bias layers as well.
  populateBiasLayers(lc, rbm);

  return lc;
View Full Code Here

  m.set(3, 2, 0);
  m.set(4, 0, 1);
  m.set(5, 1, 1);
  m.set(6, 2, 1);

  SoftmaxFunction sf = new SoftmaxFunction();
  Environment.getInstance().setExecutionMode(EXECUTION_MODE.SEQ);
  sf.value(m);

  assertEquals(1 / 6f, m.get(0, 0), 0);
  assertEquals(2 / 6f, m.get(1, 0), 0);
  assertEquals(3 / 6f, m.get(2, 0), 0);
  assertEquals(4 / 15f, m.get(0, 1), 0);
View Full Code Here

TOP

Related Classes of com.github.neuralnetworks.calculation.neuronfunctions.SoftmaxFunction

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact coftware#gmail.com.