Package de.jungblut.math.activation

Examples of de.jungblut.math.activation.SigmoidActivationFunction
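SigmoidActivationFunction implements the logistic sigmoid, sigma(x) = 1 / (1 + e^-x), which maps any real input into the open interval (0, 1). As the snippets below show, it is the default activation for every hidden and output layer of the library's MultilayerPerceptron and the activation passed to RBMCostFunction in its tests. As a minimal, self-contained sketch of the math (not the library's actual class):

    // Minimal illustration of the logistic sigmoid and its derivative.
    // Self-contained sketch, not the library's SigmoidActivationFunction.
    public final class SigmoidSketch {

      // sigma(x) = 1 / (1 + e^(-x)), maps any real input into (0, 1)
      static double sigmoid(double x) {
        return 1.0 / (1.0 + Math.exp(-x));
      }

      // sigma'(x) = sigma(x) * (1 - sigma(x))
      static double sigmoidDerivative(double x) {
        double s = sigmoid(x);
        return s * (1.0 - s);
      }

      public static void main(String[] args) {
        System.out.println(sigmoid(0.0));           // 0.5
        System.out.println(sigmoidDerivative(0.0)); // 0.25
      }
    }

The derivative sigma'(x) = sigma(x)(1 - sigma(x)) is what backpropagation and the RBM gradient computations below rely on.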


    // functions: default to a linear (identity) activation on the input
    // layer and a sigmoid on every hidden/output layer when none are set
    if (conf.activationFunctions == null) {
      this.activations = new ActivationFunction[layers.length];
      this.activations[0] = new LinearActivationFunction();
      for (int i = 1; i < layers.length; i++) {
        this.activations[i] = new SigmoidActivationFunction();
      }
    } else {
      this.activations = conf.activationFunctions;
    }
    Preconditions.checkArgument(layers.length == activations.length,
        "one activation function per layer is required"); // illustrative message; original truncated
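This constructor fragment from MultilayerPerceptron picks default activations when none are configured: a linear (identity) function for the input layer and a sigmoid for every subsequent layer, then verifies that there is exactly one activation per layer. During the forward pass each layer's activation is applied element-wise to that layer's weighted input; a minimal sketch of that idea (self-contained, not the library's ActivationFunction interface):

    // Element-wise activation of one layer's weighted input.
    // Self-contained illustration; the library applies its configured
    // ActivationFunction objects instead of inlining the math like this.
    static double[] activateLayer(double[] weightedInput, boolean inputLayer) {
      double[] out = new double[weightedInput.length];
      for (int i = 0; i < weightedInput.length; i++) {
        out[i] = inputLayer
            ? weightedInput[i]                           // linear/identity on the input layer
            : 1.0 / (1.0 + Math.exp(-weightedInput[i])); // sigmoid elsewhere
      }
      return out;
    }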


    // random-initialized weights: (input dimension) x (hiddenUnits + 1 bias column)
    WeightMatrix pInput = new WeightMatrix(test[0].getDimension(),
        hiddenUnits + 1);
    // fold the weight matrix into the flat vector the cost function expects
    DoubleVector foldMatrices = DenseMatrixFolder.foldMatrices(pInput
        .getWeights());
    // lambda = 0d: no regularization
    RBMCostFunction fnc = new RBMCostFunction(test, 0, 1, hiddenUnits,
        new SigmoidActivationFunction(), TrainingType.CPU, 0d,
        MultilayerPerceptron.SEED, false);

    CostGradientTuple evaluateCost = fnc.evaluateCost(foldMatrices);

    assertEquals(10.62, evaluateCost.getCost(), 1e-2);
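This test builds a weight matrix sized (input dimension) x (hiddenUnits + 1), folds it into a single flat parameter vector, and checks that the RBM cost function evaluates to roughly 10.62 with regularization disabled. Folding exists because numeric optimizers operate on one flat vector rather than on matrices; in essence it is row-major flattening. A sketch of the idea (illustrative only, not DenseMatrixFolder's actual signatures):

    // Row-major fold/unfold between a matrix and a flat vector.
    // Illustrative sketch; DenseMatrixFolder's real API may differ.
    static double[] fold(double[][] m) {
      int rows = m.length, cols = m[0].length;
      double[] v = new double[rows * cols];
      for (int r = 0; r < rows; r++)
        System.arraycopy(m[r], 0, v, r * cols, cols);
      return v;
    }

    static double[][] unfold(double[] v, int rows, int cols) {
      double[][] m = new double[rows][cols];
      for (int r = 0; r < rows; r++)
        System.arraycopy(v, r * cols, m[r], 0, cols);
      return m;
    }

Unfolding with the original dimensions restores the matrix exactly, so the two operations are inverses.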

    WeightMatrix pInput = new WeightMatrix(test[0].getDimension(),
        hiddenUnits + 1);
    DoubleVector foldMatrices = DenseMatrixFolder.foldMatrices(pInput
        .getWeights());
    // same setup as the previous test, but with L2 regularization (lambda = 0.1d)
    RBMCostFunction fnc = new RBMCostFunction(test, 0, 1, hiddenUnits,
        new SigmoidActivationFunction(), TrainingType.CPU, 0.1d,
        MultilayerPerceptron.SEED, false);
    CostGradientTuple evaluateCost = fnc.evaluateCost(foldMatrices);
    assertEquals(10.62, evaluateCost.getCost(), 1e-2);
    // leading entries of the expected gradient (remaining values truncated in source)
    DoubleVector target = new DenseDoubleVector(new double[] { 0.0,
        0.02692309216175836, 0.028617918716451567, -0.38090575317687425 });
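The setup is identical to the previous test except that the L2 regularization parameter is 0.1d instead of 0d, and the test additionally pins the leading entries of the expected gradient. L2 regularization (weight decay) adds a penalty proportional to the squared weights to the cost and a matching linear term to the gradient. A generic sketch of that adjustment (the usual convention, not RBMCostFunction's internals):

    // Generic L2 (weight-decay) penalty added to a cost and its gradient.
    // lambda and m (number of examples) follow the usual convention;
    // this is a sketch, not RBMCostFunction's implementation.
    static double l2Cost(double baseCost, double[] weights, double lambda, int m) {
      double sumSquares = 0.0;
      for (double w : weights) {
        sumSquares += w * w;
      }
      return baseCost + (lambda / (2.0 * m)) * sumSquares;
    }

    static void addL2Gradient(double[] gradient, double[] weights, double lambda, int m) {
      for (int i = 0; i < weights.length; i++) {
        gradient[i] += (lambda / m) * weights[i];
      }
    }

Bias weights are conventionally excluded from the penalty; this sketch regularizes everything for brevity.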

  public void testCostFunction() {
    WeightMatrix pInput = new WeightMatrix(test[0].getDimension(),
        hiddenUnits + 1);

    // lambda = 0d: no regularization
    RBMCostFunction fnc = new RBMCostFunction(test, 0, 1, hiddenUnits,
        new SigmoidActivationFunction(), TrainingType.CPU, 0d,
        MultilayerPerceptron.SEED, false);

    // minimize the folded weights: learning rate 0.01,
    // convergence limit 1e-5, at most 5000 iterations
    DoubleVector theta = GradientDescent.minimizeFunction(fnc,
        DenseMatrixFolder.foldMatrices(pInput.getWeights()), 0.01, 1e-5, 5000,
        false);
    // ... (assertions truncated in source)
  }
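Here the cost function is actually minimized: GradientDescent.minimizeFunction starts from the folded initial weights and iterates with a learning rate of 0.01, a convergence limit of 1e-5, and at most 5000 iterations. Batch gradient descent simply steps against the gradient until the cost stops improving; a minimal sketch with a hypothetical Cost interface standing in for the library's cost-function abstraction:

    // Minimal batch gradient descent: theta <- theta - alpha * grad(theta).
    // `Cost` is a hypothetical interface, used here for illustration only.
    interface Cost {
      double cost(double[] theta);
      double[] gradient(double[] theta);
    }

    static double[] minimize(Cost f, double[] theta, double alpha,
        double tolerance, int maxIterations) {
      double last = f.cost(theta);
      for (int it = 0; it < maxIterations; it++) {
        double[] grad = f.gradient(theta);
        for (int i = 0; i < theta.length; i++) {
          theta[i] -= alpha * grad[i];
        }
        double current = f.cost(theta);
        if (Math.abs(last - current) < tolerance) {
          break; // converged: cost improvement fell below the limit
        }
        last = current;
      }
      return theta;
    }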
