Package de.jungblut.math

Examples of de.jungblut.math.DoubleVector


  public static int compareVector(VectorWritable a, VectorWritable o) {
    return compareVector(a.getVector(), o.getVector());
  }

  public static int compareVector(DoubleVector a, DoubleVector o) {
    // order by the sum of the element-wise differences: negative means a is
    // "smaller" than o in aggregate, positive means it is "larger"
    DoubleVector subtract = a.subtract(o);
    return (int) subtract.sum();
  }
View Full Code Here
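
The two overloads above order vectors by the sum of their element-wise differences. A minimal usage sketch, relying only on the DenseDoubleVector constructor and the subtract/sum calls shown above:

  DoubleVector a = new DenseDoubleVector(new double[] { 1, 2, 3 });
  DoubleVector b = new DenseDoubleVector(new double[] { 1, 1, 1 });
  // (1-1) + (2-1) + (3-1) = 3, so a sorts after b
  int comparison = (int) a.subtract(b).sum();

Note that this is only a coarse ordering: two different vectors whose differences cancel out, for example { 1, 0 } against { 0, 1 }, compare as equal.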


  private static TIntArrayList getNearestNeighbours(TIntHashSet visited,
      TIntHashSet currentNeighbours, List<DoubleVector> points, int x,
      DistanceMeasurer measurer, double epsilon) {
    TIntArrayList list = new TIntArrayList();
    final DoubleVector ref = points.get(x);
    for (int i = 0; i < points.size(); i++) {
      // filter based on what we've seen at the time
      if (!visited.contains(i)) {
        if (currentNeighbours != null && currentNeighbours.contains(i)) {
          continue;
View Full Code Here
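
The excerpt is cut off before the actual distance test, but the epsilon-neighbourhood idea it builds on can be sketched on its own. The sketch below only assumes the measureDistance(DoubleVector, DoubleVector) call visible in the canopy snippet further down and plain java.util collections:

  // collect the indices of all points within distance epsilon of points.get(x)
  static List<Integer> epsilonNeighbours(List<DoubleVector> points, int x,
      DistanceMeasurer measurer, double epsilon) {
    List<Integer> neighbours = new ArrayList<>();
    DoubleVector ref = points.get(x);
    for (int i = 0; i < points.size(); i++) {
      if (i != x && measurer.measureDistance(ref, points.get(i)) <= epsilon) {
        neighbours.add(i);
      }
    }
    return neighbours;
  }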

    // do the clustering
    List<DoubleVector> canopyList = new ArrayList<>();
    long start = System.currentTimeMillis();
    while (!points.isEmpty()) {
      DoubleVector p1 = points.get(0);
      points.remove(0);
      DoubleVector canopy = p1.deepCopy();
      int assigned = 1;
      // one can speed this up with an inverted index or a kd-tree
      Iterator<DoubleVector> iterator = points.iterator();
      while (iterator.hasNext()) {
        DoubleVector p2 = iterator.next();
        double dist = measure.measureDistance(p1, p2);
        // Put all points that are within distance threshold T1 into the
        // canopy
        if (dist < t1) {
          assigned++;
View Full Code Here
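
The visible part of the loop only shows the loose threshold t1. In the standard canopy clustering formulation there is also a tighter threshold t2 (t2 < t1): points within t2 of the canopy center are removed from the candidate list so they can never seed a canopy of their own. A sketch of the complete inner loop under that standard formulation (the t2 handling is an assumption, it is not visible in the excerpt):

  Iterator<DoubleVector> iterator = points.iterator();
  while (iterator.hasNext()) {
    DoubleVector p2 = iterator.next();
    double dist = measure.measureDistance(p1, p2);
    if (dist < t1) {
      assigned++;          // p2 belongs to the canopy around p1
    }
    if (dist < t2) {
      iterator.remove();   // p2 is too close to ever start its own canopy
    }
  }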

        TreeCompiler.generateClassName(), root);
  }

  @Test
  public void testNominalSwitches() {
    DoubleVector vec = new DenseDoubleVector(new double[] { 0, 0, 0 });
    int result = compiledRoot.predict(vec);
    Assert.assertEquals(0, result);

    vec = new DenseDoubleVector(new double[] { 1, 0, 5 });
    result = compiledRoot.predict(vec);
View Full Code Here

  }

  @Test
  public void testOtherResults() {

    DoubleVector vec = new DenseDoubleVector(new double[] { 2, 2, 0 });
    int result = compiledRoot.predict(vec);
    Assert.assertEquals(1337, result);

    vec = new DenseDoubleVector(new double[] { 2, 18, 0 });
    result = compiledRoot.predict(vec);
View Full Code Here
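
Both tests run a decision tree that TreeCompiler has turned into a generated class; the test name testNominalSwitches suggests that splits on nominal features are compiled into switch statements over the feature value. A purely hypothetical sketch of what such a generated predict method could look like, consistent with the two assertions visible above but not taken from the actual generated code:

  public int predict(DoubleVector vec) {
    // nominal feature at index 0 compiled into a switch
    switch ((int) vec.get(0)) {
      case 0:
        return 0;                              // { 0, 0, 0 } -> 0
      case 2:
        return vec.get(1) > 10 ? 42 : 1337;    // { 2, 2, 0 } -> 1337
      default:
        return 0;
    }
  }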

  @Test
  public void testGradient() {
    WeightMatrix pInput = new WeightMatrix(test[0].getDimension(),
        hiddenUnits + 1);
    DoubleVector foldMatrices = DenseMatrixFolder.foldMatrices(pInput
        .getWeights());
    RBMCostFunction fnc = new RBMCostFunction(test, 0, 1, hiddenUnits,
        new SigmoidActivationFunction(), TrainingType.CPU, 0d,
        MultilayerPerceptron.SEED, false);

    CostGradientTuple evaluateCost = fnc.evaluateCost(foldMatrices);

    assertEquals(10.62, evaluateCost.getCost(), 1e-2);
    DoubleVector target = new DenseDoubleVector(new double[] { 0.0,
        0.027379415757720366, 0.029102968186221934, -0.38090575317687425,
        -0.27799120250510584, -0.05453365605307239, 0.028442797042677864,
        -0.007547440696105356, -0.020996345540311157, 0.23725599589259425,
        0.16279353745280023, 0.021913996227666748, 0.21119663986488538,
        0.14066157414419367, 0.018971946780403166, 0.027585532151946184,
View Full Code Here
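
The target vector carries the expected gradient values, so the natural way to finish such a test is to compare it component by component against the gradient in the returned tuple. A short sketch, assuming CostGradientTuple exposes the gradient via a getGradient() accessor alongside the getCost() used above:

  DoubleVector gradient = evaluateCost.getGradient();
  for (int i = 0; i < target.getDimension(); i++) {
    // tolerance chosen for illustration
    assertEquals(target.get(i), gradient.get(i), 1e-4);
  }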

  @Test
  public void testRegularizedGradient() {
    WeightMatrix pInput = new WeightMatrix(test[0].getDimension(),
        hiddenUnits + 1);
    DoubleVector foldMatrices = DenseMatrixFolder.foldMatrices(pInput
        .getWeights());
    RBMCostFunction fnc = new RBMCostFunction(test, 0, 1, hiddenUnits,
        new SigmoidActivationFunction(), TrainingType.CPU, 0.1d,
        MultilayerPerceptron.SEED, false);
    CostGradientTuple evaluateCost = fnc.evaluateCost(foldMatrices);
    assertEquals(10.62, evaluateCost.getCost(), 1e-2);
    DoubleVector target = new DenseDoubleVector(new double[] { 0.0,
        0.02692309216175836, 0.028617918716451567, -0.38090575317687425,
        -0.2733580157966874, -0.05362476178552118, 0.028442797042677864,
        -0.0074216500178369334, -0.020646406447972637, 0.23725599589259425,
        0.1600803118285869, 0.021548762957205637, 0.21119663986488538,
        0.13831721457512378, 0.018655747667396447, 0.027585532151946184,
View Full Code Here
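
The only difference to the unregularized test is the lambda of 0.1d passed to the cost function. With L2 weight decay the cost grows by lambda/2 times the sum of the squared non-bias weights, and each non-bias gradient entry is shifted by lambda times the corresponding weight (possibly scaled by the number of training examples, depending on the implementation). A generic sketch of that adjustment over plain arrays; isBias is a hypothetical helper marking the bias entries:

  double lambda = 0.1d;
  for (int i = 0; i < theta.length; i++) {
    if (!isBias(i)) {
      // L2 weight decay only touches the non-bias weights
      gradient[i] += lambda * theta[i];
    }
  }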

  }

  @Test
  public void testMomentumGradientDescent() {

    DoubleVector start = new DenseDoubleVector(new double[] { 2, -1 });

    CostFunction inlineFunction = getCostFunction();
    GradientDescent gd = GradientDescentBuilder.create(0.8d).momentum(0.9d)
        .breakOnDifference(1e-20).build();
    DoubleVector minimizeFunction = gd.minimize(inlineFunction, start, 1000,
        false);
    // 1E-5 is close enough to zero for the test to pass
    assertEquals(minimizeFunction.get(0), 0, 1E-5);
    assertEquals(minimizeFunction.get(1), 0, 1E-5);
  }
View Full Code Here
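
The momentum(0.9d) setting corresponds to the classical momentum rule: the optimizer keeps a velocity that accumulates past gradients instead of following the current gradient alone. A standalone sketch of one common formulation on the one-dimensional function f(x) = x^2, using plain doubles rather than the GradientDescent class above:

  double x = 2d;                 // start value
  double velocity = 0d;
  double alpha = 0.8d;           // learning rate, as in the builder above
  double momentum = 0.9d;
  for (int i = 0; i < 1000; i++) {
    double gradient = 2d * x;    // derivative of x^2
    velocity = momentum * velocity - alpha * gradient;
    x += velocity;
  }
  // x oscillates at first but ends up very close to the minimum at 0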

    RBMCostFunction fnc = new RBMCostFunction(test, 0, 1, hiddenUnits,
        new SigmoidActivationFunction(), TrainingType.CPU, 0d,
        MultilayerPerceptron.SEED, false);

    DoubleVector theta = GradientDescent.minimizeFunction(fnc,
        DenseMatrixFolder.foldMatrices(pInput.getWeights()), 0.01, 1e-5, 5000,
        false);

    int[][] pms = MultilayerPerceptronCostFunction
        .computeUnfoldParameters(new int[] { test[0].getDimension(),
View Full Code Here
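
foldMatrices flattens the weight matrices into one long parameter vector so the minimizer only has to handle a single DoubleVector, and computeUnfoldParameters records the matrix dimensions needed to reverse that flattening afterwards. The round trip can be sketched with a plain 2D array (the row-major layout below is an assumption for illustration, not taken from the library):

  // flatten a 2 x 3 weight matrix row by row into a single parameter array
  double[][] weights = { { 1, 2, 3 }, { 4, 5, 6 } };
  double[] folded = new double[weights.length * weights[0].length];
  int k = 0;
  for (double[] row : weights) {
    for (double value : row) {
      folded[k++] = value;
    }
  }
  // unfolding walks the flat array again using the recorded dimensions (2 and 3)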

public class ParticleSwarmOptimizationTest {

  @Test
  public void testParticleSwarmOptimization() {

    DoubleVector start = new DenseDoubleVector(new double[] { 22, 15 });

    // our function is f(x,y) = x^2+y^2
    CostFunction inlineFunction = new CostFunction() {
      @Override
      public CostGradientTuple evaluateCost(DoubleVector input) {

        double cost = Math.pow(input.get(0), 2) + Math.pow(input.get(1), 2);

        return new CostGradientTuple(cost, null);
      }
    };

    DoubleVector minimizeFunction = ParticleSwarmOptimization.minimizeFunction(
        inlineFunction, start, 1000, 0.1, 0.2, 0.4, 100, 8, false);
    // 1E-5 is close enough to zero for the test to pass
    assertEquals(minimizeFunction.get(0), 0, 1E-5);
    assertEquals(minimizeFunction.get(1), 0, 1E-5);
  }
View Full Code Here
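
Behind minimizeFunction sits the usual particle swarm update: every particle keeps a velocity that is pulled both towards its own best position so far and towards the best position the whole swarm has seen. The mapping of the 0.1, 0.2 and 0.4 arguments above to the inertia and attraction weights is not spelled out here, so the constants in the standalone sketch below are illustrative, as is the whole loop; it minimizes the same f(x, y) = x^2 + y^2 with plain arrays:

  static double cost(double[] p) {
    return p[0] * p[0] + p[1] * p[1];
  }

  static double[] minimizeSphere() {
    Random rnd = new Random(0);
    int particles = 8, dims = 2;
    double inertia = 0.7, attractOwn = 1.4, attractGlobal = 1.4;
    double[][] x = new double[particles][dims];
    double[][] v = new double[particles][dims];
    double[][] best = new double[particles][dims];
    double[] bestCost = new double[particles];
    double[] globalBest = null;
    double globalBestCost = Double.MAX_VALUE;
    // random starting positions, personal and global bests initialized from them
    for (int p = 0; p < particles; p++) {
      for (int d = 0; d < dims; d++) {
        x[p][d] = rnd.nextDouble() * 40 - 20;
        best[p][d] = x[p][d];
      }
      bestCost[p] = cost(x[p]);
      if (bestCost[p] < globalBestCost) {
        globalBestCost = bestCost[p];
        globalBest = x[p].clone();
      }
    }
    for (int iteration = 0; iteration < 1000; iteration++) {
      for (int p = 0; p < particles; p++) {
        for (int d = 0; d < dims; d++) {
          // velocity: inertia plus random pulls towards the personal and global bests
          v[p][d] = inertia * v[p][d]
              + attractOwn * rnd.nextDouble() * (best[p][d] - x[p][d])
              + attractGlobal * rnd.nextDouble() * (globalBest[d] - x[p][d]);
          x[p][d] += v[p][d];
        }
        double c = cost(x[p]);
        if (c < bestCost[p]) {
          bestCost[p] = c;
          best[p] = x[p].clone();
        }
        if (c < globalBestCost) {
          globalBestCost = c;
          globalBest = x[p].clone();
        }
      }
    }
    return globalBest;   // ends up very close to (0, 0)
  }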
