Package org.apache.mahout.math

Examples of org.apache.mahout.math.DenseVector
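The snippets below are excerpts from Apache Mahout test and library code, truncated where marked. For orientation, here is a minimal, self-contained sketch of the DenseVector operations those excerpts lean on. It uses only methods that appear somewhere on this page (construction from a double[], like(), plus(), dot(), assign(), zSum(), norm(2), asFormatString()) and assumes a Mahout release in which Vector and DenseVector live in org.apache.mahout.math.

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class DenseVectorBasics {
  public static void main(String[] args) {
    // construct dense vectors directly from double[] arrays
    Vector a = new DenseVector(new double[]{1, 2, 3});
    Vector b = new DenseVector(new double[]{4, 5, 6});

    Vector sum = a.plus(b);      // element-wise addition, returns a new vector
    double dot = a.dot(b);       // 1*4 + 2*5 + 3*6 = 32
    double length = a.norm(2);   // Euclidean norm, sqrt(14)
    double total = sum.zSum();   // sum of all elements, 21

    Vector scratch = a.like();   // empty vector with the same cardinality as a
    scratch.assign(0.5);         // fill every element with a constant

    System.out.println(sum.asFormatString());
    System.out.println(dot + " " + length + " " + total + " " + scratch.zSum());
  }
}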


    assertEquals("models", model.toString(), model2.toString());
  }
 
  public void testClusterWritableSerialization() throws Exception {
    double[] m = {1.1, 2.2, 3.3};
    DirichletCluster<?> cluster = new DirichletCluster(new NormalModel(new DenseVector(m), 4), 10);
    // round-trip the cluster through Hadoop's Writable serialization
    DataOutputBuffer out = new DataOutputBuffer();
    cluster.write(out);
    DirichletCluster<?> cluster2 = new DirichletCluster();
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
View Full Code Here


  public void testMeasure() {

    DistanceMeasure distanceMeasure = distanceMeasureFactory();

    Vector[] vectors = {
        new DenseVector(new double[]{1, 1, 1, 1, 1, 1}),
        new DenseVector(new double[]{2, 2, 2, 2, 2, 2}),
        new DenseVector(new double[]{6, 6, 6, 6, 6, 6}),
        new DenseVector(new double[]{-1,-1,-1,-1,-1,-1})
    };

    compare(distanceMeasure, vectors);

    vectors = new Vector[4];
View Full Code Here

  public void testMeasure() {

    DistanceMeasure distanceMeasure = new CosineDistanceMeasure();

    Vector[] vectors = {
        new DenseVector(new double[]{1, 0, 0, 0, 0, 0}),
        new DenseVector(new double[]{1, 1, 1, 0, 0, 0}),
        new DenseVector(new double[]{1, 1, 1, 1, 1, 1})
    };

    double[][] distanceMatrix = new double[3][3];

    for (int a = 0; a < 3; a++) {
View Full Code Here
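The test above fills a 3x3 matrix of cosine distances between those vectors; the loop body is cut off at the marker. As a sketch of the quantity being measured, the following computes a cosine distance by hand from dot() and norm(2). The assumption that CosineDistanceMeasure returns 1 minus the cosine similarity is mine and is not stated in the excerpt.

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class CosineDistanceSketch {
  // cosine distance, assumed here to mean 1 - cos(angle between a and b)
  static double cosineDistance(Vector a, Vector b) {
    double denominator = a.norm(2) * b.norm(2);
    if (denominator == 0.0) {
      return 1.0; // convention chosen for zero vectors; the real measure may differ
    }
    return 1.0 - a.dot(b) / denominator;
  }

  public static void main(String[] args) {
    Vector v0 = new DenseVector(new double[]{1, 0, 0, 0, 0, 0});
    Vector v2 = new DenseVector(new double[]{1, 1, 1, 1, 1, 1});
    System.out.println(cosineDistance(v0, v0)); // identical direction: 0.0
    System.out.println(cosineDistance(v0, v2)); // 1 - 1/sqrt(6), roughly 0.59
  }
}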

    }
  }

  public void testRmultinom1() {
    double[] b = {0.4, 0.6};
    Vector v = new DenseVector(b);
    Vector t = v.like();
    // accumulate 100 draws; the averaged counts should land near 100 * [0.4, 0.6]
    for (int i = 1; i <= 100; i++) {
      Vector multinom = UncommonDistributions.rMultinom(100, v);
      t = t.plus(multinom);
    }
    System.out.println("sum(rMultinom(" + 100 + ", [0.4, 0.6]))/100="
View Full Code Here

  }

  public void testRmultinom2() {
    double[] b = {0.1, 0.2, 0.7};
    Vector v = new DenseVector(b);
    Vector t = v.like();
    // same check with a three-way distribution: averages should approach 100 * [0.1, 0.2, 0.7]
    for (int i = 1; i <= 100; i++) {
      Vector multinom = UncommonDistributions.rMultinom(100, v);
      t = t.plus(multinom);
    }
    System.out.println("sum(rMultinom(" + 100 + ", [ 0.1, 0.2, 0.7 ]))/100="
View Full Code Here

  public void testMeasureWeighted() {

    WeightedDistanceMeasure distanceMeasure = distanceMeasureFactory();

    Vector[] vectors = {
        new DenseVector(new double[]{9, 9, 1}),
        new DenseVector(new double[]{1, 9, 9}),
        new DenseVector(new double[]{9, 1, 9}),
    };
    // weight the middle dimension 1000x heavier than the others
    distanceMeasure.setWeights(new DenseVector(new double[]{1, 1000, 1}));

    double[][] distanceMatrix = new double[3][3];

    for (int a = 0; a < 3; a++) {
      for (int b = 0; b < 3; b++) {
View Full Code Here

  }

  public void testRmultinom() {
    double[] b = {0.1, 0.2, 0.8};
    Vector v = new DenseVector(b);
    for (int i = 1; i <= 100; i++) {
      System.out.println("rMultinom(" + 100 + ", [0.1, 0.2, 0.8])="
          + UncommonDistributions.rMultinom(100, v).asFormatString());
    }
  }
View Full Code Here
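The rMultinom tests above print their draws rather than asserting on them. The sketch below spells out the sanity check they imply, under two assumptions of mine: that UncommonDistributions.rMultinom(size, p) returns a vector of counts summing to size, so repeated draws average out to roughly size * p, and that the class sits in Mahout's Dirichlet clustering package.

import org.apache.mahout.clustering.dirichlet.UncommonDistributions; // package assumed
import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class RMultinomSketch {
  public static void main(String[] args) {
    Vector p = new DenseVector(new double[]{0.4, 0.6});
    Vector totals = p.like();
    int draws = 100;
    for (int i = 0; i < draws; i++) {
      // each draw is assumed to be a count vector whose entries sum to 100
      totals = totals.plus(UncommonDistributions.rMultinom(100, p));
    }
    // the averaged counts should settle near 100 * p = [40, 60]
    System.out.println(totals.divide(draws).asFormatString());
  }
}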

                    Matrix eigenVectors,
                    List<Double> eigenValues,
                    boolean isSymmetric) {
    log.info("Finding {} singular vectors of matrix with {} rows, via Lanczos", desiredRank, corpus.numRows());
    Vector currentVector = getInitialVector(corpus);
    Vector previousVector = new DenseVector(currentVector.size());
    Matrix basis = new SparseRowMatrix(new int[]{desiredRank, corpus.numCols()});
    basis.assignRow(0, currentVector);
    double alpha = 0;
    double beta = 0;
    DoubleMatrix2D triDiag = new DenseDoubleMatrix2D(desiredRank, desiredRank);
    for (int i = 1; i < desiredRank; i++) {
      startTime(TimingSection.ITERATE);
      Vector nextVector = isSymmetric ? corpus.times(currentVector) : corpus.timesSquared(currentVector);
      log.info("{} passes through the corpus so far...", i);
      calculateScaleFactor(nextVector);
      nextVector.assign(new Scale(1 / scaleFactor));
      nextVector.assign(previousVector, new PlusMult(-beta));
      // now orthogonalize
      alpha = currentVector.dot(nextVector);
      nextVector.assign(currentVector, new PlusMult(-alpha));
      endTime(TimingSection.ITERATE);
      startTime(TimingSection.ORTHOGANLIZE);
      orthoganalizeAgainstAllButLast(nextVector, basis);
      endTime(TimingSection.ORTHOGANLIZE);
      // and normalize
      beta = nextVector.norm(2);
      if (outOfRange(beta) || outOfRange(alpha)) {
        log.warn("Lanczos parameters out of range: alpha = {}, beta = {}.  Bailing out early!", alpha, beta);
        break;
      }
      final double b = beta;
      nextVector.assign(new Scale(1 / b));
      basis.assignRow(i, nextVector);
      previousVector = currentVector;
      currentVector = nextVector;
      // save the projections and norms!
      triDiag.set(i - 1, i - 1, alpha);
      if (i < desiredRank - 1) {
        triDiag.set(i - 1, i, beta);
        triDiag.set(i, i - 1, beta);
      }
    }
    startTime(TimingSection.TRIDIAG_DECOMP);

    log.info("Lanczos iteration complete - now to diagonalize the tri-diagonal auxiliary matrix.");
    // at this point triDiag and basis are fully populated, and the basis rows are orthonormal
    EigenvalueDecomposition decomp = new EigenvalueDecomposition(triDiag);

    DoubleMatrix2D eigenVects = decomp.getV();
    DoubleMatrix1D eigenVals = decomp.getRealEigenvalues();
    endTime(TimingSection.TRIDIAG_DECOMP);
    startTime(TimingSection.FINAL_EIGEN_CREATE);

    for (int i = 0; i < basis.numRows() - 1; i++) {
      Vector realEigen = new DenseVector(corpus.numCols());
      // the eigenvectors live as columns of V, in reverse order.  Weird but true.
      DoubleMatrix1D ejCol = eigenVects.viewColumn(basis.numRows() - i - 1);
      for (int j = 0; j < ejCol.size(); j++) {
        double d = ejCol.getQuick(j);
        realEigen.assign(basis.getRow(j), new PlusMult(d));
      }
      realEigen = realEigen.normalize();
      eigenVectors.assignRow(i, realEigen);
      log.info("Eigenvector {} found with eigenvalue {}", i, eigenVals.get(i));
      eigenValues.add(eigenVals.get(i));
    }
    log.info("LanczosSolver finished.");
View Full Code Here
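Inside the Lanczos loop above, each new vector is orthogonalized against the previous and current basis vectors (the dot() and PlusMult steps) and then scaled to unit length. Below is a stripped-down, hypothetical sketch of that single step; it is an illustration, not the solver's actual orthoganalizeAgainstAllButLast routine, and it assumes times(double) and divide(double) are available on the Vector interface even though they do not appear in the excerpt.

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class OrthogonalizeSketch {
  // remove from 'candidate' its projection onto 'basisVector', then normalize the remainder;
  // hypothetical helper for illustration only
  static Vector orthogonalizeAndNormalize(Vector candidate, Vector basisVector) {
    double alpha = basisVector.dot(candidate);              // projection coefficient
    Vector residual = candidate.plus(basisVector.times(-alpha));
    double beta = residual.norm(2);                         // length of what remains
    return beta == 0.0 ? residual : residual.divide(beta);
  }

  public static void main(String[] args) {
    Vector basis = new DenseVector(new double[]{1, 0, 0}); // already unit length
    Vector next = new DenseVector(new double[]{1, 1, 0});
    Vector ortho = orthogonalizeAndNormalize(next, basis);
    System.out.println(ortho.asFormatString());            // roughly [0, 1, 0]
    System.out.println(basis.dot(ortho));                  // roughly 0
  }
}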

      if (slice == null || (vector = slice.vector()) == null || vector.getLengthSquared() == 0) {
        continue;
      }
      scaleFactor += vector.getLengthSquared();
      if (v == null) {
        v = new DenseVector(vector.size()).plus(vector);
      } else {
        v.assign(vector, plus);
      }
    }
    v.assign(div(v.norm(2)));
View Full Code Here
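The fragment above accumulates corpus rows into v and then scales v to unit length with v.assign(div(v.norm(2))). The normalize() call used in the Lanczos code should be equivalent; a tiny sketch:

import org.apache.mahout.math.DenseVector;
import org.apache.mahout.math.Vector;

public class NormalizeSketch {
  public static void main(String[] args) {
    Vector v = new DenseVector(new double[]{3, 4});
    Vector unit = v.normalize();               // scale to unit Euclidean length
    System.out.println(unit.asFormatString()); // roughly [0.6, 0.8]
    System.out.println(unit.norm(2));          // roughly 1.0
  }
}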

  public InferredDocument infer(Vector wordCounts) {
    double docTotal = wordCounts.zSum();
    int docLength = wordCounts.size(); // cardinality of document vectors
   
    // initialize variational approximation to p(z|doc)
    Vector gamma = new DenseVector(state.numTopics);
    gamma.assign(state.topicSmoothing + docTotal / state.numTopics);
    Vector nextGamma = new DenseVector(state.numTopics);
    createPhiMatrix(docLength);
   
    Vector digammaGamma = digammaGamma(gamma);
   
    int[] map = new int[docLength];
   
    int iteration = 0;
   
    boolean converged = false;
    double oldLL = 1;
    while (!converged && (iteration < MAX_ITER)) {
      nextGamma.assign(state.topicSmoothing); // nG := alpha, for all topics
     
      int mapping = 0;
      for (Iterator<Vector.Element> iter = wordCounts.iterateNonZero(); iter.hasNext();) {
        Vector.Element e = iter.next();
        int word = e.index();
        Vector phiW = eStepForWord(word, digammaGamma);
        phi.assignColumn(mapping, phiW);
        if (iteration == 0) { // first iteration
          map[word] = mapping;
        }
       
        // variational update: nextGamma[k] += count(w) * exp(phi[w][k])
        for (int k = 0; k < nextGamma.size(); ++k) {
          double g = nextGamma.getQuick(k);
          nextGamma.setQuick(k, g + e.get() * Math.exp(phiW.getQuick(k)));
        }
       
        mapping++;
      }
     
View Full Code Here
