Package de.jungblut.math

Examples of de.jungblut.math.DoubleVector.iterateNonZero()
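iterateNonZero() returns an Iterator over DoubleVectorElement, visiting only the entries whose value is not zero; getIndex() and getValue() expose each entry's position and value. On sparse vectors a pass therefore costs time proportional to the number of stored elements rather than the full dimension, which is why every example below prefers it over a plain indexed loop. A minimal, self-contained sketch of the shared pattern, assuming the dense implementation de.jungblut.math.dense.DenseDoubleVector with a double[] constructor and the nested element type de.jungblut.math.DoubleVector.DoubleVectorElement from the same library:

    import java.util.Iterator;

    import de.jungblut.math.DoubleVector;
    import de.jungblut.math.DoubleVector.DoubleVectorElement;
    import de.jungblut.math.dense.DenseDoubleVector;

    public class IterateNonZeroExample {
      public static void main(String[] args) {
        // the zeros at indices 0, 2 and 3 are skipped by iterateNonZero()
        DoubleVector v = new DenseDoubleVector(new double[] { 0d, 3d, 0d, 0d, 7d });
        Iterator<DoubleVectorElement> it = v.iterateNonZero();
        while (it.hasNext()) {
          DoubleVectorElement e = it.next();
          System.out.println(e.getIndex() + " -> " + e.getValue());
        }
      }
    }

The same pattern scales to matrices by iterating their row vectors, as the examples below do.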


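Randomly initializing a parameter matrix theta: values are only set at the (feature index, outcome index) pairs where both sparse vectors have a non-zero entry.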
      DoubleVector feature = tuple.getFirst();
      DoubleVector outcome = tuple.getSecond();
      Iterator<DoubleVectorElement> featureIterator = feature.iterateNonZero();
      while (featureIterator.hasNext()) {
        DoubleVectorElement feat = featureIterator.next();
        Iterator<DoubleVectorElement> outcomeIterator = outcome.iterateNonZero();
        while (outcomeIterator.hasNext()) {
          DoubleVectorElement out = outcomeIterator.next();
          theta.set(feat.getIndex(), out.getIndex(), random.nextDouble());
        }
      }


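Turning per-class token counts into smoothed log probabilities, as in a multinomial Naive Bayes model: only the words that actually occur in a class row are visited, while the normalizer combines the class token count with the vocabulary size (the matrix column count).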
    for (int row = 0; row < numDistinctClasses; row++) {
      // we can quite efficiently iterate over the non-zero row vectors now
      DoubleVector rowVector = probabilityMatrix.getRowVector(row);
      // don't care about non-occurring words; we honor them with a very small
      // probability later on when predicting, here we save a lot of space.
      Iterator<DoubleVectorElement> iterateNonZero = rowVector.iterateNonZero();
      double normalizer = FastMath.log(tokenPerClass[row]
          + probabilityMatrix.getColumnCount() - 1);
      while (iterateNonZero.hasNext()) {
        DoubleVectorElement next = iterateNonZero.next();
        double currentWordCount = next.getValue();
        // presumably the smoothed log probability, log(count) - normalizer,
        // which is then stored back into the matrix
        double logProbability = FastMath.log(currentWordCount) - normalizer;
        probabilityMatrix.set(row, next.getIndex(), logProbability);
      }
    }

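Converting a matrix of transition counts into log probabilities: each non-zero count of a row is normalized against the row sum in log space.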
    final int[] rowEntries = transitionProbabilities.rowIndices();
    for (int rowIndex : rowEntries) {
      DoubleVector rowVector = transitionProbabilities.getRowVector(rowIndex);
      double sum = rowVector.sum();
      Iterator<DoubleVectorElement> iterateNonZero = rowVector.iterateNonZero();
      // loop over all counts and take the log of the probability
      while (iterateNonZero.hasNext()) {
        DoubleVectorElement columnElement = iterateNonZero.next();
        int columnIndex = columnElement.getIndex();
        // presumably normalized against the row sum and written back:
        // log(count / sum) = log(count) - log(sum)
        double probability = FastMath.log(columnElement.getValue())
            - FastMath.log(sum);
        transitionProbabilities.set(rowIndex, columnIndex, probability);
      }
    }

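Writing a sparse matrix to a binary output: per stored row, the row index and the vector's length are written, followed by the index/value pairs of the non-zero entries only.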
    out.writeInt(rowIndices.length);
    for (int row : rowIndices) {
      out.writeInt(row);
      DoubleVector rowVector = mat.getRowVector(row);
      out.writeInt(rowVector.getLength());
      Iterator<DoubleVectorElement> iterateNonZero = rowVector.iterateNonZero();
      while (iterateNonZero.hasNext()) {
        DoubleVectorElement next = iterateNonZero.next();
        out.writeInt(next.getIndex());
        out.writeDouble(next.getValue());
      }
    }

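Computing class probabilities and a gradient over sparse features, as in multinomial logistic regression: per-class scores are summed over a row's non-zero features only, normalized via logSum, and the resulting probabilities are then added back into the gradient at the same non-zero indices.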
    // loop over all feature rows to determine the probabilities
    for (int row = 0; row < m; row++) {
      DoubleVector rowVector = features.getRowVector(row);
      double[] logProbabilities = new double[classes];
      // sum the probabilities for each class over all features
      Iterator<DoubleVectorElement> iterateNonZero = rowVector.iterateNonZero();
      while (iterateNonZero.hasNext()) {
        DoubleVectorElement next = iterateNonZero.next();
        for (int i = 0; i < classes; i++) {
          logProbabilities[i] += theta.get(i, next.getIndex());
        }
      }
      double z = logSum(logProbabilities);
      for (int i = 0; i < classes; i++) {
        double prob = Math.exp(logProbabilities[i] - z);
        iterateNonZero = rowVector.iterateNonZero();
        while (iterateNonZero.hasNext()) {
          DoubleVectorElement next = iterateNonZero.next();
          gradient.set(i, next.getIndex(),
              gradient.get(i, next.getIndex()) + prob);
          if (correctPrediction(i, outcome.getRowVector(row))) {
            // presumably subtracts the indicator here, leaving (prob - 1)
            // ...
          }
        }
      }
    }
