Package org.apache.mahout.math

Examples of org.apache.mahout.math.SparseMatrix
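
This page collects usage snippets from the Mahout codebase. As a starting point, here is a minimal, self-contained sketch of constructing and filling a SparseMatrix; the dimensions and values are made up for illustration.

import org.apache.mahout.math.Matrix;
import org.apache.mahout.math.RandomAccessSparseVector;
import org.apache.mahout.math.SparseMatrix;
import org.apache.mahout.math.Vector;

public class SparseMatrixSketch {
  public static void main(String[] args) {
    // a 5 x 10 matrix that only stores the cells that are actually set
    Matrix matrix = new SparseMatrix(5, 10);

    // set individual cells without bounds checks
    matrix.setQuick(0, 3, 1.5);
    matrix.setQuick(4, 7, -2.0);

    // or assign a whole (sparse) row at once
    Vector row = new RandomAccessSparseVector(10);
    row.setQuick(2, 0.25);
    matrix.assignRow(1, row);

    // read values back; unset cells are implicitly 0.0
    System.out.println(matrix.getQuick(0, 3));   // 1.5
    System.out.println(matrix.getQuick(0, 0));   // 0.0
  }
}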


    reader.close();
   
    // read the class matrix
    reader = new SequenceFile.Reader(fs, classVectorPath, conf);
    IntWritable label = new IntWritable();
    // one row per label, one column per feature
    Matrix matrix = new SparseMatrix(new int[] {labelCount, featureCount});
    while (reader.next(label, value)) {
      // each record maps a label id to that label's feature vector
      matrix.assignRow(label.get(), value.get());
    }
    reader.close();
   
    model.setWeightMatrix(matrix);
  
View Full Code Here
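
For reference, the reverse of the read loop above — writing each row of a SparseMatrix back out as (IntWritable, VectorWritable) records — can be sketched as follows. The class, method, and variable names are made up for illustration; only the Hadoop and Mahout types come from the snippet, and the SequenceFile.Writer constructor used here is the older form that matches the SequenceFile.Reader(fs, path, conf) call above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.mahout.math.Matrix;
import org.apache.mahout.math.VectorWritable;

final class MatrixWriteSketch {
  // Writes each row of 'matrix' as an (IntWritable rowIndex, VectorWritable row) record.
  static void writeRows(FileSystem fs, Configuration conf, Path path, Matrix matrix)
      throws java.io.IOException {
    SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, path,
        IntWritable.class, VectorWritable.class);
    try {
      IntWritable label = new IntWritable();
      VectorWritable row = new VectorWritable();
      for (int i = 0; i < matrix.numRows(); i++) {
        label.set(i);
        row.set(matrix.viewRow(i));
        writer.append(label, row);
      }
    } finally {
      writer.close();
    }
  }
}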


      alphaI = in.readFloat();
      weightsPerFeature = VectorWritable.readVector(in);
      weightsPerLabel = VectorWritable.readVector(in);
      perLabelThetaNormalizer = VectorWritable.readVector(in);

      // rows are indexed by label, columns by feature
      weightsPerLabelAndFeature = new SparseMatrix(weightsPerLabel.size(), weightsPerFeature.size());
      for (int label = 0; label < weightsPerLabelAndFeature.numRows(); label++) {
        weightsPerLabelAndFeature.assignRow(label, VectorWritable.readVector(in));
      }
    } finally {
      Closeables.closeQuietly(in);
View Full Code Here
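
The matching write side is not shown on this page; below is a sketch of what it could look like, mirroring the read order above. The field names are taken from the snippet, but the class and method themselves are assumptions, not code from the original model class.

import java.io.DataOutput;
import java.io.IOException;

import org.apache.mahout.math.Matrix;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.VectorWritable;

final class NaiveBayesModelWriteSketch {
  // Mirrors the read order above: alphaI, the three vectors, then one matrix row per label.
  static void write(DataOutput out, float alphaI, Vector weightsPerFeature,
      Vector weightsPerLabel, Vector perLabelThetaNormalizer,
      Matrix weightsPerLabelAndFeature) throws IOException {
    out.writeFloat(alphaI);
    VectorWritable.writeVector(out, weightsPerFeature);
    VectorWritable.writeVector(out, weightsPerLabel);
    VectorWritable.writeVector(out, perLabelThetaNormalizer);
    for (int label = 0; label < weightsPerLabelAndFeature.numRows(); label++) {
      VectorWritable.writeVector(out, weightsPerLabelAndFeature.viewRow(label));
    }
  }
}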

    }

    Preconditions.checkNotNull(scoresPerFeature);
    Preconditions.checkNotNull(scoresPerLabel);

    // one row of per-feature scores for every label
    Matrix scoresPerLabelAndFeature = new SparseMatrix(scoresPerLabel.size(), scoresPerFeature.size());
    for (Pair<IntWritable,VectorWritable> entry : new SequenceFileDirIterable<IntWritable,VectorWritable>(
        new Path(base, TrainNaiveBayesJob.SUMMED_OBSERVATIONS), PathType.LIST, PathFilters.partFilter(), conf)) {
      scoresPerLabelAndFeature.assignRow(entry.getFirst().get(), entry.getSecond().get());
    }

    Vector perlabelThetaNormalizer = null;
    for (Pair<Text,VectorWritable> entry : new SequenceFileDirIterable<Text,VectorWritable>(
        new Path(base, TrainNaiveBayesJob.THETAS), PathType.LIST, PathFilters.partFilter(), conf)) {
View Full Code Here

    Matrix em = model.getEmissionMatrix();
    Matrix tr = model.getTransitionMatrix();
    // allocate the sparse data structures
    RandomAccessSparseVector sparseIp = new RandomAccessSparseVector(model
        .getNrOfHiddenStates());
    SparseMatrix sparseEm = new SparseMatrix(new int[]{
        model.getNrOfHiddenStates(), model.getNrOfOutputStates()});
    SparseMatrix sparseTr = new SparseMatrix(new int[]{
        model.getNrOfHiddenStates(), model.getNrOfHiddenStates()});
    // now transfer the values
    for (int i = 0; i < model.getNrOfHiddenStates(); ++i) {
      double value = ip.getQuick(i);
      if (value > threshold) {
        sparseIp.setQuick(i, value);
      }
      for (int j = 0; j < model.getNrOfHiddenStates(); ++j) {
        value = tr.getQuick(i, j);
        if (value > threshold) {
          sparseTr.setQuick(i, j, value);
        }
      }

      for (int j = 0; j < model.getNrOfOutputStates(); ++j) {
        value = em.getQuick(i, j);
View Full Code Here
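
The pattern above — copying only the entries that exceed a cut-off into sparse structures — can be expressed as a small stand-alone helper. This is an illustrative sketch, not code from the model class.

import org.apache.mahout.math.Matrix;
import org.apache.mahout.math.SparseMatrix;

final class Sparsify {
  // Returns a sparse copy of 'dense' that keeps only entries strictly above 'threshold'.
  static Matrix sparsify(Matrix dense, double threshold) {
    Matrix sparse = new SparseMatrix(dense.numRows(), dense.numCols());
    for (int i = 0; i < dense.numRows(); i++) {
      for (int j = 0; j < dense.numCols(); j++) {
        double value = dense.getQuick(i, j);
        if (value > threshold) {
          sparse.setQuick(i, j, value);
        }
      }
    }
    return sparse;
  }
}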

      }
    }
  }

  public Matrix buildPreferenceVectorForUser(long realId) throws TasteException {
    // a 1 x numItems row vector holding this user's preference values
    Matrix ids = new SparseMatrix(1, dataModel.getNumItems());
    for (Preference pref : dataModel.getPreferencesFromUser(realId)) {
      int itemidx = itemIndex(pref.getItemID());
      ids.setQuick(0, itemidx, pref.getValue());
    }
    return ids;
  }
View Full Code Here
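
Once built, the 1 x numItems row can be scanned without touching the zero cells. A hedged usage sketch follows; depending on the Mahout version the non-zero iterator is exposed as iterateNonZero() (older releases) or nonZeroes(), and the sketch assumes the former.

import java.util.Iterator;

import org.apache.mahout.math.Matrix;
import org.apache.mahout.math.Vector;

final class PreferenceVectorUsage {
  // Prints only the items the user actually rated; zero cells are skipped.
  static void dumpPreferences(Matrix preferenceVector) {
    Iterator<Vector.Element> it = preferenceVector.viewRow(0).iterateNonZero();
    while (it.hasNext()) {
      Vector.Element e = it.next();
      System.out.println("item index " + e.index() + " -> preference " + e.get());
    }
  }
}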

    return ids;
  }

  private Matrix buildConfidenceMatrixForItem(long itemId) throws TasteException {
    PreferenceArray prefs = dataModel.getPreferencesForItem(itemId);
    // diagonal entries mark the users who expressed a preference for this item
    Matrix confidenceMatrix = new SparseMatrix(dataModel.getNumUsers(), dataModel.getNumUsers());
    for (Preference pref : prefs) {
      long userId = pref.getUserID();
      int userIdx = userIndex(userId);
      confidenceMatrix.setQuick(userIdx, userIdx, 1);
    }
    return new DiagonalMatrix(confidenceMatrix);
  }
View Full Code Here

    return new DiagonalMatrix(confidenceMatrix);
  }

  private Matrix buildConfidenceMatrixForUser(long userId) throws TasteException {
    PreferenceArray prefs = dataModel.getPreferencesFromUser(userId);
    Matrix confidenceMatrix = new SparseMatrix(dataModel.getNumItems(), dataModel.getNumItems());
    for (Preference pref : prefs) {
      long itemId = pref.getItemID();
      int itemIdx = itemIndex(itemId);
      confidenceMatrix.setQuick(itemIdx, itemIdx, 1);
    }
    return new DiagonalMatrix(confidenceMatrix);
  }
View Full Code Here
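
A related way to build such a diagonal confidence matrix, shown here only as an illustrative alternative, is to collect the diagonal into a sparse vector and pass it to DiagonalMatrix directly instead of allocating an n x n SparseMatrix first.

import org.apache.mahout.math.DiagonalMatrix;
import org.apache.mahout.math.Matrix;
import org.apache.mahout.math.RandomAccessSparseVector;
import org.apache.mahout.math.Vector;

final class ConfidenceMatrixSketch {
  // Builds a diagonal matrix with 1.0 at the given indices and 0.0 elsewhere.
  static Matrix confidenceDiagonal(int n, int[] activeIndices) {
    Vector diagonal = new RandomAccessSparseVector(n);
    for (int idx : activeIndices) {
      diagonal.setQuick(idx, 1.0);
    }
    return new DiagonalMatrix(diagonal);
  }
}

This keeps only the n diagonal values in memory; whether it is interchangeable with the snippet above depends on how the surrounding code uses the returned matrix.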

    }
    return new DiagonalMatrix(confidenceMatrix);
  }

  private Matrix buildPreferenceVectorForItem(long realId) throws TasteException {
    Matrix ids = new SparseMatrix(1, dataModel.getNumUsers());
    for (Preference pref : dataModel.getPreferencesForItem(realId)) {
      int useridx = userIndex(pref.getUserID());
      ids.setQuick(0, useridx, pref.getValue());
    }
    return ids;
  }
View Full Code Here

    Matrix em = model.getEmissionMatrix();
    Matrix tr = model.getTransitionMatrix();
    // allocate the sparse data structures
    RandomAccessSparseVector sparseIp = new RandomAccessSparseVector(model
        .getNrOfHiddenStates());
    SparseMatrix sparseEm = new SparseMatrix(model.getNrOfHiddenStates(), model.getNrOfOutputStates());
    SparseMatrix sparseTr = new SparseMatrix(model.getNrOfHiddenStates(), model.getNrOfHiddenStates());
    // now transfer the values
    for (int i = 0; i < model.getNrOfHiddenStates(); ++i) {
      double value = ip.getQuick(i);
      if (value > threshold) {
        sparseIp.setQuick(i, value);
      }
      for (int j = 0; j < model.getNrOfHiddenStates(); ++j) {
        value = tr.getQuick(i, j);
        if (value > threshold) {
          sparseTr.setQuick(i, j, value);
        }
      }

      for (int j = 0; j < model.getNrOfOutputStates(); ++j) {
        value = em.getQuick(i, j);
View Full Code Here

    }

    Preconditions.checkNotNull(scoresPerFeature);
    Preconditions.checkNotNull(scoresPerLabel);

    Matrix scoresPerLabelAndFeature = new SparseMatrix(scoresPerLabel.size(), scoresPerFeature.size());
    for (Pair<IntWritable,VectorWritable> entry : new SequenceFileDirIterable<IntWritable,VectorWritable>(
        new Path(base, TrainNaiveBayesJob.SUMMED_OBSERVATIONS), PathType.LIST, PathFilters.partFilter(), conf)) {
      scoresPerLabelAndFeature.assignRow(entry.getFirst().get(), entry.getSecond().get());
    }

    Vector perlabelThetaNormalizer = scoresPerLabel.like();
    /* for (Pair<Text,VectorWritable> entry : new SequenceFileDirIterable<Text,VectorWritable>(
        new Path(base, TrainNaiveBayesJob.THETAS), PathType.LIST, PathFilters.partFilter(), conf)) {
View Full Code Here
