Package org.apache.hama.commons.math

Examples of org.apache.hama.commons.math.DenseDoubleMatrix
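
Most of the snippets on this page come from multilayer-perceptron and matrix-factorization training code built on org.apache.hama.commons.math. As a starting point, here is a minimal, self-contained sketch of the two DenseDoubleMatrix constructors those snippets rely on: the (rows, cols) form, which yields a zero-filled matrix, and the DoubleVector[] form, which, as the factorization snippet at the bottom of the page implies, treats each vector as a row. The class name below is made up for illustration.

import org.apache.hama.commons.math.DenseDoubleMatrix;
import org.apache.hama.commons.math.DenseDoubleVector;
import org.apache.hama.commons.math.DoubleVector;

public class DenseDoubleMatrixBasics {

  public static void main(String[] args) {
    // (rows, cols) constructor: a 3 x 2 matrix, zero-filled by default
    DenseDoubleMatrix zeros = new DenseDoubleMatrix(3, 2);
    System.out.println(zeros.getRowCount() + " x " + zeros.getColumnCount()); // 3 x 2

    // DoubleVector[] constructor: build a matrix from dense vectors
    DoubleVector[] rows = new DoubleVector[] {
        new DenseDoubleVector(new double[] { 1.0, 2.0 }),
        new DenseDoubleVector(new double[] { 3.0, 4.0 }) };
    DenseDoubleMatrix fromVectors = new DenseDoubleMatrix(rows);
    System.out.println(fromVectors.get(1, 0)); // 3.0, assuming row-wise construction
  }
}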


    // allocate one zero-filled update matrix per weight matrix in the model
    DoubleMatrix[] weightUpdates = new DoubleMatrix[this.inMemoryModel.weightMatrixList
        .size()];
    for (int i = 0; i < weightUpdates.length; ++i) {
      int row = this.inMemoryModel.weightMatrixList.get(i).getRowCount();
      int col = this.inMemoryModel.weightMatrixList.get(i).getColumnCount();
      weightUpdates[i] = new DenseDoubleMatrix(row, col);
    }

    // continue to train
    double avgTrainingError = 0.0;
    LongWritable key = new LongWritable();
View Full Code Here


  private void initializeWeightMatrix() {
    this.weightMatrice = new DenseDoubleMatrix[this.numberOfLayers - 1];
    // each layer contains one bias neuron
    for (int i = 0; i < this.numberOfLayers - 1; ++i) {
      // add weights for bias
      this.weightMatrice[i] = new DenseDoubleMatrix(this.layerSizeArray[i] + 1,
          this.layerSizeArray[i + 1]);

      this.weightMatrice[i].applyToElements(new DoubleFunction() {

        private final Random rnd = new Random();
View Full Code Here
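
The snippet above (and the layer-construction snippet further down the page) initializes weights by passing an anonymous DoubleFunction to applyToElements. Below is a self-contained sketch of that pattern with a made-up 5 x 3 weight matrix and the same random-in-[-0.5, 0.5) scheme; the class name is hypothetical, DoubleFunction is assumed to be the functor from org.apache.hama.commons.math used above, and the return value of applyToElements is ignored on the assumption that it mutates the matrix in place, as the snippets do.

import java.util.Random;

import org.apache.hama.commons.math.DenseDoubleMatrix;
import org.apache.hama.commons.math.DoubleFunction;

public class RandomWeightInitSketch {

  public static void main(String[] args) {
    // 4 inputs plus one bias row, 3 outputs
    DenseDoubleMatrix weights = new DenseDoubleMatrix(4 + 1, 3);

    final Random rnd = new Random();
    // overwrite every element with a value in [-0.5, 0.5)
    weights.applyToElements(new DoubleFunction() {
      @Override
      public double apply(double value) {
        return rnd.nextDouble() - 0.5;
      }

      @Override
      public double applyDerivative(double value) {
        throw new UnsupportedOperationException("derivative not needed for initialization");
      }
    });

    System.out.println(weights.getRowCount() + " x " + weights.getColumnCount()); // 5 x 3
  }
}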

  private void initializePrevWeightUpdateMatrix() {
    this.prevWeightUpdateMatrices = new DenseDoubleMatrix[this.numberOfLayers - 1];
    for (int i = 0; i < this.prevWeightUpdateMatrices.length; ++i) {
      int row = this.layerSizeArray[i] + 1;
      int col = this.layerSizeArray[i + 1];
      this.prevWeightUpdateMatrices[i] = new DenseDoubleMatrix(row, col);
    }
  }
View Full Code Here

  DenseDoubleMatrix[] trainByInstance(DoubleVector trainingInstance)
      throws Exception {
    // initialize weight update matrices
    DenseDoubleMatrix[] weightUpdateMatrices = new DenseDoubleMatrix[this.layerSizeArray.length - 1];
    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
      weightUpdateMatrices[m] = new DenseDoubleMatrix(
          this.layerSizeArray[m] + 1, this.layerSizeArray[m + 1]);
    }

    if (trainingInstance == null) {
      return weightUpdateMatrices;
    }

    // transform the features (exclude the labels) to new space
    double[] trainingVec = trainingInstance.toArray();
    double[] trainingFeature = this.featureTransformer.transform(
        trainingInstance.sliceUnsafe(0, this.layerSizeArray[0] - 1)).toArray();
    double[] trainingLabels = Arrays.copyOfRange(trainingVec,
        this.layerSizeArray[0], trainingVec.length);

    DoubleVector trainingFeatureVec = new DenseDoubleVector(trainingFeature);
    List<double[]> outputCache = this.outputInternal(trainingFeatureVec);

    // calculate the delta of output layer
    double[] delta = new double[this.layerSizeArray[this.layerSizeArray.length - 1]];
    double[] outputLayerOutput = outputCache.get(outputCache.size() - 1);
    double[] lastHiddenLayerOutput = outputCache.get(outputCache.size() - 2);

    DenseDoubleMatrix prevWeightUpdateMatrix = this.prevWeightUpdateMatrices[this.prevWeightUpdateMatrices.length - 1];
    for (int j = 0; j < delta.length; ++j) {
      delta[j] = this.costFunction.applyDerivative(trainingLabels[j],
          outputLayerOutput[j]);
      // add regularization term
      if (this.regularization != 0.0) {
        double derivativeRegularization = 0.0;
        DenseDoubleMatrix weightMatrix = this.weightMatrice[this.weightMatrice.length - 1];
        for (int k = 0; k < this.layerSizeArray[this.layerSizeArray.length - 1]; ++k) {
          derivativeRegularization += weightMatrix.get(k, j);
        }
        derivativeRegularization /= this.layerSizeArray[this.layerSizeArray.length - 1];
        delta[j] += this.regularization * derivativeRegularization;
      }

View Full Code Here
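
The regularization branch above reads one column of the last weight matrix with get(row, col) and averages the entries. The standalone sketch below shows just that element-access pattern; the class and method names are invented, and it divides by getRowCount() purely for illustration, whereas the snippet above divides by the output-layer size.

import org.apache.hama.commons.math.DenseDoubleMatrix;

public class ColumnAverageSketch {

  // average of column j, read element by element with get(row, col)
  static double columnAverage(DenseDoubleMatrix m, int j) {
    double sum = 0.0;
    for (int k = 0; k < m.getRowCount(); ++k) {
      sum += m.get(k, j);
    }
    return sum / m.getRowCount();
  }

  public static void main(String[] args) {
    DenseDoubleMatrix weights = new DenseDoubleMatrix(4, 2); // zero-filled
    System.out.println(columnAverage(weights, 1)); // 0.0 for a fresh matrix
  }
}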

  /** Creates zero-valued weight matrices, one per pair of adjacent layers. */
  private DenseDoubleMatrix[] getZeroWeightMatrices() {
    DenseDoubleMatrix[] weightUpdateCache = new DenseDoubleMatrix[this.layerSizeArray.length - 1];
    // initialize weight matrix each layer
    for (int i = 0; i < weightUpdateCache.length; ++i) {
      weightUpdateCache[i] = new DenseDoubleMatrix(this.layerSizeArray[i] + 1,
          this.layerSizeArray[i + 1]);
    }
    return weightUpdateCache;
  }
View Full Code Here

      int sizePrevLayer = this.layerSizeList.get(layerIdx - 1);
      // the row count equals the size of the current layer (excluding the
      // bias neuron for non-final layers) and the column count equals the
      // size of the previous layer
      int row = isFinalLayer ? size : size - 1;
      int col = sizePrevLayer;
      DoubleMatrix weightMatrix = new DenseDoubleMatrix(row, col);
      // initialize weights
      weightMatrix.applyToElements(new DoubleFunction() {
        @Override
        public double apply(double value) {
          return RandomUtils.nextDouble() - 0.5;
        }

        @Override
        public double applyDerivative(double value) {
          throw new UnsupportedOperationException("");
        }
      });
      this.weightMatrixList.add(weightMatrix);
      this.prevWeightUpdatesList.add(new DenseDoubleMatrix(row, col));
      this.squashingFunctionList.add(squashingFunction);
    }
    return layerIdx;
  }
View Full Code Here

    this.weightMatrixList = Lists.newArrayList();
    this.prevWeightUpdatesList = Lists.newArrayList();
    for (int i = 0; i < numOfMatrices; ++i) {
      DoubleMatrix matrix = MatrixWritable.read(input);
      this.weightMatrixList.add(matrix);
      this.prevWeightUpdatesList.add(new DenseDoubleMatrix(
          matrix.getRowCount(), matrix.getColumnCount()));
    }

  }
View Full Code Here
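
The read path above uses the static MatrixWritable.read(DataInput) helper and then pre-allocates a zero DenseDoubleMatrix of the same shape for the previous-update state. The round-trip sketch below additionally assumes that MatrixWritable lives in org.apache.hama.commons.io and exposes a matching static write(DoubleMatrix, DataOutput); only read(...) is confirmed by the snippet, so treat the write call as an assumption.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hama.commons.io.MatrixWritable; // assumed package
import org.apache.hama.commons.math.DenseDoubleMatrix;
import org.apache.hama.commons.math.DoubleMatrix;

public class MatrixRoundTripSketch {

  public static void main(String[] args) throws IOException {
    DoubleMatrix original = new DenseDoubleMatrix(2, 3);

    // serialize (write(...) is assumed; only read(...) appears in the snippet above)
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    MatrixWritable.write(original, new DataOutputStream(bytes));

    // deserialize with the static read(...) used above
    DoubleMatrix copy = MatrixWritable.read(
        new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

    System.out.println(copy.getRowCount() + " x " + copy.getColumnCount()); // 2 x 3
  }
}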

    DoubleVector output = internalResults.get(internalResults.size() - 1);
    // initialize weight update matrices
    DenseDoubleMatrix[] weightUpdateMatrices = new DenseDoubleMatrix[this.weightMatrixList
        .size()];
    for (int m = 0; m < weightUpdateMatrices.length; ++m) {
      weightUpdateMatrices[m] = new DenseDoubleMatrix(this.weightMatrixList
          .get(m).getRowCount(), this.weightMatrixList.get(m).getColumnCount());
    }
    DoubleVector deltaVec = new DenseDoubleVector(
        this.layerSizeList.get(this.layerSizeList.size() - 1));
View Full Code Here

    int offset = 1;
    while (offset < matrix_size) {
      slices.add(vc.slice(offset, matrix_rank));
      offset += matrix_rank;
    }
    // use the typed toArray overload: casting the Object[] returned by the
    // no-arg toArray() to DoubleVector[] would fail with a ClassCastException
    DoubleMatrix res = new DenseDoubleMatrix(slices.toArray(new DoubleVector[0]));
    return res;
  }
View Full Code Here
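
For reference, here is a standalone version of the slice-to-matrix conversion above with hard-coded vectors. The typed toArray(new DoubleVector[0]) call avoids the ClassCastException the original untyped cast would raise, and the row-wise reading of the DoubleVector[] constructor is inferred from the factorization snippet below.

import java.util.ArrayList;
import java.util.List;

import org.apache.hama.commons.math.DenseDoubleMatrix;
import org.apache.hama.commons.math.DenseDoubleVector;
import org.apache.hama.commons.math.DoubleMatrix;
import org.apache.hama.commons.math.DoubleVector;

public class SlicesToMatrixSketch {

  public static void main(String[] args) {
    List<DoubleVector> slices = new ArrayList<DoubleVector>();
    slices.add(new DenseDoubleVector(new double[] { 1.0, 2.0 }));
    slices.add(new DenseDoubleVector(new double[] { 3.0, 4.0 }));
    slices.add(new DenseDoubleVector(new double[] { 5.0, 6.0 }));

    // typed toArray returns DoubleVector[] directly
    DoubleMatrix m = new DenseDoubleMatrix(slices.toArray(new DoubleVector[0]));
    System.out.println(m.getRowCount() + " x " + m.getColumnCount()); // 3 x 2
  }
}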

    if (isAvailableItemFeature) {
      DoubleVector[] Mtransposed = new DenseDoubleVector[rank];
      for (int i = 0; i < rank; i++) {
        Mtransposed[i] = e.itemFeatureFactorized.getRowVector(i).multiply(aal_ml_xa.get(i));
      }
      tmp = new DenseDoubleMatrix(Mtransposed);
      tmp = tmp.multiply(2 * TETTA * scoreDifference);
      res.itemFeatureFactorized = e.itemFeatureFactorized.add(tmp);
    }

    if (isAvailableUserFeature) {
      DoubleVector[] Mtransposed = new DenseDoubleVector[rank];
      for (int i = 0; i < rank; i++) {
        Mtransposed[i] = e.userFeatureFactorized.getRowVector(i).multiply(bbl_vl_yb.get(i));
      }
      tmp = new DenseDoubleMatrix(Mtransposed);
      tmp = tmp.multiply(2 * TETTA * scoreDifference);
      res.userFeatureFactorized = e.userFeatureFactorized.add(tmp);
    }
    return res;
  }
View Full Code Here
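
The factor update above scales a matrix with multiply(double) and folds it into the existing factors with add(DoubleMatrix). A minimal sketch of that update step follows; ETA, scoreDifference, and the 2 x 2 shapes are invented for illustration and stand in for the snippet's TETTA constant and real gradient matrices.

import org.apache.hama.commons.math.DenseDoubleMatrix;
import org.apache.hama.commons.math.DoubleMatrix;

public class GradientStepSketch {

  private static final double ETA = 0.01; // hypothetical learning rate

  public static void main(String[] args) {
    DoubleMatrix factors = new DenseDoubleMatrix(2, 2);  // current factor matrix
    DoubleMatrix gradient = new DenseDoubleMatrix(2, 2); // gradient of the same shape

    double scoreDifference = 0.5; // hypothetical prediction error

    // scale the gradient and add it to the factors, as in the snippet above
    DoubleMatrix scaled = gradient.multiply(2 * ETA * scoreDifference);
    DoubleMatrix updated = factors.add(scaled);

    System.out.println(updated.getRowCount() + " x " + updated.getColumnCount()); // 2 x 2
  }
}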
