Package: org.apache.hama.commons.math

Examples of org.apache.hama.commons.math.DenseDoubleVector


      if (hasKey) {
        columnLength = columnLength - 1;
        indexPos++;
      }

      DenseDoubleVector vec = new DenseDoubleVector(columnLength);
      for (int j = 0; j < columnLength; j++) {
        vec.set(j, Double.parseDouble(split[j + indexPos]));
      }

      VectorWritable vector;
      if (hasKey) {
        NamedDoubleVector named = new NamedDoubleVector(split[0], vec);
View Full Code Here


      double[] arr = new double[dimension];
      for (int d = 0; d < dimension; d++) {
        arr[d] = r.nextInt(count);
      }
      VectorWritable vector = new VectorWritable(new DenseDoubleVector(arr));
      dataWriter.append(vector, value);
      if (k > i) {
        centerWriter.append(vector, value);
      }
    }
View Full Code Here

    }
    return res;
  }

  private VectorWritable convertMatrixToVector(DoubleMatrix mat) {
    DoubleVector res = new DenseDoubleVector(mat.getRowCount()*mat.getColumnCount()+1);
    int idx = 0;
    res.set(idx, MATRIX_RANK);
    idx++;
    for (int i=0; i<mat.getRowCount(); i++) {
      for (int j=0; j<mat.getColumnCount(); j++) {
        res.set(idx, mat.get(i, j));
        idx++;
      }
    }
    return new VectorWritable(res);
  }
View Full Code Here

      itemId = (Text) msg.get(OnlineCF.Settings.MSG_ITEM_MATRIX);
      value = (VectorWritable) msg.get(OnlineCF.Settings.MSG_VALUE);
      senderId = (IntWritable) msg.get(OnlineCF.Settings.MSG_SENDER_ID);

      if (normalizedValues.containsKey(itemId) == false) {
        DenseDoubleVector tmp = new DenseDoubleVector(MATRIX_RANK, 0.0);
        normalizedValues.put(itemId, tmp);
        normalizedValueCount.put(itemId, 0);
        senderList.put(itemId, new LinkedList<IntWritable>());
      }
View Full Code Here

        // parse as <k:userId, v:(itemId, score)>
        String itemId = Long.toString((long)value.getVector().get(0));
        String score = Double.toString(value.getVector().get(1));

        if (usersMatrix.containsKey(actualId) == false) {
          DenseDoubleVector vals = new DenseDoubleVector(MATRIX_RANK);
          for (int i=0; i<MATRIX_RANK; i++) {
            vals.set(i, rnd.nextDouble());
          }
          VectorWritable rndValues = new VectorWritable(vals);
          usersMatrix.put(actualId, rndValues);
        }

        if (itemsMatrix.containsKey(itemId) == false) {
          DenseDoubleVector vals = new DenseDoubleVector(MATRIX_RANK);
          for (int i=0; i<MATRIX_RANK; i++) {
            vals.set(i, rnd.nextDouble());
          }
          VectorWritable rndValues = new VectorWritable(vals);
          itemsMatrix.put(itemId, rndValues);
        }
        preferences.add(new Preference<String, String>(actualId, itemId, Double.parseDouble(score)));
View Full Code Here

  private void updateTheta(double[] thetaDiff) {
    double[] newTheta = new double[theta.getLength()];
    for (int j = 0; j < theta.getLength(); j++) {
      newTheta[j] = theta.get(j) - thetaDiff[j] * alpha;
    }
    theta = new DenseDoubleVector(newTheta);
  }
View Full Code Here

  private void broadcastVector(
      BSPPeer<VectorWritable, DoubleWritable, VectorWritable, DoubleWritable, VectorWritable> peer,
      double[] vector) throws IOException {
    for (String peerName : peer.getAllPeerNames()) {
      if (!peerName.equals(peer.getPeerName())) { // avoid sending to oneself
        peer.send(peerName, new VectorWritable(new DenseDoubleVector(vector)));
      }
    }
  }
View Full Code Here

          BSPPeer<VectorWritable, DoubleWritable, VectorWritable, DoubleWritable, VectorWritable> peer)
      throws IOException, SyncException, InterruptedException {
    if (theta == null) {
      if (master) {
        int size = getXSize(peer);
        theta = new DenseDoubleVector(size, peer.getConfiguration().getInt(
            INITIAL_THETA_VALUES, 1));
        broadcastVector(peer, theta.toArray());
        if (log.isDebugEnabled()) {
          log.debug("{}: sending theta", peer.getPeerName());
        }
View Full Code Here

      e.printStackTrace();
    }

    // initial the mlp with existing model meta-data and get the output
    MultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(modelPath);
    DoubleVector input = new DenseDoubleVector(new double[] { 1, 2, 3 });
    try {
      DoubleVector result = mlp.output(input);
      assertArrayEquals(new double[] { 0.6636557, 0.7009963, 0.7213835 },
          result.toArray(), 0.0001);
    } catch (Exception e1) {
View Full Code Here

   */
  @Test
  public void testTrainWithSquaredError() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.3;
    double regularization = 0.02; // no regularization
    double momentum = 0; // no momentum
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "SquaredError";
    int[] layerSizeArray = new int[] { 2, 5, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 100000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        double expected = trainingData[i].toArray()[2];
        double actual = mlp.output(testVec).toArray()[0];
        if (expected < 0.5 && actual >= 0.5 || expected >= 0.5 && actual < 0.5) {
          Log.info("Neural network failes to lear the XOR.");
View Full Code Here

TOP

Related Classes of org.apache.hama.commons.math.DenseDoubleVector

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.