Examples of org.apache.hama.commons.math.DenseDoubleVector

  @Test
  public void testTrainWithSquaredError() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.3;
    double regularization = 0.02; // a small amount of regularization
    double momentum = 0; // no momentum
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "SquaredError";
    int[] layerSizeArray = new int[] { 2, 5, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 100000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        double expected = trainingData[i].toArray()[2];
        double actual = mlp.output(testVec).toArray()[0];
        if ((expected < 0.5 && actual >= 0.5) || (expected >= 0.5 && actual < 0.5)) {
          Log.info("Neural network fails to learn the XOR.");
          // ... (truncated)
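
A note on the API used in the evaluation loop above: slice(2) is assumed to
return the first two elements (the XOR inputs), with the third element being
the label, as the test's usage implies. A minimal sketch:

    DoubleVector sample = new DenseDoubleVector(new double[] { 1, 0, 1 });
    DoubleVector inputs = sample.slice(2);    // { 1, 0 } -> the two XOR inputs
    double label = sample.toArray()[2];       // 1 -> the expected XOR output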

Examples of org.apache.hama.commons.math.DenseDoubleVector

  @Test
  public void testTrainWithCrossEntropy() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.3;
    double regularization = 0.0; // no regularization
    double momentum = 0; // no momentum
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "CrossEntropy";
    int[] layerSizeArray = new int[] { 2, 7, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 50000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        double expected = trainingData[i].toArray()[2];
        double actual = mlp.output(testVec).toArray()[0];
        if ((expected < 0.5 && actual >= 0.5) || (expected >= 0.5 && actual < 0.5)) {
          Log.info("Neural network fails to learn the XOR.");
          // ... (truncated)
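
The pass/fail check is repeated verbatim across these tests; a hypothetical
helper (not part of the Hama source) capturing the 0.5-threshold comparison:

    // Hypothetical helper: does a sigmoid output disagree with a binary label?
    static boolean misclassified(double expected, double actual) {
      return (expected < 0.5 && actual >= 0.5) || (expected >= 0.5 && actual < 0.5);
    }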

Examples of org.apache.hama.commons.math.DenseDoubleVector

  @Test
  public void testWithRegularization() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.3;
    double regularization = 0.02; // regularization should be a tiny number
    double momentum = 0; // no momentum
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "CrossEntropy";
    int[] layerSizeArray = new int[] { 2, 7, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 20000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        double expected = trainingData[i].toArray()[2];
        double actual = mlp.output(testVec).toArray()[0];
        if ((expected < 0.5 && actual >= 0.5) || (expected >= 0.5 && actual < 0.5)) {
          Log.info("Neural network fails to learn the XOR.");
          // ... (truncated)
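
As an aside on what the regularization parameter does, here is a generic
sketch (illustrative values, not the Hama implementation) of how an L2
weight-decay penalty typically enters a single weight update:

    double learningRate = 0.3, lambda = 0.02;   // lambda = 'regularization' above
    double w = 0.8, grad = 0.1;                 // a weight and its error gradient
    w -= learningRate * (grad + lambda * w);    // the decay term pulls w toward 0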

Examples of org.apache.hama.commons.math.DenseDoubleVector

  @Test
  public void testWithMomentum() {
    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    // set parameters
    double learningRate = 0.3;
    double regularization = 0.02; // regularization should be a tiny number
    double momentum = 0.5; // momentum enabled for this test
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "CrossEntropy";
    int[] layerSizeArray = new int[] { 2, 7, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    try {
      // train by multiple instances
      Random rnd = new Random();
      for (int i = 0; i < 5000; ++i) {
        DenseDoubleMatrix[] weightUpdates = mlp
            .trainByInstance(trainingData[rnd.nextInt(4)]);
        mlp.updateWeightMatrices(weightUpdates);
      }

      // System.out.printf("Weight matrices: %s\n",
      // mlp.weightsToString(mlp.getWeightMatrices()));
      for (int i = 0; i < trainingData.length; ++i) {
        DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i]
            .slice(2);
        double expected = trainingData[i].toArray()[2];
        double actual = mlp.output(testVec).toArray()[0];
        if ((expected < 0.5 && actual >= 0.5) || (expected >= 0.5 && actual < 0.5)) {
          Log.info("Neural network fails to learn the XOR.");
          // ... (truncated)
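
Likewise, a generic sketch of a classical momentum update (illustrative, not
the Hama implementation), which helps explain why this test converges in
5,000 iterations rather than the 20,000-100,000 used above:

    double momentum = 0.5, learningRate = 0.3;
    double velocity = 0;                        // persists across updates
    double grad = 0.1;                          // current error gradient
    velocity = momentum * velocity - learningRate * grad;
    // w += velocity;                           // step applied to the weight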

Examples of org.apache.hama.commons.math.DenseDoubleVector

    String strDataPath = "/tmp/xor-training-by-xor";
    Path dataPath = new Path(strDataPath);

    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };

    try {
      URI uri = new URI(strDataPath);
      FileSystem fs = FileSystem.get(uri, conf);
      fs.delete(dataPath, true);
      if (!fs.exists(dataPath)) {
        fs.createNewFile(dataPath);
        SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
            dataPath, LongWritable.class, VectorWritable.class);

        for (int i = 0; i < 1000; ++i) {
          VectorWritable vecWritable = new VectorWritable(trainingData[i % 4]);
          writer.append(new LongWritable(i), vecWritable);
        }
        writer.close();
      }

    } catch (Exception e) {
      e.printStackTrace();
    }

    // begin training
    String modelPath = "/tmp/xorModel-training-by-xor.data";
    double learningRate = 0.6;
    double regularization = 0.02; // a small amount of regularization
    double momentum = 0.3; // momentum enabled
    String squashingFunctionName = "Tanh";
    String costFunctionName = "SquaredError";
    int[] layerSizeArray = new int[] { 2, 5, 1 };
    SmallMultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(learningRate,
        regularization, momentum, squashingFunctionName, costFunctionName,
        layerSizeArray);

    Map<String, String> trainingParams = new HashMap<String, String>();
    trainingParams.put("training.iteration", "2000");
    trainingParams.put("training.mode", "minibatch.gradient.descent");
    trainingParams.put("training.batch.size", "100");
    trainingParams.put("tasks", "3");
    trainingParams.put("modelPath", modelPath);

    try {
      mlp.train(dataPath, trainingParams);
    } catch (Exception e) {
      e.printStackTrace();
    }

    // test the model
    for (int i = 0; i < trainingData.length; ++i) {
      DenseDoubleVector testVec = (DenseDoubleVector) trainingData[i].slice(2);
      try {
        double expected = trainingData[i].toArray()[2];
        double actual = mlp.output(testVec).toArray()[0];
        if ((expected < 0.5 && actual >= 0.5) || (expected >= 0.5 && actual < 0.5)) {
          Log.info("Neural network fails to learn the XOR.");
          // ... (truncated)
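
To sanity-check the generated training file, the SequenceFile can be read
back with the classic (now deprecated) Reader constructor that matches the
Writer above; VectorWritable#getVector is assumed here:

    SequenceFile.Reader reader = new SequenceFile.Reader(fs, dataPath, conf);
    LongWritable key = new LongWritable();
    VectorWritable value = new VectorWritable();
    while (reader.next(key, value)) {
      DoubleVector vec = value.getVector();   // assumes VectorWritable#getVector
      // ... inspect vec
    }
    reader.close();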

Examples of org.apache.hama.commons.math.DenseDoubleVector

    String strDataPath = "/tmp/xor-training-by-xor";
    Path dataPath = new Path(strDataPath);

    // generate training data
    DoubleVector[] trainingData = new DenseDoubleVector[] {
        new DenseDoubleVector(new double[] { 0, 0, 0 }),
        new DenseDoubleVector(new double[] { 0, 1, 1 }),
        new DenseDoubleVector(new double[] { 1, 0, 1 }),
        new DenseDoubleVector(new double[] { 1, 1, 0 }) };
   
    try {
      URI uri = new URI(strDataPath);
      FileSystem fs = FileSystem.get(uri, conf);
      fs.delete(dataPath, true);
      // ... (truncated)

Examples of org.apache.hama.commons.math.DenseDoubleVector

      String[] split = s.split(" ");
      double[] dar = new double[split.length];
      for (int i = 0; i < split.length; i++) {
        dar[i] = Double.parseDouble(split[i]);
      }
      return new DenseDoubleVector(dar);
    }
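
For example, wrapping the loop above in a hypothetical parse(String) helper,
a line of space-separated numbers becomes a vector:

    DoubleVector v = parse("0.5 1 2");        // hypothetical wrapper for the loop
    double first = v.toArray()[0];            // 0.5
    int len = v.getLength();                  // 3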

Examples of org.apache.hama.commons.math.DenseDoubleVector

        String[] tokens = line.trim().split(",");
        double[] vals = new double[tokens.length];
        for (int i = 0; i < tokens.length; ++i) {
          vals[i] = Double.parseDouble(tokens[i]);
        }
        DoubleVector instance = new DenseDoubleVector(vals);
        DoubleVector result = ann.getOutput(instance);
        double[] arrResult = result.toArray();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < arrResult.length; ++i) {
          sb.append(arrResult[i]);
          // ... (truncated)
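
The loop is cut off mid-build, but it is evidently concatenating the network's
output values into a string; a compact equivalent, assuming a comma separator
(which the comma-split input format suggests):

    StringBuilder out = new StringBuilder();
    for (int i = 0; i < arrResult.length; ++i) {
      if (i > 0) out.append(',');             // separator is an assumption
      out.append(arrResult[i]);
    }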

Examples of org.apache.hama.commons.math.DenseDoubleVector

      }

      LOG.info("writeRandomDistributedRowMatrix path: " + path
          + " saveTransposed: " + saveTransposed);
      for (int i = 0; i < matrix.length; i++) {
        DenseDoubleVector rowVector = new DenseDoubleVector(matrix[i]);
        writer.append(new IntWritable(i), new PipesVectorWritable(rowVector));
        LOG.info("IntWritable: " + i + " PipesVectorWritable: "
            + rowVector.toString());
      }

    } catch (IOException e) {
      e.printStackTrace();
    } finally {
      // ... (truncated)
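
The finally block is truncated; the usual pattern there (an assumption, since
the original is cut off) is to close the writer so the SequenceFile is flushed:

    if (writer != null) {
      try {
        writer.close();                       // assumed continuation
      } catch (IOException e) {
        e.printStackTrace();
      }
    }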

Examples of org.apache.hama.commons.math.DenseDoubleVector

    String str = Text.readString(in);
    // LOG.debug("readVector: '" + str + "'");

    String[] values = str.split(",");
    int len = values.length;
    DoubleVector vector = new DenseDoubleVector(len);
    for (int i = 0; i < len; i++) {
      vector.set(i, Double.parseDouble(values[i]));
    }
    return vector;
  }
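
A plausible counterpart for the write side, assuming the same comma-delimited
encoding (the matching write method is not shown in this excerpt; uses
java.io.DataOutput and org.apache.hadoop.io.Text):

    // Hypothetical writeVector, mirroring the format read above.
    static void writeVector(DataOutput out, DoubleVector vector) throws IOException {
      StringBuilder sb = new StringBuilder();
      for (int i = 0; i < vector.getLength(); i++) {
        if (i > 0) sb.append(',');
        sb.append(vector.get(i));
      }
      Text.writeString(out, sb.toString());
    }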