Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.DoubleWritable
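
DoubleWritable is Hadoop's WritableComparable wrapper around a primitive double, exposing get() and set(double) plus the write()/readFields() pair used by SequenceFiles and the MapReduce shuffle. As a minimal, self-contained orientation before the project excerpts below (only the class name here is invented; the API calls are stock Hadoop):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableRoundTrip {
  public static void main(String[] args) throws IOException {
    DoubleWritable original = new DoubleWritable(42.5);

    // Serialize into an in-memory buffer, exactly as a SequenceFile would.
    DataOutputBuffer out = new DataOutputBuffer();
    original.write(out);

    // Deserialize into a fresh, reusable instance.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    DoubleWritable copy = new DoubleWritable();
    copy.readFields(in);

    System.out.println(copy.get()); // prints 42.5
  }
}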



  @Test
  public void testDoubles() throws Exception {
    double j = 55.5d;
    DoubleWritable w = new DoubleWritable(j);
    testInputOutputFn(Writables.doubles(), j, w);
  }
View Full Code Here
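
This first excerpt appears to come from Apache Crunch's writable-type tests: Writables.doubles() is the Crunch PType mapping a Java double to Hadoop's DoubleWritable, and testInputOutputFn asserts that the raw value and its Writable form convert into each other in both directions.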


      long itemID = indexItemIDMap.get(itemIDIndex);
      for (SimilarItem similarItem : topKMostSimilarItems.retrieve()) {
        long otherItemID = similarItem.getItemID();
        if (itemID < otherItemID) {
          ctx.write(new EntityEntityWritable(itemID, otherItemID), new DoubleWritable(similarItem.getSimilarity()));
        } else {
          ctx.write(new EntityEntityWritable(otherItemID, itemID), new DoubleWritable(similarItem.getSimilarity()));
        }
      }
    }
View Full Code Here
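
This Mahout mapper emits the top-K most similar items for each item, keying each pair with an EntityEntityWritable and carrying the similarity score in a DoubleWritable. Writing the smaller item ID first gives every unordered pair one canonical key, so (a, b) and (b, a) aggregate on the same reducer. The if/else is equivalent to a small helper like this (a hypothetical refactoring, not Mahout code):

  // Canonical ordering collapses the two orientations of a pair to one key.
  static EntityEntityWritable canonicalPair(long a, long b) {
    return a < b ? new EntityEntityWritable(a, b)
                 : new EntityEntityWritable(b, a);
  }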

    } else if (value instanceof DoubleWritable) {
      df = ((DoubleWritable)value).get();
    }
    if (!Double.isNaN(df)) {
      // For calculating the sum of squares
      context.write(SUM_OF_SQUARES, new DoubleWritable(df * df));
      context.write(SUM, new DoubleWritable(df));
      // For calculating the total number of entries
      context.write(TOTAL_COUNT, new DoubleWritable(1));
    }
  }
View Full Code Here
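
For every non-NaN input this mapper emits three DoubleWritable aggregates: the squared value under SUM_OF_SQUARES, the value under SUM, and a 1 under TOTAL_COUNT, so a single summing reducer can recover count, mean, and variance in one pass via the moments identity Var(x) = E[x²] - E[x]². A minimal sketch of that downstream combination (our own helper, not the project's reducer):

  // Population standard deviation from the three summed aggregates:
  // n = Σ of TOTAL_COUNT emissions, sum = Σx, sumSq = Σx².
  static double stdDevFromMoments(double n, double sum, double sumSq) {
    double mean = sum / n;
    return Math.sqrt(sumSq / n - mean * mean);
  }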


    @Override
    protected void map(IntWritable index, IntWritable vertex, Mapper.Context ctx)
        throws IOException, InterruptedException {
      ctx.write(vertex, new DoubleWritable(ranks.get(index.get())));
    }
View Full Code Here
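
Here each input record is an (index, vertex) pair and the mapper re-keys the stream by vertex, attaching the vertex's rank from an in-memory ranks vector as a DoubleWritable. It allocates a new DoubleWritable per record; a common Hadoop idiom is to reuse one mutable instance across map() calls, since write() serializes the value immediately. A sketch of the same body in that style (assuming the surrounding class from the excerpt):

    private final DoubleWritable rank = new DoubleWritable();

    @Override
    protected void map(IntWritable index, IntWritable vertex, Mapper.Context ctx)
        throws IOException, InterruptedException {
      // Safe to overwrite on the next call: ctx.write serializes eagerly.
      rank.set(ranks.get(index.get()));
      ctx.write(vertex, rank);
    }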

    /*Normal normal = new Normal(5, 3, random);
    for (int i = 0; i < 10000; i++){
      writer.append(new IntWritable(i), new DoubleWritable((long)normal.nextDouble()));
    }*/
    int i = 0;
    writer.append(new IntWritable(i++), new DoubleWritable(7));
    writer.append(new IntWritable(i++), new DoubleWritable(9));
    writer.append(new IntWritable(i++), new DoubleWritable(9));
    writer.append(new IntWritable(i++), new DoubleWritable(10));
    writer.append(new IntWritable(i++), new DoubleWritable(10));
    writer.append(new IntWritable(i++), new DoubleWritable(10));
    writer.append(new IntWritable(i++), new DoubleWritable(10));
    writer.append(new IntWritable(i++), new DoubleWritable(11));
    writer.append(new IntWritable(i++), new DoubleWritable(11));
    writer.append(new IntWritable(i++), new DoubleWritable(13));
    writer.close();
  }
View Full Code Here
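
For reference, the ten hand-written values (7, 9, 9, 10, 10, 10, 10, 11, 11, 13) sum to 100, so their mean is 10 and their population variance is 1022/10 - 10² = 2.2, i.e. a standard deviation of about 1.48; whatever consumes this SequenceFile can be checked against those figures.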

    SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, input, IntWritable.class,
            DoubleWritable.class);
    Random random = RandomUtils.getRandom();
    Normal normal = new Normal(5, 3, random);
    for (int i = 0; i < 1000000; i++) {
      // nextInt() rounds each Gaussian sample to the nearest integer before the
      // value widens to double.
      writer.append(new IntWritable(i), new DoubleWritable(normal.nextInt()));
    }
    writer.close();
    double v = BasicStats.stdDev(input, output, conf);
    assertEquals(3, v, 0.02);
  }
View Full Code Here
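
The assertion tolerates the integer rounding above: quantizing each N(5, 3) sample to unit-width bins inflates the variance by roughly 1/12 (Sheppard's correction), so the expected measurement is about √(9 + 1/12) ≈ 3.014, which still fits the 0.02 tolerance.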

    // Enclosing for-loop header elided in this excerpt; each record pairs an
    // IntPairWritable key with a DoubleWritable value read back from disk.
                                                                        null, null, true, job)) {
      IntPairWritable key = record.getFirst();
      DoubleWritable value = record.getSecond();
      int topic = key.getFirst();
      int word = key.getSecond();
      if (word == TOPIC_SUM_KEY) {
        logTotals[topic] = value.get();
        Preconditions.checkArgument(!Double.isInfinite(value.get()));
      } else if (topic == LOG_LIKELIHOOD_KEY) {
        ll = value.get();
      } else {
        Preconditions.checkArgument(topic >= 0, "topic should be non-negative, not %s", topic);
        Preconditions.checkArgument(word >= 0, "word should be non-negative, not %s", word);
        Preconditions.checkArgument(pWgT.getQuick(topic, word) == 0.0);

        pWgT.setQuick(topic, word, value.get());
        Preconditions.checkArgument(!Double.isInfinite(pWgT.getQuick(topic, word)));
      }
    }

    return new LDAState(numTopics, numWords, topicSmoothing, pWgT, logTotals, ll);
View Full Code Here
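
This loop rebuilds an LDAState from SequenceFile records keyed by IntPairWritable: a (topic, word) key holds the unnormalized log weight of that word in that topic, the sentinel (topic, TOPIC_SUM_KEY) holds the topic's log normalizer, and (LOG_LIKELIHOOD_KEY, LOG_LIKELIHOOD_KEY) holds the model log-likelihood. All values stay in log space, so a consumer would recover an actual probability with a subtraction and an exp(); a sketch under that assumption, not code from the driver:

  // Weights and normalizers are stored as logs, so normalizing is subtraction.
  double logProb = pWgT.getQuick(topic, word) - logTotals[topic];
  double probability = Math.exp(logProb);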

  private static void writeInitialState(Path statePath, int numTopics, int numWords) throws IOException {
    Configuration job = new Configuration();
    FileSystem fs = statePath.getFileSystem(job);

    DoubleWritable v = new DoubleWritable();

    Random random = RandomUtils.getRandom();

    for (int k = 0; k < numTopics; ++k) {
      Path path = new Path(statePath, "part-" + k);
      SequenceFile.Writer writer = new SequenceFile.Writer(fs, job, path, IntPairWritable.class, DoubleWritable.class);

      try {
        double total = 0.0; // total number of pseudo counts we made
        for (int w = 0; w < numWords; ++w) {
          Writable kw = new IntPairWritable(k, w);
          // A small amount of random noise, minimized by having a floor.
          double pseudocount = random.nextDouble() + 1.0E-8;
          total += pseudocount;
          v.set(Math.log(pseudocount));
          writer.append(kw, v);
        }
        Writable kTsk = new IntPairWritable(k, TOPIC_SUM_KEY);
        v.set(Math.log(total));
        writer.append(kTsk, v);
      } finally {
        Closeables.closeQuietly(writer);
      }
    }
View Full Code Here
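
Two details worth noting above: the 1.0E-8 floor keeps each pseudocount strictly positive (random.nextDouble() can return 0.0, and log(0) is -Infinity, which the loading loop rejects), and both the per-word values and the TOPIC_SUM_KEY total are written already in log space, matching the format that loop expects.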


  private static void writeState(Configuration job, LDAState state, Path statePath) throws IOException {
    FileSystem fs = statePath.getFileSystem(job);
    DoubleWritable v = new DoubleWritable();

    for (int k = 0; k < state.getNumTopics(); ++k) {
      Path path = new Path(statePath, "part-" + k);
      SequenceFile.Writer writer = new SequenceFile.Writer(fs, job, path, IntPairWritable.class, DoubleWritable.class);

      try {
        for (int w = 0; w < state.getNumWords(); ++w) {
          Writable kw = new IntPairWritable(k, w);
          v.set(state.logProbWordGivenTopic(w,k) + state.getLogTotal(k));
          writer.append(kw, v);
        }
        Writable kTsk = new IntPairWritable(k, TOPIC_SUM_KEY);
        v.set(state.getLogTotal(k));
        writer.append(kTsk, v);
      } finally {
        Closeables.closeQuietly(writer);
      }
    }
    Path path = new Path(statePath, "part-" + LOG_LIKELIHOOD_KEY);
    SequenceFile.Writer writer = new SequenceFile.Writer(fs, job, path, IntPairWritable.class, DoubleWritable.class);
    try {
      Writable kTsk = new IntPairWritable(LOG_LIKELIHOOD_KEY, LOG_LIKELIHOOD_KEY);
      v.set(state.getLogLikelihood());
      writer.append(kTsk, v);
    } finally {
      Closeables.closeQuietly(writer);
    }
  }
View Full Code Here
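
The log-likelihood gets its own part file under a sentinel IntPairWritable whose components are both LOG_LIKELIHOOD_KEY, which is exactly the key pattern the state-loading loop earlier matches with topic == LOG_LIKELIHOOD_KEY.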

  protected double computeRmse(Path errors) {
    RunningAverage average = new FullRunningAverage();
    for (Pair<DoubleWritable,NullWritable> entry :
      new SequenceFileDirIterable<DoubleWritable, NullWritable>(errors, PathType.LIST, PathFilters.logsCRCFilter(),
          getConf())) {
      DoubleWritable error = entry.getFirst();
      average.addDatum(error.get() * error.get());
    }

    return Math.sqrt(average.getAverage());
  }
View Full Code Here
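
computeRmse is the textbook root-mean-square error: it streams per-record errors (DoubleWritable values under NullWritable keys) out of the error SequenceFiles, accumulates the mean of the squared errors in a FullRunningAverage, and returns its square root, i.e. √(Σeᵢ²/n).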
