Package org.apache.hadoop.mapred

Examples of org.apache.hadoop.mapred.Reporter


        // Nothing to report when there is no Progressable, or it is the no-op reporter.
        if (progressable == null || progressable == Reporter.NULL) {
            return;
        }

        // Forward each stats counter value through the old-API Reporter.
        if (progressable instanceof Reporter) {
            Reporter reporter = (Reporter) progressable;
            for (Counter count : Counter.ALL) {
                oldApiCounter(reporter, count, count.get(stats));
            }
        }
View Full Code Here
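
The snippet above bridges a new-API Progressable back to the old-API Reporter so that counter values can be replayed through it. As a companion, here is a minimal, self-contained sketch (not taken from that code) of how an old-API mapper typically uses the Reporter it receives; the class name and the RecordCounter enum are illustrative.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class CountingMapper extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, LongWritable> {

  // Illustrative counter enum; any Enum works with Reporter.incrCounter(Enum, long).
  public enum RecordCounter { SEEN, EMPTY }

  @Override
  public void map(LongWritable key, Text value,
                  OutputCollector<Text, LongWritable> output, Reporter reporter)
      throws IOException {
    reporter.incrCounter(RecordCounter.SEEN, 1);    // count every record
    if (value.getLength() == 0) {
      reporter.incrCounter(RecordCounter.EMPTY, 1); // count empty lines separately
      return;
    }
    output.collect(value, new LongWritable(1));
    reporter.progress();                            // keep the task from timing out
  }
}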


  @Test
  public final void testMapperValidValues() throws IOException,
      InterruptedException {
    // Mock the old-API collaborators so the mapper can be driven directly in the test.
    OutputCollector<AvroValue<Pair>, NullWritable> output = mock(OutputCollector.class);
    Reporter reporter = mock(Reporter.class);

    DBInputAvroMapper mapper = new DBInputAvroMapper();

    ColumnInfo intColumn = new ColumnInfo(0, Types.INTEGER, "intColumn");
    ColumnInfo stringColumn = new ColumnInfo(1, Types.VARCHAR,
View Full Code Here
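
The test above mocks both the OutputCollector and the Reporter so the mapper can be exercised without a running job. A minimal sketch of the same pattern, assuming JUnit 4 and Mockito on the classpath and reusing the illustrative CountingMapper from the previous sketch:

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.junit.Test;

public class CountingMapperTest {

  @Test
  @SuppressWarnings("unchecked")
  public void mapEmitsRecordAndBumpsCounter() throws Exception {
    OutputCollector<Text, LongWritable> output = mock(OutputCollector.class);
    Reporter reporter = mock(Reporter.class);

    new CountingMapper().map(new LongWritable(0), new Text("hello"), output, reporter);

    verify(output).collect(new Text("hello"), new LongWritable(1));
    verify(reporter).incrCounter(CountingMapper.RecordCounter.SEEN, 1);
  }
}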

    Builder.sortSplits(sorted);

    int numTrees = Builder.getNbTrees(job); // total number of trees

    firstOutput = new PartialOutputCollector(numTrees);
    Reporter reporter = Reporter.NULL;

    firstIds = new int[splits.length];
    sizes = new int[splits.length];
   
    // to compute firstIds, process the splits in file order
View Full Code Here

    for (int p = 0; p < splits.length; p++) {
      total += Step2Mapper.nbConcerned(splits.length, numTrees, p);
    }

    secondOutput = new PartialOutputCollector(total);
    Reporter reporter = Reporter.NULL;
    long slowest = 0; // duration of slowest map

    for (int partition = 0; partition < splits.length; partition++) {
      InputSplit split = splits[partition];
      RecordReader<LongWritable, Text> reader = input.getRecordReader(split,
View Full Code Here

    InputSplit[] sorted = Arrays.copyOf(splits, splits.length);
    Builder.sortSplits(sorted);

    Step0OutputCollector collector = new Step0OutputCollector(numMaps);
    Reporter reporter = Reporter.NULL;

    for (int p = 0; p < numMaps; p++) {
      InputSplit split = sorted[p];
      RecordReader<LongWritable, Text> reader = input.getRecordReader(split, job, reporter);
View Full Code Here

    InputSplit[] splits = input.getSplits(job, numMaps);

    InputSplit[] sorted = Arrays.copyOf(splits, splits.length);
    Builder.sortSplits(sorted);

    Reporter reporter = Reporter.NULL;

    int[] keys = new int[numMaps];
    Step0Output[] values = new Step0Output[numMaps];
   
    int[] expectedIds = new int[numMaps];
View Full Code Here
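
The four snippets above all pass Reporter.NULL to getRecordReader because the splits are being read outside a live task, where there is no real reporter to forward progress to. A minimal, self-contained sketch of that pattern, assuming plain-text input under the hypothetical path /tmp/input:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;

public class SplitScan {
  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf(SplitScan.class);
    FileInputFormat.setInputPaths(job, new Path("/tmp/input")); // illustrative path

    TextInputFormat input = new TextInputFormat();
    input.configure(job);

    InputSplit[] splits = input.getSplits(job, 4);
    long records = 0;
    for (InputSplit split : splits) {
      // Reporter.NULL satisfies the API without reporting progress anywhere.
      RecordReader<LongWritable, Text> reader =
          input.getRecordReader(split, job, Reporter.NULL);
      LongWritable key = reader.createKey();
      Text value = reader.createValue();
      while (reader.next(key, value)) {
        records++;
      }
      reader.close();
    }
    System.out.println("records: " + records);
  }
}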

                        Properties tableProperties,
                        Progressable progress) throws IOException {
    final RecordWriter result =
      super.getHiveRecordWriter(jc,outPath,valueClass,isCompressed,
        tableProperties,progress);
    // In this context the Progressable handed in by the framework is expected to be a Reporter.
    final Reporter reporter = (Reporter) progress;
    reporter.setStatus("got here");
    System.out.println("Got a reporter " + reporter);
    return new RecordWriter() {
      @Override
      public void write(Writable w) throws IOException {
        if (w instanceof Text) {
View Full Code Here
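
The Hive test hook above casts the Progressable it is handed straight to Reporter and calls setStatus on it, which works because the framework happens to pass a Reporter in that context. A small sketch of a more defensive variant of the same idea (the helper class and method names are illustrative):

import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Progressable;

public final class Reporters {
  private Reporters() {}

  // Returns the Progressable as a Reporter if it is one, otherwise a no-op Reporter.
  public static Reporter asReporter(Progressable progress) {
    if (progress instanceof Reporter) {
      return (Reporter) progress;   // a real task-side reporter
    }
    return Reporter.NULL;           // safe fallback outside a task
  }
}

With this helper, the writer above could call Reporters.asReporter(progress).setStatus("got here") without risking a ClassCastException.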

      HadoopUtility.toHConfiguration(index, job);
      index.close();
      InputSplit s = new BitPostingIndexInputSplit(
          new Path(args[3]), Long.parseLong(args[4]), Long.parseLong(args[5]),
          new String[0], Integer.parseInt(args[6]), Integer.parseInt(args[7]));
      RecordReader<IntWritable, IntObjectWrapper<IterablePosting>> rr =
          new BitPostingIndexInputFormat().getRecordReader(s, job, new Reporter(){
        public InputSplit getInputSplit() throws UnsupportedOperationException {return null;}
        @SuppressWarnings("unchecked")
        public void incrCounter(Enum arg0, long arg1) {}
        public void incrCounter(String arg0, String arg1, long arg2) {}
        @SuppressWarnings("unchecked")
View Full Code Here
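
The snippet above builds an inline Reporter whose methods all do nothing, a common workaround when getRecordReader is called outside a running task. A minimal sketch of the same idea as a named class; note that newer Hadoop versions also declare float getProgress() on Reporter, so it is included here and is harmless on versions that lack it:

import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.Reporter;

public class NoOpReporter implements Reporter {
  public void setStatus(String status) {}
  public void progress() {}
  public void incrCounter(Enum<?> key, long amount) {}
  public void incrCounter(String group, String counter, long amount) {}
  public Counters.Counter getCounter(Enum<?> name) { return null; }
  public Counters.Counter getCounter(String group, String name) { return null; }
  public InputSplit getInputSplit() {
    throw new UnsupportedOperationException("no input split outside a task");
  }
  public float getProgress() { return 0f; } // required by newer Reporter versions
}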

          public void collect(Object key, Object value)
            throws IOException {
            //just consume it, no need to write the record anywhere
          }
        };
        Reporter reporter = Reporter.NULL; // dummy reporter
        startOutputThreads(collector, reporter);
      }
      int exitVal = sim.waitFor();
      // how'd it go?
      if (exitVal != 0) {
View Full Code Here
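
The streaming-style snippet above pairs a collect-and-discard OutputCollector with Reporter.NULL (the dummy reporter) so the output threads can run without writing records anywhere. A minimal generic sketch of that collector (the class name is illustrative):

import java.io.IOException;

import org.apache.hadoop.mapred.OutputCollector;

public class DiscardingCollector<K, V> implements OutputCollector<K, V> {
  @Override
  public void collect(K key, V value) throws IOException {
    // Intentionally empty: the record is consumed and dropped.
  }
}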

      conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
      initStdOut(conf);
      conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
      CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
              new Counters.Counter(), new Progress());
      Reporter reporter = new TestTaskReporter();
      List<Text> texts = new ArrayList<Text>();
      texts.add(new Text("first"));
      texts.add(new Text("second"));
      texts.add(new Text("third"));
View Full Code Here
