Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.Counter
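Counter is the metric type that Hadoop tasks update through their task context; user code usually declares an enum and increments the matching counter as records flow through. A minimal sketch of that pattern follows, assuming a hypothetical WordMapper class and RecordCounter enum (these names are illustrative and are not part of the snippets below):

    import java.io.IOException;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Mapper;

    public class WordMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

      // Each enum constant becomes one named Counter in the job's counter group.
      public enum RecordCounter { RECORDS_SEEN, EMPTY_LINES }

      private final IntWritable one = new IntWritable(1);

      @Override
      protected void map(LongWritable key, Text value, Context context)
          throws IOException, InterruptedException {
        // context.getCounter(Enum) resolves the Counter for this task attempt.
        context.getCounter(RecordCounter.RECORDS_SEEN).increment(1);
        String line = value.toString().trim();
        if (line.isEmpty()) {
          context.getCounter(RecordCounter.EMPTY_LINES).increment(1);
          return;
        }
        context.write(new Text(line), one);
      }
    }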


    hihoTuple.setKey(key);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();

    // Mock the reducer context and stub getCounter() so the reducer increments
    // a real Counter that the test can read back afterwards.
    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeValueReducer mergeReducer = new MergeValueReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(null, key);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());


    values.add(hihoValue1);
    values.add(hihoValue2);

    // Same pattern: back the mocked context with a real Counter from a Counters registry.
    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeValueReducer mergeReducer = new MergeValueReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(value2, key);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());


    values.add(hihoValue1);
    values.add(hihoValue2);

    // MergeKeyReducer emits (key, value) pairs; the OUTPUT counter tracks written records.
    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeKeyReducer mergeReducer = new MergeKeyReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(key, value2);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());

    hihoValue2.setIsOld(false);
    ArrayList<HihoValue> values = new ArrayList<HihoValue>();

    // With no values collected, the reducer still writes the key (with a null value)
    // and bumps the OUTPUT counter once.
    Reducer.Context context = mock(Reducer.Context.class);
    Counters counters = new Counters();
    Counter counter = counters.findCounter(MergeRecordCounter.OUTPUT);
    when(context.getCounter(MergeRecordCounter.OUTPUT)).thenReturn(counter);
    MergeKeyReducer mergeReducer = new MergeKeyReducer();
    mergeReducer.reduce(hihoTuple, values, context);
    verify(context).write(key, null);
    assertEquals(1, context.getCounter(MergeRecordCounter.OUTPUT).getValue());
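The fragments above all follow the same pattern: mock Reducer.Context, stub getCounter() to hand back a real Counter from a Counters registry, run the reducer, then verify both the written pair and the counter value. Below is a self-contained sketch of that pattern; the WordCountReducer class and its OUTPUT counter are hypothetical stand-ins, not the HIHO classes used in the fragments.

    import static org.junit.Assert.assertEquals;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.verify;
    import static org.mockito.Mockito.when;

    import java.io.IOException;
    import java.util.Arrays;

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Counter;
    import org.apache.hadoop.mapreduce.Counters;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.junit.Test;

    public class WordCountReducerTest {

      enum RecordCounter { OUTPUT }

      // Hypothetical reducer: sums the counts for a key and bumps OUTPUT per emitted record.
      static class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
          int sum = 0;
          for (IntWritable value : values) {
            sum += value.get();
          }
          context.write(key, new IntWritable(sum));
          context.getCounter(RecordCounter.OUTPUT).increment(1);
        }
      }

      @Test
      @SuppressWarnings({ "unchecked", "rawtypes" })
      public void incrementsOutputCounterOncePerKey() throws Exception {
        // Back the mocked context with a real Counter so increments are observable.
        Reducer.Context context = mock(Reducer.Context.class);
        Counters counters = new Counters();
        Counter counter = counters.findCounter(RecordCounter.OUTPUT);
        when(context.getCounter(RecordCounter.OUTPUT)).thenReturn(counter);

        new WordCountReducer().reduce(new Text("hadoop"),
            Arrays.asList(new IntWritable(1), new IntWritable(2)), context);

        verify(context).write(new Text("hadoop"), new IntWritable(3));
        assertEquals(1, counter.getValue());
      }
    }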


    if (counters == null) {
      return;
    }

    // Copy the job's task counters into the step contribution.
    // TODO: remove deprecation suppress when we don't want to rely on org.apache.hadoop.mapred
    Counter count = counters.findCounter(Task.Counter.MAP_INPUT_RECORDS);

    // Apply one read increment per map input record.
    for (int i = 0; i < safeLongToInt(count.getValue()); i++) {
      contribution.incrementReadCount();
    }

    count = counters.findCounter(Task.Counter.MAP_SKIPPED_RECORDS);
    contribution.incrementReadSkipCount(safeLongToInt(count.getValue()));

    count = counters.findCounter(Task.Counter.REDUCE_OUTPUT_RECORDS);
    contribution.incrementWriteCount(safeLongToInt(count.getValue()));

    count = counters.findCounter(Task.Counter.REDUCE_SKIPPED_RECORDS);

    // Apply one write-skip increment per skipped reduce record.
    for (int i = 0; i < safeLongToInt(count.getValue()); i++) {
      contribution.incrementWriteSkipCount();
    }
  }
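On the driver side, the same counters can be read back from the client once the job has finished, which is what the fragment above does with the older mapred Task.Counter names. A minimal sketch using the newer org.apache.hadoop.mapreduce.TaskCounter enum; the CounterReport class and printCounters method are illustrative names, and the Job is assumed to have already completed:

    import org.apache.hadoop.mapreduce.Counters;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.TaskCounter;

    public final class CounterReport {

      // Read framework counters from a job that has already completed.
      public static void printCounters(Job job) throws Exception {
        Counters counters = job.getCounters();
        long mapInputs = counters.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue();
        long reduceOutputs = counters.findCounter(TaskCounter.REDUCE_OUTPUT_RECORDS).getValue();
        System.out.println("map input records: " + mapInputs
            + ", reduce output records: " + reduceOutputs);
      }
    }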
