Package org.apache.hadoop.mapreduce.v2.api.records

Examples of org.apache.hadoop.mapreduce.v2.api.records.Counter
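
These record types (Counter, CounterGroup, Counters) are YARN record interfaces, so the snippets below never call a constructor; instances come from a RecordFactory. A minimal, self-contained sketch of that pattern (the class name CounterRecordSketch and the counter/group names are made up for illustration):

import java.util.HashMap;

import org.apache.hadoop.mapreduce.v2.api.records.Counter;
import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class CounterRecordSketch {
  public static void main(String[] args) {
    RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);

    // Build a single counter record.
    Counter bytesRead = recordFactory.newRecordInstance(Counter.class);
    bytesRead.setName("BYTES_READ");
    bytesRead.setDisplayName("Bytes Read");
    bytesRead.setValue(1024L);

    // Group it, mirroring the addAllCounters(new HashMap<...>()) initialization
    // used by the excerpts below.
    CounterGroup group = recordFactory.newRecordInstance(CounterGroup.class);
    group.setName("FileSystemCounters");
    group.setDisplayName("FileSystemCounters");
    group.addAllCounters(new HashMap<String, Counter>());
    group.setCounter(bytesRead.getName(), bytesRead);

    // Attach the group to a top-level Counters record and read the value back.
    Counters counters = recordFactory.newRecordInstance(Counters.class);
    counters.addAllCounterGroups(new HashMap<String, CounterGroup>());
    counters.setCounterGroup(group.getName(), group);
    System.out.println(counters.getCounterGroup("FileSystemCounters")
        .getCounter("BYTES_READ").getValue());
  }
}

The examples that follow are excerpts from larger methods, so variables such as recordFactory, grp, yCntrs and mockContext come from surrounding code that the excerpts omit.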


      // Copy one org.apache.hadoop.mapreduce counter group into a new YARN
      // CounterGroup record and register it on the enclosing Counters record (yCntrs).
      CounterGroup yGrp = recordFactory.newRecordInstance(CounterGroup.class);
      yGrp.setName(grp.getName());
      yGrp.setDisplayName(grp.getDisplayName());
      yGrp.addAllCounters(new HashMap<String, Counter>());
      for (org.apache.hadoop.mapreduce.Counter cntr : grp) {
        Counter yCntr = recordFactory.newRecordInstance(Counter.class);
        yCntr.setName(cntr.getName());
        yCntr.setDisplayName(cntr.getDisplayName());
        yCntr.setValue(cntr.getValue());
        yGrp.setCounter(yCntr.getName(), yCntr);
      }
      yCntrs.setCounterGroup(yGrp.getName(), yGrp);
    }
    return yCntrs;
  }
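
The excerpt above copies classic org.apache.hadoop.mapreduce counters, group by group, into the YARN record types; the later excerpt that iterates org.apache.hadoop.mapred.Counters.Counter does the same for the old mapred API. Callers usually reach this copying logic through TypeConverter.toYarn. A hedged usage sketch, assuming a populated classic Counters object (the helper class/method names and the TaskCounter group/counter strings are illustrative):

import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;

public class CounterConversionSketch {
  // Convert classic counters to the YARN record form and read one value back.
  public static long mapInputRecords(org.apache.hadoop.mapreduce.Counters jobCounters) {
    Counters yarnCounters = TypeConverter.toYarn(jobCounters);
    // getCounter returns null for a counter that was never set, so real code
    // should check before dereferencing.
    return yarnCounters
        .getCounterGroup("org.apache.hadoop.mapreduce.TaskCounter")
        .getCounter("MAP_INPUT_RECORDS")
        .getValue();
  }
}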


      // Name and register the counter group, then lazily create the counter
      // (starting at zero) before applying the increment below.
      cGrp.setName(groupName);
      cGrp.setDisplayName(groupName);
      setCounterGroup(groupName, cGrp);
    }
    if (getCounterGroup(groupName).getCounter(key.name()) == null) {
      Counter c = new CounterPBImpl();
      c.setName(key.name());
      c.setDisplayName(key.name());
      c.setValue(0L);
      getCounterGroup(groupName).setCounter(key.name(), c);
    }
    Counter counter = getCounterGroup(groupName).getCounter(key.name());
    counter.setValue(counter.getValue() + amount);
  }
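
The same create-on-first-use-then-increment logic can be written as a standalone helper against the record interfaces. A minimal sketch, assuming the Counters record was obtained from a RecordFactory as in the first sketch; the class and method names are hypothetical:

import java.util.HashMap;

import org.apache.hadoop.mapreduce.v2.api.records.Counter;
import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class CounterIncrementSketch {
  private static final RecordFactory recordFactory =
      RecordFactoryProvider.getRecordFactory(null);

  // Lazily create the group and counter, then add the increment, as in the excerpt above.
  public static void incr(Counters counters, String groupName, Enum<?> key, long amount) {
    CounterGroup group = counters.getCounterGroup(groupName);
    if (group == null) {
      group = recordFactory.newRecordInstance(CounterGroup.class);
      group.setName(groupName);
      group.setDisplayName(groupName);
      group.addAllCounters(new HashMap<String, Counter>());
      counters.setCounterGroup(groupName, group);
    }
    Counter counter = group.getCounter(key.name());
    if (counter == null) {
      counter = recordFactory.newRecordInstance(Counter.class);
      counter.setName(key.name());
      counter.setDisplayName(key.name());
      counter.setValue(0L);
      group.setCounter(key.name(), counter);
    }
    counter.setValue(counter.getValue() + amount);
  }
}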

      // Same copy as above, but iterating the old org.apache.hadoop.mapred.Counters.Counter API.
      CounterGroup yGrp = recordFactory.newRecordInstance(CounterGroup.class);
      yGrp.setName(grp.getName());
      yGrp.setDisplayName(grp.getDisplayName());
      yGrp.addAllCounters(new HashMap<String, Counter>());
      for (org.apache.hadoop.mapred.Counters.Counter cntr : grp) {
        Counter yCntr = recordFactory.newRecordInstance(Counter.class);
        yCntr.setName(cntr.getName());
        yCntr.setDisplayName(cntr.getDisplayName());
        yCntr.setValue(cntr.getValue());
        yGrp.setCounter(yCntr.getName(), yCntr);
      }
      yCntrs.setCounterGroup(yGrp.getName(), yGrp);
    }
    return yCntrs;
  }


    // Test setup: point the AM staging dir at stagingDir, mock the JobContext,
    // and derive a JobId from a hand-built application attempt id.
    YarnConfiguration conf = new YarnConfiguration();
    conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir);
    JobContext mockJobContext = mock(JobContext.class);
    ApplicationAttemptId attemptid =
      ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0");
    JobId jobId = TypeConverter.toYarn(
        TypeConverter.fromYarn(attemptid.getApplicationId()));
   
    WaitForItHandler waitForItHandler = new WaitForItHandler();
   
    when(mockContext.getApplicationID()).thenReturn(attemptid.getApplicationId());


    }
  }

  // Build the per-job counter-update event for a task attempt that succeeded,
  // delegating the time/resource accounting to updateMillisCounters below.
  private static JobCounterUpdateEvent createJobCounterUpdateEventTASucceeded(
      TaskAttemptImpl taskAttempt) {
    TaskId taskId = taskAttempt.attemptId.getTaskId();
    JobCounterUpdateEvent jce = new JobCounterUpdateEvent(taskId.getJobId());
    updateMillisCounters(jce, taskAttempt);
    return jce;
  }

    }
  }
 
  // Work out how long the attempt ran and the memory (MB) and vcores it
  // requested; these values feed the job's millis counters.
  private static void updateMillisCounters(JobCounterUpdateEvent jce,
      TaskAttemptImpl taskAttempt) {
    TaskType taskType = taskAttempt.getID().getTaskId().getTaskType();
    long duration = (taskAttempt.getFinishTime() - taskAttempt.getLaunchTime());
    int mbRequired =
        taskAttempt.getMemoryRequired(taskAttempt.conf, taskType);
    int vcoresRequired = taskAttempt.getCpuRequired(taskAttempt.conf, taskType);
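
Both helpers above produce a JobCounterUpdateEvent, which carries per-job counter increments from a task attempt to the job. A hedged sketch of assembling such an event; the addCounterUpdate method and the JobCounter.MILLIS_MAPS constant are assumptions about the MR app master's event API and may differ between Hadoop versions:

import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent;

public class CounterUpdateSketch {
  // Record the wall-clock milliseconds spent by a completed map attempt.
  public static JobCounterUpdateEvent mapMillisEvent(JobId jobId, long launchTime,
      long finishTime) {
    long duration = finishTime - launchTime;                 // same arithmetic as above
    JobCounterUpdateEvent jce = new JobCounterUpdateEvent(jobId);
    jce.addCounterUpdate(JobCounter.MILLIS_MAPS, duration);  // assumed method/constant
    return jce;
  }
}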
