Package org.apache.hadoop.mapreduce.jobhistory

Examples of org.apache.hadoop.mapreduce.jobhistory.TaskStartedEvent
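
A TaskStartedEvent is the job-history record written when a map or reduce task is launched. The excerpts below show it being emitted from the MapReduce application master (TaskImpl transitions), from the classic JobTracker code path (jobHistory.logEvent), and fed into history-processing tests. All of them use the same constructor shape: task id, start time, task type, and split locations. The following minimal sketch is based only on that constructor usage; the getter names are my assumption from the field names, not taken from the excerpts.

import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.jobhistory.TaskStartedEvent;

public class TaskStartedEventSketch {
  public static void main(String[] args) {
    // Same constructor shape as in the excerpts below:
    // (taskId, startTime, taskType, splitLocations)
    TaskStartedEvent tse = new TaskStartedEvent(
        TaskID.forName("task_200904211745_0003_m_000004"),
        System.currentTimeMillis(),
        TaskType.MAP,
        "" /* split locations; may be empty */);

    // Getter names assumed from the constructor fields.
    System.out.println(tse.getTaskId() + " (" + tse.getTaskType()
        + ") started at " + tse.getStartTime());
  }
}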


              throws IOException {
            // send two task start and two task fail events for tasks 0 and 1
            int eventId = numEventsRead.getAndIncrement();
            TaskID tid = tids[eventId & 0x1];
            if (eventId < 2) {
              return new TaskStartedEvent(tid, 0, taskType, "");
            }
            if (eventId < 4) {
              TaskFailedEvent tfe = new TaskFailedEvent(tid, 0, taskType,
                  "failed", "FAILED", null, new Counters());
              tfe.setDatum(tfe.getDatum());


    @Override
    public void transition(TaskImpl task, TaskEvent event) {
      task.addAndScheduleAttempt();
      task.scheduledTime = task.clock.getTime();
      // Record the launch in the job history: the event carries the task id,
      // launch time, task type and split locations.
      TaskStartedEvent tse = new TaskStartedEvent(
          TypeConverter.fromYarn(task.taskId), task.getLaunchTime(),
          TypeConverter.fromYarn(task.taskId.getTaskType()),
          task.getSplitsAsString());
      task.eventHandler
          .handle(new JobHistoryEvent(task.taskId.getJobId(), tse));

    // Replay synthetic history events (an unsuccessful attempt, then a task
    // start) through the topology builder under test.
    subject.process(new TaskAttemptUnsuccessfulCompletionEvent
                    (TaskAttemptID.forName("attempt_200904211745_0003_m_000004_2"),
                     TaskType.valueOf("MAP"), "STATUS", 1234567890L,
                     "/194\\.6\\.134\\.80/cluster50263\\.secondleveldomain\\.com",
                     "MACHINE_EXPLODED", splits));
    subject.process(new TaskStartedEvent(TaskID
        .forName("task_200904211745_0003_m_000004"), 1234567890L, TaskType
        .valueOf("MAP"),
        "/194\\.6\\.134\\.80/cluster50263\\.secondleveldomain\\.com"));

    final LoggedNetworkTopology topology = subject.build();

      metrics.launchReduce(id);
    }
    // Note that the logs are for the scheduled tasks only. Tasks that join on
    // restart already have their logs in place.
    if (tip.isFirstAttempt(id)) {
      TaskStartedEvent tse = new TaskStartedEvent(tip.getTIPId(),
          tip.getExecStartTime(),
          name, splits);

      jobHistory.logEvent(tse, tip.getJob().jobId);
      setFirstTaskLaunchTime(tip);

      eventHandler.handle(new JobTaskAttemptCompletedEvent(tce));
    }
  }

  // Emit this task's TaskStartedEvent into the job history stream.
  private void sendTaskStartedEvent() {
    TaskStartedEvent tse = new TaskStartedEvent(
        TypeConverter.fromYarn(taskId), getLaunchTime(),
        TypeConverter.fromYarn(taskId.getTaskType()),
        getSplitsAsString());
    eventHandler
        .handle(new JobHistoryEvent(taskId.getJobId(), tse));
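
The excerpts above all sit on the producer side: they build a TaskStartedEvent and hand it to a history event handler or logger. As a hedged sketch of the consumer side, assuming the JobHistoryParser API from the same package (the parser and TaskInfo accessor names here are my assumptions, not taken from the excerpts), the start times carried by these events can be read back from a finished job's history file:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;

public class PrintTaskStartTimes {
  public static void main(String[] args) throws IOException {
    // Path to a completed job's history file; purely illustrative.
    Path historyFile = new Path(args[0]);
    FileSystem fs = historyFile.getFileSystem(new Configuration());

    JobHistoryParser parser = new JobHistoryParser(fs, historyFile);
    JobInfo jobInfo = parser.parse();

    // The parser records each TaskStartedEvent as the start time of a TaskInfo.
    for (TaskInfo task : jobInfo.getAllTasks().values()) {
      System.out.println(task.getTaskId() + " (" + task.getTaskType()
          + ") started at " + task.getStartTime());
    }
  }
}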
