Package org.apache.hadoop.mapreduce.v2.api.records

Examples of org.apache.hadoop.mapreduce.v2.api.records.TaskId
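
org.apache.hadoop.mapreduce.v2.api.records.TaskId identifies a single map or reduce task within an MRv2 job. It carries the parent JobId, the TaskType (MAP or REDUCE) and a numeric task index, and it is in turn the parent of TaskAttemptId. The snippets below build TaskId instances through MRBuilderUtils; the following minimal sketch shows that construction chain on its own. The literal values are arbitrary and the import locations can differ between Hadoop versions, so treat it as an illustration rather than a drop-in program.

  import org.apache.hadoop.mapreduce.v2.api.records.JobId;
  import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
  import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
  import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
  import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
  import org.apache.hadoop.yarn.api.records.ApplicationId;
  import org.apache.hadoop.yarn.util.BuilderUtils;

  public class TaskIdExample {
    public static void main(String[] args) {
      // Each id embeds its parent: application -> job -> task -> attempt.
      ApplicationId appId = BuilderUtils.newApplicationId(1, 2);          // clusterTimestamp, id
      JobId jobId = MRBuilderUtils.newJobId(appId, 1);                    // job #1 of the application
      TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);   // map task #1 of the job
      TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);

      // The parent JobId, the task type and the index can all be read back from the TaskId.
      System.out.println(taskId.getJobId() + " " + taskId.getTaskType() + " #" + taskId.getId());
      System.out.println(attemptId.getTaskId());                          // an attempt knows its TaskId
    }
  }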


  public void testContainerCleanedWhileCommitting() throws Exception {
    // Build the full id hierarchy for the attempt under test:
    // ApplicationId -> ApplicationAttemptId / JobId -> TaskId -> TaskAttemptId.
    ApplicationId appId = BuilderUtils.newApplicationId(1, 2);
    ApplicationAttemptId appAttemptId =
      BuilderUtils.newApplicationAttemptId(appId, 0);
    JobId jobId = MRBuilderUtils.newJobId(appId, 1);
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
    TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
    Path jobFile = mock(Path.class);

    // Mocked collaborators for the task attempt; the rest of the test is truncated here.
    MockEventHandler eventHandler = new MockEventHandler();
    TaskAttemptListener taListener = mock(TaskAttemptListener.class);


  public void testDoubleTooManyFetchFailure() throws Exception {
    ApplicationId appId = BuilderUtils.newApplicationId(1, 2);
    ApplicationAttemptId appAttemptId =
      BuilderUtils.newApplicationAttemptId(appId, 0);
    JobId jobId = MRBuilderUtils.newJobId(appId, 1);
    TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
    TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(taskId, 0);
    Path jobFile = mock(Path.class);

    MockEventHandler eventHandler = new MockEventHandler();
    TaskAttemptListener taListener = mock(TaskAttemptListener.class);

    }

    @Override
    public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
      throws YarnRemoteException {
      TaskId taskId = request.getTaskId();
      GetTaskReportResponse response =
        recordFactory.newRecordInstance(GetTaskReportResponse.class);
      // Validate the TaskId, then copy the task's current report into the response record.
      response.setTaskReport(verifyAndGetTask(taskId, false).getReport());
      return response;
    }

    @SuppressWarnings("unchecked")
    @Override
    public KillTaskResponse killTask(KillTaskRequest request)
      throws YarnRemoteException {
      TaskId taskId = request.getTaskId();
      String message = "Kill task received from client " + taskId;
      LOG.info(message);
      // Validate the TaskId, then hand a T_KILL event for that task to the event dispatcher.
      verifyAndGetTask(taskId, true);
      appContext.getEventHandler().handle(
          new TaskEvent(taskId, TaskEventType.T_KILL));
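
On the client side, the TaskId travels inside the request record itself. A minimal sketch, assuming KillTaskRequest exposes the usual setTaskId setter alongside the getTaskId shown above, and that a TaskId and an MRClientProtocol proxy are already in hand (the proxy variable is hypothetical and not shown being obtained):

      // Hedged sketch: wrap an existing TaskId in a KillTaskRequest via the record factory.
      RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
      KillTaskRequest killRequest =
          recordFactory.newRecordInstance(KillTaskRequest.class);
      killRequest.setTaskId(taskId);        // e.g. a TaskId built with MRBuilderUtils.newTaskId(...)
      KillTaskResponse killResponse = proxy.killTask(killRequest);   // 'proxy' is assumed, not shown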

    public void handle(SpeculatorEvent event) {
      if (disabled) {
        return;
      }

      TaskId tId = event.getTaskID();
      TaskType tType = null;
      /* event's TaskId will be null if the event type is JOB_CREATE or
       * ATTEMPT_STATUS_UPDATE
       */
      if (tId != null) {
        tType = tId.getTaskType();
      }
      boolean shouldMapSpec =
              conf.getBoolean(MRJobConfig.MAP_SPECULATIVE, false);
      boolean shouldReduceSpec =
              conf.getBoolean(MRJobConfig.REDUCE_SPECULATIVE, false);

    public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
      // set the finish time
      taskAttempt.setFinishTime();
      long slotMillis = computeSlotMillis(taskAttempt);
      // The parent TaskId (and through it the JobId) is recovered from the attempt id
      // to decide which job-level slot-milliseconds counter to charge.
      TaskId taskId = taskAttempt.attemptId.getTaskId();
      JobCounterUpdateEvent jce = new JobCounterUpdateEvent(taskId.getJobId());
      jce.addCounterUpdate(
        taskId.getTaskType() == TaskType.MAP ?
          JobCounter.SLOTS_MILLIS_MAPS : JobCounter.SLOTS_MILLIS_REDUCES,
          slotMillis);
      taskAttempt.eventHandler.handle(jce);
      taskAttempt.logAttemptFinishedEvent(TaskAttemptStateInternal.SUCCEEDED);
      taskAttempt.eventHandler.handle(new TaskTAttemptEvent(

      return response;
    }

    @Override
    public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
      throws YarnRemoteException {
      TaskId taskId = request.getTaskId();
      // This variant resolves the parent Job from the TaskId first, then asks it for the task's report.
      Job job = verifyAndGetJob(taskId.getJobId());
      GetTaskReportResponse response =
        recordFactory.newRecordInstance(GetTaskReportResponse.class);
      response.setTaskReport(job.getTask(taskId).getReport());
      return response;
    }

  }

  private ContainerRequestEvent
      createReq(JobId jobId, int taskAttemptId, int memory, String[] hosts,
          boolean earlierFailedAttempt, boolean reduce) {
    TaskId taskId;
    if (reduce) {
      taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
    } else {
      taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
    }

        new String[] { NetworkTopology.DEFAULT_RACK });
  }

  private ContainerFailedEvent createFailEvent(JobId jobId, int taskAttemptId,
      String host, boolean reduce) {
    TaskId taskId;
    if (reduce) {
      taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.REDUCE);
    } else {
      taskId = MRBuilderUtils.newTaskId(jobId, 0, TaskType.MAP);
    }

  }

  private static TaskAttemptCompletionEvent createTce(int eventId,
      boolean isMap, TaskAttemptCompletionEventStatus status) {
    JobId jid = MRBuilderUtils.newJobId(12345, 1, 1);
    TaskId tid = MRBuilderUtils.newTaskId(jid, 0,
        isMap ? org.apache.hadoop.mapreduce.v2.api.records.TaskType.MAP
            : org.apache.hadoop.mapreduce.v2.api.records.TaskType.REDUCE);
    TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(tid, 0);
    RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
    TaskAttemptCompletionEvent tce = recordFactory
