Package org.apache.hadoop.mapreduce.v2.api.protocolrecords

Examples of org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest
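
Each example below follows the same basic pattern: obtain a GetJobReportRequest from a RecordFactory, set the JobId, and read the JobReport out of the GetJobReportResponse. The minimal sketch below shows that pattern end to end against an MRClientProtocol proxy; the class and method names are illustrative, the caller is assumed to supply the proxy and the JobId, and the signatures assume a Hadoop 2.x protocol where getJobReport throws IOException.

import java.io.IOException;

import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class GetJobReportSketch {

  // Illustrative helper: the caller is assumed to supply a connected
  // MRClientProtocol proxy and the JobId it wants a report for.
  public static JobReport fetchReport(MRClientProtocol proxy, JobId jobId)
      throws IOException {
    RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
    // Build the request record and attach the job id.
    GetJobReportRequest request =
        recordFactory.newRecordInstance(GetJobReportRequest.class);
    request.setJobId(jobId);
    // The response wraps the JobReport (state, progress, job file, tracking URL).
    return proxy.getJobReport(request).getJobReport();
  }
}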


  // Client-side helper: fetch the job's report before resolving log file locations.
  public LogParams getLogFilePath(JobID oldJobID, TaskAttemptID oldTaskAttemptID)
      throws YarnRemoteException, IOException {
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
        TypeConverter.toYarn(oldJobID);
    // Build the request through the record factory and attach the converted JobId.
    GetJobReportRequest request =
        recordFactory.newRecordInstance(GetJobReportRequest.class);
    request.setJobId(jobId);

    // Invoke getJobReport over the client protocol and unwrap the JobReport.
    JobReport report =
        ((GetJobReportResponse) invoke("getJobReport",
            GetJobReportRequest.class, request)).getJobReport();
    if (EnumSet.of(JobState.SUCCEEDED, JobState.FAILED, JobState.KILLED,
        // ... (excerpt truncated)


    // Test code: request the counters and then the job report for the same job
    // over the client protocol proxy, and verify the returned report.
    GetCountersRequest gcRequest =
        recordFactory.newRecordInstance(GetCountersRequest.class);
    gcRequest.setJobId(job.getID());
    Assert.assertNotNull("Counters is null",
        proxy.getCounters(gcRequest).getCounters());

    GetJobReportRequest gjrRequest =
        recordFactory.newRecordInstance(GetJobReportRequest.class);
    gjrRequest.setJobId(job.getID());
    JobReport jr = proxy.getJobReport(gjrRequest).getJobReport();
    verifyJobReport(jr);

    GetTaskAttemptCompletionEventsRequest gtaceRequest =
        // ... (excerpt truncated)

  // Client-side helper: fetch the job's report and build a JobStatus from it.
  public JobStatus getJobStatus(JobID oldJobID) throws IOException {
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
        TypeConverter.toYarn(oldJobID);
    GetJobReportRequest request =
        recordFactory.newRecordInstance(GetJobReportRequest.class);
    request.setJobId(jobId);
    JobReport report = ((GetJobReportResponse) invoke("getJobReport",
        GetJobReportRequest.class, request)).getJobReport();
    JobStatus jobStatus = null;
    if (report != null) {
      if (StringUtils.isEmpty(report.getJobFile())) {
        // ... (excerpt truncated)

  // The same helper in a variant where the client protocol declares only IOException.
  public LogParams getLogFilePath(JobID oldJobID, TaskAttemptID oldTaskAttemptID)
      throws IOException {
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
        TypeConverter.toYarn(oldJobID);
    GetJobReportRequest request =
        recordFactory.newRecordInstance(GetJobReportRequest.class);
    request.setJobId(jobId);

    JobReport report =
        ((GetJobReportResponse) invoke("getJobReport",
            GetJobReportRequest.class, request)).getJobReport();
    if (EnumSet.of(JobState.SUCCEEDED, JobState.FAILED, JobState.KILLED,
        // ... (excerpt truncated)

    // Test code: getCounters followed by getJobReport against the protocol,
    // with assertions on the fields of the returned JobReport.
    GetCountersRequest counterRequest = recordFactory
            .newRecordInstance(GetCountersRequest.class);
    counterRequest.setJobId(job.getID());
    GetCountersResponse counterResponse = protocol.getCounters(counterRequest);
    assertNotNull(counterResponse.getCounters()
        .getCounterGroup("org.apache.hadoop.mapreduce.JobCounter"));
    // test getJobReport
    GetJobReportRequest reportRequest = recordFactory
            .newRecordInstance(GetJobReportRequest.class);
    reportRequest.setJobId(job.getID());
    GetJobReportResponse jobReport = protocol.getJobReport(reportRequest);
    assertEquals(1, jobReport.getJobReport().getAMInfos().size());
    assertNotNull(jobReport.getJobReport().getJobFile());
    assertEquals(job.getID().toString(),
        jobReport.getJobReport().getJobId().toString());
    assertNotNull(jobReport.getJobReport().getTrackingUrl());
    // ... (excerpt truncated)

    // Test setup: a YarnConfiguration pointing at the staging dir, a mocked
    // JobContext, and a JobId derived from a parsed ApplicationAttemptId.
    YarnConfiguration conf = new YarnConfiguration();
    conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir);
    JobContext mockJobContext = mock(JobContext.class);
    ApplicationAttemptId attemptid =
        ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0");
    JobId jobId = TypeConverter.toYarn(
        TypeConverter.fromYarn(attemptid.getApplicationId()));

    WaitForItHandler waitForItHandler = new WaitForItHandler();

    when(mockContext.getApplicationID()).thenReturn(attemptid.getApplicationId());
    // ... (excerpt truncated)

  // Builds a JobCounterUpdateEvent for the attempt's job and applies the
  // elapsed-millis counter updates for a succeeded task attempt.
  private static JobCounterUpdateEvent createJobCounterUpdateEventTASucceeded(
      TaskAttemptImpl taskAttempt) {
    TaskId taskId = taskAttempt.attemptId.getTaskId();
    JobCounterUpdateEvent jce = new JobCounterUpdateEvent(taskId.getJobId());
    updateMillisCounters(jce, taskAttempt);
    return jce;
  }
