Package org.apache.hadoop.mapreduce.v2.api.protocolrecords

Examples of org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest

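The snippets below share one pattern: obtain a GetCountersRequest from a RecordFactory, set the job id on it, and hand it to MRClientProtocol.getCounters. The following is a minimal sketch of that pattern, not taken from the Hadoop sources; the fetchCounters helper and its parameters are hypothetical, and exact signatures may differ between Hadoop versions.

import java.io.IOException;

import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class GetCountersExample {

  // Hypothetical helper: fetches the YARN-side Counters record for a job.
  // The MRClientProtocol proxy and the JobId are assumed to be supplied by
  // the caller (e.g. a proxy built with YarnRPC, as in the test snippets below).
  static Counters fetchCounters(MRClientProtocol proxy, JobId jobId)
      throws IOException {
    RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
    GetCountersRequest request =
        recordFactory.newRecordInstance(GetCountersRequest.class);
    request.setJobId(jobId);
    GetCountersResponse response = proxy.getCounters(request);
    return response.getCounters();
  }
}

In the real client code (first snippet below) the same request is built the same way but sent through a reflective invoke helper rather than by calling the proxy directly.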

  public org.apache.hadoop.mapreduce.Counters getJobCounters(JobID arg0)
      throws IOException, InterruptedException {
    // Convert the classic JobID to its YARN record form, build the request,
    // invoke getCounters over RPC, and convert the returned counters back.
    org.apache.hadoop.mapreduce.v2.api.records.JobId jobID = TypeConverter.toYarn(arg0);
    GetCountersRequest request = recordFactory.newRecordInstance(GetCountersRequest.class);
    request.setJobId(jobID);
    Counters cnt = ((GetCountersResponse)
        invoke("getCounters", GetCountersRequest.class, request)).getCounters();
    return TypeConverter.fromYarn(cnt);
  }
View Full Code Here


      e.printStackTrace();
      Assert.fail("Failed to create record");
    }

    try {
      // The PB record factory should hand back the protobuf-backed implementation.
      GetCountersRequest request = pbRecordFactory.newRecordInstance(GetCountersRequest.class);
      Assert.assertEquals(GetCountersRequestPBImpl.class, request.getClass());
    } catch (YarnRuntimeException e) {
      e.printStackTrace();
      Assert.fail("Failed to create record");
    }
  }
View Full Code Here

    // Verify that all objects are fully populated by invoking RPCs.
    YarnRPC rpc = YarnRPC.create(conf);
    MRClientProtocol proxy =
      (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
          app.clientService.getBindAddress(), conf);
    GetCountersRequest gcRequest =
        recordFactory.newRecordInstance(GetCountersRequest.class);   
    gcRequest.setJobId(job.getID());
    Assert.assertNotNull("Counters is null",
        proxy.getCounters(gcRequest).getCounters());

    GetJobReportRequest gjrRequest =
        recordFactory.newRecordInstance(GetJobReportRequest.class);
View Full Code Here

            .getDiagnostics(diagnosticRequest);
    // It is strange: why is there one empty string?
    assertEquals(1, diagnosticResponse.getDiagnosticsCount());
    assertEquals("", diagnosticResponse.getDiagnostics(0));

    GetCountersRequest counterRequest = recordFactory
            .newRecordInstance(GetCountersRequest.class);
    counterRequest.setJobId(job.getID());
    GetCountersResponse counterResponse = protocol.getCounters(counterRequest);
    assertNotNull(counterResponse.getCounters()
        .getCounterGroup("org.apache.hadoop.mapreduce.JobCounter"));
    // test getJobReport
    GetJobReportRequest reportRequest = recordFactory
            .newRecordInstance(GetJobReportRequest.class);
View Full Code Here

    YarnConfiguration conf = new YarnConfiguration();
    conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir);
    JobContext mockJobContext = mock(JobContext.class);
    ApplicationAttemptId attemptid =
      ConverterUtils.toApplicationAttemptId("appattempt_1234567890000_0001_0");
    JobId jobId =  TypeConverter.toYarn(
        TypeConverter.fromYarn(attemptid.getApplicationId()));
   
    WaitForItHandler waitForItHandler = new WaitForItHandler();
   
    when(mockContext.getApplicationID()).thenReturn(attemptid.getApplicationId());
View Full Code Here

  // Builds a job-level counter update for a task attempt that completed
  // successfully; the millis/resource counters are filled in by
  // updateMillisCounters.
  private static JobCounterUpdateEvent createJobCounterUpdateEventTASucceeded(
      TaskAttemptImpl taskAttempt) {
    TaskId taskId = taskAttempt.attemptId.getTaskId();
    JobCounterUpdateEvent jce = new JobCounterUpdateEvent(taskId.getJobId());
    updateMillisCounters(jce, taskAttempt);
    return jce;
  }
View Full Code Here

  // Adds the elapsed time and the memory/vcore allocation of this attempt
  // to the job's millis counters.
  private static void updateMillisCounters(JobCounterUpdateEvent jce,
      TaskAttemptImpl taskAttempt) {
    TaskType taskType = taskAttempt.getID().getTaskId().getTaskType();
    long duration = taskAttempt.getFinishTime() - taskAttempt.getLaunchTime();
    int mbRequired =
        taskAttempt.getMemoryRequired(taskAttempt.conf, taskType);
    int vcoresRequired = taskAttempt.getCpuRequired(taskAttempt.conf, taskType);
View Full Code Here

  // Builds a job-level counter update for a failed task attempt, bumping the
  // failed-map or failed-reduce counter depending on the task type.
  private static JobCounterUpdateEvent createJobCounterUpdateEventTAFailed(
      TaskAttemptImpl taskAttempt, boolean taskAlreadyCompleted) {
    TaskType taskType = taskAttempt.getID().getTaskId().getTaskType();
    JobCounterUpdateEvent jce =
        new JobCounterUpdateEvent(taskAttempt.getID().getTaskId().getJobId());

    if (taskType == TaskType.MAP) {
      jce.addCounterUpdate(JobCounter.NUM_FAILED_MAPS, 1);
    } else {
View Full Code Here
