Package org.apache.hadoop.mapreduce

Examples of org.apache.hadoop.mapreduce.JobID
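org.apache.hadoop.mapreduce.JobID identifies a MapReduce job as a cluster identifier (the JobTracker start time in MRv1, the application cluster timestamp under YARN) plus a sequence number, serialized as job_<identifier>_<sequence>. The excerpts below show the recurring patterns: a throwaway new JobID() to satisfy a JobContext, JobID.forName to parse a known ID, and TypeConverter to move between this class and the YARN-side JobId. A minimal sketch of the basics (the ID values are illustrative):

    import org.apache.hadoop.mapreduce.JobID;

    // Build an ID from its parts: a cluster identifier and a job number.
    JobID id = new JobID("1234567890000", 1);
    System.out.println(id);                  // prints job_1234567890000_0001

    // Parse the string form back; forName throws IllegalArgumentException
    // on malformed input.
    JobID parsed = JobID.forName("job_1234567890000_0001");
    assert id.equals(parsed);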


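Computing input splits by handing an InputFormat a JobContext built from a throwaway JobID: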
        // earlier calls may have written to the job - get a hold of
        // the modified conf
        conf = job.getConfiguration();
        inputFormat = wrappedLoadFunc.getInputFormat();
        try {
            // a throwaway JobID is enough to build the JobContext that
            // getSplits() needs
            inpSplits = inputFormat.getSplits(new JobContext(conf,
                    new JobID()));
        } catch (InterruptedException e) {
            throw new IOException(e);
        }
    }


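A testing shim that constructs the Hadoop 1.x JobContext(Configuration, JobID) reflectively: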
  private static class MapreduceV1Shim extends MapreduceTestingShim {
    public JobContext newJobContext(Configuration jobConf) throws IOException {
      // Implementing:
      // return new JobContext(jobConf, new JobID());
      JobID jobId = new JobID();
      Constructor<JobContext> c;
      try {
        c = JobContext.class.getConstructor(Configuration.class, JobID.class);
        return c.newInstance(jobConf, jobId);
      } catch (Exception e) {
        throw new IllegalStateException(
            "Failed to instantiate new JobContext(jobConf, jobId)", e);
      }
    }
  }
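The reflection above exists because JobContext is a concrete class in Hadoop 1.x but became an interface in 2.x, where the concrete type is JobContextImpl. A minimal sketch of the direct Hadoop 2.x equivalent, assuming org.apache.hadoop.mapreduce.task.JobContextImpl is on the classpath:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.task.JobContextImpl;

    // On Hadoop 2.x, the old "new JobContext(conf, new JobID())" becomes:
    JobContext context = new JobContextImpl(new Configuration(), new JobID());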

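Wiring an RCFile input driver to a fresh JobContext in a schema-conversion test: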
public class TestRCFileOutputStorageDriver extends TestCase {

  public void testConversion() throws IOException {
    Configuration conf = new Configuration();
    JobContext jc = new JobContext(conf, new JobID());

    HowlSchema schema = buildHiveSchema();
    HowlInputStorageDriver isd = new RCFileInputDriver();

    isd.setOriginalSchema(jc, schema);

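After writing test rows, reading the RCFile back with matching original and output schemas: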
    writer.close();
    BytesRefArrayWritable[] bytesArr = new BytesRefArrayWritable[]{bytes,bytes2};

    HowlSchema schema = buildHiveSchema();
    RCFileInputDriver sd = new RCFileInputDriver();
    JobContext jc = new JobContext(conf, new JobID());
    sd.setInputPath(jc, file.toString());
    InputFormat<?,?> iF = sd.getInputFormat(null);
    InputSplit split = iF.getSplits(jc).get(0);
    sd.setOriginalSchema(jc, schema);
    sd.setOutputSchema(jc, schema);

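The same read path, this time with a pruned output schema: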
    writer.append(bytes2);
    writer.close();
    BytesRefArrayWritable[] bytesArr = new BytesRefArrayWritable[]{bytes,bytes2};

    RCFileInputDriver sd = new RCFileInputDriver();
    JobContext jc = new JobContext(conf, new JobID());
    sd.setInputPath(jc, file.toString());
    InputFormat<?,?> iF = sd.getInputFormat(null);
    InputSplit split = iF.getSplits(jc).get(0);
    sd.setOriginalSchema(jc, buildHiveSchema());
    sd.setOutputSchema(jc, buildPrunedSchema());

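And once more with a reordered output schema: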
    writer.append(bytes2);
    writer.close();
    BytesRefArrayWritable[] bytesArr = new BytesRefArrayWritable[]{bytes,bytes2};

    RCFileInputDriver sd = new RCFileInputDriver();
    JobContext jc = new JobContext(conf, new JobID());
    sd.setInputPath(jc, file.toString());
    InputFormat<?,?> iF = sd.getInputFormat(null);
    InputSplit split = iF.getSplits(jc).get(0);
    sd.setOriginalSchema(jc, buildHiveSchema());
    sd.setOutputSchema(jc, buildReorderedSchema());

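Converting a YARN ApplicationId to a JobID, then to an MRv2 JobId, to emit a history event: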
              resource, System.currentTimeMillis() + 10000, 42, 42);
        Token containerToken = newContainerToken(nodeId, "password".getBytes(),
              containerTokenIdentifier);
        Container container = Container.newInstance(cId, nodeId,
            NM_HOST + ":" + NM_HTTP_PORT, resource, null, containerToken);
        JobID id = TypeConverter.fromYarn(applicationId);
        JobId jobId = TypeConverter.toYarn(id);
        getContext().getEventHandler().handle(new JobHistoryEvent(jobId,
            new NormalizedResourceEvent(
                org.apache.hadoop.mapreduce.TaskType.REDUCE,
            100)));
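TypeConverter bridges the classic org.apache.hadoop.mapreduce.JobID and the MRv2 record type org.apache.hadoop.mapreduce.v2.api.records.JobId, as the excerpt above shows. A minimal round-trip sketch, assuming the MRv2 TypeConverter from hadoop-mapreduce-client-common is available; note that fromYarn hands back the old-API org.apache.hadoop.mapred.JobID subclass, so the string forms are compared rather than using equals:

    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.TypeConverter;
    import org.apache.hadoop.mapreduce.v2.api.records.JobId;

    JobID classic = JobID.forName("job_1234567890000_0001");
    JobId yarn = TypeConverter.toYarn(classic);   // MRv2 record form
    JobID back = TypeConverter.fromYarn(yarn);    // actually a mapred.JobID
    assert classic.toString().equals(back.toString());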

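Mocking a JobImpl whose ID comes from a parsed JobID: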
     @Override
     protected Job createJob(Configuration conf, JobStateInternal forcedState,
         String diagnostic) {
       JobImpl jobImpl = mock(JobImpl.class);
       when(jobImpl.getInternalState()).thenReturn(this.jobStateInternal);
       JobID jobID = JobID.forName("job_1234567890000_0001");
       JobId jobId = TypeConverter.toYarn(jobID);
       when(jobImpl.getID()).thenReturn(jobId);
       ((AppContext) getContext())
           .getAllJobs().put(jobImpl.getID(), jobImpl);
       return jobImpl;

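Setting up a job-ACL test around a parsed JobID: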
    String user2 = user1 + "1234";
    UserGroupInformation ugi1 = UserGroupInformation.createRemoteUser(user1);
    UserGroupInformation ugi2 = UserGroupInformation.createRemoteUser(user2);

    // Create the job
    JobID jobID = JobID.forName("job_1234567890000_0001");
    JobId jobId = TypeConverter.toYarn(jobID);

    // Set up configuration so that only user1 (the owner) has access
    Configuration conf1 = new Configuration();
    conf1.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
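The excerpt above is building toward per-user access checks. A minimal sketch of the configuration side, assuming the standard MRConfig/MRJobConfig keys (the user name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRConfig;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    Configuration conf = new Configuration();
    conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);     // turn ACL checks on
    conf.set(MRJobConfig.JOB_ACL_VIEW_JOB, "user1");     // who may view the job
    conf.set(MRJobConfig.JOB_ACL_MODIFY_JOB, "user1");   // who may modify it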

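The end of an access-control check, followed by a diagnostics-update test keyed by a parsed JobID: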
    Assert.assertTrue(job5.checkAccess(ugi2, null));
  }

  @Test
  public void testReportDiagnostics() throws Exception {
    JobID jobID = JobID.forName("job_1234567890000_0001");
    JobId jobId = TypeConverter.toYarn(jobID);
    final String diagMsg = "some diagnostic message";
    final JobDiagnosticsUpdateEvent diagUpdateEvent =
        new JobDiagnosticsUpdateEvent(jobId, diagMsg);
    MRAppMetrics mrAppMetrics = MRAppMetrics.create();
