Examples of JobContextImpl
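
JobContextImpl is the concrete implementation of the read-only JobContext view that Hadoop hands to pluggable components such as InputFormats and OutputCommitters: it wraps a Configuration together with a JobID. As a baseline for the extracted snippets below, a minimal self-contained sketch of constructing one directly (the "local" identifier and job number are arbitrary placeholders):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;

public class JobContextExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // The constructor takes the job's Configuration and its JobID.
    JobContext jobContext = new JobContextImpl(conf, new JobID("local", 1));
    System.out.println(jobContext.getJobID()); // prints job_local_0001
  }
}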


Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

  }

  @Test
  public void testAtomicCommitMissingFinal() {
    TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
    JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
        taskAttemptContext.getTaskAttemptID().getJobID());
    Configuration conf = jobContext.getConfiguration();

    String workPath = "/tmp1/" + String.valueOf(rand.nextLong());
    String finalPath = "/tmp1/" + String.valueOf(rand.nextLong());
    FileSystem fs = null;
    try {
      // ...

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

  }

  @Test
  public void testAtomicCommitExistingFinal() {
    TaskAttemptContext taskAttemptContext = getTaskAttemptContext(config);
    JobContext jobContext = new JobContextImpl(taskAttemptContext.getConfiguration(),
        taskAttemptContext.getTaskAttemptID().getJobID());
    Configuration conf = jobContext.getConfiguration();

    String workPath = "/tmp1/" + String.valueOf(rand.nextLong());
    String finalPath = "/tmp1/" + String.valueOf(rand.nextLong());
    FileSystem fs = null;
    // ...
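
Both atomic-commit tests above open with the same derivation: the job-level context is rebuilt from an existing TaskAttemptContext by reusing its Configuration and walking the TaskAttemptID up to its owning JobID. Condensed to just that step (taskAttemptContext is assumed to come from a helper like the tests' getTaskAttemptContext):

  // A TaskAttemptID nests inside a TaskID, which nests inside a JobID,
  // so getTaskAttemptID().getJobID() recovers the enclosing job.
  JobContext jobContext = new JobContextImpl(
      taskAttemptContext.getConfiguration(),
      taskAttemptContext.getTaskAttemptID().getJobID());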

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

    Path listFile = new Path(cluster.getFileSystem().getUri().toString()
        + "/tmp/testGetSplits_1/fileList.seq");
    CopyListing.getCopyListing(configuration, CREDENTIALS, options).
        buildListing(listFile, options);

    JobContext jobContext = new JobContextImpl(configuration, new JobID());
    UniformSizeInputFormat uniformSizeInputFormat = new UniformSizeInputFormat();
    List<InputSplit> splits
            = uniformSizeInputFormat.getSplits(jobContext);

    int sizePerMap = totalFileSize/nMaps;

    checkSplits(listFile, splits);

    int doubleCheckedTotalSize = 0;
    int previousSplitSize = -1;
    for (int i=0; i<splits.size(); ++i) {
      InputSplit split = splits.get(i);
      int currentSplitSize = 0;
      RecordReader<Text, CopyListingFileStatus> recordReader =
        uniformSizeInputFormat.createRecordReader(split, null);
      StubContext stubContext = new StubContext(jobContext.getConfiguration(),
                                                recordReader, 0);
      final TaskAttemptContext taskAttemptContext
         = stubContext.getContext();
      recordReader.initialize(split, taskAttemptContext);
      while (recordReader.nextKeyValue()) {
      // ...
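
The JobContextImpl(configuration, new JobID()) construction used above works for any InputFormat, not just DistCp's. A hedged sketch against the stock TextInputFormat; the input path and the mapreduce.input.fileinputformat.inputdir key are illustrative assumptions (in job code you would normally use FileInputFormat.addInputPath on a Job instead):

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.task.JobContextImpl;

public class ListSplits {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("mapreduce.input.fileinputformat.inputdir", "/tmp/some-input"); // hypothetical path
    JobContext jobContext = new JobContextImpl(conf, new JobID("local", 1));
    // getSplits() needs only this read-only view; no running job is required.
    List<InputSplit> splits = new TextInputFormat().getSplits(jobContext);
    System.out.println("computed " + splits.size() + " split(s)");
  }
}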

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

                      String.valueOf(options.getMaxMaps()));
    CopyListing.getCopyListing(configuration, CREDENTIALS, options).buildListing(
            new Path(cluster.getFileSystem().getUri().toString()
                    +"/tmp/testDynInputFormat/fileList.seq"), options);

    JobContext jobContext = new JobContextImpl(configuration, new JobID());
    DynamicInputFormat<Text, CopyListingFileStatus> inputFormat =
        new DynamicInputFormat<Text, CopyListingFileStatus>();
    List<InputSplit> splits = inputFormat.getSplits(jobContext);

    int nFiles = 0;
    int taskId = 0;

    for (InputSplit split : splits) {
      RecordReader<Text, CopyListingFileStatus> recordReader =
           inputFormat.createRecordReader(split, null);
      StubContext stubContext = new StubContext(jobContext.getConfiguration(),
                                                recordReader, taskId);
      final TaskAttemptContext taskAttemptContext
         = stubContext.getContext();
     
      recordReader.initialize(splits.get(0), taskAttemptContext);
      // ...
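
Note that each reader above is initialized with splits.get(0) rather than the loop's own split; that appears deliberate rather than a bug, since DistCp's DynamicInputFormat deals work out as chunks that the record reader acquires at run time instead of staying bound to the split it was initialized with.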

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

      throws IOException {
    boolean isSupported = false;
    JobContext _jobContext;
    if (committer != null) {
      if (newApiCommitter) {
        _jobContext = new JobContextImpl(
            getConfig(), TypeConverter.fromYarn(getJobId()));
      } else {
        _jobContext = new org.apache.hadoop.mapred.JobContextImpl(
            new JobConf(getConfig()), TypeConverter.fromYarn(getJobId()));
      }
      // ...
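
The branch above is the usual new-versus-old API fork: the new API context is org.apache.hadoop.mapreduce.task.JobContextImpl over a plain Configuration, while the old API variant lives in org.apache.hadoop.mapred and expects a JobConf. Condensed, with conf and jobId standing in for getConfig() and the converted YARN job ID:

  org.apache.hadoop.mapreduce.JobContext jobContext;
  if (newApiCommitter) {
    jobContext = new org.apache.hadoop.mapreduce.task.JobContextImpl(conf, jobId);
  } else {
    // The mapred variant subclasses the mapreduce.task one, so this assignment is legal.
    jobContext = new org.apache.hadoop.mapred.JobContextImpl(new JobConf(conf), jobId);
  }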

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

        }

        checkTaskLimits();

        if (job.newApiCommitter) {
          job.jobContext = new JobContextImpl(job.conf,
              job.oldJobId);
        } else {
          job.jobContext = new org.apache.hadoop.mapred.JobContextImpl(
              job.conf, job.oldJobId);
        }
        // ...

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

    }

    @Override
    public JobContext createJobContext(Configuration conf,
                                       JobID jobId) {
      return new JobContextImpl(conf instanceof JobConf ? new JobConf(conf) : conf,
              jobId);
    }
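
The instanceof check above makes a defensive copy when the caller passes a JobConf (the JobConf(Configuration) constructor copies the underlying properties), presumably so that mutations made through the returned context cannot leak back into the caller's object; a plain Configuration is passed through as-is.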

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

    return new TaskAttemptID(jobId.getJtIdentifier(), jobId.getId(), isMap ?  TaskType.MAP : TaskType.REDUCE, taskId, id);
  }

  @Override
  public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
    return new JobContextImpl(job.getConfiguration(), job.getJobID());
  }

Examples of org.apache.hadoop.mapreduce.task.JobContextImpl

      this.memoryPerMap = conf.getMemoryForMapTask();
      this.memoryPerReduce = conf.getMemoryForReduceTask();

      this.taskCompletionEvents = new ArrayList<TaskCompletionEvent>(
          numMapTasks + numReduceTasks + 10);
      JobContext jobContext = new JobContextImpl(conf, jobId);
      this.jobSetupCleanupNeeded = jobContext.getJobSetupCleanupNeeded();
      this.taskCleanupNeeded = jobContext.getTaskCleanupNeeded();

      // Construct the jobACLs
      status.setJobACLs(jobtracker.getJobACLsManager().constructJobACLs(conf));

      this.mapFailuresPercent = conf.getMaxMapTaskFailuresPercent();
      // ...
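
The two flags read from the context here gate whether the framework schedules job setup/cleanup and per-task cleanup at all. A small standalone sketch; the backing key names are quoted from memory of the 2.x MRJobConfig constants, so treat them as assumptions:

  Configuration conf = new Configuration();
  JobContext ctx = new JobContextImpl(conf, new JobID("local", 1));
  // Believed to be backed by mapreduce.job.committer.setup.cleanup.needed and
  // mapreduce.job.committer.task.cleanup.needed; both default to true.
  boolean needsSetupCleanup = ctx.getJobSetupCleanupNeeded();
  boolean needsTaskCleanup = ctx.getTaskCleanupNeeded();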