Examples of TaskAttemptID


Examples of org.apache.hadoop.mapreduce.TaskAttemptID

  public void testReinit() throws Exception {
    // Test that a split containing multiple files works correctly,
    // with the child RecordReader getting its initialize() method
    // called a second time.
    TaskAttemptID taskId = new TaskAttemptID("jt", 0, true, 0, 0);
    Configuration conf = new Configuration();
    TaskAttemptContext context = new TaskAttemptContext(conf, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.
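The five-argument constructor above encodes the task kind as a deprecated
boolean isMap flag. A minimal sketch of the equivalent construction against
a newer (TaskType-aware) Hadoop classpath; this is a comparison aid, not
code from the original project:

    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.TaskType;

    // "true" in the deprecated constructor meant a map task, so the
    // TaskType-based equivalent of new TaskAttemptID("jt", 0, true, 0, 0) is:
    TaskAttemptID taskId = new TaskAttemptID("jt", 0, TaskType.MAP, 0, 0);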

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

    /**
     * Creates a new {@link TaskAttemptID} (attempt number 0) for the given task.
     */
    public static TaskAttemptID newTaskAttemptId(TaskID taskId) {
        if (taskId == null) {
            throw new IllegalArgumentException("taskId must not be null"); //$NON-NLS-1$
        }
        return new TaskAttemptID(taskId, 0);
    }
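A usage sketch for the factory above; the job and task identifiers are
illustrative values, not taken from the original project:

    // Map task 0 of a made-up job, handed to the factory for attempt 0.
    JobID jobId = new JobID("jt", 1);
    TaskID taskId = new TaskID(jobId, TaskType.MAP, 0);
    TaskAttemptID attemptId = newTaskAttemptId(taskId);
    // attemptId.toString() yields "attempt_jt_0001_m_000000_0"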

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

            LOG.debug(MessageFormat.format(
                    "Progressable object is found (jobId={0}, object={1})",
                    jobContext.getJobID(),
                    progressable));
        }
        TaskAttemptID id = JobCompatibility.newTaskAttemptId(JobCompatibility.newTaskId(jobContext.getJobID()));
        return JobCompatibility.newTaskAttemptContext(jobContext.getConfiguration(), id, progressable);
    }

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

    conf.setInt(org.apache.hadoop.mapreduce.lib.input.
        LineRecordReader.MAX_LINE_LENGTH, Integer.MAX_VALUE);
    assertTrue("unexpected test data at " + testFile,
        testFileSize > firstSplitLength);

    TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());

    // read the data without splitting to count the records
    FileSplit split = new FileSplit(testFilePath, 0, testFileSize,
        (String[])null);
    LineRecordReader reader = new LineRecordReader();
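The snippet breaks off right after the reader is constructed. A sketch of
how a new-API LineRecordReader is typically driven from that point; the
counting loop is assumed here, not part of the original test:

    reader.initialize(split, context);
    int numRecords = 0;
    while (reader.nextKeyValue()) {
      numRecords++;   // one record per line of text
    }
    reader.close();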

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

    File testFile = new File(testFileUrl.getFile());
    long testFileSize = testFile.length();
    Path testFilePath = new Path(testFile.getAbsolutePath());
    Configuration conf = new Configuration();
    conf.setInt("io.file.buffer.size", 1);
    TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());

    // Gather the records returned by the record reader
    ArrayList<String> records = new ArrayList<String>();

    long offset = 0;

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

    long testFileSize = testFile.length();
    Configuration conf = new Configuration();
    conf.setInt(org.apache.hadoop.mapreduce.lib.input.
        LineRecordReader.MAX_LINE_LENGTH, Integer.MAX_VALUE);

    TaskAttemptContext context = new TaskAttemptContextImpl(conf, new TaskAttemptID());

    // read the data and check whether BOM is skipped
    FileSplit split = new FileSplit(testFilePath, 0, testFileSize,
        (String[])null);
    LineRecordReader reader = new LineRecordReader();

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

    //
    // Old Hadoop API
    //
    public org.apache.hadoop.mapred.InputSplit[] getSplits(JobConf jobConf, int numSplits) throws IOException
    {
        TaskAttemptContext tac = new TaskAttemptContext(jobConf, new TaskAttemptID());
        List<org.apache.hadoop.mapreduce.InputSplit> newInputSplits = this.getSplits(tac);
        org.apache.hadoop.mapred.InputSplit[] oldInputSplits = new org.apache.hadoop.mapred.InputSplit[newInputSplits.size()];
        for (int i = 0; i < newInputSplits.size(); i++)
            oldInputSplits[i] = (ColumnFamilySplit)newInputSplits.get(i);
        return oldInputSplits;

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

      }
    } catch (Exception e) {
      throw new IOException("Failed getting constructor", e);
    }
    try {
      return (TaskAttemptContext)c.newInstance(job.getConfiguration(), new TaskAttemptID());
    } catch (Exception e) {
      throw new IOException("Failed creating instance", e);
    }
  }
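The reflective lookup that produces the constructor c is truncated out of
the snippet. A sketch of how such a compatibility shim typically obtains
it; the class names and fallback order are assumptions based on where
Hadoop 1 and Hadoop 2 place the context class, and the enclosing method is
assumed to catch Exception, as the original snippet does:

    // Requires java.lang.reflect.Constructor.
    Class<?> clazz;
    try {
      // Hadoop 2: TaskAttemptContext is an interface; the class is the *Impl.
      clazz = Class.forName("org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl");
    } catch (ClassNotFoundException e) {
      // Hadoop 1: TaskAttemptContext itself is a concrete class.
      clazz = Class.forName("org.apache.hadoop.mapreduce.TaskAttemptContext");
    }
    Constructor<?> c = clazz.getConstructor(Configuration.class, TaskAttemptID.class);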

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

  @Override
  public RecordWriter<Text, BlurMutate> getRecordWriter(TaskAttemptContext context) throws IOException,
      InterruptedException {
    int id = context.getTaskAttemptID().getTaskID().getId();
    TaskAttemptID taskAttemptID = context.getTaskAttemptID();
    return new BlurRecordWriter(context.getConfiguration(), id, taskAttemptID.toString() + ".tmp");
  }

Examples of org.apache.hadoop.mapreduce.TaskAttemptID

        POStore store = new POStore(new OperatorKey());
        store.setSFile(new FileSpec(file, storeFuncSpec));
        PigOutputFormat.setLocation(jc, store);
        OutputCommitter oc;
        // create a simulated TaskAttemptContext
        TaskAttemptContext tac = new TaskAttemptContext(conf, new TaskAttemptID());
        PigOutputFormat.setLocation(tac, store);
        RecordWriter<?, ?> rw;
        try {
            of.checkOutputSpecs(jc);
            oc = of.getOutputCommitter(tac);