Examples of TaskAttemptContext

Examples of org.apache.hadoop.mapred.TaskAttemptContext
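Here a mapred-API TaskAttemptContext and JobContext are rebuilt from a serialized task attempt ID string so that a record writer and an output committer can be obtained outside of a normally launched task. HackTaskAttemptContext and HackJobContext appear to be small adapter classes from the surrounding project rather than part of Hadoop itself.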

    HiveApiOutputFormat outputFormat = new HiveApiOutputFormat();
    outputFormat.setMyProfileId(profile);

    // Reconstruct the attempt ID from its serialized string form.
    TaskAttemptID taskAttemptID = TaskAttemptID.forName(taskAttemptIdStr);
    TaskAttemptContext taskContext = new HackTaskAttemptContext(new JobConf(conf), taskAttemptID);
    JobContext jobContext = new HackJobContext(new JobConf(conf), taskAttemptID.getJobID());

    // The context alone is enough to obtain both a writer and a committer.
    RecordWriterImpl recordWriter = outputFormat.getRecordWriter(taskContext);

    HiveApiOutputCommitter committer = outputFormat.getOutputCommitter(taskContext);

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
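In this snippet the old-API (mapred) DBOutputFormat delegates to its new-API (mapreduce) superclass: it synthesizes a TaskAttemptContext from the mapred.task.id property carried in the JobConf, then unwraps the returned writer's JDBC connection and statement to build an old-API DBRecordWriter.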

  /** {@inheritDoc} */
  public RecordWriter<K, V> getRecordWriter(FileSystem filesystem,
      JobConf job, String name, Progressable progress) throws IOException {
    org.apache.hadoop.mapreduce.RecordWriter<K, V> w = super.getRecordWriter(
      new TaskAttemptContext(job,
            TaskAttemptID.forName(job.get("mapred.task.id"))));
    org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter writer =
     (org.apache.hadoop.mapreduce.lib.db.DBOutputFormat.DBRecordWriter) w;
    try {
      return new DBRecordWriter(writer.getConnection(), writer.getStatement());
    } catch (SQLException se) {
      // Completion of the truncated excerpt: the DBRecordWriter constructor
      // throws a checked SQLException, which is wrapped so the old-API
      // signature (which declares only IOException) still holds.
      throw new IOException(se.getMessage());
    }
  }

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
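This test fragment checks that a CombineFileRecordReader hands the Configuration from its TaskAttemptContext down to the child reader it wraps, and that re-initializing with a second context replaces the first.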

    // Verify that reinitialization propagates the new Configuration when
    // CombineFileInputFormat and CombineFileRecordReader are used.

    TaskAttemptID taskId = new TaskAttemptID("jt", 0, true, 0, 0);
    Configuration conf1 = new Configuration();
    conf1.set(DUMMY_KEY, "STATE1");
    TaskAttemptContext context1 = new TaskAttemptContext(conf1, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.
    InputFormat inputFormat = new ChildRRInputFormat();

    Path [] files = { new Path("file1") };
    long [] lengths = { 1 };

    CombineFileSplit split = new CombineFileSplit(files, lengths);

    RecordReader rr = inputFormat.createRecordReader(split, context1);
    assertTrue("Unexpected RR type!", rr instanceof CombineFileRecordReader);

    // Verify that the initial configuration is the one being used.
    // Right after construction the dummy key should have value "STATE1"
    assertEquals("Invalid initial dummy key value", "STATE1",
      rr.getCurrentKey().toString());

    // Switch the active context for the RecordReader...
    Configuration conf2 = new Configuration();
    conf2.set(DUMMY_KEY, "STATE2");
    TaskAttemptContext context2 = new TaskAttemptContext(conf2, taskId);
    rr.initialize(split, context2);

    // And verify that the new context is updated into the child record reader.
    assertEquals("Invalid secondary dummy key value", "STATE2",
      rr.getCurrentKey().toString());

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
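A companion test from the same suite: with a CombineFileSplit spanning several files, the child reader's initialize() must be invoked again for each file in the split.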

    // Test that a split containing multiple files works correctly,
    // with the child RecordReader getting its initialize() method
    // called a second time.
    TaskAttemptID taskId = new TaskAttemptID("jt", 0, true, 0, 0);
    Configuration conf = new Configuration();
    TaskAttemptContext context = new TaskAttemptContext(conf, taskId);

    // This will create a CombineFileRecordReader that itself contains a
    // DummyRecordReader.
    InputFormat inputFormat = new ChildRRInputFormat();
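Both tests above call new TaskAttemptContext(conf, taskId) directly, which compiles only against Hadoop 0.20/1.x, where org.apache.hadoop.mapreduce.TaskAttemptContext is a concrete class. From Hadoop 2.x onward it is an interface, and the concrete implementation lives at org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl. A minimal, self-contained sketch of the 2.x-style construction (the demo class name is just for illustration; the attempt ID string is the standard javadoc example):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.mapreduce.TaskAttemptID;
    import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;

    public class TaskAttemptContextDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Parse an attempt ID from its serialized string form.
        TaskAttemptID attemptId =
            TaskAttemptID.forName("attempt_200707121733_0003_m_000005_0");
        // Hadoop 2.x: the TaskAttemptContext interface is backed by
        // TaskAttemptContextImpl from the o.a.h.mapreduce.task package.
        TaskAttemptContext context = new TaskAttemptContextImpl(conf, attemptId);
        System.out.println(context.getTaskAttemptID());
      }
    }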

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
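The next several snippets revolve around a compatibility factory (referenced later as JobCompatibility) that centralizes TaskAttemptContext construction. This overload validates its arguments and delegates straight to the constructor; the opening of the method was cut off in the excerpt and is reconstructed below.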

    public static TaskAttemptContext newTaskAttemptContext(Configuration conf, TaskAttemptID id) {
        // (signature and opening null check reconstructed; the excerpt began mid-method)
        if (conf == null) {
            throw new IllegalArgumentException("conf must not be null"); //$NON-NLS-1$
        }
        if (id == null) {
            throw new IllegalArgumentException("id must not be null"); //$NON-NLS-1$
        }
        return new TaskAttemptContext(conf, id);
    }

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
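A second overload of the same factory: when a Progressable callback is supplied, it returns an anonymous TaskAttemptContext subclass whose progress() forwards to the callback before calling the superclass implementation. The truncated opening and closing lines are filled in below.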

        // (opening null check reconstructed; the excerpt began mid-method)
        if (id == null) {
            throw new IllegalArgumentException("id must not be null"); //$NON-NLS-1$
        }
        if (progressable == null) {
            return newTaskAttemptContext(conf, id);
        }
        return new TaskAttemptContext(conf, id) {
            @Override
            public void progress() {
                progressable.progress();
                super.progress();
            }
        }; // (closing braces reconstructed; the excerpt was cut off here)
    }

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
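Here a throwaway Job is used purely as a carrier for the output format, key, and value classes; a TaskAttemptContext is then derived from the caller's attempt ID, and the OutputFormat is instantiated reflectively to obtain a RecordWriter.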

        assert counters != null;
        Job job = JobCompatibility.newJob(context.getConfiguration());
        job.setOutputFormatClass(formatClass);
        job.setOutputKeyClass(keyClass);
        job.setOutputValueClass(valueClass);
        TaskAttemptContext localContext = JobCompatibility.newTaskAttemptContext(
                job.getConfiguration(),
                context.getTaskAttemptID());
        if (FileOutputFormat.class.isAssignableFrom(formatClass)) {
            setOutputFilePrefix(localContext, name);
        }
        OutputFormat<?, ?> format = ReflectionUtils.newInstance(
                formatClass,
                localContext.getConfiguration());
        RecordWriter<?, ?> writer = format.getRecordWriter(localContext);
        return new ResultOutput<Writable>(localContext, writer);
    }

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
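For reading data back in a test harness, no real task attempt exists, so the helper fabricates a TaskAttemptContext from freshly minted job, task, and attempt IDs before constructing the input-format driver.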

        checkType(definition, description);
        Configuration conf = configurations.newInstance();
        Job job = JobCompatibility.newJob(conf);
        String resolved = variables.parse(description.getPathPrefix(), false);
        FileInputFormat.setInputPaths(job, new Path(resolved));
        TaskAttemptContext taskContext = JobCompatibility.newTaskAttemptContext(
                job.getConfiguration(),
                JobCompatibility.newTaskAttemptId(JobCompatibility.newTaskId(JobCompatibility.newJobId())));
        FileInputFormat<?, V> format = getOpposite(conf, description.getOutputFormat());
        FileInputFormatDriver<V> result = new FileInputFormatDriver<V>(definition, taskContext, format);
        return result;

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
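The matching output path: results are staged into a newly created temporary directory, FileOutputFormat is pointed at its URI, and the TaskAttemptContext is again synthesized from brand-new IDs.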

            throw new IOException("Failed to create temporary directory");
        }
        LOG.debug("Using staging deploy target: {}", temporaryDir);
        URI uri = temporaryDir.toURI();
        FileOutputFormat.setOutputPath(job, new Path(uri));
        TaskAttemptContext context = JobCompatibility.newTaskAttemptContext(
                job.getConfiguration(),
                JobCompatibility.newTaskAttemptId(JobCompatibility.newTaskId(JobCompatibility.newJobId())));
        FileOutputFormatDriver<V> result = new FileOutputFormatDriver<V>(context, output, NullWritable.get()) {
            @Override
            public void close() throws IOException {

Examples of org.apache.hadoop.mapreduce.TaskAttemptContext
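Finally, inside the MapReduce application master: when a task attempt must be aborted, this state-machine transition wraps the attempt's configuration and its converted attempt ID in a TaskAttemptContextImpl and passes the context to the committer via a CommitterTaskAbortEvent.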

  // (class declaration and closing brace reconstructed; the excerpt began
  //  mid-declaration, and the class name here is assumed)
  private static class TaskCleanupTransition implements
      SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
    @SuppressWarnings("unchecked")
    @Override
    public void transition(TaskAttemptImpl taskAttempt,
        TaskAttemptEvent event) {
      TaskAttemptContext taskContext =
        new TaskAttemptContextImpl(taskAttempt.conf,
            TypeConverter.fromYarn(taskAttempt.attemptId));
      taskAttempt.eventHandler.handle(new CommitterTaskAbortEvent(
          taskAttempt.attemptId, taskContext));
    }
  }