Examples of FileOutputCommitter


Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

  public void checkOutputSpecs(JobContext context)
      throws IOException, InterruptedException {}

  public OutputCommitter getOutputCommitter(TaskAttemptContext context)
      throws IOException, InterruptedException {
    return new FileOutputCommitter(FileOutputFormat.getOutputPath(context),
                                   context);
  }
View Full Code Here
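
To show how the pattern above fits into a complete class, here is a minimal, illustrative FileOutputFormat subclass (the class name, key/value types, and file extension are assumptions, not taken from any of the projects quoted on this page). It writes each record under the committer's temporary work path so that commitTask()/commitJob() can later promote the files into the final output directory.

import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

// Hypothetical output format; not part of Hadoop or of the projects excerpted above.
public class LineFileOutputFormat extends FileOutputFormat<NullWritable, Text> {

  @Override
  public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
    // Same pattern as the snippet above: commit into the job's configured output path.
    return new FileOutputCommitter(FileOutputFormat.getOutputPath(context), context);
  }

  @Override
  public RecordWriter<NullWritable, Text> getRecordWriter(TaskAttemptContext context)
      throws IOException, InterruptedException {
    // Resolve the committer's temporary work path and create a uniquely named
    // per-task file under it (e.g. part-r-00000.txt).
    Path workPath =
        new FileOutputCommitter(FileOutputFormat.getOutputPath(context), context).getWorkPath();
    Path file = new Path(workPath, getUniqueFile(context, "part", ".txt"));
    FileSystem fs = file.getFileSystem(context.getConfiguration());
    final FSDataOutputStream out = fs.create(file, false);

    return new RecordWriter<NullWritable, Text>() {
      @Override
      public void write(NullWritable key, Text value) throws IOException {
        out.write(value.getBytes(), 0, value.getLength());
        out.write('\n');
      }

      @Override
      public void close(TaskAttemptContext unused) throws IOException {
        out.close();
      }
    };
  }
}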

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

  public RecordWriter<ImmutableBytesWritable, KeyValue> getRecordWriter(final TaskAttemptContext context)
  throws IOException, InterruptedException {
    // Get the path of the temporary output file
    final Path outputPath = FileOutputFormat.getOutputPath(context);
    final Path outputdir = new FileOutputCommitter(outputPath, context).getWorkPath();
    final Configuration conf = context.getConfiguration();
    final FileSystem fs = outputdir.getFileSystem(conf);
    // These configs. are from hbase-*.xml
    final long maxsize = conf.getLong("hbase.hregion.max.filesize",
        HConstants.DEFAULT_MAX_FILE_SIZE);
View Full Code Here
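
The writer above targets getWorkPath() because FileOutputCommitter only promotes files that were written under its temporary work directory. For orientation, here is a hedged sketch of the commit lifecycle that the MapReduce framework (not application code) normally drives; the method names are real, but calling them by hand like this is purely illustrative.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;

// Illustrative only: shows the order of committer calls around a task's writes.
public final class CommitterLifecycleSketch {

  static void runLifecycle(Path outputPath, TaskAttemptContext taskContext) throws IOException {
    FileOutputCommitter committer = new FileOutputCommitter(outputPath, taskContext);

    committer.setupJob(taskContext);           // create the job's temporary area
    committer.setupTask(taskContext);          // per-task setup

    Path workPath = committer.getWorkPath();   // tasks write their files under this path
    // ... the RecordWriter produces files under workPath here ...

    if (committer.needsTaskCommit(taskContext)) {
      committer.commitTask(taskContext);       // move the task's files out of its attempt dir
    }
    committer.commitJob(taskContext);          // finalize everything into outputPath
  }

  private CommitterLifecycleSketch() {}
}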

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

 
  public RecordWriter<ImmutableBytesWritable, KeyValue> getRecordWriter(final TaskAttemptContext context)
  throws IOException, InterruptedException {
    // Get the path of the temporary output file
    final Path outputPath = FileOutputFormat.getOutputPath(context);
    final Path outputdir = new FileOutputCommitter(outputPath, context).getWorkPath();
    Configuration conf = context.getConfiguration();
    final FileSystem fs = outputdir.getFileSystem(conf);
    // These configs. are from hbase-*.xml
    final long maxsize = conf.getLong("hbase.hregion.max.filesize", 268435456); // 256 MB default
    final int blocksize = conf.getInt("hfile.min.blocksize.size", 65536); // 64 KB default
View Full Code Here

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

  public RecordWriter<ImmutableBytesWritable, KeyValue> getRecordWriter(final TaskAttemptContext context)
  throws IOException, InterruptedException {
    // Get the path of the temporary output file
    final Path outputPath = FileOutputFormat.getOutputPath(context);
    final Path outputdir = new FileOutputCommitter(outputPath, context).getWorkPath();
    final Configuration conf = context.getConfiguration();
    final FileSystem fs = outputdir.getFileSystem(conf);
    // These configs. are from hbase-*.xml
    final long maxsize = conf.getLong(HConstants.HREGION_MAX_FILESIZE,
        HConstants.DEFAULT_MAX_FILE_SIZE);
View Full Code Here

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

    String outputPath = getOutputDir(conf);
    // we need to do this to get the task path and set it for mapred
    // implementation since it can't be done automatically because of
    // mapreduce->mapred abstraction
    if (outputPath != null) {
      FileOutputCommitter foc = new FileOutputCommitter(getOutputPath(conf), context);
      Path path = foc.getWorkPath();
      FileSystem fs = path.getFileSystem(conf);
      fs.mkdirs(path);
      conf.set("mapred.work.output.dir", path.toString());
      LOG.info("Setting mapred.work.output.dir to {}", path.toString());
    }
View Full Code Here
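
The reason for setting mapred.work.output.dir by hand is that the old org.apache.hadoop.mapred API discovers a task's work directory through that property rather than through the new-API committer. A hedged sketch of the consuming side (assuming the same Configuration is later wrapped in a JobConf; the class name is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

public final class WorkOutputDirSketch {

  /** Reads back the directory that the snippet above published for old-API code. */
  static Path resolveWorkOutputDir(Configuration conf) {
    JobConf jobConf = new JobConf(conf);
    // org.apache.hadoop.mapred.FileOutputFormat resolves the task work directory
    // from the property set above ("mapred.work.output.dir"; later Hadoop versions
    // map it to "mapreduce.task.output.dir").
    return org.apache.hadoop.mapred.FileOutputFormat.getWorkOutputPath(jobConf);
  }

  private WorkOutputDirSketch() {}
}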

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

 
  public OutputCommitter getOutputCommitter(TaskAttemptContext context)
      throws IOException {
    if (committer == null) {
      Path output = getOutputPath(context);
      committer = new FileOutputCommitter(output, context);
    }
    return committer;
  }
View Full Code Here
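
getOutputPath(context), used throughout these snippets, simply returns the directory the driver configured with FileOutputFormat.setOutputPath(). A minimal driver skeleton (Hadoop 2 API; the job name and argument handling are placeholders, and mapper/input setup is omitted):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public final class DriverSketch {
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance(new Configuration(), "file-output-committer-example");
    // Mapper, reducer and input configuration omitted for brevity.
    job.setOutputFormatClass(TextOutputFormat.class);
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);
    // This is the path that getOutputPath(context) later hands to the
    // FileOutputCommitter constructor in the snippets on this page.
    FileOutputFormat.setOutputPath(job, new Path(args[0]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}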

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

        @Override
        public Path getDefaultWorkFile(TaskAttemptContext context,
                String extension) throws IOException {
            FileOutputCommitter committer =
                    (FileOutputCommitter) super.getOutputCommitter(context);
            return new Path(committer.getWorkPath(), getUniqueFile(context,
                    "part", extension));
        }
View Full Code Here
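
The override above performs essentially the same resolution as the stock FileOutputFormat.getDefaultWorkFile, with the base name fixed to "part", so overriding is mostly useful when the naming should change. A hedged sketch of such a variant (class name and key/value types are illustrative):

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

// Hypothetical subclass whose only change is the base name of the per-task files.
public class RenamedPartTextOutputFormat extends TextOutputFormat<NullWritable, Text> {

  @Override
  public Path getDefaultWorkFile(TaskAttemptContext context, String extension)
      throws IOException {
    FileOutputCommitter committer = (FileOutputCommitter) getOutputCommitter(context);
    // Same resolution as above, but files come out as data-r-NNNNN instead of part-r-NNNNN.
    return new Path(committer.getWorkPath(), getUniqueFile(context, "data", extension));
  }
}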

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

      createRecordWriter(final TaskAttemptContext context)
          throws IOException, InterruptedException {

    // Get the path of the temporary output file
    final Path outputPath = FileOutputFormat.getOutputPath(context);
    final Path outputdir = new FileOutputCommitter(outputPath, context).getWorkPath();
    final Configuration conf = context.getConfiguration();
    final FileSystem fs = outputdir.getFileSystem(conf);
    // These configs. are from hbase-*.xml
    final long maxsize = conf.getLong(HConstants.HREGION_MAX_FILESIZE,
        HConstants.DEFAULT_MAX_FILE_SIZE);
View Full Code Here

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

    String outputPath = getOutputDir(conf);
    // we need to do this to get the task path and set it for mapred
    // implementation since it can't be done automatically because of
    // mapreduce->mapred abstraction
    if (outputPath != null) {
      FileOutputCommitter foc =
          new FileOutputCommitter(getOutputPath(conf), context);
      conf.set("mapred.work.output.dir", foc.getWorkPath().toString());
    }
  }
View Full Code Here

Examples of org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter

                        HCatMapRedUtil.createTaskAttemptContext(currJobContext.getJobConf(),
                                currTaskContext.getTaskAttemptID(),
                                currTaskContext.getProgressible());
                //set temp location
                currTaskContext.getConfiguration().set("mapred.work.output.dir",
                        new FileOutputCommitter(new Path(localJobInfo.getLocation()), currTaskContext).getWorkPath().toString());
                //setupTask()
                baseOutputCommitter.setupTask(currTaskContext);

                org.apache.hadoop.mapred.RecordWriter baseRecordWriter =
                        baseOF.getRecordWriter(null,
View Full Code Here