Package org.apache.hcatalog.mapreduce

Examples of org.apache.hcatalog.mapreduce.HCatOutputFormat


    this(null, null);
  }

  @Override
  public OutputFormat getOutputFormat() throws IOException {
    return new HCatOutputFormat();
  }
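
A minimal driver that wires HCatOutputFormat into a job the same way looks roughly like this. This is a sketch, not taken from the snippet: the database, table, and class names are illustrative, and the mapper/input side is left to the application.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.mapreduce.HCatOutputFormat;
import org.apache.hcatalog.mapreduce.OutputJobInfo;

public class HCatWriteDriver {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = new Job(conf, "hcat-write-example");
    job.setJarByClass(HCatWriteDriver.class);

    // Describe the target table; a null partition map means unpartitioned (or dynamic partitions).
    HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", "output_table", null));
    // Publish the table's own schema for the records we will write.
    HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));

    job.setOutputFormatClass(HCatOutputFormat.class);
    job.setOutputKeyClass(WritableComparable.class);
    job.setOutputValueClass(DefaultHCatRecord.class);
    // job.setInputFormatClass(...) and job.setMapperClass(...) are application-specific.

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}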


  }

  public void invokeOutputCommitterForLocalMode(Job job) throws IOException {
    if (isLocalJobTracker(job) && isHadoop1()) {
      LOG.info("Explicitly committing job in local mode");
      HCatHadoopShims.Instance.get().commitJob(new HCatOutputFormat(), job);
    }
  }
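
This helper exists to work around the Hadoop 1 local job runner, where the job-level commit for HCatOutputFormat is not otherwise triggered. A hedged sketch of how a caller might use it after the job finishes; it is an instance method, so this fragment assumes its enclosing class is in scope:

// Fragment: assumes invokeOutputCommitterForLocalMode(...) above is reachable from here.
Job job = new Job(conf, "hcat-local-write");
// ... configure HCatOutputFormat as in the other examples on this page ...
if (job.waitForCompletion(true)) {
  // No-op unless running on Hadoop 1 with the local job tracker.
  invokeOutputCommitterForLocalMode(job);
}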

        Job job;
        try {
            job = new Job(conf);
            HCatOutputFormat.setOutput(job, jobInfo);
            HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));
            HCatOutputFormat outFormat = new HCatOutputFormat();
            outFormat.checkOutputSpecs(job);
            outFormat.getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
                (job.getConfiguration(), HCatHadoopShims.Instance.get().createTaskAttemptID())).setupJob(job);
        } catch (IOException e) {
            throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
        } catch (InterruptedException e) {
            throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
        }
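
The jobInfo passed to setOutput() above is an OutputJobInfo describing the target table and, optionally, a static partition. A small sketch of building one; the database, table, and partition column names are assumptions:

import java.util.HashMap;
import java.util.Map;
import org.apache.hcatalog.mapreduce.OutputJobInfo;

public OutputJobInfo createJobInfo() {
  // Write into the ds='2013-01-01' partition of default.web_logs (illustrative names).
  Map<String, String> partitionValues = new HashMap<String, String>();
  partitionValues.put("ds", "2013-01-01");
  // Passing null instead of the map targets an unpartitioned table (or dynamic partitioning).
  return OutputJobInfo.create("default", "web_logs", partitionValues);
}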

    @Override
    public void write(Iterator<HCatRecord> recordItr) throws HCatException {

        int id = sp.getId();
        setVarsInConf(id);
        HCatOutputFormat outFormat = new HCatOutputFormat();
        TaskAttemptContext cntxt = HCatHadoopShims.Instance.get().createTaskAttemptContext
            (conf, new TaskAttemptID(HCatHadoopShims.Instance.get().createTaskID(), id));
        OutputCommitter committer = null;
        RecordWriter<WritableComparable<?>, HCatRecord> writer;
        try {
            committer = outFormat.getOutputCommitter(cntxt);
            committer.setupTask(cntxt);
            writer = outFormat.getRecordWriter(cntxt);
            while (recordItr.hasNext()) {
                HCatRecord rec = recordItr.next();
                writer.write(null, rec);
            }
            writer.close(cntxt);
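
This write() method is the slave-side half of HCatalog's data transfer (reader/writer) API: a master prepares a WriterContext, ships it to each slave, and the slave pushes an Iterator of HCatRecords through a writer backed by HCatOutputFormat. A rough sketch of calling it from slave code, assuming the context arrived from the master and the target table has two columns:

import java.util.ArrayList;
import java.util.List;
import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.transfer.DataTransferFactory;
import org.apache.hcatalog.data.transfer.HCatWriter;
import org.apache.hcatalog.data.transfer.WriterContext;

public void writeOnSlave(WriterContext context) throws HCatException {
  // Build a few records matching the (assumed) two-column table schema.
  List<HCatRecord> records = new ArrayList<HCatRecord>();
  for (int i = 0; i < 10; i++) {
    DefaultHCatRecord rec = new DefaultHCatRecord(2);
    rec.set(0, "user-" + i);
    rec.set(1, i);
    records.add(rec);
  }
  // Internally this reaches the write() implementation shown above.
  HCatWriter writer = DataTransferFactory.getHCatWriter(context);
  writer.write(records.iterator());
}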

    }

    @Override
    public void commit(WriterContext context) throws HCatException {
        try {
            new HCatOutputFormat().getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
                (context.getConf(), HCatHadoopShims.Instance.get().createTaskAttemptID()))
                .commitJob(HCatHadoopShims.Instance.get().createJobContext(context.getConf(), null));
        } catch (IOException e) {
            throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
        } catch (InterruptedException e) {
            throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
        }

    }

    @Override
    public void abort(WriterContext context) throws HCatException {
        try {
            new HCatOutputFormat().getOutputCommitter(HCatHadoopShims.Instance.get().createTaskAttemptContext
                (context.getConf(), HCatHadoopShims.Instance.get().createTaskAttemptID()))
                .abortJob(HCatHadoopShims.Instance.get().createJobContext(context.getConf(), null), State.FAILED);
        } catch (IOException e) {
            throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
        } catch (InterruptedException e) {
            throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
        }
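
commit() and abort() are the matching master-side calls of the same API: the master obtains a WriterContext from prepareWrite(), distributes it to the slaves, and afterwards either commits or aborts the whole write. A sketch of that flow; the table name and empty config map are assumptions:

import java.util.HashMap;
import java.util.Map;
import org.apache.hcatalog.data.transfer.DataTransferFactory;
import org.apache.hcatalog.data.transfer.HCatWriter;
import org.apache.hcatalog.data.transfer.WriteEntity;
import org.apache.hcatalog.data.transfer.WriterContext;

public void runOnMaster() throws Exception {
  WriteEntity entity = new WriteEntity.Builder()
      .withTable("output_table")                // illustrative table name
      .build();
  Map<String, String> config = new HashMap<String, String>();  // e.g. metastore settings
  HCatWriter master = DataTransferFactory.getHCatWriter(entity, config);

  WriterContext context = master.prepareWrite();
  try {
    // Ship `context` to the slave processes here; each slave calls
    // DataTransferFactory.getHCatWriter(context).write(records), as in write() above.
    master.commit(context);                     // routed to HCatOutputFormat's committer
  } catch (Exception e) {
    master.abort(context);                      // likewise routed to the committer
    throw e;
  }
}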

        this(null, null);
    }

    @Override
    public OutputFormat getOutputFormat() throws IOException {
        return new HCatOutputFormat();
    }

    Job job;
    try {
      job = new Job(conf);
      HCatOutputFormat.setOutput(job, jobInfo);
      HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job));
      HCatOutputFormat outFormat = new HCatOutputFormat();
      outFormat.checkOutputSpecs(job);
      outFormat.getOutputCommitter(new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID())).setupJob(job);
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
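
With the job configured as above, the task side only has to emit HCatRecord values; the key is ignored by HCatOutputFormat (the snippets here pass null for it). A minimal sketch of such a mapper, where the text input and the two-column output schema are assumptions:

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;

public class TextToHCatMapper
    extends Mapper<LongWritable, Text, WritableComparable, HCatRecord> {

  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    DefaultHCatRecord record = new DefaultHCatRecord(2);  // assumed two-column table
    record.set(0, value.toString());
    record.set(1, 1);
    context.write(null, record);  // HCatOutputFormat does not use the key
  }
}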

  @Override
  public void write(Iterator<HCatRecord> recordItr) throws HCatException {
   
    int id = sp.getId();
    setVarsInConf(id);
    HCatOutputFormat outFormat = new HCatOutputFormat();
    TaskAttemptContext cntxt = new TaskAttemptContext(conf, new TaskAttemptID(new TaskID(), id));
    OutputCommitter committer = null;
    RecordWriter<WritableComparable<?>, HCatRecord> writer;
    try {
      committer = outFormat.getOutputCommitter(cntxt);
      committer.setupTask(cntxt);
      writer = outFormat.getRecordWriter(cntxt);
      while (recordItr.hasNext()) {
        HCatRecord rec = recordItr.next();
        writer.write(null, rec);
      }
      writer.close(cntxt);
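
Records do not have to be filled positionally; they can also be set by column name against the HCatSchema that setSchema() published for the job. A brief sketch, with the column names assumed:

import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.data.DefaultHCatRecord;
import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.schema.HCatSchema;

public HCatRecord toRecord(HCatSchema schema, String user, int count) throws HCatException {
  HCatRecord record = new DefaultHCatRecord(schema.size());
  record.set("user", schema, user);    // column names are assumptions
  record.set("count", schema, count);
  return record;
}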

  }

  @Override
  public void commit(WriterContext context) throws HCatException {
    try {
      new HCatOutputFormat().getOutputCommitter(new TaskAttemptContext(context.getConf(), new TaskAttemptID()))
      .commitJob(new JobContext(context.getConf(), null));
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
