Examples of HCatOutputFormat


Examples of org.apache.hive.hcatalog.mapreduce.HCatOutputFormat

  private void copyTable(String in, String out) throws IOException, InterruptedException {
    Job ijob = new Job();
    Job ojob = new Job();
    HCatInputFormat inpy = new HCatInputFormat();
    inpy.setInput(ijob, null, in);
    HCatOutputFormat oupy = new HCatOutputFormat();
    oupy.setOutput(ojob, OutputJobInfo.create(null, out, new HashMap<String, String>()));

    // Test HCatContext

    System.err.println("HCatContext INSTANCE is present : " + HCatContext.INSTANCE.getConf().isPresent());
    if (HCatContext.INSTANCE.getConf().isPresent()) {
      System.err.println("HCatContext tinyint->int promotion says " +
        HCatContext.INSTANCE.getConf().get().getBoolean(
          HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
          HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT));
    }

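    // Mirror the input table's schema onto the output table before opening any writers.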
    HCatSchema tableSchema = inpy.getTableSchema(ijob.getConfiguration());
    System.err.println("Copying from [" + in + "] to [" + out + "] with schema : " + tableSchema.toString());
    oupy.setSchema(ojob, tableSchema);
    oupy.checkOutputSpecs(ojob);
    OutputCommitter oc = oupy.getOutputCommitter(createTaskAttemptContext(ojob.getConfiguration()));
    oc.setupJob(ojob);

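    // Hand-drive the copy: for each input split, run one read task and one write task.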
    for (InputSplit split : inpy.getSplits(ijob)) {

      TaskAttemptContext rtaskContext = createTaskAttemptContext(ijob.getConfiguration());
      TaskAttemptContext wtaskContext = createTaskAttemptContext(ojob.getConfiguration());

      RecordReader<WritableComparable, HCatRecord> rr = inpy.createRecordReader(split, rtaskContext);
      rr.initialize(split, rtaskContext);

      OutputCommitter taskOc = oupy.getOutputCommitter(wtaskContext);
      taskOc.setupTask(wtaskContext);
      RecordWriter<WritableComparable<?>, HCatRecord> rw = oupy.getRecordWriter(wtaskContext);

      while (rr.nextKeyValue()) {
        rw.write(rr.getCurrentKey(), rr.getCurrentValue());
      }
      rw.close(wtaskContext);
      // Finish the per-split task, then release the reader.
      taskOc.commitTask(wtaskContext);
      rr.close();
    }

    // Once every split has been copied, commit the output job as a whole.
    oc.commitJob(ojob);
  }
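
The snippet relies on a createTaskAttemptContext(Configuration) helper that is not shown. A minimal sketch of such a helper, assuming the same shim-based construction the later examples on this page use:

  private TaskAttemptContext createTaskAttemptContext(Configuration conf) {
    // Build the context through the Hive shims so it works across Hadoop
    // versions; the task attempt ID is a fresh synthetic one.
    return ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
        conf, ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID());
  }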

Examples of org.apache.hive.hcatalog.mapreduce.HCatOutputFormat

    this(null, null, "");  // tail of a constructor that precedes the method of interest
  }

  @Override
  public OutputFormat getOutputFormat() throws IOException {
    return new HCatOutputFormat();
  }
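
This getOutputFormat() override appears to come from a Pig StoreFunc-style wrapper, whose only job is to hand back an HCatOutputFormat instance. In a plain MapReduce driver the equivalent wiring is done on the Job directly; a minimal sketch, where "default" and "mytable" are placeholder names:

  Job job = new Job(new Configuration());
  // Point the job at the target Hive table, copy that table's schema in,
  // and register HCatOutputFormat as the job's output format.
  HCatOutputFormat.setOutput(job, OutputJobInfo.create("default", "mytable", null));
  HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job.getConfiguration()));
  job.setOutputFormatClass(HCatOutputFormat.class);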

Examples of org.apache.hive.hcatalog.mapreduce.HCatOutputFormat

    Job job;
    try {
      job = new Job(conf);
      HCatOutputFormat.setOutput(job, jobInfo);
      HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job.getConfiguration()));
      HCatOutputFormat outFormat = new HCatOutputFormat();
      outFormat.checkOutputSpecs(job);
      outFormat.getOutputCommitter(ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
          job.getConfiguration(), ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID())).setupJob(job);
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
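
The jobInfo handed to setOutput above is an OutputJobInfo describing the destination table. A hedged example of building one, with placeholder database, table, and partition values:

  Map<String, String> partitionValues = new HashMap<String, String>();
  partitionValues.put("ds", "2014-01-01");  // hypothetical partition key and value
  OutputJobInfo jobInfo = OutputJobInfo.create("default", "mytable", partitionValues);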

Examples of org.apache.hive.hcatalog.mapreduce.HCatOutputFormat

  @Override
  public void write(Iterator<HCatRecord> recordItr) throws HCatException {

    int id = sp.getId();
    setVarsInConf(id);
    HCatOutputFormat outFormat = new HCatOutputFormat();
    TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
        conf, new TaskAttemptID(ShimLoader.getHadoopShims().getHCatShim().createTaskID(), id));
    OutputCommitter committer = null;
    RecordWriter<WritableComparable<?>, HCatRecord> writer;
    try {
      committer = outFormat.getOutputCommitter(cntxt);
      committer.setupTask(cntxt);
      writer = outFormat.getRecordWriter(cntxt);
      while (recordItr.hasNext()) {
        HCatRecord rec = recordItr.next();
        writer.write(null, rec);
      }
      writer.close(cntxt);
      // Completion sketch: commit the task when the committer asks for it;
      // the original's error handling is longer than shown here.
      if (committer.needsTaskCommit(cntxt)) {
        committer.commitTask(cntxt);
      }
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e);
    } catch (InterruptedException e) {
      throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, e);
    }
  }
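
These write()/commit()/abort() methods belong to HCatalog's data transfer API (HCatWriter). A hedged end-to-end sketch of how a caller drives that API, assuming the usual master/slave split; "mytable" and the empty config map are placeholders:

  // Master: describe the target table and prepare the write.
  WriteEntity entity = new WriteEntity.Builder().withTable("mytable").build();
  HCatWriter master = DataTransferFactory.getHCatWriter(entity, new HashMap<String, String>());
  WriterContext context = master.prepareWrite();

  // Slaves: rebuild a writer from the serializable context and push records.
  HCatWriter slave = DataTransferFactory.getHCatWriter(context);
  slave.write(recordItr);  // an Iterator<HCatRecord>, as in the write() above

  // Master again: commit on success, abort on failure.
  master.commit(context);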

Examples of org.apache.hive.hcatalog.mapreduce.HCatOutputFormat

  @Override
  public void commit(WriterContext context) throws HCatException {
    WriterContextImpl cntxtImpl = (WriterContextImpl)context;
    try {
      new HCatOutputFormat().getOutputCommitter(
          ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
              cntxtImpl.getConf(),
              ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
          .commitJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(
              cntxtImpl.getConf(), null));
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
  }

Examples of org.apache.hive.hcatalog.mapreduce.HCatOutputFormat

  @Override
  public void abort(WriterContext context) throws HCatException {
    WriterContextImpl cntxtImpl = (WriterContextImpl)context;
    try {
      new HCatOutputFormat().getOutputCommitter(
          ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
            cntxtImpl.getConf(),
            ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
          .abortJob(ShimLoader.getHadoopShims().getHCatShim().createJobContext(
            cntxtImpl.getConf(), null), State.FAILED);
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
  }
