Examples of HCatInputFormat


Examples of org.apache.hcatalog.mapreduce.HCatInputFormat

  @Override
  public RecordReader<ITuple, NullWritable> createRecordReader(InputSplit split,
      TaskAttemptContext taskContext) throws IOException, InterruptedException {

    // Let HCatalog read the split, then adapt each HCatRecord to a Pangool ITuple.
    HCatInputFormat iF = new HCatInputFormat();

    @SuppressWarnings("rawtypes")
    final RecordReader<WritableComparable, HCatRecord> hCatRecordReader = iF.createRecordReader(split,
        taskContext);

    return new RecordReader<ITuple, NullWritable>() {

      ITuple tuple = new Tuple(pangoolSchema);

      // ... (the remaining RecordReader methods, which wrap hCatRecordReader,
      // are truncated in the original listing)
    };
  }
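Before this RecordReader ever runs, a driver has to bind the job to a Hive table. A minimal sketch, assuming the setInput(Job, String, String) overload that appears in the later examples (database and table names are placeholders):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.mapreduce.Job;
  import org.apache.hcatalog.mapreduce.HCatInputFormat;

  public class HCatReadDriver {
    public static void main(String[] args) throws Exception {
      Job job = new Job(new Configuration());
      // Bind the job to a Hive table; "default" and "mytable" are placeholders.
      HCatInputFormat.setInput(job, "default", "mytable");
      job.setInputFormatClass(HCatInputFormat.class);
      // ... set mapper, output format, etc., then submit with job.waitForCompletion(true)
    }
  }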

Examples of org.apache.hcatalog.mapreduce.HCatInputFormat


  @Override
  public List<InputSplit> getSplits(JobContext jobcontext) throws IOException, InterruptedException {
    // Split computation is delegated wholesale to HCatalog.
    HCatInputFormat iF = new HCatInputFormat();
    return iF.getSplits(jobcontext);
  }

Examples of org.apache.hcatalog.mapreduce.HCatInputFormat

  @Override
  public ReaderContext prepareRead() throws HCatException {
    try {
      Job job = new Job(conf);
      // Point the input format at the requested table (with an optional partition
      // filter), then capture the resulting splits and conf for the slave readers.
      HCatInputFormat hcif = HCatInputFormat.setInput(
        job, re.getDbName(), re.getTableName()).setFilter(re.getFilterString());
      ReaderContext cntxt = new ReaderContext();
      cntxt.setInputSplits(hcif.getSplits(
        ShimLoader.getHadoopShims().getHCatShim().createJobContext(job.getConfiguration(), null)));
      cntxt.setConf(job.getConfiguration());
      return cntxt;
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      // (truncated in the original listing; restored to mirror the catch pattern in read() below)
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
  }
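For orientation, this prepareRead() implementation sits behind HCatalog's data-transfer API. A minimal master-side sketch, assuming that API (database and table names are placeholders):

  import java.util.HashMap;
  import org.apache.hcatalog.data.transfer.DataTransferFactory;
  import org.apache.hcatalog.data.transfer.HCatReader;
  import org.apache.hcatalog.data.transfer.ReadEntity;
  import org.apache.hcatalog.data.transfer.ReaderContext;

  // Master side: describe the table, then build a ReaderContext.
  ReadEntity entity = new ReadEntity.Builder()
      .withDatabase("default")
      .withTable("mytable")
      .build();
  HCatReader masterReader = DataTransferFactory.getHCatReader(entity, new HashMap<String, String>());
  ReaderContext context = masterReader.prepareRead();   // invokes the method above
  // The ReaderContext carries the splits and conf; ship it to the slave nodes.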

Examples of org.apache.hcatalog.mapreduce.HCatInputFormat

  @Override
  public Iterator<HCatRecord> read() throws HCatException {

    HCatInputFormat inpFmt = new HCatInputFormat();
    RecordReader<WritableComparable, HCatRecord> rr;
    try {
      // Rebuild a task-attempt context on the slave, then open and initialize
      // the record reader for the single split this slave was handed.
      TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, new TaskAttemptID());
      rr = inpFmt.createRecordReader(split, cntxt);
      rr.initialize(split, cntxt);
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
    // ... (the original continues by wrapping rr in an Iterator<HCatRecord>;
    // truncated in the listing)
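The slave-side counterpart, again a sketch against the data-transfer API; slaveNumber picks one of the splits recorded in the ReaderContext:

  import java.util.Iterator;
  import org.apache.hcatalog.data.HCatRecord;
  import org.apache.hcatalog.data.transfer.DataTransferFactory;
  import org.apache.hcatalog.data.transfer.HCatReader;

  // Slave side: re-create a reader from the shipped ReaderContext and iterate.
  int slaveNumber = 0;   // placeholder split index
  HCatReader slaveReader = DataTransferFactory.getHCatReader(context, slaveNumber);
  Iterator<HCatRecord> records = slaveReader.read();   // invokes the method above
  while (records.hasNext()) {
    HCatRecord record = records.next();
    // ... process one row
  }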

Examples of org.apache.hive.hcatalog.mapreduce.HCatInputFormat

  private static Map<String, Credentials> jobCredentials = new HashMap<String, Credentials>();

  @Override
  public InputFormat<?, ?> getInputFormat() throws IOException {
    if (hcatInputFormat == null) {
      hcatInputFormat = new HCatInputFormat();
    }
    return hcatInputFormat;
  }

Examples of org.apache.hive.hcatalog.mapreduce.HCatInputFormat

  @Override
  public ReaderContext prepareRead() throws HCatException {
    try {
      Job job = new Job(conf);
      HCatInputFormat hcif = HCatInputFormat.setInput(
        job, re.getDbName(), re.getTableName()).setFilter(re.getFilterString());
      ReaderContext cntxt = new ReaderContext();
      cntxt.setInputSplits(hcif.getSplits(
          ShimLoader.getHadoopShims().getHCatShim().createJobContext(job.getConfiguration(), null)));
      cntxt.setConf(job.getConfiguration());
      return cntxt;
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      // (truncated in the original listing; restored to mirror the catch pattern in read() below)
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
  }

Examples of org.apache.hive.hcatalog.mapreduce.HCatInputFormat

  @Override
  public Iterator<HCatRecord> read() throws HCatException {

    HCatInputFormat inpFmt = new HCatInputFormat();
    RecordReader<WritableComparable, HCatRecord> rr;
    try {
      TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(conf, new TaskAttemptID());
      rr = inpFmt.createRecordReader(split, cntxt);
      rr.initialize(split, cntxt);
    } catch (IOException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    } catch (InterruptedException e) {
      throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
    }
    // ... (the original continues by wrapping rr in an Iterator<HCatRecord>;
    // truncated in the listing)

Examples of org.apache.hive.hcatalog.mapreduce.HCatInputFormat


  private void copyTable(String in, String out) throws IOException, InterruptedException {
    Job ijob = new Job();
    Job ojob = new Job();
    HCatInputFormat inpy = new HCatInputFormat();
    inpy.setInput(ijob, null, in);   // a null database name falls back to the default database
    HCatOutputFormat oupy = new HCatOutputFormat();
    oupy.setOutput(ojob,
      OutputJobInfo.create(null, out, new HashMap<String, String>()));

    // Test HCatContext

    System.err.println("HCatContext INSTANCE is present : " +HCatContext.INSTANCE.getConf().isPresent());
    if (HCatContext.INSTANCE.getConf().isPresent()){
      System.err.println("HCatContext tinyint->int promotion says " +
        HCatContext.INSTANCE.getConf().get().getBoolean(
          HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
          HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT));
    }

    HCatSchema tableSchema = inpy.getTableSchema(ijob.getConfiguration());
    System.err.println("Copying from ["+in+"] to ["+out+"] with schema : "+ tableSchema.toString());
    oupy.setSchema(ojob, tableSchema);
    oupy.checkOutputSpecs(ojob);
    // createTaskAttemptContext(...) is a private helper defined elsewhere in the original class.
    OutputCommitter oc = oupy.getOutputCommitter(createTaskAttemptContext(ojob.getConfiguration()));
    oc.setupJob(ojob);

    for (InputSplit split : inpy.getSplits(ijob)) {

      TaskAttemptContext rtaskContext = createTaskAttemptContext(ijob.getConfiguration());
      TaskAttemptContext wtaskContext = createTaskAttemptContext(ojob.getConfiguration());

      RecordReader<WritableComparable, HCatRecord> rr = inpy.createRecordReader(split, rtaskContext);
      rr.initialize(split, rtaskContext);

      OutputCommitter taskOc = oupy.getOutputCommitter(wtaskContext);
      taskOc.setupTask(wtaskContext);
      RecordWriter<WritableComparable<?>, HCatRecord> rw = oupy.getRecordWriter(wtaskContext);
      // ... (the per-record copy loop and the task/job commits are truncated in the original listing)
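A plausible completion of that loop, following the standard Hadoop RecordReader/RecordWriter contract (a hypothetical reconstruction, not the original source):

      while (rr.nextKeyValue()) {
        // HCatOutputFormat ignores the key; the HCatRecord value carries the row.
        rw.write(null, rr.getCurrentValue());
      }
      rr.close();
      rw.close(wtaskContext);
      taskOc.commitTask(wtaskContext);

After the loop over splits, the job-level committer opened earlier would presumably finish the copy with oc.commitJob(ojob).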

Examples of org.apache.hive.hcatalog.mapreduce.HCatInputFormat

      LOG.info("Creating HCatalog table " + hCatQualifiedTableName
        + " for import");
      createHCatTable();
    }
    // For serializing the schema to conf
    HCatInputFormat hif = HCatInputFormat.setInput(hCatJob, hCatDatabaseName,
      hCatTableName);
    if (filterStr != null) {
      LOG.info("Setting hCatInputFormat filter to " + filterStr);
      hif.setFilter(filterStr);
    }

    hCatFullTableSchema = HCatInputFormat.getTableSchema(configuration);
    hCatFullTableSchemaFieldNames = hCatFullTableSchema.getFieldNames();
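Because setInput() serializes the table schema into the job configuration, later stages can recover it by name. A short hypothetical continuation of this fragment (the column name "id" is a placeholder):

    HCatSchema schema = HCatInputFormat.getTableSchema(configuration);
    Integer idPosition = schema.getPosition("id");   // column index by name
    HCatFieldSchema idField = schema.get("id");      // full field metadata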
