Package com.facebook.giraph.hive.impl.input

Examples of com.facebook.giraph.hive.impl.input.HiveApiInputSplit


          ") values: " +
          inputPartition.getInputSplitData().getPartitionValues() +
          ", got " + baseSplits.length + " splits");

      for (org.apache.hadoop.mapred.InputSplit baseSplit : baseSplits)  {
        InputSplit split = new HiveApiInputSplit(baseInputFormat, baseSplit,
            inputInfo.getTableSchema(), inputInfo.getColumnIds(),
            inputPartition.getInputSplitData(), conf);
        splits.add(split);
      }
View Full Code Here


  // NOTE(review): the modifiers and return type of this method are truncated
  // above this fragment — presumably it overrides the new-API (mapreduce)
  // InputFormat#createRecordReader and returns a RecordReader; confirm
  // against the full source.
  createRecordReader(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
    // Wrap the task's Configuration in a JobConf, since the underlying base
    // input format uses the old (mapred) API.
    Configuration conf = context.getConfiguration();
    JobConf jobConf = new JobConf(conf);

    // Only HiveApiInputSplit instances are supported; fail fast on anything else.
    HiveApiInputSplit apiInputSplit;
    if (inputSplit instanceof HiveApiInputSplit) {
      apiInputSplit = (HiveApiInputSplit) inputSplit;
    } else {
      throw new IllegalArgumentException("InputSplit not a HiveApiInputSplit");
    }
    // Hand the split its configuration before asking it for a base reader.
    apiInputSplit.setConf(jobConf);

    // CHECKSTYLE: stop LineLength
    org.apache.hadoop.mapred.RecordReader<WritableComparable, Writable> baseRecordReader =
        apiInputSplit.getBaseRecordReader(jobConf, context);
    // CHECKSTYLE: resume LineLength

    // Restrict column reads to the split's projected column ids so the
    // underlying storage only deserializes the columns actually requested.
    HiveUtils.setReadColumnIds(conf, apiInputSplit.getColumnIds());

    // Record reuse defaults to true unless explicitly disabled in the conf.
    boolean reuseRecord = conf.getBoolean(REUSE_RECORD_KEY, true);

    // Adapt the old-API base reader into the new-API HiveApiRecordReader,
    // carrying along the split's deserializer, partition values, and the
    // table's column count.
    HiveApiRecordReader reader = new HiveApiRecordReader(
        baseRecordReader,
        apiInputSplit.getDeserializer(),
        apiInputSplit.getPartitionValues(),
        apiInputSplit.getTableSchema().numColumns(),
        reuseRecord);
    // `observer` is declared outside this fragment; presumably a progress or
    // metrics hook attached to every reader created here — confirm.
    reader.setObserver(observer);

    return reader;
  }
View Full Code Here

          ") values: " +
          inputPartition.getInputSplitData().getPartitionValues() +
          ", got " + baseSplits.length + " splits");

      for (org.apache.hadoop.mapred.InputSplit baseSplit : baseSplits)  {
        InputSplit split = new HiveApiInputSplit(baseInputFormat, baseSplit,
            inputInfo.getTableSchema(), inputInfo.getColumnIds(),
            inputPartition.getInputSplitData(), conf);
        splits.add(split);
      }
View Full Code Here

  // NOTE(review): duplicate listing of the same method as shown earlier on
  // this page; modifiers/return type are truncated above this fragment —
  // presumably an override of InputFormat#createRecordReader. Confirm against
  // the full source.
  createRecordReader(InputSplit inputSplit, TaskAttemptContext context)
    throws IOException, InterruptedException {
    // Wrap the task's Configuration in a JobConf for the old (mapred) API
    // used by the underlying base input format.
    Configuration conf = context.getConfiguration();
    JobConf jobConf = new JobConf(conf);

    // Only HiveApiInputSplit instances are supported; fail fast otherwise.
    HiveApiInputSplit apiInputSplit;
    if (inputSplit instanceof HiveApiInputSplit) {
      apiInputSplit = (HiveApiInputSplit) inputSplit;
    } else {
      throw new IllegalArgumentException("InputSplit not a HiveApiInputSplit");
    }
    // Configure the split before requesting its base reader.
    apiInputSplit.setConf(jobConf);

    // CHECKSTYLE: stop LineLength
    org.apache.hadoop.mapred.RecordReader<WritableComparable, Writable> baseRecordReader =
        apiInputSplit.getBaseRecordReader(jobConf, context);
    // CHECKSTYLE: resume LineLength

    // Limit deserialization to the projected column ids for this split.
    HiveUtils.setReadColumnIds(conf, apiInputSplit.getColumnIds());

    // Record reuse defaults to true unless explicitly disabled in the conf.
    boolean reuseRecord = conf.getBoolean(REUSE_RECORD_KEY, true);

    // Adapt the old-API base reader into the new-API HiveApiRecordReader.
    HiveApiRecordReader reader = new HiveApiRecordReader(
        baseRecordReader,
        apiInputSplit.getDeserializer(),
        apiInputSplit.getPartitionValues(),
        apiInputSplit.getTableSchema().numColumns(),
        reuseRecord);
    // `observer` is declared outside this fragment; presumably a progress or
    // metrics hook — confirm.
    reader.setObserver(observer);

    return reader;
  }
View Full Code Here

TOP

Related Classes of com.facebook.giraph.hive.impl.input.HiveApiInputSplit

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.