Package org.apache.hadoop.hive.ql.parse

Examples of org.apache.hadoop.hive.ql.parse.SplitSample


From the simple fetch optimization: an alias that is sampled (by a split sample or a bucket-based table sample) is not eligible for conversion to a plain fetch unless aggressive conversion is enabled.

  // 1. sampling is not allowed
  // 2. for a partitioned table, all filters should target partition columns
  // 3. SelectOperator should be select star
  private FetchData checkTree(boolean aggressive, ParseContext pctx, String alias,
      TableScanOperator ts) throws HiveException {
    // a split sample registered for this alias blocks the conversion
    SplitSample splitSample = pctx.getNameToSplitSample().get(alias);
    if (!aggressive && splitSample != null) {
      return null;
    }
    QB qb = pctx.getQB();
    // a bucket-based table sample blocks it as well
    if (!aggressive && qb.hasTableSample(alias)) {
      return null;
    }
    // ... (remainder of the method omitted in this excerpt)
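The guard above reduces to a small predicate: the presence of the alias in nameToSplitSample (populated for a split sample such as a TABLESAMPLE percentage clause) or a bucket-based table sample on the alias makes the non-aggressive conversion give up. Below is a minimal standalone sketch of that predicate using stand-in types; FetchEligibility and its parameters are hypothetical names for illustration, not Hive API.

  import java.util.Map;
  import java.util.Set;

  class FetchEligibility {
    // stand-ins for pctx.getNameToSplitSample() and qb.hasTableSample(alias)
    static boolean isEligible(boolean aggressive, String alias,
        Map<String, ?> nameToSplitSample, Set<String> bucketSampledAliases) {
      if (!aggressive && nameToSplitSample.containsKey(alias)) {
        return false;   // alias carries a split sample
      }
      if (!aggressive && bucketSampledAliases.contains(alias)) {
        return false;   // alias carries a bucket-based table sample
      }
      return true;      // sampling does not block the conversion
    }
  }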


From the split-combining path: applying a SplitSample when grouping input splits. Splits are taken round-robin, starting at a seed-determined offset, until the sampled target size in bytes is reached.

      // total number of bytes across all splits in this group
      long totalSize = 0;
      for (InputSplitShim split : splitList) {
        totalSize += split.getLength();
      }

      // look up the sample requested for this group of splits
      SplitSample splitSample = nameToSamples.get(entry.getKey());

      // translate the sample specification into a byte target for this input
      long targetSize = splitSample.getTargetSize(totalSize);
      // the seed decides which split the sample starts from
      int startIndex = splitSample.getSeedNum() % splitList.size();
      long size = 0;
      for (int i = 0; i < splitList.size(); i++) {
        InputSplitShim split = splitList.get((startIndex + i) % splitList.size());
        retLists.add(split);
        long splitLength = split.getLength();
        // ... (the excerpt is cut off before the accumulated-size check)
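Since the loop above is cut off before its exit condition, here is a self-contained sketch of the whole sampling pass over plain split lengths. Two things are assumptions rather than facts taken from the excerpt: the target size for a percent-based sample is computed as totalSize * percent / 100 (standing in for SplitSample.getTargetSize), and the loop stops as soon as the accumulated length reaches that target.

  import java.util.ArrayList;
  import java.util.List;

  public class SplitSampleSketch {
    // returns the indices of the splits that would be kept for the sample
    static List<Integer> sampleSplits(long[] splitLengths, double percent, int seedNum) {
      long totalSize = 0;
      for (long len : splitLengths) {
        totalSize += len;                                     // total bytes across all splits
      }
      long targetSize = (long) (totalSize * percent / 100d);  // assumed getTargetSize() behavior
      int n = splitLengths.length;
      int startIndex = seedNum % n;                           // seed chooses where the sample starts
      List<Integer> picked = new ArrayList<>();
      long size = 0;
      for (int i = 0; i < n; i++) {
        int idx = (startIndex + i) % n;                       // walk the split list, wrapping around
        picked.add(idx);
        size += splitLengths[idx];
        if (size >= targetSize) {                             // assumed stopping rule
          break;
        }
      }
      return picked;
    }

    public static void main(String[] args) {
      long[] lengths = {128, 256, 64, 512, 96};
      // a 30 percent sample seeded at 3 starts from the fourth split
      System.out.println(sampleSplits(lengths, 30.0, 3));
    }
  }

The round-robin wrap-around keeps the sample spread across the input list rather than always taking the first few splits.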

A variant of the same check, from a different version of the class, with the third condition relaxed: instead of requiring select star, the SelectOperator may also project simple casts and direct column accesses.

  // 1. sampling is not allowed
  // 2. for a partitioned table, all filters should target partition columns
  // 3. SelectOperator should use only simple cast/column access
  private FetchData checkTree(boolean aggressive, ParseContext pctx, String alias,
      TableScanOperator ts) throws HiveException {
    SplitSample splitSample = pctx.getNameToSplitSample().get(alias);
    if (!aggressive && splitSample != null) {
      return null;
    }
    QB qb = pctx.getQB();
    if (!aggressive && qb.hasTableSample(alias)) {
      return null;
    }
    // ... (remainder of the method omitted in this excerpt)
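The relaxed third condition suggests a per-expression test rather than a select-star check. As a purely hypothetical illustration (the Expr/Column/Cast types below are stand-ins, not Hive's ExprNodeDesc classes), such a test could unwrap casts and then insist on a bare column reference:

  import java.util.List;

  final class SimpleSelectCheck {
    interface Expr {}
    record Column(String name) implements Expr {}
    record Cast(String targetType, Expr operand) implements Expr {}
    record OtherFunc(String name, List<Expr> args) implements Expr {}

    // true only when every projected expression is a column, possibly wrapped in
    // casts; UDFs, arithmetic and constants disqualify the query from this path
    static boolean isSimpleSelect(List<Expr> projections) {
      for (Expr e : projections) {
        while (e instanceof Cast c) {
          e = c.operand();             // peel off simple casts
        }
        if (!(e instanceof Column)) {
          return false;
        }
      }
      return true;
    }
  }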
