Examples of TableScanOperator


Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      TableScanOperator node = (TableScanOperator) nd;
      WalkerCtx walkerCtx = (WalkerCtx) procCtx;
      // A scan that needs no column IDs and projects no virtual columns may
      // be answerable from metadata alone, so flag it in the walker context.
      if (((node.getNeededColumnIDs() == null) || (node.getNeededColumnIDs().size() == 0))
          && ((node.getConf() == null) ||
              (node.getConf().getVirtualCols() == null) ||
              (node.getConf().getVirtualCols().isEmpty()))) {
        walkerCtx.setMayBeMetadataOnly(node);
      }
      return nd;
    }
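A NodeProcessor like this never runs on its own: Hive's optimizers bind it to a rule and let a graph walker dispatch it over the operator DAG. Below is a minimal sketch of that wiring using the org.apache.hadoop.hive.ql.lib classes; WalkerCtx and TableScanProcessor are the names from the snippet above, while the ParseContext argument and the "TS%" rule pattern (Hive's operator name for table scans) are assumptions based on the standard optimizer layout.

  // Minimal sketch: dispatch TableScanProcessor on every table scan ("TS")
  // node reachable from the top operators of the plan.
  private static WalkerCtx findMetadataOnlyScans(ParseContext pctx)
      throws SemanticException {
    Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
    opRules.put(new RuleRegExp("R1", "TS%"), new TableScanProcessor());

    WalkerCtx walkerCtx = new WalkerCtx();
    Dispatcher disp = new DefaultRuleDispatcher(null, opRules, walkerCtx);
    GraphWalker walker = new DefaultGraphWalker(disp);

    ArrayList<Node> topNodes = new ArrayList<Node>(pctx.getTopOps().values());
    walker.startWalking(topNodes, null);
    return walkerCtx;
  }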

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

      LOG.info(String.format("Found %d metadata only table scans",
          walkerCtx.getMetadataOnlyTableScans().size()));
      Iterator<TableScanOperator> iterator
        = walkerCtx.getMetadataOnlyTableScans().iterator();

      while (iterator.hasNext()) {
        TableScanOperator tso = iterator.next();
        LOG.info("Metadata only table scan for " + tso.getConf().getAlias());
        convertToMetadataOnlyQuery((MapredWork) task.getWork(), tso);
      }

      return null;
    }

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

      Map<String, Operator<? extends OperatorDesc>> topOps = this.pGraphContext
          .getTopOps();
      Map<TableScanOperator, Table> topToTable = this.pGraphContext
          .getTopToTable();
      // Resolve the table scan operator feeding this join alias; if the
      // alias has no top-level scan, the optimization does not apply.
      TableScanOperator tso = (TableScanOperator) topOps.get(alias);
      if (tso == null) {
        return false;
      }

      List<ExprNodeDesc> keys = op.getConf().getKeys().get((byte) pos);

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

  /**
   * The Node Processor for Column Pruning on Table Scan Operators. It stores
   * the needed columns in the tableScanDesc.
   */
  public static class ColumnPrunerTableScanProc implements NodeProcessor {
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        Object... nodeOutputs) throws SemanticException {
      TableScanOperator scanOp = (TableScanOperator) nd;
      ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
      List<String> cols = cppCtx.genColLists((Operator<? extends Serializable>) nd);
      cppCtx.getPrunedColLists().put((Operator<? extends Serializable>) nd, cols);
      // Translate each needed column name into its position in the scan's
      // row resolver, then record the IDs on the table scan operator.
      ArrayList<Integer> needed_columns = new ArrayList<Integer>();
      RowResolver inputRR = cppCtx.getOpToParseCtxMap().get(scanOp).getRR();
      for (int i = 0; i < cols.size(); i++) {
        int position = inputRR.getPosition(cols.get(i));
        needed_columns.add(position);
      }
      scanOp.setNeededColumnIDs(needed_columns);
      return null;
    }
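The ColumnPruner transform binds this processor to the table-scan rule in the same rule-map style as the walker sketch earlier. A minimal sketch of that registration; the factory method name getTableScanProc follows ColumnPrunerProcFactory's convention and is an assumption here rather than a quotation:

      // Sketch: route "TS%" (table scan) nodes to the column pruning
      // processor shown above; other operators get their own rules.
      Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
      opRules.put(new RuleRegExp("R1", "TS%"),
          ColumnPrunerProcFactory.getTableScanProc());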

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

      // The stack contains either ... TS, Filter or ... TS, Filter, Filter,
      // with the head of the stack being the rightmost symbol. So we just pop
      // out the two elements from the top, and if the second of them is not a
      // table scan, then the operator on the top of the stack is the table
      // scan operator.
      Node tmp = stack.pop();
      Node tmp2 = stack.pop();
      TableScanOperator top = null;
      if (tmp2 instanceof TableScanOperator) {
        top = (TableScanOperator) tmp2;
      } else {
        top = (TableScanOperator) stack.peek();
        fop2 = (FilterOperator) tmp2;
      }
      stack.push(tmp2);
      stack.push(tmp);

      // If fop2 exists (i.e. this is not the top-level filter) and fop2 is
      // not a sampling filter, we ignore the current filter.
      if (fop2 != null && !fop2.getConf().getIsSamplingPred()) {
        return null;
      }

      // Ignore the predicate in case it is a sampling predicate.
      if (fop.getConf().getIsSamplingPred()) {
        return null;
      }

      // Otherwise this is not a sampling predicate, and we need to process it.
      exprNodeDesc predicate = fop.getConf().getPredicate();
      String alias = top.getConf().getAlias();

      // Generate the partition pruning predicate
      boolean hasNonPartCols = false;
      exprNodeDesc ppr_pred = ExprProcFactory.genPruner(alias, predicate, hasNonPartCols);
      owc.addHasNonPartCols(hasNonPartCols);
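Since the walker hands the processor a java.util.Stack<Node>, which extends Vector and supports positional access, the pop/pop/peek/push/push sequence above can be replaced by non-mutating indexing. An equivalent drop-in sketch for those lines:

      // Inspect the second and third nodes from the top without mutating
      // the stack; equivalent to the pop/peek/push sequence above.
      Node second = stack.get(stack.size() - 2);
      TableScanOperator top = null;
      FilterOperator fop2 = null;
      if (second instanceof TableScanOperator) {
        top = (TableScanOperator) second;
      } else {
        top = (TableScanOperator) stack.get(stack.size() - 3);
        fop2 = (FilterOperator) second;
      }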

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

    for (String alias : aliases) {
      Operator<? extends Serializable> op = this.mrwork.getAliasToWork().get(
          alias);
      if (op instanceof TableScanOperator) {
        TableScanOperator tableScan = (TableScanOperator) op;
        // Push the scan's pruned column IDs into the job configuration so
        // that the input format materializes only those columns.
        ArrayList<Integer> list = tableScan.getNeededColumnIDs();
        if (list != null) {
          HiveFileFormatUtils.setReadColumnIDs(jobConf, list);
        } else {
          HiveFileFormatUtils.setFullyReadColumns(jobConf);
        }
      }
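On the read side, column-oriented readers consult the same configuration keys that the snippet above sets. A runnable probe of that round trip, using the serde2 ColumnProjectionUtils helper from Hive releases of this vintage (pairing it with HiveFileFormatUtils here is an assumption; some versions call ColumnProjectionUtils directly):

import java.util.ArrayList;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;

// Probe: stamp two column IDs onto a configuration the way the snippet
// above does for a table scan, then read them back as an input format would.
public final class ReadColumnsProbe {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    ColumnProjectionUtils.setReadColumnIDs(conf,
        new ArrayList<Integer>(Arrays.asList(0, 2)));
    System.out.println("Columns to materialize: "
        + ColumnProjectionUtils.getReadColumnIDs(conf));  // prints [0, 2]
  }
}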

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      LOG.info("Processing for " +  nd.getName() + "(" + ((Operator)nd).getIdentifier() + ")");
      OpWalkerInfo owi = (OpWalkerInfo)procCtx;
      RowResolver inputRR = owi.getRowResolver(nd);
      TableScanOperator tsOp = (TableScanOperator)nd;
      mergeWithChildrenPred(tsOp, owi, null, null, false);
      ExprWalkerInfo pushDownPreds = owi.getPrunedPreds(tsOp);
      return createFilter(tsOp, pushDownPreds, owi);
    }
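Whether TableScanPPD runs at all is controlled by configuration: Hive only applies the predicate pushdown transform when hive.optimize.ppd is enabled. A small, self-contained check of that flag, reading it the same way the optimizer does:

import org.apache.hadoop.hive.conf.HiveConf;

// Probe: predicate pushdown, and hence the TableScanPPD processor above,
// is only wired into the optimizer when hive.optimize.ppd is true.
public final class PpdFlagProbe {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    boolean enabled = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEOPTPPD);
    System.out.println("hive.optimize.ppd = " + enabled);
  }
}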

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

      // LineageCtx
      LineageCtx lCtx = (LineageCtx) procCtx;
      ParseContext pctx = lCtx.getParseCtx();

      // Table scan operator.
      TableScanOperator top = (TableScanOperator) nd;
      org.apache.hadoop.hive.ql.metadata.Table t = pctx.getTopToTable().get(top);
      Table tab = t.getTTable();

      // Generate the mappings
      RowSchema rs = top.getSchema();
      List<FieldSchema> cols = t.getAllCols();
      Map<String, FieldSchema> fieldSchemaMap = new HashMap<String, FieldSchema>();
      for (FieldSchema col : cols) {
        fieldSchemaMap.put(col.getName(), col);
      }

      Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(pctx.getConf()).iterator();
      while (vcs.hasNext()) {
        VirtualColumn vc = vcs.next();
        fieldSchemaMap.put(vc.getName(), new FieldSchema(vc.getName(),
            vc.getTypeInfo().getTypeName(), ""));
      }

      TableAliasInfo tai = new TableAliasInfo();
      tai.setAlias(top.getConf().getAlias());
      tai.setTable(tab);
      for (ColumnInfo ci : rs.getSignature()) {
        // Create a dependency
        Dependency dep = new Dependency();
        BaseColumnInfo bci = new BaseColumnInfo();

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

      LinkedHashMap<Partition, List<String>> bigTblPartsToBucketFileNames =
          new LinkedHashMap<Partition, List<String>>();
      LinkedHashMap<Partition, Integer> bigTblPartsToBucketNumber =
          new LinkedHashMap<Partition, Integer>();

      for (int index = 0; index < joinAliases.size(); index++) {
        String alias = joinAliases.get(index);
        TableScanOperator tso = (TableScanOperator) topOps.get(alias);
        if (tso == null) {
          return null;
        }
        Table tbl = topToTable.get(tso);
      if (tbl.isPartitioned()) {

Examples of org.apache.hadoop.hive.ql.exec.TableScanOperator

      // The walked path is either TS -> FIL -> current operator (stack size
      // 3) or TS -> current operator (stack size 2); the table scan always
      // sits at the bottom of the stack.
      assert (stack.size() == 3 && stack.get(1) instanceof FilterOperator) ||
          stack.size() == 2;

      TableScanOperator tsOp = (TableScanOperator) stack.get(0);
      ((SamplePrunerCtx) procCtx).getOpToSamplePruner().put(tsOp, sampleDescr);
      return null;
    }