Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc
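
The snippets below are excerpts from Hive's query planner and optimizer showing typical uses of ExprNodeDesc: collecting the columns an expression references, walking and rewriting expression trees, turning expressions into evaluators, and rendering them as filter strings.

The first excerpt gathers every column referenced by a PTF definition, merging the columns used by the window function arguments, the partition expressions, and the order expressions into a pruned column list: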


      if (tDef.getWindowFunctions() != null) {
        for (WindowFunctionDef wDef : tDef.getWindowFunctions()) {
          if (wDef.getArgs() == null) {
            continue;
          }
          for (PTFExpressionDef arg : wDef.getArgs()) {
            ExprNodeDesc exprNode = arg.getExprNode();
            Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
          }
        }
      }
      if (tDef.getPartition() != null) {
        for (PTFExpressionDef col : tDef.getPartition().getExpressions()) {
          ExprNodeDesc exprNode = col.getExprNode();
          Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
        }
      }
      if (tDef.getOrder() != null) {
        for (PTFExpressionDef col : tDef.getOrder().getExpressions()) {
          ExprNodeDesc exprNode = col.getExprNode();
          Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
        }
      }
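
The merging above relies on ExprNodeDesc.getCols(), which returns the names of the columns an expression refers to, and on Utilities.mergeUniqElems, which adds them to the target list without duplicates. A minimal stand-alone sketch, assuming the usual ExprNodeColumnDesc constructor; the table alias "t" and the column names are invented:

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.exec.Utilities;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class GetColsSketch {
      public static void main(String[] args) {
        // Two column references against a hypothetical table alias "t".
        ExprNodeDesc c1 = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", "t", false);
        ExprNodeDesc c2 = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "value", "t", false);

        // Accumulate the referenced column names without duplicates, the same
        // way the pruner merges exprNode.getCols() into prunedCols above.
        List<String> prunedCols = new ArrayList<String>();
        Utilities.mergeUniqElems(prunedCols, c1.getCols());
        Utilities.mergeUniqElems(prunedCols, c2.getCols());
        Utilities.mergeUniqElems(prunedCols, c1.getCols()); // already present, not re-added
        System.out.println(prunedCols);                     // [key, value]
      }
    }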


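The next excerpt prunes the value columns of a ReduceSinkOperator. One flag per value column records whether it is still needed: either because a join child still references it (resolved through the operator's column expression map and row resolvers), or, in the second branch, because a windowing PTFOperator behind an Extract operator requires the corresponding output value column: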
        boolean[] flags = new boolean[conf.getValueCols().size()];
        for (int i = 0; i < flags.length; i++) {
          flags[i] = false;
        }
        if (childJoinCols != null && childJoinCols.size() > 0) {
          Map<String, ExprNodeDesc> exprMap = op.getColumnExprMap();
          for (String childCol : childJoinCols) {
            ExprNodeDesc desc = exprMap.get(childCol);
            int index = conf.getValueCols().indexOf(desc);
            flags[index] = true;
            String[] nm = redSinkRR.reverseLookup(childCol);
            if (nm != null) {
              ColumnInfo cInfo = parRR.get(nm[0], nm[1]);
              if (!colLists.contains(cInfo.getInternalName())) {
                colLists.add(cInfo.getInternalName());
              }
            }
          }
        }
        Collections.sort(colLists);
        pruneReduceSinkOperator(flags, op, cppCtx);
      } else if ((childOperators.size() == 1)
          && (childOperators.get(0) instanceof ExtractOperator )
          && (childOperators.get(0).getChildOperators().size() == 1)
          && (childOperators.get(0).getChildOperators().get(0) instanceof PTFOperator )
          && ((PTFOperator)childOperators.get(0).
              getChildOperators().get(0)).getConf().forWindowing() )  {

        /*
         * For RS that are followed by Extract & PTFOp for windowing
         * - do the same thing as above. Reconstruct ValueColumn list based on what is required
         *   by the PTFOp.
         */

        assert parentOperators.size() == 1;

        PTFOperator ptfOp = (PTFOperator) childOperators.get(0).getChildOperators().get(0);
        List<String> childCols = cppCtx.getPrunedColList(ptfOp);
        boolean[] flags = new boolean[conf.getValueCols().size()];
        for (int i = 0; i < flags.length; i++) {
          flags[i] = false;
        }
        if (childCols != null && childCols.size() > 0) {
          ArrayList<String> outColNames = op.getConf().getOutputValueColumnNames();
          for(int i=0; i < outColNames.size(); i++ ) {
            if ( childCols.contains(outColNames.get(i))) {
              ExprNodeDesc exprNode = op.getConf().getValueCols().get(i);
              flags[i] = true;
              Utilities.mergeUniqElems(colLists, exprNode.getCols());
            }
          }
        }
        Collections.sort(colLists);
        pruneReduceSinkOperator(flags, op, cppCtx);

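Output column names are translated back to the columns they were computed from by looking each one up in the column expression map and expanding the mapped expression with getCols(); names with no mapping pass through unchanged: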
      // to undo this transformation using the column expression map as the
      // column names propagate up the DAG.
      List<String> colsAfterReplacement = new ArrayList<String>();
      for (String col : cols) {
        if (colExprMap.containsKey(col)) {
          ExprNodeDesc expr = colExprMap.get(col);
          colsAfterReplacement.addAll(expr.getCols());
        } else {
          colsAfterReplacement.add(col);
        }
      }
      // this is SEL(*) cols + UDTF cols

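Predicate compaction for pruning walks the expression tree. AND and OR predicates are ExprNodeGenericFuncDesc nodes whose GenericUDF identifies the operator and whose children are the operands; each child is compacted in place, and if both children compact to null the node itself is dropped: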
    } else if (expr instanceof ExprNodeGenericFuncDesc) {
      GenericUDF udf = ((ExprNodeGenericFuncDesc)expr).getGenericUDF();
      boolean isAnd = udf instanceof GenericUDFOPAnd;
      if (isAnd || udf instanceof GenericUDFOPOr) {
        List<ExprNodeDesc> children = expr.getChildren();
        ExprNodeDesc left = children.get(0);
        children.set(0, compactExpr(left));
        ExprNodeDesc right = children.get(1);
        children.set(1, compactExpr(right));
        // Note that one does not simply compact (not-null or null) to not-null.
        // Only if we have an "and" is it valid to send one side to metastore.
        if (children.get(0) == null && children.get(1) == null) {
          return null;
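
Such a conjunction can be built directly with ExprNodeGenericFuncDesc.newInstance, which initializes the generic UDF against its children and records the result type. A minimal sketch, assuming a hypothetical table alias "t" with a partition column ds and an ordinary column key; the printed expression string is only indicative:

    import java.util.Arrays;

    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class AndExprSketch {
      public static void main(String[] args) throws Exception {
        // ds is not null   (hypothetical partition column)
        ExprNodeDesc ds = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "ds", "t", true);
        ExprNodeDesc dsPred = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPNotNull(),
            Arrays.<ExprNodeDesc>asList(ds));

        // key is not null  (hypothetical non-partition column)
        ExprNodeDesc key = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "key", "t", false);
        ExprNodeDesc keyPred = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPNotNull(),
            Arrays.<ExprNodeDesc>asList(key));

        // The conjunction is itself an ExprNodeGenericFuncDesc: its generic UDF is
        // GenericUDFOPAnd and its two children are the operand predicates, which is
        // exactly the shape compactExpr() inspects above.
        ExprNodeGenericFuncDesc and = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPAnd(),
            Arrays.asList(dsPred, keyPred));

        System.out.println(and.getChildren().size()); // 2
        System.out.println(and.getExprString());      // roughly (ds is not null and key is not null)
      }
    }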

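The partition pruner applies that compaction to a clone of the pushdown predicate. Non-partition columns are removed first; if the compacted filter can be pushed to the metastore it is evaluated with getPartitionsByFilter, otherwise (when checkJDOPushDown rejects it) the pruner falls back to a sequential scan over the partition names: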
      // Remove virtual columns. See javadoc for details.
      prunerExpr = removeNonPartCols(prunerExpr, extractPartColNames(tab));
      // Remove all unknown parts e.g. non-partition columns. See javadoc for details.
      ExprNodeDesc compactExpr = compactExpr(prunerExpr.clone());
      String oldFilter = prunerExpr.getExprString();
      if (compactExpr == null) {
        // This could happen when hive.mapred.mode=nonstrict and all the predicates
        // are on non-partition columns.
        LOG.debug("Filter " + oldFilter + " was null after compacting");
        return new PrunedPartitionList(tab, Hive.get().getAllPartitionsForPruner(tab), true);
      }

      Set<Partition> partitions = new LinkedHashSet<Partition>();
      boolean hasUnknownPartitions = false;
      String message = Utilities.checkJDOPushDown(tab, compactExpr, null);
      if (message != null) {
        LOG.info(ErrorMsg.INVALID_JDO_FILTER_EXPRESSION.getMsg("by condition '"
            + message + "'"));
        hasUnknownPartitions = pruneBySequentialScan(tab, partitions, prunerExpr, conf);
      } else {
        String filter = compactExpr.getExprString();
        LOG.debug("Filter w/ compacting: " + filter +"; filter w/o compacting: " + oldFilter);
        hasUnknownPartitions = !filter.equals(oldFilter);
        partitions.addAll(Hive.get().getPartitionsByFilter(tab, filter));
      }
      return new PrunedPartitionList(tab, partitions, hasUnknownPartitions);
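
The pruner compacts prunerExpr.clone() rather than prunerExpr itself because ExprNodeDesc.clone() is a deep copy: the rewrite leaves the original predicate intact, and comparing the two getExprString() values afterwards is what decides hasUnknownPartitions. A small sketch of that behaviour with invented boolean columns:

    import java.util.Arrays;

    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class CloneCompareSketch {
      public static void main(String[] args) throws Exception {
        ExprNodeDesc a = new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "ds_ok", "t", true);
        ExprNodeDesc b = new ExprNodeColumnDesc(TypeInfoFactory.booleanTypeInfo, "flag", "t", false);
        ExprNodeDesc pred = ExprNodeGenericFuncDesc.newInstance(new GenericUDFOPAnd(),
            Arrays.asList(a, b));

        // Rewrite a deep copy, replacing the non-partition operand with a constant,
        // the way compaction drops sub-expressions it cannot push down.
        ExprNodeDesc working = pred.clone();
        working.getChildren().set(1, new ExprNodeConstantDesc(true));

        // The original is untouched, so comparing the expression strings reveals
        // whether anything was dropped.
        String oldFilter = pred.getExprString();
        String newFilter = working.getExprString();
        System.out.println(oldFilter.equals(newFilter)); // false -> unknown partitions remain
      }
    }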

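The simple fetch path consults the partition pruner expression stored for the table scan: the filter can be bypassed only when PartitionPruner.onlyContainsPartnCols confirms the predicate references nothing but partition columns, and pruning leaves no unknown partitions: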
      return checkOperators(new FetchData(table, splitSample), ts, aggressive, false);
    }

    boolean bypassFilter = false;
    if (HiveConf.getBoolVar(pctx.getConf(), HiveConf.ConfVars.HIVEOPTPPD)) {
      ExprNodeDesc pruner = pctx.getOpToPartPruner().get(ts);
      bypassFilter = PartitionPruner.onlyContainsPartnCols(table, pruner);
    }
    if (aggressive || bypassFilter) {
      PrunedPartitionList pruned = pctx.getPrunedPartitions(alias, ts);
      if (aggressive || !pruned.hasUnknownPartitions()) {

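On the PTF translation side, each argument AST is converted to an ExprNodeDesc with genExprNodeDesc, wrapped in an evaluator obtained from WindowingExprNodeEvaluatorFactory, and initialized against the input shape to obtain its ObjectInspector while building a PTFExpressionDef: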
  public PTFExpressionDef buildExpressionDef(ShapeDetails inpShape, ASTNode arg)
      throws HiveException {
    PTFExpressionDef argDef = new PTFExpressionDef();

    ExprNodeDesc exprNode = semAly.genExprNodeDesc(arg, inpShape.getRr(),
        inpShape.getTypeCheckCtx());
    ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(llInfo, exprNode);
    ObjectInspector oi = initExprNodeEvaluator(exprEval, exprNode, inpShape);

    argDef.setExpressionTreeString(arg.toStringTree());
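
Outside the windowing code, the same compile-and-initialize step is available through the generic ExprNodeEvaluatorFactory. A minimal sketch, assuming an invented row shape struct<key:int, value:string> represented with the standard Java-object ObjectInspectors:

    import java.util.ArrayList;
    import java.util.Arrays;

    import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
    import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class EvaluatorSketch {
      public static void main(String[] args) throws Exception {
        // Row shape: struct<key:int, value:string>, rows represented as java.util.List.
        ObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("key", "value"),
            Arrays.<ObjectInspector>asList(
                PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                PrimitiveObjectInspectorFactory.javaStringObjectInspector));

        // The expression to evaluate: a reference to the "value" column.
        ExprNodeDesc expr = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "value", "t", false);

        // Compile the ExprNodeDesc into an evaluator, bind it to the row shape,
        // then evaluate it against a concrete row.
        ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(expr);
        ObjectInspector outOI = eval.initialize(rowOI);
        Object result = eval.evaluate(new ArrayList<Object>(Arrays.<Object>asList(7, "hello")));
        System.out.println(outOI.getTypeName() + ": " + result); // string: hello
      }
    }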

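Lead/lag calls need one more wiring step: for each lead/lag expression found inside the top-level expression, a duplicate evaluator is built for the call's first argument, initialized on the input ObjectInspector, and handed to the GenericUDFLeadLag instance: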
    List<ExprNodeGenericFuncDesc> llFuncExprs = llInfo.getLLFuncExprsInTopExpr(exprNode);
    if (llFuncExprs != null)
    {
      for (ExprNodeGenericFuncDesc llFuncExpr : llFuncExprs)
      {
        ExprNodeDesc firstArg = llFuncExpr.getChildren().get(0);
        ExprNodeEvaluator dupExprEval = WindowingExprNodeEvaluatorFactory.get(llInfo, firstArg);
        dupExprEval.initialize(inpShape.getOI());
        GenericUDFLeadLag llFn = (GenericUDFLeadLag) llFuncExpr.getGenericUDF();
        llFn.setExprEvaluator(dupExprEval);
      }

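For a DISTINCT aggregation that can still be partially aggregated on the mapper (because the data is bucketed/sorted on the distinct key), the parameter is rewritten in place: the ExprNodeColumnDesc is marked as a regular column and repointed at the reduce-side value column "VALUE._col" + pos: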
          // Partial aggregation is not done for distincts on the mapper
          // However, if the data is bucketed/sorted on the distinct key, partial aggregation
          // can be performed on the mapper.
          if (aggr.getDistinct()) {
            ArrayList<ExprNodeDesc> parameters = new ArrayList<ExprNodeDesc>();
            ExprNodeDesc param = aggr.getParameters().get(0);
            assert param instanceof ExprNodeColumnDesc;
            ExprNodeColumnDesc paramC = (ExprNodeColumnDesc) param;
            paramC.setIsPartitionColOrVirtualCol(false);
            paramC.setColumn("VALUE._col" + pos);
            parameters.add(paramC);

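The last excerpt maintains a mapping from SELECT output column names to underlying table columns: outputs recorded as constants are dropped from the mapping, and when the expression at a position is a plain ExprNodeColumnDesc the output name inherits whatever mapping the referenced column already has: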
              tableColsMapping.remove(outputColumnName);
              newConstantCols.add(outputColumnName);
              continue;
            }

            ExprNodeDesc selectColList = selectDesc.getColList().get(pos);
            if (selectColList instanceof ExprNodeColumnDesc) {
              String newValue =
                  tableColsMapping.get(((ExprNodeColumnDesc) selectColList).getColumn());
              tableColsMapping.put(outputColumnName, newValue);
            }
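
Both of the last two excerpts treat ExprNodeColumnDesc as a mutable leaf: the referenced column can be read with getColumn() and rewritten with setColumn(). A tiny sketch with invented names:

    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class RenameColumnSketch {
      public static void main(String[] args) {
        ExprNodeDesc desc = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "value", "t", false);

        if (desc instanceof ExprNodeColumnDesc) {
          ExprNodeColumnDesc col = (ExprNodeColumnDesc) desc;
          System.out.println(col.getColumn());        // value
          col.setIsPartitionColOrVirtualCol(false);   // as the DISTINCT example does
          col.setColumn("VALUE._col0");               // repoint at the reduce-side column
          System.out.println(col.getColumn());        // VALUE._col0
        }
      }
    }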
