Examples of ExprNodeDesc


Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

        // Gather the columns referenced by each window function's arguments.
        for (WindowFunctionDef wDef : tDef.getWindowFunctions()) {
          if (wDef.getArgs() == null) {
            continue;
          }
          for (PTFExpressionDef arg : wDef.getArgs()) {
            ExprNodeDesc exprNode = arg.getExprNode();
            Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
          }
        }
      }
      // Likewise for the PTF's partition and order expressions.
      if (tDef.getPartition() != null) {
        for (PTFExpressionDef col : tDef.getPartition().getExpressions()) {
          ExprNodeDesc exprNode = col.getExprNode();
          Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
        }
      }
      if (tDef.getOrder() != null) {
        for (PTFExpressionDef col : tDef.getOrder().getExpressions()) {
          ExprNodeDesc exprNode = col.getExprNode();
          Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
        }
      }
    }
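In the excerpt above, each ExprNodeDesc reports the column names it references via getCols(), and Utilities.mergeUniqElems folds them into prunedCols without duplicates. A minimal, self-contained sketch of that merge step (the mergeUniqElems below is a simplified stand-in, not Hive's implementation):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class MergeUniqElemsSketch {
  // Simplified stand-in for Utilities.mergeUniqElems: add each element of
  // src to dest unless dest already contains it.
  static void mergeUniqElems(List<String> dest, List<String> src) {
    if (src == null) {
      return;
    }
    for (String s : src) {
      if (!dest.contains(s)) {
        dest.add(s);
      }
    }
  }

  public static void main(String[] args) {
    List<String> prunedCols = new ArrayList<>(Arrays.asList("key"));
    // Columns referenced by a window function argument, as getCols() would report them.
    mergeUniqElems(prunedCols, Arrays.asList("value", "key"));
    System.out.println(prunedCols); // [key, value]
  }
}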

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

        // Reset the flags; flags[i] marks whether value column i is needed.
        for (int i = 0; i < flags.length; i++) {
          flags[i] = false;
        }
        if (childJoinCols != null && childJoinCols.size() > 0) {
          Map<String, ExprNodeDesc> exprMap = op.getColumnExprMap();
          for (String childCol : childJoinCols) {
            ExprNodeDesc desc = exprMap.get(childCol);
            int index = conf.getValueCols().indexOf(desc);
            flags[index] = true;
            String[] nm = redSinkRR.reverseLookup(childCol);
            if (nm != null) {
              ColumnInfo cInfo = parRR.get(nm[0], nm[1]);
              if (!colLists.contains(cInfo.getInternalName())) {
                colLists.add(cInfo.getInternalName());
              }
            }
          }
        }
        Collections.sort(colLists);
        pruneReduceSinkOperator(flags, op, cppCtx);
      } else if ((childOperators.size() == 1)
          && (childOperators.get(0) instanceof ExtractOperator)
          && (childOperators.get(0).getChildOperators().size() == 1)
          && (childOperators.get(0).getChildOperators().get(0) instanceof PTFOperator)
          && ((PTFOperator) childOperators.get(0)
              .getChildOperators().get(0)).getConf().forWindowing()) {

        /*
         * For ReduceSinks that are followed by an Extract and a PTFOperator
         * for windowing, do the same thing as above: reconstruct the value
         * column list based on what the PTFOperator requires.
         */

        assert parentOperators.size() == 1;

        PTFOperator ptfOp = (PTFOperator) childOperators.get(0).getChildOperators().get(0);
        List<String> childCols = cppCtx.getPrunedColList(ptfOp);
        boolean[] flags = new boolean[conf.getValueCols().size()];
        for (int i = 0; i < flags.length; i++) {
          flags[i] = false;
        }
        if (childCols != null && childCols.size() > 0) {
          ArrayList<String> outColNames = op.getConf().getOutputValueColumnNames();
          for (int i = 0; i < outColNames.size(); i++) {
            if (childCols.contains(outColNames.get(i))) {
              ExprNodeDesc exprNode = op.getConf().getValueCols().get(i);
              flags[i] = true;
              Utilities.mergeUniqElems(colLists, exprNode.getCols());
            }
          }
        }
        Collections.sort(colLists);
        pruneReduceSinkOperator(flags, op, cppCtx);
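Both branches above mark the needed value columns in a boolean flags array and then call pruneReduceSinkOperator to drop the unmarked ones. A small sketch of the flag-based pruning idea, with a hypothetical prune helper standing in for the real operator rewrite:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FlagPruneSketch {
  // Hypothetical helper mirroring the effect of pruneReduceSinkOperator:
  // keep valueCols[i] only when flags[i] was set by a consumer.
  static List<String> prune(List<String> valueCols, boolean[] flags) {
    List<String> kept = new ArrayList<>();
    for (int i = 0; i < valueCols.size(); i++) {
      if (flags[i]) {
        kept.add(valueCols.get(i));
      }
    }
    return kept;
  }

  public static void main(String[] args) {
    List<String> valueCols = Arrays.asList("_col0", "_col1", "_col2");
    boolean[] flags = {true, false, true}; // only _col0 and _col2 are needed downstream
    System.out.println(prune(valueCols, flags)); // [_col0, _col2]
  }
}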

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

      // The columns may have been renamed further down the operator DAG; we
      // need to undo this transformation using the column expression map as
      // the column names propagate up the DAG.
      List<String> colsAfterReplacement = new ArrayList<String>();
      for (String col : cols) {
        if (colExprMap.containsKey(col)) {
          ExprNodeDesc expr = colExprMap.get(col);
          colsAfterReplacement.addAll(expr.getCols());
        } else {
          colsAfterReplacement.add(col);
        }
      }
      // this is SEL(*) cols + UDTF cols
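To make the back-translation concrete, the sketch below mimics the loop above using plain collections; the colExprMap here is a hypothetical map from an output column name to the input columns read by its defining expression (in Hive those come from ExprNodeDesc.getCols()):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ColExprMapSketch {
  public static void main(String[] args) {
    // Hypothetical column expression map: output column name -> input
    // columns read by the expression that defines it.
    Map<String, List<String>> colExprMap = new HashMap<>();
    colExprMap.put("_col0", Arrays.asList("key", "value"));

    List<String> cols = Arrays.asList("_col0", "ds");
    List<String> colsAfterReplacement = new ArrayList<>();
    for (String col : cols) {
      if (colExprMap.containsKey(col)) {
        // Undo the renaming: substitute the underlying input columns.
        colsAfterReplacement.addAll(colExprMap.get(col));
      } else {
        colsAfterReplacement.add(col);
      }
    }
    System.out.println(colsAfterReplacement); // [key, value, ds]
  }
}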

Examples of org.apache.hadoop.hive.ql.plan.exprNodeDesc

    // If the expression is a constant, we convert it into an
    // exprNodeConstantDesc. For others we just build the exprNodeFuncDesc
    // with recursively built children.
    ASTNode expr = (ASTNode) nd;
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    RowResolver input = ctx.getInputRR();
    exprNodeDesc desc = null;

    // If the current subexpression is pre-calculated, as in Group-By etc.,
    // reuse the column that already holds its value.
    ColumnInfo colInfo = input.get("", expr.toStringTree());
    if (colInfo != null) {
      desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName());
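The RowResolver lookup above short-circuits type checking when the expression was already evaluated, e.g. by a Group-By. A toy illustration of that reuse, with a plain Map standing in for the RowResolver:

import java.util.HashMap;
import java.util.Map;

public class GByLookupSketch {
  public static void main(String[] args) {
    // Hypothetical stand-in for the RowResolver: the string form of an
    // already-computed expression mapped to the internal column that holds
    // its value after the Group-By.
    Map<String, String> precomputed = new HashMap<>();
    precomputed.put("(TOK_FUNCTION count 1)", "_col1");

    String exprTree = "(TOK_FUNCTION count 1)"; // expr.toStringTree() in the excerpt
    String internalName = precomputed.get(exprTree);
    if (internalName != null) {
      // Reuse the existing column instead of re-evaluating the expression,
      // mirroring the exprNodeColumnDesc short-circuit above.
      System.out.println("reuse column " + internalName);
    }
  }
}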

Examples of org.apache.hadoop.hive.ql.plan.exprNodeDesc

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      // Reuse the result if the expression was pre-computed by a Group-By.
      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }

      // Otherwise this is a null literal: build a null descriptor.
      return new exprNodeNullDesc();
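The same guard appears at the top of each processor in the examples that follow: first ask TypeCheckProcFactory.processGByExpr for a pre-computed descriptor, and only build a fresh one when it returns null. A hypothetical skeleton of that shared control flow (names here are illustrative, not Hive's):

// Hypothetical skeleton of the shared control flow; names are illustrative.
abstract class LeafProcessorSketch {
  Object process(Object node, Object ctx) {
    Object desc = tryPrecomputed(node, ctx); // plays the role of processGByExpr
    if (desc != null) {
      return desc; // the expression was already evaluated by a Group-By
    }
    return buildFresh(node, ctx); // type-specific descriptor construction
  }

  abstract Object tryPrecomputed(Object node, Object ctx);

  abstract Object buildFresh(Object node, Object ctx);
}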

Examples of org.apache.hadoop.hive.ql.plan.exprNodeDesc

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }
     
      Number v = null;

Examples of org.apache.hadoop.hive.ql.plan.exprNodeDesc

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }
     
      ASTNode expr = (ASTNode)nd;

Examples of org.apache.hadoop.hive.ql.plan.exprNodeDesc

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }

      ASTNode expr = (ASTNode)nd;

Examples of org.apache.hadoop.hive.ql.plan.exprNodeDesc

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }

      ASTNode expr = (ASTNode)nd;

Examples of org.apache.hadoop.hive.ql.plan.exprNodeDesc

    /**
     * Builds an exprNodeDesc for a UDF call by resolving the UDF method that
     * matches the argument types of the children.
     */
    public static exprNodeDesc getFuncExprNodeDesc(String udfName, List<exprNodeDesc> children) {
      // Find the corresponding method.
      ArrayList<Class<?>> argumentClasses = new ArrayList<Class<?>>(children.size());
      for (int i = 0; i < children.size(); i++) {
        exprNodeDesc child = children.get(i);
        assert(child != null);
        TypeInfo childTypeInfo = child.getTypeInfo();
        assert(childTypeInfo != null);

        // Note: we don't pass the element types of MAP/LIST to the UDF.
        // That works for null tests and size(), but not for more complex
        // functionality such as list slicing. For those cases we plan to have
        // a ComplexUDF interface with an evaluate method that accepts a list
        // of objects and a list of ObjectInspectors.
        switch (childTypeInfo.getCategory()) {
          case PRIMITIVE: {
            argumentClasses.add(childTypeInfo.getPrimitiveClass());
            break;
          }
          case MAP: {
            argumentClasses.add(Map.class);
            break;
          }
          case LIST: {
            argumentClasses.add(List.class);
            break;
          }
          case STRUCT: {
            argumentClasses.add(Object.class);
            break;
          }
          default: {
            // Should never happen.
            assert(false);
          }
        }
      }
      Method udfMethod = FunctionRegistry.getUDFMethod(udfName, argumentClasses);
      if (udfMethod == null) {
        return null;
      }

      ArrayList<exprNodeDesc> ch = new ArrayList<exprNodeDesc>();
      Class<?>[] pTypes = udfMethod.getParameterTypes();

      for (int i = 0; i < children.size(); i++) {
        exprNodeDesc desc = children.get(i);
        Class<?> pType = ObjectInspectorUtils.generalizePrimitive(pTypes[i]);
        if (desc instanceof exprNodeNullDesc) {
          // Give the null a concrete type taken from the UDF's parameter.
          exprNodeConstantDesc newCh =
              new exprNodeConstantDesc(TypeInfoFactory.getPrimitiveTypeInfo(pType), null);
          ch.add(newCh);
        } else if (pType.isAssignableFrom(argumentClasses.get(i))) {
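getFuncExprNodeDesc resolves the UDF's evaluate method from the collected argument classes via FunctionRegistry.getUDFMethod. The sketch below imitates that resolution step with plain reflection; UDFConcat and this getUDFMethod are toy stand-ins, not Hive classes:

import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.List;

public class UdfLookupSketch {
  // Toy UDF standing in for a registered Hive UDF class.
  public static class UDFConcat {
    public String evaluate(String a, String b) {
      return a + b;
    }
  }

  // Simplified stand-in for FunctionRegistry.getUDFMethod: find an evaluate
  // method whose parameter types can accept the given argument classes.
  static Method getUDFMethod(Class<?> udf, List<Class<?>> argClasses) {
    for (Method m : udf.getMethods()) {
      if (!m.getName().equals("evaluate")) {
        continue;
      }
      Class<?>[] p = m.getParameterTypes();
      if (p.length != argClasses.size()) {
        continue;
      }
      boolean ok = true;
      for (int i = 0; i < p.length; i++) {
        if (!p[i].isAssignableFrom(argClasses.get(i))) {
          ok = false;
          break;
        }
      }
      if (ok) {
        return m;
      }
    }
    return null;
  }

  public static void main(String[] args) {
    Method m = getUDFMethod(UDFConcat.class, Arrays.asList(String.class, String.class));
    System.out.println(m != null ? m.getName() : "no match"); // evaluate
  }
}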