Examples of ExprNodeGenericFuncDesc


Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

                  ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg()));
          }

          // Calculate TypeInfo
          TypeInfo t = ((ListTypeInfo) myt).getListElementTypeInfo();
          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
              .getGenericUDFForIndex(), children);
        } else if (myt.getCategory() == Category.MAP) {
          // Only allow constant map key for now
          if (!(children.get(1) instanceof ExprNodeConstantDesc)) {
            throw new SemanticException(SemanticAnalyzer.generateErrorMessage(
                  expr,
                  ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg()));
          }
          if (!(((ExprNodeConstantDesc) children.get(1)).getTypeInfo()
              .equals(((MapTypeInfo) myt).getMapKeyTypeInfo()))) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE
                .getMsg(expr));
          }
          // Calculate TypeInfo
          TypeInfo t = ((MapTypeInfo) myt).getMapValueTypeInfo();
          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
              .getGenericUDFForIndex(), children);
        } else {
          throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr,
              myt.getTypeName()));
        }
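The excerpt above handles indexing into LIST and MAP types during expression type checking: the index child must be a constant, a map key's type must match the map's declared key type, and the resulting ExprNodeGenericFuncDesc is typed as the element (or value) type. A minimal sketch of the same construction for a map index, using only the constructors and factory methods visible above (the helper name buildMapIndex is hypothetical, and imports are omitted as in the excerpts):

  // Hypothetical helper: build the expression mapExpr[constantKey] by hand.
  private static ExprNodeDesc buildMapIndex(ExprNodeDesc mapExpr, Object constantKey) {
    MapTypeInfo mapType = (MapTypeInfo) mapExpr.getTypeInfo();
    // the constant key carries the map's declared key type
    ExprNodeConstantDesc keyDesc =
        new ExprNodeConstantDesc(mapType.getMapKeyTypeInfo(), constantKey);
    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
    children.add(mapExpr);
    children.add(keyDesc);
    // m[k] evaluates to the map's value type
    return new ExprNodeGenericFuncDesc(mapType.getMapValueTypeInfo(),
        FunctionRegistry.getGenericUDFForIndex(), children);
  }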

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

      while (joinKeys.size() > 0) {
        ExprNodeDesc node = joinKeys.remove(0);
        if (node instanceof ExprNodeColumnDesc) {
          joinCols.addAll(node.getCols());
        } else if (node instanceof ExprNodeGenericFuncDesc) {
          ExprNodeGenericFuncDesc udfNode = ((ExprNodeGenericFuncDesc) node);
          GenericUDF udf = udfNode.getGenericUDF();
          if (!FunctionRegistry.isDeterministic(udf)) {
            return false;
          }
          joinKeys.addAll(0, udfNode.getChildExprs());
        }
      }

      Table tbl = topToTable.get(tso);
      if (tbl.isPartitioned()) {
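This excerpt flattens join key expressions: plain column references contribute their column names, while an ExprNodeGenericFuncDesc is accepted only if its GenericUDF is deterministic, in which case its children are pushed back onto the work list. A compact sketch of the same idea as a standalone helper (the name flattenJoinKeyColumns and the null-on-failure convention are illustrative, not the project's API):

  // Hypothetical helper: return the column names referenced by the join keys,
  // or null if any key contains a non-deterministic UDF or an unsupported node.
  private static List<String> flattenJoinKeyColumns(List<ExprNodeDesc> joinKeys) {
    List<String> joinCols = new ArrayList<String>();
    List<ExprNodeDesc> work = new ArrayList<ExprNodeDesc>(joinKeys);
    while (!work.isEmpty()) {
      ExprNodeDesc node = work.remove(0);
      if (node instanceof ExprNodeColumnDesc) {
        joinCols.addAll(node.getCols());
      } else if (node instanceof ExprNodeGenericFuncDesc) {
        ExprNodeGenericFuncDesc udfNode = (ExprNodeGenericFuncDesc) node;
        if (!FunctionRegistry.isDeterministic(udfNode.getGenericUDF())) {
          return null; // a non-deterministic key cannot be reasoned about statically
        }
        work.addAll(0, udfNode.getChildExprs());
      } else {
        return null; // constants and other node kinds are not handled here
      }
    }
    return joinCols;
  }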

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

  /**
   * Returns the GenericUDF class used by the given expression, or null if the
   * expression is not an ExprNodeGenericFuncDesc.
   */
  private static Class<? extends GenericUDF> getGenericUDFClassFromExprDesc(ExprNodeDesc desc) {
    if (!(desc instanceof ExprNodeGenericFuncDesc)) {
      return null;
    }
    ExprNodeGenericFuncDesc genericFuncDesc = (ExprNodeGenericFuncDesc) desc;
    return genericFuncDesc.getGenericUDF().getClass();
  }
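getGenericUDFClassFromExprDesc returns the concrete GenericUDF class behind a function expression, or null when the expression is not an ExprNodeGenericFuncDesc. A typical, hypothetical use is dispatching on that class; GenericUDFOPEqual below is only an assumed example of such a class:

  Class<? extends GenericUDF> udfClass = getGenericUDFClassFromExprDesc(predicate);
  boolean isEquality =
      udfClass != null && GenericUDFOPEqual.class.isAssignableFrom(udfClass);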

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

  /**
   * Returns the old-style UDF class wrapped by the expression's GenericUDFBridge,
   * or null if the expression is not a bridged function call.
   */
  private static Class<? extends UDF> getUDFClassFromExprDesc(ExprNodeDesc desc) {
    if (!(desc instanceof ExprNodeGenericFuncDesc)) {
      return null;
    }
    ExprNodeGenericFuncDesc genericFuncDesc = (ExprNodeGenericFuncDesc) desc;
    if (!(genericFuncDesc.getGenericUDF() instanceof GenericUDFBridge)) {
      return null;
    }
    GenericUDFBridge bridge = (GenericUDFBridge) (genericFuncDesc
        .getGenericUDF());
    return bridge.getUdfClass();
  }
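Old-style UDFs (subclasses of UDF) appear in a plan wrapped in a GenericUDFBridge, so this variant first checks for the bridge and then reports the wrapped class, returning null for anything that is not a bridged function call. A hypothetical usage sketch:

  Class<? extends UDF> bridgedClass = getUDFClassFromExprDesc(predicate);
  boolean isBridgedUdf = (bridgedClass != null);
  // when non-null, the predicate is a GenericUDFBridge around an old-style UDF,
  // and bridgedClass identifies the wrapped implementation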

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

  public static class GenericFuncExprProcessor implements NodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      PcrExprProcCtx ctx = (PcrExprProcCtx) procCtx;
      ExprNodeGenericFuncDesc fd = (ExprNodeGenericFuncDesc) nd;

      if (FunctionRegistry.isOpNot(fd)) {
        assert (nodeOutputs.length == 1);
        NodeInfoWrapper wrapper = (NodeInfoWrapper) nodeOutputs[0];
        if (wrapper.state == WalkState.TRUE) {
          ExprNodeConstantDesc falseDesc = new ExprNodeConstantDesc(
              wrapper.outExpr.getTypeInfo(), Boolean.FALSE);
          return new NodeInfoWrapper(WalkState.FALSE, null, falseDesc);
        } else if (wrapper.state == WalkState.FALSE) {
          ExprNodeConstantDesc trueDesc = new ExprNodeConstantDesc(
              wrapper.outExpr.getTypeInfo(), Boolean.TRUE);
          return new NodeInfoWrapper(WalkState.TRUE, null, trueDesc);
        } else if (wrapper.state == WalkState.DIVIDED) {
          Boolean[] results = new Boolean[ctx.getPartList().size()];
          for (int i = 0; i < ctx.getPartList().size(); i++) {
            results[i] = opNot(wrapper.ResultVector[i]);
          }
          return new NodeInfoWrapper(WalkState.DIVIDED, results,
              getOutExpr(fd, nodeOutputs));
        } else {
          return new NodeInfoWrapper(wrapper.state, null,
              getOutExpr(fd, nodeOutputs));
        }
      } else if (FunctionRegistry.isOpAnd(fd)) {
        assert (nodeOutputs.length == 2);
        NodeInfoWrapper c1 = (NodeInfoWrapper)nodeOutputs[0];
        NodeInfoWrapper c2 = (NodeInfoWrapper)nodeOutputs[1];

        if (c1.state == WalkState.FALSE) {
          return c1;
        } else if (c2.state == WalkState.FALSE) {
          return c2;
        } else if (c1.state == WalkState.TRUE) {
          return c2;
        } else if (c2.state == WalkState.TRUE) {
          return c1;
        } else if (c1.state == WalkState.UNKNOWN || c2.state == WalkState.UNKNOWN) {
          return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
        } else if (c1.state == WalkState.DIVIDED && c2.state == WalkState.DIVIDED) {
          Boolean[] results = new Boolean[ctx.getPartList().size()];
          for (int i = 0; i < ctx.getPartList().size(); i++) {
            results[i] = opAnd(c1.ResultVector[i], c2.ResultVector[i]);
          }
          return getResultWrapFromResults(results, fd, nodeOutputs);
        }
        return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
      } else if (FunctionRegistry.isOpOr(fd)) {
        assert (nodeOutputs.length == 2);
        NodeInfoWrapper c1 = (NodeInfoWrapper)nodeOutputs[0];
        NodeInfoWrapper c2 = (NodeInfoWrapper)nodeOutputs[1];

        if (c1.state == WalkState.TRUE) {
          return c1;
        } else if (c2.state == WalkState.TRUE) {
          return c2;
        } else if (c1.state == WalkState.FALSE) {
          return c2;
        } else if (c2.state == WalkState.FALSE) {
          return c1;
        } else if (c1.state == WalkState.UNKNOWN || c2.state == WalkState.UNKNOWN) {
          return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
        } else if (c1.state == WalkState.DIVIDED && c2.state == WalkState.DIVIDED) {
          Boolean[] results = new Boolean[ctx.getPartList().size()];
          for (int i = 0; i < ctx.getPartList().size(); i++) {
            results[i] = opOr(c1.ResultVector[i], c2.ResultVector[i]);
          }
          return getResultWrapFromResults(results, fd, nodeOutputs);
        }
        return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
      } else if (!FunctionRegistry.isDeterministic(fd.getGenericUDF())) {
        // If it's a non-deterministic UDF, mark the node UNKNOWN
        return new NodeInfoWrapper(WalkState.UNKNOWN, null,
            getOutExpr(fd, nodeOutputs));
      } else {
        // If any child is UNKNOWN, mark this node UNKNOWN
        boolean has_part_col = false;
        for (Object child : nodeOutputs) {
          NodeInfoWrapper wrapper = (NodeInfoWrapper) child;
          if (wrapper.state == WalkState.UNKNOWN) {
            return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
          } else if (wrapper.state == WalkState.PART_COL) {
            has_part_col = true;
          }
        }

        if (has_part_col) {
          // we need to evaluate the result for every partition in the pruning list
          if (fd.getTypeInfo().equals(TypeInfoFactory.booleanTypeInfo)) {
            // if the return type of the GenericUDF is boolean and all partitions agree on
            // a result, we update the state of the node to TRUE or FALSE
            Boolean[] results = new Boolean[ctx.getPartList().size()];
            for (int i = 0; i < ctx.getPartList().size(); i++) {
              results[i] = (Boolean) evalExprWithPart(fd, ctx.getPartList().get(i));
            }
            return getResultWrapFromResults(results, fd, nodeOutputs);
          }

          // In the case that the return type of the GenericUDF is not boolean: if the partitions
          // do not all agree on a result, we make the node UNKNOWN; if they all agree, we replace
          // the node with a CONSTANT node whose value is the agreed result.
          Object[] results = new Object[ctx.getPartList().size()];
          for (int i = 0; i < ctx.getPartList().size(); i++) {
            results[i] = evalExprWithPart(fd, ctx.getPartList().get(i));
          }
          Object result = ifResultsAgree(results);
          if (result == null) {
            // if the result is not boolean and not all partitions agree on the
            // result, we don't remove the condition. Potentially, this can miss
            // cases like "where ds % 3 == 1 or ds % 3 == 2"
            // TODO: handle this case by making result vector to handle all
            // constant values.
            return new NodeInfoWrapper(WalkState.UNKNOWN, null, getOutExpr(fd, nodeOutputs));
          }
          return new NodeInfoWrapper(WalkState.CONSTANT, null,
              new ExprNodeConstantDesc(fd.getTypeInfo(), result));
        }

        return new NodeInfoWrapper(WalkState.CONSTANT, null, getOutExpr(fd, nodeOutputs));
      }
    }
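The processor above evaluates partition-condition expressions per partition, relying on three-valued helpers opNot, opAnd and opOr that are not part of the excerpt. A plausible sketch of those helpers, written here only to make the per-partition result vectors readable, with null standing for "unknown / differs for this partition" (the real implementations may differ):

  private static Boolean opNot(Boolean v) {
    return (v == null) ? null : !v;
  }

  private static Boolean opAnd(Boolean a, Boolean b) {
    if (Boolean.FALSE.equals(a) || Boolean.FALSE.equals(b)) {
      return Boolean.FALSE; // false dominates regardless of the other operand
    }
    if (a == null || b == null) {
      return null;
    }
    return Boolean.TRUE;
  }

  private static Boolean opOr(Boolean a, Boolean b) {
    if (Boolean.TRUE.equals(a) || Boolean.TRUE.equals(b)) {
      return Boolean.TRUE; // true dominates regardless of the other operand
    }
    if (a == null || b == null) {
      return null;
    }
    return Boolean.FALSE;
  }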

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

    for (Map.Entry<Byte, List<ExprNodeDesc>> entry : filterMap.entrySet()) {
      Byte srcAlias = entry.getKey();
      List<ExprNodeDesc> columnDescList = entry.getValue();

      for (ExprNodeDesc nodeExpr : columnDescList) {
        ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) nodeExpr;
        for (ExprNodeDesc childDesc : funcDesc.getChildExprs()) {
          if (!(childDesc instanceof ExprNodeColumnDesc)) {
            continue;
          }
          ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) childDesc;
          // reset columns
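In this excerpt each filter expression is assumed to be an ExprNodeGenericFuncDesc whose direct children are scanned for column references. When column references may sit deeper in the expression tree, a small recursive walk does the job; the sketch below is not part of the original code:

  // Hypothetical helper: collect every ExprNodeColumnDesc leaf under an expression.
  private static void collectColumns(ExprNodeDesc node, List<ExprNodeColumnDesc> out) {
    if (node instanceof ExprNodeColumnDesc) {
      out.add((ExprNodeColumnDesc) node);
    } else if (node instanceof ExprNodeGenericFuncDesc) {
      for (ExprNodeDesc child : ((ExprNodeGenericFuncDesc) node).getChildExprs()) {
        collectColumns(child, out);
      }
    }
  }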

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

      while (joinKeys.size() > 0) {
        ExprNodeDesc node = joinKeys.remove(0);
        if (node instanceof ExprNodeColumnDesc) {
          joinCols.addAll(node.getCols());
        } else if (node instanceof ExprNodeGenericFuncDesc) {
          ExprNodeGenericFuncDesc udfNode = ((ExprNodeGenericFuncDesc) node);
          GenericUDF udf = udfNode.getGenericUDF();
          if (!FunctionRegistry.isDeterministic(udf)) {
            return false;
          }
          joinKeys.addAll(0, udfNode.getChildExprs());
        } else {
          return false;
        }
      }

Examples of org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc
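Note: the remaining examples use the lowercase class names (exprNodeGenericFuncDesc, exprNodeConstantDesc, exprNodeDesc) from an older Hive release; the classes were later renamed to the capitalized forms shown above, but the construction pattern is the same.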

            throw new SemanticException(ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg(expr));
          }
       
          // Calculate TypeInfo
          TypeInfo t = ((ListTypeInfo)myt).getListElementTypeInfo();
          desc = new exprNodeGenericFuncDesc(t,
              FunctionRegistry.getGenericUDFForIndex(),
              children);
        }
        else if (myt.getCategory() == Category.MAP) {
          // Only allow constant indexes for now
          if (!(children.get(1) instanceof exprNodeConstantDesc)) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg(expr));
          }
          if (!(((exprNodeConstantDesc)children.get(1)).getTypeInfo().equals(
              ((MapTypeInfo)myt).getMapKeyTypeInfo()))) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE.getMsg(expr));
          }
          // Calculate TypeInfo
          TypeInfo t = ((MapTypeInfo)myt).getMapValueTypeInfo();
          desc = new exprNodeGenericFuncDesc(t,
              FunctionRegistry.getGenericUDFForIndex(),
              children);
        }
        else {
          throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr,

Examples of org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc

      for(; i < preds.size(); i++) {
        List<exprNodeDesc> children = new ArrayList<exprNodeDesc>(2);
        children.add(condn);
        children.add((exprNodeDesc) preds.get(i));
        condn = new exprNodeGenericFuncDesc(
                                            TypeInfoFactory.booleanTypeInfo,
                                            FunctionRegistry.getGenericUDFForAnd(),
                                            children
                                            );
      }
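The loop above folds a list of boolean predicates into a single left-deep AND expression. The same construction written against the current capitalized API, as a self-contained sketch (the helper name andAll is illustrative):

  private static ExprNodeDesc andAll(List<ExprNodeDesc> preds) {
    ExprNodeDesc condn = preds.get(0);
    for (int i = 1; i < preds.size(); i++) {
      List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
      children.add(condn);
      children.add(preds.get(i));
      // each AND node is boolean-typed and wraps the running conjunction plus the next predicate
      condn = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
          FunctionRegistry.getGenericUDFForAnd(), children);
    }
    return condn;
  }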

Examples of org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx;
      String alias = null;
      exprNodeGenericFuncDesc expr = (exprNodeGenericFuncDesc) nd;

     
      if (!FunctionRegistry.isDeterministic(expr.getGenericUDF())) {
        // this GenericUDF can't be pushed down
        ctx.setIsCandidate(expr, false);
        ctx.setDeterministic(false);
        return false;
      }
     
      boolean isCandidate = true;
      for (int i=0; i < nd.getChildren().size(); i++) {
        exprNodeDesc ch = (exprNodeDesc) nd.getChildren().get(i);
        exprNodeDesc newCh = ctx.getConvertedNode(ch);
        if (newCh != null) {
          expr.getChildExprs().set(i, newCh);
          ch = newCh;
        }
        String chAlias = ctx.getAlias(ch);
       
        isCandidate = isCandidate && ctx.isCandidate(ch);
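This processor decides whether a function expression is a candidate for predicate pushdown: a non-deterministic GenericUDF disqualifies the whole expression, and otherwise the expression remains a candidate only while all of its (possibly rewritten) children are candidates. A compressed sketch of that rule, written against the newer class names and omitting the alias bookkeeping of the full code:

  private static boolean isPushdownCandidate(ExprNodeGenericFuncDesc expr, ExprWalkerInfo ctx) {
    if (!FunctionRegistry.isDeterministic(expr.getGenericUDF())) {
      return false; // non-deterministic UDFs are never pushed down
    }
    for (ExprNodeDesc child : expr.getChildExprs()) {
      if (!ctx.isCandidate(child)) {
        return false; // every child must itself be pushable
      }
    }
    return true;
  }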