Examples of ExprNodeGenericFuncDesc


Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

    for (Map.Entry<Byte, List<ExprNodeDesc>> entry : filterMap.entrySet()) {
      Byte srcAlias = entry.getKey();
      List<ExprNodeDesc> columnDescList = entry.getValue();

      for (ExprNodeDesc nodeExpr : columnDescList) {
        ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) nodeExpr;
        for (ExprNodeDesc childDesc : funcDesc.getChildExprs()) {
          if (!(childDesc instanceof ExprNodeColumnDesc)) {
            continue;
          }
          ExprNodeColumnDesc columnDesc = (ExprNodeColumnDesc) childDesc;
          // reset columns
View Full Code Here
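
The excerpt above inspects only the immediate children of each filter expression and skips anything that is not an ExprNodeColumnDesc. Because predicate trees nest, a common companion pattern is a recursive walk that collects every column referenced anywhere under an ExprNodeGenericFuncDesc. Below is a minimal, self-contained sketch of that pattern, written against the same older Hive API these excerpts use (getChildExprs(), getColumn()); the class and method names are ours.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;

    public final class ExprColumnCollector {
      /** Recursively gathers the column names referenced by an expression tree. */
      public static List<String> collectColumns(ExprNodeDesc expr) {
        List<String> columns = new ArrayList<String>();
        collect(expr, columns);
        return columns;
      }

      private static void collect(ExprNodeDesc expr, List<String> columns) {
        if (expr instanceof ExprNodeColumnDesc) {
          // Leaf node: a direct column reference.
          columns.add(((ExprNodeColumnDesc) expr).getColumn());
        } else if (expr instanceof ExprNodeGenericFuncDesc) {
          // Function node: recurse into every argument.
          for (ExprNodeDesc child : ((ExprNodeGenericFuncDesc) expr).getChildExprs()) {
            collect(child, columns);
          }
        }
        // Constants and other node types contribute no columns.
      }
    }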

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

      }
    }

    ExprNodeDesc equalsExpr = null;
    {
      ExprNodeDesc hashfnExpr = new ExprNodeGenericFuncDesc(
          TypeInfoFactory.intTypeInfo, new GenericUDFHash(), args);
      assert (hashfnExpr != null);
      LOG.info("hashfnExpr = " + hashfnExpr);
      ExprNodeDesc andExpr = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("&", hashfnExpr, intMaxExpr);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

                  ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg()));
          }

          // Calculate TypeInfo
          TypeInfo t = ((ListTypeInfo) myt).getListElementTypeInfo();
          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
              .getGenericUDFForIndex(), children);
        } else if (myt.getCategory() == Category.MAP) {
          // Only allow constant map key for now
          if (!(children.get(1) instanceof ExprNodeConstantDesc)) {
            throw new SemanticException(SemanticAnalyzer.generateErrorMessage(
                  expr,
                  ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg()));
          }
          if (!(((ExprNodeConstantDesc) children.get(1)).getTypeInfo()
              .equals(((MapTypeInfo) myt).getMapKeyTypeInfo()))) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE
                .getMsg(expr));
          }
          // Calculate TypeInfo
          TypeInfo t = ((MapTypeInfo) myt).getMapValueTypeInfo();
          desc = new ExprNodeGenericFuncDesc(t, FunctionRegistry
              .getGenericUDFForIndex(), children);
        } else {
          throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr,
              myt.getTypeName()));
        }
View Full Code Here
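
This fragment is the semantic-analysis step that turns an index access such as myList[1] or myMap['k'] into an ExprNodeGenericFuncDesc over the index GenericUDF, with the list element (or map value) type as the result type. A hedged, self-contained sketch of the same construction for a constant index into an array<string> column follows; the column name arr_col is an assumption for illustration only.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class BuildIndexExpr {
      public static void main(String[] args) {
        // arr_col is assumed to be an array<string> column, so arr_col[0] yields a string.
        ExprNodeDesc listColumn = new ExprNodeColumnDesc(
            TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo),
            "arr_col", null, false);
        ExprNodeDesc index = new ExprNodeConstantDesc(0);

        List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
        children.add(listColumn);
        children.add(index);

        // The result type is the list element type, mirroring the excerpt above.
        ExprNodeGenericFuncDesc indexExpr = new ExprNodeGenericFuncDesc(
            TypeInfoFactory.stringTypeInfo,
            FunctionRegistry.getGenericUDFForIndex(), children);

        System.out.println(indexExpr.getExprString());
      }
    }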

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

      for (; i < preds.size(); i++) {
        List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
        children.add(condn);
        children.add(preds.get(i));
        condn = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
            FunctionRegistry.getGenericUDFForAnd(), children);
      }
    }

    if (condn == null) {
View Full Code Here
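
The loop above left-folds a list of predicates into a single conjunction, wrapping each step in a new ExprNodeGenericFuncDesc over the AND GenericUDF. Here is the same folding pattern extracted into a small self-contained helper; the class and method names are ours, and all inputs are assumed to be boolean-typed expressions.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public final class PredicateFolder {
      /** Folds boolean predicates into p1 AND p2 AND ... (returns null for an empty list). */
      public static ExprNodeDesc andAll(List<ExprNodeDesc> preds) {
        ExprNodeDesc condn = null;
        for (ExprNodeDesc pred : preds) {
          if (condn == null) {
            condn = pred;
            continue;
          }
          List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>(2);
          children.add(condn);
          children.add(pred);
          condn = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
              FunctionRegistry.getGenericUDFForAnd(), children);
        }
        return condn;
      }
    }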

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

   */
  private static Class<? extends GenericUDF> getGenericUDFClassFromExprDesc(ExprNodeDesc desc) {
    if (!(desc instanceof ExprNodeGenericFuncDesc)) {
      return null;
    }
    ExprNodeGenericFuncDesc genericFuncDesc = (ExprNodeGenericFuncDesc) desc;
    return genericFuncDesc.getGenericUDF().getClass();
  }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

   */
  private static Class<? extends UDF> getUDFClassFromExprDesc(ExprNodeDesc desc) {
    if (!(desc instanceof ExprNodeGenericFuncDesc)) {
      return null;
    }
    ExprNodeGenericFuncDesc genericFuncDesc = (ExprNodeGenericFuncDesc) desc;
    if (!(genericFuncDesc.getGenericUDF() instanceof GenericUDFBridge)) {
      return null;
    }
    GenericUDFBridge bridge = (GenericUDFBridge) (genericFuncDesc
        .getGenericUDF());
    return bridge.getUdfClass();
  }
View Full Code Here
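
Together with the previous example, this shows the two ways the planner records which function an expression calls: a native GenericUDF is held directly on the ExprNodeGenericFuncDesc, while a legacy UDF is wrapped in a GenericUDFBridge and exposed through getUdfClass(). A hedged usage sketch that combines both checks to test whether an expression is a call to a given implementation class (the class and method names are ours):

    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;

    public final class UdfClassMatcher {
      /** Returns true if expr is a function call whose implementing class is udfClass. */
      public static boolean isCallTo(ExprNodeDesc expr, Class<?> udfClass) {
        if (!(expr instanceof ExprNodeGenericFuncDesc)) {
          return false;
        }
        GenericUDF udf = ((ExprNodeGenericFuncDesc) expr).getGenericUDF();
        if (udf instanceof GenericUDFBridge) {
          // Legacy UDFs are wrapped in a bridge; compare against the wrapped class.
          return udfClass.equals(((GenericUDFBridge) udf).getUdfClass());
        }
        return udfClass.equals(udf.getClass());
      }
    }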

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      ExprWalkerInfo ctx = (ExprWalkerInfo) procCtx;
      String alias = null;
      ExprNodeGenericFuncDesc expr = (ExprNodeGenericFuncDesc) nd;

      if (!FunctionRegistry.isDeterministic(expr.getGenericUDF())) {
        // this GenericUDF can't be pushed down
        ctx.setIsCandidate(expr, false);
        ctx.setDeterministic(false);
        return false;
      }

      boolean isCandidate = true;
      for (int i = 0; i < nd.getChildren().size(); i++) {
        ExprNodeDesc ch = (ExprNodeDesc) nd.getChildren().get(i);
        ExprNodeDesc newCh = ctx.getConvertedNode(ch);
        if (newCh != null) {
          expr.getChildExprs().set(i, newCh);
          ch = newCh;
        }
        String chAlias = ctx.getAlias(ch);

        isCandidate = isCandidate && ctx.isCandidate(ch);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

        assert(false); // cannot find the partition column!
      } else {
        return false;
      }
    } else if (expr instanceof ExprNodeGenericFuncDesc) {
      ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) expr;
      GenericUDF func = funcDesc.getGenericUDF();
      if (!supportedJDOFuncs(func)) {
        return false;
      }
      List<ExprNodeDesc> children = funcDesc.getChildExprs();
      for (ExprNodeDesc child: children) {
        if (!checkJDOPushDown(tab, child)) {
          return false;
        }
      }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

    Map<Integer, List<ExprNodeGenericFuncDesc>> result =
        new HashMap<Integer, List<ExprNodeGenericFuncDesc>>();
    for (int childIndex = 1; childIndex < ast.getChildCount(); childIndex++) {
      Tree partSpecTree = ast.getChild(childIndex);
      if (partSpecTree.getType() != HiveParser.TOK_PARTSPEC) continue;
      ExprNodeGenericFuncDesc expr = null;
      HashSet<String> names = new HashSet<String>(partSpecTree.getChildCount());
      for (int i = 0; i < partSpecTree.getChildCount(); ++i) {
        CommonTree partSpecSingleKey = (CommonTree) partSpecTree.getChild(i);
        assert (partSpecSingleKey.getType() == HiveParser.TOK_PARTVAL);
        String key = partSpecSingleKey.getChild(0).getText().toLowerCase();
        String operator = partSpecSingleKey.getChild(1).getText();
        String val = stripQuotes(partSpecSingleKey.getChild(2).getText());

        String type = colTypes.get(key);
        if (type == null) {
          throw new SemanticException("Column " + key + " not found");
        }
        // Create the corresponding hive expression to filter on partition columns.
        ExprNodeColumnDesc column = new ExprNodeColumnDesc(
            TypeInfoFactory.getPrimitiveTypeInfo(type), key, null, true);
        ExprNodeGenericFuncDesc op = makeBinaryPredicate(
            operator, column, new ExprNodeConstantDesc(val));
        // If it's multi-expr filter (e.g. a='5', b='2012-01-02'), AND with previous exprs.
        expr = (expr == null) ? op : makeBinaryPredicate("and", expr, op);
        names.add(key);
      }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc

    return result;
  }

  private static ExprNodeGenericFuncDesc makeBinaryPredicate(
      String fn, ExprNodeDesc left, ExprNodeDesc right) {
    return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        FunctionRegistry.getFunctionInfo(fn).getGenericUDF(), Lists.newArrayList(left, right));
  }
View Full Code Here
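
The last two excerpts work as a pair: the parser loop builds one equality predicate per partition key via makeBinaryPredicate and ANDs them together, and makeBinaryPredicate itself looks the operator up in the FunctionRegistry. Below is a minimal, self-contained sketch of the same composition for a two-key spec such as (ds = '2012-01-02', hr = '12'), written against the same older Hive API the excerpts use; the partition column names ds and hr and their string types are assumptions for illustration.

    import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    import com.google.common.collect.Lists;

    public class PartitionFilterExample {
      private static ExprNodeGenericFuncDesc makeBinaryPredicate(
          String fn, ExprNodeDesc left, ExprNodeDesc right) {
        return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
            FunctionRegistry.getFunctionInfo(fn).getGenericUDF(),
            Lists.newArrayList(left, right));
      }

      public static void main(String[] args) {
        // ds = '2012-01-02'
        ExprNodeGenericFuncDesc dsEq = makeBinaryPredicate("=",
            new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "ds", null, true),
            new ExprNodeConstantDesc("2012-01-02"));
        // hr = '12'
        ExprNodeGenericFuncDesc hrEq = makeBinaryPredicate("=",
            new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "hr", null, true),
            new ExprNodeConstantDesc("12"));
        // AND the single-column predicates, as the excerpt does for multi-key specs.
        ExprNodeGenericFuncDesc filter = makeBinaryPredicate("and", dsEq, hrEq);

        System.out.println(filter.getExprString());
      }
    }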