Examples of ExprNodeDesc


Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

    TypeCheckCtx tcCtx = new TypeCheckCtx(input);
    tcCtx.setUnparseTranslator(unparseTranslator);

    HashMap<Node, Object> nodeOutputs =
      TypeCheckProcFactory.genExprNode(expr, tcCtx);
    ExprNodeDesc desc = (ExprNodeDesc) nodeOutputs.get(expr);
    if (desc == null) {
      throw new SemanticException(tcCtx.getError());
    }

    if (!unparseTranslator.isEnabled()) {
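
The snippet above is the standard compile-time path from an ASTNode expression to an ExprNodeDesc tree: wrap the current RowResolver in a TypeCheckCtx, let TypeCheckProcFactory.genExprNode walk the AST, and look up the root node in the returned map. A minimal helper in the same spirit (a sketch only; the method name is made up and the UnparseTranslator handling from the original is omitted):

    static ExprNodeDesc toExprNodeDesc(ASTNode expr, RowResolver input)
        throws SemanticException {
      TypeCheckCtx tcCtx = new TypeCheckCtx(input);
      // genExprNode produces an ExprNodeDesc for (roughly) every node in the AST;
      // the entry for the root node is the full expression.
      HashMap<Node, Object> nodeOutputs = TypeCheckProcFactory.genExprNode(expr, tcCtx);
      ExprNodeDesc desc = (ExprNodeDesc) nodeOutputs.get(expr);
      if (desc == null) {
        // The reason for the failure is recorded on the context.
        throw new SemanticException(tcCtx.getError());
      }
      return desc;
    }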

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

    IndexPredicateAnalyzer analyzer =
      HiveHBaseTableInputFormat.newIndexPredicateAnalyzer(
        columnNames.get(hbaseSerde.getKeyColumnOffset()));
    List<IndexSearchCondition> searchConditions =
      new ArrayList<IndexSearchCondition>();
    ExprNodeDesc residualPredicate =
      analyzer.analyzePredicate(predicate, searchConditions);
    if (searchConditions.size() != 1) {
      // Either there was nothing which could be pushed down (size = 0),
      // or more than one predicate (size > 1); in the latter case,
      // we bail out for now since multiple lookups on the key are
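
For context, a factory such as newIndexPredicateAnalyzer typically does little more than restrict the analyzer to the key column and register the comparison UDFs the storage handler can translate into scans. A sketch under those assumptions (the exact set of operators varies by handler and Hive version):

    static IndexPredicateAnalyzer makeKeyPredicateAnalyzer(String keyColumnName) {
      IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
      // Only predicates over the key column are candidates for push-down.
      analyzer.allowColumnName(keyColumnName);
      // Comparisons the handler knows how to turn into key lookups or ranges.
      analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual");
      analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan");
      analyzer.addComparisonOp("org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan");
      return analyzer;
    }

analyzePredicate then splits the filter: everything it can translate goes into searchConditions, and whatever remains comes back as the residual predicate for Hive to evaluate itself.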

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

    // construct column name list for reference by filter push down
    Utilities.setColumnNameList(jobConf, tableScan);

    // push down filters
    ExprNodeDesc filterExpr = scanDesc.getFilterExpr();
    if (filterExpr == null) {
      return;
    }

    String filterText = filterExpr.getExprString();
    String filterExprSerialized = Utilities.serializeExpression(filterExpr);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Filter text = " + filterText);
      LOG.debug("Filter expression = " + filterExprSerialized);
    }
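
The text and serialized forms computed above are handed to the input format through the job configuration. A short sketch of the hand-off; TableScanDesc.FILTER_EXPR_CONF_STR is the property read back by the HBase input format in a later snippet, while the FILTER_TEXT_CONF_STR name for the human-readable form is an assumption to verify:

    // Stash the filter in the JobConf so the record reader can rebuild the tree later.
    jobConf.set(TableScanDesc.FILTER_TEXT_CONF_STR, filterText);           // assumed constant name
    jobConf.set(TableScanDesc.FILTER_EXPR_CONF_STR, filterExprSerialized); // read back via Utilities.deserializeExpression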

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

      throw new RuntimeException("UTF-8 support required", ex);
    }
    ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
    XMLDecoder decoder = new XMLDecoder(bais, null, null, conf.getClassLoader());
    try {
      ExprNodeDesc expr = (ExprNodeDesc) decoder.readObject();
      return expr;
    } finally {
      decoder.close();
    }
  }
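
The decode helper above has a matching encode side. A simplified sketch of what a serializer such as Utilities.serializeExpression does (not the exact Hive implementation): ExprNodeDesc and its subclasses are Java beans, so XMLEncoder can persist the whole expression tree as an XML string.

    static String serializeExpressionSketch(ExprNodeDesc expr) {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      XMLEncoder encoder = new XMLEncoder(baos);
      try {
        encoder.writeObject(expr);
      } finally {
        encoder.close();
      }
      try {
        return new String(baos.toByteArray(), "UTF-8");
      } catch (UnsupportedEncodingException ex) {
        // Mirrors the guard in the decode snippet above.
        throw new RuntimeException("UTF-8 support required", ex);
      }
    }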

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

  private FilterDesc getTestFilterDesc(String column) {
    ArrayList<ExprNodeDesc> children1 = new ArrayList<ExprNodeDesc>();
    children1.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo,
        column, "", false));
    ExprNodeDesc lhs = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
        Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children1);

    ArrayList<ExprNodeDesc> children2 = new ArrayList<ExprNodeDesc>();
    children2.add(new ExprNodeConstantDesc(TypeInfoFactory.longTypeInfo, Long
        .valueOf(100)));
    ExprNodeDesc rhs = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.doubleTypeInfo, FunctionRegistry.getFunctionInfo(
        Constants.DOUBLE_TYPE_NAME).getGenericUDF(), children2);

    ArrayList<ExprNodeDesc> children3 = new ArrayList<ExprNodeDesc>();
    children3.add(lhs);
    children3.add(rhs);

    ExprNodeDesc desc = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo, FunctionRegistry.getFunctionInfo("<")
        .getGenericUDF(), children3);

    return new FilterDesc(desc, false);
  }
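
Once a predicate like the one built above exists, it can be compiled into an evaluator and run against rows. A sketch of that step (the ExprNodeEvaluator and ObjectInspector calls below are standard Hive/serde2 classes, but they do not appear elsewhere on this page, so treat the exact signatures as assumptions to verify):

    // Evaluate the test predicate UDFToDouble(key) < UDFToDouble(100L) for one row.
    static boolean evaluateTestFilter(String keyValue) throws HiveException {
      ExprNodeDesc predicate = getTestFilterDesc("key").getPredicate();
      ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(predicate);

      // Rows are structs with a single string column named "key".
      ObjectInspector rowInspector =
          ObjectInspectorFactory.getStandardStructObjectInspector(
              Arrays.asList("key"),
              Arrays.<ObjectInspector>asList(
                  PrimitiveObjectInspectorFactory.javaStringObjectInspector));
      ObjectInspector resultInspector = evaluator.initialize(rowInspector);

      Object result = evaluator.evaluate(Arrays.asList(keyValue));
      return Boolean.TRUE.equals(
          ObjectInspectorUtils.copyToStandardJavaObject(result, resultInspector));
    }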

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

        assert (children.size() == 1);
        assert (children.get(0) != null);
        return children.get(0);
      }
      String funcText = getFunctionText(expr, isFunction);
      ExprNodeDesc desc;
      if (funcText.equals(".")) {
        // "." : FIELD Expression
        assert (children.size() == 2);
        // Only allow constant field name for now
        assert (children.get(1) instanceof ExprNodeConstantDesc);
        ExprNodeDesc object = children.get(0);
        ExprNodeConstantDesc fieldName = (ExprNodeConstantDesc) children.get(1);
        assert (fieldName.getValue() instanceof String);

        // Calculate result TypeInfo
        String fieldNameString = (String) fieldName.getValue();
        TypeInfo objectTypeInfo = object.getTypeInfo();

        // Allow accessing a field of list element structs directly from a list
        boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST);
        if (isList) {
          objectTypeInfo = ((ListTypeInfo) objectTypeInfo)
              .getListElementTypeInfo();
        }
        if (objectTypeInfo.getCategory() != Category.STRUCT) {
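
For a struct-typed object, the branch above ends by wrapping the object expression and the constant field name into a field-access descriptor. A sketch of that construction (the ExprNodeFieldDesc constructor arguments follow this snippet's logic but should be verified against your Hive version):

        // Build the field-access node once objectTypeInfo is known to be a struct.
        TypeInfo fieldTypeInfo =
            ((StructTypeInfo) objectTypeInfo).getStructFieldTypeInfo(fieldNameString);
        if (isList) {
          // list_of_structs.field yields a list of the field values.
          fieldTypeInfo = TypeInfoFactory.getListTypeInfo(fieldTypeInfo);
        }
        desc = new ExprNodeFieldDesc(fieldTypeInfo, object, fieldNameString, isList);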

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;

      ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        // Here we know nd represents a group by expression.

        // During the DFS traversal of the AST, a descendant of nd likely set an
        // error because a sub-tree of nd is unlikely to also be a group by

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

    String filterExprSerialized =
      jobConf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
    if (filterExprSerialized == null) {
      return tableSplit;
    }
    ExprNodeDesc filterExpr =
      Utilities.deserializeExpression(filterExprSerialized, jobConf);

    String colName = jobConf.get(serdeConstants.LIST_COLUMNS).split(",")[iKey];
    String colType = jobConf.get(serdeConstants.LIST_COLUMN_TYPES).split(",")[iKey];
    IndexPredicateAnalyzer analyzer = newIndexPredicateAnalyzer(colName,colType, isKeyBinary);

    List<IndexSearchCondition> searchConditions =
      new ArrayList<IndexSearchCondition>();
    ExprNodeDesc residualPredicate =
      analyzer.analyzePredicate(filterExpr, searchConditions);

    // There should be no residual since we already negotiated
    // that earlier in HBaseStorageHandler.decomposePredicate.
    if (residualPredicate != null) {
      throw new RuntimeException(
        "Unexpected residual predicate " + residualPredicate.getExprString());
    }

    // There should be one or two search conditions (an exact key lookup,
    // or a lower and upper bound for a range scan), since we already
    // negotiated that also.
    if (searchConditions.size() < 1 || searchConditions.size() > 2) {
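
After these checks, the reader turns each IndexSearchCondition back into a concrete key bound. A sketch of the consumption side (the getter names on IndexSearchCondition are not shown on this page, so treat them as assumptions):

    for (IndexSearchCondition condition : searchConditions) {
      String columnName = condition.getColumnDesc().getColumn();      // the HBase key column
      String comparisonOp = condition.getComparisonOp();              // GenericUDF class name, e.g. ...GenericUDFOPEqual
      Object constantValue = condition.getConstantDesc().getValue();  // the literal to compare the key against
      // One condition means an exact key lookup; two (on a binary key) define
      // the lower and upper bounds of a range scan.
    }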

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

    // build the exprNodeFuncDesc with recursively built children.
    ASTNode expr = (ASTNode) nd;
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;

    if (ctx == null) {
      return null;
    }

    RowResolver input = ctx.getInputRR();
    if (input == null) {
      return null;
    }

    ExprNodeDesc desc = null;

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc

      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
      if (ctx.getError() != null) {
        return null;
      }

      ExprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }

      return new ExprNodeNullDesc();