Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.ExprNodeDesc
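
ExprNodeDesc is the serializable base class Hive uses to describe expression trees in a query plan. The subclasses that appear in the snippets below are ExprNodeColumnDesc (a column reference), ExprNodeConstantDesc (a literal), and ExprNodeGenericFuncDesc (a function call over child expressions).

As a minimal, self-contained sketch of the same APIs the snippets use (not taken from this listing; the class name ExprNodeDescDemo and the column names c1/c2 are invented for illustration), this builds the expression tree for c1 == c2 by hand:

    import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class ExprNodeDescDemo {
      public static void main(String[] args) throws Exception {
        // A column reference: (type, internal column name, table alias, isVirtualCol).
        ExprNodeDesc c1 = new ExprNodeColumnDesc(
            TypeInfoFactory.stringTypeInfo, "c1", "", false);
        ExprNodeDesc c2 = new ExprNodeColumnDesc(
            TypeInfoFactory.stringTypeInfo, "c2", "", false);
        // Resolve the "==" UDF and wrap both children in a function descriptor.
        ExprNodeDesc eq = TypeCheckProcFactory.DefaultExprProcessor
            .getFuncExprNodeDesc("==", c1, c2);
        // Print the human-readable form of the expression.
        System.out.println(eq.getExprString());
      }
    }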


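Resolving an ASTNode into an ExprNodeDesc: pre-computed sub-expressions (as produced by Group-By) are served from a cache before falling back to a full type-check pass: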
    // If the sub-expression is a constant, we convert it into an
    // ExprNodeConstantDesc; for others we just build the function descriptor
    // (ExprNodeGenericFuncDesc) with recursively built children.

    // If the current subExpression is pre-calculated, as in Group-By etc.
    ExprNodeDesc cached = getExprNodeDescCached(expr, input);
    if (cached == null) {
      Map<ASTNode, ExprNodeDesc> allExprs = genAllExprNodeDesc(expr, input, tcCtx);
      return allExprs.get(expr);
    }
    return cached;


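The type-check pass itself: TypeCheckProcFactory.genExprNode walks the AST and returns a descriptor per node; a missing entry means the expression failed to parse: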
    // Create the walker and the rules dispatcher.
    tcCtx.setUnparseTranslator(unparseTranslator);

    Map<ASTNode, ExprNodeDesc> nodeOutputs =
        TypeCheckProcFactory.genExprNode(expr, tcCtx);
    ExprNodeDesc desc = nodeOutputs.get(expr);
    if (desc == null) {
      String errMsg = tcCtx.getError();
      if (errMsg == null) {
        errMsg = "Error in parsing ";
      }
      throw new SemanticException(errMsg);
    }


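Building the partition and order columns for a PTF/windowing ReduceSink; the order string records '+' for ascending and '-' for descending, and every input column is re-exposed as an ExprNodeColumnDesc value column: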
    List<String> outputColumnNames = new ArrayList<String>();
    StringBuilder orderString = new StringBuilder();

    ArrayList<PartitionExpression> partColList = spec.getQueryPartitionSpec().getExpressions();
    for (PartitionExpression partCol : partColList) {
      ExprNodeDesc partExpr = genExprNodeDesc(partCol.getExpression(), inputRR);
      partCols.add(partExpr);
      orderCols.add(partExpr);
      orderString.append('+');
    }

    ArrayList<OrderExpression> orderColList = spec.getQueryOrderSpec() == null
        ? new ArrayList<PTFInvocationSpec.OrderExpression>()
        : spec.getQueryOrderSpec().getExpressions();
    for (OrderExpression orderCol : orderColList) {
      PTFInvocationSpec.Order order = orderCol.getOrder();
      orderString.append(order == PTFInvocationSpec.Order.ASC ? '+' : '-');
      ExprNodeDesc orderExpr = genExprNodeDesc(orderCol.getExpression(), inputRR);
      orderCols.add(orderExpr);
    }

    ArrayList<ColumnInfo> colInfoList = inputRR.getColumnInfos();
    RowResolver rsNewRR = new RowResolver();
    int pos = 0;
    for (ColumnInfo colInfo : colInfoList) {
        ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo.getType(),
            colInfo.getInternalName(), colInfo.getTabAlias(),
            colInfo.getIsVirtualCol());
        valueCols.add(valueColExpr);
        colExprMap.put(colInfo.getInternalName(), valueColExpr);
        String outColName = SemanticAnalyzer.getColumnInternalName(pos++);

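On the execution side, ExprNodeGenericFuncEvaluator wraps one child evaluator per child expression and marks itself eager if any descendant evaluator is eager: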
  public ExprNodeGenericFuncEvaluator(ExprNodeGenericFuncDesc expr) throws HiveException {
    super(expr);
    children = new ExprNodeEvaluator[expr.getChildExprs().size()];
    isEager = false;
    for (int i = 0; i < children.length; i++) {
      ExprNodeDesc child = expr.getChildExprs().get(i);
      ExprNodeEvaluator nodeEvaluator = ExprNodeEvaluatorFactory.get(child);
      children[i] = nodeEvaluator;
      // If we have eager evaluators anywhere below us, then we are eager too.
      if (nodeEvaluator instanceof ExprNodeGenericFuncEvaluator) {
        if (((ExprNodeGenericFuncEvaluator) nodeEvaluator).isEager) {
          isEager = true;
        }
      }
    }

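Building an equality predicate over two column references and wiring it into a filter operator, apparently from Hive's plan tests: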
    final String F1 = "#affiliations";
    final String F2 = "friends[0].friendid";

    try {
      // initialize a complete map reduce configuration
      ExprNodeDesc expr1 = new ExprNodeColumnDesc(
          TypeInfoFactory.stringTypeInfo, F1, "", false);
      ExprNodeDesc expr2 = new ExprNodeColumnDesc(
          TypeInfoFactory.stringTypeInfo, F2, "", false);
      ExprNodeDesc filterExpr = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("==", expr1, expr2);

      FilterDesc filterCtx = new FilterDesc(filterExpr, false);
      Operator<FilterDesc> op = OperatorFactory.get(FilterDesc.class);
      op.setConf(filterCtx);


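A compound predicate, (col2 > col1) AND (col0 == '0'), assembled bottom-up with getFuncExprNodeDesc: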
  public void testBaseFilterOperator() throws Throwable {
    try {
      System.out.println("Testing Filter Operator");
      ExprNodeDesc col0 = TestExecDriver.getStringColumn("col0");
      ExprNodeDesc col1 = TestExecDriver.getStringColumn("col1");
      ExprNodeDesc col2 = TestExecDriver.getStringColumn("col2");
      ExprNodeDesc zero = new ExprNodeConstantDesc("0");
      ExprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc(">", col2, col1);
      ExprNodeDesc func2 = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("==", col0, zero);
      ExprNodeDesc func3 = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("and", func1, func2);
      assert (func3 != null);
      FilterDesc filterCtx = new FilterDesc(func3, false);

      // Configuration

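Projecting a plain column next to a computed one, concat(col0, '1'), as input to a script operator: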
  public void testScriptOperator() throws Throwable {
    try {
      System.out.println("Testing Script Operator");
      // col1
      ExprNodeDesc exprDesc1 = TestExecDriver.getStringColumn("col1");

      // col2
      ExprNodeDesc expr1 = TestExecDriver.getStringColumn("col0");
      ExprNodeDesc expr2 = new ExprNodeConstantDesc("1");
      ExprNodeDesc exprDesc2 = TypeCheckProcFactory.DefaultExprProcessor
          .getFuncExprNodeDesc("concat", expr1, expr2);

      // select operator to project these two columns
      ArrayList<ExprNodeDesc> earr = new ArrayList<ExprNodeDesc>();
      earr.add(exprDesc1);
      earr.add(exprDesc2);

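Round-tripping a timestamp constant through Utilities.serializeExpression and deserializeExpression preserves the expression string: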
    Timestamp ts = new Timestamp(1374554702000L);
    ts.setNanos(123456);
    ExprNodeConstantDesc constant = new ExprNodeConstantDesc(
        TypeInfoFactory.timestampTypeInfo, ts);
    String serialized = Utilities.serializeExpression(constant);
    ExprNodeDesc deserialized = Utilities.deserializeExpression(serialized, new Configuration());
    assertEquals(constant.getExprString(), deserialized.getExprString());
  }

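Deserializing an XML-serialized expression (the string literal is truncated in this listing) and converting it into an ORC SearchArgument with nine predicate leaves: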
        "   <object idref=\"PrimitiveTypeInfo1\"/> \n" +
        "  </void> \n" +
        " </object> \n" +
        "</java> \n";
    Configuration conf = new Configuration();
    ExprNodeDesc expr = Utilities.deserializeExpression(exprStr, conf);
    SearchArgumentImpl sarg =
        (SearchArgumentImpl) SearchArgument.FACTORY.create(expr);
    List<PredicateLeaf> leaves = sarg.getLeaves();
    assertEquals(9, leaves.size());
