Examples of exprNodeIndexDesc


Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

            throw new SemanticException(ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg(expr));
          }
       
          // Calculate TypeInfo
          TypeInfo t = myt.getListElementTypeInfo();
          desc = new exprNodeIndexDesc(t, children.get(0), children.get(1));
        }
        else if (myt.getCategory() == Category.MAP) {
          // Only allow constant indexes for now
          if (!(children.get(1) instanceof exprNodeConstantDesc)) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg(expr));
          }
          if (!(((exprNodeConstantDesc)children.get(1)).getValue().getClass() ==
                myt.getMapKeyTypeInfo().getPrimitiveClass())) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE.getMsg(expr));
          }
          // Calculate TypeInfo
          TypeInfo t = myt.getMapValueTypeInfo();
         
          desc = new exprNodeIndexDesc(t, children.get(0), children.get(1));
        }
        else {
          throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr,
              myt.getTypeName()));
        }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

      exprNodeConstantDesc d = (exprNodeConstantDesc)desc;
      return d.getValue() == null;
    } else if (desc instanceof exprNodeNullDesc) {
      return false;
    } else if (desc instanceof exprNodeIndexDesc) {
      exprNodeIndexDesc d = (exprNodeIndexDesc)desc;
      return mightBeUnknown(d.getDesc()) || mightBeUnknown(d.getIndex());
    } else if (desc instanceof exprNodeFieldDesc) {
      exprNodeFieldDesc d = (exprNodeFieldDesc)desc;
      return mightBeUnknown(d.getDesc());
    } else if (desc instanceof exprNodeFuncDesc) {
      exprNodeFuncDesc d = (exprNodeFuncDesc)desc;
      for(int i=0; i<d.getChildren().size(); i++) {
        if (mightBeUnknown(d.getChildren().get(i))) {
          return true;
        }
      }
      return false;
    } else if (desc instanceof exprNodeColumnDesc) {
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

  public void testExprNodeFuncEvaluator() throws Throwable {
    try {
      // get an evaluator for a string concatenation expression
      exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
      exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola");
      exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
      exprNodeDesc cola0desc = new exprNodeIndexDesc(coladesc, new exprNodeConstantDesc(new Integer(0)));
      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat", col11desc, cola0desc);
      ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);

      // evaluate on row
      InspectableObject result = new InspectableObject();
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

  public void testExprNodeConversionEvaluator() throws Throwable {
    try {
      // get an evaluator for a type conversion (String -> Double) expression
      exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
      exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
      exprNodeDesc func1 = TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc(Double.class.getName(), col11desc);
      ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);

      // evaluate on row
      InspectableObject result = new InspectableObject();
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

      exprNodeDesc constant2 = new exprNodeConstantDesc(2);
      measureSpeed("concat(col1[1], cola[1])",
          basetimes * 10,
          ExprNodeEvaluatorFactory.get(
              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                  new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
                  new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1))),
          r,
          "1b");
      measureSpeed("concat(concat(col1[1], cola[1]), col1[2])",
          basetimes * 10,
          ExprNodeEvaluatorFactory.get(
              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                      new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
                      new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1)),
                  new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant2))),
          r,
          "1b2");
      measureSpeed("concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])",
          basetimes * 10,
          ExprNodeEvaluatorFactory.get(
              TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                  TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                      TypeCheckProcFactory.DefaultExprProcessor.getFuncExprNodeDesc("concat",
                          new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
                          new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1)),
                      new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant2)),
                  new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant2))),
          r,
          "1b2c");
     
    } catch (Throwable e) {
      e.printStackTrace();
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

  public void testExprNodeFuncEvaluator() throws Throwable {
    try {
      // get an evaluator for a string concatenation expression
      exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
      exprNodeDesc coladesc = new exprNodeColumnDesc(colaType, "cola");
      exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
      exprNodeDesc cola0desc = new exprNodeIndexDesc(coladesc, new exprNodeConstantDesc(new Integer(0)));
      exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc("concat", col11desc, cola0desc);
      ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);

      // evaluate on row
      InspectableObject result = new InspectableObject();
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

  public void testExprNodeConversionEvaluator() throws Throwable {
    try {
      // get an evaluator for a type conversion (String -> Double) expression
      exprNodeDesc col1desc = new exprNodeColumnDesc(col1Type, "col1");
      exprNodeDesc col11desc = new exprNodeIndexDesc(col1desc, new exprNodeConstantDesc(new Integer(1)));
      exprNodeDesc func1 = SemanticAnalyzer.getFuncExprNodeDesc(Double.class.getName(), col11desc);
      ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(func1);

      // evaluate on row
      InspectableObject result = new InspectableObject();
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

      exprNodeDesc constant2 = new exprNodeConstantDesc(2);
      measureSpeed("concat(col1[1], cola[1])",
          basetimes * 10,
          ExprNodeEvaluatorFactory.get(
              SemanticAnalyzer.getFuncExprNodeDesc("concat",
                  new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
                  new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1))),
          r,
          "1b");
      measureSpeed("concat(concat(col1[1], cola[1]), col1[2])",
          basetimes * 10,
          ExprNodeEvaluatorFactory.get(
              SemanticAnalyzer.getFuncExprNodeDesc("concat",
                  SemanticAnalyzer.getFuncExprNodeDesc("concat",
                      new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
                      new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1)),
                  new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant2))),
          r,
          "1b2");
      measureSpeed("concat(concat(concat(col1[1], cola[1]), col1[2]), cola[2])",
          basetimes * 10,
          ExprNodeEvaluatorFactory.get(
              SemanticAnalyzer.getFuncExprNodeDesc("concat",
                  SemanticAnalyzer.getFuncExprNodeDesc("concat",
                      SemanticAnalyzer.getFuncExprNodeDesc("concat",
                          new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant1),
                          new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant1)),
                      new exprNodeIndexDesc(new exprNodeColumnDesc(col1Type, "col1"), constant2)),
                  new exprNodeIndexDesc(new exprNodeColumnDesc(colaType, "cola"), constant2))),
          r,
          "1b2c");
     
    } catch (Throwable e) {
      e.printStackTrace();
View Full Code Here

Examples of org.apache.hadoop.hive.ql.plan.exprNodeIndexDesc

      exprNodeConstantDesc d = (exprNodeConstantDesc)desc;
      return d.getValue() == null;
    } else if (desc instanceof exprNodeNullDesc) {
      return false;
    } else if (desc instanceof exprNodeIndexDesc) {
      exprNodeIndexDesc d = (exprNodeIndexDesc)desc;
      return mightBeUnknown(d.getDesc()) || mightBeUnknown(d.getIndex());
    } else if (desc instanceof exprNodeFieldDesc) {
      exprNodeFieldDesc d = (exprNodeFieldDesc)desc;
      return mightBeUnknown(d.getDesc());
    } else if (desc instanceof exprNodeFuncDesc) {
      exprNodeFuncDesc d = (exprNodeFuncDesc)desc;
      for(int i=0; i<d.getChildren().size(); i++) {
        if (mightBeUnknown(d.getChildren().get(i))) {
          return true;
        }
      }
      return false;
    } else if (desc instanceof exprNodeColumnDesc) {
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by ORACLE Inc. Contact coftware#gmail.com.