Source Code of org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory

/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements.  See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership.  The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License.  You may obtain a copy of the License at
*
*     http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.hive.ql.parse;

import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Stack;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeFuncDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.exprNodeNullDesc;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.ql.udf.UDFOPPositive;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.util.ReflectionUtils;

/**
* The factory for creating type-check processors. The type-check processors are
* used to process the syntax trees of expressions and convert them into
* expression node descriptor (exprNodeDesc) trees. They also introduce the
* correct conversion functions to do proper implicit type conversion.
*/
public class TypeCheckProcFactory {

  protected static final Log LOG = LogFactory.getLog(TypeCheckProcFactory.class.getName());
  /**
   * Function to do group-by subexpression elimination. This is called first by
   * all the processors. As an example, consider the query
   *   select a+b, count(1) from T group by a+b;
   * Here a+b is already computed as the group-by operator's key, so we substitute a+b
   * in the select list with the internal column name of the a+b expression that
   * appears in the input row resolver.
   *
   * @param nd The node that is being inspected.
   * @param procCtx The processor context.
   *
   * @return the exprNodeColumnDesc of the pre-computed expression, or null if the
   *         expression is not pre-computed.
   */
  public static exprNodeDesc processGByExpr(Node nd, Object procCtx)
    throws SemanticException {
    //  The processors collectively build the exprNodeDesc tree recursively:  a column
    //  ref becomes an exprNodeColumnDesc, a constant becomes an exprNodeConstantDesc,
    //  and anything else becomes an exprNodeFuncDesc with recursively built children.
    //  This method only handles the case where the whole subexpression has already
    //  been computed, e.g. by a group-by operator.
    ASTNode expr = (ASTNode)nd;
    TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
    RowResolver input = ctx.getInputRR();
    exprNodeDesc desc = null;

    //  If the current subExpression is pre-calculated, as in Group-By etc.
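    //  The lookup key is the string form of the expression's AST subtree, e.g.
    //  (illustrative, not verbatim) for "a+b" something like
    //  (+ (TOK_TABLE_OR_COL a) (TOK_TABLE_OR_COL b)), which is the key under
    //  which the group-by stage registered the pre-computed column.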
    ColumnInfo colInfo = input.get("", expr.toStringTree());
    if (colInfo != null) {
      desc = new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
                                    colInfo.getTabAlias(), colInfo.getIsPartitionCol());
      return desc;
    }   
    return desc;
  }
 
  /**
   * Processor for processing NULL expression.
   */
  public static class NullExprProcessor implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      TypeCheckCtx ctx = (TypeCheckCtx)procCtx;
      if (ctx.getError() != null) {
        return null;
      }
     
      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }
     
      return new exprNodeNullDesc();
    }
   
  }
 
  /**
   * Factory method to get NullExprProcessor.
   * @return NullExprProcessor.
   */
  public static NullExprProcessor getNullExprProcessor() {
    return new NullExprProcessor();
  }
 
  /**
   * Processor for processing numeric constants.
   */
  public static class NumExprProcessor implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      TypeCheckCtx ctx = (TypeCheckCtx)procCtx;
      if (ctx.getError() != null) {
        return null;
      }
     
      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }
     
      Number v = null;
      ASTNode expr = (ASTNode)nd;
      // The constant can be a Double, Long or Integer. Each successful parse
      // below overwrites v, so the first parse that throws leaves v holding
      // the most specific type that still fits the literal.
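      // Illustrative behavior of the cascade below:
      //   "4"           -> Integer (all three parses succeed)
      //   "40000000000" -> Long    (Integer.valueOf overflows and throws)
      //   "4.5"         -> Double  (Long.valueOf throws on the decimal point)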
      try {
        v = Double.valueOf(expr.getText());
        v = Long.valueOf(expr.getText());
        v = Integer.valueOf(expr.getText());
      } catch (NumberFormatException e) {
        // do nothing here, we will throw an exception in the following block
      }
      if (v == null) {
        throw new SemanticException(ErrorMsg.INVALID_NUMERICAL_CONSTANT.getMsg(expr));
      }
      return new exprNodeConstantDesc(v);
    }
   
  }
 
  /**
   * Factory method to get NumExprProcessor.
   * @return NumExprProcessor.
   */
  public static NumExprProcessor getNumExprProcessor() {
    return new NumExprProcessor();
  }
 
  /**
   * Processor for processing string constants.
   */
  public static class StrExprProcessor implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      TypeCheckCtx ctx = (TypeCheckCtx)procCtx;
      if (ctx.getError() != null) {
        return null;
      }
     
      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }
     
      ASTNode expr = (ASTNode)nd;
      String str = null;
     
      switch (expr.getToken().getType()) {
      case HiveParser.StringLiteral:
        str = BaseSemanticAnalyzer.unescapeSQLString(expr.getText());
        break;
      case HiveParser.TOK_CHARSETLITERAL:
        str = BaseSemanticAnalyzer.charSetString(expr.getChild(0).getText(), expr.getChild(1).getText());
        break;
      default:
        // HiveParser.Identifier | HiveParser.KW_IF | HiveParser.KW_LEFT | HiveParser.KW_RIGHT
        str = BaseSemanticAnalyzer.unescapeIdentifier(expr.getText());
        break;
      }
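      // Note: unescapeSQLString strips the enclosing quotes of a StringLiteral
      // and resolves escape sequences, so the constant stores the raw string
      // value, e.g. (illustrative) the literal 'don\'t' becomes: don't.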
      return new exprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, str);
    }
   
  }
 
  /**
   * Factory method to get StrExprProcessor.
   * @return StrExprProcessor.
   */
  public static StrExprProcessor getStrExprProcessor() {
    return new StrExprProcessor();
  }
 
  /**
   * Processor for boolean constants.
   */
  public static class BoolExprProcessor implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      TypeCheckCtx ctx = (TypeCheckCtx)procCtx;
      if (ctx.getError() != null) {
        return null;
      }

      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }

      ASTNode expr = (ASTNode)nd;
      Boolean bool = null;

      switch (expr.getToken().getType()) {
      case HiveParser.KW_TRUE:
        bool = Boolean.TRUE;
        break;
      case HiveParser.KW_FALSE:
        bool = Boolean.FALSE;
        break;
      default:
        assert false;
      }
      return new exprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, bool);     
    }
   
  }
 
  /**
   * Factory method to get BoolExprProcessor.
   * @return BoolExprProcessor.
   */
  public static BoolExprProcessor getBoolExprProcessor() {
    return new BoolExprProcessor();
  }
 
  /**
   * Processor for table columns
   */
  public static class ColumnExprProcessor implements NodeProcessor {

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      TypeCheckCtx ctx = (TypeCheckCtx)procCtx;
      if (ctx.getError() != null) {
        return null;
      }

      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        return desc;
      }

      ASTNode expr = (ASTNode)nd;
      RowResolver input = ctx.getInputRR();

      if (expr.getType() != HiveParser.TOK_TABLE_OR_COL) {
        ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr));
        return null;
      }
     
      assert(expr.getChildCount() == 1);
      String tableOrCol = BaseSemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText());

      boolean isTableAlias = input.hasTableAlias(tableOrCol);
      ColumnInfo colInfo = input.get(null, tableOrCol);
     
      if (isTableAlias) {
        if (colInfo != null) {
          // it's a table alias, and also a column
          ctx.setError(ErrorMsg.AMBIGUOUS_TABLE_OR_COLUMN.getMsg(expr));
          return null;
        } else {
          // It's a table alias.
          // We will process that later in DOT.
          return null;
        }
      } else {
        if (colInfo == null) {
          // It's not a column or a table alias.
          if (input.getIsExprResolver()) {
            ctx.setError(ErrorMsg.NON_KEY_EXPR_IN_GROUPBY.getMsg(expr));
            return null;
          } else {
            ctx.setError(ErrorMsg.INVALID_TABLE_OR_COLUMN.getMsg(expr.getChild(0)));
            LOG.debug(ErrorMsg.INVALID_TABLE_OR_COLUMN.toString() + ":" + input.toString());
            return null;
          }
        } else {
          // It's a column.
          return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
                                        colInfo.getTabAlias(), colInfo.getIsPartitionCol());
        }
      }

    }
   
  }
 
  /**
   * Factory method to get ColumnExprProcessor.
   * @return ColumnExprProcessor.
   */
  public static ColumnExprProcessor getColumnExprProcessor() {
    return new ColumnExprProcessor();
  }
 
  /**
   * The default processor for typechecking.
   */
  public static class DefaultExprProcessor implements NodeProcessor {

    static HashMap<Integer, String> specialUnaryOperatorTextHashMap;
    static HashMap<Integer, String> specialFunctionTextHashMap;
    static HashMap<Integer, String> conversionFunctionTextHashMap;
    static {
      specialUnaryOperatorTextHashMap = new HashMap<Integer, String>();
      specialUnaryOperatorTextHashMap.put(HiveParser.PLUS, "positive");
      specialUnaryOperatorTextHashMap.put(HiveParser.MINUS, "negative");
      specialFunctionTextHashMap = new HashMap<Integer, String>();
      specialFunctionTextHashMap.put(HiveParser.TOK_ISNULL, "isnull");
      specialFunctionTextHashMap.put(HiveParser.TOK_ISNOTNULL, "isnotnull");
      conversionFunctionTextHashMap = new HashMap<Integer, String>();
      conversionFunctionTextHashMap.put(HiveParser.TOK_BOOLEAN, Constants.BOOLEAN_TYPE_NAME);
      conversionFunctionTextHashMap.put(HiveParser.TOK_TINYINT, Constants.TINYINT_TYPE_NAME);
      conversionFunctionTextHashMap.put(HiveParser.TOK_SMALLINT, Constants.SMALLINT_TYPE_NAME);
      conversionFunctionTextHashMap.put(HiveParser.TOK_INT, Constants.INT_TYPE_NAME);
      conversionFunctionTextHashMap.put(HiveParser.TOK_BIGINT, Constants.BIGINT_TYPE_NAME);
      conversionFunctionTextHashMap.put(HiveParser.TOK_FLOAT, Constants.FLOAT_TYPE_NAME);
      conversionFunctionTextHashMap.put(HiveParser.TOK_DOUBLE, Constants.DOUBLE_TYPE_NAME);
      conversionFunctionTextHashMap.put(HiveParser.TOK_STRING, Constants.STRING_TYPE_NAME);
    }
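
    // Sketch of how these maps come into play: a cast such as CAST(x AS INT)
    // parses into TOK_FUNCTION(TOK_INT, x); conversionFunctionTextHashMap maps
    // TOK_INT to "int", which getFunctionText returns so that the conversion
    // UDF registered under that type name can be resolved via FunctionRegistry.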

    public static boolean isRedundantConversionFunction(ASTNode expr, boolean isFunction, ArrayList<exprNodeDesc> children) {
      if (!isFunction) return false;
      // children.size() is always one less than expr.getChildCount(), since the latter also counts the function name.
      assert(children.size() == expr.getChildCount() - 1);
      // conversion functions take a single parameter
      if (children.size() != 1) return false;
      String funcText = conversionFunctionTextHashMap.get(((ASTNode)expr.getChild(0)).getType());
      // not a conversion function
      if (funcText == null) return false;
      // Return true when the child type and the conversion target type are the same.
      return ((PrimitiveTypeInfo)children.get(0).getTypeInfo()).getTypeName().equalsIgnoreCase(funcText);
    }
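
    // Example: for a string column s, CAST(s AS STRING) parses as
    // TOK_FUNCTION(TOK_STRING, s); the child's type already equals the target
    // type "string", so the method above returns true and the cast is dropped.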
   
    public static String getFunctionText(ASTNode expr, boolean isFunction) {
      String funcText = null;
      if (!isFunction) {
        // For an operator, the function name is the operator text, unless it's in our special dictionary.
        if (expr.getChildCount() == 1) {
          funcText = specialUnaryOperatorTextHashMap.get(expr.getType());
        }
        if (funcText == null) {
          funcText = expr.getText();
        }
      } else {
        // For TOK_FUNCTION, the function name is stored in the first child, unless it's in our
        // special dictionary.
        assert(expr.getChildCount() >= 1);
        int funcType = ((ASTNode)expr.getChild(0)).getType();
        funcText = specialFunctionTextHashMap.get(funcType);
        if (funcText == null) {
          funcText = conversionFunctionTextHashMap.get(funcType);
        }
        if (funcText == null) {
          funcText = ((ASTNode)expr.getChild(0)).getText();
        }
      }
      return funcText;
    }
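
    // Examples: the unary tree (- x) maps to the UDF name "negative" via
    // specialUnaryOperatorTextHashMap (binary "a - b" keeps the operator text
    // "-"), while for a TOK_FUNCTION node such as concat(a, b) the name is
    // simply the text of the first child, "concat".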


   
    /**
     * Convenience overload to get the exprNodeDesc from varargs children.
     * Note that any UDFArgumentException is rethrown as a RuntimeException,
     * since callers of this overload expect a valid, registered function.
     * @param name the function name, as registered in the FunctionRegistry
     * @param children the argument expressions
     * @return The expression node descriptor
     */
    public static exprNodeDesc getFuncExprNodeDesc(String name, exprNodeDesc... children) {
      try {
        return getFuncExprNodeDesc(name, Arrays.asList(children));
      } catch (UDFArgumentException e) {
        throw new RuntimeException("Hive 2 internal error", e);
      }
    }
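
    // Usage sketch (the call site is illustrative): callers can build expression
    // trees programmatically, e.g.
    //   exprNodeDesc e = getFuncExprNodeDesc("concat", leftDesc, rightDesc);
    // If the name is not registered in the FunctionRegistry, the list-based
    // overload below returns null instead of throwing.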
   
    /**
     * This function creates an exprNodeDesc for a UDF function given the children (arguments).
     * It will insert implicit type conversion functions if necessary.
     * @throws UDFArgumentException
     */
    public static exprNodeDesc getFuncExprNodeDesc(String udfName, List<exprNodeDesc> children)
        throws UDFArgumentException {

      FunctionInfo fi = FunctionRegistry.getFunctionInfo(udfName);
      if (fi == null) return null;
     
      // Is it a generic UDF?
      Class<? extends GenericUDF> genericUDFClass = fi.getGenericUDFClass();
      if (genericUDFClass != null) {
        return exprNodeGenericFuncDesc.newInstance(genericUDFClass, children);
      }
     
      // TODO: extract as a function
      // Find the corresponding method
      ArrayList<TypeInfo> argumentTypeInfos = new ArrayList<TypeInfo>(children.size());
      for(int i=0; i<children.size(); i++) {
        exprNodeDesc child = children.get(i);
        argumentTypeInfos.add(child.getTypeInfo());
      }
     
      Method udfMethod = FunctionRegistry.getUDFMethod(udfName, argumentTypeInfos);
      if (udfMethod == null) return null;

      // Convert the parameters if their types do not exactly match the method signature.
      ArrayList<exprNodeDesc> ch = SemanticAnalyzer.convertParameters(udfMethod, children);

      // The return type of a UDF method is either a Java primitive type/class or a Writable class.
      TypeInfo resultTypeInfo = null;
      if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(udfMethod.getReturnType())) {
        resultTypeInfo = TypeInfoFactory.getPrimitiveTypeInfoFromPrimitiveWritable(udfMethod.getReturnType());
      } else {
        resultTypeInfo = TypeInfoFactory.getPrimitiveTypeInfoFromJavaPrimitive(udfMethod.getReturnType());
      }
     
      exprNodeFuncDesc desc = new exprNodeFuncDesc(
        udfName,
        resultTypeInfo,
        FunctionRegistry.getUDFClass(udfName),
        udfMethod, ch);
      return desc;
    }
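
    // Example of the return-type resolution above (a sketch): if the resolved
    // method were IntWritable evaluate(IntWritable, IntWritable), the Writable
    // branch yields the int TypeInfo; a method returning a Java primitive or
    // its boxed class takes the java-primitive branch instead.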

    static exprNodeDesc getXpathOrFuncExprNodeDesc(ASTNode expr, boolean isFunction,
        ArrayList<exprNodeDesc> children)
        throws SemanticException, UDFArgumentException {
      // return the child directly if the conversion is redundant.
      if (isRedundantConversionFunction(expr, isFunction, children)) {
        assert(children.size() == 1);
        assert(children.get(0) != null);
        return children.get(0);
      }
      String funcText = getFunctionText(expr, isFunction);
      exprNodeDesc desc;
      if (funcText.equals(".")) {
        // "." :  FIELD Expression
        assert(children.size() == 2);
        // Only allow constant field name for now
        assert(children.get(1) instanceof exprNodeConstantDesc);
        exprNodeDesc object = children.get(0);
        exprNodeConstantDesc fieldName = (exprNodeConstantDesc)children.get(1);
        assert(fieldName.getValue() instanceof String);
       
        // Calculate result TypeInfo
        String fieldNameString = (String)fieldName.getValue();
        TypeInfo objectTypeInfo = object.getTypeInfo();
       
        // Allow accessing a field of list element structs directly from a list 
        boolean isList = (object.getTypeInfo().getCategory() == ObjectInspector.Category.LIST);
        if (isList) {
          objectTypeInfo = ((ListTypeInfo)objectTypeInfo).getListElementTypeInfo();
        }
        if (objectTypeInfo.getCategory() != Category.STRUCT) {
          throw new SemanticException(ErrorMsg.INVALID_DOT.getMsg(expr));
        }
        TypeInfo t = ((StructTypeInfo)objectTypeInfo).getStructFieldTypeInfo(fieldNameString);
        if (isList) {
          t = TypeInfoFactory.getListTypeInfo(t);
        }
       
        desc = new exprNodeFieldDesc(t, children.get(0), fieldNameString, isList);
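        // Example: with s of type struct<f:int>, s.f yields an int field desc;
        // with a of type array<struct<f:int>>, a.f goes through the list branch
        // above and yields type array<int> (isList == true).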
       
      } else if (funcText.equals("[")){
        // "[]" : LSQUARE/INDEX Expression
        assert(children.size() == 2);
       
        // Check whether this is a list or a map
        TypeInfo myt = children.get(0).getTypeInfo();

        if (myt.getCategory() == Category.LIST) {
          // Only allow constant integer index for now
          if (!(children.get(1) instanceof exprNodeConstantDesc)
              || !(((exprNodeConstantDesc)children.get(1)).getTypeInfo().equals(TypeInfoFactory.intTypeInfo))) {
            throw new SemanticException(ErrorMsg.INVALID_ARRAYINDEX_CONSTANT.getMsg(expr));
          }
       
          // Calculate TypeInfo
          TypeInfo t = ((ListTypeInfo)myt).getListElementTypeInfo();
          desc = new exprNodeGenericFuncDesc(t,
              FunctionRegistry.getGenericUDFClassForIndex(),
              children);
        }
        else if (myt.getCategory() == Category.MAP) {
          // Only allow constant indexes for now
          if (!(children.get(1) instanceof exprNodeConstantDesc)) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_CONSTANT.getMsg(expr));
          }
          if (!(((exprNodeConstantDesc)children.get(1)).getTypeInfo().equals(
              ((MapTypeInfo)myt).getMapKeyTypeInfo()))) {
            throw new SemanticException(ErrorMsg.INVALID_MAPINDEX_TYPE.getMsg(expr));
          }
          // Calculate TypeInfo
          TypeInfo t = ((MapTypeInfo)myt).getMapValueTypeInfo();
          desc = new exprNodeGenericFuncDesc(t,
              FunctionRegistry.getGenericUDFClassForIndex(),
              children);
        }
        else {
          throw new SemanticException(ErrorMsg.NON_COLLECTION_TYPE.getMsg(expr,
              myt.getTypeName()));
        }
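        // Examples: for a of type array<int>, a[0] takes the LIST branch and
        // requires a constant int index; for m of type map<string,int>, m['k']
        // takes the MAP branch and the constant key must match the map key type.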
      } else {
        // other operators or functions
        FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcText);
       
        if (fi == null) {
          if (isFunction)
            throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((ASTNode)expr.getChild(0)));
          else
            throw new SemanticException(ErrorMsg.INVALID_FUNCTION.getMsg((ASTNode)expr));
        }
       
        desc = getFuncExprNodeDesc(funcText, children);
        if (desc == null) {
          ArrayList<Class<?>> argumentClasses = new ArrayList<Class<?>>(children.size());
          for(int i=0; i<children.size(); i++) {
            argumentClasses.add(((PrimitiveTypeInfo)children.get(i).getTypeInfo()).getPrimitiveWritableClass());
          }
   
          if (isFunction) {
            String reason = "Looking for UDF \"" + expr.getChild(0).getText() + "\" with parameters " + argumentClasses;
            throw new SemanticException(ErrorMsg.INVALID_FUNCTION_SIGNATURE.getMsg((ASTNode)expr.getChild(0), reason));
          } else {
            String reason = "Looking for Operator \"" + expr.getText() + "\" with parameters " + argumentClasses;
            throw new SemanticException(ErrorMsg.INVALID_OPERATOR_SIGNATURE.getMsg(expr, reason));
          }
        }
      }
      // UDFOPPositive is a no-op.
      // However, we still create it, and then remove it here, to make sure we only allow
      // "+" for numeric types.
      if (desc instanceof exprNodeFuncDesc) {
        exprNodeFuncDesc funcDesc = (exprNodeFuncDesc)desc;
        if (funcDesc.getUDFClass().equals(UDFOPPositive.class)) {
          assert(funcDesc.getChildren().size() == 1);
          desc = funcDesc.getChildExprs().get(0);
        }
      }
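      // Example: "+x" is first bound to UDFOPPositive, so x must be a type the
      // UDF accepts; the no-op wrapper is then unwrapped above, leaving just x.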
      assert(desc != null);
      return desc;
    }

    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {

      TypeCheckCtx ctx = (TypeCheckCtx)procCtx;

      // If this is a GroupBy expression, clear error and continue
      exprNodeDesc desc = TypeCheckProcFactory.processGByExpr(nd, procCtx);
      if (desc != null) {
        ctx.setError(null);
        return desc;
      }
     
      if (ctx.getError() != null) {
        return null;
      }
     
      ASTNode expr = (ASTNode)nd;
     
      // If the first child is a TOK_TABLE_OR_COL, and nodeOutputs[0] is null,
      // and the operator is a DOT, then it's a table column reference.
      if (expr.getType() == HiveParser.DOT
          && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL
          && nodeOutputs[0] == null) {

        RowResolver input = ctx.getInputRR();
        String tableAlias = SemanticAnalyzer.unescapeIdentifier(
            expr.getChild(0).getChild(0).getText());
        // NOTE: tableAlias must be a valid non-ambiguous table alias,
        // because we've checked that in TOK_TABLE_OR_COL's process method.
        ColumnInfo colInfo = input.get(tableAlias,
            ((exprNodeConstantDesc)nodeOutputs[1]).getValue().toString() );

        if (colInfo == null) {
          ctx.setError(ErrorMsg.INVALID_COLUMN.getMsg(expr.getChild(1)));
          return null;
        }
        return new exprNodeColumnDesc(colInfo.getType(), colInfo.getInternalName(),
                                      colInfo.getTabAlias(), colInfo.getIsPartitionCol());
      }
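      // Example: for "t.c" with table alias t, child 0 (TOK_TABLE_OR_COL t)
      // produced null in ColumnExprProcessor (it is a table alias), child 1
      // evaluated to the constant "c", and the branch above resolves column c
      // within alias t.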

      // Return null for conversion operators and charset tokens; they are consumed by the parent node.
      if (conversionFunctionTextHashMap.keySet().contains(expr.getType()) ||
          specialFunctionTextHashMap.keySet().contains(expr.getType()) ||
          expr.getToken().getType() == HiveParser.CharSetName ||
          expr.getToken().getType() == HiveParser.CharSetLiteral) {
        return null;
      }
     
      boolean isFunction = (expr.getType() == HiveParser.TOK_FUNCTION);
     
      // Create all children
      int childrenBegin = (isFunction ? 1 : 0);
      ArrayList<exprNodeDesc> children = new ArrayList<exprNodeDesc>(expr.getChildCount() - childrenBegin);
      for (int ci=childrenBegin; ci<expr.getChildCount(); ci++) {
        children.add((exprNodeDesc)nodeOutputs[ci]);
      }
     
      // If any of the children is null, return null;
      // this is a hack for now to handle the group-by case.
      if (children.contains(null)) {
        return null;
      }
     
      // Create function desc
      try {
        return getXpathOrFuncExprNodeDesc(expr, isFunction, children);
      } catch (UDFArgumentTypeException e) {
        throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_TYPE
            .getMsg(expr.getChild(childrenBegin + e.getArgumentId()), e.getMessage()));
      } catch (UDFArgumentLengthException e) {
        throw new SemanticException(ErrorMsg.INVALID_ARGUMENT_LENGTH
            .getMsg(expr, e.getMessage()));
      } catch (UDFArgumentException e) {
        throw new SemanticException(ErrorMsg.INVALID_ARGUMENT
            .getMsg(expr, e.getMessage()));
      }
    }
   
  }
 
  /**
   * Factory method to get DefaultExprProcessor.
   * @return DefaultExprProcessor.
   */
  public static DefaultExprProcessor getDefaultExprProcessor() {
    return new DefaultExprProcessor();
  }
}
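
// Wiring sketch (an assumption, modeled on how SemanticAnalyzer.genExprNodeDesc
// uses this factory with the ql.lib walker framework; the rule patterns shown
// are illustrative, not verbatim):
//
//   Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
//   opRules.put(new RuleRegExp("R1", HiveParser.TOK_NULL + "%"),
//       TypeCheckProcFactory.getNullExprProcessor());
//   opRules.put(new RuleRegExp("R2", HiveParser.Number + "%"),
//       TypeCheckProcFactory.getNumExprProcessor());
//   Dispatcher disp = new DefaultRuleDispatcher(
//       TypeCheckProcFactory.getDefaultExprProcessor(), opRules, tcCtx);
//   GraphWalker ogw = new DefaultGraphWalker(disp);
//   ogw.startWalking(Collections.singletonList((Node) expr), nodeOutputs);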