
Examples of org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator
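The excerpts below, mostly drawn from Hive's windowing/PTF translation and execution code, all follow the same life cycle: build an ExprNodeDesc for an expression, obtain an ExprNodeEvaluator from a factory, initialize it with the ObjectInspector that describes the input rows, then call evaluate on each row. The following is a minimal, self-contained sketch of that life cycle; it is not taken from any of the excerpts, and the class name, column name and row layout are illustrative only.

import java.util.Arrays;

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ExprNodeEvaluatorSketch {
  public static void main(String[] args) throws HiveException {
    // Row shape: struct<col1:int>, described with standard Java ObjectInspectors.
    StructObjectInspector rowOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("col1"),
        Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaIntObjectInspector));

    // Expression: a bare reference to column "col1".
    ExprNodeColumnDesc colExpr =
        new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "col1", null, false);

    // Factory -> initialize -> evaluate, the pattern repeated in every excerpt below.
    ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(colExpr);
    ObjectInspector outOI = eval.initialize(rowOI);

    Object row = Arrays.<Object>asList(42);      // a "standard" struct row is just a List
    Object result = eval.evaluate(row);
    System.out.println(outOI.getTypeName() + " = " + result);   // int = 42
  }
}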


  /*
   * Translate the where clause: build an ExprNodeEvaluator for its expression and
   * attach the evaluator and its ObjectInspector to the WhereDef.
   */
  @Override
  public void visit(WhereDef where) throws WindowingException
  {
    ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(
        tInfo, where.getExprNode());
    ObjectInspector oi = TranslateUtils.initExprNodeEvaluator(qDef,
        where.getExprNode(), exprEval, inputInfo);
    where.setExprEvaluator(exprEval);
    where.setOI(oi);


    // When a where clause is present, build a converter that coerces the where
    // expression's output to a Java Boolean, along with the expression's evaluator.
    Converter whConverter = !applyWhere ? null
        : ObjectInspectorConverters.getConverter(
            whDef.getOI(),
            PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);
    ExprNodeEvaluator whCondEval = !applyWhere ? null : whDef.getExprEvaluator();

    Writable value = null;
    PartitionIterator<Object> pItr = oPart.iterator();
    RuntimeUtils.connectLeadLagFunctionsToPartition(qDef, pItr);
    while (pItr.hasNext())
    {
      int colCnt = 0;
      ArrayList<Object> selectList = new ArrayList<Object>();
      Object oRow = pItr.next();

      if (applyWhere)
      {
        // Evaluate the where expression on this row and convert the result to a Boolean.
        Object whCond = null;
        try
        {
          whCond = whCondEval.evaluate(oRow);
          whCond = whConverter.convert(whCond);
        }
        catch (HiveException he)
        {
          throw new WindowingException(he);
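The excerpt above converts whatever the where expression produces into a java.lang.Boolean before testing it. Below is a self-contained sketch of just that conversion step; the writable boolean source inspector stands in for whDef.getOI(), and the class name is illustrative.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.BooleanWritable;

public class WhereConverterSketch {
  public static void main(String[] args) {
    // Source: a writable boolean inspector (stand-in for whDef.getOI());
    // target: java.lang.Boolean, as in the excerpt above.
    Converter toJavaBoolean = ObjectInspectorConverters.getConverter(
        PrimitiveObjectInspectorFactory.writableBooleanObjectInspector,
        PrimitiveObjectInspectorFactory.javaBooleanObjectInspector);

    Object whCond = toJavaBoolean.convert(new BooleanWritable(true));

    // A where filter would typically keep the row only when the condition is true.
    boolean keepRow = whCond != null && ((Boolean) whCond).booleanValue();
    System.out.println(keepRow);   // true
  }
}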

      throw new WindowingException(sprintf("Unknown Table Reference in column %s", cSpec));
    }
   
    ASTNode expr = TranslateUtils.buildASTNode(cSpec.getColumnName());
    ExprNodeDesc exprNode = TranslateUtils.buildExprNode(expr, iInfo.getTypeCheckCtx());
    ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(qDef.getTranslationInfo(), exprNode);
    ObjectInspector oi = TranslateUtils.initExprNodeEvaluator(qDef, exprNode, exprEval, iInfo);
   
    cDef.setExpression(expr);
    cDef.setExprNode(exprNode);
    cDef.setExprEvaluator(exprEval);

      ValueBoundarySpec vBndSpec = (ValueBoundarySpec) bndSpec;
      ValueBoundaryDef vbDef = new ValueBoundaryDef(vBndSpec);
      TranslateUtils.validateNoLeadLagInValueBoundarySpec(vBndSpec.getExpression());
      ExprNodeDesc exprNode = TranslateUtils.buildExprNode(vBndSpec.getExpression(), iInfo.getTypeCheckCtx());
      vbDef.setExprNode(exprNode);
      ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(qDef.getTranslationInfo(), exprNode);
      ObjectInspector oi = TranslateUtils.initExprNodeEvaluator(qDef, exprNode, exprEval, iInfo);
      TranslateUtils.validateValueBoundaryExprType(oi);
      vbDef.setExprEvaluator(exprEval);
      vbDef.setOI(oi);
      return vbDef;

        + "Expecting string.");
  }

  private long evaluateCastToTimestamp(ExprNodeDesc expr) throws HiveException {
    ExprNodeGenericFuncDesc expr2 = (ExprNodeGenericFuncDesc) expr;
    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(expr2);
    // initialize/evaluate with null: valid only because the expression is built
    // purely from constants, so no input row or row ObjectInspector is needed.
    ObjectInspector output = evaluator.initialize(null);
    Object constant = evaluator.evaluate(null);
    // Normalize the (possibly writable) result to a standard Java object.
    Object java = ObjectInspectorUtils.copyToStandardJavaObject(constant, output);

    if (!(java instanceof Timestamp)) {
      throw new HiveException("Udf: failed to convert to timestamp");
    }
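The excerpt above relies on the fact that an evaluator over a constant-only expression can be initialized with a null row ObjectInspector and evaluated with a null row. Below is a self-contained sketch of that idiom, using an illustrative string literal in place of the cast-to-timestamp expression.

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;

public class ConstantFoldSketch {
  public static void main(String[] args) throws HiveException {
    // A literal expression node; no table columns are referenced anywhere in the tree.
    ExprNodeConstantDesc constExpr = new ExprNodeConstantDesc("hello");

    ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(constExpr);
    ObjectInspector outputOI = evaluator.initialize(null);   // no input rows involved
    Object folded = evaluator.evaluate(null);

    // Normalize the (possibly writable) result to a plain Java object, as the excerpt
    // does before checking for java.sql.Timestamp.
    Object javaValue = ObjectInspectorUtils.copyToStandardJavaObject(folded, outputOI);
    System.out.println(javaValue.getClass().getName() + ": " + javaValue);
  }
}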

        ArrayList<ObjectInspector> selectListExprOIs = new ArrayList<ObjectInspector>();
        resultExprInfo.resultExprEvals = new ArrayList<ExprNodeEvaluator>();

        for (int i = 0; i < resultExprInfo.resultExprNodes.size(); i++) {
          ExprNodeDesc selectColumnExprNode = resultExprInfo.resultExprNodes.get(i);
          ExprNodeEvaluator selectColumnExprEval =
              ExprNodeEvaluatorFactory.get(selectColumnExprNode);
          ObjectInspector selectColumnOI = selectColumnExprEval.initialize(selectListInputOI);
          resultExprInfo.resultExprEvals.add(selectColumnExprEval);
          selectListExprOIs.add(selectColumnOI);
        }

        resultExprInfo.resultOI = ObjectInspectorFactory.getStandardStructObjectInspector(
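The loop above creates one evaluator and one output ObjectInspector per select expression and then (in the truncated call) wraps the output inspectors into a standard struct ObjectInspector. Below is a self-contained sketch of that pattern over an illustrative two-column input; all names are made up for the example.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class SelectListSketch {
  public static void main(String[] args) throws HiveException {
    // Input shape: struct<a:int, b:string>.
    StructObjectInspector inOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("a", "b"),
        Arrays.<ObjectInspector>asList(
            PrimitiveObjectInspectorFactory.javaIntObjectInspector,
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));

    // "Select list": two column expressions, b then a.
    List<ExprNodeDesc> selectExprs = Arrays.<ExprNodeDesc>asList(
        new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "b", null, false),
        new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "a", null, false));

    List<ExprNodeEvaluator> evals = new ArrayList<ExprNodeEvaluator>();
    List<ObjectInspector> outFieldOIs = new ArrayList<ObjectInspector>();
    for (ExprNodeDesc e : selectExprs) {
      ExprNodeEvaluator eval = ExprNodeEvaluatorFactory.get(e);
      outFieldOIs.add(eval.initialize(inOI));   // one output OI per select expression
      evals.add(eval);
    }

    // Output shape built from the per-expression output ObjectInspectors.
    StructObjectInspector outOI = ObjectInspectorFactory.getStandardStructObjectInspector(
        Arrays.asList("_col0", "_col1"), outFieldOIs);

    // Project one input row into one output row.
    Object inRow = Arrays.<Object>asList(7, "hive");
    ArrayList<Object> outRow = new ArrayList<Object>();
    for (ExprNodeEvaluator eval : evals) {
      outRow.add(eval.evaluate(inRow));
    }
    System.out.println(outOI.getTypeName() + " -> " + outRow);   // [hive, 7]
  }
}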


  protected void initialize(PTFExpressionDef eDef, ShapeDetails inpShape) throws HiveException {
    ExprNodeDesc exprNode = eDef.getExprNode();
    ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(llInfo, exprNode);
    ObjectInspector oi = initExprNodeEvaluator(exprEval, exprNode, inpShape);
    eDef.setExprEvaluator(exprEval);
    eDef.setOI(oi);
  }

    /*
     * For each lead/lag call found in the top-level expression, build a separate
     * evaluator for the call's first argument and hand it to the GenericUDFLeadLag
     * instance, which evaluates that argument itself.
     */
    List<ExprNodeGenericFuncDesc> llFuncExprs = llInfo.getLLFuncExprsInTopExpr(exprNode);
    if (llFuncExprs != null) {
      for (ExprNodeGenericFuncDesc llFuncExpr : llFuncExprs) {
        ExprNodeDesc firstArg = llFuncExpr.getChildren().get(0);
        ExprNodeEvaluator dupExprEval = WindowingExprNodeEvaluatorFactory.get(llInfo, firstArg);
        dupExprEval.initialize(inpShape.getOI());
        GenericUDFLeadLag llFn = (GenericUDFLeadLag) llFuncExpr.getGenericUDF();
        llFn.setExprEvaluator(dupExprEval);
      }
    }


      // Map each symbol name (lower-cased for case-insensitive lookup) to its
      // evaluator / ObjectInspector pair.
      symbolExprEvalMap = new HashMap<String, Object[]>();
      int sz = symbolNames.size();
      for(int i=0; i < sz; i++)
      {
        String symbolName = symbolNames.get(i);
        ExprNodeEvaluator symbolExprEval = symbolExprEvals.get(i);
        ObjectInspector symbolExprOI = symbolExprOIs.get(i);
        symbolExprEvalMap.put(symbolName.toLowerCase(),
            new Object[] {symbolExprEval, symbolExprOI});
      }
    }

        if ( symbolDetails == null )
        {
          throw new SemanticException(String.format("Unknown Symbol %s", symbol));
        }

        // Unpack the evaluator / ObjectInspector pair stored for this symbol.
        ExprNodeEvaluator symbolExprEval = (ExprNodeEvaluator) symbolDetails[0];
        ObjectInspector symbolExprOI = (ObjectInspector) symbolDetails[1];
        SymbolFunction sFn = new Symbol(symbolExprEval, symbolExprOI);

        if ( isStar )
        {
