Examples of PartitionedTableFunctionDef


Examples of org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef

  /*
   * If the final PTF in a PTFChain can stream its output, then set the OI of its OutputShape
   * to the OI returned by the TableFunctionEvaluator.
   */
  public static void alterOutputOIForStreaming(PTFDesc ptfDesc) {
    PartitionedTableFunctionDef tDef = ptfDesc.getFuncDef();
    TableFunctionEvaluator tEval = tDef.getTFunction();

    if ( tEval.canIterateOutput() ) {
      tDef.getOutputShape().setOI(tEval.getOutputOI());
    }
  }
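
The streaming check above can also be exposed as a small predicate. A minimal sketch reusing only the accessors visible in the snippet (PTFDesc.getFuncDef(), PartitionedTableFunctionDef.getTFunction(), TableFunctionEvaluator.canIterateOutput()); the helper name isStreamingCapable is an assumption, not part of Hive:

  // Hypothetical helper: true exactly when alterOutputOIForStreaming above
  // would rewrite the output OI, i.e. when the final PTF can stream.
  public static boolean isStreamingCapable(PTFDesc ptfDesc) {
    PartitionedTableFunctionDef tDef = ptfDesc.getFuncDef();
    TableFunctionEvaluator tEval = tDef.getTFunction();
    return tEval != null && tEval.canIterateOutput();
  }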

Examples of org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef

    isMapOperator = conf.isMapSide();

    reconstructQueryDef(hiveConf);

    if (isMapOperator) {
      PartitionedTableFunctionDef tDef = conf.getStartOfChain();
      outputObjInspector = tDef.getRawInputShape().getOI();
    } else {
      outputObjInspector = conf.getFuncDef().getOutputShape().getOI();
    }

    setupKeysWrapper(inputObjInspectors[0]);
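
The object-inspector selection above can be isolated into a helper. A minimal sketch, assuming ShapeDetails.getOI() yields a StructObjectInspector (as the other snippets on this page suggest); selectOutputOI is a hypothetical name:

  // Hypothetical helper: map-side PTF operators expose the raw-input shape of
  // the first function in the chain; reduce-side operators expose the output
  // shape of the final function.
  static StructObjectInspector selectOutputOI(PTFDesc conf) {
    if (conf.isMapSide()) {
      PartitionedTableFunctionDef start = conf.getStartOfChain();
      return start.getRawInputShape().getOI();
    }
    return conf.getFuncDef().getOutputShape().getOI();
  }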

Examples of org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef

    // (earlier lines of this method, omitted here, fill fnDefs with the chain
    // of PartitionedTableFunctionDefs)

    PTFInvocation curr = null, first = null;

    while(!fnDefs.isEmpty()) {
      PartitionedTableFunctionDef currFn = fnDefs.pop();
      curr = new PTFInvocation(curr, currFn.getTFunction());
      if ( first == null ) {
        first = curr;
      }
    }
    return first;
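
The fnDefs stack popped above is filled, in the omitted part of the method, by walking the chain backwards from the final function. A minimal sketch of how that walk could look (collectChain is a hypothetical name; it assumes PartitionedTableFunctionDef extends PTFInputDef and exposes a getInput() matching the setInput(...) call shown further down, and it uses java.util.Stack):

  // Hypothetical sketch: collect the chain last-to-first so that popping the
  // stack yields the functions in execution order (start of chain first).
  static Stack<PartitionedTableFunctionDef> collectChain(PTFDesc conf) {
    Stack<PartitionedTableFunctionDef> fnDefs = new Stack<PartitionedTableFunctionDef>();
    PTFInputDef iDef = conf.getFuncDef();
    while (iDef instanceof PartitionedTableFunctionDef) {
      fnDefs.push((PartitionedTableFunctionDef) iDef);
      iDef = ((PartitionedTableFunctionDef) iDef).getInput();
    }
    return fnDefs;
  }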

Examples of org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef

    TableFunctionResolver tFn = FunctionRegistry.getTableFunctionResolver(spec.getName());
    if (tFn == null) {
      throw new SemanticException(String.format("Unknown Table Function %s",
          spec.getName()));
    }
    PartitionedTableFunctionDef def = new PartitionedTableFunctionDef();
    def.setInput(inpDef);
    def.setName(spec.getName());
    def.setResolverClassName(tFn.getClass().getName());
    def.setAlias(spec.getAlias() == null ? "ptf_" + inpNum : spec.getAlias());
    def.setExpressionTreeString(spec.getAstNode().toStringTree());
    def.setTransformsRawInput(tFn.transformsRawInput());
    /*
     * translate args
     */
    List<ASTNode> args = spec.getArgs();
    if (args != null) {
      for (ASTNode expr : args) {
        PTFExpressionDef argDef = null;
        try {
          argDef = buildExpressionDef(inpDef.getOutputShape(), expr);
        } catch (HiveException he) {
          throw new SemanticException(he);
        }
        def.addArg(argDef);
      }
    }

    // Initialize the resolver with the def built so far; the evaluator it
    // produces becomes the def's TFunction.
    tFn.initialize(hCfg, ptfDesc, def);
    TableFunctionEvaluator tEval = tFn.getEvaluator();
    def.setTFunction(tEval);
    def.setCarryForwardNames(tFn.carryForwardNames());
    tFn.setupRawInputOI();

    // If the function transforms its raw input, build a separate raw-input shape;
    // otherwise the upstream output shape is reused as-is.
    if (tFn.transformsRawInput()) {
      StructObjectInspector rawInOutOI = tEval.getRawInputOI();
      List<String> rawInOutColNames = tFn.getRawInputColumnNames();
      RowResolver rawInRR = buildRowResolverForPTF(def.getName(),
          spec.getAlias(),
          rawInOutOI,
          rawInOutColNames,
          inpDef.getOutputShape().getRr());
      ShapeDetails rawInpShape = setupTableFnShape(def.getName(),
          inpDef.getOutputShape(),
          rawInOutOI,
          rawInOutColNames,
          rawInRR);
      def.setRawInputShape(rawInpShape);
    }
    else {
      def.setRawInputShape(inpDef.getOutputShape());
    }

    // Translate the partition/order spec, then let the resolver set up its
    // output ObjectInspector so the output shape can be built.
    translatePartitioning(def, spec);
    tFn.setupOutputOI();

    StructObjectInspector outputOI = tEval.getOutputOI();
    List<String> outColNames = tFn.getOutputColumnNames();
    RowResolver outRR = buildRowResolverForPTF(def.getName(),
        spec.getAlias(),
        outputOI,
        outColNames,
        def.getRawInputShape().getRr());
    ShapeDetails outputShape = setupTableFnShape(def.getName(),
        inpDef.getOutputShape(),
        outputOI,
        outColNames,
        outRR);
    def.setOutputShape(outputShape);

    return def;
  }
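
For a table function that does not transform its raw input, the essential wiring above reduces to a handful of the setters used in the method. A minimal sketch under that assumption (buildSimpleDef is a hypothetical helper; resolver initialization, argument translation, partitioning and shape construction are taken as already done):

  // Hypothetical helper: minimal PartitionedTableFunctionDef wiring for a
  // function that leaves its raw input untouched.
  static PartitionedTableFunctionDef buildSimpleDef(PTFInputDef inpDef, String name,
      int inpNum, TableFunctionEvaluator tEval, ShapeDetails outputShape) {
    PartitionedTableFunctionDef def = new PartitionedTableFunctionDef();
    def.setInput(inpDef);
    def.setName(name);
    def.setAlias("ptf_" + inpNum);                  // same default aliasing rule as above
    def.setTransformsRawInput(false);
    def.setTFunction(tEval);                        // evaluator produced by the resolver
    def.setRawInputShape(inpDef.getOutputShape());  // raw input == upstream output shape
    def.setOutputShape(outputShape);                // e.g. built via setupTableFnShape(...)
    return def;
  }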

Examples of org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef

    /*
     * ... (earlier lines of this comment omitted)
     * Input -> ReduceSink
     *
     * Here the ExprNodeDescriptors in the QueryDef are based on the Input Operator's RR.
     */
    {
      PartitionedTableFunctionDef tabDef = ptfDesc.getStartOfChain();

      /*
       * a. add Map-side PTF Operator if needed
       */
      if (tabDef.isTransformsRawInput() )
      {
        RowResolver ptfMapRR = tabDef.getRawInputShape().getRr();

        ptfDesc.setMapSide(true);
        input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc,
            new RowSchema(ptfMapRR.getColumnInfos()),
            input), ptfMapRR);
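
Whether the map-side PTF operator is inserted depends only on the first function in the chain. A minimal sketch of that check as a standalone predicate (needsMapSidePTF is a hypothetical name):

  // Hypothetical helper: a map-side PTF operator is only required when the
  // first function in the chain transforms its raw input.
  static boolean needsMapSidePTF(PTFDesc ptfDesc) {
    PartitionedTableFunctionDef tabDef = ptfDesc.getStartOfChain();
    return tabDef.isTransformsRawInput();
  }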
