Package: org.apache.hadoop.hive.ql.parse

Examples of org.apache.hadoop.hive.ql.parse.RowResolver


 
  RowSchema getQueryOutputRowSchema(QueryDef qry, JobConf jcfg) throws WindowingException
  {
    String tabAlias = qry.getOutput().getSpec().getHiveTable();
    StructObjectInspector rowObjectInspector = qry.getSelectList().getOI();
    RowResolver rr = HiveUtils.getRowResolver(tabAlias, rowObjectInspector );
    return rr.getRowSchema();
  }
View Full Code Here


      HiveMetaStoreClient client = getClient(conf);

      db = validateDB(client, db);
      org.apache.hadoop.hive.ql.metadata.Table t = Hive.get(conf).getTable(db, table);
       StructObjectInspector rowObjectInspector = (StructObjectInspector) t.getDeserializer().getObjectInspector();
      RowResolver rwsch = getRowResolver(alias, rowObjectInspector ) ;
      
       for (FieldSchema part_col : t.getPartCols())
       {
              LOG.trace("Adding partition col: " + part_col);
              rwsch.put(alias, part_col.getName(),
                  new ColumnInfo(part_col.getName(),
                      TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), alias, true)
              );
        }
      
       Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf).iterator();
          //use a list for easy customization
          List<VirtualColumn> vcList = new ArrayList<VirtualColumn>();
          while (vcs.hasNext())
          {
            VirtualColumn vc = vcs.next();
            rwsch.put(alias, vc.getName(),
                new ColumnInfo(vc.getName(),
                    vc.getTypeInfo(), alias, true, vc.getIsHidden()
                    )
            );
            vcList.add(vc);
View Full Code Here

  public static RowResolver getRowResolver(String tabAlias, StructObjectInspector rowObjectInspector ) throws WindowingException
  {
    LOG.info("HiveUtils::getRowResolver invoked on ObjectInspector");
    try
    {
      RowResolver rwsch = new RowResolver();
       List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
       for (int i = 0; i < fields.size(); i++)
       {
         rwsch.put(tabAlias, fields.get(i).getFieldName(),
               new ColumnInfo(fields.get(i).getFieldName(),
                   TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i).getFieldObjectInspector()),
                   tabAlias,
                   false)
         );
View Full Code Here

    return createFilter(op, pushDownPreds.getFinalCandidates(), owi);
  }

  protected static Object createFilter(Operator op,
      Map<String, List<ExprNodeDesc>> predicates, OpWalkerInfo owi) {
    RowResolver inputRR = owi.getRowResolver(op);

    // combine all predicates into a single expression
    List<ExprNodeDesc> preds = new ArrayList<ExprNodeDesc>();
    Iterator<List<ExprNodeDesc>> iterator = predicates.values().iterator();
    while (iterator.hasNext()) {
      for (ExprNodeDesc pred : iterator.next()) {
        preds = ExprNodeDescUtils.split(pred, preds);
      }
    }

    if (preds.isEmpty()) {
      return null;
    }

    ExprNodeDesc condn = ExprNodeDescUtils.mergePredicates(preds);
    if(!(condn instanceof ExprNodeGenericFuncDesc)) {
      return null;
    }

    if (op instanceof TableScanOperator) {
      boolean pushFilterToStorage;
      HiveConf hiveConf = owi.getParseContext().getConf();
      pushFilterToStorage =
        hiveConf.getBoolVar(HiveConf.ConfVars.HIVEOPTPPD_STORAGE);
      if (pushFilterToStorage) {
        condn = pushFilterToStorageHandler(
          (TableScanOperator) op,
          (ExprNodeGenericFuncDesc)condn,
          owi,
          hiveConf);
        if (condn == null) {
          // we pushed the whole thing down
          return null;
        }
      }
    }

    // add new filter op
    List<Operator<? extends OperatorDesc>> originalChilren = op
        .getChildOperators();
    op.setChildOperators(null);
    Operator<FilterDesc> output = OperatorFactory.getAndMakeChild(
        new FilterDesc(condn, false), new RowSchema(inputRR.getColumnInfos()),
        op);
    output.setChildOperators(originalChilren);
    for (Operator<? extends OperatorDesc> ch : originalChilren) {
      List<Operator<? extends OperatorDesc>> parentOperators = ch
          .getParentOperators();
View Full Code Here

            // parseJoinCondPopulateAlias().
            String alias = BaseSemanticAnalyzer.unescapeIdentifier(
                nd.getChild(0).getChild(0).getText().toLowerCase());
            String column = BaseSemanticAnalyzer.unescapeIdentifier(
                nd.getChild(1).getText().toLowerCase());
            RowResolver rr=aliastoRR.get(alias);
            if (rr == null) {
              return null;
            }
            return rr.get(alias, column);
          }
        }
      }
      return null;
    }
View Full Code Here

        exprs = ExprNodeDescUtils.split(((FilterOperator)parent).getConf().getPredicate(), exprs);
        ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
        ((FilterOperator)parent).getConf().setPredicate(merged);
      } else {
        ExprNodeDesc merged = ExprNodeDescUtils.mergePredicates(exprs);
        RowResolver parentRR = pGraphContext.getOpParseCtx().get(parent).getRowResolver();
        Operator<FilterDesc> newFilter = createFilter(reducer, parent, parentRR, merged);
        pGraphContext.getOpParseCtx().put(newFilter, new OpParseContext(parentRR));
      }
    }
View Full Code Here

  /*
   * add array<struct> to the list of columns
   */
  protected static RowResolver createSelectListRR(MatchPath evaluator,
      PTFInputDef inpDef) throws SemanticException {
    RowResolver rr = new RowResolver();
    RowResolver inputRR = inpDef.getOutputShape().getRr();

    evaluator.inputColumnNamesMap = new HashMap<String,String>();
    ArrayList<String> inputColumnNames = new ArrayList<String>();

    ArrayList<ObjectInspector> inpColOIs = new ArrayList<ObjectInspector>();

    for (ColumnInfo inpCInfo : inputRR.getColumnInfos()) {
      ColumnInfo cInfo = new ColumnInfo(inpCInfo);
      String colAlias = cInfo.getAlias();

      String[] tabColAlias = inputRR.reverseLookup(inpCInfo.getInternalName());
      if (tabColAlias != null) {
        colAlias = tabColAlias[1];
      }
      ASTNode inExpr = null;
      inExpr = PTFTranslator.getASTNode(inpCInfo, inputRR);
View Full Code Here

      setupSymbolFunctionChain(evaluator);

      /*
       * setup OI for input to resultExpr select list
       */
      RowResolver selectListInputRR = MatchPath.createSelectListRR(evaluator, tDef.getInput());

      /*
       * parse ResultExpr Str and setup OI.
       */
      ResultExpressionParser resultExprParser =
View Full Code Here

  // Replace the cRS with a SEL operator.
  // If the child of cRS is EXT, the EXT should also be removed.
  protected static SelectOperator replaceOperatorWithSelect(Operator<?> operator,
      ParseContext context, AbstractCorrelationProcCtx procCtx)
      throws SemanticException {
    RowResolver inputRR = context.getOpParseCtx().get(operator).getRowResolver();
    SelectDesc select = new SelectDesc(null, null);

    Operator<?> parent = getSingleParent(operator);
    Operator<?> child = getSingleChild(operator);

    parent.getChildOperators().clear();

    SelectOperator sel = (SelectOperator) putOpInsertMap(
        OperatorFactory.getAndMakeChild(select, new RowSchema(inputRR
            .getColumnInfos()), parent), inputRR, context);

    sel.setColumnExprMap(operator.getColumnExprMap());

    sel.setChildOperators(operator.getChildOperators());
View Full Code Here

      for (AggregationDesc aggr : cGBYm.getConf().getAggregators()) {
        aggr.setMode(GenericUDAFEvaluator.Mode.COMPLETE);
      }
      cGBYr.setColumnExprMap(cGBYm.getColumnExprMap());
      cGBYr.setSchema(cGBYm.getSchema());
      RowResolver resolver = context.getOpParseCtx().get(cGBYm).getRowResolver();
      context.getOpParseCtx().get(cGBYr).setRowResolver(resolver);
    } else {
      // pRS-cRS-cGBYr (no map aggregation) --> pRS-cGBYr(COMPLETE)
      // revert expressions of cGBYr to that of cRS
      cGBYr.getConf().setKeys(ExprNodeDescUtils.backtrack(cGBYr.getConf().getKeys(), cGBYr, cRS));
      for (AggregationDesc aggr : cGBYr.getConf().getAggregators()) {
        aggr.setParameters(ExprNodeDescUtils.backtrack(aggr.getParameters(), cGBYr, cRS));
      }

      Map<String, ExprNodeDesc> oldMap = cGBYr.getColumnExprMap();
      RowResolver oldRR = context.getOpParseCtx().get(cGBYr).getRowResolver();

      Map<String, ExprNodeDesc> newMap = new HashMap<String, ExprNodeDesc>();
      RowResolver newRR = new RowResolver();

      List<String> outputCols = cGBYr.getConf().getOutputColumnNames();
      for (int i = 0; i < outputCols.size(); i++) {
        String colName = outputCols.get(i);
        String[] nm = oldRR.reverseLookup(colName);
        ColumnInfo colInfo = oldRR.get(nm[0], nm[1]);
        newRR.put(nm[0], nm[1], colInfo);
        ExprNodeDesc colExpr = ExprNodeDescUtils.backtrack(oldMap.get(colName), cGBYr, cRS);
        if (colExpr != null) {
          newMap.put(colInfo.getInternalName(), colExpr);
        }
      }
      cGBYr.setColumnExprMap(newMap);
      cGBYr.setSchema(new RowSchema(newRR.getColumnInfos()));
      context.getOpParseCtx().get(cGBYr).setRowResolver(newRR);
    }
    cGBYr.getConf().setMode(GroupByDesc.Mode.COMPLETE);

    removeOperator(cRS, cGBYr, parent, context);
View Full Code Here

TOP

Related Classes of org.apache.hadoop.hive.ql.parse.RowResolver

Copyright © 2018 www.massapicom. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.