Package org.apache.hadoop.hive.ql.plan

Examples of org.apache.hadoop.hive.ql.plan.DynamicPruningEventDesc

DynamicPruningEventDesc is the event descriptor used for Tez dynamic partition pruning: it identifies the target TableScanOperator, the target column to prune on, the partition key expression, and the table descriptor of the key values broadcast to the application master.


    // Attach the pruning event's metadata to the MapWork of its target table
    // scan (this body appears to come from GenTezUtils.processAppMasterEvent).
    if (procCtx.abandonedEventOperatorSet.contains(event)) {
      // the event was abandoned during optimization; nothing more to do
      return;
    }

    DynamicPruningEventDesc eventDesc = (DynamicPruningEventDesc)event.getConf();
    TableScanOperator ts = eventDesc.getTableScan();

    MapWork work = (MapWork) procCtx.rootToWorkMap.get(ts);
    if (work == null) {
      throw new AssertionError("No work found for tablescan " + ts);
    }

    BaseWork enclosingWork = getEnclosingWork(event, procCtx);
    if (enclosingWork == null) {
      throw new AssertionError("Cannot find work for operator" + event);
    }
    String sourceName = enclosingWork.getName();

    // store the vertex name in the operator pipeline
    eventDesc.setVertexName(work.getName());
    eventDesc.setInputName(work.getAliases().get(0));

    // store table descriptor in map-work
    if (!work.getEventSourceTableDescMap().containsKey(sourceName)) {
      work.getEventSourceTableDescMap().put(sourceName, new LinkedList<TableDesc>());
    }
    List<TableDesc> tables = work.getEventSourceTableDescMap().get(sourceName);
    tables.add(eventDesc.getTable());

    // store column name in map-work
    if (!work.getEventSourceColumnNameMap().containsKey(sourceName)) {
      work.getEventSourceColumnNameMap().put(sourceName, new LinkedList<String>());
    }
    List<String> columns = work.getEventSourceColumnNameMap().get(sourceName);
    columns.add(eventDesc.getTargetColumnName());

    // store partition key expr in map-work
    if (!work.getEventSourcePartKeyExprMap().containsKey(sourceName)) {
      work.getEventSourcePartKeyExprMap().put(sourceName, new LinkedList<ExprNodeDesc>());
    }
    List<ExprNodeDesc> keys = work.getEventSourcePartKeyExprMap().get(sourceName);
    keys.add(eventDesc.getPartKey());

  }
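
The three eventSource* maps above are kept in lockstep: for a given source vertex name, entry i of the table-desc list, the column-name list, and the part-key list all describe the same pruning event. Below is a minimal, self-contained sketch of the same get-or-create pattern using Map.computeIfAbsent; the class and field names are hypothetical, not Hive's.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical sketch: one of the per-source maps from the snippet,
// with the containsKey/put sequence replaced by computeIfAbsent.
public class EventSourceColumns {
  private final Map<String, List<String>> columnsBySourceVertex =
      new HashMap<String, List<String>>();

  public void addColumn(String sourceVertex, String targetColumn) {
    columnsBySourceVertex
        .computeIfAbsent(sourceVertex, k -> new ArrayList<String>())
        .add(targetColumn);
  }

  public static void main(String[] args) {
    EventSourceColumns cols = new EventSourceColumns();
    cols.addColumn("Map 1", "ds");
    cols.addColumn("Map 1", "hr");
    System.out.println(cols.columnsBySourceVertex); // {Map 1=[ds, hr]}
  }
}

The same replacement applies verbatim to the table-desc and part-key maps in the snippet.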


    // Map the group-by output column to its expression, then attach the
    // event operator that broadcasts the pruning values.
    Map<String, ExprNodeDesc> colMap = new HashMap<String, ExprNodeDesc>();
    colMap.put(outputNames.get(0), groupByExpr);
    groupByOp.setColumnExprMap(colMap);

    // finally add the event broadcast operator
    DynamicPruningEventDesc eventDesc = new DynamicPruningEventDesc();
    eventDesc.setTableScan(ts);
    eventDesc.setTable(PlanUtils.getReduceValueTableDesc(PlanUtils
        .getFieldSchemasFromColumnList(keyExprs, "key")));
    eventDesc.setTargetColumnName(column);
    eventDesc.setPartKey(partKey);

    OperatorFactory.getAndMakeChild(eventDesc, groupByOp);
  }
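
OperatorFactory.getAndMakeChild creates an operator for the given descriptor and links it as a child of groupByOp, so the pruning values produced by the group-by flow into the new event broadcast operator. Below is a hedged, self-contained sketch of that parent/child wiring; the Op class is illustrative only, not Hive's OperatorFactory implementation.

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch of the parent/child wiring performed by
// OperatorFactory.getAndMakeChild; not Hive's actual implementation.
class Op {
  final String name;
  final List<Op> parents = new ArrayList<Op>();
  final List<Op> children = new ArrayList<Op>();

  Op(String name) {
    this.name = name;
  }

  static Op getAndMakeChild(String descName, Op parent) {
    Op child = new Op(descName);
    child.parents.add(parent);      // new operator remembers its parent
    parent.children.add(child);     // parent now feeds the new operator
    return child;
  }
}

public class OperatorWiringSketch {
  public static void main(String[] args) {
    Op groupBy = new Op("GBY");
    Op event = Op.getAndMakeChild("EVENT", groupBy);
    System.out.println(groupBy.children.get(0) == event); // true
  }
}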

  // NodeProcessor that records each pruning-event operator and links it
  // to the table scan it targets.
  @Override
  public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
      throws SemanticException {
    GenTezProcContext context = (GenTezProcContext) procCtx;
    AppMasterEventOperator event = (AppMasterEventOperator) nd;
    DynamicPruningEventDesc desc = (DynamicPruningEventDesc) event.getConf();

    // simply need to remember that we've seen an event operator.
    context.eventOperatorSet.add(event);

    // and remember link between event and table scan
    List<AppMasterEventOperator> events;
    if (context.tsToEventMap.containsKey(desc.getTableScan())) {
      events = context.tsToEventMap.get(desc.getTableScan());
    } else {
      events = new ArrayList<AppMasterEventOperator>();
    }
    events.add(event);
    context.tsToEventMap.put(desc.getTableScan(), events);
    return true;
  }
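
A processor like this is driven by a graph walker: the compiler walks the operator DAG and dispatches each node, together with the path leading to it, to the processor registered for that node type. The following self-contained sketch shows that dispatch shape; the Mini* types are hypothetical stand-ins, not Hive's org.apache.hadoop.hive.ql.lib API.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Hypothetical sketch of the walker/processor dispatch shape behind
// process(Node, Stack, NodeProcessorCtx, ...); not Hive's walker API.
interface MiniProcessor {
  void process(MiniNode nd, Deque<MiniNode> pathFromRoot);
}

class MiniNode {
  final String name;
  final List<MiniNode> children = new ArrayList<MiniNode>();

  MiniNode(String name) {
    this.name = name;
  }
}

public class MiniWalker {
  public static void walk(MiniNode root, MiniProcessor proc) {
    visit(root, new ArrayDeque<MiniNode>(), proc);
  }

  private static void visit(MiniNode nd, Deque<MiniNode> path, MiniProcessor proc) {
    path.push(nd);
    proc.process(nd, path);                 // dispatch on the visited node
    for (MiniNode child : nd.children) {
      visit(child, path, proc);
    }
    path.pop();
  }

  public static void main(String[] args) {
    MiniNode ts = new MiniNode("TS");
    ts.children.add(new MiniNode("EVENT"));
    walk(ts, (nd, path) -> System.out.println(nd.name + " depth=" + path.size()));
  }
}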