Examples of InputInfo


Examples of com.facebook.giraph.hive.impl.input.InputInfo

    Function<String, Integer> columnNameToId = new Function<String, Integer>() {
      @Override
      public Integer apply(String input) {
        return tableSchema.positionOf(input);
      }
    };
    List<Integer> columnIds = transform(inputDesc.getColumns(), columnNameToId);

    InputInfo inputInfo = new InputInfo(tableSchema, columnIds);

    if (table.getPartitionKeysSize() == 0) {
      // Unpartitioned table: a single InputPartition covers everything.
      inputInfo.addPartition(new InputPartition(table));
    } else {
      // Partitioned table: fetch the partitions matching the user's filter.
      List<Partition> partitions = null;
      try {
        partitions = client.get_partitions_by_filter(dbName, tableName,
            inputDesc.getPartitionFilter(), (short) -1);
      } catch (NoSuchObjectException e) {
        throw new TException(e); // keep the original exception as the cause
      } catch (MetaException e) {
        throw new TException(e);
      }
      for (Partition partition : partitions) {
        inputInfo.addPartition(new InputPartition(table, partition));
      }
    }

    InputConf inputConf = new InputConf(conf, profileId);
    inputConf.writeNumSplitsToConf(inputDesc.getNumSplits());
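
For context, the columnIds list above comes from Guava's transform (statically imported in the original), which maps each requested column name to its position in the table schema. A minimal, self-contained sketch of the same pattern, with a plain list standing in for HiveTableSchema.positionOf:

    import com.google.common.base.Function;
    import com.google.common.collect.Lists;
    import java.util.Arrays;
    import java.util.List;

    public class ColumnIdDemo {
      public static void main(String[] args) {
        final List<String> schemaColumns = Arrays.asList("id", "name", "ts");

        // Mirror of tableSchema.positionOf(input): resolve a column name
        // to its index in the schema.
        Function<String, Integer> columnNameToId = new Function<String, Integer>() {
          @Override
          public Integer apply(String input) {
            return schemaColumns.indexOf(input);
          }
        };

        List<Integer> columnIds =
            Lists.transform(Arrays.asList("name", "ts"), columnNameToId);
        System.out.println(columnIds); // prints [1, 2]
      }
    }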

Examples of com.facebook.giraph.hive.impl.input.InputInfo

    InputConf inputConf = new InputConf(conf, myProfileId);

    LOG.info("getSplits for profile " + inputConf.getProfileId());

    JobConf jobConf = new JobConf(conf);
    InputInfo inputInfo = inputConf.readInputInfoFromConf();

    int partitionNum = 0;
    List<InputSplit> splits = Lists.newArrayList();
    Iterable<InputPartition> partitions = inputInfo.getPartitions();

    for (InputPartition inputPartition : partitions) {
      org.apache.hadoop.mapred.InputFormat baseInputFormat =
          inputPartition.makeInputFormat(conf);
      HadoopUtils.setInputDir(jobConf, inputPartition.getLocation());

      int splitsRequested = inputConf.readNumSplitsFromConf();
      org.apache.hadoop.mapred.InputSplit[] baseSplits =
          baseInputFormat.getSplits(jobConf, splitsRequested);
      LOG.info("Requested " + splitsRequested + " from partition (" +
          partitionNum + " out of " + Iterables.size(partitions) +
          ") values: " +
          inputPartition.getInputSplitData().getPartitionValues() +
          ", got " + baseSplits.length + " splits");

      for (org.apache.hadoop.mapred.InputSplit baseSplit : baseSplits) {
        InputSplit split = new HiveApiInputSplit(baseInputFormat, baseSplit,
            inputInfo.getTableSchema(), inputInfo.getColumnIds(),
            inputPartition.getInputSplitData(), conf);
        splits.add(split);
      }

      partitionNum++;
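
One detail worth noting in the loop above: Iterables.size(partitions) is recomputed on every iteration just to build the log line, and it walks the entire iterable each time unless the underlying type is a Collection. A sketch of the usual fix, hoisting the count out of the loop (the elided body is unchanged):

    int partitionCount = Iterables.size(partitions); // computed once, not per iteration
    for (InputPartition inputPartition : partitions) {
      // ... request and wrap baseSplits exactly as above ...
      LOG.info("partition " + partitionNum + " out of " + partitionCount);
      partitionNum++;
    }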

Examples of com.facebook.giraph.hive.impl.input.InputInfo

    HiveTableSchemas.putForName(conf, dbName, tableName, tableSchema);
    HiveTableSchemas.putForProfile(conf, profileId, tableSchema);

    List<Integer> columnIds = transform(inputDesc.getColumns(), schemaLookupFunc(tableSchema));

    InputInfo inputInfo = new InputInfo(tableSchema, columnIds);

    if (table.getPartitionKeysSize() == 0) {
      // Unpartitioned table: a single InputPartition covers everything.
      inputInfo.addPartition(InputPartition.newFromHiveTable(table));
    } else {
      // Partitioned table: fetch the partitions matching the user's filter.
      List<Partition> partitions = null;
      try {
        partitions = client.get_partitions_by_filter(dbName, tableName,
            inputDesc.getPartitionFilter(), (short) -1);
      } catch (NoSuchObjectException e) {
        throw new TException(e); // keep the original exception as the cause
      } catch (MetaException e) {
        throw new TException(e);
      }
      for (Partition partition : partitions) {
        inputInfo.addPartition(InputPartition.newFromHivePartition(partition));
      }
    }

    InputConf inputConf = new InputConf(conf, profileId);
    inputConf.writeNumSplitsToConf(inputDesc.getNumSplits());
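
Compared with the earlier snippet, this revision publishes the schema under both the table name and the profile id, and replaces the InputPartition constructors with named factory methods, which keeps the two creation paths explicit at the call site:

    // Unpartitioned table: one descriptor for the whole table.
    inputInfo.addPartition(InputPartition.newFromHiveTable(table));

    // Partitioned table: one descriptor per matching Hive partition.
    inputInfo.addPartition(InputPartition.newFromHivePartition(partition));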


Examples of com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo

public class WindowFunctionTranslation
{
  public static WindowFunctionDef translate(QueryDef qDef, TableFuncDef windowTableFnDef, WindowFunctionSpec wFnSpec) throws WindowingException
  {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    InputInfo iInfo = tInfo.getInputInfo(windowTableFnDef.getInput());

    WindowFunctionDef wFnDef = new WindowFunctionDef();
    wFnDef.setSpec(wFnSpec);
   

Examples of com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo

    {
      throw new WindowingException("Ranking Functions can take no arguments");
    }
   
    QueryInputDef inpDef = windowTableFnDef.getInput();
    InputInfo inpInfo = qDef.getTranslationInfo().getInputInfo(inpDef);
    OrderDef oDef = getTableFuncOrderDef(windowTableFnDef);
    ArrayList<OrderColumnDef> oCols = oDef.getColumns();
    for(OrderColumnDef oCol : oCols)
    {
      wFnDef.addArg(TranslateUtils.buildArgDef(qDef, inpInfo, oCol.getExpression()));

Examples of com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo

 
  public static void addInputColumnsToList(QueryDef qDef, TableFuncDef windowTableFnDef,
      ArrayList<String> fieldNames, ArrayList<ObjectInspector> fieldOIs)
  {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    InputInfo iInfo = tInfo.getInputInfo(windowTableFnDef.getInput());
   
    StructObjectInspector OI = (StructObjectInspector) iInfo.getOI();
    for(StructField f : OI.getAllStructFieldRefs() )
    {
      fieldNames.add(f.getFieldName());
      fieldOIs.add(f.getFieldObjectInspector());
    }
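
The fieldNames/fieldOIs pair this helper fills in is exactly what Hive's serde2 ObjectInspectorFactory needs to describe an output row. A sketch of how a caller might combine the two (the factory call is standard Hive serde2 API; wiring it to this helper is an assumption, not shown above):

    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    WindowFunctionTranslation.addInputColumnsToList(qDef, windowTableFnDef,
        fieldNames, fieldOIs);

    // Build a struct ObjectInspector from the parallel name/inspector lists.
    StructObjectInspector outputOI = ObjectInspectorFactory
        .getStandardStructObjectInspector(fieldNames, fieldOIs);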

Examples of com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo

      return null;
    }
   
    String desc = getInputDescription(qDef, tFnDef);
    TableFunctionEvaluator tFn = tFnDef.getFunction();
    InputInfo iInfo = null;
    if ( tFn.isTransformsRawInput() )
    {
      iInfo = tInfo.getMapInputInfo(tFnDef);
    }
    else

Examples of com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo

   
    WhereDef whDef = new WhereDef();
    whDef.setExpression(wExpr);
   
    QueryInputDef iDef = qDef.getInput();
    InputInfo iInfo = tInfo.getInputInfo(iDef);
   
    ExprNodeDesc exprNode = TranslateUtils.buildExprNode(wExpr, iInfo.getTypeCheckCtx());
    ExprNodeEvaluator exprEval = WindowingExprNodeEvaluatorFactory.get(tInfo, exprNode);
    ObjectInspector oi = TranslateUtils.initExprNodeEvaluator(qDef, exprNode, exprEval, iInfo);
   
    try
    {
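
The try block is cut off above; for reference, an evaluator built this way is normally initialized once against the input ObjectInspector and then applied per row, with the result read back through the returned inspector. A generic sketch of that pattern (the row variable and the boolean read are illustrative assumptions, not the original code):

    // Apply the where-clause predicate to one input row.
    Object result = exprEval.evaluate(row);
    Boolean keep = (Boolean)
        ((PrimitiveObjectInspector) oi).getPrimitiveJavaObject(result);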

Examples of com.sap.hadoop.windowing.query2.translate.QueryTranslationInfo.InputInfo

 
  public static void translateSelectExprs(QueryDef qDef) throws WindowingException
  {
    QueryTranslationInfo tInfo = qDef.getTranslationInfo();
    QueryInputDef iDef = qDef.getInput();
    InputInfo iInfo = tInfo.getInputInfo(iDef);
    SelectDef selectDef = qDef.getSelectList();
    SelectSpec selectSpec = qDef.getSpec().getSelectList();
    Iterator<Object> selectExprsAndAliases = selectSpec.getColumnListAndAlias();
    int i = 0;
    ColumnDef cDef = null;