Package: com.sap.hadoop.windowing.runtime2

Usage examples of com.sap.hadoop.windowing.runtime2.WindowingShell


  }
 
  private static HiveQueryDef translate(QueryDef qDef, HiveQuerySpec spec) throws WindowingException
  {
    HiveQueryDef def = new HiveQueryDef();
    HiveQueryExecutor hiveQryExec = qDef.getTranslationInfo().getHiveQueryExecutor();
    Hive hive = qDef.getTranslationInfo().getHive();
   
    String tableName = hiveQryExec.createTableAsQuery(spec.getHiveQuery());
    HiveTableSpec tSpec = new HiveTableSpec();
    tSpec.setDbName(hive.getCurrentDatabase());
    tSpec.setTableName(tableName);
    tSpec.setPartition(spec.getPartition());
    tSpec.setOrder(spec.getOrder());
View Full Code Here


    Configuration conf = HOME();
    conf.setBoolean(Constants.WINDOWING_TEST_LOCAL_MODE, true);
    conf.set(Constants.WINDOWING_TEST_DATA_DIR, basedir);
    HiveConf hCfg = new HiveConf(conf, conf.getClass());
   
    wshell = new WindowingShell(hCfg, new Translator(), new LocalExecutor(new PrintStream(outStream)));
    //wshell.hiveQryExec = new ThriftBasedHiveQueryExecutor(conf)
    outPrinter = null;
  }
View Full Code Here

  public static void setupClass() throws Exception
  {
    setupCluster();
   
    HiveConf hCfg = new HiveConf(conf, conf.getClass());
    wshell = new WindowingShell(hCfg, new Translator(), new LocalExecutor(new PrintStream(outStream)));
    //wshell.setHiveQryExec(new ThriftBasedHiveQueryExecutor(conf));
    outPrinter = null;
  }
View Full Code Here

  public Partition execute(Partition iPart)
      throws WindowingException
  {
    PartitionIterator<Object> pItr = iPart.iterator();
    RuntimeUtils.connectLeadLagFunctionsToPartition(qDef, pItr);
    Partition outP = new Partition(getPartitionClass(),
        getPartitionMemSize(), tDef.getSerde(), OI);
    execute(pItr, outP);
    return outP;
  }
View Full Code Here

 
  protected void processInputPartition() throws HiveException
  {
    try
    {
      Partition outPart = Executor.executeChain(qDef, inputPart);
      Executor.executeSelectList(qDef, outPart, new ForwardPTF());
    }
    catch (WindowingException we)
    {
      throw new HiveException("Cannot close PTFOperator.", we);
View Full Code Here

  protected void processMapFunction() throws HiveException
  {
    try
    {
      TableFuncDef tDef = RuntimeUtils.getFirstTableFunction(qDef);
      Partition outPart = tDef.getFunction().transformRawInput(inputPart);
      PartitionIterator<Object> pItr = outPart.iterator();
      while (pItr.hasNext())
      {
        Object oRow = pItr.next();
        forward(oRow, outputObjInspector);
      }
View Full Code Here

    try
    {
      SerDe serDe = (SerDe) wIn.getDeserializer();
      StructObjectInspector oI = (StructObjectInspector) serDe
          .getObjectInspector();
      Partition p = new Partition(partitionClass, partitionMemSize,
          serDe, oI);
      Writable w = wIn.createRow();
      while( wIn.next(w) != -1)
      {
        p.append(w);
      }
      return p;
    }
    catch (WindowingException we)
    {
View Full Code Here

  @SuppressWarnings({ "unchecked", "rawtypes" })
  @Override
  public void execute(PartitionIterator<Object> pItr, Partition outP) throws WindowingException
  {
    ArrayList<List<?>> oColumns = new ArrayList<List<?>>();
    Partition iPart = pItr.getPartition();
    StructObjectInspector inputOI;
    try {
      inputOI = (StructObjectInspector) iPart.getSerDe().getObjectInspector();
    } catch (SerDeException se) {
      throw new WindowingException(se);
    }
   
    try
    {
      for(WindowFunctionDef wFn : wFnDefs)
      {
        boolean processWindow = wFn.getWindow() != null;
        pItr.reset();
        if ( !processWindow )
        {
          GenericUDAFEvaluator fEval = wFn.getEvaluator();
          Object[] args = new Object[wFn.getArgs().size()];
          AggregationBuffer aggBuffer = fEval.getNewAggregationBuffer();
          while(pItr.hasNext())
          {
            Object row = pItr.next();
            int i =0;
            for(ArgDef arg : wFn.getArgs())
            {
              args[i++] = arg.getExprEvaluator().evaluate(row);
            }
            fEval.aggregate(aggBuffer, args);
          }
          Object out = fEval.evaluate(aggBuffer);
          WindowFunctionInfo wFnInfo = FunctionRegistry.getWindowFunctionInfo(wFn.getSpec().getName());
          if ( !wFnInfo.isPivotResult())
          {
            out = new SameList(iPart.size(), out);
          }
          oColumns.add((List<?>)out);
        }
        else
        {
          oColumns.add(executeFnwithWindow(getQueryDef(), wFn, iPart));
        }
      }
     
      for(int i=0; i < iPart.size(); i++)
      {
        ArrayList oRow = new ArrayList();
        Object iRow = iPart.getAt(i);
       
        for(StructField f : inputOI.getAllStructFieldRefs())
        {
          oRow.add(inputOI.getStructFieldData(iRow, f));
        }
View Full Code Here

    setupCluster();
   
    HiveConf hCfg = new HiveConf(conf, conf.getClass());
    wshell = new WindowingShell(hCfg, new Translator(), new MRExecutor());
    //wshell.setHiveQryExec(new ThriftBasedHiveQueryExecutor(conf));
    outPrinter = new QueryOutputPrinter(new TestLogHelper(false));
  }
View Full Code Here

  {
    this.hiveDriver = hiveDriver;
    setupConf(hiveDriver.getCfg());
    wshell = new WindowingShell( hiveDriver.getCfg(), new Translator(), new MRExecutor());
    wshell.setHiveQryExec(this);
    qryOutPrntr = new QueryOutputPrinter(hiveDriver.getHiveConsole());
    hiveConsole = hiveDriver.getHiveConsole();
  }
View Full Code Here

TOP

Related Classes of com.sap.hadoop.windowing.runtime2.WindowingShell

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware@gmail.com.