Examples of FakeCaptureOutputOperator


Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    // NOTE(review): local is spelled "treshold" — typo for "threshold"; rename if it is
    // not referenced beyond this excerpt. Presumably sets the flush threshold as
    // 100KB relative to the max heap size — TODO confirm against setMemoryThreshold docs.
    float treshold = 100.0f*1024.0f/maxMemory;
    desc.setMemoryThreshold(treshold);

    // Vectorized GROUP BY operator under test.
    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    // Fake child operator that captures every row vgo forwards downstream.
    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);

    // Count each forwarded row; the counter is asserted elsewhere in the test.
    this.outputRowCount = 0;
    out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {
      @Override
      public void inspectRow(Object row, int tag) throws HiveException {
        ++outputRowCount;
      }
    });
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    desc.setAggregators(aggs);
    desc.setKeys(keysDesc);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
    out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {

      private int rowIndex;
      private String aggregateName;
      private Map<Object,Object> expected;
      private Set<Object> keys;

      @Override
      public void inspectRow(Object row, int tag) throws HiveException {
        assertTrue(row instanceof Object[]);
        Object[] fields = (Object[]) row;
        assertEquals(columnTypes.length, fields.length);
        ArrayList<Object> keyValue = new ArrayList<Object>(columnTypes.length-1);
        for(int i=0; i<columnTypes.length-1; ++i) {
          Object key = fields[i];
          if (null == key) {
            keyValue.add(null);
          } else if (key instanceof Text) {
            Text txKey = (Text)key;
            keyValue.add(txKey.toString());
          } else if (key instanceof ByteWritable) {
            ByteWritable bwKey = (ByteWritable)key;
            keyValue.add(bwKey.get());
          } else if (key instanceof ShortWritable) {
            ShortWritable swKey = (ShortWritable)key;
            keyValue.add(swKey.get());
          } else if (key instanceof IntWritable) {
            IntWritable iwKey = (IntWritable)key;
            keyValue.add(iwKey.get());
          } else if (key instanceof LongWritable) {
            LongWritable lwKey = (LongWritable)key;
            keyValue.add(lwKey.get());
          } else if (key instanceof TimestampWritable) {
            TimestampWritable twKey = (TimestampWritable)key;
            keyValue.add(twKey.getTimestamp());
          } else if (key instanceof DoubleWritable) {
            DoubleWritable dwKey = (DoubleWritable)key;
            keyValue.add(dwKey.get());
          } else if (key instanceof FloatWritable) {
            FloatWritable fwKey = (FloatWritable)key;
            keyValue.add(fwKey.get());
          } else if (key instanceof BooleanWritable) {
            BooleanWritable bwKey = (BooleanWritable)key;
            keyValue.add(bwKey.get());
          } else {
            Assert.fail(String.format("Not implemented key output type %s: %s",
                key.getClass().getName(), key));
          }
        }

        String keyAsString = Arrays.deepToString(keyValue.toArray());
        assertTrue(expected.containsKey(keyValue));
        Object expectedValue = expected.get(keyValue);
        Object value = fields[columnTypes.length-1];
        Validator validator = getValidator(aggregateName);
        validator.validate(keyAsString, expectedValue, new Object[] {value});
        keys.add(keyValue);
      }

      private FakeCaptureOutputOperator.OutputInspector init(
          String aggregateName, Map<Object,Object> expected, Set<Object> keys) {
        this.aggregateName = aggregateName;
        this.expected = expected;
        this.keys = keys;
        return this;
      }
    }.init(aggregateName, expected, keys));

    for (VectorizedRowBatch unit: data) {
      vgo.processOp(unit,  0);
    }
    vgo.close(false);

    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(expected.size(), outBatchList.size());
    assertEquals(expected.size(), keys.size());
  }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    keysDesc.add(keyExp);
    desc.setKeys(keysDesc);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
    out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {

      private int rowIndex;
      private String aggregateName;
      private Map<Object,Object> expected;
      private Set<Object> keys;

      @Override
      public void inspectRow(Object row, int tag) throws HiveException {
        assertTrue(row instanceof Object[]);
        Object[] fields = (Object[]) row;
        assertEquals(2, fields.length);
        Object key = fields[0];
        Object keyValue = null;
        if (null == key) {
          keyValue = null;
        } else if (key instanceof ByteWritable) {
          ByteWritable bwKey = (ByteWritable)key;
          keyValue = bwKey.get();
        } else if (key instanceof ShortWritable) {
          ShortWritable swKey = (ShortWritable)key;
          keyValue = swKey.get();
        } else if (key instanceof IntWritable) {
          IntWritable iwKey = (IntWritable)key;
          keyValue = iwKey.get();
        } else if (key instanceof LongWritable) {
          LongWritable lwKey = (LongWritable)key;
          keyValue = lwKey.get();
        } else if (key instanceof TimestampWritable) {
          TimestampWritable twKey = (TimestampWritable)key;
          keyValue = twKey.getTimestamp();
        } else if (key instanceof DoubleWritable) {
          DoubleWritable dwKey = (DoubleWritable)key;
          keyValue = dwKey.get();
        } else if (key instanceof FloatWritable) {
          FloatWritable fwKey = (FloatWritable)key;
          keyValue = fwKey.get();
        } else if (key instanceof BooleanWritable) {
          BooleanWritable bwKey = (BooleanWritable)key;
          keyValue = bwKey.get();
        } else if (key instanceof HiveDecimalWritable) {
            HiveDecimalWritable hdwKey = (HiveDecimalWritable)key;
            keyValue = hdwKey.getHiveDecimal();
        } else {
          Assert.fail(String.format("Not implemented key output type %s: %s",
              key.getClass().getName(), key));
        }

        String keyValueAsString = String.format("%s", keyValue);

        assertTrue(expected.containsKey(keyValue));
        Object expectedValue = expected.get(keyValue);
        Object value = fields[1];
        Validator validator = getValidator(aggregateName);
        validator.validate(keyValueAsString, expectedValue, new Object[] {value});
        keys.add(keyValue);
      }

      private FakeCaptureOutputOperator.OutputInspector init(
          String aggregateName, Map<Object,Object> expected, Set<Object> keys) {
        this.aggregateName = aggregateName;
        this.expected = expected;
        this.keys = keys;
        return this;
      }
    }.init(aggregateName, expected, keys));

    for (VectorizedRowBatch unit: data) {
      vgo.processOp(unit,  0);
    }
    vgo.close(false);

    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(expected.size(), outBatchList.size());
    assertEquals(expected.size(), keys.size());
  }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    // COUNT(*) with no grouping keys: global aggregation over all batches.
    GroupByDesc desc = buildGroupByDescCountStar (ctx);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    // Fake child operator that records every row vgo forwards.
    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);

    // Feed all batches, then close() to flush the aggregation result.
    for (VectorizedRowBatch unit: data) {
      vgo.processOp(unit,  0);
    }
    vgo.close(false);

    // Global aggregation produces exactly one output row.
    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(1, outBatchList.size());

    Object result = outBatchList.get(0);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    // Keyless aggregate over a string-typed column "A".
    GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
        TypeInfoFactory.stringTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    // Fake child operator that records every row vgo forwards.
    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);

    // Feed all batches, then close() to flush the aggregation result.
    for (VectorizedRowBatch unit: data) {
      vgo.processOp(unit,  0);
    }
    vgo.close(false);

    // Global aggregation produces exactly one output row.
    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(1, outBatchList.size());

    Object result = outBatchList.get(0);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

        // Keyless aggregate over a decimal(30,4)-typed column "A".
        GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A",
            TypeInfoFactory.getDecimalTypeInfo(30, 4));

        VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

        // Fake child operator that records every row vgo forwards.
        FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
        vgo.initialize(null, null);

        // Feed all batches, then close() to flush the aggregation result.
        for (VectorizedRowBatch unit: data) {
          vgo.processOp(unit,  0);
        }
        vgo.close(false);

        // Global aggregation produces exactly one output row.
        List<Object> outBatchList = out.getCapturedRows();
        assertNotNull(outBatchList);
        assertEquals(1, outBatchList.size());

        Object result = outBatchList.get(0);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    // Keyless aggregate over a double-typed column "A".
    GroupByDesc desc = buildGroupByDescType (ctx, aggregateName, "A",
        TypeInfoFactory.doubleTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    // Fake child operator that records every row vgo forwards.
    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);

    // Feed all batches, then close() to flush the aggregation result.
    for (VectorizedRowBatch unit: data) {
      vgo.processOp(unit,  0);
    }
    vgo.close(false);

    // Global aggregation produces exactly one output row.
    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(1, outBatchList.size());

    Object result = outBatchList.get(0);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    // Keyless aggregate over a long-typed column "A".
    GroupByDesc desc = buildGroupByDescType(ctx, aggregateName, "A", TypeInfoFactory.longTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    // Fake child operator that records every row vgo forwards.
    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);

    // Feed all batches, then close() to flush the aggregation result.
    for (VectorizedRowBatch unit: data) {
      vgo.processOp(unit,  0);
    }
    vgo.close(false);

    // Global aggregation produces exactly one output row.
    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(1, outBatchList.size());

    Object result = outBatchList.get(0);
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    GroupByDesc desc = buildKeyGroupByDesc (ctx, aggregateName, "Value",
        TypeInfoFactory.longTypeInfo, "Key", TypeInfoFactory.longTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
    out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {

      private int rowIndex;
      private String aggregateName;
      private HashMap<Object,Object> expected;
      private Set<Object> keys;

      @Override
      public void inspectRow(Object row, int tag) throws HiveException {
        assertTrue(row instanceof Object[]);
        Object[] fields = (Object[]) row;
        assertEquals(2, fields.length);
        Object key = fields[0];
        Long keyValue = null;
        if (null != key) {
          assertTrue(key instanceof LongWritable);
          LongWritable lwKey = (LongWritable)key;
          keyValue = lwKey.get();
        }
        assertTrue(expected.containsKey(keyValue));
        String keyAsString = String.format("%s", key);
        Object expectedValue = expected.get(keyValue);
        Object value = fields[1];
        Validator validator = getValidator(aggregateName);
        validator.validate(keyAsString, expectedValue, new Object[] {value});
        keys.add(keyValue);
      }

      private FakeCaptureOutputOperator.OutputInspector init(
          String aggregateName, HashMap<Object,Object> expected, Set<Object> keys) {
        this.aggregateName = aggregateName;
        this.expected = expected;
        this.keys = keys;
        return this;
      }
    }.init(aggregateName, expected, keys));

    for (VectorizedRowBatch unit: data) {
      vgo.processOp(unit,  0);
    }
    vgo.close(false);

    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(expected.size(), outBatchList.size());
    assertEquals(expected.size(), keys.size());
  }
View Full Code Here

Examples of org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator

    GroupByDesc desc = buildKeyGroupByDesc (ctx, aggregateName, "Value",
       dataTypeInfo, "Key", TypeInfoFactory.stringTypeInfo);

    VectorGroupByOperator vgo = new VectorGroupByOperator(ctx, desc);

    FakeCaptureOutputOperator out = FakeCaptureOutputOperator.addCaptureOutputChild(vgo);
    vgo.initialize(null, null);
    out.setOutputInspector(new FakeCaptureOutputOperator.OutputInspector() {

      private int rowIndex;
      private String aggregateName;
      private HashMap<Object,Object> expected;
      private Set<Object> keys;

      @SuppressWarnings("deprecation")
      @Override
      public void inspectRow(Object row, int tag) throws HiveException {
        assertTrue(row instanceof Object[]);
        Object[] fields = (Object[]) row;
        assertEquals(2, fields.length);
        Object key = fields[0];
        String keyValue = null;
        if (null != key) {
          assertTrue(key instanceof Text);
          Text bwKey = (Text)key;
          keyValue = bwKey.toString();
        }
        assertTrue(expected.containsKey(keyValue));
        Object expectedValue = expected.get(keyValue);
        Object value = fields[1];
        Validator validator = getValidator(aggregateName);
        String keyAsString = String.format("%s", key);
        validator.validate(keyAsString, expectedValue, new Object[] {value});
        keys.add(keyValue);
      }

      private FakeCaptureOutputOperator.OutputInspector init(
          String aggregateName, HashMap<Object,Object> expected, Set<Object> keys) {
        this.aggregateName = aggregateName;
        this.expected = expected;
        this.keys = keys;
        return this;
      }
    }.init(aggregateName, expected, keys));

    for (VectorizedRowBatch unit: data) {
      vgo.processOp(unit,  0);
    }
    vgo.close(false);

    List<Object> outBatchList = out.getCapturedRows();
    assertNotNull(outBatchList);
    assertEquals(expected.size(), outBatchList.size());
    assertEquals(expected.size(), keys.size());
  }
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle, Inc. Contact coftware#gmail.com.