Package org.apache.hadoop.hive.serde2

Examples of org.apache.hadoop.hive.serde2.TestTCTLSeparatedProtocol


    synchronized (TestOrcFile.class) {
      inspector = (StructObjectInspector)
          ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class,
              ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
    }
    SerDe serde = new OrcSerde();
    HiveOutputFormat<?, ?> outFormat = new OrcOutputFormat();
    FileSinkOperator.RecordWriter writer =
        outFormat.getHiveRecordWriter(conf, testFilePath, MyRow.class, true,
            properties, Reporter.NULL);
    ReaderWriterProfiler.setProfilerOptions(conf);
    writer.write(serde.serialize(new MyRow(1,2), inspector));
    writer.write(serde.serialize(new MyRow(2,2), inspector));
    writer.write(serde.serialize(new MyRow(3,2), inspector));
    writer.close(true);
    serde = new OrcSerde();
    properties.setProperty("columns", "x,y");
    properties.setProperty("columns.types", "int:int");
    serde.initialize(conf, properties);
    assertEquals(OrcSerde.OrcSerdeRow.class, serde.getSerializedClass());
    inspector = (StructObjectInspector) serde.getObjectInspector();
    assertEquals("struct<x:int,y:int>", inspector.getTypeName());
    InputFormat<?,?> in = new OrcInputFormat();
    FileInputFormat.setInputPaths(conf, testFilePath.toString());
    InputSplit[] splits = in.getSplits(conf, 1);
    assertEquals(1, splits.length);

    // test the validateInput method
    ArrayList<FileStatus> fileList = new ArrayList<FileStatus>(3);
    assertFalse(
        ((InputFormatChecker) in).validateInput(fs, new HiveConf(), fileList));
    fileList.add(fs.getFileStatus(testFilePath));
    assertTrue(
        ((InputFormatChecker) in).validateInput(fs, new HiveConf(), fileList));
    fileList.add(fs.getFileStatus(workDir));
    assertFalse(
        ((InputFormatChecker) in).validateInput(fs, new HiveConf(), fileList));


    // read the whole file
    org.apache.hadoop.mapred.RecordReader reader =
        in.getRecordReader(splits[0], conf, Reporter.NULL);
    Object key = reader.createKey();
    Writable value = (Writable) reader.createValue();
    int rowNum = 0;
    List<? extends StructField> fields = inspector.getAllStructFieldRefs();
    IntObjectInspector intInspector =
        (IntObjectInspector) fields.get(0).getFieldObjectInspector();
    assertEquals(0.0, reader.getProgress(), 0.00001);
    assertEquals(0, reader.getPos());
    while (reader.next(key, value)) {
      assertEquals(++rowNum, intInspector.get(inspector.
          getStructFieldData(serde.deserialize(value), fields.get(0))));
      assertEquals(2, intInspector.get(inspector.
          getStructFieldData(serde.deserialize(value), fields.get(1))));
    }
    assertEquals(3, rowNum);
    assertEquals(1.0, reader.getProgress(), 0.00001);
    reader.close();

    // read just the first column
    conf.set("hive.io.file.readcolumn.ids", "0");
    reader = in.getRecordReader(splits[0], conf, Reporter.NULL);
    key = reader.createKey();
    value = (Writable) reader.createValue();
    rowNum = 0;
    fields = inspector.getAllStructFieldRefs();
    while (reader.next(key, value)) {
      assertEquals(++rowNum, intInspector.get(inspector.
          getStructFieldData(value, fields.get(0))));
      assertEquals(null, inspector.getStructFieldData(value, fields.get(1)));
    }
    assertEquals(3, rowNum);
    reader.close();

    // test the mapping of empty string to all columns
    conf.set("hive.io.file.readcolumn.ids", "");
    reader = in.getRecordReader(splits[0], conf, Reporter.NULL);
    key = reader.createKey();
    value = (Writable) reader.createValue();
    rowNum = 0;
    fields = inspector.getAllStructFieldRefs();
    while (reader.next(key, value)) {
      assertEquals(++rowNum, intInspector.get(inspector.
          getStructFieldData(value, fields.get(0))));
      assertEquals(2, intInspector.get(inspector.
          getStructFieldData(serde.deserialize(value), fields.get(1))));
    }
    assertEquals(3, rowNum);
    reader.close();
  }
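
The test writes instances of a MyRow bean that the snippet never shows. A minimal sketch consistent with the "x,y" / "int:int" schema it registers (the reflection ObjectInspector picks up the two int fields by name; the real class lives in the test source) would be:

    static class MyRow {
      int x;
      int y;

      MyRow(int x, int y) {
        this.x = x;
        this.y = y;
      }
    }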


      this.conf = conf;
      this.stripeSize = stripeSize;
      this.compress = CompressionKind.valueOf(compress);
      this.compressionSize = compressionSize;
      this.rowIndexStride = rowIndexStride;
      this.stats = new SerDeStats();
    }
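
This fragment is the tail of a writer constructor that captures ORC tuning options and a SerDeStats holder. A hedged sketch of how a caller might derive those arguments from table properties (the orc.* keys and the defaults here are assumptions, not taken from this snippet):

    import java.util.Properties;

    class OrcWriterOptions {
      final long stripeSize;        // bytes per stripe
      final String compress;        // fed to CompressionKind.valueOf(...)
      final int compressionSize;    // compression chunk size in bytes
      final int rowIndexStride;     // rows between row-index entries

      OrcWriterOptions(Properties tbl) {
        stripeSize = Long.parseLong(
            tbl.getProperty("orc.stripe.size", "268435456"));
        compress = tbl.getProperty("orc.compress", "ZLIB");
        compressionSize = Integer.parseInt(
            tbl.getProperty("orc.compress.size", "262144"));
        rowIndexStride = Integer.parseInt(
            tbl.getProperty("orc.row.index.stride", "10000"));
      }
    }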

    return reader.loadIndeces(rowIndexEntries, updatedStartIndex);
  }

  @Override
  public Object next(Object previous) throws IOException {
    ByteWritable result = null;
    if (valuePresent) {
      if (previous == null) {
        result = new ByteWritable();
      } else {
        result = (ByteWritable) previous;
      }
      result.set(reader.next());
    }
    return result;
  }
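
This next(Object previous) signature follows Hadoop's object-reuse convention: the caller passes the previously returned Writable back in so the reader can recycle it instead of allocating one per row, and null is returned when no value is present. A self-contained illustration of that contract, using a hypothetical SimpleByteReader rather than the real ORC tree reader:

    import org.apache.hadoop.io.ByteWritable;

    public class ReusePatternDemo {
      // Hypothetical reader; only the reuse contract mirrors the snippet.
      static class SimpleByteReader {
        private byte current = 0;
        private final byte limit;

        SimpleByteReader(byte limit) { this.limit = limit; }

        boolean hasNext() { return current < limit; }

        Object next(Object previous) {
          ByteWritable result = (previous == null)
              ? new ByteWritable() : (ByteWritable) previous;
          result.set(current++);
          return result;
        }
      }

      public static void main(String[] args) {
        SimpleByteReader reader = new SimpleByteReader((byte) 3);
        Object row = null;
        while (reader.hasNext()) {
          row = reader.next(row);  // same instance is recycled after row one
          System.out.println(((ByteWritable) row).get());
        }
      }
    }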

    return o == null ? null : new OrcLazyByte((OrcLazyByte) o);
  }

  @Override
  public Object getPrimitiveJavaObject(Object o) {
    ByteWritable writable = getPrimitiveWritableObject(o);
    return writable == null ? null : Byte.valueOf(writable.get());
  }

  }

  public OrcLazyByte(OrcLazyByte copy) {
    super(copy);
    if (copy.previous != null) {
      previous = new ByteWritable(((ByteWritable) copy.previous).get());
    }
  }

      int nextCalls = 0;

      @Override
      public Object next(Object previous) throws IOException {
        if (nextCalls == 0) {
          nextCalls++;  // without this increment the guard below could never fire
          return new ByteWritable((byte) 1);
        }

        throw new IOException("next should only be called once");
      }
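
This stub returns a value on the first call and throws on any later call, which is how the test proves that materialize() caches its result instead of re-reading from the stream. A hedged sketch of the assertion side (the lazyByte variable is an assumption about the surrounding test):

    ByteWritable first = (ByteWritable) lazyByte.materialize();
    ByteWritable second = (ByteWritable) lazyByte.materialize();
    assertEquals(1, first.get());
    assertEquals(1, second.get());  // no second read happened, or the stub would have thrown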

    BooleanWritable boolean1 =
        (BooleanWritable) ((OrcLazyBoolean) row.getFieldValue(0)).materialize();
    if (boolean1 == null) {
      assertNull(expected.boolean1);
    } else {
      assertEquals(expected.boolean1.booleanValue(), boolean1.get());
    }

    ByteWritable byte1 = (ByteWritable) ((OrcLazyByte) row.getFieldValue(1)).materialize();
    if (byte1 == null) {
      assertNull(expected.byte1);
    } else {
      assertEquals(expected.byte1.byteValue(), byte1.get());
    }

    OrcLazyShort lazyShort1 = (OrcLazyShort) row.getFieldValue(2);
    ShortWritable short1 = (ShortWritable) lazyShort1.materialize();
    if (short1 == null) {
      assertNull(expected.short1);
    } else {
      assertEquals(expected.short1.shortValue(), short1.get());
    }

    return latestRead;
  }


  DoubleWritable createWritable(Object previous, double v) throws IOException {
    DoubleWritable result = null;
    if (previous == null) {
      result = new DoubleWritable();
    } else {
      result = (DoubleWritable) previous;
    }
    result.set(v);
    return result;
  }

  /**
   * Give the next double as a Writable object.
   */
  @Override
  public Object next(Object previous) throws IOException {
    DoubleWritable result = null;
    if (valuePresent) {
      result = createWritable(previous, readDouble());
    }
    return result;
  }

  }

  public OrcLazyDouble(OrcLazyDouble copy) {
    super(copy);
    if (copy.previous != null) {
      previous = new DoubleWritable(((DoubleWritable) copy.previous).get());
    }
  }
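
Both copy constructors (OrcLazyByte earlier and OrcLazyDouble here) deep-copy the cached previous Writable instead of aliasing it, so mutating the original afterwards cannot corrupt the copy. A minimal standalone demonstration of why that matters (Holder is a hypothetical stand-in for the lazy wrappers):

    import org.apache.hadoop.io.DoubleWritable;

    public class CopyDemo {
      static class Holder {
        DoubleWritable previous;

        Holder(double v) { previous = new DoubleWritable(v); }

        Holder(Holder copy) {
          if (copy.previous != null) {
            previous = new DoubleWritable(copy.previous.get()); // deep copy
          }
        }
      }

      public static void main(String[] args) {
        Holder a = new Holder(1.5);
        Holder b = new Holder(a);
        a.previous.set(2.5);                  // mutate the original
        System.out.println(b.previous.get()); // still 1.5: the copy is isolated
      }
    }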
