Package org.apache.hadoop.io

Examples of org.apache.hadoop.io.NullWritable
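NullWritable is a singleton Writable with a zero-length serialization: it reads and writes no bytes, and the single instance is obtained with NullWritable.get(). It is typically used as a placeholder when one side of a key/value pair is not needed, for example when a job emits keys only (a minimal sketch of that pattern follows the first example below). The snippets on this page show it being serialized, used as a record-reader key or value, and used as the key of SequenceFile entries.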


public class NullWritableTest extends WritableTestBase {

  @Test
  public void test() throws IOException {
    // NullWritable writes no data, so its serialized form is empty;
    // serialize() (inherited from WritableTestBase) returns the raw serialized bytes.
    NullWritable writable = NullWritable.get();
    assertThat(serialize(writable).length, is(0));
  }
}
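As referenced above, here is a minimal sketch of the keys-only pattern. It is not taken from the snippets on this page, and the LineKeyMapper name is hypothetical:

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper: emits each input line as the key and NullWritable as the
// value, the usual way to drop the side of the pair that carries no data.
public class LineKeyMapper extends Mapper<LongWritable, Text, Text, NullWritable> {

  @Override
  protected void map(LongWritable offset, Text line, Context context)
      throws IOException, InterruptedException {
    context.write(line, NullWritable.get());
  }
}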


    assertEquals(1, splits.length);

    // read the whole file
    org.apache.hadoop.mapred.RecordReader<NullWritable, OrcLazyRow> reader =
        in.getRecordReader(splits[0], conf, Reporter.NULL);
    NullWritable key = reader.createKey();   // ORC rows carry no key, so the reader's key type is NullWritable
    OrcLazyRow value = (OrcLazyRow) reader.createValue();
    List<? extends StructField> fields = inspector.getAllStructFieldRefs();
    StringObjectInspector strInspector = (StringObjectInspector)
        fields.get(0).getFieldObjectInspector();
    assertEquals(true, reader.next(key, value));

    FileSplit fileSplit = new FileSplit(inputPath, 0, fileStatus.getLen(), job);
   
    AvroRecordReader<Pair<Integer, Long>> recordReader = new AvroRecordReader<Pair<Integer, Long>>(job, fileSplit);
   
    AvroWrapper<Pair<Integer, Long>> inputPair = new AvroWrapper<Pair<Integer, Long>>(null);
    NullWritable ignore = NullWritable.get();   // Avro's mapred reader puts the datum in the key and uses NullWritable for the unused value
   
    long sumOfCounts = 0;
    long numOfCounts = 0;
    while (recordReader.next(inputPair, ignore)) {
      Assert.assertEquals((Integer)inputPair.datum().get(0), defaultRank);

        //conf.setClassLoader(this.getClass().getClassLoader());
        // add the default configuration as a resource
        conf.addResource(HdfsRouteTest.class.getResourceAsStream("/core-default.xml"));
        FileSystem fs1 = FileSystem.get(file.toUri(), conf);
        SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, Text.class);
        NullWritable keyWritable = NullWritable.get();
        Text valueWritable = new Text();
        String value = "CIAO!";
        valueWritable.set(value);
        writer.append(keyWritable, valueWritable);
        writer.sync();

    private static Text[] readPartitions(FileSystem fs, Path p,
                                         JobConf job) throws IOException {
      SequenceFile.Reader reader = new SequenceFile.Reader(fs, p, job);
      List<Text> parts = new ArrayList<Text>();
      Text key = new Text();
      NullWritable value = NullWritable.get();
      while (reader.next(key, value)) {
        parts.add(key);
        key = new Text();   // allocate a fresh Text; the previous instance is now held by parts
      }
      reader.close();

        final Path file = new Path(new File("target/test/test-camel-boolean").getAbsolutePath());
        Configuration conf = new Configuration();
        FileSystem fs1 = FileSystem.get(file.toUri(), conf);
        SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, BooleanWritable.class);
        NullWritable keyWritable = NullWritable.get();
        BooleanWritable valueWritable = new BooleanWritable();
        valueWritable.set(true);
        writer.append(keyWritable, valueWritable);
        writer.sync();
        writer.close();

        final Path file = new Path(new File("target/test/test-camel-byte").getAbsolutePath());
        Configuration conf = new Configuration();
        FileSystem fs1 = FileSystem.get(file.toUri(), conf);
        SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, ByteWritable.class);
        NullWritable keyWritable = NullWritable.get();
        ByteWritable valueWritable = new ByteWritable();
        byte value = 3;
        valueWritable.set(value);
        writer.append(keyWritable, valueWritable);
        writer.sync();

        final Path file = new Path(new File("target/test/test-camel-float").getAbsolutePath());
        Configuration conf = new Configuration();
        FileSystem fs1 = FileSystem.get(file.toUri(), conf);
        SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, FloatWritable.class);
        NullWritable keyWritable = NullWritable.get();
        FloatWritable valueWritable = new FloatWritable();
        float value = 3.1415926535f;
        valueWritable.set(value);
        writer.append(keyWritable, valueWritable);
        writer.sync();

        final Path file = new Path(new File("target/test/test-camel-double").getAbsolutePath());
        Configuration conf = new Configuration();
        FileSystem fs1 = FileSystem.get(file.toUri(), conf);
        SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, DoubleWritable.class);
        NullWritable keyWritable = NullWritable.get();
        DoubleWritable valueWritable = new DoubleWritable();
        double value = 3.1415926535;
        valueWritable.set(value);
        writer.append(keyWritable, valueWritable);
        writer.sync();

        final Path file = new Path(new File("target/test/test-camel-int").getAbsolutePath());
        Configuration conf = new Configuration();
        FileSystem fs1 = FileSystem.get(file.toUri(), conf);
        SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, IntWritable.class);
        NullWritable keyWritable = NullWritable.get();
        IntWritable valueWritable = new IntWritable();
        int value = 314159265;
        valueWritable.set(value);
        writer.append(keyWritable, valueWritable);
        writer.sync();
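All of the writer snippets above store values under a NullWritable key. A minimal sketch of reading such a file back, assuming the same Hadoop imports as the writer snippets and reusing the target/test/test-camel-int path from the last example (not part of the original code):

        Configuration conf = new Configuration();
        Path file = new Path(new File("target/test/test-camel-int").getAbsolutePath());
        FileSystem fs = FileSystem.get(file.toUri(), conf);
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
        try {
            NullWritable key = NullWritable.get();   // singleton key, no data stored on disk
            IntWritable value = new IntWritable();
            while (reader.next(key, value)) {
                System.out.println(value.get());     // 314159265 for the file written above
            }
        } finally {
            reader.close();
        }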
