Package: org.apache.avro.generic.GenericData

Examples of org.apache.avro.generic.GenericData.Record


    return this;
  }
 
  @Override
  public Record build() {
    Record record;
    try {
      record = new GenericData.Record(schema());
    } catch (Exception e) {
      throw new AvroRuntimeException(e);
    }
   
    for (Field field : fields()) {
      Object value;
      try {
        value = getWithDefault(field);
      } catch(IOException e) {
        throw new AvroRuntimeException(e);
      }
      if (value != null) {
        record.put(field.pos(), value);
      }
    }
   
    return record;
  }
View Full Code Here


    GenericData.Record record = new GenericData.Record(Person.SCHEMA$);
    record.put("name", "name value");
    record.put("age", 42);
    record.put("siblingnames", Lists.newArrayList());

    Record detachedRecord = genericType.getDetachedValue(record);
    assertEquals(record, detachedRecord);
    assertNotSame(record, detachedRecord);
  }
View Full Code Here

    assertNotSame(person, deepCopyPerson);
  }

  @Test
  public void testDeepCopyGeneric() {
    Record record = new Record(Person.SCHEMA$);
    record.put("name", "John Doe");
    record.put("age", 42);
    record.put("siblingnames", Lists.newArrayList());

    Record deepCopyRecord = new AvroDeepCopier.AvroGenericDeepCopier(Person.SCHEMA$)
        .deepCopy(record);

    assertEquals(record, deepCopyRecord);
    assertNotSame(record, deepCopyRecord);
  }
View Full Code Here

    writeCollection.write(To.avroFile(avroFile.getAbsolutePath()));

    PCollection<Record> readCollection = MemPipeline.getInstance().read(
        At.avroFile(avroFile.getAbsolutePath(), Avros.generics(writeRecord.getSchema())));

    Record readRecord = readCollection.materialize().iterator().next();

    assertEquals(writeRecord, readRecord);
  }
View Full Code Here

 
  /**
   * Moves data between a Tuple and an Avro Record
   */
  public Record toRecord(ITuple tuple, Record reuse) throws IOException {
    Record record = reuse;
    if (record == null){
      record = new Record(avroSchema);
    }
    for(int i = 0; i < pangoolSchema.getFields().size(); i++) {
      Object obj = tuple.get(i);
      Field field = pangoolSchema.getField(i);
      switch(field.getType()){
      case INT:
      case LONG:
      case FLOAT:
      case BOOLEAN:
      case DOUBLE:
      case BYTES:
        record.put(i, obj); //optimistic
        break;
      case OBJECT:
        FieldSerializer customSer = customSerializers[i];
        DataOutputBuffer buffer = buffers[i];
        buffer.reset();
        if (customSer != null){
          customSer.open(buffer);
          customSer.serialize(obj);
          customSer.close(); //TODO is this safe ?
        } else {
          hadoopSer.ser(obj, buffer);
        }
        //TODO this byteBuffer instances should be cached and reused
        ByteBuffer byteBuffer = ByteBuffer.wrap(buffer.getData(), 0,buffer.getLength());
        record.put(i, byteBuffer);
        break;
      case ENUM:
        record.put(i,obj.toString());
        break;
      case STRING:
        record.put(i,new Utf8(obj.toString())); //could be directly String ?
        break;
      default:
          throw
          new IOException("Not correspondence to Avro type from Pangool type " + field.getType());
      }
View Full Code Here

    writeCollection.write(To.avroFile(avroFile.getAbsolutePath()));

    PCollection<Record> readCollection = MemPipeline.getInstance().read(
        At.avroFile(avroFile.getAbsolutePath(), Avros.generics(writeRecord.getSchema())));

    Record readRecord = readCollection.materialize().iterator().next();

    assertEquals(writeRecord, readRecord);
  }
View Full Code Here

    assertNull(new AvroSpecificDeepCopier<Person>(Person.SCHEMA$).deepCopy(null));
  }

  @Test
  public void testDeepCopyGeneric() {
    Record record = new Record(Person.SCHEMA$);
    record.put("name", "John Doe");
    record.put("age", 42);
    record.put("siblingnames", Lists.newArrayList());

    Record deepCopyRecord = new AvroDeepCopier.AvroGenericDeepCopier(Person.SCHEMA$).deepCopy(record);

    assertEquals(record, deepCopyRecord);
    assertNotSame(record, deepCopyRecord);
  }
View Full Code Here

    GenericData.Record record = new GenericData.Record(Person.SCHEMA$);
    record.put("name", "name value");
    record.put("age", 42);
    record.put("siblingnames", Lists.newArrayList());

    Record detachedRecord = genericType.getDetachedValue(record);
    assertEquals(record, detachedRecord);
    assertNotSame(record, detachedRecord);
  }
View Full Code Here

  @Test
  public void testProjection() throws IOException {
    String genericSchemaJson = Person.SCHEMA$.toString().replace("Person", "GenericPerson");
    Schema genericPersonSchema = new Schema.Parser().parse(genericSchemaJson);
    GenericRecord savedRecord = new Record(genericPersonSchema);
    savedRecord.put("name", "John Doe");
    savedRecord.put("age", 42);
    savedRecord.put("siblingnames", Lists.newArrayList("Jimmy", "Jane"));
    populateGenericFile(Lists.newArrayList(savedRecord), genericPersonSchema);

    Schema projection = Schema.createRecord("projection", null, null, false);
    projection.setFields(Lists.newArrayList(cloneField(genericPersonSchema.getField("name"))));
    AvroParquetFileReaderFactory<Record> genericReader = createFileReaderFactory(Avros.generics(projection));
    Iterator<Record> recordIterator = genericReader.read(FileSystem.getLocal(new Configuration()),
        new Path(this.parquetFile.getAbsolutePath()));

    GenericRecord genericRecord = recordIterator.next();
    assertEquals(savedRecord.get("name"), genericRecord.get("name"));
    assertNull(genericRecord.get("age"));
    assertFalse(recordIterator.hasNext());
  }
View Full Code Here

    writer.writeTo(avroFile);
  }

  @Test
  public void testSpecific() throws IOException {
    GenericRecord savedRecord = new Record(Person.SCHEMA$);
    savedRecord.put("name", "John Doe");
    savedRecord.put("age", 42);
    savedRecord.put("siblingnames", Lists.newArrayList("Jimmy", "Jane"));
    populateGenericFile(Lists.newArrayList(savedRecord), Person.SCHEMA$);

    Pipeline pipeline = new MRPipeline(TrevniFileSourceTargetIT.class, tmpDir.getDefaultConfiguration());
    PCollection<Person> genericCollection = pipeline.read(new TrevniKeySource(new Path(avroFile.getAbsolutePath()),
        Avros.records(Person.class)));
View Full Code Here

TOP

Related Classes of org.apache.avro.generic.GenericData.Record

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact coftware#gmail.com.