Examples of TestRecord


Examples of org.commoncrawl.util.shared.ArcFileReaderTests.TestRecord

    // iterate over the reader's key/value pairs and validate each against the expected test records ...
    while (reader.nextKeyValue()) {
      Text key = reader.getCurrentKey();
      BytesWritable value = reader.getCurrentValue();
     
      TestRecord testRecord = records.get(index++);
      // get test key bytes as utf-8 bytes ...
      byte[] testKeyBytes = testRecord.url.getBytes(Charset.forName("UTF-8"));
      // compare against the raw key bytes to validate the key is the same (Text's UTF-8 mapping code replaces invalid
      // characters with '?', which would break our test case, since it deliberately uses invalid characters to form the key).
      Assert.assertTrue(ArcFileReaderTests.compareTo(testKeyBytes,0,testKeyBytes.length,key.getBytes(),0,key.getLength()) == 0);
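
ArcFileReaderTests.compareTo is not shown in this fragment; it presumably performs an unsigned, lexicographic comparison of the two byte ranges (the same contract as org.apache.hadoop.io.WritableComparator.compareBytes). A hypothetical stand-in, not the project's actual implementation, could look like this:

    // Hypothetical byte-range comparison: unsigned, lexicographic; a shorter prefix sorts first.
    // A return value of 0 means the two ranges are byte-identical.
    public static int compareTo(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
      for (int i = s1, j = s2; i < s1 + l1 && j < s2 + l2; i++, j++) {
        int a = b1[i] & 0xff;
        int b = b2[j] & 0xff;
        if (a != b) {
          return a - b;
        }
      }
      return l1 - l2;
    }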

Examples of org.kiji.schema.avro.TestRecord

            .setType(SchemaType.CLASS)
            .setValue(TestRecord.class.getName())
            .build());
    final KijiCellEncoder encoder =
        DefaultKijiCellEncoderFactory.get().create(cellSpec);
    final TestRecord record = TestRecord.newBuilder()
        .setA("a")  // encodes as [2, 97]
        .setB(1)    // encodes as [2]
        .setC(2)    // encodes as [4]
        .build();
    assertArrayEquals(new byte[]{2, 97, 2, 4}, encoder.encode(record));
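
The expected bytes follow from Avro's binary encoding: ints are zig-zag/varint encoded (1 encodes as 0x02, 2 as 0x04), and a string is written as a zig-zag length prefix followed by its UTF-8 bytes ("a" encodes as [2, 97]). A minimal sketch that reproduces the same byte sequence using only the plain Avro encoder API (no Kiji classes involved):

    // Reproduce the asserted bytes with Avro's low-level binary encoder.
    java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
    org.apache.avro.io.BinaryEncoder enc =
        org.apache.avro.io.EncoderFactory.get().binaryEncoder(out, null);
    enc.writeString("a"); // zig-zag length 1 -> 2, then 'a' -> 97
    enc.writeInt(1);      // zig-zag of 1 -> 2
    enc.writeInt(2);      // zig-zag of 2 -> 4
    enc.flush();
    // out.toByteArray() is {2, 97, 2, 4}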

Examples of org.kiji.schema.avro.TestRecord

    runTest(new HasColumnDataRowFilter("family", "qualifier"));
  }

  @Test
  public void testColumnValueEqualsRowFilter() throws Exception {
    TestRecord record = createTestRecord();
    runTest(new ColumnValueEqualsRowFilter("family", "qualifier",
        new DecodedCell<Object>(record.getSchema(), record)));
  }

Examples of org.kiji.schema.avro.TestRecord

        new DecodedCell<Object>(record.getSchema(), record)));
  }

  @Test
  public void testAndRowFilter() throws Exception {
    TestRecord record = createTestRecord();
    runTest(Filters.and(
        new HasColumnDataRowFilter("fA", "qA"),
        new HasColumnDataRowFilter("fB", "qB"),
        new ColumnValueEqualsRowFilter("fC", "qC",
            new DecodedCell<Object>(record.getSchema(), record))));
  }

Examples of org.kiji.schema.avro.TestRecord

            new DecodedCell<Object>(record.getSchema(), record))));
  }

  @Test
  public void testOrRowFilter() throws Exception {
    TestRecord record = createTestRecord();
    runTest(Filters.or(
        new HasColumnDataRowFilter("fA", "qA"),
        new HasColumnDataRowFilter("fB", "qB"),
        new ColumnValueEqualsRowFilter("fC", "qC",
            new DecodedCell<Object>(record.getSchema(), record))));
  }

Examples of org.kiji.schema.avro.TestRecord

    JsonNode reserialized = deserialized.toJson();
    assertEquals(serialized, reserialized);
  }

  private TestRecord createTestRecord() {
    TestRecord record = TestRecord.newBuilder()
        .setA("a string value")
        .setB(10)
        .setC(100)
        .build();
    return record;
  }
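
This appears to be the createTestRecord() helper invoked by the row-filter tests above to obtain the TestRecord value they match against.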

Examples of org.kitesdk.data.hbase.avro.entities.TestRecord

      assertTrue(dao.put(testRecord));
    }

    for (int i = 0; i < 10; ++i) {
      PartitionKey partitionKey = new PartitionKey("part1_" + i, "part2_" + i);
      TestRecord record = dao.get(partitionKey);
      assertEquals("field1_" + i, record.getField1());
      assertEquals("field2_" + i, record.getField2());
      assertEquals(TestEnum.ENUM3, record.getEnum$());
      assertEquals("field3_value_1_" + i,
          record.getField3().get("field3_key_1_" + i));
      assertEquals("field3_value_2_" + i,
          record.getField3().get("field3_key_2_" + i));
      assertEquals("embedded1_" + i, record.getField4().getEmbeddedField1());
      assertEquals(i, (long) record.getField4().getEmbeddedField2());
      assertEquals(2, record.getField5().size());
      // check 1st subrecord
      assertEquals("subfield1_" + i, record.getField5().get(0).getSubfield1());
      assertEquals(i, (long) record.getField5().get(0).getSubfield2());
      assertEquals("subfield3_" + i, record.getField5().get(0).getSubfield3());
      assertEquals("subfield4_" + i, record.getField5().get(1).getSubfield1());
      assertEquals(i, (long) record.getField5().get(1).getSubfield2());
      assertEquals("subfield6_" + i, record.getField5().get(1).getSubfield3());
    }

    int cnt = 0;
    EntityScanner<TestRecord> entityScanner = dao.getScanner();
    entityScanner.initialize();
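
The fragment is cut off here; the remainder of the test presumably iterates the scanner, counting the returned entities into cnt, and closes the scanner when finished.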

Examples of org.kitesdk.data.hbase.avro.entities.TestRecord

  public void testConflict() throws Exception {
    Dao<TestRecord> dao = new SpecificAvroDao<TestRecord>(tablePool,
      tableName, schemaString, TestRecord.class);

    // create key and entity, and do a put
    TestRecord entity = createSpecificEntity("part1", "part2");
    assertTrue(dao.put(entity));

    // now fetch the entity twice. Change one, and do a put. Change the other,
    // and the second put should fail.
    PartitionKey key = new PartitionKey("part1", "part2");
    TestRecord recordRef1 = TestRecord.newBuilder(dao.get(key))
        .setField1("part1_1").build();
    TestRecord recordRef2 = TestRecord.newBuilder(dao.get(key))
        .setField1("part1_2").build();
    assertTrue(dao.put(recordRef1));
    assertFalse(dao.put(recordRef2));

    // Now get the latest version, change it, and put should succeed.
    recordRef2 = dao.get(key);
    assertEquals("part1_1", recordRef2.getField1());
    recordRef2 = TestRecord.newBuilder(recordRef2).setField1("part1_2").build();
    assertTrue(dao.put(recordRef2));

    // validate the most recent values.
    TestRecord finalRecord = dao.get(key);
    assertEquals("part1_2", finalRecord.getField1());

    // putting the original, now-stale entity again should conflict
    assertFalse(dao.put(entity));
  }
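
Taken together, the test exercises the DAO's conflict detection: a put based on a stale read is rejected (assertFalse), a put made after re-reading the latest version succeeds, and re-putting the original, now-stale entity is rejected as well.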

Examples of org.kitesdk.data.hbase.avro.entities.TestRecord

    Map<String, String> field3Map = new HashMap<String, String>();
    EmbeddedRecord embeddedRecord = EmbeddedRecord.newBuilder()
        .setEmbeddedField1("embedded1").setEmbeddedField2(2).build();

    TestRecord entity = TestRecord.newBuilder().setKeyPart1("part1")
        .setKeyPart2("part2").setField1("field1").setField2("field2")
        .setEnum$(TestEnum.ENUM3).setField3(field3Map)
        .setField4(embeddedRecord).setField5(new ArrayList<ArrayRecord>())
        .build();

    assertTrue(dao.put(entity));

    PartitionKey key = new PartitionKey("part1", "part2");
    TestRecord record = dao.get(key);

    assertEquals("field1", record.getField1());
    assertEquals("field2", record.getField2());
    assertEquals(TestEnum.ENUM3, record.getEnum$());
    assertEquals(0, record.getField3().size());
    assertEquals("embedded1", record.getField4().getEmbeddedField1());
    assertEquals(2L, (long) record.getField4().getEmbeddedField2());
    assertEquals(0, record.getField5().size());
  }

Examples of org.kitesdk.data.hbase.avro.entities.TestRecord

  public void testDeleteAfterMultiplePuts() throws Exception {
    Dao<TestRecord> dao = new SpecificAvroDao<TestRecord>(tablePool,
      tableName, schemaString, TestRecord.class);

    for (int i = 0; i < 10; ++i) {
      TestRecord entity = createSpecificEntity("part1_" + i, "part2_" + i);
      assertTrue(dao.put(entity));
    }

    // get and put it a couple of times to build up versions
    PartitionKey key = new PartitionKey("part1_5", "part2_5");
    TestRecord entity = dao.get(key);
    dao.put(entity);
    entity = dao.get(key);
    dao.put(entity);

    // now make sure the dao removes all versions of all columns
    dao.delete(key);
    TestRecord deletedRecord = dao.get(key);
    assertNull(deletedRecord);
  }