Package io.druid.jackson

Examples of io.druid.jackson.DefaultObjectMapper
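
Each example below constructs a DefaultObjectMapper, a Jackson ObjectMapper preconfigured with Druid's serializers and default settings, and uses it to serialize specs, queries, and segment metadata to JSON and back. The sketch below shows that round-trip pattern in isolation; the SerdeExample class and its roundTrip helper are illustrative only and do not appear in the snippets that follow.

import com.fasterxml.jackson.databind.ObjectMapper;

import io.druid.jackson.DefaultObjectMapper;

public class SerdeExample
{
  // Illustrative helper: write a value to JSON, then read it back with the same mapper.
  static <T> T roundTrip(ObjectMapper mapper, T value, Class<T> clazz) throws Exception
  {
    String json = mapper.writeValueAsString(value);
    return mapper.readValue(json, clazz);
  }

  public static void main(String[] args) throws Exception
  {
    ObjectMapper mapper = new DefaultObjectMapper();
    // With a real Druid type this would look like:
    // AggregatorFactory restored = roundTrip(mapper, factory, AggregatorFactory.class);
    System.out.println(roundTrip(mapper, "hello", String.class));
  }
}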


  @Test
  public void testHashedBucketSelection()
  {
    List<HadoopyShardSpec> specs = Lists.newArrayList();
    final int partitionCount = 10;
    for (int i = 0; i < partitionCount; i++) {
      // Each partition gets a HashBasedNumberedShardSpec; the DefaultObjectMapper is what the
      // shard spec uses to serialize a row's group key when hashing it to a partition.
      specs.add(new HadoopyShardSpec(new HashBasedNumberedShardSpec(i, partitionCount, new DefaultObjectMapper()), i));
    }
    // Backwards compatibility
    DataRollupSpec rollupSpec = new DataRollupSpec();
    rollupSpec.rollupGranularity = QueryGranularity.MINUTE;
View Full Code Here


public class FireDepartmentTest
{
  @Test
  public void testSerde() throws Exception
  {
    ObjectMapper jsonMapper = new DefaultObjectMapper();

    FireDepartment schema = new FireDepartment(
        new DataSchema(
            "foo",
            new StringInputRowParser(
                new JSONParseSpec(
                    new TimestampSpec(
                        "timestamp",
                        "auto"
                    ),
                    new DimensionsSpec(
                        Arrays.asList("dim1", "dim2"),
                        null,
                        null
                    )
                ),
                null, null, null, null
            ),
            new AggregatorFactory[]{
                new CountAggregatorFactory("count")
            },
            new UniformGranularitySpec(Granularity.HOUR, QueryGranularity.MINUTE, null, Granularity.HOUR)
        ),
        new RealtimeIOConfig(
            null,
            new RealtimePlumberSchool(
                null, null, null, null, null, null, null, null, null, null, null, null, null, 0
            )
        ),
        new RealtimeTuningConfig(
            null, null, null, null, null, null, null, null, false, false
        ),
        null, null, null, null
    );

    // Round-trip the ingestion spec through JSON; the dataSource should survive serde.
    String json = jsonMapper.writeValueAsString(schema);

    FireDepartment newSchema = jsonMapper.readValue(json, FireDepartment.class);

    Assert.assertEquals(schema.getDataSchema().getDataSource(), newSchema.getDataSchema().getDataSource());
  }
View Full Code Here

  @Test
  public void testSerde() throws Exception
  {
    // "billy" computes the cardinality of dims b, a, c; byRow = true counts distinct value tuples per row.
    CardinalityAggregatorFactory factory = new CardinalityAggregatorFactory("billy", ImmutableList.of("b", "a", "c"), true);
    ObjectMapper objectMapper = new DefaultObjectMapper();
    Assert.assertEquals(
        factory,
        objectMapper.readValue(objectMapper.writeValueAsString(factory), AggregatorFactory.class)
    );
  }
View Full Code Here

  private GroupByQueryRunnerFactory factory;

  @Parameterized.Parameters
  public static Collection<?> constructorFeeder() throws IOException
  {
    final ObjectMapper mapper = new DefaultObjectMapper();
    // GroupBy query runners draw their intermediate processing buffers from this pool.
    final StupidPool<ByteBuffer> pool = new StupidPool<ByteBuffer>(
        new Supplier<ByteBuffer>()
        {
          @Override
          public ByteBuffer get()
View Full Code Here

                                // true: znode payloads are written gzip-compressed.
                                .compressionProvider(new PotentiallyGzippedCompressionProvider(true))
                                .build();
    cf.start();
    cf.create().creatingParentsIfNeeded().forPath(testBasePath);

    jsonMapper = new DefaultObjectMapper();

    announcer = new Announcer(
        cf,
        MoreExecutors.sameThreadExecutor()
    );
View Full Code Here

                                // false: payloads are written uncompressed (gzipped payloads can still be read back).
                                .compressionProvider(new PotentiallyGzippedCompressionProvider(false))
                                .build();
    cf.start();
    cf.create().creatingParentsIfNeeded().forPath(testBasePath);

    jsonMapper = new DefaultObjectMapper();

    announcer = new Announcer(
        cf,
        MoreExecutors.sameThreadExecutor()
    );
View Full Code Here

    Map<String, Object> payloadMap = ImmutableMap.<String, Object>of(
        "type", "and",
        "havingSpecs", ImmutableList.of(greaterMap, orMap)
    );

    ObjectMapper mapper = new DefaultObjectMapper();
    // convertValue uses the "type" fields in the map to rebuild the polymorphic HavingSpec tree.
    assertEquals(andHavingSpec, mapper.convertValue(payloadMap, AndHavingSpec.class));
  }
View Full Code Here

    File file = new File(jsonFile);
    if (!file.exists()) {
      System.out.printf("File[%s] does not exist.%n", file);
    }

    final ObjectMapper jsonMapper = new DefaultObjectMapper();

    // Validate by attempting to deserialize the file as the requested spec type;
    // a malformed file falls through to the catch block below.
    try {
      if (type.equalsIgnoreCase("query")) {
        jsonMapper.readValue(file, Query.class);
      } else if (type.equalsIgnoreCase("hadoopConfig")) {
        jsonMapper.readValue(file, HadoopDruidIndexerConfig.class);
      } else if (type.equalsIgnoreCase("task")) {
        jsonMapper.readValue(file, Task.class);
      } else if (type.equalsIgnoreCase("realtimeSchema")) {
        jsonMapper.readValue(file, Schema.class);
      } else {
        throw new UOE("Unknown type[%s]", type);
      }
    }
    catch (Exception e) {
View Full Code Here

public class SegmentChangeRequestLoadTest
{
  @Test
  public void testV1Serialization() throws Exception
  {
    ObjectMapper mapper = new DefaultObjectMapper();

    final Interval interval = new Interval("2011-10-01/2011-10-02");
    final ImmutableMap<String, Object> loadSpec = ImmutableMap.<String, Object>of("something", "or_other");

    DataSegment segment = new DataSegment(
        "something",
        interval,
        "1",
        loadSpec,
        Arrays.asList("dim1", "dim2"),
        Arrays.asList("met1", "met2"),
        new NoneShardSpec(),
        IndexIO.CURRENT_VERSION_ID,
        1
    );

    final SegmentChangeRequestLoad segmentLoad = new SegmentChangeRequestLoad(segment);

    Map<String, Object> objectMap = mapper.readValue(
        mapper.writeValueAsString(segmentLoad), new TypeReference<Map<String, Object>>(){}
    );

    Assert.assertEquals(11, objectMap.size());
    Assert.assertEquals("load", objectMap.get("action"));
    Assert.assertEquals("something", objectMap.get("dataSource"));
View Full Code Here

    serverManager = new ServerManager(
        new CacheTestSegmentLoader(),
        new NoopQueryRunnerFactoryConglomerate(),
        new NoopServiceEmitter(),
        MoreExecutors.sameThreadExecutor(),
        // The mapper here is what ServerManager's caching layer uses to serialize cached results.
        new DefaultObjectMapper(),
        new LocalCacheProvider().get(),
        new CacheConfig()
    );

    final DruidServerMetadata me = new DruidServerMetadata("dummyServer", "dummyHost", 0, "dummyType", "normal", 0);
View Full Code Here
