Package com.rackspacecloud.blueflood.io

Examples of com.rackspacecloud.blueflood.io.AstyanaxWriter
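AstyanaxWriter is Blueflood's Cassandra write path, built on the Netflix Astyanax client. The integration-test snippets below exercise its main entry points: insertFull for raw metrics, insertRollups for pre-computed rollups, and persistShardState for shard/slot bookkeeping.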


        Assert.assertEquals(48, actualLocators.size());
    }

    @Test
    public void testRollupGenerationSimple() throws Exception {
        AstyanaxWriter writer = AstyanaxWriter.getInstance();
        AstyanaxReader reader = AstyanaxReader.getInstance();
        final long baseMillis = 1333635148000L; // some point during 5 April 2012.
        int hours = 48;
        final String acctId = "ac" + IntegrationTestBase.randString(8);
        final String metricName = "fooService,barServer," + randString(8);
        final long endMillis = baseMillis + (1000 * 60 * 60 * hours);
        final Locator locator = Locator.createLocatorFromPathComponents(acctId, metricName);

        writeFullData(locator, baseMillis, hours, writer);

        // FULL -> 5m
        ArrayList<SingleRollupWriteContext> writes = new ArrayList<SingleRollupWriteContext>();
        for (Range range : Range.getRangesToRollup(Granularity.FULL, baseMillis, endMillis)) {
            // each range should produce one average
            Points<SimpleNumber> input = reader.getDataToRoll(SimpleNumber.class, locator, range, CassandraModel.CF_METRICS_FULL);
            BasicRollup basicRollup = BasicRollup.buildRollupFromRawSamples(input);
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRawSamples(input);

            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.FULL.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.FULL.coarser()),
                    range.start));
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_5,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_5),
                    range.start));
        }
        writer.insertRollups(writes);

        // 5m -> 20m
        writes.clear();

        for (Range range : Range.getRangesToRollup(Granularity.MIN_5, baseMillis, endMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_5));
            BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.MIN_5.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_5.coarser()),
                    range.start));

            Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_5));
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_20,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_20),
                    range.start));
        }
        writer.insertRollups(writes);

        // 20m -> 60m
        writes.clear();
        for (Range range : Range.getRangesToRollup(Granularity.MIN_20, baseMillis, endMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_20));
            BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.MIN_20.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_20.coarser()),
                    range.start));

            Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_20));
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_60,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_60),
                    range.start));
        }
        writer.insertRollups(writes);

        // 60m -> 240m
        writes.clear();
        for (Range range : Range.getRangesToRollup(Granularity.MIN_60, baseMillis, endMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_60));

            BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.MIN_60.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_60.coarser()),
                    range.start));

            Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_60));
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_240,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_240),
                    range.start));
        }
        writer.insertRollups(writes);

        // 240m -> 1440m
        writes.clear();
        for (Range range : Range.getRangesToRollup(Granularity.MIN_240, baseMillis, endMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_240));
            BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.MIN_240.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_240.coarser()),
                    range.start));

            Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_240));
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_1440,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_1440),
                    range.start));
        }
        writer.insertRollups(writes);

        // verify the number of points in 48h worth of rollups.
        Range range = new Range(Granularity.MIN_1440.snapMillis(baseMillis),
                Granularity.MIN_1440.snapMillis(endMillis + Granularity.MIN_1440.milliseconds()));
        Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_1440));
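        // (assertion elided in this snippet; per the comment above, it checks how
        // many 1440m points the 48h window produced)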


        Assert.assertTrue(
                histogramRollup.getBins().size() <= HistogramRollup.MAX_BIN_SIZE);
    }
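The five rollup passes in testRollupGenerationSimple above all repeat one pattern: enumerate the ranges to roll at the source granularity, read the finer data, merge it into a coarser rollup, and batch the writes. A minimal sketch of that pattern factored into a helper for the 5m-and-coarser passes (the FULL pass differs because it builds from raw SimpleNumber samples); this helper is not part of the original test and uses only calls shown above:

    private static void rollupBasicPass(AstyanaxReader reader, AstyanaxWriter writer, Locator locator,
                                        Granularity from, long fromMillis, long toMillis) throws Exception {
        ArrayList<SingleRollupWriteContext> writes = new ArrayList<SingleRollupWriteContext>();
        for (Range range : Range.getRangesToRollup(from, fromMillis, toMillis)) {
            // read the finer-grained rollups covering this coarser slot
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, from));
            // merge them and queue a write at the next granularity up
            writes.add(new SingleRollupWriteContext(BasicRollup.buildRollupFromRollups(input),
                    locator,
                    from.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, from.coarser()),
                    range.start));
        }
        writer.insertRollups(writes);
    }

With such a helper, the 5m -> 20m through 240m -> 1440m steps each reduce to a single call, e.g. rollupBasicPass(reader, writer, locator, Granularity.MIN_5, baseMillis, endMillis).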

    @Test
    public void testSimpleInsertAndGet() throws Exception {
        AstyanaxWriter writer = AstyanaxWriter.getInstance();
        AstyanaxReader reader = AstyanaxReader.getInstance();
        final long baseMillis = 1333635148000L; // some point during 5 April 2012.
        long lastMillis = baseMillis + (300 * 1000); // 300 seconds.
        final String acctId = "ac" + IntegrationTestBase.randString(8);
        final String metricName = "fooService,barServer," + randString(8);

        final Locator locator  = Locator.createLocatorFromPathComponents(acctId, metricName);

        Set<Long> expectedTimestamps = new HashSet<Long>();
        // insert something every 30s for 5 mins.
        for (int i = 0; i < 10; i++) {
            final long curMillis = baseMillis + (i * 30000); // 30 seconds apart.
            expectedTimestamps.add(curMillis);
            List<Metric> metrics = new ArrayList<Metric>();
            metrics.add(getRandomIntMetric(locator, curMillis));
            writer.insertFull(metrics);
        }
       
        Set<Long> actualTimestamps = new HashSet<Long>();
        // read back the columns that were written, from start to stop.
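        // (the original read-back is elided in this snippet; a plausible version
        // using the getDataToRoll call shown in testRollupGenerationSimple above —
        // getPoints() is assumed to expose the timestamp-keyed map)
        Points<SimpleNumber> points = reader.getDataToRoll(SimpleNumber.class, locator,
                new Range(baseMillis, lastMillis), CassandraModel.CF_METRICS_FULL);
        actualTimestamps.addAll(points.getPoints().keySet());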


        Assert.assertEquals(expectedTimestamps, actualTimestamps);
    }

    @Test
    public void testConsecutiveWriteAndRead() throws ConnectionException, IOException {
        AstyanaxWriter writer = AstyanaxWriter.getInstance();
        AstyanaxReader reader = AstyanaxReader.getInstance();
        final long baseMillis = 1333635148000L;

        final Locator locator = Locator.createLocatorFromPathComponents("ac0001",
                "fooService,fooServer," + randString(8));

        final List<Metric> metrics = new ArrayList<Metric>();
        for (int i = 0; i < 100; i++) {
            final Metric metric = new Metric(locator, i, baseMillis + (i * 1000),
                    new TimeValue(1, TimeUnit.DAYS), "unknown");
            metrics.add(metric);
            writer.insertFull(metrics);
            metrics.clear();
        }
       
        int count = 0;
        ColumnFamily<Locator, Long> CF_metrics_full = CassandraModel.getColumnFamily(BasicRollup.class, Granularity.FULL);
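        // (elided; presumably the test iterates CF_metrics_full for the locator,
        // increments count per column read, and asserts count == 100)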

    }

    @Test
    public void testShardStateWriteRead() throws Exception {
        final Collection<Integer> shards = Lists.newArrayList(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
        AstyanaxWriter writer = AstyanaxWriter.getInstance();

        // Simulate active or running state for all the slots for all granularities.
        for (int shard : shards) {
            Map<Granularity, Map<Integer, UpdateStamp>> allUpdates = new HashMap<Granularity, Map<Integer, UpdateStamp>>();
            for (Granularity granularity : Granularity.rollupGranularities()) {
                Map<Integer, UpdateStamp> updates = new HashMap<Integer, UpdateStamp>();
                for (int slot = 0; slot < granularity.numSlots(); slot++) {
                    updates.put(slot, new UpdateStamp(System.currentTimeMillis() - 10000, UpdateStamp.State.Active,
                            true));
                }
                allUpdates.put(granularity, updates);
            }
            writer.persistShardState(shard, allUpdates);
        }

        // Now simulate rolled up state for all the slots for all granularities.
        for (int shard : shards) {
            Map<Granularity, Map<Integer, UpdateStamp>> allUpdates = new HashMap<Granularity, Map<Integer, UpdateStamp>>();
            for (Granularity granularity : Granularity.rollupGranularities()) {
                Map<Integer, UpdateStamp> updates = new HashMap<Integer, UpdateStamp>();
                for (int slot = 0; slot < granularity.numSlots(); slot++) {
                    updates.put(slot, new UpdateStamp(System.currentTimeMillis(), UpdateStamp.State.Rolled,
                            true));
                }
                allUpdates.put(granularity, updates);
            }
            writer.persistShardState(shard, allUpdates);
        }

        // At this point each shard has its longest possible row, because every slot was filled.
        // Verify that getShardState returns all of the slots.
        AstyanaxReader reader = AstyanaxReader.getInstance();
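        // (verification elided; a plausible sketch — every granularity/slot pair
        // written above should come back, so something like:)
        // for (int shard : shards) {
        //     Collection<SlotState> slotStates = reader.getShardState(shard);
        //     int expected = 0;
        //     for (Granularity g : Granularity.rollupGranularities()) expected += g.numSlots();
        //     Assert.assertEquals(expected, slotStates.size());
        // }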

    @Test
    public void testUpdateStampCoaelescing() throws Exception {
        final int shard = 24;
        final int slot = 16;
        AstyanaxWriter writer = AstyanaxWriter.getInstance();
        Map<Granularity, Map<Integer, UpdateStamp>> updates = new HashMap<Granularity, Map<Integer, UpdateStamp>>();
        Map<Integer, UpdateStamp> slotUpdates = new HashMap<Integer, UpdateStamp>();
        updates.put(Granularity.MIN_5, slotUpdates);
       
        long time = 1234;
        slotUpdates.put(slot, new UpdateStamp(time++, UpdateStamp.State.Active, true));
        writer.persistShardState(shard, updates);
       
        slotUpdates.put(slot, new UpdateStamp(time++, UpdateStamp.State.Rolled, true));
        writer.persistShardState(shard, updates);
       
        AstyanaxReader reader = AstyanaxReader.getInstance();
        ScheduleContext ctx = new ScheduleContext(System.currentTimeMillis(), Lists.newArrayList(shard));

        Collection<SlotState> slotStates = reader.getShardState(shard);
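        // (assertions elided; the point of the test is that the later Rolled stamp
        // coalesces with the earlier Active stamp for the same granularity/slot,
        // so a single slot state should come back rather than two)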

        // I want to see what happens when RollupService.rollupExecutors gets too much work. It should never reject
        // work.
        long time = 1234;

        // Now insert data that will generate an enormous number of locators.
        AstyanaxWriter writer = AstyanaxWriter.getInstance();

        final int NUM_LOCATORS = 5000;
        int locatorCount = 0;
        while (locatorCount < NUM_LOCATORS) {
            // generate 100 random metrics.
            writer.insertFull(makeRandomIntMetrics(100));
            locatorCount += 100;
        }

        // let's see how many locators this generated. We want it to be a lot.
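        // (elided; presumably the locator rows are scanned back and the count is
        // asserted to be on the order of NUM_LOCATORS)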

    private final Map<Locator, Map<Granularity, Integer>> answers = new HashMap<Locator, Map<Granularity,Integer>>();

    @Before
    public void setUp() throws Exception {
        super.setUp();
        AstyanaxWriter writer = AstyanaxWriter.getInstance();
        IncomingMetricMetadataAnalyzer analyzer = new IncomingMetricMetadataAnalyzer(MetadataCache.getInstance());

        // insert something every 1m for 24h
        for (int i = 0; i < 1440; i++) {
            final long curMillis = baseMillis + i * 60000;
            final List<Metric> metrics = new ArrayList<Metric>();
            final Metric metric = getRandomIntMetric(locators.get(0), curMillis);
            final Metric stringMetric = getRandomStringmetric(locators.get(1), curMillis);
            metrics.add(metric);
            metrics.add(stringMetric);

            analyzer.scanMetrics(new ArrayList<IMetric>(metrics));
            writer.insertFull(metrics);
        }

        // generate every level of rollup for the raw data
        Granularity g = Granularity.FULL;
        while (g != Granularity.MIN_1440) {
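            // (loop body elided; presumably it mirrors testRollupGenerationSimple —
            // roll the data at g up to g.coarser() and advance with g = g.coarser())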
