Package com.rackspacecloud.blueflood.io

Examples of com.rackspacecloud.blueflood.io.AstyanaxReader
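AstyanaxReader is the read half of Blueflood's Astyanax-based Cassandra IO layer. It is a singleton, and the snippets on this page use it for three jobs: reading raw or rolled-up points for a (tenant, metric) Locator over a time Range, discovering which locators a shard owns, and reading back shard/slot rollup state. As orientation, here is a hedged sketch of the most common read path, built only from calls that appear in the examples below:

    // Minimal sketch: read FULL-resolution points for one metric over the last five minutes.
    // "acMyTenant" and "myService,myServer,cpu" are illustrative placeholders, and the
    // enclosing method is assumed to declare "throws Exception", as the tests below do.
    Locator locator = Locator.createLocatorFromPathComponents("acMyTenant", "myService,myServer,cpu");
    AstyanaxReader reader = AstyanaxReader.getInstance();

    long to = System.currentTimeMillis();
    long from = to - (5 * 60 * 1000);
    Points<SimpleNumber> points = reader.getDataToRoll(SimpleNumber.class, locator, new Range(from, to),
            CassandraModel.getColumnFamily(BasicRollup.class, Granularity.FULL));
    System.out.println("points read: " + points.getPoints().size());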


    @Test
    public void testLocatorsWritten() throws Exception {
        Collection<Locator> locators = writeLocatorsOnly(48);
        AstyanaxReader r = AstyanaxReader.getInstance();

        Set<String> actualLocators = new HashSet<String>();
        for (Locator locator : locators) {
            for (Locator databaseLocator : r.getLocatorsToRollup(Util.computeShard(locator.toString()))) {
                actualLocators.add(databaseLocator.toString());
            }
        }
        Assert.assertEquals(48, actualLocators.size());
    }
View Full Code Here
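In the test above, each locator lives in exactly one shard, computed as Util.computeShard(locator.toString()), and getLocatorsToRollup(shard) returns every locator stored under that shard. Taking the union of the results across the shards of all 48 written locators therefore recovers exactly those 48 locators, which is what the final assert checks.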


    @Test
    public void testRollupGenerationSimple() throws Exception {
        AstyanaxWriter writer = AstyanaxWriter.getInstance();
        AstyanaxReader reader = AstyanaxReader.getInstance();
        final long baseMillis = 1333635148000L; // some point during 5 April 2012.
        int hours = 48;
        final String acctId = "ac" + IntegrationTestBase.randString(8);
        final String metricName = "fooService,barServer," + randString(8);
        final long endMillis = baseMillis + (1000 * 60 * 60 * hours);
        final Locator locator = Locator.createLocatorFromPathComponents(acctId, metricName);

        writeFullData(locator, baseMillis, hours, writer);

        // FULL -> 5m
        ArrayList<SingleRollupWriteContext> writes = new ArrayList<SingleRollupWriteContext>();
        for (Range range : Range.getRangesToRollup(Granularity.FULL, baseMillis, endMillis)) {
            // each range should produce one average
            Points<SimpleNumber> input = reader.getDataToRoll(SimpleNumber.class, locator, range, CassandraModel.CF_METRICS_FULL);
            BasicRollup basicRollup = BasicRollup.buildRollupFromRawSamples(input);
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRawSamples(input);

            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.FULL.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.FULL.coarser()),
                    range.start));
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_5,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_5),
                    range.start));
        }
        writer.insertRollups(writes);

        // 5m -> 20m
        writes.clear();

        for (Range range : Range.getRangesToRollup(Granularity.MIN_5, baseMillis, endMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_5));
            BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.MIN_5.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_5.coarser()),
                    range.start));

            Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_5));
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_20,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_20),
                    range.start));
        }
        writer.insertRollups(writes);

        // 20m -> 60m
        writes.clear();
        for (Range range : Range.getRangesToRollup(Granularity.MIN_20, baseMillis, endMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_20));
            BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.MIN_20.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_20.coarser()),
                    range.start));

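            // note: as written, this and every coarser level read their histogram input from the 5m histogram CF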
            Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_5));
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_60,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_60),
                    range.start));
        }
        writer.insertRollups(writes);

        // 60m -> 240m
        writes.clear();
        for (Range range : Range.getRangesToRollup(Granularity.MIN_60, baseMillis, endMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_60));

            BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.MIN_60.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_60.coarser()),
                    range.start));

            Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_5));
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_240,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_240),
                    range.start));
        }
        writer.insertRollups(writes);

        // 240m -> 1440m
        writes.clear();
        for (Range range : Range.getRangesToRollup(Granularity.MIN_240, baseMillis, endMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_240));
            BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
            writes.add(new SingleRollupWriteContext(basicRollup,
                    locator,
                    Granularity.MIN_240.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_240.coarser()),
                    range.start));

            Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_5));
            HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
            writes.add(new SingleRollupWriteContext(histogramRollup,
                    locator,
                    Granularity.MIN_1440,
                    CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_1440),
                    range.start));
        }
        writer.insertRollups(writes);

        // verify the number of points in 48h worth of rollups.
        Range range = new Range(Granularity.MIN_1440.snapMillis(baseMillis), Granularity.MIN_1440.snapMillis(endMillis + Granularity.MIN_1440.milliseconds()));
        Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                CassandraModel.getColumnFamily(BasicRollup.class, Granularity.MIN_1440));
        BasicRollup basicRollup = BasicRollup.buildRollupFromRollups(input);
        Assert.assertEquals(60 * hours, basicRollup.getCount());

        Points<HistogramRollup> histInput = reader.getDataToRoll(HistogramRollup.class, locator, range,
                CassandraModel.getColumnFamily(HistogramRollup.class, Granularity.MIN_1440));
        HistogramRollup histogramRollup = HistogramRollup.buildRollupFromRollups(histInput);
        Assert.assertTrue(histogramRollup.getBins().size() > 0);
        Assert.assertTrue("Number of bins is " + histogramRollup.getBins().size(),
                histogramRollup.getBins().size() <= HistogramRollup.MAX_BIN_SIZE);
View Full Code Here
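The five rollup blocks in testRollupGenerationSimple repeat one read-build-write step, varying only the source granularity. As a hedged sketch (the helper name rollupBasic is hypothetical, but every call in it appears in the test above), the BasicRollup half of each block could be factored like this; the FULL -> 5m step would stay separate, since it reads raw SimpleNumber samples and uses buildRollupFromRawSamples:

    // Hypothetical helper: roll BasicRollups from granularity src up to src.coarser().
    private static void rollupBasic(AstyanaxReader reader, AstyanaxWriter writer, Locator locator,
                                    Granularity src, long fromMillis, long toMillis) throws Exception {
        ArrayList<SingleRollupWriteContext> writes = new ArrayList<SingleRollupWriteContext>();
        for (Range range : Range.getRangesToRollup(src, fromMillis, toMillis)) {
            Points<BasicRollup> input = reader.getDataToRoll(BasicRollup.class, locator, range,
                    CassandraModel.getColumnFamily(BasicRollup.class, src));
            writes.add(new SingleRollupWriteContext(BasicRollup.buildRollupFromRollups(input),
                    locator,
                    src.coarser(),
                    CassandraModel.getColumnFamily(BasicRollup.class, src.coarser()),
                    range.start));
        }
        writer.insertRollups(writes);
    }

The four chained steps would then collapse to calls like rollupBasic(reader, writer, locator, Granularity.MIN_5, baseMillis, endMillis), and so on up the chain.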

    @Test
    public void testSimpleInsertAndGet() throws Exception {
        AstyanaxWriter writer = AstyanaxWriter.getInstance();
        AstyanaxReader reader = AstyanaxReader.getInstance();
        final long baseMillis = 1333635148000L; // some point during 5 April 2012.
        long lastMillis = baseMillis + (300 * 1000); // 300 seconds.
        final String acctId = "ac" + IntegrationTestBase.randString(8);
        final String metricName = "fooService,barServer," + randString(8);

        final Locator locator  = Locator.createLocatorFromPathComponents(acctId, metricName);

        Set<Long> expectedTimestamps = new HashSet<Long>();
        // insert something every 30s for 5 mins.
        for (int i = 0; i < 10; i++) {
            final long curMillis = baseMillis + (i * 30000); // 30 seconds later.
            expectedTimestamps.add(curMillis);
            List<Metric> metrics = new ArrayList<Metric>();
            metrics.add(getRandomIntMetric(locator, curMillis));
            writer.insertFull(metrics);
        }

        Set<Long> actualTimestamps = new HashSet<Long>();
        // get back the cols that were written from start to stop.

        Points<SimpleNumber> points = reader.getDataToRoll(SimpleNumber.class, locator, new Range(baseMillis, lastMillis),
                CassandraModel.getColumnFamily(BasicRollup.class, Granularity.FULL));
        actualTimestamps = points.getPoints().keySet();
        Assert.assertEquals(expectedTimestamps, actualTimestamps);
    }
View Full Code Here
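The ten writes above are spaced 30 seconds apart, so the last one lands at baseMillis + 270s, inside the 300-second Range that is read back; the final assert checks that the written timestamp set round-trips exactly.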

    @Test
    public void testConsecutiveWriteAndRead() throws ConnectionException, IOException {
        AstyanaxWriter writer = AstyanaxWriter.getInstance();
        AstyanaxReader reader = AstyanaxReader.getInstance();
        final long baseMillis = 1333635148000L;

        final Locator locator = Locator.createLocatorFromPathComponents("ac0001",
                "fooService,fooServer," + randString(8));

        final List<Metric> metrics = new ArrayList<Metric>();
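        // write the 100 points one at a time, reusing (and clearing) the same list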
        for (int i = 0; i < 100; i++) {
            final Metric metric = new Metric(locator, i, baseMillis + (i * 1000),
                    new TimeValue(1, TimeUnit.DAYS), "unknown");
            metrics.add(metric);
            writer.insertFull(metrics);
            metrics.clear();
        }

        int count = 0;
        ColumnFamily<Locator, Long> CF_metrics_full = CassandraModel.getColumnFamily(BasicRollup.class, Granularity.FULL);
        Points<SimpleNumber> points = reader.getDataToRoll(SimpleNumber.class, locator,
                new Range(baseMillis, baseMillis + 500000), CF_metrics_full);
        for (Map.Entry<Long, Points.Point<SimpleNumber>> data : points.getPoints().entrySet()) {
            Points.Point<SimpleNumber> point = data.getValue();
            Assert.assertEquals(count, point.getData().getValue());
            count++;
View Full Code Here
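The incrementing-count assertion above relies on getPoints() returning entries in timestamp order, since each metric's value was set to its write index i.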

            writer.persistShardState(shard, allUpdates);
        }

        // Since we filled all the slots, each shard now has its longest possible row.
        // Verify that getShardState returns all of the slots.
        AstyanaxReader reader = AstyanaxReader.getInstance();
        ScheduleContext ctx = new ScheduleContext(System.currentTimeMillis(), shards);
        ShardStateManager shardStateManager = ctx.getShardStateManager();

        for (Integer shard : shards) {
            Collection<SlotState> slotStates = reader.getShardState(shard);
            for (SlotState slotState : slotStates) {
                shardStateManager.updateSlotOnRead(shard, slotState);
            }

            for (Granularity granularity : Granularity.rollupGranularities()) {
View Full Code Here

        writer.persistShardState(shard, updates);

        slotUpdates.put(slot, new UpdateStamp(time++, UpdateStamp.State.Rolled, true));
        writer.persistShardState(shard, updates);

        AstyanaxReader reader = AstyanaxReader.getInstance();
        ScheduleContext ctx = new ScheduleContext(System.currentTimeMillis(), Lists.newArrayList(shard));

        Collection<SlotState> slotStates = reader.getShardState(shard);
        for (SlotState slotState : slotStates) {
            ctx.getShardStateManager().updateSlotOnRead(shard, slotState);
        }

        ShardStateManager shardStateManager = ctx.getShardStateManager();
View Full Code Here
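Both shard-state snippets above follow the same round trip: persistShardState writes a shard's slot/update-stamp state, getShardState reads it back as SlotState entries, and each entry is merged into the in-memory view with ShardStateManager.updateSlotOnRead. In these tests the ScheduleContext mainly serves to supply the ShardStateManager.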

        Locator locator = Locator.createLocatorFromPathComponents(
                (String) options.get(TENANT_ID),
                (String) options.get(METRIC));

        AstyanaxReader reader = AstyanaxReader.getInstance();

        Long from = (Long) options.get(FROM);
        Long to = (Long) options.get(TO);

        if (from == null || to == null) {
            System.out.println("Either start time or end time is null.");
            to = System.currentTimeMillis();
            from = to - DEFAULT_RANGE.toMillis();
            System.out.println("Using range: " + from + " - " + to);
        }

        if (from >= to) {
            System.err.println("End time " + to + " has to be greater than start time " + from);
            System.exit(2);
        }

        Granularity gran = Granularity.FULL;
        String res = (String) options.get("resolution");
        try {
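            // res may be null here (the NPE lands in the catch below); the finally
            // additionally guards against fromString returning null for an unrecognized name.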
            gran = Granularity.fromString(res.toLowerCase());
        } catch (Exception ex) {
            System.out.println("Exception mapping resolution to Granularity. Using FULL resolution instead.");
            gran = Granularity.FULL;
        } finally {
            if (gran == null) {
                gran = Granularity.FULL;
            }
        }

        System.out.println("Locator: " + locator + ", from: " + from + ", to: "
                + to + ", resolution: " + gran.shortName());

        MetricData data = reader.getDatapointsForRange(locator, new Range(from, to), gran);
        Map<Long, Points.Point> points = data.getData().getPoints();
        for (Map.Entry<Long, Points.Point> item : points.entrySet()) {
            String output = String.format("Timestamp: %d, Data: %s, Unit: %s", item.getKey(), item.getValue().getData().toString(), data.getUnit());
            System.out.println(output);
        }
View Full Code Here
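Unlike getDataToRoll, which is handed a specific column family, getDatapointsForRange takes the Granularity directly and returns a MetricData that carries the unit alongside the points, which is what lets the loop above print timestamp, value, and unit together.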
