Package org.apache.cassandra.io.compress

Examples of org.apache.cassandra.io.compress.CompressedSequentialWriter$CompressedFileWriterMark


        // write compressed data file of longs
        File tmp = new File(File.createTempFile("cassandra", "unittest").getParent(), "ks-cf-ib-1-Data.db");
        Descriptor desc = Descriptor.fromFilename(tmp.getAbsolutePath());
        MetadataCollector collector = new MetadataCollector(new SimpleDenseCellNameType(BytesType.instance));
        // Snappy compression with a tiny 32-byte chunk length, so even 1000 longs span many chunks
        CompressionParameters param = new CompressionParameters(SnappyCompressor.instance, 32, Collections.EMPTY_MAP);
        // the second path is the -CompressionInfo.db component that will hold the chunk offsets
        CompressedSequentialWriter writer = new CompressedSequentialWriter(tmp, desc.filenameFor(Component.COMPRESSION_INFO), param, collector);
        // remember the (uncompressed) file position of each value before it is written
        Map<Long, Long> index = new HashMap<Long, Long>();
        for (long l = 0L; l < 1000; l++)
        {
            index.put(l, writer.getFilePointer());
            writer.stream.writeLong(l);
        }
        writer.close();

        // read back the chunk-offset metadata written next to the data file
        CompressionMetadata comp = CompressionMetadata.create(tmp.getAbsolutePath());
        List<Pair<Long, Long>> sections = new ArrayList<Pair<Long, Long>>();
        for (long l : valuesToCheck)
        {
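            // The excerpt stops here; valuesToCheck is defined in the surrounding test method and is not shown.
            // Hypothetical continuation (an assumption, not the verbatim test code): each value becomes an
            // 8-byte section using the offsets recorded in `index`, and the metadata is then asked which
            // compressed chunks cover those sections via CompressionMetadata.getChunksForSections(...).
            long position = index.get(l);
            sections.add(Pair.create(position, position + 8)); // a long occupies 8 bytes
        }

        // compressed chunks of the data file that contain the requested sections
        CompressionMetadata.Chunk[] chunks = comp.getChunksForSections(sections);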


        // write compressed data file of longs
        File tmp = new File(File.createTempFile("cassandra", "unittest").getParent(), "ks-cf-ib-1-Data.db");
        Descriptor desc = Descriptor.fromFilename(tmp.getAbsolutePath());
        MetadataCollector collector = new MetadataCollector(new SimpleDenseCellNameType(BytesType.instance));
        CompressionParameters param = new CompressionParameters(SnappyCompressor.instance, 32, Collections.EMPTY_MAP);
        // note: this variant passes an extra boolean to the constructor (in some Cassandra versions this was the skipIOCache flag)
        CompressedSequentialWriter writer = new CompressedSequentialWriter(tmp, desc.filenameFor(Component.COMPRESSION_INFO), false, param, collector);
        Map<Long, Long> index = new HashMap<Long, Long>();
        for (long l = 0L; l < 1000; l++)
        {
            index.put(l, writer.getFilePointer());
            writer.stream.writeLong(l);
        }
        writer.close();

        CompressionMetadata comp = CompressionMetadata.create(tmp.getAbsolutePath());
        List<Pair<Long, Long>> sections = new ArrayList<Pair<Long, Long>>();
        for (long l : valuesToCheck)
        {

    public static CompressedSequentialWriter open(String dataFilePath,
                                                  String offsetsPath,
                                                  CompressionParameters parameters,
                                                  MetadataCollector sstableMetadataCollector)
    {
        return new CompressedSequentialWriter(new File(dataFilePath), offsetsPath, parameters, sstableMetadataCollector);
    }
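The test snippets above build the writer with the constructor directly; using this factory method, the same writer would presumably be obtained as below. This is a sketch reusing tmp, desc, param and collector from the snippets, with the data file path passed as a String as the signature requires:

        // hypothetical equivalent of the direct constructor calls in the snippets above
        CompressedSequentialWriter writer = CompressedSequentialWriter.open(tmp.getAbsolutePath(),
                                                                            desc.filenameFor(Component.COMPRESSION_INFO),
                                                                            param,
                                                                            collector);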

        // write compressed data file of longs
        File tmp = new File(File.createTempFile("cassandra", "unittest").getParent(), "ks-cf-ib-1-Data.db");
        Descriptor desc = Descriptor.fromFilename(tmp.getAbsolutePath());
        // older metadata API: the statistics collector comes from SSTableMetadata rather than MetadataCollector
        SSTableMetadata.Collector collector = SSTableMetadata.createCollector(BytesType.instance);
        CompressionParameters param = new CompressionParameters(SnappyCompressor.instance, 32, Collections.EMPTY_MAP);
        CompressedSequentialWriter writer = new CompressedSequentialWriter(tmp, desc.filenameFor(Component.COMPRESSION_INFO), false, param, collector);
        Map<Long, Long> index = new HashMap<Long, Long>();
        for (long l = 0L; l < 1000; l++)
        {
            index.put(l, writer.getFilePointer());
            writer.stream.writeLong(l);
        }
        writer.close();

        CompressionMetadata comp = CompressionMetadata.create(tmp.getAbsolutePath());
        List<Pair<Long, Long>> sections = new ArrayList<Pair<Long, Long>>();
        for (long l : valuesToCheck)
        {
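None of the excerpts above touch CompressedFileWriterMark itself, the inner class named in the page title: it is the FileMark implementation that CompressedSequentialWriter hands back from mark(). A minimal sketch of how a mark would typically be used with a writer like the one in the snippets (before writer.close()), assuming the mark()/resetAndTruncate(FileMark) methods that SequentialWriter exposes in these Cassandra versions:

        // hypothetical sketch: roll the writer back to a previously marked position
        FileMark mark = writer.mark();     // internally a CompressedFileWriterMark
        writer.stream.writeLong(42L);      // write something we may want to discard
        writer.resetAndTruncate(mark);     // rewind to the mark and truncate what followed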

