Package org.apache.kahadb.util

Examples of org.apache.kahadb.util.DataByteArrayOutputStream.toByteSequence()
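
The snippets below come from Apache ActiveMQ's KahaDB journal and message store. For orientation, here is a minimal round-trip sketch that is not taken from the ActiveMQ sources: the class name and values are invented, and it assumes only the DataOutput/DataInput-style methods that appear in the snippets below. It writes a few values, wraps them with toByteSequence(), and reads them back with the matching DataByteArrayInputStream.

    import java.io.IOException;

    import org.apache.kahadb.util.ByteSequence;
    import org.apache.kahadb.util.DataByteArrayInputStream;
    import org.apache.kahadb.util.DataByteArrayOutputStream;

    public class ToByteSequenceRoundTrip {
        public static void main(String[] args) throws IOException {
            // Write a couple of primitive values into the growable buffer.
            DataByteArrayOutputStream os = new DataByteArrayOutputStream();
            os.writeInt(42);
            os.writeUTF("hello");

            // Wrap everything written so far as a ByteSequence.
            ByteSequence sequence = os.toByteSequence();

            // Read the same values back through the matching input stream.
            DataByteArrayInputStream is = new DataByteArrayInputStream(sequence);
            System.out.println(is.readInt());   // 42
            System.out.println(is.readUTF());   // hello
        }
    }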


        try {
            // Serialize the fixed batch control header: record size, record type, then the magic bytes.
            DataByteArrayOutputStream os = new DataByteArrayOutputStream();
            os.writeInt(BATCH_CONTROL_RECORD_SIZE);
            os.writeByte(BATCH_CONTROL_RECORD_TYPE);
            os.write(BATCH_CONTROL_RECORD_MAGIC);
            ByteSequence sequence = os.toByteSequence();
            // compact() trims the backing array so getData() returns exactly the bytes written.
            sequence.compact();
            return sequence.getData();
        } catch (IOException e) {
            // Chain the cause so the original I/O failure is not lost.
            throw new RuntimeException("Could not create batch control record header.", e);
        }
View Full Code Here


                    // Append each queued write: a one-byte type followed by the payload bytes.
                    buff.writeByte(write.location.getType());
                    buff.write(write.data.getData(), write.data.getOffset(), write.data.getLength());
                    write = write.getNext();
                }

                ByteSequence sequence = buff.toByteSequence();

                // Now we can fill in the batch control record properly: rewind, skip the
                // 5-byte size/type preamble plus the magic, and patch in the real batch length.
                buff.reset();
                buff.skip(5 + Journal.BATCH_CONTROL_RECORD_MAGIC.length);
                buff.writeInt(sequence.getLength() - Journal.BATCH_CONTROL_RECORD_SIZE);
View Full Code Here
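
The batch-writing snippet above uses a reserve-then-patch pattern: the control-record header is written first with a placeholder length, the batch payload is appended, the ByteSequence is captured, and only then is the length field overwritten through the same stream via reset() and skip(). Below is a minimal sketch of that pattern; the MAGIC bytes, record type, and payload are invented, and it assumes (as the snippet implies) that toByteSequence() wraps the stream's internal buffer rather than copying it, so the captured sequence sees the patched header.

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.kahadb.util.ByteSequence;
    import org.apache.kahadb.util.DataByteArrayOutputStream;

    public class ReserveThenPatchSketch {

        // Stand-in for Journal.BATCH_CONTROL_RECORD_MAGIC; the real value differs.
        static final byte[] MAGIC = new byte[]{'D', 'E', 'M', 'O'};

        public static void main(String[] args) throws IOException {
            DataByteArrayOutputStream buff = new DataByteArrayOutputStream();

            // Header: 4-byte record size, 1-byte record type, magic bytes, 4-byte length placeholder.
            buff.writeInt(4 + 1 + MAGIC.length + 4);
            buff.writeByte(2);                       // arbitrary record type for this sketch
            buff.write(MAGIC);
            buff.writeInt(0);                        // payload length, patched below

            // Append the batch payload after the header.
            byte[] payload = "some batched records".getBytes(StandardCharsets.UTF_8);
            buff.write(payload);

            // Capture the sequence first; it shares the stream's backing buffer.
            ByteSequence sequence = buff.toByteSequence();

            // Rewind, skip the size/type preamble and the magic, then patch the real length.
            buff.reset();
            buff.skip(4 + 1 + MAGIC.length);
            buff.writeInt(payload.length);

            // The previously captured sequence now carries the patched header plus the payload.
            System.out.println("record length: " + sequence.getLength());
        }
    }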


        // Frame the command as a single type byte followed by its serialized body.
        DataByteArrayOutputStream os = new DataByteArrayOutputStream(size + 1);
        os.writeByte(data.type().getNumber());
        data.writeFramed(os);

        // Append to the journal, then update the index, timing each step.
        long start = System.currentTimeMillis();
        Location location = journal.write(os.toByteSequence(), sync);
        long start2 = System.currentTimeMillis();
        process(data, location);
        long end = System.currentTimeMillis();
        if (LOG_SLOW_ACCESS_TIME > 0 && end - start > LOG_SLOW_ACCESS_TIME) {
            LOG.info("Slow KahaDB access: Journal append took: " + (start2 - start) + " ms, Index update took " + (end - start2) + " ms");
View Full Code Here

      
       // Serialize the expected command: one type byte followed by the framed body.
       int size = expected.serializedSizeFramed();
       DataByteArrayOutputStream os = new DataByteArrayOutputStream(size + 1);
       os.writeByte(expected.type().getNumber());
       expected.writeFramed(os);
       ByteSequence seq = os.toByteSequence();

       // Deserialize it back: the type byte tells us which command to rebuild.
       DataByteArrayInputStream is = new DataByteArrayInputStream(seq);
       KahaEntryType type = KahaEntryType.valueOf(is.readByte());
       JournalCommand message = (JournalCommand) type.createMessage();
       message.mergeFramed(is);


    public ByteSequence toByteSequence(JournalCommand<?> data) throws IOException {
        int size = data.serializedSizeFramed();
        DataByteArrayOutputStream os = new DataByteArrayOutputStream(size + 1);
        os.writeByte(data.type().getNumber());
        data.writeFramed(os);
        return os.toByteSequence();
    }

    // /////////////////////////////////////////////////////////////////
    // Methods called by the broker to update and query the store.
    // /////////////////////////////////////////////////////////////////
View Full Code Here
