Examples of PureJavaCrc32C


Examples of org.apache.hadoop_voltpatches.util.PureJavaCrc32C
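The snippets in this group use VoltDB's patched copy of Hadoop's pure-Java CRC-32C (Castagnoli) implementation through the java.util.zip.Checksum interface it implements: construct an instance, feed it bytes with update(), and read the 32-bit result from getValue(). As a minimal sketch of that pattern, assuming only the methods the excerpts themselves call (update(byte[], int, int) and getValue()):

import java.nio.charset.StandardCharsets;

import org.apache.hadoop_voltpatches.util.PureJavaCrc32C;

public class Crc32cSketch {
    public static void main(String[] args) {
        byte[] payload = "hello crc32c".getBytes(StandardCharsets.UTF_8);

        // CRC-32C (Castagnoli polynomial) computed entirely in Java.
        PureJavaCrc32C crc = new PureJavaCrc32C();
        crc.update(payload, 0, payload.length);

        // getValue() returns a long, but a CRC-32C occupies only the low
        // 32 bits, so callers in the excerpts below routinely cast it to int.
        int checksum = (int) crc.getValue();
        System.out.printf("crc32c = 0x%08x%n", checksum);
    }
}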

        assertEquals("0a1A0A", p2.toArray()[6]);
    }

    public void testGetCRCWithoutCrash() throws IOException {
        ParameterSet pset;
        PureJavaCrc32C crc;
        ByteBuffer buf;

        Object[] psetObjs = new Object[] {
                null, VoltType.INTEGER.getNullValue(), VoltType.DECIMAL.getNullValue(), // null values
                (byte)1, (short)2, (int)3, (long)4, 1.2f, 3.6d, // numbers
                "This is spinal tap", "", // strings
                "ABCDF012", new byte[] { 1, 3, 5 }, new byte[0], // binary
                new BigDecimal(5.5), // decimal
                new TimestampType(new Date()) // timestamp
        };

        pset = ParameterSet.fromArrayNoCopy(psetObjs);
        crc = new PureJavaCrc32C();
        buf = ByteBuffer.allocate(pset.getSerializedSize());
        pset.flattenToBuffer(buf);
        crc.update(buf.array());
        long crc1 = crc.getValue();

        ArrayUtils.reverse(psetObjs);

        pset = ParameterSet.fromArrayNoCopy(psetObjs);
        crc = new PureJavaCrc32C();
        buf = ByteBuffer.allocate(pset.getSerializedSize());
        pset.flattenToBuffer(buf);
        crc.update(buf.array());
        long crc2 = crc.getValue();

        pset = ParameterSet.fromArrayNoCopy(new Object[0]);
        crc = new PureJavaCrc32C();
        buf = ByteBuffer.allocate(pset.getSerializedSize());
        pset.flattenToBuffer(buf);
        crc.update(buf.array());
        long crc3 = crc.getValue();

        pset = ParameterSet.fromArrayNoCopy(new Object[] { 1 });
        crc = new PureJavaCrc32C();
        buf = ByteBuffer.allocate(pset.getSerializedSize());
        pset.flattenToBuffer(buf);
        crc.update(buf.array());
        long crc4 = crc.getValue();

        assertNotSame(crc1, crc2);
        assertNotSame(crc1, crc3);
        assertNotSame(crc1, crc4);
        assertNotSame(crc2, crc3);
View Full Code Here
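The test above serializes a ParameterSet into a ByteBuffer, checksums the backing array, then repeats the exercise with the parameters reversed, with an empty set, and with a single integer, expecting the CRCs to differ. (Strictly speaking, assertNotSame compares references of the autoboxed longs rather than their values; assertNotEquals would check the CRCs themselves.) A trimmed-down sketch of the underlying point, over plain byte arrays instead of VoltDB's serialization:

import org.apache.hadoop_voltpatches.util.PureJavaCrc32C;

public class CrcOrderSketch {
    static long crcOf(byte[] data) {
        PureJavaCrc32C crc = new PureJavaCrc32C();
        crc.update(data, 0, data.length);
        return crc.getValue();
    }

    public static void main(String[] args) {
        long crcForward  = crcOf(new byte[] {1, 2, 3});
        long crcReversed = crcOf(new byte[] {3, 2, 1});

        // Reordering the input bytes changes the serialized stream and hence
        // the CRC, which is what the reversed-ParameterSet case relies on.
        System.out.println(crcForward != crcReversed);   // true
    }
}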

Examples of org.apache.hadoop_voltpatches.util.PureJavaCrc32C

    private SQLStmt(byte[] sqlText, String joinOrder) {
        this.sqlText = sqlText;
        this.joinOrder = joinOrder;

        // create a hash for determinism purposes
        PureJavaCrc32C crc = new PureJavaCrc32C();
        crc.update(sqlText);
        // ugly hack to get bytes from an int
        this.sqlCRC = ByteBuffer.allocate(4).putInt((int) crc.getValue()).array();

        inCatalog = true;
    }
View Full Code Here
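The constructor above stores the CRC of the SQL text as a 4-byte array; the "ugly hack" routes the low 32 bits of getValue() through a throwaway ByteBuffer. A self-contained illustration of that packing and its inverse (the CRC value here is made up):

import java.nio.ByteBuffer;

public class CrcBytesSketch {
    public static void main(String[] args) {
        long crcValue = 0xDEADBEEFL;   // stand-in for crc.getValue()

        // Same trick as the SQLStmt constructor: ByteBuffer writes the int
        // as 4 big-endian bytes and hands back its backing array.
        byte[] crcBytes = ByteBuffer.allocate(4).putInt((int) crcValue).array();

        // The reverse direction, should the int ever be needed again.
        int restored = ByteBuffer.wrap(crcBytes).getInt();
        System.out.printf("0x%08x%n", restored);   // 0xdeadbeef
    }
}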

Examples of org.apache.hadoop_voltpatches.util.PureJavaCrc32C

        System.out.println("Seed is " + seed);
        for (int ii = 0; ii < 10000; ii++) {
            int nextLength = r.nextInt(4096);
            byte bytes[] = new byte[nextLength];
            r.nextBytes(bytes);
            PureJavaCrc32C checksum = new PureJavaCrc32C();
            checksum.update(bytes);
            int javaSum = (int)checksum.getValue();
            BBContainer cont = DBBPool.allocateDirect(nextLength);
            cont.b().put(bytes);
            int cSum = DBBPool.getCRC32C(cont.address(), 0, nextLength);
            cont.discard();
            assertEquals(javaSum, cSum);
View Full Code Here
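This test fuzzes random byte arrays and checks that the pure-Java CRC matches VoltDB's native implementation behind DBBPool.getCRC32C. Since Java 9 the JDK ships java.util.zip.CRC32C for the same Castagnoli polynomial, so a similar cross-check can be written without native code; a sketch, assuming a Java 9+ runtime:

import java.util.Random;
import java.util.zip.CRC32C;

import org.apache.hadoop_voltpatches.util.PureJavaCrc32C;

public class CrossCheckSketch {
    public static void main(String[] args) {
        Random r = new Random(42);
        byte[] bytes = new byte[r.nextInt(4096)];
        r.nextBytes(bytes);

        PureJavaCrc32C pureJava = new PureJavaCrc32C();
        pureJava.update(bytes, 0, bytes.length);

        CRC32C jdk = new CRC32C();   // the JDK's CRC-32C, Java 9+
        jdk.update(bytes, 0, bytes.length);

        // Both compute CRC-32C (Castagnoli), so the low 32 bits should agree.
        System.out.println((int) pureJava.getValue() == (int) jdk.getValue());   // true
    }
}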

Examples of org.apache.hadoop_voltpatches.util.PureJavaCrc32C

    /**
     * It computes a signature from the given configuration bytes
     * @param config configuration byte array
     * @return signature from the given configuration bytes
     */
    static public long computeConfigurationSignature(byte [] config) {
        PureJavaCrc32C crc = new PureJavaCrc32C();
        crc.update(config);
        return crc.getValue();
    }
View Full Code Here
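computeConfigurationSignature collapses an arbitrary configuration byte array into a single long so two configurations can be compared cheaply. A self-contained sketch of the same idea, with made-up configuration strings (a CRC collision is theoretically possible, so this is a fast inequality check rather than a cryptographic guarantee):

import java.nio.charset.StandardCharsets;

import org.apache.hadoop_voltpatches.util.PureJavaCrc32C;

public class ConfigSignatureSketch {
    // Same shape as computeConfigurationSignature above.
    static long signatureOf(byte[] config) {
        PureJavaCrc32C crc = new PureJavaCrc32C();
        crc.update(config, 0, config.length);
        return crc.getValue();
    }

    public static void main(String[] args) {
        byte[] configA = "schema=v1;sites=8".getBytes(StandardCharsets.UTF_8);
        byte[] configB = "schema=v2;sites=8".getBytes(StandardCharsets.UTF_8);

        // Differing configurations yield differing signatures.
        System.out.println(signatureOf(configA) == signatureOf(configB));   // false
    }
}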

Examples of org.apache.hadoop_voltpatches.util.PureJavaCrc32C

                            lengthPrefix.putInt(tupleData.getInt(0)); // partitionId

                            /*
                             * Checksum the header and put it in the payload buffer
                             */
                            PureJavaCrc32C crc = new PureJavaCrc32C();
                            crc.update(lengthPrefix.array(), 0, 8);
                            lengthPrefix.putInt((int)crc.getValue());
                            lengthPrefix.flip();
                            payloadBuffer.put(lengthPrefix);
                            payloadBuffer.position(0);

                            enforceSnapshotRateLimit(payloadBuffer.remaining());
View Full Code Here
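Here the snapshot writer builds a small header buffer, CRCs its first 8 bytes, appends the CRC as another int, and copies the header in front of the payload. A stripped-down sketch of that framing with made-up field values, assuming a length-then-partition-id layout as the excerpt suggests (the real snapshot header carries more than is visible here):

import java.nio.ByteBuffer;

import org.apache.hadoop_voltpatches.util.PureJavaCrc32C;

public class HeaderChecksumSketch {
    public static void main(String[] args) {
        int payloadLength = 1024;   // made-up values
        int partitionId = 7;

        // 4 bytes length + 4 bytes partition id + 4 bytes CRC over the first 8 bytes.
        ByteBuffer header = ByteBuffer.allocate(12);
        header.putInt(payloadLength);
        header.putInt(partitionId);

        PureJavaCrc32C crc = new PureJavaCrc32C();
        crc.update(header.array(), 0, 8);
        header.putInt((int) crc.getValue());

        header.flip();   // ready to be copied in front of the payload, as above
    }
}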

Examples of org.apache.hadoop_voltpatches.util.PureJavaCrc32C

                    /*
                     * Get the partition id and its CRC and validate it. Validating the
                     * partition ID for the chunk separately makes it possible to
                     * continue processing chunks from other partitions if only one partition
                     * has corrupt chunks in the file.
                     */
                    assert(m_checksumType == ChecksumType.CRC32C);
                    final Checksum partitionIdCRC = new PureJavaCrc32C();
                    final int nextChunkPartitionId = chunkLengthB.getInt(4);
                    final int nextChunkPartitionIdCRC = chunkLengthB.getInt(8);

                    partitionIdCRC.update(chunkLengthB.array(), 0, 8);
                    int generatedValue = (int)partitionIdCRC.getValue();
                    if (generatedValue != nextChunkPartitionIdCRC) {
                        chunkLengthB.position(0);
                        for (int partitionId : m_partitionIds) {
                            m_corruptedPartitions.add(partitionId);
                        }
View Full Code Here
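The restore side recomputes the CRC over the same 8 header bytes and compares it with the stored value; on a mismatch every partition in the file is marked corrupt, since the partition id itself cannot be trusted. A sketch of just the validation step, reusing the 12-byte layout from the previous sketch:

import java.nio.ByteBuffer;
import java.util.zip.Checksum;

import org.apache.hadoop_voltpatches.util.PureJavaCrc32C;

public class HeaderValidateSketch {
    /** True if the CRC stored at bytes 8..11 matches the first 8 header bytes. */
    static boolean headerIsValid(ByteBuffer header) {
        int storedCrc = header.getInt(8);

        Checksum crc = new PureJavaCrc32C();
        crc.update(header.array(), 0, 8);

        return (int) crc.getValue() == storedCrc;
    }

    public static void main(String[] args) {
        // Build a header the same way the writer-side sketch does.
        ByteBuffer header = ByteBuffer.allocate(12);
        header.putInt(1024).putInt(7);

        Checksum crc = new PureJavaCrc32C();
        crc.update(header.array(), 0, 8);
        header.putInt((int) crc.getValue());

        System.out.println(headerIsValid(header));   // true
    }
}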

Examples of org.apache.hadoop_voltpatches.util.PureJavaCrc32C

                     * Get the partition id and its CRC and validate it. Validating the
                     * partition ID for the chunk separately makes it possible to
                     * continue processing chunks from other partitions if only one partition
                     * has corrupt chunks in the file.
                     */
                    final Checksum partitionIdCRC = m_checksumType == ChecksumType.CRC32C ? new PureJavaCrc32C() : new PureJavaCrc32();
                    chunkLengthB.mark();
                    final int nextChunkPartitionId = chunkLengthB.getInt();
                    final int nextChunkPartitionIdCRC = chunkLengthB.getInt();
                    chunkLengthB.reset();
                    byte partitionIdBytes[] = new byte[4];
View Full Code Here
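This variant of the chunk validation picks the checksum implementation from the snapshot file's checksum type, and uses mark()/reset() so the partition id and its CRC can be read ahead of re-reading the raw bytes. A minimal sketch of the selection idiom, assuming PureJavaCrc32 lives in the same voltpatches package as its upstream Hadoop counterpart (ChecksumType below is a local stand-in enum, not VoltDB's):

import java.util.zip.Checksum;

import org.apache.hadoop_voltpatches.util.PureJavaCrc32;
import org.apache.hadoop_voltpatches.util.PureJavaCrc32C;

public class ChecksumSelectSketch {
    // Local stand-in for the snapshot file's checksum-type flag.
    enum ChecksumType { CRC32, CRC32C }

    // Both classes implement java.util.zip.Checksum, so the caller stays
    // agnostic about which polynomial the snapshot was written with.
    static Checksum newChecksum(ChecksumType type) {
        return type == ChecksumType.CRC32C ? new PureJavaCrc32C() : new PureJavaCrc32();
    }

    public static void main(String[] args) {
        Checksum crc = newChecksum(ChecksumType.CRC32C);
        crc.update(new byte[] {1, 2, 3}, 0, 3);
        System.out.printf("0x%08x%n", (int) crc.getValue());
    }
}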

Examples of org.iq80.leveldb.util.PureJavaCrc32C

        closed = true;
    }

    public static int crc32c(Slice data, CompressionType type)
    {
        PureJavaCrc32C crc32c = new PureJavaCrc32C();
        crc32c.update(data.getRawArray(), data.getRawOffset(), data.length());
        crc32c.update(type.persistentId() & 0xFF);
        return crc32c.getMaskedValue();
    }
View Full Code Here
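The LevelDB port checksums the block contents plus the one-byte compression-type id and returns the masked value. LevelDB masks CRCs before storing them (rotate right by 15 bits, add a constant) so that computing a CRC over data that itself contains CRCs behaves better; a sketch of that transform and its inverse, assuming getMaskedValue applies the standard LevelDB mask:

public class Crc32cMaskSketch {
    private static final int MASK_DELTA = 0xa282ead8;   // LevelDB's kMaskDelta

    /** Rotate right by 15 bits and add a constant, as LevelDB does before storing a CRC. */
    static int mask(int crc) {
        return ((crc >>> 15) | (crc << 17)) + MASK_DELTA;
    }

    /** The inverse transform, applied before verifying a stored CRC. */
    static int unmask(int maskedCrc) {
        int rot = maskedCrc - MASK_DELTA;
        return (rot << 15) | (rot >>> 17);
    }

    public static void main(String[] args) {
        int crc = 0x12345678;
        System.out.println(unmask(mask(crc)) == crc);   // true
    }
}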

Examples of org.iq80.leveldb.util.PureJavaCrc32C

    }

    public static int getChunkChecksum(int chunkTypeId, byte[] buffer, int offset, int length)
    {
        // Compute the crc of the record type and the payload.
        PureJavaCrc32C crc32C = new PureJavaCrc32C();
        crc32C.update(chunkTypeId);
        crc32C.update(buffer, offset, length);
        return crc32C.getMaskedValue();
    }
View Full Code Here
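For log records the checksum covers the one-byte record type as well as the payload, so a flipped type byte is detected just like corrupted data. A sketch of that property, using a local copy of the same method (the type ids 1 and 2 simply stand for two different record types here):

import org.iq80.leveldb.util.PureJavaCrc32C;

public class ChunkChecksumSketch {
    // Same shape as getChunkChecksum above.
    static int getChunkChecksum(int chunkTypeId, byte[] buffer, int offset, int length) {
        PureJavaCrc32C crc32C = new PureJavaCrc32C();
        crc32C.update(chunkTypeId);
        crc32C.update(buffer, offset, length);
        return crc32C.getMaskedValue();
    }

    public static void main(String[] args) {
        byte[] payload = {10, 20, 30, 40};

        int typeOne = getChunkChecksum(1, payload, 0, payload.length);
        int typeTwo = getChunkChecksum(2, payload, 0, payload.length);

        // Same payload, different record type: the stored checksum no longer matches.
        System.out.println(typeOne != typeTwo);   // true
    }
}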
