Examples of RAFInputStream

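All of the snippets on this page share one pattern: wrap a LockableRandomAccessBuffer in a RAFInputStream to expose a byte range of the buffer as an ordinary InputStream, then layer a DataInputStream (and, in the first example, a checksum-verifying reader) on top of it. The following minimal sketch shows just that pattern; the package of LockableRandomAccessBuffer, the MAGIC constant and the header layout are assumptions for illustration, not taken from the examples below.

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;

import freenet.support.api.LockableRandomAccessBuffer; // package assumed
import freenet.support.io.RAFInputStream;

public class RAFInputStreamSketch {

    // Hypothetical file-format marker, standing in for the MAGIC checked below.
    private static final long MAGIC = 0xF00DCAFEDEADBEEFL;

    /** Reads a small header (magic + version) from the start of the buffer. */
    static int readHeaderVersion(LockableRandomAccessBuffer raf) throws IOException {
        long length = raf.size();
        // Expose bytes [0, length) of the buffer as an ordinary InputStream.
        InputStream in = new RAFInputStream(raf, 0, length);
        DataInputStream dis = new DataInputStream(in);
        try {
            if (dis.readLong() != MAGIC)
                throw new IOException("Bad magic");
            return dis.readInt(); // hypothetical version field
        } finally {
            dis.close();
        }
    }
}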

Examples of freenet.support.io.RAFInputStream

        this.memoryLimitedJobRunner = memoryLimitedJobRunner;
        this.jobRunner = jobRunner;
        this.random = random;
        this.raf = raf;
        rafLength = raf.size();
        // Wrap the whole buffer as a stream; the magic and checksum type are read raw,
        // everything after them through checksummed sub-streams.
        InputStream ois = new RAFInputStream(raf, 0, rafLength);
        DataInputStream dis = new DataInputStream(ois);
        long magic = dis.readLong();
        if(magic != MAGIC)
            throw new StorageFormatException("Bad magic");
        int checksumType = dis.readInt();
        try {
            this.checker = ChecksumChecker.create(checksumType);
        } catch (IllegalArgumentException e) {
            throw new StorageFormatException("Bad checksum type");
        }
        InputStream is = checker.checksumReaderWithLength(ois, new ArrayBucketFactory(), 1024*1024);
        dis = new DataInputStream(is);
        int version = dis.readInt();
        if(version != VERSION)
            throw new StorageFormatException("Bad version");
        LockableRandomAccessBuffer rafOrig = BucketTools.restoreRAFFrom(dis, persistentFG, persistentFileTracker, masterKey);
        if(originalData == null) {
            this.originalData = rafOrig;
        } else {
            // Check that it's the same, but use the passed-in one.
            if(!originalData.equals(rafOrig))
                throw new StorageFormatException("Original data restored from different filename! Expected "+originalData+" but restored "+rafOrig);
            this.originalData = originalData;
        }
        this.totalDataBlocks = dis.readInt();
        if(totalDataBlocks <= 0) throw new StorageFormatException("Bad total data blocks "+totalDataBlocks);
        this.totalCheckBlocks = dis.readInt();
        if(totalCheckBlocks <= 0) throw new StorageFormatException("Bad total check blocks "+totalCheckBlocks);
        try {
            this.splitfileType = SplitfileAlgorithm.getByCode(dis.readShort());
        } catch (IllegalArgumentException e) {
            throw new StorageFormatException("Bad splitfile type");
        }
        try {
            this.codec = FECCodec.getInstance(splitfileType);
        } catch (IllegalArgumentException e) {
            throw new StorageFormatException("Bad splitfile codec type");
        }
        this.dataLength = dis.readLong();
        if(dataLength <= 0) throw new StorageFormatException("Bad data length");
        if(dataLength != this.originalData.size())
            throw new ResumeFailedException("Original data size is "+this.originalData.size()+" should be "+dataLength);
        if(((dataLength + CHKBlock.DATA_LENGTH - 1) / CHKBlock.DATA_LENGTH) != totalDataBlocks)
            throw new StorageFormatException("Data blocks "+totalDataBlocks+" not compatible with size "+dataLength);
        decompressedLength = dis.readLong();
        if(decompressedLength <= 0)
            throw new StorageFormatException("Bogus decompressed length");
        isMetadata = dis.readBoolean();
        short atype = dis.readShort();
        if(atype == -1) {
            archiveType = null;
        } else {
            archiveType = ARCHIVE_TYPE.getArchiveType(atype);
            if(archiveType == null) throw new StorageFormatException("Unknown archive type "+atype);
        }
        try {
            clientMetadata = ClientMetadata.construct(dis);
        } catch (MetadataParseException e) {
            throw new StorageFormatException("Failed to read MIME type: "+e);
        }
        short codec = dis.readShort();
        if(codec == (short)-1)
            compressionCodec = null;
        else {
            compressionCodec = COMPRESSOR_TYPE.getCompressorByMetadataID(codec);
            if(compressionCodec == null)
                throw new StorageFormatException("Unknown compression codec ID "+codec);
        }
        int segmentCount = dis.readInt();
        if(segmentCount <= 0) throw new StorageFormatException("Bad segment count");
        this.segmentSize = dis.readInt();
        if(segmentSize <= 0) throw new StorageFormatException("Bad segment size");
        this.checkSegmentSize = dis.readInt();
        if(checkSegmentSize <= 0) throw new StorageFormatException("Bad check segment size");
        this.crossCheckBlocks = dis.readInt();
        if(crossCheckBlocks < 0) throw new StorageFormatException("Bad cross-check block count");
        if(segmentSize + checkSegmentSize + crossCheckBlocks > FECCodec.MAX_TOTAL_BLOCKS_PER_SEGMENT)
            throw new StorageFormatException("Must be no more than "+FECCodec.MAX_TOTAL_BLOCKS_PER_SEGMENT+" blocks per segment");
        this.splitfileCryptoAlgorithm = dis.readByte();
        if(!Metadata.isValidSplitfileCryptoAlgorithm(splitfileCryptoAlgorithm))
            throw new StorageFormatException("Invalid splitfile crypto algorithm "+splitfileCryptoAlgorithm);
        if(dis.readBoolean()) {
            splitfileCryptoKey = new byte[32];
            dis.readFully(splitfileCryptoKey);
        } else {
            splitfileCryptoKey = null;
        }
        this.keyLength = dis.readInt(); // FIXME validate
        if(keyLength < SplitFileInserterSegmentStorage.getKeyLength(this))
            throw new StorageFormatException("Invalid key length "+keyLength+" should be at least "+
                    SplitFileInserterSegmentStorage.getKeyLength(this));
        int compatMode = dis.readInt();
        if(compatMode < 0 || compatMode >= CompatibilityMode.values().length)
            throw new StorageFormatException("Invalid compatibility mode "+compatMode);
        this.cmode = CompatibilityMode.values()[compatMode];
        this.deductBlocksFromSegments = dis.readInt();
        if(deductBlocksFromSegments < 0 || deductBlocksFromSegments > segmentCount)
            throw new StorageFormatException("Bad deductBlocksFromSegments");
        this.maxRetries = dis.readInt();
        if(maxRetries < -1) throw new StorageFormatException("Bad maxRetries");
        this.consecutiveRNFsCountAsSuccess = dis.readInt();
        if(consecutiveRNFsCountAsSuccess < 0)
            throw new StorageFormatException("Bad consecutiveRNFsCountAsSuccess");
        specifySplitfileKeyInMetadata = dis.readBoolean();
        if(dis.readBoolean()) {
            hashThisLayerOnly = new byte[32];
            dis.readFully(hashThisLayerOnly);
        } else {
            hashThisLayerOnly = null;
        }
        topDontCompress = dis.readBoolean();
        topRequiredBlocks = dis.readInt();
        topTotalBlocks = dis.readInt();
        origDataSize = dis.readLong();
        origCompressedDataSize = dis.readLong();
        hashes = HashResult.readHashes(dis);
        dis.close();
        this.hasPaddedLastBlock = (dataLength % CHKBlock.DATA_LENGTH != 0);
        this.segments = new SplitFileInserterSegmentStorage[segmentCount];
        if(crossCheckBlocks != 0)
            this.crossSegments = new SplitFileInserterCrossSegmentStorage[segmentCount];
        else
            crossSegments = null;
        // Read offsets.
        is = checker.checksumReaderWithLength(ois, new ArrayBucketFactory(), 1024*1024);
        dis = new DataInputStream(is);
        if(hasPaddedLastBlock) {
            offsetPaddedLastBlock = readOffset(dis, rafLength, "offsetPaddedLastBlock");
        } else {
            offsetPaddedLastBlock = 0;
        }
        offsetOverallStatus = readOffset(dis, rafLength, "offsetOverallStatus");
        overallStatusLength = dis.readInt();
        if(overallStatusLength < 0) throw new StorageFormatException("Negative overall status length");
        if(overallStatusLength < FailureCodeTracker.getFixedLength(true))
            throw new StorageFormatException("Bad overall status length");
        // Will be read after offsets
        if(crossSegments != null) {
            offsetCrossSegmentBlocks = new long[crossSegments.length];
            for(int i=0;i<crossSegments.length;i++)
                offsetCrossSegmentBlocks[i] = readOffset(dis, rafLength, "cross-segment block offset");
        } else {
            offsetCrossSegmentBlocks = null;
        }
        offsetSegmentCheckBlocks = new long[segmentCount];
        for(int i=0;i<segmentCount;i++)
            offsetSegmentCheckBlocks[i] = readOffset(dis, rafLength, "segment check block offset");
        offsetSegmentStatus = new long[segmentCount];
        for(int i=0;i<segmentCount;i++)
            offsetSegmentStatus[i] = readOffset(dis, rafLength, "segment status offset");
        if(crossSegments != null) {
            offsetCrossSegmentStatus = new long[crossSegments.length];
            for(int i=0;i<crossSegments.length;i++)
                offsetCrossSegmentStatus[i] = readOffset(dis, rafLength, "cross-segment status offset");
        } else {
            offsetCrossSegmentStatus = null;
        }
        offsetSegmentKeys = new long[segmentCount];
        for(int i=0;i<segmentCount;i++)
            offsetSegmentKeys[i] = readOffset(dis, rafLength, "segment keys offset");
        dis.close();
        // Set up segments...
        underlyingOffsetDataSegments = new long[segmentCount];
        is = checker.checksumReaderWithLength(ois, new ArrayBucketFactory(), 1024*1024);
        dis = new DataInputStream(is);
        int blocks = 0;
        for(int i=0;i<segmentCount;i++) {
            segments[i] = new SplitFileInserterSegmentStorage(this, dis, i, keyLength,
                    splitfileCryptoAlgorithm, splitfileCryptoKey, random, maxRetries, consecutiveRNFsCountAsSuccess, keysFetching);
            underlyingOffsetDataSegments[i] = blocks * CHKBlock.DATA_LENGTH;
            blocks += segments[i].dataBlockCount;
            assert(underlyingOffsetDataSegments[i] < dataLength);
        }
        dis.close();
        if(blocks != totalDataBlocks)
            throw new StorageFormatException("Total data blocks should be "+totalDataBlocks+" but is "+blocks);
        if(crossSegments != null) {
            is = checker.checksumReaderWithLength(ois, new ArrayBucketFactory(), 1024*1024);
            dis = new DataInputStream(is);
            for(int i=0;i<crossSegments.length;i++) {
                crossSegments[i] = new SplitFileInserterCrossSegmentStorage(this, dis, i);
            }
            dis.close();
        }
        ois.close();
        // Re-open the buffer at the stored offset to read the overall status (error counts).
        ois = new RAFInputStream(raf, offsetOverallStatus, rafLength - offsetOverallStatus);
        dis = new DataInputStream(checker.checksumReaderWithLength(ois, new ArrayBucketFactory(), 1024*1024));
        errors = new FailureCodeTracker(true, dis);
        dis.close();
        for(SplitFileInserterSegmentStorage segment : segments) {
            segment.readStatus();
        }
        // ... (snippet truncated here; see the full source for the rest of the constructor)
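
The constructor above (it appears to come from Freenet's SplitFileInserterStorage resume path) opens a single RAFInputStream over the whole buffer and pulls a series of checksummed sections out of it via checker.checksumReaderWithLength(...): the header, the table of offsets, the per-segment metadata and, when cross-check blocks are in use, the cross-segment metadata. It then opens a second RAFInputStream starting at offsetOverallStatus, which shows how the (buffer, offset, length) constructor doubles as a cheap seek into the middle of the stored file.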

Examples of freenet.support.io.RAFInputStream

            assertEquals(storage.getStatus(), Status.FAILED);
        }
    }

    private HashResult[] getHashes(LockableRandomAccessBuffer data) throws IOException {
        InputStream is = new RAFInputStream(data, 0, data.size());
        MultiHashInputStream hashStream = new MultiHashInputStream(is, HashType.SHA256.bitmask);
        FileUtil.copy(is, new NullOutputStream(), data.size());
        is.close();
        return hashStream.getResults();
    }
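
Here the RAFInputStream simply streams the entire buffer through a MultiHashInputStream configured for SHA-256 (HashType.SHA256.bitmask); FileUtil.copy drains the stream into a NullOutputStream, so nothing is retained except the digests returned by getResults().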