Package org.apache.cassandra.io

Examples of org.apache.cassandra.io.IFileReader
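The snippets below come from early Apache Cassandra's commit-log and SSTable code. They all follow the same life cycle: obtain a reader from one of the SequenceFile factory methods (reader() or bufferedReader()), pull whole records with next() or raw bytes with readDirect(), and close the reader in a finally block. Pieced together from these call sites alone, the interface looks roughly as follows; this is a reconstruction for orientation, not the authoritative declaration, so the exact signatures are assumptions.

// Reconstructed from the call sites below, not copied from the Cassandra
// source; exact parameter and return types are assumptions.
public interface IFileReader
{
    /* fill the supplied array with raw bytes from the current position */
    void readDirect(byte[] bytes) throws IOException;

    /* copy the next record (key, size, payload) into bufOut;
       returns the number of bytes consumed, or -1 at end of file */
    long next(DataOutputBuffer bufOut) throws IOException;

    /* position the reader at the given file offset */
    void seek(long position) throws IOException;

    long getCurrentPosition() throws IOException;

    boolean isEOF() throws IOException;

    void close() throws IOException;
}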



    void readCommitLogHeader(String logFile, byte[] bytes) throws IOException
    {
        IFileReader logReader = SequenceFile.reader(logFile);
        try
        {
            logReader.readDirect(bytes);
        }
        finally
        {
            logReader.close();
        }
    }
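
A minimal usage sketch for the helper above; headerSize is a hypothetical stand-in for however the caller learns the length of the serialized CommitLogHeader:

        byte[] header = new byte[headerSize]; // headerSize: assumed, precomputed
        readCommitLogHeader(logFile_, header);
        /* header now holds the raw bytes that
           CommitLogHeader.serializer().deserialize(...) consumes in the next snippet */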


        while ( !filesNeeded.isEmpty() )
        {
            File file = filesNeeded.pop();
            // IFileReader reader = SequenceFile.bufferedReader(file.getAbsolutePath(), DatabaseDescriptor.getLogFileSizeThreshold());
            IFileReader reader = SequenceFile.reader(file.getAbsolutePath());
            try
            {
                Map<String, Row> rows = new HashMap<String, Row>();
                reader.readDirect(header);
                /* deserialize the commit log header */
                bufIn.reset(header, 0, header.length);
                CommitLogHeader clHeader = CommitLogHeader.serializer().deserialize(bufIn);
                /* seek to the lowest position */
                int lowPos = CommitLogHeader.getLowestPosition(clHeader);
                /*
                 * If lowPos == 0 then we need to skip the processing of this
                 * file.
                */
                if (lowPos == 0)
                    break;
                else
                    reader.seek(lowPos);

                /* read the log records, deserialize the rows and apply them */
                while ( !reader.isEOF() )
                {
                    bufOut.reset();
                    long bytesRead = reader.next(bufOut);
                    if ( bytesRead == -1 )
                        break;

                    bufIn.reset(bufOut.getData(), bufOut.getLength());
                    /* Skip over the commit log key portion */
                    bufIn.readUTF();
                    /* Skip over data size */
                    bufIn.readInt();
                   
                    /* read the commit log entry */
                    try
                    {                       
                        Row row = Row.serializer().deserialize(bufIn);
                        Map<String, ColumnFamily> columnFamilies = new HashMap<String, ColumnFamily>(row.getColumnFamilyMap());
                        /* remove column families that have already been flushed */
                        Set<String> cNames = columnFamilies.keySet();

                        for ( String cName : cNames )
                        {
                            ColumnFamily columnFamily = columnFamilies.get(cName);
                            /* TODO: Remove this to not process Hints */
                            if ( !DatabaseDescriptor.isApplicationColumnFamily(cName) )
                            {
                                row.removeColumnFamily(columnFamily);
                                continue;
                            }
                            /* remove the column family if the header shows it was already flushed */
                            int id = table.getColumnFamilyId(columnFamily.name());
                            if ( clHeader.get(id) == 0 || reader.getCurrentPosition() < clHeader.getPosition(id) )
                                row.removeColumnFamily(columnFamily);
                        }
                        if ( !row.isEmpty() )
                        {
                            table.applyNow(row);
                        }
                    }
                    catch ( IOException e )
                    {
                        logger_.debug( LogUtil.throwableToString(e) );
                    }
                }
                reader.close();
                /* apply the rows read */
                table.flush(true);
            }
            catch ( Throwable th )
            {
                logger_.info( LogUtil.throwableToString(th) );
                /* close the reader and delete this commit log. */
                reader.close();
                FileUtils.delete( new File[]{file} );
            }
        }
    }
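
Every record that next() returns in these snippets carries the same framing: a UTF-encoded key, an int payload size, then the payload itself. Stripped of the commit-log specifics, the scan skeleton shared by all the examples on this page reduces to the following sketch (path is illustrative; payload handling elided):

        IFileReader reader = SequenceFile.reader(path);
        DataOutputBuffer bufOut = new DataOutputBuffer();
        DataInputBuffer bufIn = new DataInputBuffer();
        try
        {
            while ( !reader.isEOF() )
            {
                bufOut.reset();
                if ( reader.next(bufOut) == -1 )
                    break;
                bufIn.reset(bufOut.getData(), bufOut.getLength());
                String key = bufIn.readUTF();   /* record key */
                int dataSize = bufIn.readInt(); /* payload size */
                /* ... read dataSize bytes of payload from bufIn ... */
            }
        }
        finally
        {
            reader.close();
        }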


    private static int getBlockCount(String dataFile) throws IOException
    {
        IFileReader dataReader = SequenceFile.bufferedReader(dataFile, bufferSize_);
        DataOutputBuffer bufOut = new DataOutputBuffer();
        DataInputBuffer bufIn = new DataInputBuffer();
        int blockCount = 0;

        try
        {
            while ( !dataReader.isEOF() )
            {
                bufOut.reset();
                dataReader.next(bufOut);
                bufIn.reset(bufOut.getData(), bufOut.getLength());
                /* Key just read */
                String key = bufIn.readUTF();
                if ( key.equals(SSTable.blockIndexKey_) )
                {
                    ++blockCount;
                }
            }
        }
        finally
        {
            dataReader.close();
        }
        return blockCount;
    }
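
A hypothetical call site; the resulting count is exactly what sizes the BloomFilter in the index-rebuild snippet that follows:

        /* path is illustrative; "-Data." is the naming convention
           visible in these snippets */
        int blockCount = getBlockCount("Standard1-1-Data.db");
        BloomFilter bf = new BloomFilter((SSTable.indexInterval() + 1)*blockCount, 8);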

    /* [method signature truncated in the original listing; it evidently takes
       the data file path and the block count computed above] */
    {
        String indexFile = dataFile.replace("-Data.", "-Index.");
        final int bufferSize = 64*1024;

        IFileWriter indexWriter = SequenceFile.bufferedWriter(indexFile, bufferSize);
        IFileReader dataReader = SequenceFile.bufferedReader(dataFile, bufferSize);
        DataOutputBuffer bufOut = new DataOutputBuffer();
        DataInputBuffer bufIn = new DataInputBuffer();
        /* BloomFilter of all data in the data file */
        BloomFilter bf = new BloomFilter((SSTable.indexInterval() + 1)*blockCount, 8);

        try
        {
            while ( !dataReader.isEOF() )
            {
                bufOut.reset();
                /* Record the position of the key. */
                long blockIndexOffset = dataReader.getCurrentPosition();
                dataReader.next(bufOut);
                bufIn.reset(bufOut.getData(), bufOut.getLength());
                /* Key just read */
                String key = bufIn.readUTF();
                if ( key.equals(SSTable.blockIndexKey_) )
                {
                    /* Ignore the size of the data associated with the block index */
                    bufIn.readInt();
                    /* Number of keys in the block. */
                    int blockSize = bufIn.readInt();
                    /* Largest key in the block */
                    String largestKey = null;

                    /*
                     * Read the keys in this block and find the largest key in
                     * this block. This is the key that gets written into the
                     * index file.
                    */
                    for ( int i = 0; i < blockSize; ++i )
                    {
                        String currentKey = bufIn.readUTF();
                        bf.add(currentKey);
                        if ( largestKey == null )
                        {
                            largestKey = currentKey;
                        }
                        else
                        {
                            if ( currentKey.compareTo(largestKey) > 0 )
                            {
                                /* record this key */
                                largestKey = currentKey;
                            }
                        }
                        /* read the key's position and the size of its data, and discard both */
                        bufIn.readLong();
                        bufIn.readLong();
                    }

                    /*
                     * Write into the index file the largest key in the block
                     * and the offset of the block index in the data file.
                    */
                    indexWriter.append(largestKey, BasicUtilities.longToByteArray(blockIndexOffset));
                }
            }
        }
        finally
        {
            dataReader.close();
            /* Cache the bloom filter */
            SSTable.storeBloomFilter(dataFile, bf);
            /* Write the bloom filter into the index file */
            bufOut.reset();
            BloomFilter.serializer().serialize(bf, bufOut);
            /* assumed completion -- the original listing is truncated at this
               point: append the serialized filter as the index file's footer
               and close the writer (close(byte[], int) is assumed on IFileWriter) */
            indexWriter.close(bufOut.getData(), bufOut.getLength());
        }
    }


    public static boolean extractKeyIntoFile(String keyToExtract, String dataFile, String outputFile) throws IOException
    {
        IFileReader dataReader = SequenceFile.bufferedReader(dataFile, bufferSize_);
        DataOutputBuffer bufOut = new DataOutputBuffer();
        DataInputBuffer bufIn = new DataInputBuffer();

        try
        {
            while ( !dataReader.isEOF() )
            {
                bufOut.reset();
                dataReader.next(bufOut);
                bufIn.reset(bufOut.getData(), bufOut.getLength());
                /* Key just read */
                String key = bufIn.readUTF();
                /* check if we want this key */
                if ( key.equals(keyToExtract) )
                {
                    int keySize = bufIn.readInt();
                    byte[] keyData = new byte[keySize];
                    bufIn.read(keyData, 0, keySize);

                    /* write the key data into a file */
                    RandomAccessFile raf = new RandomAccessFile(outputFile, "rw");
                    raf.writeUTF(key);
                    raf.writeInt(keySize);
                    raf.write(keyData);
                    dumpBlockIndex(keyToExtract, 0L, keySize, raf);
                    raf.close();
                    return true;
                }
            }
        }
        finally
        {
            dataReader.close();
        }

        return false;
    }
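
Finally, a hypothetical invocation of the extractor; the key and file names are illustrative:

        boolean found = extractKeyIntoFile("someKey", "Standard1-1-Data.db",
                                           "someKey-extracted.db");
        if ( !found )
            System.out.println("key not found in data file");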
