Package: org.apache.hadoop.fs

Usage examples of org.apache.hadoop.fs.FSDataInputStream.readByte()


          i.seek(0);
          return new GZIPInputStream(i);
        }
        case 0x5345: { // 'S' 'E'
          // Might be a SequenceFile
          if (i.readByte() == 'Q') {
            i.close();
            return new TextRecordInputStream(item.stat);
          }
        }
        default: {
View Full Code Here


    // Initialize an array to hold all the doc scores.
    scores = new byte[sz];

    // Read each doc score.
    for (int i = 1; i < sz; i++) {
      scores[i] = in.readByte();
      docs++;

      if (i % 1000000 == 0) {
        LOG.info(i + " docscores read");
      }
View Full Code Here

        case 0x1f8b: { // RFC 1952
          i.seek(0);
          return new GZIPInputStream(i);
        }
        case 0x5345: { // 'S' 'E'
          if (i.readByte() == 'Q') {
            i.close();
            return new TextRecordInputStream(item.stat);
          }
          break;
        }
View Full Code Here

        case 0x1f8b: { // RFC 1952
          i.seek(0);
          return new GZIPInputStream(i);
        }
        case 0x5345: { // 'S' 'E'
          if (i.readByte() == 'Q') {
            i.close();
            return new TextRecordInputStream(item.stat);
          }
          break;
        }
View Full Code Here

          i.seek(0);
          return new GZIPInputStream(i);
        }
        case 0x5345: { // 'S' 'E'
          // Might be a SequenceFile
          if (i.readByte() == 'Q') {
            i.close();
            return new TextRecordInputStream(item.stat);
          }
        }
        default: {
View Full Code Here

    // Initialize an array to hold all the doc scores.
    scores = new byte[sz];

    // Read each doc score.
    for (int i = 1; i < sz; i++) {
      scores[i] = in.readByte();
      docs++;

      if (i % 1000000 == 0) {
        LOG.info(i + " docscores read");
      }
View Full Code Here

      FSDataOutputStream stm = createFile(fs, file1, 2);
      writeFile(stm);
      stm.close();

      in = fs.open(file1);
      in.readByte();

      DFSInputStream dfsClientIn = findDFSClientInputStream(in);     
      Field blockReaderField = DFSInputStream.class.getDeclaredField("blockReader");
      blockReaderField.setAccessible(true);
      BlockReader blockReader = (BlockReader) blockReaderField.get(dfsClientIn);
View Full Code Here

      blockReader.setArtificialSlowdown(1000);
      blockReader.isReadLocal = false;
      blockReader.isReadRackLocal = false;
      blockReader.ENABLE_THROW_FOR_SLOW = true;
      for (int i = 0; i < 1024; i++) {
        in.readByte();
      }

      blockReader.setArtificialSlowdown(0);
      for (int i = 1024; i < fileSize - 1; i++) {
        in.readByte();
View Full Code Here

        in.readByte();
      }

      blockReader.setArtificialSlowdown(0);
      for (int i = 1024; i < fileSize - 1; i++) {
        in.readByte();
      }

      ConcurrentHashMap<DatanodeInfo, DatanodeInfo> deadNodes = getDeadNodes(dfsClientIn);
      TestCase.assertEquals(1, deadNodes.size());
    } finally {
View Full Code Here

          i.seek(0);
          return new GZIPInputStream(i);
        }
        case 0x5345: { // 'S' 'E'
          // Might be a SequenceFile
          if (i.readByte() == 'Q') {
            i.close();
            return new TextRecordInputStream(item.stat);
          }
        }
        default: {
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.