Package org.apache.hadoop.dfs

Examples of org.apache.hadoop.dfs.Block
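A Block identifies an HDFS block by a numeric block id, a length in bytes, and a generation stamp. Before the excerpts, here is a minimal sketch of constructing one directly; it assumes the pre-0.21 org.apache.hadoop.dfs API used below and is placed in the same package, as the test excerpts are. The getNumBytes() accessor is assumed from that API rather than shown in any excerpt:

    package org.apache.hadoop.dfs;

    public class BlockExample {
      public static void main(String[] args) {
        // a Block is identified by (block id, length in bytes, generation stamp)
        Block block = new Block(42L, 0L, GenerationStamp.FIRST_VALID_STAMP);
        // the block name ("blk_<id>") is the prefix used for replica files on a datanode's disk
        System.out.println(block.getBlockName());
        // the length passed to the constructor is available via getNumBytes()
        System.out.println(block.getNumBytes());
      }
    }

The excerpts that follow exercise the same constructor and accessors inside the HDFS test suite.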


      // create a small file with replication factor 3 and wait for all replicas to report
      final Path fileName = new Path("/foo1");
      DFSTestUtil.createFile(fs, fileName, 2, (short)3, 0L);
      DFSTestUtil.waitReplication(fs, fileName, (short)3);
     
      // corrupt the block on datanode 0
      Block block = DFSTestUtil.getFirstBlock(fs, fileName);
      TestDatanodeBlockScanner.corruptReplica(block.getBlockName(), 0);
      // remove the block scanner's verification log so the block is verified again
      File scanLog = new File(System.getProperty("test.build.data"),
          "dfs/data/data1/current/dncp_block_verification.log.curr");
      assertTrue(scanLog.delete());
      // restart the datanode so the corrupt replica will be detected
      cluster.restartDataNode(0);


     
      // populate the cluster with a one-block file
      final Path FILE_PATH = new Path("/testfile");
      DFSTestUtil.createFile(fs, FILE_PATH, 1L, REPLICATION_FACTOR, 1L);
      DFSTestUtil.waitReplication(fs, FILE_PATH, REPLICATION_FACTOR);
      Block block = DFSTestUtil.getFirstBlock(fs, FILE_PATH);

      // keep a copy of all datanode descriptors
      DatanodeDescriptor[] datanodes = (DatanodeDescriptor[])
         namesystem.heartbeats.toArray(new DatanodeDescriptor[REPLICATION_FACTOR]);
     

    final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;
   
    DatanodeDescriptor dd = new DatanodeDescriptor();
    // queue MAX_BLOCKS blocks for invalidation on the datanode
    ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
    for (int i=0; i<MAX_BLOCKS; i++) {
      blockList.add(new Block(i, 0, GenerationStamp.FIRST_VALID_STAMP));
    }
    dd.addBlocksToBeInvalidated(blockList);
    // each call to getInvalidateBlocks returns a BlockCommand with at most MAX_LIMIT blocks
    BlockCommand bc = dd.getInvalidateBlocks(MAX_LIMIT);
    assertEquals(bc.getBlocks().length, MAX_LIMIT);
    bc = dd.getInvalidateBlocks(MAX_LIMIT);

     
      // look for the block in the first datanode's data directory, then the second
      File dataDir = new File(System.getProperty("test.build.data"),
          "dfs/data/data1/current");

      Block blk = getBlock(dataDir);
      if (blk == null) {
        blk = getBlock(new File(System.getProperty("test.build.data"),
          "dfs/data/data2/current"));
      }
      assertFalse(blk == null);

        // the generation stamp is encoded in the .meta file name, just before the ".meta" suffix
        int endIndex = fileName.length() - ".meta".length();
        blockTimeStamp = Long.parseLong(fileName.substring(startIndex, endIndex));
        break;
      }
    }
    return new Block(blockId, blocks[idx].length(), blockTimeStamp);
  }
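
The fragment above is the tail of a getBlock(File) helper like the one invoked in the earlier excerpt: it walks a datanode data directory, pairs a block file with its .meta companion, and rebuilds a Block from the block id, the file length, and the stamp parsed from the .meta name. A fuller sketch along those lines follows; the findFirstBlock name and the "blk_<id>" / "blk_<id>_<generationStamp>.meta" naming are assumptions made here for illustration, not the original test code, and java.io.File is assumed to be imported:

    // Illustrative reconstruction of such a helper, not the original test code.
    static Block findFirstBlock(File dataDir) {
      File[] files = dataDir.listFiles();
      if (files == null) {
        return null;
      }
      for (File blockFile : files) {
        String name = blockFile.getName();
        // block data files are assumed to be named "blk_<id>"; skip .meta files and scanner logs
        if (!name.startsWith("blk_") || name.endsWith(".meta")) {
          continue;
        }
        long blockId = Long.parseLong(name.substring("blk_".length()));
        for (File metaFile : files) {
          String metaName = metaFile.getName();
          // the matching metadata file is assumed to be "blk_<id>_<generationStamp>.meta"
          if (metaName.startsWith(name + "_") && metaName.endsWith(".meta")) {
            int startIndex = name.length() + 1;
            int endIndex = metaName.length() - ".meta".length();
            long generationStamp = Long.parseLong(metaName.substring(startIndex, endIndex));
            return new Block(blockId, blockFile.length(), generationStamp);
          }
        }
      }
      return null;
    }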
