Examples of RecoveringBlock

The snippets below, collected from the Hadoop HDFS source tree, show how org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock is constructed, converted to and from its protobuf form, and exercised by the block-recovery tests.
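A RecoveringBlock bundles the block to recover, the datanodes expected to hold replicas, and the new generation stamp to apply once recovery succeeds. As a quick orientation, a minimal construction sketch, assuming the ExtendedBlock-based constructor used by the newer snippets below (all values are illustrative):

  ExtendedBlock blk = new ExtendedBlock("BP-1", 1L, 1024L, 100L); // pool id, block id, length, genstamp (hypothetical values)
  DatanodeInfo[] locs = new DatanodeInfo[] { DFSTestUtil.getLocalDatanodeInfo() };
  RecoveringBlock rb = new RecoveringBlock(blk, locs, blk.getGenerationStamp() + 1); // recovery bumps the generation stamp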


Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock


  public static RecoveringBlock convert(RecoveringBlockProto b) {
    ExtendedBlock block = convert(b.getBlock().getB());
    DatanodeInfo[] locs = convert(b.getBlock().getLocsList());
    return new RecoveringBlock(block, locs, b.getNewGenStamp());
  }
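For context, the object-to-proto direction is equally thin; a sketch, relying on RecoveringBlock extending LocatedBlock so the existing LocatedBlock converter can be reused (method shape inferred, not copied from this codebase):

  public static RecoveringBlockProto convert(RecoveringBlock b) {
    if (b == null) {
      return null;
    }
    LocatedBlockProto lb = PBHelper.convert((LocatedBlock) b);
    return RecoveringBlockProto.newBuilder()
        .setBlock(lb).setNewGenStamp(b.getNewGenerationStamp()).build();
  }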

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

      InterDatanodeProtocol dn1,
      InterDatanodeProtocol dn2) throws IOException {
   
    DatanodeInfo[] locs = new DatanodeInfo[]{
        mock(DatanodeInfo.class), mock(DatanodeInfo.class)};
    RecoveringBlock rBlock = new RecoveringBlock(block,
        locs, RECOVERY_ID);
    ArrayList<BlockRecord> syncList = new ArrayList<BlockRecord>(2);
    BlockRecord record1 = new BlockRecord(
        new DatanodeID("xx", "yy", 44, 55), dn1, replica1);
    BlockRecord record2 = new BlockRecord(
        new DatanodeID("xx", "yy", 44, 55), dn2, replica2); // continuation inferred from record1's pattern
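In the full helper both records go onto syncList, which is then handed to DataNode#syncBlock; a sketch of that continuation (inferred from the setup above; dn stands for the DataNode under test and is an assumption here):

    syncList.add(record1);
    syncList.add(record2);
    dn.syncBlock(rBlock, syncList); // drives updateReplicaUnderRecovery against dn1 and dn2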

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

  private Collection<RecoveringBlock> initRecoveringBlocks() {
    Collection<RecoveringBlock> blocks = new ArrayList<RecoveringBlock>(1);
    DatanodeInfo[] locs = new DatanodeInfo[] {
        new DatanodeInfo(dn.dnRegistration),
        mock(DatanodeInfo.class) };
    RecoveringBlock rBlock = new RecoveringBlock(block, locs, RECOVERY_ID);
    blocks.add(rBlock);
    return blocks;
  }

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

    ReplicaInPipelineInterface replicaInfo = dn.data.createRbw(block);
    BlockWriteStreams streams = null;
    try {
      streams = replicaInfo.createStreams(true, 0, 0);
      streams.checksumOut.write('a');
      dn.data.initReplicaRecovery(new RecoveringBlock(block, null, RECOVERY_ID+1));
      try {
        dn.syncBlock(rBlock, initBlockRecords(dn));
        fail("Sync should fail");
      } catch (IOException e) {
        assertTrue(e.getMessage().startsWith("Cannot recover "));
      }
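The sync is expected to fail here because initReplicaRecovery was issued with RECOVERY_ID+1, moving the replica to a newer recovery id than the one rBlock (created earlier in the test with RECOVERY_ID) carries, so the stale request is rejected with a "Cannot recover ..." message.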

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

    List<BlockInfoUnderConstruction> blocks = recoverBlocks.poll(maxTransfers);
    if(blocks == null)
      return null;
    BlockRecoveryCommand brCommand = new BlockRecoveryCommand(blocks.size());
    for(BlockInfoUnderConstruction b : blocks) {
      brCommand.add(new RecoveringBlock(
          b, b.getExpectedLocations(), b.getBlockRecoveryId()));
    }
    return brCommand;
  }
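On the receiving side the DataNode unpacks the command and kicks off recovery for each entry; a minimal consumption sketch, assuming BlockRecoveryCommand#getRecoveringBlocks (the recoverBlock handler name is illustrative):

  for (RecoveringBlock rb : brCommand.getRecoveringBlocks()) {
    // each entry carries the block, its expected replica locations,
    // and the new generation stamp to apply once recovery succeeds
    recoverBlock(rb); // hypothetical handler
  }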

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

      Block b = locatedblock.getBlock();
      InterDatanodeProtocol.LOG.info("b=" + b + ", " + b.getClass());
      checkMetaInfo(b, datanode);
      long recoveryId = b.getGenerationStamp() + 1;
      idp.initReplicaRecovery(
          new RecoveringBlock(b, locatedblock.getLocations(), recoveryId));

      //verify updateBlock
      Block newblock = new Block(
          b.getBlockId(), b.getNumBytes()/2, b.getGenerationStamp()+1);
      idp.updateReplicaUnderRecovery(b, recoveryId, newblock.getNumBytes());
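updateReplicaUnderRecovery commits the agreed outcome of recovery: here the replica is truncated to half its length and stamped with the new generation stamp; the full test presumably re-verifies the block's meta info afterwards.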

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

      //initReplicaRecovery
      final Block b = locatedblock.getBlock();
      final long recoveryid = b.getGenerationStamp() + 1;
      final long newlength = b.getNumBytes() - 1;
      final ReplicaRecoveryInfo rri = fsdataset.initReplicaRecovery(
          new RecoveringBlock(b, null, recoveryid));

      //check replica
      final ReplicaInfo replica = fsdataset.fetchReplicaInfo(b.getBlockId());
      Assert.assertEquals(ReplicaState.RUR, replica.getState());

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

  @Test
  public void testConvertRecoveringBlock() {
    DatanodeInfo di1 = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo di2 = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo[] dnInfo = new DatanodeInfo[] { di1, di2 };
    RecoveringBlock b = new RecoveringBlock(getExtendedBlock(), dnInfo, 3);
    RecoveringBlockProto bProto = PBHelper.convert(b);
    RecoveringBlock b1 = PBHelper.convert(bProto);
    assertEquals(b.getBlock(), b1.getBlock());
    DatanodeInfo[] dnInfo1 = b1.getLocations();
    assertEquals(dnInfo.length, dnInfo1.length);
    for (int i=0; i < dnInfo.length; i++) {
      compare(dnInfo[i], dnInfo1[i]);
    }
  }

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

    DatanodeInfo di1 = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo di2 = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo[] dnInfo = new DatanodeInfo[] { di1, di2 };

    List<RecoveringBlock> blks = ImmutableList.of(
      new RecoveringBlock(getExtendedBlock(1), dnInfo, 3),
      new RecoveringBlock(getExtendedBlock(2), dnInfo, 3)
    );
   
    BlockRecoveryCommand cmd = new BlockRecoveryCommand(blks);
    BlockRecoveryCommandProto proto = PBHelper.convert(cmd);
    assertEquals(1, proto.getBlocks(0).getBlock().getB().getBlockId());

Examples of org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock

      InterDatanodeProtocol dn2,
      long expectLen) throws IOException {
   
    DatanodeInfo[] locs = new DatanodeInfo[]{
        mock(DatanodeInfo.class), mock(DatanodeInfo.class)};
    RecoveringBlock rBlock = new RecoveringBlock(block,
        locs, RECOVERY_ID);
    ArrayList<BlockRecord> syncList = new ArrayList<BlockRecord>(2);
    BlockRecord record1 = new BlockRecord(
        DFSTestUtil.getDatanodeInfo("1.2.3.4", "bogus", 1234), dn1, replica1);
    BlockRecord record2 = new BlockRecord(
        DFSTestUtil.getDatanodeInfo("1.2.3.4", "bogus", 1234), dn2, replica2); // continuation inferred from record1's pattern