Examples of InterDatanodeProtocol
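InterDatanodeProtocol is the RPC interface that HDFS datanodes use to talk to each other, primarily during block recovery. The snippets below show the two recurring usage patterns: datanode-side recovery code that either uses the local DataNode object or creates a proxy with DataNode.createInterDataNodeProtocolProxy(...), and unit tests that mock the interface to verify updateReplicaUnderRecovery calls.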


Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

      int rwrCount = 0;
     
      List<BlockRecord> blockRecords = new ArrayList<BlockRecord>();
      for(DatanodeID id : datanodeids) {
        try {
          InterDatanodeProtocol datanode;
          if (dnRegistration.getHost().equals(id.getHost()) &&
              dnRegistration.getIpcPort() == id.getIpcPort()) {
            // The target is this datanode itself, so skip the RPC proxy.
            datanode = this;
          } else {
            // Remote target: open an inter-datanode RPC proxy to it.
            datanode = DataNode.createInterDataNodeProtocolProxy(id, getConf());
          }
          // Ask the datanode to start recovery and report what it knows about the block.
          BlockRecoveryInfo info = datanode.startBlockRecovery(block);
          if (info == null) {
            LOG.info("No block metadata found for block " + block + " on datanode "
                + id);
            continue;
          }
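The choice between using the local datanode object and creating an RPC proxy recurs throughout these recovery snippets. Below is a minimal sketch of that selection, distilled from the examples on this page; the class, field, and method names (DatanodeSelector, self, localId, pick) are hypothetical, and the two-argument createInterDataNodeProtocolProxy factory matches the older snippets shown here (newer Hadoop versions also take a socket timeout and a connect-via-hostname flag).

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;

// Sketch only: the local-vs-remote selection used by the recovery loops above.
class DatanodeSelector {
  private final InterDatanodeProtocol self; // the local datanode (hypothetical field)
  private final DatanodeID localId;         // our own id (hypothetical field)
  private final Configuration conf;

  DatanodeSelector(InterDatanodeProtocol self, DatanodeID localId, Configuration conf) {
    this.self = self;
    this.localId = localId;
    this.conf = conf;
  }

  // Return the local object for our own id, otherwise an inter-datanode RPC proxy.
  InterDatanodeProtocol pick(DatanodeID id) throws IOException {
    if (localId.equals(id)) {
      return self; // no RPC round trip for the local replica
    }
    return DataNode.createInterDataNodeProtocolProxy(id, conf);
  }
}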

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

      List<InterDatanodeProtocol> datanodeProxies =
        new ArrayList<InterDatanodeProtocol>();
      //check generation stamps
      for(DatanodeID id : datanodeids) {
        try {
          InterDatanodeProtocol datanode;
          if (getDNRegistrationForNS(namespaceId).equals(id)) {
            LOG.info("Skipping IDNPP creation for local id " + id
                + " when recovering " + block);
            datanode = this;
          } else {
            LOG.info("Creating IDNPP for non-local id " + id + " (dnReg="
                + getDNRegistrationForNS(namespaceId) + ") when recovering "
                + block);
            datanode = DataNode.createInterDataNodeProtocolProxy(
                id, getConf(), socketTimeout);
            // Keep track of the proxy so it can be released after recovery.
            datanodeProxies.add(datanode);
          }
          // Give up if the recovery deadline has already passed.
          throwIfAfterTime(deadline);
          BlockRecoveryInfo info = datanode.startBlockRecovery(namespaceId, block);
          if (info == null) {
            LOG.info("No block metadata found for block " + block + " on datanode "
                + id);
            continue;
          }
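This variant also collects the created proxies in datanodeProxies so they can be released once recovery finishes; the snippet is cut off before that cleanup. A hedged sketch of what such a shutdown loop could look like (RPC.stopProxy is Hadoop's standard way to release a client-side proxy, but the actual helper used by this code is not shown here):

      // Sketch: release the inter-datanode proxies created above once recovery is done.
      // 'datanodeProxies' is the list populated in the loop above.
      for (InterDatanodeProtocol proxy : datanodeProxies) {
        org.apache.hadoop.ipc.RPC.stopProxy(proxy);
      }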

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

      this.target = target;
      this.srcFileSystem = data.getFileSystemForBlock(srcNamespaceId, srcBlock);
    }

    public Boolean call() throws Exception {
      InterDatanodeProtocol remoteDatanode = null;
      try {
        File srcBlockFile = data.getBlockFile(srcNamespaceId, srcBlock);
        // Open a proxy to the target datanode and ask it to copy the block
        // directly from this datanode's local file system.
        remoteDatanode = DataNode
            .createInterDataNodeProtocolProxy(target, getConf(), socketTimeout);
        remoteDatanode.copyBlockLocal(srcFileSystem, srcNamespaceId, srcBlock,
            dstNamespaceId, dstBlock,
            srcBlockFile.getAbsolutePath());
      } catch (IOException e) {
        LOG.warn("Cross datanode local block copy failed", e);
        throw e;

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

    //check generation stamps
    for(DatanodeID id : datanodeids) {
      try {
        BPOfferService bpos = blockPoolManager.get(blookPoolId);
        DatanodeRegistration bpReg = bpos.bpRegistration;
        // Use this datanode directly when the id matches our own registration
        // for the block pool; otherwise create an inter-datanode proxy.
        InterDatanodeProtocol datanode = bpReg.equals(id)?
            this: DataNode.createInterDataNodeProtocolProxy(id, getConf(),
                dnConf.socketTimeout, dnConf.connectToDnViaHostname);
        ReplicaRecoveryInfo info = callInitReplicaRecovery(datanode, rBlock);
        if (info != null &&
            info.getGenerationStamp() >= block.getGenerationStamp() &&

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

      int errorCount = 0;

      //check generation stamps
      for(DatanodeID id : datanodeids) {
        try {
          InterDatanodeProtocol datanode = dnRegistration.equals(id)?
              this: DataNode.createInterDataNodeProtocolProxy(id, getConf());
          BlockMetaDataInfo info = datanode.getBlockMetaDataInfo(block);
          // Only replicas with an up-to-date generation stamp take part in the sync.
          if (info != null && info.getGenerationStamp() >= block.getGenerationStamp()) {
            if (keepLength) {
              // When keeping the length, only replicas of the expected size qualify.
              if (info.getNumBytes() == block.getNumBytes()) {
                syncList.add(new BlockRecord(id, datanode, new Block(info)));
              }

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

    // Two finalized replicas of equal length: both datanodes are asked to
    // update their replica under recovery to REPLICA_LEN1.
    ReplicaRecoveryInfo replica1 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-1, ReplicaState.FINALIZED);
    ReplicaRecoveryInfo replica2 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-2, ReplicaState.FINALIZED);

    InterDatanodeProtocol dn1 = mock(InterDatanodeProtocol.class);
    InterDatanodeProtocol dn2 = mock(InterDatanodeProtocol.class);

    testSyncReplicas(replica1, replica2, dn1, dn2, REPLICA_LEN1);
    verify(dn1).updateReplicaUnderRecovery(block, RECOVERY_ID, REPLICA_LEN1);
    verify(dn2).updateReplicaUnderRecovery(block, RECOVERY_ID, REPLICA_LEN1);

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

    // A finalized replica and an RBW (replica-being-written) replica of the
    // same length: both datanodes are updated to REPLICA_LEN1.
    ReplicaRecoveryInfo replica1 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-1, ReplicaState.FINALIZED);
    ReplicaRecoveryInfo replica2 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-2, ReplicaState.RBW);

    InterDatanodeProtocol dn1 = mock(InterDatanodeProtocol.class);
    InterDatanodeProtocol dn2 = mock(InterDatanodeProtocol.class);

    testSyncReplicas(replica1, replica2, dn1, dn2, REPLICA_LEN1);
    verify(dn1).updateReplicaUnderRecovery(block, RECOVERY_ID, REPLICA_LEN1);
    verify(dn2).updateReplicaUnderRecovery(block, RECOVERY_ID, REPLICA_LEN1);

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

    // A finalized replica and an RWR (replica-waiting-to-be-recovered) replica:
    // only the finalized replica's datanode is updated; the RWR one is skipped.
    ReplicaRecoveryInfo replica1 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-1, ReplicaState.FINALIZED);
    ReplicaRecoveryInfo replica2 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-2, ReplicaState.RWR);

    InterDatanodeProtocol dn1 = mock(InterDatanodeProtocol.class);
    InterDatanodeProtocol dn2 = mock(InterDatanodeProtocol.class);

    testSyncReplicas(replica1, replica2, dn1, dn2, REPLICA_LEN1);
    verify(dn1).updateReplicaUnderRecovery(block, RECOVERY_ID, REPLICA_LEN1);
    verify(dn2, never()).updateReplicaUnderRecovery(
        block, RECOVERY_ID, REPLICA_LEN1);

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

    // Two RBW replicas of different lengths: both datanodes are synced to the
    // minimum of the two lengths.
    ReplicaRecoveryInfo replica1 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-1, ReplicaState.RBW);
    ReplicaRecoveryInfo replica2 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN2, GEN_STAMP-2, ReplicaState.RBW);

    InterDatanodeProtocol dn1 = mock(InterDatanodeProtocol.class);
    InterDatanodeProtocol dn2 = mock(InterDatanodeProtocol.class);

    long minLen = Math.min(REPLICA_LEN1, REPLICA_LEN2);
    testSyncReplicas(replica1, replica2, dn1, dn2, minLen);
    verify(dn1).updateReplicaUnderRecovery(block, RECOVERY_ID, minLen);
    verify(dn2).updateReplicaUnderRecovery(block, RECOVERY_ID, minLen);

Examples of org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol

    // An RBW replica and an RWR replica: only the RBW replica's datanode is
    // updated; the RWR replica is excluded from the sync.
    ReplicaRecoveryInfo replica1 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-1, ReplicaState.RBW);
    ReplicaRecoveryInfo replica2 = new ReplicaRecoveryInfo(BLOCK_ID,
        REPLICA_LEN1, GEN_STAMP-2, ReplicaState.RWR);

    InterDatanodeProtocol dn1 = mock(InterDatanodeProtocol.class);
    InterDatanodeProtocol dn2 = mock(InterDatanodeProtocol.class);

    testSyncReplicas(replica1, replica2, dn1, dn2, REPLICA_LEN1);
    verify(dn1).updateReplicaUnderRecovery(block, RECOVERY_ID, REPLICA_LEN1);
    verify(dn2, never()).updateReplicaUnderRecovery(
        block, RECOVERY_ID, REPLICA_LEN1);
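For orientation, here is a small self-contained sketch in the spirit of the mock-based tests above. The constants are made up, and the three-argument updateReplicaUnderRecovery(Block, long, long) signature follows the snippets on this page (the method's signature differs across Hadoop versions); this is an illustration, not the actual test code.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;

public class InterDatanodeProtocolMockSketch {
  // Hypothetical values standing in for the test constants used above.
  static final long BLOCK_ID = 1000L;
  static final long REPLICA_LEN = 1024L;
  static final long GEN_STAMP = 2000L;
  static final long RECOVERY_ID = GEN_STAMP + 1;

  public static void main(String[] args) throws Exception {
    Block block = new Block(BLOCK_ID, REPLICA_LEN, GEN_STAMP);

    // Mock a remote datanode's InterDatanodeProtocol endpoint.
    InterDatanodeProtocol dn = mock(InterDatanodeProtocol.class);

    // Code under test (e.g. block synchronization) would make this call.
    dn.updateReplicaUnderRecovery(block, RECOVERY_ID, REPLICA_LEN);

    // Verify the replica was asked to roll forward to the recovery id and length.
    verify(dn).updateReplicaUnderRecovery(block, RECOVERY_ID, REPLICA_LEN);
  }
}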