Class org.apache.hadoop.util.DiskChecker (package org.apache.hadoop.util)

Examples of org.apache.hadoop.util.DiskChecker.DiskErrorException
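
DiskErrorException is a checked exception (a subclass of IOException) nested inside org.apache.hadoop.util.DiskChecker. Hadoop daemons throw it when a local directory or volume fails a disk health check, as the excerpts below (drawn from the Hadoop codebase) show. As a starting point, here is a minimal sketch of triggering and catching it via DiskChecker.checkDir; the directory path is illustrative.

    import java.io.File;

    import org.apache.hadoop.util.DiskChecker;
    import org.apache.hadoop.util.DiskChecker.DiskErrorException;

    public class DiskCheckExample {
      public static void main(String[] args) {
        // Illustrative path; checkDir creates the directory if it is missing
        // and verifies it is readable, writable and executable.
        File dir = new File("/tmp/hadoop-local");
        try {
          DiskChecker.checkDir(dir);
          System.out.println("Directory is usable: " + dir);
        } catch (DiskErrorException e) {
          System.err.println("Disk check failed: " + e.getMessage());
        }
      }
    }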


   
    int volsFailed = volsConfigured - storage.getNumStorageDirs();

    if (volsFailed < 0 ||
      volsFailed > volFailuresTolerated) {
        throw new DiskErrorException("Invalid value for volsFailed : "
            + volsFailed + " , Volumes tolerated : " + volFailuresTolerated);
    }

    this.validVolsRequired = volsConfigured - volFailuresTolerated;
   
    if (validVolsRequired < 1 ||
        validVolsRequired > storage.getNumStorageDirs()) {
      throw new DiskErrorException("Invalid value for validVolsRequired : "
          + validVolsRequired + " , Current valid volumes: " + storage.getNumStorageDirs());
    }
   
    FSVolume[] volArray = new FSVolume[storage.getNumStorageDirs()];
    for (int idx = 0; idx < storage.getNumStorageDirs(); idx++) {
View Full Code Here
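
This check guards DataNode startup: with volsConfigured data directories and a failure tolerance of volFailuresTolerated (the dfs.datanode.failed.volumes.tolerated setting), validVolsRequired = volsConfigured - volFailuresTolerated. For example, with illustrative numbers, 4 configured volumes and a tolerance of 1 require at least 3 volumes to attach; if only 2 storage directories come up, volsFailed = 2 exceeds the tolerance and the DataNode aborts with a DiskErrorException.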


    StringBuilder sb = new StringBuilder();
    for(FSVolume fv : failed_vols) {
      sb.append(fv.dataDir.dir.getAbsolutePath() + ";");
    }

    throw new DiskErrorException("DataNode failed volumes:" + sb);
 
  }
View Full Code Here

  static void checkDirectoryPermissions(FileSystem fs, Path path,
      String configKey) throws IOException, DiskErrorException {
    FileStatus stat = fs.getFileStatus(path);
    FsPermission actual = stat.getPermission();
    if (!stat.isDir())
      throw new DiskErrorException(configKey + " - not a directory: "
          + path.toString());
    FsAction user = actual.getUserAction();
    if (!user.implies(FsAction.READ))
      throw new DiskErrorException("bad " + configKey
          + "- directory is not readable: " + path.toString());
    if (!user.implies(FsAction.WRITE))
      throw new DiskErrorException("bad " + configKey
          + "- directory is not writable " + path.toString());
  }
View Full Code Here
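
The helper above rejects a path that is not a directory or that the current user cannot read or write. A hypothetical call site, assuming the helper is defined in the same class, might look like the following; the local FileSystem, path, and configuration key are illustrative.

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path stateDir = new Path("/tmp/history-state");   // illustrative path
    // The config key is only used to label the error message.
    checkDirectoryPermissions(fs, stateDir, "example.state-store.dir");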

                }
            }
        }

        if (!writable)
            throw new DiskErrorException(
                    "all local directories are not writable");
    }
View Full Code Here
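
The fragment above is the tail of a loop over the configured local directories. A minimal sketch of the whole pattern, with assumed names (localDirs for the configured paths, LOG for a logger) and the usual java.io.File and DiskChecker imports:

    boolean writable = false;
    for (String dirName : localDirs) {
      try {
        DiskChecker.checkDir(new File(dirName));
        writable = true;   // at least one directory passed the check
      } catch (DiskErrorException e) {
        LOG.warn("Local directory " + dirName + " is not usable", e);
      }
    }
    if (!writable)
      throw new DiskErrorException("all local directories are not writable");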

          it.remove();
          numFailures++;
        }
      }
      if (localDirs.isEmpty()) {
        throw new DiskErrorException(
            "No mapred local directories are writable");
      }
    }
View Full Code Here
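
The excerpt above keeps only the usable mapred local directories: entries that fail the disk check are removed from the list, and the exception is thrown only when none survive. A hedged sketch of the surrounding loop, with assumed names (localDirs, numFailures, LOG):

    Iterator<String> it = localDirs.iterator();
    while (it.hasNext()) {
      String dir = it.next();
      try {
        DiskChecker.checkDir(new File(dir));
      } catch (DiskErrorException e) {
        LOG.warn("Removing failed mapred local directory " + dir, e);
        it.remove();
        numFailures++;
      }
    }
    if (localDirs.isEmpty()) {
      throw new DiskErrorException(
          "No mapred local directories are writable");
    }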

        }
      }
    }

    if (!writable)
      throw new DiskErrorException("all local directories are not writable");
  }
View Full Code Here

        }
      }
    }

    if (!writable)
      throw new DiskErrorException(
                                   "all local directories are not writable");
  }
View Full Code Here

    int volsConfigured = (dataDirs == null) ? 0 : dataDirs.length;
    int volsFailed = volsConfigured - storage.getNumStorageDirs();
    this.validVolsRequired = volsConfigured - volFailuresTolerated;

    if (volFailuresTolerated < 0 || volFailuresTolerated >= volsConfigured) {
      throw new DiskErrorException("Invalid volume failure "
          + " config value: " + volFailuresTolerated);
    }
    if (volsFailed > volFailuresTolerated) {
      throw new DiskErrorException("Too many failed volumes - "
          + "current valid volumes: " + storage.getNumStorageDirs()
          + ", volumes configured: " + volsConfigured
          + ", volumes failed: " + volsFailed
          + ", volume failures tolerated: " + volFailuresTolerated);
    }
View Full Code Here
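
This newer variant validates the tolerance setting itself before comparing it with the number of attached storage directories. A simplified, hypothetical restatement of the same logic (the method and parameter names below are illustrative, not Hadoop API):

    static void validateVolumeFailures(int volsConfigured, int volsAttached,
        int volFailuresTolerated) throws DiskErrorException {
      // The tolerance must be non-negative and leave at least one required volume.
      if (volFailuresTolerated < 0 || volFailuresTolerated >= volsConfigured) {
        throw new DiskErrorException("Invalid volume failure config value: "
            + volFailuresTolerated);
      }
      int volsFailed = volsConfigured - volsAttached;
      if (volsFailed > volFailuresTolerated) {
        throw new DiskErrorException("Too many failed volumes: " + volsFailed
            + " failed, " + volFailuresTolerated + " tolerated");
      }
    }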

    // report the error
    StringBuilder sb = new StringBuilder();
    for (FsVolumeImpl fv : failedVols) {
      sb.append(fv.getCurrentDir().getAbsolutePath() + ";");
    }
    throw new DiskErrorException("DataNode failed volumes:" + sb);
  }
View Full Code Here
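
The reporting pattern in these excerpts is the same: collect every volume that failed its disk check, then throw a single DiskErrorException naming all of them. A minimal sketch, with assumed names (volumeDirs for the directories to check) and the usual java.util and java.io.File imports:

    List<File> failedDirs = new ArrayList<>();
    for (File volumeDir : volumeDirs) {
      try {
        DiskChecker.checkDir(volumeDir);
      } catch (DiskErrorException e) {
        failedDirs.add(volumeDir);
      }
    }
    if (!failedDirs.isEmpty()) {
      StringBuilder sb = new StringBuilder();
      for (File dir : failedDirs) {
        sb.append(dir.getAbsolutePath()).append(";");
      }
      throw new DiskErrorException("DataNode failed volumes:" + sb);
    }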
