Package org.apache.hadoop.util

Examples of org.apache.hadoop.util.DataDirFileReader


    lastLocalDirsReloadTimeStamp = currentTimeStamp;
    String[] tmpLocalDirsList = null;
    String configFilePath = conf.get("mapred.localdir.confpath");
    if (configFilePath != null) {
      try {
        DataDirFileReader reader = new DataDirFileReader(configFilePath);
        String tempLocalDirs = reader.getNewDirectories();
        tmpLocalDirsList = reader.getArrayOfCurrentDataDirectories();
        if (tempLocalDirs == null) {
          LOG.warn("File is empty, using mapred.local.dir directories");
        }
      } catch (IOException e) {
        LOG.warn("Could not read file, using directories from mapred.local.dir");
View Full Code Here


      String newLocalDirs = null;
      String[] localDirsList = null;
      if ((contextCfgItemName.equals("mapred.local.dir")) &&
        (configFilePath != null)) {
        try {
          DataDirFileReader reader = new DataDirFileReader(configFilePath);
          if (lastReloadTimestamp < reader.getLastModTimeStamp()) {
            lastReloadTimestamp = reader.getLastModTimeStamp();
            String tempLocalDirs = reader.getNewDirectories();
            localDirsList = reader.getArrayOfCurrentDataDirectories();
            if(tempLocalDirs == null) {
              LOG.warn("File is empty, using mapred.local.dir directories");
            }
            else {
              newLocalDirs = tempLocalDirs;
View Full Code Here

  public static String[] getListOfDataDirs(Configuration conf) {
    String[] configFilePath = conf.getStrings("dfs.datadir.confpath");
    String[] dataDirs = null;
    if(configFilePath != null && (configFilePath.length != 0)) {
      try {
        DataDirFileReader reader = new DataDirFileReader(configFilePath[0]);
        dataDirs = reader.getArrayOfCurrentDataDirectories();
        if(dataDirs == null) {
          LOG.warn("File is empty, using dfs.data.dir directories");
        }
      } catch (Exception e) {
        LOG.warn("Could not read file, using directories from dfs.data.dir" +
View Full Code Here

      //Refresh the volumes using default configuration path
      if (confVolumes.equals("--defaultPath")) {
        Configuration conf = getConf();
        confVolumes = conf.get("dfs.datadir.confpath");
      }
      DataDirFileReader reader = new DataDirFileReader(confVolumes);
      this.refreshVolumes(reader.getNewDirectories());
    } catch (Exception e) {
      LOG.error("Cannot refresh the data dirs of the node Exception: " + e);     
      return;
    }
  }
View Full Code Here

TOP

Related Classes of org.apache.hadoop.util.DataDirFileReader

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle, Inc. Contact coftware@gmail.com.