Examples of PathFilter


Examples of org.apache.hadoop.fs.PathFilter

    Configuration job = new Configuration();
    Path clusterPath = new Path(clusterPathStr + "/*");
    List<Path> result = new ArrayList<Path>();
    //log.info("I am here");
    // filter out the files
    PathFilter clusterFileFilter = new PathFilter() {
      @Override
      public boolean accept(Path path) {
        return path.getName().startsWith("part");
      }
    };
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

      /* We can create input splits only if there does exist a valid column group for split.
       * Otherwise, we do not create input splits. */
      if (splitCGIndex >= 0) {       
        Path path = new Path (reader.getPath().toString() + "/" + reader.getName(splitCGIndex));
        DummyFileInputFormat.setInputPaths(conf, path);
        PathFilter filter = reader.getPathFilter(conf);
        DummyFileInputFormat.setInputPathFilter(conf, filter.getClass());
        InputSplit[] inputSplits = helper.getSplits(conf, (numSplits < 1 ? 1 : numSplits));
       
        long starts[] = new long[inputSplits.length];
        long lengths[] = new long[inputSplits.length];
        Path paths[] = new Path [inputSplits.length];
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

   *         <tt>null</tt>
   */
  private static Multimap<String, String> getMapOfServersAndLogs(FileSystem fs, Path logdir,
      Collection<String> toInclude) throws IOException {
    // create a path filter based on the passed directories to include
    PathFilter filter = toInclude == null || toInclude.size() == 0 ? null
        : new MatchesDirectoryNames(toInclude);

    // get all the expected directories
    FileStatus[] serverLogDirs = FSUtils.listStatus(fs, logdir, filter);
    if (serverLogDirs == null) return null;
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

        files.add(file.getPath());
      }
      LOG.debug("Current files:" + files);
    }
    // delete the visible folders so we just have hidden files/folders
    final PathFilter dirFilter = new FSUtils.DirFilter(fs);
    PathFilter nonHidden = new PathFilter() {
      @Override
      public boolean accept(Path file) {
        return dirFilter.accept(file) && !file.getName().toString().startsWith(".");
      }
    };
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

    FileStatusConverter getAsFile = new FileStatusConverter(fs);
    // otherwise, we attempt to archive the store files

    // build collection of just the store directories to archive
    Collection<File> toArchive = new ArrayList<File>();
    final PathFilter dirFilter = new FSUtils.DirFilter(fs);
    PathFilter nonHidden = new PathFilter() {
      @Override
      public boolean accept(Path file) {
        return dirFilter.accept(file) && !file.getName().toString().startsWith(".");
      }
    };
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

   * @throws IOException When scanning the files fails.
   */
  static List<Path> getStoreFiles(FileSystem fs, Path regionDir)
      throws IOException {
    List<Path> res = new ArrayList<Path>();
    PathFilter dirFilter = new FSUtils.DirFilter(fs);
    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);
    for(FileStatus dir : familyDirs) {
      FileStatus[] files = fs.listStatus(dir.getPath());
      for (FileStatus file : files) {
        if (!file.isDir()) {
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

    // only include the directory paths to tables
    Path tableDir = FSUtils.getTableDir(hbaseRootDir, tableName);
    // Inside a table, there are compaction.dir directories to skip.  Otherwise, all else
    // should be regions.
    PathFilter df = new BlackListDirFilter(fs, HConstants.HBASE_NON_TABLE_DIRS);
    FileStatus[] regionDirs = fs.listStatus(tableDir);
    for (FileStatus regionDir : regionDirs) {
      Path dd = regionDir.getPath();
      if (dd.getName().equals(HConstants.HREGION_COMPACTIONDIR_NAME)) {
        continue;
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

    } else {
      queryPath = new Path(FSUtils.getTableDir(rootPath, TableName.valueOf(desiredTable)).toString() + "/*/");
    }

    // reject all paths that are not appropriate
    PathFilter pathFilter = new PathFilter() {
      @Override
      public boolean accept(Path path) {
        // this is the region name; it may get some noise data
        if (null == path) {
          return false;
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

    if (!fs.isDirectory(parentdir)) {
      throw new IOException(parentdirName + " not a directory");
    }
    // Look for regions in parentdir.
    Path [] regiondirs =
      fs.listPaths(parentdir, new PathFilter() {
        public boolean accept(Path path) {
          Matcher m = REGION_NAME_PARSER.matcher(path.getName());
          return m != null && m.matches();
        }
    });
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

   * @throws IOException
   */
  private Text [] getFamilies(final FileSystem fs,
      final Path regiondir)
  throws IOException {
    Path [] subdirs = fs.listPaths(regiondir, new PathFilter() {
      public boolean accept(Path path) {
        return !path.getName().equals("log");
      }
    });
    List<Text> families = new ArrayList<Text>();
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.