Examples of PathFilter
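
org.apache.hadoop.fs.PathFilter is a single-method interface, boolean accept(Path path), that FileSystem.listStatus and FileSystem.globStatus use to decide which entries to include in a listing; MapReduce input formats also accept one as an input path filter. A minimal self-contained sketch (the /tmp/data directory and the ".dat" suffix are hypothetical, chosen only for illustration):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;

public class PathFilterExample {
  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.get(new Configuration());
    // Anonymous PathFilter that keeps only entries whose name ends in ".dat".
    FileStatus[] matches = fs.listStatus(new Path("/tmp/data"), new PathFilter() {
      @Override
      public boolean accept(Path p) {
        return p.getName().endsWith(".dat");
      }
    });
    for (FileStatus status : matches) {
      System.out.println(status.getPath());
    }
  }
}

The examples below show the same pattern in real code bases: filters that keep directories, reference files, recovered-edits files, table-info files, or non-hidden input files.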


Examples of org.apache.hadoop.fs.PathFilter

  /**
   * Returns all store file paths under the given region directory.
   * @throws IOException When scanning the files fails.
   */
  static List<Path> getStoreFiles(FileSystem fs, Path regionDir)
      throws IOException {
    List<Path> res = new ArrayList<Path>();
    PathFilter dirFilter = new FSUtils.DirFilter(fs);
    FileStatus[] familyDirs = fs.listStatus(regionDir, dirFilter);
    for(FileStatus dir : familyDirs) {
      FileStatus[] files = fs.listStatus(dir.getPath());
      for (FileStatus file : files) {
        if (!file.isDir()) {
          res.add(file.getPath());
        }
      }
    }
    return res;
  }
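
FSUtils.DirFilter above is an HBase helper that accepts only directories. As a point of reference, here is a minimal from-scratch sketch of such a filter (not the HBase class itself), using the older FileStatus.isDir() call that these snippets rely on:

  /** Sketch: accept a path only if it is a directory on the given FileSystem. */
  static class DirOnlyFilter implements PathFilter {
    private final FileSystem fs;

    DirOnlyFilter(FileSystem fs) {
      this.fs = fs;
    }

    @Override
    public boolean accept(Path p) {
      try {
        return fs.getFileStatus(p).isDir();   // isDirectory() on Hadoop 2 and later
      } catch (IOException e) {
        return false;                         // treat unreadable paths as non-matches
      }
    }
  }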

Examples of org.apache.hadoop.fs.PathFilter

      final Path regiondir)
  throws IOException {
    NavigableSet<Path> filesSorted = new TreeSet<Path>();
    Path editsdir = getRegionDirRecoveredEditsDir(regiondir);
    if (!fs.exists(editsdir)) return filesSorted;
    FileStatus[] files = FSUtils.listStatus(fs, editsdir, new PathFilter() {
      @Override
      public boolean accept(Path p) {
        boolean result = false;
        try {
          // Return files and only files that match the editfile names pattern.
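
The filter above is cut off mid-body; per its comment, it goes on to accept only regular files whose names match the edit-file name pattern. A self-contained sketch of that shape, with a hypothetical digits-only pattern standing in for the real HBase constant (assumes java.util.regex.Pattern is imported):

  // Hypothetical stand-in for the real edit-file name pattern.
  private static final Pattern EDIT_FILE_NAME = Pattern.compile("[0-9]+");

  static FileStatus[] listEditFiles(final FileSystem fs, Path editsDir) throws IOException {
    return fs.listStatus(editsDir, new PathFilter() {
      @Override
      public boolean accept(Path p) {
        try {
          // Keep regular files whose names match the pattern; skip everything else.
          return fs.isFile(p) && EDIT_FILE_NAME.matcher(p.getName()).matches();
        } catch (IOException e) {
          return false;
        }
      }
    });
  }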

Examples of org.apache.hadoop.fs.PathFilter

        // Look for reference files.  Call listStatus with an anonymous
        // instance of PathFilter.

        FileStatus [] ps = fs.listStatus(p,
            new PathFilter () {
              public boolean accept(Path path) {
                return HStore.isReference(path);
              }
            }
        );
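
Since PathFilter declares a single abstract method, the anonymous class above collapses to a lambda or method reference on Java 8 and newer (same HStore.isReference call as in the original):

        // Lambda form (Java 8+), equivalent to the anonymous PathFilter above.
        FileStatus[] ps = fs.listStatus(p, path -> HStore.isReference(path));
        // Or as a method reference: fs.listStatus(p, HStore::isReference);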

Examples of org.apache.hadoop.fs.PathFilter

    if (families == null) {
      LOG.info("No families under region directory:" + regionDir);
      return;
    }

    PathFilter fileFilter = new FSUtils.FileFilter(fs);
    for (FileStatus family: families) {
      Path familyDir = family.getPath();
      String familyName = familyDir.getName();

      // get all the storeFiles in the family

Examples of org.apache.hadoop.fs.PathFilter

  /**
   * @throws IOException
   */
  public static FileStatus getTableInfoPath(final FileSystem fs,
      final Path tabledir)
  throws IOException {
    FileStatus [] status = FSUtils.listStatus(fs, tabledir, new PathFilter() {
      @Override
      public boolean accept(Path p) {
        // Accept any file that starts with TABLEINFO_NAME
        return p.getName().startsWith(TABLEINFO_NAME);
      }

Examples of org.apache.hadoop.fs.PathFilter

      Path p = Store.getStoreHomedir(tabledir, split.getEncodedName(),
        family.getName());
      if (!fs.exists(p)) continue;
      // Look for reference files.  Call listStatus with anonymous instance of PathFilter.
      FileStatus [] ps = FSUtils.listStatus(fs, p,
          new PathFilter () {
            public boolean accept(Path path) {
              return StoreFile.isReference(path);
            }
          }
      );

Examples of org.apache.hadoop.fs.PathFilter

      // Look for reference files.  Call listStatus with an anonymous
      // instance of PathFilter.

      FileStatus [] ps = this.master.fs.listStatus(p,
          new PathFilter () {
            public boolean accept(Path path) {
              return HStore.isReference(path);
            }
          }
      );

Examples of org.apache.hadoop.fs.PathFilter

      totalBytes += it.next().getSize();
    }
    long goalSize = totalBytes / (numSplits < 1 ? 1 : numSplits);
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    PathFilter filter = null;
    List<BasicTable.Reader> realReaders = new ArrayList<BasicTable.Reader>();
    int[] realReaderIndices = new int[readers.size()];

    for (int i = 0; i < readers.size(); ++i) {
      BasicTable.Reader reader = readers.get(i);
      /* Get the index of the column group that will be used for row-split.*/
      int splitCGIndex = reader.getRowSplitCGIndex();
     
      /* We can create input splits only if a valid column group exists for the row split.
       * Otherwise, we do not create input splits. */
      if (splitCGIndex >= 0) {       
        realReaderIndices[realReaders.size()] = i;
        realReaders.add(reader);
        if (first)
        {
          // filter is identical across tables
          filter = reader.getPathFilter(conf);
          first = false;
        } else
          sb.append(",");
        sb.append(reader.getPath().toString() + "/" + reader.getName(splitCGIndex));
      }
    }
   
    DummyFileInputFormat helper = new DummyFileInputFormat(minSplitSize, realReaders);

    if (!realReaders.isEmpty())
    {
      DummyFileInputFormat.setInputPaths(conf, sb.toString());
      DummyFileInputFormat.setInputPathFilter(conf, filter.getClass());
      InputSplit[] inputSplits = helper.getSplits(conf, (numSplits < 1 ? 1 : numSplits));

      int batchesPerSplit = inputSplits.length / (numSplits < 1 ? 1 : numSplits);
      if (batchesPerSplit <= 0)
        batchesPerSplit = 1;
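
setInputPathFilter above registers the filter's class on the JobConf so that FileInputFormat applies it while listing the input files that back the splits. A minimal sketch of the same wiring with the old mapred API (the SkipTmpFilter class, the ".tmp" suffix, and the /tmp/input path are hypothetical; assumes org.apache.hadoop.mapred.JobConf and FileInputFormat are imported):

  /** Hypothetical filter that drops temporary files. It needs a public no-arg
   *  constructor because the framework instantiates it from the class name. */
  public static class SkipTmpFilter implements PathFilter {
    @Override
    public boolean accept(Path p) {
      return !p.getName().endsWith(".tmp");
    }
  }

  static JobConf buildJobConf() {
    JobConf conf = new JobConf();
    FileInputFormat.setInputPaths(conf, new Path("/tmp/input"));
    FileInputFormat.setInputPathFilter(conf, SkipTmpFilter.class);
    return conf;
  }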

Examples of org.apache.hadoop.fs.PathFilter

     
      // creates a MultiPathFilter with the hiddenFileFilter and the
      // user provided one (if any).
      List<PathFilter> filters = new ArrayList<PathFilter>();
      filters.add(hiddenFileFilter);
      PathFilter jobFilter = getInputPathFilter(job);
      if (jobFilter != null) {
        filters.add(jobFilter);
      }
      PathFilter inputFilter = new MultiPathFilter(filters);

      ArrayList<Integer> fileNumberList  = new ArrayList<Integer>();
      int index = 0;
      for (Path p: dirs) {
        FileSystem fs = p.getFileSystem(job);
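
hiddenFileFilter and MultiPathFilter here are FileInputFormat internals: the first drops names that start with "_" or ".", and the second accepts a path only when every delegate filter does. A from-scratch sketch of that composition, not a copy of the Hadoop classes (assumes java.util.List is imported):

  /** Accepts a path only if every delegate filter accepts it. */
  static class AllOfPathFilter implements PathFilter {
    private final List<PathFilter> filters;

    AllOfPathFilter(List<PathFilter> filters) {
      this.filters = filters;
    }

    @Override
    public boolean accept(Path path) {
      for (PathFilter filter : filters) {
        if (!filter.accept(path)) {
          return false;
        }
      }
      return true;
    }
  }

  /** Drops "hidden" entries such as _SUCCESS, _logs, and dot-files. */
  static final PathFilter HIDDEN_FILE_FILTER = new PathFilter() {
    @Override
    public boolean accept(Path p) {
      String name = p.getName();
      return !name.startsWith("_") && !name.startsWith(".");
    }
  };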

Examples of org.apache.hadoop.fs.PathFilter

  }

  private List<FileFragment> getFragments(Path tablePath)
      throws IOException {
    List<FileFragment> fragments = Lists.newArrayList();
    FileStatus[] files = fs.listStatus(tablePath, new PathFilter() {
      @Override
      public boolean accept(Path path) {
        return path.getName().charAt(0) != '.';
      }
    });