Examples of PathFilter


Examples of org.apache.hadoop.fs.PathFilter

     * @param path The node directory path
     * @return Returns the list of data chunk files found under the given path
     * @throws IOException if listing the directory fails
     */
    public static FileStatus[] getDataChunkFiles(FileSystem fs, Path path) throws IOException {
        return fs.listStatus(path, new PathFilter() {

            public boolean accept(Path input) {
                if(input.getName().matches("^[\\d]+_[\\d]+_[\\d]+\\.data")) {
                    return true;
                } else {
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

     */
    public static FileStatus[] getDataChunkFiles(FileSystem fs,
                                                 Path path,
                                                 final int partitionId,
                                                 final int replicaType) throws IOException {
        return fs.listStatus(path, new PathFilter() {

            public boolean accept(Path input) {
                if(input.getName().matches("^" + Integer.toString(partitionId) + "_"
                                           + Integer.toString(replicaType) + "_[\\d]+\\.data")) {
                    return true;
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

    public static FileStatus[] getDataChunkFiles(FileSystem fs,
                                                 Path path,
                                                 final int partitionId,
                                                 final int replicaType,
                                                 final int chunkId) throws IOException {
        return fs.listStatus(path, new PathFilter() {

            public boolean accept(Path input) {
                if(input.getName().matches("^" + Integer.toString(partitionId) + "_"
                                           + Integer.toString(replicaType) + "_"
                                           + Integer.toString(chunkId) + "\\.data")) {
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

                    logger.info("Setting permission to 755 for " + nodePath);
                }

                if(checkSumType != CheckSumType.NONE) {

                    FileStatus[] storeFiles = outputFs.listStatus(nodePath, new PathFilter() {

                        public boolean accept(Path arg0) {
                            if(arg0.getName().endsWith("checksum")
                               && !arg0.getName().startsWith(".")) {
                                return true;
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

   
    // creates a MultiPathFilter with the hiddenFileFilter and the
    // user provided one (if any).
    List<PathFilter> filters = new ArrayList<PathFilter>();
    filters.add(hiddenFileFilter);
    PathFilter jobFilter = getInputPathFilter(job);
    if (jobFilter != null) {
      filters.add(jobFilter);
    }
    PathFilter inputFilter = new MultiPathFilter(filters);
   
    for (int i=0; i < dirs.length; ++i) {
      Path p = dirs[i];
      FileSystem fs = p.getFileSystem(job.getConfiguration());
      FileStatus[] matches = fs.globStatus(p, inputFilter);
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

      // a path filter that matches 4 parts of the filenames namely
      //  - jt-hostname
      //  - job-id
      //  - username
      //  - jobname
      PathFilter filter = new PathFilter() {
        public boolean accept(Path path) {
          String fileName = path.getName();
          try {
            fileName = decodeJobHistoryFileName(fileName);
          } catch (IOException ioe) {
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

   
    // creates a MultiPathFilter with the hiddenFileFilter and the
    // user provided one (if any).
    List<PathFilter> filters = new ArrayList<PathFilter>();
    filters.add(hiddenFileFilter);
    PathFilter jobFilter = getInputPathFilter(job);
    if (jobFilter != null) {
      filters.add(jobFilter);
    }
    PathFilter inputFilter = new MultiPathFilter(filters);

    for (Path p: dirs) {
      FileSystem fs = p.getFileSystem(job);
      FileStatus[] matches = fs.globStatus(p, inputFilter);
      if (matches == null) {
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

      Path p = Store.getStoreHomedir(tabledir, split.getEncodedName(),
        family.getName());
      if (!fs.exists(p)) continue;
      // Look for reference files.  Call listStatus with anonymous instance of PathFilter.
      FileStatus [] ps = FSUtils.listStatus(fs, p,
          new PathFilter () {
            public boolean accept(Path path) {
              return StoreFile.isReference(path);
            }
          }
      );
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

   * @throws IOException
   */
  private static FileStatus getTableInfoPath(final FileSystem fs,
      final Path tabledir)
  throws IOException {
    FileStatus [] status = FSUtils.listStatus(fs, tabledir, new PathFilter() {
      @Override
      public boolean accept(Path p) {
        // Accept any file that starts with TABLEINFO_NAME
        return p.getName().startsWith(TABLEINFO_NAME);
      }
View Full Code Here

Examples of org.apache.hadoop.fs.PathFilter

  private static boolean isConverged(String filePath, Configuration conf, FileSystem fs) throws IOException {
   
    Path clusterPath = new Path(filePath + "/*");
    List<Path> result = new ArrayList<Path>();
   
    PathFilter clusterFileFilter = new PathFilter() {
      @Override
      public boolean accept(Path path) {
        return path.getName().startsWith("part");
      }
    };
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.