Examples of DirectoryListing


Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing


  private static DirectoryListing getDirectoryListing(final NameNode np,
      final String p, byte[] startAfter) throws IOException {
    final DirectoryListing listing = np.getListing(p, startAfter);
    if (listing == null) { // the directory does not exist
      throw new FileNotFoundException("File " + p + " does not exist.");
    }
    return listing;
  }
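
The helper above returns a single batch. Callers page through a large directory by feeding the last returned name back in, as several of the examples below do; a minimal sketch of that loop, reusing the names from this snippet (error handling omitted):

  byte[] lastReturnedName = HdfsFileStatus.EMPTY_NAME;
  DirectoryListing thisListing;
  do {
    thisListing = getDirectoryListing(np, p, lastReturnedName);
    for (HdfsFileStatus status : thisListing.getPartialListing()) {
      // process one entry here
    }
    lastReturnedName = thisListing.getLastName();
  } while (thisListing.hasMore());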

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

  private static StreamingOutput getListingStream(final NameNode np,
      final String p) throws IOException {
    final DirectoryListing first = getDirectoryListing(np, p,
        HdfsFileStatus.EMPTY_NAME);

    return new StreamingOutput() {
      @Override
      public void write(final OutputStream outstream) throws IOException {
        final PrintStream out = new PrintStream(outstream);
        out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
            + FileStatus.class.getSimpleName() + "\":[");

        final HdfsFileStatus[] partial = first.getPartialListing();
        if (partial.length > 0) {
          out.print(JsonUtil.toJsonString(partial[0], false));
        }
        for (int i = 1; i < partial.length; i++) {
          out.println(',');
          out.print(JsonUtil.toJsonString(partial[i], false));
        }
        // remaining batches and the closing "]}}" of the JSON document are
        // elided in this excerpt

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

  @Override
  public FileStatus[] listStatus(Path p) throws IOException {
    String src = getPathName(p);
   
    // fetch the first batch of entries in the directory
    DirectoryListing thisListing = dfs.listPaths(
        src, HdfsFileStatus.EMPTY_NAME);
   
    if (thisListing == null) { // the directory does not exist
      return null;
    }
   
    HdfsFileStatus[] partialListing = thisListing.getPartialListing();
    if (!thisListing.hasMore()) { // got all entries of the directory
      FileStatus[] stats = new FileStatus[partialListing.length];
      for (int i = 0; i < partialListing.length; i++) {
        stats[i] = makeQualified(partialListing[i], p);
      }
      return stats;
    }
   
    // The directory is too large to return in one batch; fetch the rest.
    // Estimate the total number of entries in the directory.
    int totalNumEntries =
      partialListing.length + thisListing.getRemainingEntries();
    ArrayList<FileStatus> listing =
      new ArrayList<FileStatus>(totalNumEntries);
    // add the first batch of entries to the array list
    for (HdfsFileStatus fileStatus : partialListing) {
      listing.add(makeQualified(fileStatus, p));
    }

    // now fetch more entries
    do {
      thisListing = dfs.listPaths(src, thisListing.getLastName());
     
      if (thisListing == null) {
        return null; // the directory is deleted
      }
     
      partialListing = thisListing.getPartialListing();
      for (HdfsFileStatus fileStatus : partialListing) {
        listing.add(makeQualified(fileStatus, p));
      }
    } while (thisListing.hasMore());

    return listing.toArray(new FileStatus[listing.size()]);
  }
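
A minimal caller for this listStatus implementation, assuming an already configured FileSystem bound to HDFS ('conf' and the path are illustrative):

  FileSystem fs = FileSystem.get(conf); // 'conf' is an assumed Hadoop Configuration
  FileStatus[] entries = fs.listStatus(new Path("/user/alice")); // illustrative path
  if (entries != null) { // null here means the directory does not exist
    for (FileStatus status : entries) {
      System.out.println(status.getPath());
    }
  }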

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

          pathstack.push(path);
          while (!pathstack.empty()) {
            String p = pathstack.pop();
            try {
              byte[] lastReturnedName = HdfsFileStatus.EMPTY_NAME;        
              DirectoryListing thisListing;
              do {
                assert lastReturnedName != null;
                thisListing = nn.getListing(p, lastReturnedName);
                if (thisListing == null) {
                  if (lastReturnedName.length == 0) {
                    LOG.warn("ListPathsServlet - Path " + p + " does not exist");
                  }
                  break;
                }
                HdfsFileStatus[] listing = thisListing.getPartialListing();
                for (HdfsFileStatus i : listing) {
                  String localName = i.getLocalName();
                  if (exclude.matcher(localName).matches()
                      || !filter.matcher(localName).matches()) {
                    continue;
                  }
                  if (recur && i.isDir()) {
                    pathstack.push(new Path(p, localName).toUri().getPath());
                  }
                  writeInfo(p, i, doc);
                }
                lastReturnedName = thisListing.getLastName();
              } while (thisListing.hasMore());
            } catch(IOException re) {
              writeXml(re, p, doc);
            }
          }
          if (doc != null) {
            doc.endDocument();
          }

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

      INode targetNode = rootDir.getNode(srcs);
      if (targetNode == null)
        return null;
     
      if (!targetNode.isDirectory()) {
        return new DirectoryListing(new HdfsFileStatus[]{createFileStatus(
            HdfsFileStatus.EMPTY_NAME, targetNode)}, 0);
      }
      INodeDirectory dirInode = (INodeDirectory)targetNode;
      List<INode> contents = dirInode.getChildren();
      int startChild = dirInode.nextChild(startAfter);
      int totalNumChildren = contents.size();
      int numOfListing = Math.min(totalNumChildren - startChild, this.lsLimit);
      HdfsFileStatus[] listing = new HdfsFileStatus[numOfListing];
      for (int i = 0; i < numOfListing; i++) {
        INode cur = contents.get(startChild + i);
        listing[i] = createFileStatus(cur.name, cur);
      }
      return new DirectoryListing(
          listing, totalNumChildren - startChild - numOfListing);
    }
  }
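
The second constructor argument is the count of entries remaining after this batch: totalNumChildren - startChild - numOfListing. For example, a directory with 1200 children, a startChild of 0, and an lsLimit of 500 yields a batch of 500 entries with 1200 - 0 - 500 = 700 remaining for subsequent calls.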

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

    String path = file.getFullName(parent);
    boolean isOpen = false;

    if (file.isDir()) {
      byte[] lastReturnedName = HdfsFileStatus.EMPTY_NAME;
      DirectoryListing thisListing;
      if (showFiles) {
        out.println(path + " <dir>");
      }
      res.totalDirs++;
      do {
        assert lastReturnedName != null;
        thisListing = namenode.getListing(path, lastReturnedName);
        if (thisListing == null) {
          return;
        }
        HdfsFileStatus[] files = thisListing.getPartialListing();
        for (int i = 0; i < files.length; i++) {
          check(path, files[i], res);
        }
        lastReturnedName = thisListing.getLastName();
      } while (thisListing.hasMore());
      return;
    }
    long fileLen = file.getLen();
    LocatedBlocks blocks = namenode.getBlockLocationsNoATime(path, 0, fileLen);
    if (blocks == null) { // the file is deleted
      return;
    }

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

       
        if (!s.isDir()) {
            blocks.addAll(dfs.namenode.getBlockLocations(path, 0,
                    Long.MAX_VALUE).getLocatedBlocks());
        } else {
            DirectoryListing drl = dfs.listPaths(path, HdfsFileStatus.EMPTY_NAME);
            for (HdfsFileStatus fileStatus : drl.getPartialListing()) {
                blocks.addAll(getBlocks(fileStatus.getFullName(path), conf, dfs, blocks));
            }
        }
       
        dfs.close();

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

  @Override // ClientProtocol
  public DirectoryListing getListing(String src, byte[] startAfter,
      boolean needLocation) throws IOException {
    DirectoryListing files = namesystem.getListing(
        src, startAfter, needLocation);
    if (files != null) {
      metrics.incrGetListingOps();
      metrics.incrFilesInGetListingOps(files.getPartialListing().length);
    }
    return files;
  }
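
When needLocation is true, each entry in the partial listing also carries block locations. A hedged client-side sketch (DFSClient.listPaths and HdfsLocatedFileStatus exist in Hadoop, but exact signatures vary across versions; 'dfsClient' and the path are assumptions):

  // fetch the first batch of /some/dir with block locations
  DirectoryListing batch = dfsClient.listPaths("/some/dir",
      HdfsFileStatus.EMPTY_NAME, true);
  if (batch != null) {
    for (HdfsFileStatus status : batch.getPartialListing()) {
      // with needLocation == true, entries are HdfsLocatedFileStatus instances
      HdfsLocatedFileStatus located = (HdfsLocatedFileStatus) status;
      System.out.println(located.getLocalName() + " blocks="
          + located.getBlockLocations().getLocatedBlocks().size());
    }
  }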

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

        HdfsFileStatus snapshotFileInfo = namenode.getRpcServer().getFileInfo(
            snapshotPath);
        check(snapshotPath, snapshotFileInfo, res);
      }
      byte[] lastReturnedName = HdfsFileStatus.EMPTY_NAME;
      DirectoryListing thisListing;
      if (showFiles) {
        out.println(path + " <dir>");
      }
      res.totalDirs++;
      do {
        assert lastReturnedName != null;
        thisListing = namenode.getRpcServer().getListing(
            path, lastReturnedName, false);
        if (thisListing == null) {
          return;
        }
        HdfsFileStatus[] files = thisListing.getPartialListing();
        for (int i = 0; i < files.length; i++) {
          check(path, files[i], res);
        }
        lastReturnedName = thisListing.getLastName();
      } while (thisListing.hasMore());
      return;
    }
    if (file.isSymlink()) {
      if (showFiles) {
        out.println(path + " <symlink>");
      }
      res.totalSymlinks++;
      return;
    }

Examples of org.apache.hadoop.hdfs.protocol.DirectoryListing

  private DirectoryListing getListingInt(String src, byte[] startAfter,
      boolean needLocation)
    throws AccessControlException, UnresolvedLinkException, IOException {
    DirectoryListing dl;
    FSPermissionChecker pc = getPermissionChecker();
    checkOperation(OperationCategory.READ);
    byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
    String startAfterString = new String(startAfter);
    readLock();
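    // remainder of getListingInt (the permission check and the directory read
    // under the read lock) is elided in this excerpt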