Examples of listPaths()


Examples of org.apache.hadoop.fs.FileSystem.listPaths()

    if (!fs.isDirectory(parentdir)) {
      throw new IOException(parentdirName + " not a directory");
    }
    // Look for regions in parentdir.
    Path [] regiondirs =
      fs.listPaths(parentdir, new PathFilter() {
        public boolean accept(Path path) {
          Matcher m = REGION_NAME_PARSER.matcher(path.getName());
          return m != null && m.matches();
        }
    });
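
The listPaths(Path, PathFilter) overload used above no longer exists in org.apache.hadoop.fs.FileSystem. A minimal sketch of the same region-directory scan against its replacement, listStatus(Path, PathFilter), follows; fs, parentdir and REGION_NAME_PARSER are assumed to be defined as in the excerpt, and an import of org.apache.hadoop.fs.FileStatus is assumed.

    // Sketch only: listStatus() returns FileStatus[] rather than Path[],
    // so the matching region paths are unwrapped after the filtered listing.
    FileStatus[] regionStatuses = fs.listStatus(parentdir, new PathFilter() {
      public boolean accept(Path path) {
        Matcher m = REGION_NAME_PARSER.matcher(path.getName());
        return m.matches();
      }
    });
    Path[] regiondirs = new Path[regionStatuses.length];
    for (int i = 0; i < regionStatuses.length; i++) {
      regiondirs[i] = regionStatuses[i].getPath();
    }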

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

  /** Open the output generated by this format. */
  public static SequenceFile.Reader[] getReaders(Configuration conf, Path dir)
    throws IOException {
    FileSystem fs = dir.getFileSystem(conf);
    Path[] names = fs.listPaths(dir);
   
    // sort names, so that hash partitioning works
    Arrays.sort(names);
   
    SequenceFile.Reader[] parts = new SequenceFile.Reader[names.length];
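    // Hedged completion (not part of the excerpt), assuming the classic
    // SequenceFile.Reader(FileSystem, Path, Configuration) constructor of the
    // same Hadoop era: open one reader per sorted output file and return them.
    for (int i = 0; i < names.length; i++) {
      parts[i] = new SequenceFile.Reader(fs, names[i], conf);
    }
    return parts;
  }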

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

    //pass a job.jar already included in the hadoop build
    conf.setJar("build/test/testjar/testjob.jar");
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();
    {
      Path[] parents = fs.listPaths(outDir.getParent());
      Path[] fileList = fs.listPaths(outDir);
      for(int i=0; i < fileList.length; ++i) {
        BufferedReader file =
          new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
        String line = file.readLine();

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

    conf.setJar("build/test/testjar/testjob.jar");
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();
    {
      Path[] parents = fs.listPaths(outDir.getParent());
      Path[] fileList = fs.listPaths(outDir);
      for(int i=0; i < fileList.length; ++i) {
        BufferedReader file =
          new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
        String line = file.readLine();
        while (line != null) {

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

        file.close();
         
        job = new StreamJob(argv, mayExit);     
        job.go();
        String line = null;
        Path[] fileList = fileSys.listPaths(new Path(OUTPUT_DIR));
        for (int i = 0; i < fileList.length; i++){
          System.out.println(fileList[i].toString());
          BufferedReader bread =
            new BufferedReader(new InputStreamReader(fileSys.open(fileList[i])));
          line = bread.readLine();

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

    //pass a job.jar already included in the hadoop build
    conf.setJar("build/test/testjar/testjob.jar");
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();

    Path[] fileList = fs.listPaths(outDir);
    for (int i = 0; i < fileList.length; ++i) {
      BufferedReader file = new BufferedReader(new InputStreamReader(
                                                                     fs.open(fileList[i])));
      String line = file.readLine();
      while (line != null) {

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

  public static String readOutput(Path outDir,
                                  JobConf conf) throws IOException {
    FileSystem fs = outDir.getFileSystem(conf);
    StringBuffer result = new StringBuffer();
    {
      Path[] fileList = fs.listPaths(outDir);
      for(int i=0; i < fileList.length; ++i) {
        BufferedReader file =
          new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
        String line = file.readLine();
        while (line != null) {
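          // Hedged completion (not in the excerpt): the usual pattern is to append
          // every line of every output file to result and return the combined text.
          result.append(line);
          result.append("\n");
          line = file.readLine();
        }
        file.close();
      }
    }
    return result.toString();
  }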

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

  /** Open the output generated by this format. */
  public static MapFile.Reader[] getReaders(FileSystem ignored, Path dir,
                                            Configuration conf)
    throws IOException {
    FileSystem fs = dir.getFileSystem(conf);
    Path[] names = fs.listPaths(dir);

    // sort names, so that hash partitioning works
    Arrays.sort(names);
   
    MapFile.Reader[] parts = new MapFile.Reader[names.length];
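    // Hedged completion (not part of the excerpt), assuming MapFile.Reader's classic
    // (FileSystem, String, Configuration) constructor, which takes the part
    // directory name rather than a Path.
    for (int i = 0; i < names.length; i++) {
      parts[i] = new MapFile.Reader(fs, names[i].toString(), conf);
    }
    return parts;
  }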

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

    }
    List<Path> result = new ArrayList<Path>();
    for (Path p: dirs) {
      FileSystem fs = p.getFileSystem(job);
      Path[] matches =
        fs.listPaths(fs.globPaths(p, hiddenFileFilter), hiddenFileFilter);
      for (Path match: matches) {
        result.add(fs.makeQualified(match));
      }
    }
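
The hiddenFileFilter referenced above is not part of the excerpt. In Hadoop's FileInputFormat the conventional definition is a PathFilter that skips entries whose names start with "_" or "."; a minimal sketch under that assumption:

    // Assumed definition (mirrors the FileInputFormat convention): skip
    // underscore-prefixed side files such as _logs and dot-prefixed entries.
    private static final PathFilter hiddenFileFilter = new PathFilter() {
      public boolean accept(Path p) {
        String name = p.getName();
        return !name.startsWith("_") && !name.startsWith(".");
      }
    };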

Examples of org.apache.hadoop.fs.FileSystem.listPaths()

    for (Path p: inputDirs) {
      FileSystem fs = p.getFileSystem(job);
      if (fs.exists(p)) {
        // make sure all paths are files to avoid exception
        // while generating splits
        for (Path subPath : fs.listPaths(p, hiddenFileFilter)) {
          FileSystem subFS = subPath.getFileSystem(job);
          if (!subFS.exists(subPath)) {
            result.add(new IOException(
                                       "Input path does not exist: " + subPath));
          } else {
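
The excerpt is cut off inside the else branch. For reference, a loose sketch of the same existence check against the current API, where FileSystem.listStatus(Path, PathFilter) throws FileNotFoundException for a missing path; names follow the excerpt, and imports of java.io.FileNotFoundException and org.apache.hadoop.fs.FileStatus are assumed.

    for (Path p : inputDirs) {
      FileSystem fs = p.getFileSystem(job);
      try {
        // Sketch only: a missing input surfaces here as FileNotFoundException,
        // so no per-entry exists() probe is needed before generating splits.
        FileStatus[] matches = fs.listStatus(p, hiddenFileFilter);
      } catch (FileNotFoundException e) {
        result.add(new IOException("Input path does not exist: " + p));
      }
    }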