Package: org.apache.hadoop.fs

Usage examples of org.apache.hadoop.fs.FileSystem.listPaths()


    for (Path p: inputDirs) {
      FileSystem fs = p.getFileSystem(job);
      if (fs.exists(p)) {
        // make sure all paths are files to avoid exception
        // while generating splits
        for (Path subPath : fs.listPaths(p, hiddenFileFilter)) {
          FileSystem subFS = subPath.getFileSystem(job);
          if (!subFS.exists(subPath)) {
            result.add(new IOException(
                                       "Input path does not exist: " + subPath));
          } else {
View Full Code Here


    Path localDir = new Path(getUnitTestdir(getName()), "index_" +
      Integer.toString(new Random().nextInt()));
    this.fs.copyToLocalFile(new Path(INDEX_DIR), localDir);
    FileSystem localfs = FileSystem.getLocal(conf);
    Path [] indexDirs = localfs.listPaths(new Path [] {localDir});
    Searcher searcher = null;
    HScannerInterface scanner = null;
    try {
      if (indexDirs.length == 1) {
        searcher = new IndexSearcher((new File(indexDirs[0].
View Full Code Here

    }
    List<Path> result = new ArrayList();
    for (Path p: dirs) {
      FileSystem fs = p.getFileSystem(job);
      Path[] matches =
        fs.listPaths(fs.globPaths(p, hiddenFileFilter),hiddenFileFilter);
      for (Path match: matches) {
        result.add(fs.makeQualified(match));
      }
    }
View Full Code Here

    for (Path p: inputDirs) {
      FileSystem fs = p.getFileSystem(job);
      if (fs.exists(p)) {
        // make sure all paths are files to avoid exception
        // while generating splits
        for (Path subPath : fs.listPaths(p, hiddenFileFilter)) {
          FileSystem subFS = subPath.getFileSystem(job);
          if (!subFS.exists(subPath)) {
            result.add(new IOException(
                "Input path does not exist: " + subPath));
          } else {
View Full Code Here

  /** Open the output generated by this format. */
  public static SequenceFile.Reader[] getReaders(Configuration conf, Path dir)
    throws IOException {
    FileSystem fs = dir.getFileSystem(conf);
    Path[] names = fs.listPaths(dir);
   
    // sort names, so that hash partitioning works
    Arrays.sort(names);
   
    SequenceFile.Reader[] parts = new SequenceFile.Reader[names.length];
View Full Code Here

  /** Open the output generated by this format. */
  public static MapFile.Reader[] getReaders(FileSystem ignored, Path dir,
                                            Configuration conf)
    throws IOException {
    FileSystem fs = dir.getFileSystem(conf);
    Path[] names = fs.listPaths(dir);

    // sort names, so that hash partitioning works
    Arrays.sort(names);
   
    MapFile.Reader[] parts = new MapFile.Reader[names.length];
View Full Code Here

    conf.setNumMapTasks(numMaps);
    conf.setNumReduceTasks(numReduces);
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();
    {
      Path[] fileList = fs.listPaths(outDir);
      for(int i=0; i < fileList.length; ++i) {
        BufferedReader file =
          new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
        String line = file.readLine();
        while (line != null) {
View Full Code Here

    //pass a job.jar already included in the hadoop build
    conf.setJar("build/test/testjar/testjob.jar");
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();
    {
      Path[] fileList = fs.listPaths(outDir);
      for(int i=0; i < fileList.length; ++i) {
        BufferedReader file =
          new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
        String line = file.readLine();
        while (line != null) {
View Full Code Here

    conf.setNumMapTasks(numMaps);
    conf.setNumReduceTasks(numReduces);
    JobClient.runJob(conf);
    StringBuffer result = new StringBuffer();
    {
      Path[] fileList = fs.listPaths(outDir);
      for(int i=0; i < fileList.length; ++i) {
        BufferedReader file =
          new BufferedReader(new InputStreamReader(fs.open(fileList[i])));
        String line = file.readLine();
        while (line != null) {
View Full Code Here

  /** Open the output generated by this format. */
  public static SequenceFile.Reader[] getReaders(Configuration conf, Path dir)
    throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Path[] names = fs.listPaths(dir);
   
    // sort names, so that hash partitioning works
    Arrays.sort(names);
   
    SequenceFile.Reader[] parts = new SequenceFile.Reader[names.length];
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Corporation. Contact coftware#gmail.com.