Package org.apache.hadoop.fs

Examples of org.apache.hadoop.fs.Trash.moveToTrash()
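
A minimal, self-contained sketch of the API before the project snippets below (not taken from any of them): it creates a throwaway local file, enables trash via fs.trash.interval, and falls back to a permanent delete when moveToTrash() returns false. The file path is made up for illustration.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public class TrashExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Trash is disabled by default; with fs.trash.interval == 0 (minutes),
    // moveToTrash() is a no-op that returns false.
    conf.setLong("fs.trash.interval", 60);

    FileSystem fs = FileSystem.get(conf);               // local fs unless fs.defaultFS is set
    Path target = new Path("/tmp/trash-example.txt");   // hypothetical file
    fs.create(target).close();                          // ensure something exists to remove

    Trash trash = new Trash(fs, conf);
    if (!trash.moveToTrash(target)) {
      // Trash disabled or the move failed: fall back to a permanent delete.
      fs.delete(target, true);
    }
  }
}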


A test fragment that writes a local file, moves its parent directory to trash, and verifies it landed in the current trash directory:

        f = writeFile(lfs, f);

        FileSystem.closeAll();
        FileSystem localFs = FileSystem.get(URI.create("file:///"), conf);
        Trash lTrash = new Trash(localFs, conf);
        lTrash.moveToTrash(f.getParent());
        checkTrash(localFs, lTrash.getCurrentTrashDir(), f);
      } finally {
        if (lfs.exists(p)) {
          lfs.delete(p, true);
        }


Constructing Trash from a duplicate Configuration whose default URI is pinned to the target FileSystem, so the move to trash resolves against that file system:

    // this needs to be filtered out based on version

    Configuration dupConf = new Configuration(conf);
    FileSystem.setDefaultUri(dupConf, fs.getUri());
    Trash trash = new Trash(dupConf);
    return trash.moveToTrash(path);
  }

  @Override
  public long getDefaultBlockSize(FileSystem fs, Path path) {
    return fs.getDefaultBlockSize();
  }
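
Depending on the Hadoop release, the same concern of routing the move through the file system that actually owns the path can be handled by the static helper Trash.moveToAppropriateTrash(FileSystem, Path, Configuration), available in newer Hadoop releases. A minimal sketch, not taken from the snippets; the wrapper class and method name are made up for illustration:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public final class TrashOrDelete {
  /** Move a path to trash if possible, otherwise delete it permanently. */
  public static boolean moveToTrashOrDelete(FileSystem fs, Path path, Configuration conf)
      throws IOException {
    // moveToAppropriateTrash resolves the file system that owns the path
    // (e.g. under viewfs or federation) before moving it to trash.
    if (Trash.moveToAppropriateTrash(fs, path, conf)) {
      return true;
    }
    // Trash is disabled (fs.trash.interval == 0) or the move did not happen:
    // fall back to a permanent, recursive delete.
    return fs.delete(path, true);
  }
}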

Best-effort cleanup of old data files after a compaction, falling back to a permanent delete when moveToTrash() returns false:

          // start deleting files, if we do not finish they will be cleaned
          // up later
          Trash trash = new Trash(fs, fs.getConf());
          for (Path path : oldDatafiles) {
            Path deleteFile = new Path(location + "/delete+" + compactName + "+" + path.getName());
            if (!trash.moveToTrash(deleteFile))
              fs.delete(deleteFile, true);
          }
        }
       
        // atomically remove old files and add new file


A shell-style rm command: it refuses to remove a directory without the recursive flag, moves the path to trash unless skipTrash was requested, and then deletes it:

          LOG.severe("To remove directory, please use fs rm --recursive instead");
          return;
        }
        if (!skipTrash) {
          Trash trash = new Trash(fs, getHadoopConfiguration());
          trash.moveToTrash(p);
        }
        fs.delete(p, recursive);
      }
    } catch (Throwable t) {
      LOG.severe("run HDFS shell failed. Message is: " + t.getMessage());
