Examples of DistCp


Examples of org.apache.hadoop.tools.DistCp

      for (MyFile f : files) {
        totsize += f.getSize();
      }
      Configuration job = mr.createJobConf();
      job.setLong("distcp.bytes.per.map", totsize / 3);
      ToolRunner.run(new DistCp(job),
          new String[] {"-m", "100",
                        "-log",
                        namenode+"/logs",
                        namenode+"/srcdat",
                        namenode+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, "/destdat", files));

      String logdir = namenode + "/logs";
      System.out.println(execCmd(shell, "-lsr", logdir));
      FileStatus[] logs = fs.listStatus(new Path(logdir));
      // rare case where splits are exact, logs.length can be 4
      assertTrue("Unexpected map count, logs.length=" + logs.length,
          logs.length == 5 || logs.length == 4);

      deldir(fs, "/destdat");
      deldir(fs, "/logs");
      ToolRunner.run(new DistCp(job),
          new String[] {"-m", "1",
                        "-log",
                        namenode+"/logs",
                        namenode+"/srcdat",
                        namenode+"/destdat"});
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

    MiniDFSCluster cluster = null;
    try {
      cluster = new MiniDFSCluster(conf, 2, true, null);
      final String nnUri = FileSystem.getDefaultUri(conf).toString();
      final FileSystem fs = FileSystem.get(URI.create(nnUri), conf);
      final DistCp distcp = new DistCp(conf);
      final FsShell shell = new FsShell(conf)

      final String srcrootdir =  "/src_root";
      final Path srcrootpath = new Path(srcrootdir);
      final String dstrootdir =  "/dst_root";
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

     
      final Path srcrootpath = new Path(home, "src_root");
      final String srcrootdir =  srcrootpath.toString();
      final Path dstrootpath = new Path(home, "dst_root");
      final String dstrootdir =  dstrootpath.toString();
      final DistCp distcp = USER_UGI.doAs(new PrivilegedExceptionAction<DistCp>() {
        public DistCp run() {
          return new DistCp(userConf);
        }
      });

      FileSystem.mkdirs(fs, srcrootpath, new FsPermission((short)0700));
      final String[] args = {"hftp://"+httpAdd+srcrootdir, nnUri+dstrootdir};
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

      cluster = new MiniDFSCluster(conf, 2, true, null);
      final URI nnURI = FileSystem.getDefaultUri(conf);
      final String nnUri = nnURI.toString();
      final FileSystem fs = FileSystem.get(URI.create(nnUri), conf);

      final DistCp distcp = new DistCp(conf);
      final FsShell shell = new FsShell(conf)

      final String srcrootdir = "/src_root";
      final String dstrootdir = "/dst_root";
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

        conf.addResource(confPath);

        String falconFeedStorageType = cmd.getOptionValue("falconFeedStorageType").trim();
        Storage.TYPE feedStorageType = Storage.TYPE.valueOf(falconFeedStorageType);

        DistCp distCp = (feedStorageType == Storage.TYPE.FILESYSTEM)
                ? new CustomReplicator(conf, options)
                : new DistCp(conf, options);
        LOG.info("Started DistCp");
        distCp.execute();

        if (feedStorageType == Storage.TYPE.FILESYSTEM) {
            executePostProcessing(options)// this only applies for FileSystem Storage.
        }
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

    exec("addsplits row5", true);
    exec("config -t " + table + " -s table.split.threshold=345M", true);
    exec("offline " + table, true);
    String export = folder.newFolder().toString();
    exec("exporttable -t " + table + " " + export, true);
    DistCp cp = newDistCp();
    String import_ = folder.newFolder().toString();
    cp.run(new String[] {"-f", export + "/distcp.txt", import_});
    exec("importtable " + table2 + " " + import_, true);
    exec("config -t " + table2 + " -np", true, "345M", true);
    exec("getsplits -t " + table2, true, "row5", true);
    exec("constraint --list -t " + table2, true, "VisibilityConstraint=1", true);
    exec("onlinetable " + table, true);
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

        conf.addResource(confPath);

        String falconFeedStorageType = cmd.getOptionValue("falconFeedStorageType").trim();
        Storage.TYPE feedStorageType = Storage.TYPE.valueOf(falconFeedStorageType);

        DistCp distCp = (feedStorageType == Storage.TYPE.FILESYSTEM)
                ? new CustomReplicator(conf, options)
                : new DistCp(conf, options);
        LOG.info("Started DistCp");
        distCp.execute();

        if (feedStorageType == Storage.TYPE.FILESYSTEM) {
            executePostProcessing(options)// this only applies for FileSystem Storage.
        }
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

    ts.exec("addsplits row5", true);
    ts.exec("config -t " + table + " -s table.split.threshold=345M", true);
    ts.exec("offline " + table, true);
    String export = "file://" + new File(getFolder(), "ShellServerIT.export").toString();
    ts.exec("exporttable -t " + table + " " + export, true);
    DistCp cp = newDistCp();
    String import_ = "file://" + new File(getFolder(), "ShellServerIT.import").toString();
    cp.run(new String[] {"-f", export + "/distcp.txt", import_});
    ts.exec("importtable " + table2 + " " + import_, true);
    ts.exec("config -t " + table2 + " -np", true, "345M", true);
    ts.exec("getsplits -t " + table2, true, "row5", true);
    ts.exec("constraint --list -t " + table2, true, "VisibilityConstraint=2", true);
    ts.exec("onlinetable " + table, true);
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

   
  public void testCopyDuplication() throws Exception {
    final FileSystem localfs = FileSystem.get(LOCAL_FS, new Configuration());
    try {   
      MyFile[] files = createFiles(localfs, TEST_ROOT_DIR+"/srcdat");
      ToolRunner.run(new DistCp(new Configuration()),
          new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                        "file:///"+TEST_ROOT_DIR+"/src2/srcdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(localfs, TEST_ROOT_DIR+"/src2/srcdat", files));
 
      assertEquals(DistCp.DuplicationException.ERROR_CODE,
          ToolRunner.run(new DistCp(new Configuration()),
          new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                        "file:///"+TEST_ROOT_DIR+"/src2/srcdat",
                        "file:///"+TEST_ROOT_DIR+"/destdat",}));
    }
    finally {
View Full Code Here

Examples of org.apache.hadoop.tools.DistCp

    FileSystem fs = FileSystem.get(LOCAL_FS, new Configuration());
    Path root = new Path(TEST_ROOT_DIR+"/srcdat");
    try {   
      MyFile[] files = {createFile(root, fs)};
      //copy a dir with a single file
      ToolRunner.run(new DistCp(new Configuration()),
          new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                        "file:///"+TEST_ROOT_DIR+"/destdat"});
      assertTrue("Source and destination directories do not match.",
                 checkFiles(fs, TEST_ROOT_DIR+"/destdat", files));
     
      //copy a single file
      String fname = files[0].getName();
      Path p = new Path(root, fname);
      FileSystem.LOG.info("fname=" + fname + ", exists? " + fs.exists(p));
      ToolRunner.run(new DistCp(new Configuration()),
          new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
                        "file:///"+TEST_ROOT_DIR+"/dest2/"+fname});
      assertTrue("Source and destination directories do not match.",
          checkFiles(fs, TEST_ROOT_DIR+"/dest2", files));    
     
      // single file update should skip copy if destination has the file already
      String[] args = {"-update", "file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
          "file:///"+TEST_ROOT_DIR+"/dest2/"+fname};
      Configuration conf = new Configuration();
      JobConf job = new JobConf(conf, DistCp.class);
      DistCp.Arguments distcpArgs = DistCp.Arguments.valueOf(args, conf);
      assertFalse("Single file update failed to skip copying even though the "
          + "file exists at destination.", DistCp.setup(conf, job, distcpArgs));
     
      //copy single file to existing dir
      deldir(fs, TEST_ROOT_DIR+"/dest2");
      fs.mkdirs(new Path(TEST_ROOT_DIR+"/dest2"));
      MyFile[] files2 = {createFile(root, fs, 0)};
      String sname = files2[0].getName();
      ToolRunner.run(new DistCp(new Configuration()),
          new String[] {"-update",
                        "file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
                        "file:///"+TEST_ROOT_DIR+"/dest2/"});
      assertTrue("Source and destination directories do not match.",
          checkFiles(fs, TEST_ROOT_DIR+"/dest2", files2));    
      updateFiles(fs, TEST_ROOT_DIR+"/srcdat", files2, 1);
      //copy single file to existing dir w/ dst name conflict
      ToolRunner.run(new DistCp(new Configuration()),
          new String[] {"-update",
                        "file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
                        "file:///"+TEST_ROOT_DIR+"/dest2/"});
      assertTrue("Source and destination directories do not match.",
          checkFiles(fs, TEST_ROOT_DIR+"/dest2", files2));    
View Full Code Here
TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., which is owned by Oracle Inc. Contact coftware#gmail.com.