Package org.apache.hadoop.util

Examples of org.apache.hadoop.util.GenericOptionsParser
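GenericOptionsParser interprets the generic Hadoop command-line options (-conf, -D, -fs, -jt, -files, -libjars, -archives), applies them to a Configuration, and returns the remaining application-specific arguments via getRemainingArgs(). Every snippet on this page follows that pattern. Below is a minimal, self-contained sketch of the basic usage; the MyTool class name and the my.prop property are illustrative only.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.GenericOptionsParser;

    public class MyTool {
      public static void main(String[] rawArgs) throws Exception {
        Configuration conf = new Configuration();
        // The parser consumes generic options such as "-D my.prop=value"
        // and applies them to the supplied Configuration in place.
        GenericOptionsParser parser = new GenericOptionsParser(conf, rawArgs);
        // Everything the parser did not consume is left for the application.
        String[] args = parser.getRemainingArgs();
        System.out.println("my.prop = " + conf.get("my.prop"));
        System.out.println("remaining args: " + args.length);
      }
    }

Run as, for example, "hadoop jar mytool.jar MyTool -D my.prop=hello input output": my.prop ends up in the configuration, while input and output come back as the remaining arguments.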


    String[] args = new String[] {
        EXPORT_TABLE,
        OUTPUT_DIR,
        "1000"
    };

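    // Parse the generic Hadoop options (such as -D) into the Configuration;
    // getRemainingArgs() hands back whatever was not consumed.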
    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    Job job = Export.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    assertTrue(job.isSuccessful());


    String IMPORT_TABLE = "importTableSimpleCase";
    t = UTIL.createTable(Bytes.toBytes(IMPORT_TABLE), FAMILYB);
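    // The -D Import.CF_RENAME_PROP option below renames FAMILYA to FAMILYB
    // as the rows are imported.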
    args = new String[] {
        "-D" + Import.CF_RENAME_PROP + "="+FAMILYA_STRING+":"+FAMILYB_STRING,
        IMPORT_TABLE,
        OUTPUT_DIR
    };

    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    job = Import.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    assertTrue(job.isSuccessful());
View Full Code Here


  /**
   * Test export of the .META. catalog table.
   */
  @Test
  public void testMetaExport() throws Exception {
    String EXPORT_TABLE = ".META.";
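    // Export positional arguments: <tablename> <outputdir> [<versions> [<starttime> [<endtime>]]]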
    String[] args = new String[] { EXPORT_TABLE, OUTPUT_DIR, "1", "0", "0" };
    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
        cluster.getConfiguration()), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    Job job = Export.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    assertTrue(job.isSuccessful());
View Full Code Here

    String[] args = new String[] {
        EXPORT_TABLE,
        OUTPUT_DIR,
        "1000"
    };

    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    Job job = Export.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    assertTrue(job.isSuccessful());


    String IMPORT_TABLE = "importWithDeletes";
    desc = new HTableDescriptor(IMPORT_TABLE);
    desc.addFamily(new HColumnDescriptor(FAMILYA)
        .setMaxVersions(5)
        .setKeepDeletedCells(true)
    );
    UTIL.getHBaseAdmin().createTable(desc);
    t.close();
    t = new HTable(UTIL.getConfiguration(), IMPORT_TABLE);
    args = new String[] {
        IMPORT_TABLE,
        OUTPUT_DIR
    };

    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    job = Import.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    assertTrue(job.isSuccessful());
View Full Code Here

    p.add(FAMILYA, QUAL, now + 4, QUAL);
    exportTable.put(p);

    String[] args = new String[] { EXPORT_TABLE, OUTPUT_DIR, "1000" };

    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
        cluster.getConfiguration()), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    Job job = Export.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    assertTrue(job.isSuccessful());

    String IMPORT_TABLE = "importWithFilter";
    desc = new HTableDescriptor(IMPORT_TABLE);
    desc.addFamily(new HColumnDescriptor(FAMILYA).setMaxVersions(5));
    UTIL.getHBaseAdmin().createTable(desc);

    HTable importTable = new HTable(UTIL.getConfiguration(), IMPORT_TABLE);
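    // The filter class and its constructor arguments travel as -D properties,
    // which GenericOptionsParser copies into the job Configuration.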
    args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
        "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE, OUTPUT_DIR,
        "1000" };

    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    job = Import.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    assertTrue(job.isSuccessful());

    // get the count of the source table for that time range
    PrefixFilter filter = new PrefixFilter(ROW1);
    int count = getCount(exportTable, filter);

    Assert.assertEquals("Unexpected row count between export and import tables", count,
      getCount(importTable, null));

    // and then test that a broken command doesn't bork everything - easier here because we don't
    // need to re-run the export job

    args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(),
        "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", EXPORT_TABLE,
        OUTPUT_DIR, "1000" };

    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    job = Import.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    assertFalse("Job succeeedd, but it had a non-instantiable filter!", job.isSuccessful());
View Full Code Here

    HBaseTestingUtility htu1 = new HBaseTestingUtility();

    htu1.startMiniCluster();
    htu1.startMiniMapReduceCluster();

    GenericOptionsParser opts = new GenericOptionsParser(htu1.getConfiguration(), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    try {
      FileSystem fs = FileSystem.get(conf);
      FSDataOutputStream op = fs.create(new Path(inputFile), true);
      if (data == null) {
View Full Code Here

  }

  @SuppressWarnings("static-access")
  private int parseArgs(String[] args) {
    Options opts = new Options();
    GenericOptionsParser parser =
      new GenericOptionsParser(this.getConf(), opts, args);
    String[] remainingArgs = parser.getRemainingArgs();
    if (remainingArgs.length != 1) {
      usage();
      return -1;
    }
    if (remainingArgs[0].compareTo("check") == 0) {
View Full Code Here


        "Name of the delegation token renewer");
    fetcherOptions.addOption(CANCEL, false, "cancel the token");
    fetcherOptions.addOption(RENEW, false, "renew the token");
    fetcherOptions.addOption(PRINT, false, "print the token");
    fetcherOptions.addOption(HELP_SHORT, HELP, false, "print out help information");
    GenericOptionsParser parser = new GenericOptionsParser(conf,
        fetcherOptions, args);
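    // The three-argument constructor also parses the tool-specific Options;
    // getCommandLine() exposes the parsed result.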
    CommandLine cmd = parser.getCommandLine();
   
    // get options
    final String webUrl = cmd.hasOption(WEBSERVICE) ? cmd
        .getOptionValue(WEBSERVICE) : null;
    final String renewer = cmd.hasOption(RENEWER) ?
        cmd.getOptionValue(RENEWER) : null;
    final boolean cancel = cmd.hasOption(CANCEL);
    final boolean renew = cmd.hasOption(RENEW);
    final boolean print = cmd.hasOption(PRINT);
    final boolean help = cmd.hasOption(HELP);
    String[] remaining = parser.getRemainingArgs();

    // check option validity
    if (help) {
      printUsage(System.out);
      System.exit(0);
View Full Code Here

    if (DFSUtil.parseHelpArgument(args,
        ZKFailoverController.USAGE, System.out, true)) {
      System.exit(0);
    }
   
    GenericOptionsParser parser = new GenericOptionsParser(
        new HdfsConfiguration(), args);
    DFSZKFailoverController zkfc = DFSZKFailoverController.create(
        parser.getConfiguration());
   
    System.exit(zkfc.run(parser.getRemainingArgs()));
  }
View Full Code Here
