Package: net.sourceforge.argparse4j.inf

Examples of net.sourceforge.argparse4j.inf.ArgumentParser.addArgument()


                    .required(false)
                    .help("Relative or absolute path to a local dir containing Solr conf/ dir and in particular " +
                            "conf/solrconfig.xml and optionally also lib/ dir. This directory will be uploaded to each MR task. " +
                            "Example: src/test/resources/solr/minimr");

            Argument updateConflictResolverArg = parser.addArgument("--update-conflict-resolver")
                    .metavar("FQCN")
                    .type(String.class)
                    .setDefault(RetainMostRecentUpdateConflictResolver.class.getName())
                    .help("Fully qualified class name of a Java class that implements the UpdateConflictResolver interface. " +
                            "This enables deduplication and ordering of a series of document updates for the same unique document " +
View Full Code Here


                            "recent to most recent (partial) update. The caller of this interface (i.e. the Hadoop Reducer) will then " +
                            "apply the updates to Solr in the order returned by the orderUpdates() method.\n" +
                            "The default RetainMostRecentUpdateConflictResolver implementation ignores all but the most recent document " +
                            "version, based on a configurable numeric Solr field, which defaults to the file_last_modified timestamp");

            Argument mappersArg = parser.addArgument("--mappers")
                    .metavar("INTEGER")
                    .type(Integer.class)
                    .choices(new RangeArgumentChoice(-1, Integer.MAX_VALUE)) // TODO: also support X% syntax where X is an integer
                    .setDefault(-1)
                    .help("Tuning knob that indicates the maximum number of MR mapper tasks to use. -1 indicates use all map slots " +
View Full Code Here

                    .choices(new RangeArgumentChoice(-1, Integer.MAX_VALUE)) // TODO: also support X% syntax where X is an integer
                    .setDefault(-1)
                    .help("Tuning knob that indicates the maximum number of MR mapper tasks to use. -1 indicates use all map slots " +
                            "available on the cluster.");

            Argument reducersArg = parser.addArgument("--reducers")
                    .metavar("INTEGER")
                    .type(Integer.class)
                    .choices(new RangeArgumentChoice(-1, Integer.MAX_VALUE)) // TODO: also support X% syntax where X is an integer
                    .setDefault(-1)
                    .help("Tuning knob that indicates the number of reducers to index into. " +
View Full Code Here

                            "of solr shards expected by the user. It can be seen as an extension of concurrent lucene merges " +
                            "and tiered lucene merges to the clustered case. The subsequent mapper-only phase " +
                            "merges the output of said large number of reducers to the number of shards expected by the user, " +
                            "again by utilizing more available parallelism on the cluster.");

            Argument fanoutArg = parser.addArgument("--fanout")
                    .metavar("INTEGER")
                    .type(Integer.class)
                    .choices(new RangeArgumentChoice(2, Integer.MAX_VALUE))
                    .setDefault(Integer.MAX_VALUE)
                    .help(FeatureControl.SUPPRESS);
View Full Code Here

                    .type(Integer.class)
                    .choices(new RangeArgumentChoice(2, Integer.MAX_VALUE))
                    .setDefault(Integer.MAX_VALUE)
                    .help(FeatureControl.SUPPRESS);

            Argument maxSegmentsArg = parser.addArgument("--max-segments")
                    .metavar("INTEGER")
                    .type(Integer.class)
                    .choices(new RangeArgumentChoice(1, Integer.MAX_VALUE))
                    .setDefault(1)
                    .help("Tuning knob that indicates the maximum number of segments to be contained on output in the index of " +
View Full Code Here

                            "and it can later be queried faster once deployed to a live Solr serving shard. " +
                            "Set maxSegments to 1 to optimize the index for low query latency. " +
                            "In a nutshell, a small maxSegments value trades indexing latency for subsequently improved query latency. " +
                            "This can be a reasonable trade-off for batch indexing systems.");

            Argument fairSchedulerPoolArg = parser.addArgument("--fair-scheduler-pool")
                    .metavar("STRING")
                    .help("Optional tuning knob that indicates the name of the fair scheduler pool to submit jobs to. " +
                            "The Fair Scheduler is a pluggable MapReduce scheduler that provides a way to share large clusters. " +
                            "Fair scheduling is a method of assigning resources to jobs such that all jobs get, on average, an " +
                            "equal share of resources over time. When there is a single job running, that job uses the entire " +
View Full Code Here

                            "forms a queue of jobs, this lets short jobs finish in reasonable time while not starving long jobs. " +
                            "It is also an easy way to share a cluster between multiple of users. Fair sharing can also work with " +
                            "job priorities - the priorities are used as weights to determine the fraction of total compute time " +
                            "that each job gets.");

            Argument dryRunArg = parser.addArgument("--dry-run")
                    .action(Arguments.storeTrue())
                    .help("Run in local mode and print documents to stdout instead of loading them into Solr. This executes " +
                            "the morphline in the client process (without submitting a job to MR) for quicker turnaround during " +
                            "early trial & debug sessions.");
View Full Code Here

                    .action(Arguments.storeTrue())
                    .help("Run in local mode and print documents to stdout instead of loading them into Solr. This executes " +
                            "the morphline in the client process (without submitting a job to MR) for quicker turnaround during " +
                            "early trial & debug sessions.");

            Argument log4jConfigFileArg = parser.addArgument("--log4j")
                    .metavar("FILE")
                    .type(new FileArgumentType().verifyExists().verifyIsFile().verifyCanRead())
                    .help("Relative or absolute path to a log4j.properties config file on the local file system. This file " +
                            "will be uploaded to each MR task. Example: /path/to/log4j.properties");
View Full Code Here

                    .metavar("FILE")
                    .type(new FileArgumentType().verifyExists().verifyIsFile().verifyCanRead())
                    .help("Relative or absolute path to a log4j.properties config file on the local file system. This file " +
                            "will be uploaded to each MR task. Example: /path/to/log4j.properties");

            Argument verboseArg = parser.addArgument("--verbose", "-v")
                    .action(Arguments.storeTrue())
                    .help("Turn on verbose output.");

            ArgumentGroup clusterInfoGroup = parser
                    .addArgumentGroup("Cluster arguments")
View Full Code Here

                    .choices(new RangeArgumentChoice(1, Integer.MAX_VALUE))
                    .setDefault(1000)
                    .help("Tuning knob that indicates the maximum number of live merges to run in parallel at one time.");

            // trailing positional arguments
            Argument inputFilesArg = parser.addArgument("input-files")
                    .metavar("HDFS_URI")
                    .type(new PathArgumentType(conf).verifyHasScheme().verifyExists().verifyCanRead())
                    .nargs("*")
                    .setDefault()
                    .help("HDFS URI of file or directory tree to index.");
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.