Package weka.core

Examples of weka.core.Option


   */
  public Enumeration listOptions() {
   
    Vector newVector = new Vector(6);

    newVector.addElement(new Option(
  "\tSets search method for subset evaluators.\n"
  + "\teg. -S \"weka.attributeSelection.BestFirst -S 8\"",
  "S", 1,
  "-S <\"Name of search class [search options]\">"));

    newVector.addElement(new Option(
  "\tSets attribute/subset evaluator.\n"
  + "\teg. -E \"weka.attributeSelection.CfsSubsetEval -L\"",
  "E", 1,
  "-E <\"Name of attribute/subset evaluation class [evaluator options]\">"));
   
    if ((m_ASEvaluator != null) && (m_ASEvaluator instanceof OptionHandler)) {
      Enumeration enu = ((OptionHandler)m_ASEvaluator).listOptions();
     
      newVector.addElement(new Option("", "", 0, "\nOptions specific to "
     + "evaluator " + m_ASEvaluator.getClass().getName() + ":"));
      while (enu.hasMoreElements()) {
  newVector.addElement((Option)enu.nextElement());
      }
    }
 
    if ((m_ASSearch != null) && (m_ASSearch instanceof OptionHandler)) {
      Enumeration enu = ((OptionHandler)m_ASSearch).listOptions();
   
      newVector.addElement(new Option("", "", 0, "\nOptions specific to "
        + "search " + m_ASSearch.getClass().getName() + ":"));
      while (enu.hasMoreElements()) {
  newVector.addElement((Option)enu.nextElement());
      }
    }
View Full Code Here


   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(7);

    newVector.addElement(new Option(
              "\tSpecifies list of columns to Discretize. First"
        + " and last are valid indexes.\n"
        + "\t(default none)",
              "R", 1, "-R <col1,col2-col4,...>"));

    newVector.addElement(new Option(
              "\tInvert matching sense of column indexes.",
              "V", 0, "-V"));

    newVector.addElement(new Option(
              "\tOutput binary attributes for discretized attributes.",
              "D", 0, "-D"));

    newVector.addElement(new Option(
              "\tUse better encoding of split point for MDL.",
              "E", 0, "-E"));

    newVector.addElement(new Option(
              "\tUse Kononenko's MDL criterion.",
              "K", 0, "-K"));

    return newVector.elements();
  }
View Full Code Here

   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(1);

    newVector.addElement(new Option(
  "\tSets if binary attributes are to be coded as nominal ones.",
  "N", 0, "-N"));
   
    newVector.addElement(new Option(
  "\tFor each nominal value a new attribute is created, \n"
  + "\tnot only if there are more than 2 values.",
  "A", 0, "-A"));

    return newVector.elements();
View Full Code Here

    enm = super.listOptions();
    while (enm.hasMoreElements())
      result.addElement(enm.nextElement());

    result.addElement(new Option(
  "\tThe number of components to compute.\n"
  + "\t(default: 20)",
  "C", 1, "-C <num>"));

    result.addElement(new Option(
  "\tUpdates the class attribute as well.\n"
  + "\t(default: off)",
  "U", 0, "-U"));

    result.addElement(new Option(
  "\tTurns replacing of missing values on.\n"
  + "\t(default: off)",
  "M", 0, "-M"));

    param = "";
    for (i = 0; i < TAGS_ALGORITHM.length; i++) {
      if (i > 0)
  param += "|";
      tag = new SelectedTag(TAGS_ALGORITHM[i].getID(), TAGS_ALGORITHM);
      param += tag.getSelectedTag().getReadable();
    }
    result.addElement(new Option(
  "\tThe algorithm to use.\n"
  + "\t(default: PLS1)",
  "A", 1, "-A <" + param + ">"));

    param = "";
    for (i = 0; i < TAGS_PREPROCESSING.length; i++) {
      if (i > 0)
  param += "|";
      tag = new SelectedTag(TAGS_PREPROCESSING[i].getID(), TAGS_PREPROCESSING);
      param += tag.getSelectedTag().getReadable();
    }
    result.addElement(new Option(
  "\tThe type of preprocessing that is applied to the data.\n"
  + "\t(default: center)",
  "P", 1, "-P <" + param + ">"));

    return result.elements();
View Full Code Here

  public Enumeration listOptions() {

    Vector newVector = new Vector(9);

    newVector.
  addElement(new Option("\tUse unpruned tree.",
            "U", 0, "-U"));
    newVector.
  addElement(new Option("\tSet confidence threshold for pruning.\n" +
            "\t(default 0.25)",
            "C", 1, "-C <pruning confidence>"));
    newVector.
  addElement(new Option("\tSet minimum number of instances per leaf.\n" +
            "\t(default 2)",
            "M", 1, "-M <minimum number of instances>"));
    newVector.
  addElement(new Option("\tUse reduced error pruning.",
            "R", 0, "-R"));
    newVector.
  addElement(new Option("\tSet number of folds for reduced error\n" +
            "\tpruning. One fold is used as pruning set.\n" +
            "\t(default 3)",
            "N", 1, "-N <number of folds>"));
    newVector.
  addElement(new Option("\tUse binary splits only.",
            "B", 0, "-B"));
    newVector.
        addElement(new Option("\tDon't perform subtree raising.",
            "S", 0, "-S"));
    newVector.
        addElement(new Option("\tDo not clean up after the tree has been built.",
            "L", 0, "-L"));
   newVector.
        addElement(new Option("\tLaplace smoothing for predicted probabilities.",
            "A", 0, "-A"));
    newVector.
      addElement(new Option("\tSeed for random data shuffling (default 1).",
          "Q", 1, "-Q <seed>"));

    return newVector.elements();
  }
View Full Code Here

   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(2);

    newVector.addElement(new Option(
       "\tThe name of the field to average over.\n"
        +"\t(default \"Fold\")",
       "F", 1,
       "-F <field name>"));
    newVector.addElement(new Option(
       "\tThe number of results expected per average.\n"
        +"\t(default 10)",
       "X", 1,
       "-X <num results>"));
    newVector.addElement(new Option(
       "\tCalculate standard deviations.\n"
        +"\t(default only averages)",
       "S", 0,
       "-S"));
    newVector.addElement(new Option(
       "\tThe full class name of a ResultProducer.\n"
        +"\teg: weka.experiment.CrossValidationResultProducer",
       "W", 1,
       "-W <class name>"));

    if ((m_ResultProducer != null) &&
  (m_ResultProducer instanceof OptionHandler)) {
      newVector.addElement(new Option(
       "",
       "", 0, "\nOptions specific to result producer "
       + m_ResultProducer.getClass().getName() + ":"));
      Enumeration enu = ((OptionHandler)m_ResultProducer).listOptions();
      while (enu.hasMoreElements()) {
View Full Code Here

  public Enumeration listOptions() {

    Vector newVector = new Vector(2);

    newVector.addElement(
    new Option("\tDon't weight voting intervals by confidence",
         "C", 0,"-C"));
    newVector.addElement(
    new Option("\tSet exponential bias towards confident intervals\n"
         +"\t(default = 0.6)",
         "B", 1,"-B <bias>"));

    return newVector.elements();
  }
View Full Code Here

   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(2);

    newVector.addElement(new Option(
        "\tNumber of folds used for cross validation (default 10).",
        "X", 1, "-X <number of folds>"));
    newVector.addElement(new Option(
        "\tClassifier parameter options.\n"
        + "\teg: \"N 1 5 10\" Sets an optimisation parameter for the\n"
        + "\tclassifier with name -N, with lower bound 1, upper bound\n"
        + "\t5, and 10 optimisation steps. The upper bound may be the\n"
        + "\tcharacter 'A' or 'I' to substitute the number of\n"
View Full Code Here

   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions () {
    Vector result = new Vector();
   
    result.addElement(new Option(
  "\tnumber of clusters. If omitted or -1 specified, then \n"
  + "\tcross validation is used to select the number of clusters.",
  "N", 1, "-N <num>"));

    result.addElement(new Option(
  "\tmax iterations."
  + "\n(default 100)",
  "I", 1, "-I <num>"));
   
    result.addElement(new Option(
  "\tverbose.",
  "V", 0, "-V"));
   
    result.addElement(new Option(
  "\tminimum allowable standard deviation for normal density\n"
  + "\tcomputation\n"
  + "\t(default 1e-6)",
  "M",1,"-M <num>"));

    result.addElement(
              new Option("\tDisplay model in old format (good when there are "
                         + "many clusters)\n",
                         "O", 0, "-O"));

    Enumeration en = super.listOptions();
    while (en.hasMoreElements())
View Full Code Here

  public Enumeration listOptions() {

    Vector newVector = new Vector(9);

    newVector.
       addElement(new Option("\tUse unpruned tree.",
            "U", 0, "-U"));
    newVector.
       addElement(new Option("\tSet confidence threshold for pruning.\n" +
                             "\t(default 0.25)",
           "C", 1, "-C <pruning confidence>"));
    newVector.
       addElement(new Option("\tSet minimum number of instances per leaf.\n" +
            "\t(default 2)",
            "M", 1, "-M <minimum number of instances>"));
    newVector.
       addElement(new Option("\tUse binary splits only.",
            "B", 0, "-B"));
    newVector.
       addElement(new Option("\tDon't perform subtree raising.",
            "S", 0, "-S"));
    newVector.
       addElement(new Option("\tDo not clean up after the tree has been built.",
            "L", 0, "-L"));
    newVector.
       addElement(new Option("\tLaplace smoothing for predicted probabilities.  (note: this option only affects initial tree; grafting process always uses laplace).",
            "A", 0, "-A"));
    newVector.
       addElement(new Option("\tRelabel when grafting.",
                             "E", 0, "-E"));
    return newVector.elements();
  }
View Full Code Here

TOP

Related Classes of weka.core.Option

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.