Package: weka.core

Usage examples of the weka.core.Option class


    Vector      result;

    result = new Vector();

    result.addElement(
        new Option(
          "\tSet type of solver (default: 1)\n"
          + "\t\t 0 = L2-regularized logistic regression\n"
          + "\t\t 1 = L2-loss support vector machines (dual)\n"
          + "\t\t 2 = L2-loss support vector machines (primal)\n"
          + "\t\t 3 = L1-loss support vector machines (dual)\n"
          + "\t\t 4 = multi-class support vector machines by Crammer and Singer",
          "S", 1, "-S <int>"));

    result.addElement(
        new Option(
          "\tSet the cost parameter C\n"
          + "\t (default: 1)",
          "C", 1, "-C <double>"));

    result.addElement(
        new Option(
          "\tTurn on normalization of input data (default: off)",
          "Z", 0, "-Z"));
   
    result.addElement(
        new Option("\tTurn on nominal to binary conversion.",
            "N", 0, "-N"));
   
    result.addElement(
        new Option("\tTurn off missing value replacement."
            + "\n\tWARNING: use only if your data has no missing "
            + "values.", "M", 0, "-M"));

    result.addElement(
        new Option(
          "\tUse probability estimation (default: off)\n" +
          "currently for L2-regularized logistic regression only! ",
          "P", 0, "-P"));

    result.addElement(
        new Option(
          "\tSet tolerance of termination criterion (default: 0.01)",
          "E", 1, "-E <double>"));

    result.addElement(
        new Option(
          "\tSet the parameters C of class i to weight[i]*C\n"
          + "\t (default: 1)",
          "W", 1, "-W <double>"));

    result.addElement(
        new Option(
          "\tAdd Bias term with the given value if >= 0; if < 0, no bias term added (default: 1)",
          "B", 1, "-B <double>"));

    Enumeration en = super.listOptions();
    while (en.hasMoreElements())
View Full Code Here


   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(7);
   
    newVector.addElement(new Option("\tUse the baLanced version\n"
            + "\t(default false)",
            "L", 0, "-L"));
    newVector.addElement(new Option("\tThe number of iterations to be performed.\n"
            + "\t(default 1)",
            "I", 1, "-I <int>"));
    newVector.addElement(new Option("\tPromotion coefficient alpha.\n"
            + "\t(default 2.0)",
            "A", 1, "-A <double>"));
    newVector.addElement(new Option("\tDemotion coefficient beta.\n"
            + "\t(default 0.5)",
            "B", 1, "-B <double>"));
    newVector.addElement(new Option("\tPrediction threshold.\n"
            + "\t(default -1.0 == number of attributes)",
            "H", 1, "-H <double>"));
    newVector.addElement(new Option("\tStarting weights.\n"
            + "\t(default 2.0)",
            "W", 1, "-W <double>"));
    newVector.addElement(new Option("\tDefault random seed.\n"
            + "\t(default 1)",
            "S", 1, "-S <int>"));

    return newVector.elements();
  }
View Full Code Here

     * @return an enumeration of all the available options.
     */
    public Enumeration listOptions() {
  Vector newVector = new Vector();
 
  newVector.addElement(new Option(
      "\tSet fixed number of iterations for LogitBoost",
      "I",1,"-I <iterations>"));
 
  newVector.addElement(new Option(
      "\tUse stopping criterion on training set (instead of\n"
      + "\tcross-validation)",
      "S",0,"-S"));
 
  newVector.addElement(new Option(
      "\tUse error on probabilities (rmse) instead of\n"
      + "\tmisclassification error for stopping criterion",
      "P",0,"-P"));

  newVector.addElement(new Option(
      "\tSet maximum number of boosting iterations",
      "M",1,"-M <iterations>"));

  newVector.addElement(new Option(
      "\tSet parameter for heuristic for early stopping of\n"
      + "\tLogitBoost.\n"
      + "\tIf enabled, the minimum is selected greedily, stopping\n"
      + "\tif the current minimum has not changed for iter iterations.\n"
      + "\tBy default, heuristic is enabled with value 50. Set to\n"
      + "\tzero to disable heuristic.",
      "H",1,"-H <iterations>"));
       
        newVector.addElement(new Option("\tSet beta for weight trimming for LogitBoost. Set to 0 for no weight trimming.\n",
                                        "W",1,"-W <beta>"));
       
        newVector.addElement(new Option("\tThe AIC is used to choose the best iteration (instead of CV or training error).\n",
                                        "A", 0, "-A"));
 
  return newVector.elements();
    }
View Full Code Here

    Enumeration enm = super.listOptions();
    while (enm.hasMoreElements())
      result.addElement(enm.nextElement());

    result.addElement(new Option(
  "\tTurns off all checks - use with caution!\n"
  + "\tTurning them off assumes that data is purely numeric, doesn't\n"
  + "\tcontain any missing values, and has a nominal class. Turning them\n"
  + "\toff also means that no header information will be stored if the\n"
  + "\tmachine is linear. Finally, it also assumes that no instance has\n"
  + "\ta weight equal to 0.\n"
  + "\t(default: checks on)",
  "no-checks", 0, "-no-checks"));

    result.addElement(new Option(
  "\tThe complexity constant C. (default 1)",
  "C", 1, "-C <double>"));
   
    result.addElement(new Option(
  "\tWhether to 0=normalize/1=standardize/2=neither. " +
  "(default 0=normalize)",
  "N", 1, "-N"));
   
    result.addElement(new Option(
  "\tThe tolerance parameter. " +
  "(default 1.0e-3)",
  "L", 1, "-L <double>"));
   
    result.addElement(new Option(
  "\tThe epsilon for round-off error. " +
  "(default 1.0e-12)",
  "P", 1, "-P <double>"));
   
    result.addElement(new Option(
  "\tFit logistic models to SVM outputs. ",
  "M", 0, "-M"));
   
    result.addElement(new Option(
  "\tThe number of folds for the internal\n" +
  "\tcross-validation. " +
  "(default -1, use training data)",
  "V", 1, "-V <double>"));
   
    result.addElement(new Option(
  "\tThe random number seed. " +
  "(default 1)",
  "W", 1, "-W <double>"));
   
    result.addElement(new Option(
  "\tThe Kernel to use.\n"
  + "\t(default: weka.classifiers.functions.supportVector.PolyKernel)",
  "K", 1, "-K <classname and parameters>"));

    result.addElement(new Option(
  "",
  "", 0, "\nOptions specific to kernel "
  + getKernel().getClass().getName() + ":"));
   
    enm = ((OptionHandler) getKernel()).listOptions();
View Full Code Here

   *
   * @return an enumeration of all the available options
   */
  public Enumeration listOptions() {
    Vector newVector = new Vector(3);
    newVector.addElement(new Option("\tTurn on debugging output.",
            "D", 0, "-D"));
    newVector.addElement(new Option("\tSet the ridge in the log-likelihood.",
            "R", 1, "-R <ridge>"));
    newVector.addElement(new Option("\tSet the maximum number of iterations"+
            " (default -1, until convergence).",
            "M", 1, "-M <number>"));
    return newVector.elements();
  }
View Full Code Here

   * @return an enumeration of all the available options.
   */
  public Enumeration<Option> listOptions() {

    Vector<Option> newVector = new Vector<Option>();
    newVector.add(new Option("\tSet the loss function to minimize. 0 = " +
        "hinge loss (SVM), 1 = log loss (logistic regression).\n" +
        "\t(default = 0)", "F", 1, "-F"));
    newVector.add(new Option("\tThe lambda regularization constant " +
        "(default = 0.0001)",
        "L", 1, "-L <double>"));
    newVector.add(new Option("\tThe number of epochs to perform (" +
        "batch learning only, default = 500)", "E", 1,
        "-E <integer>"));
    newVector.add(new Option("\tDon't normalize the data", "N", 0, "-N"));
    newVector.add(new Option("\tDon't replace missing values", "M", 0, "-M"));
   
    return newVector.elements();
  }
View Full Code Here

    Vector      result;
   
    result = new Vector();
   
    result.addElement(
        new Option(
            "\tSet type of SVM (default: 0)\n"
            + "\t\t 0 = C-SVC\n"
            + "\t\t 1 = nu-SVC\n"
            + "\t\t 2 = one-class SVM\n"
            + "\t\t 3 = epsilon-SVR\n"
            + "\t\t 4 = nu-SVR",
            "S", 1, "-S <int>"));
   
    result.addElement(
        new Option(
            "\tSet type of kernel function (default: 2)\n"
            + "\t\t 0 = linear: u'*v\n"
            + "\t\t 1 = polynomial: (gamma*u'*v + coef0)^degree\n"
            + "\t\t 2 = radial basis function: exp(-gamma*|u-v|^2)\n"
            + "\t\t 3 = sigmoid: tanh(gamma*u'*v + coef0)",
            "K", 1, "-K <int>"));
   
    result.addElement(
        new Option(
            "\tSet degree in kernel function (default: 3)",
            "D", 1, "-D <int>"));
   
    result.addElement(
        new Option(
            "\tSet gamma in kernel function (default: 1/k)",
            "G", 1, "-G <double>"));
   
    result.addElement(
        new Option(
            "\tSet coef0 in kernel function (default: 0)",
            "R", 1, "-R <double>"));
   
    result.addElement(
        new Option(
            "\tSet the parameter C of C-SVC, epsilon-SVR, and nu-SVR\n"
            + "\t (default: 1)",
            "C", 1, "-C <double>"));
   
    result.addElement(
        new Option(
            "\tSet the parameter nu of nu-SVC, one-class SVM, and nu-SVR\n"
            + "\t (default: 0.5)",
            "N", 1, "-N <double>"));
   
    result.addElement(
        new Option(
            "\tTurns on normalization of input data (default: off)",
            "Z", 0, "-Z"));
   
    result.addElement(
        new Option("\tTurn off nominal to binary conversion."
            + "\n\tWARNING: use only if your data is all numeric!",
            "J", 0, "-J"));
   
    result.addElement(
        new Option("\tTurn off missing value replacement."
            + "\n\tWARNING: use only if your data has no missing "
            + "values.", "V", 0, "-V"));
   
    result.addElement(
        new Option(
            "\tSet the epsilon in loss function of epsilon-SVR (default: 0.1)",
            "P", 1, "-P <double>"));
   
    result.addElement(
        new Option(
            "\tSet cache memory size in MB (default: 40)",
            "M", 1, "-M <double>"));
   
    result.addElement(
        new Option(
            "\tSet tolerance of termination criterion (default: 0.001)",
            "E", 1, "-E <double>"));
   
    result.addElement(
        new Option(
            "\tTurns the shrinking heuristics off (default: on)",
            "H", 0, "-H"));
   
    result.addElement(
        new Option(
            "\tSet the parameters C of class i to weight[i]*C, for C-SVC\n"
            + "\tE.g., for a 3-class problem, you could use \"1 1 1\" for equally\n"
            + "\tweighted classes.\n"
            + "\t(default: 1 for all classes)",
            "W", 1, "-W <double>"));
   
    result.addElement(
        new Option(
            "\tTrains a SVC model instead of a SVR one (default: SVR)",
            "B", 0, "-B"));

    Enumeration en = super.listOptions();
    while (en.hasMoreElements())
View Full Code Here

   */
  public Enumeration listOptions() {
    Enumeration enm;
    Vector result = new Vector();
   
    result.addElement(new Option(
  "\tThe complexity constant C.\n"
  + "\t(default 1)",
  "C", 1, "-C <double>"));
   
    result.addElement(new Option(
  "\tWhether to 0=normalize/1=standardize/2=neither.\n"
  + "\t(default 0=normalize)",
  "N", 1, "-N"));
   
    result.addElement(new Option(
  "\tOptimizer class used for solving quadratic optimization problem\n"
  + "\t(default " + RegSMOImproved.class.getName() + ")",
  "I", 1, "-I <classname and parameters>"));
   
    result.addElement(new Option(
  "\tThe Kernel to use.\n"
  + "\t(default: weka.classifiers.functions.supportVector.PolyKernel)",
  "K", 1, "-K <classname and parameters>"));

    result.addElement(new Option(
  "",
  "", 0, "\nOptions specific to optimizer ('-I') "
  + getRegOptimizer().getClass().getName() + ":"));

    enm = ((OptionHandler) getRegOptimizer()).listOptions();
    while (enm.hasMoreElements())
      result.addElement(enm.nextElement());

    result.addElement(new Option(
  "",
  "", 0, "\nOptions specific to kernel ('-K') "
  + getKernel().getClass().getName() + ":"));
   
    enm = ((OptionHandler) getKernel()).listOptions();
View Full Code Here

   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(4);

    newVector.addElement(new Option("\tThe number of iterations to be performed.\n"
            + "\t(default 1)",
            "I", 1, "-I <int>"));
    newVector.addElement(new Option("\tThe exponent for the polynomial kernel.\n"
            + "\t(default 1)",
            "E", 1, "-E <double>"));
    newVector.addElement(new Option("\tThe seed for the random number generation.\n"
            + "\t(default 1)",
            "S", 1, "-S <int>"));
    newVector.addElement(new Option("\tThe maximum number of alterations allowed.\n"
            + "\t(default 10000)",
            "M", 1, "-M <int>"));

    return newVector.elements();
  }
View Full Code Here

    en = super.listOptions();
    while (en.hasMoreElements())
      result.addElement(en.nextElement());

    result.addElement(new Option(
  "\tThe Exponent to use.\n"
  + "\t(default: 1.0)",
  "E", 1, "-E <num>"));

    result.addElement(new Option(
  "\tUse lower-order terms.\n"
  + "\t(default: no)",
  "L", 0, "-L"));

    return result.elements();
View Full Code Here

TOP

Related Classes of weka.core.Option

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact: software#gmail.com.