Package weka.classifiers.functions

Examples of weka.classifiers.functions.SMO$BinarySMO


      // use 5-fold CV to emulate the 80-20 random split of jkms
      Instances train = full.trainCV(5, 0);
      Instances test = full.testCV(5, 0);
     
      // new svm with rbf kernel
      SMO smo = new SMO();
      String[] options = { "-L", "1e-15", "-P", "1e-15", "-N", "2" }; // each flag and its value must be separate array elements
      smo.setOptions(options);
      smo.setC(1.0); //same as default value for jkms
      RBFKernel rbf = new RBFKernel();
      rbf.setGamma(0.1); //same as default value for jkms
      smo.setKernel(rbf);
     
      smo.buildClassifier(train);
     
     
      Evaluation eval = new Evaluation(train);
      eval.evaluateModel(smo, test);
     
View Full Code Here
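A self-contained sketch of how the first snippet might be wired up end to end. The file name data.arff, the random seed, and the class-index choice are placeholders added for illustration; the fold setup, kernel gamma, and C mirror the values shown above.

      import java.util.Random;
      import weka.classifiers.Evaluation;
      import weka.classifiers.functions.SMO;
      import weka.classifiers.functions.supportVector.RBFKernel;
      import weka.core.Instances;
      import weka.core.converters.ConverterUtils.DataSource;

      public class RbfSmoExample {
        public static void main(String[] args) throws Exception {
          // "data.arff" is a placeholder path used only for this illustration
          Instances full = DataSource.read("data.arff");
          full.setClassIndex(full.numAttributes() - 1);
          full.randomize(new Random(42));        // shuffle before taking fold 0

          Instances train = full.trainCV(5, 0);  // roughly 80% of the data
          Instances test = full.testCV(5, 0);    // remaining 20%

          SMO smo = new SMO();
          smo.setOptions(new String[] { "-L", "1e-15", "-P", "1e-15", "-N", "2" });
          smo.setC(1.0);
          RBFKernel rbf = new RBFKernel();
          rbf.setGamma(0.1);
          smo.setKernel(rbf);
          smo.buildClassifier(train);

          Evaluation eval = new Evaluation(train);
          eval.evaluateModel(smo, test);
          System.out.println(eval.toSummaryString());
        }
      }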


   * @see               Capabilities
   */
  public Capabilities getCapabilities() {
    Capabilities        result;
   
    result = new SMO().getCapabilities();
   
    result.setOwner(this);
   
    // only binary attributes are allowed, otherwise the NominalToBinary
    // filter inside SMO will increase the number of attributes which in turn
View Full Code Here
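The capabilities snippet is cut off at the comment. A plausible completion, based on the standard weka.core.Capabilities API, is to swap the inherited nominal-attribute capability for a binary-only one; the exact capabilities toggled by the original class may differ.

    // hypothetical completion: restrict the capabilities inherited from SMO
    result.disable(Capabilities.Capability.NOMINAL_ATTRIBUTES);
    result.enable(Capabilities.Capability.BINARY_ATTRIBUTES);

    return result;
  }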

        } else {
          numToElim = (numAttrLeft >= m_numToEliminate) ? m_numToEliminate : numAttrLeft;
        }
       
        // Build the linear SVM with default parameters
        SMO smo = new SMO();
                               
        // SMO seems to get stuck if data not normalised when few attributes remain
        // smo.setNormalizeData(numAttrLeft < 40);
        smo.setFilterType(new SelectedTag(m_smoFilterType, SMO.TAGS_FILTER));
        smo.setEpsilon(m_smoPParameter);
        smo.setToleranceParameter(m_smoTParameter);
        smo.setC(m_smoCParameter);
        smo.buildClassifier(trainCopy);
                               
        // Find the attribute with maximum weight^2
        double[] weightsSparse = smo.sparseWeights()[0][1];
        int[] indicesSparse = smo.sparseIndices()[0][1];
        double[] weights = new double[trainCopy.numAttributes()];
        for (int j = 0; j < weightsSparse.length; j++) {
          weights[indicesSparse[j]] = weightsSparse[j] * weightsSparse[j];
        }
        weights[trainCopy.classIndex()] = Double.MAX_VALUE;
View Full Code Here
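Once every attribute has its squared weight (the class index is pinned to Double.MAX_VALUE so it can never be selected for removal), the numToElim lowest-weighted attributes can be dropped before the next elimination round. A sketch of that step, assuming trainCopy and numToElim stay in scope as in the loop above:

        // rank attribute indices by ascending squared weight
        Integer[] order = new Integer[weights.length];
        for (int j = 0; j < order.length; j++) {
          order[j] = j;
        }
        java.util.Arrays.sort(order, (a, b) -> Double.compare(weights[a], weights[b]));

        // collect the numToElim weakest attributes ...
        int[] toRemove = new int[numToElim];
        for (int j = 0; j < numToElim; j++) {
          toRemove[j] = order[j];
        }

        // ... and drop them with the Remove filter
        weka.filters.unsupervised.attribute.Remove remove =
          new weka.filters.unsupervised.attribute.Remove();
        remove.setAttributeIndicesArray(toRemove);
        remove.setInputFormat(trainCopy);
        trainCopy = weka.filters.Filter.useFilter(trainCopy, remove);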

        case J48:
          J48 j48 = new J48();     
          j48.setOptions(new String[] { "-C", "0.25", "-M", "2" });
          return j48;
        case SMO:
          SMO smo = new SMO();
          smo.setOptions(Utils.splitOptions("-C 1.0 -L 0.001 -P 1.0E-12 -N 0 -V -1 -W 1 -K \"weka.classifiers.functions.supportVector.PolyKernel -C 250007 -E 1.0\""));
          return smo;
        case LOGISTIC:
          Logistic logistic = new Logistic();
          logistic.setOptions(Utils.splitOptions("-R 1.0E-8 -M -1"));
          return logistic;
View Full Code Here
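A quick way to sanity-check whichever baseline the factory above returns is 10-fold cross-validation. The buildBaseline name, the ClassifierType constant, and the data variable are assumptions made for this usage sketch, not part of the original code.

          // hypothetical usage of the classifier factory shown above
          Classifier baseline = buildBaseline(ClassifierType.SMO);
          Evaluation eval = new Evaluation(data);
          eval.crossValidateModel(baseline, data, 10, new Random(1));
          System.out.println(eval.toSummaryString());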

    public List<Entry<String, Double>> getFeatureWeights(ExampleSet trainingExamples, Model model) {
        WekaModel wekaModel = (WekaModel) model;
        Classifier classifier = wekaModel.getClassifier();
        Instances dataFormat = trainingExamples.getInstances();

        SMO smo = getSMO(classifier);

        double[] sparseWeights = smo.sparseWeights()[0][1];
        int[] sparseIndices = smo.sparseIndices()[0][1];

        Map<String, Double> weights = new HashMap<String, Double>();
        for (int i = 0; i < sparseWeights.length; i++) {
            int index = sparseIndices[i];
            double weight = sparseWeights[i];
View Full Code Here
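The loop is cut off mid-body. A plausible completion maps each sparse index back to its attribute name through dataFormat, skips the class attribute, and returns the entries ordered by absolute weight; the names taken from the snippet are kept, everything else is an assumption.

        for (int i = 0; i < sparseWeights.length; i++) {
            int index = sparseIndices[i];
            double weight = sparseWeights[i];
            if (index != dataFormat.classIndex()) {
                weights.put(dataFormat.attribute(index).name(), weight);
            }
        }

        List<Entry<String, Double>> ranked = new ArrayList<Entry<String, Double>>(weights.entrySet());
        Collections.sort(ranked, new Comparator<Entry<String, Double>>() {
            public int compare(Entry<String, Double> a, Entry<String, Double> b) {
                return Double.compare(Math.abs(b.getValue()), Math.abs(a.getValue()));
            }
        });
        return ranked;
    }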

    private SMO getSMO(Classifier classifier) {
        if (classifier instanceof CostSensitiveClassifier) {
            classifier = ((CostSensitiveClassifier) classifier).getClassifier();
        }

        SMO smo = null;
        if (classifier instanceof SMO) {
            smo = (SMO) classifier;
        } else {
            throw new IllegalArgumentException("Classifier was neither SMO nor CostSensitiveClassifier(SMO)");
        }
View Full Code Here

    }

    @Override
    public WekaModel train(ExampleSet examples) {
        System.out.println("SMO Options: " + SMO_OPTIONS);
        SMO smo = new SMO();
        try {
            smo.setOptions(Utils.splitOptions(SMO_OPTIONS));
        } catch (Exception ex) {
            System.err.println("Unable to configure SMO.");
            System.err.println("\t" + ex.getMessage());
            return null;
        }

        //Build logistic models if desired
        smo.setBuildLogisticModels(isBuildLogisticModel());

        Classifier classifier = smo;

        if (useCostTraining) {
            CostSensitiveClassifier cost = new CostSensitiveClassifier();
View Full Code Here
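The cost-sensitive branch stops right after the wrapper is created. A sketch of how it might continue, assuming a 2-class problem; the concrete costs are placeholders and the CostMatrix.setCell call may vary by Weka version, so treat this as an outline rather than the original implementation.

        if (useCostTraining) {
            CostSensitiveClassifier cost = new CostSensitiveClassifier();
            cost.setClassifier(smo);

            // placeholder 2x2 cost matrix: penalise one kind of error more heavily
            CostMatrix matrix = new CostMatrix(2);
            matrix.setCell(0, 1, 1.0);
            matrix.setCell(1, 0, 5.0);
            cost.setCostMatrix(matrix);

            classifier = cost;
        }

        classifier.buildClassifier(examples.getInstances());
        // the exact WekaModel constructor is not shown in the snippet; assumed here
        return new WekaModel(classifier, examples.getInstances());
    }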

    }

    @Override
    public WekaModel train(ExampleSet examples) {
        //These settings aren't terrible
        SMO smo = new SMO();
        RBFKernel rbf = new RBFKernel();
        rbf.setGamma(0.5);
        smo.setKernel(rbf);
        smo.setC(1.5);
       
        //These also work pretty ok
        Logistic log = new Logistic();
        log.setRidge(100);
       
View Full Code Here
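Neither configuration is evaluated in the snippet itself. As an illustration (not part of the original train method), the two can be compared with 10-fold cross-validation, assuming a loaded Instances object named data with its class index already set:

        Evaluation svmEval = new Evaluation(data);
        svmEval.crossValidateModel(smo, data, 10, new Random(1));

        Evaluation logEval = new Evaluation(data);
        logEval.crossValidateModel(log, data, 10, new Random(1));

        System.out.println("RBF SMO accuracy:  " + svmEval.pctCorrect());
        System.out.println("Logistic accuracy: " + logEval.pctCorrect());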
