Package: weka.classifiers

Usage examples of the weka.classifiers.Evaluation.evaluateModel() method


        output.setBuffer(new StringBuffer());
        output.setHeader(test);
        output.setAttributes("first");
       
    Evaluation eval = new Evaluation(train);
        eval.evaluateModel(filteredClassifier, test, output);
       
        // Convert predictions to CSV
        // Format: inst#, actual, predicted, error, probability, (ID)
        String[] scores = new String[new Double(eval.numInstances()).intValue()];
        double[] probabilities = new double[new Double(eval.numInstances()).intValue()];
View Full Code Here


            
          // Build the classifier
          filteredClassifier.buildClassifier(train);
         
          // Evaluate
          eval.evaluateModel(filteredClassifier, test);
         
          // Add predictions
          AddClassification filter = new AddClassification();
          filter.setClassifier(classifier);
          filter.setOutputClassification(true);
View Full Code Here

    // calculate error rate if only root node
    if (expansion==0) {
      m_roots[i].m_isLeaf = true;
      eval = new Evaluation(test[i]);
      eval.evaluateModel(m_roots[i], test[i]);
      if (m_UseErrorRate) expansionError += eval.errorRate();
      else expansionError += eval.rootMeanSquaredError();
      count ++;
    }
View Full Code Here

    m_Heuristic, m_UseGini)) {
        m_roots[i] = null; // cannot be expanded
        continue;
      }
      eval = new Evaluation(test[i]);
      eval.evaluateModel(m_roots[i], test[i]);
      if (m_UseErrorRate) expansionError += eval.errorRate();
      else expansionError += eval.rootMeanSquaredError();
      count ++;
    }
  }
View Full Code Here

      for (int i = 0; i < m_numFoldsPruning; i++) {
  modelError[i] = new FastVector();

  m_roots[i].m_isLeaf = true;
  Evaluation eval = new Evaluation(test[i]);
  eval.evaluateModel(m_roots[i], test[i]);
  double error;
  if (m_UseErrorRate) error = eval.errorRate();
  else error = eval.rootMeanSquaredError();
  modelError[i].addElement(new Double(error));
View Full Code Here

  for (int i=0; i<2; i++){
    m_Successors[i].makeLeaf(train);
  }

  Evaluation eval = new Evaluation(test);
  eval.evaluateModel(root, test);
  double error;
  if (useErrorRate) error = eval.errorRate();
  else error = eval.rootMeanSquaredError();
  modelError.addElement(new Double(error));
      }
View Full Code Here

        // learning scheme.
  Instances train = trainData.trainCV(m_NumFolds, j, new Random(1));
  Instances test = trainData.testCV(m_NumFolds, j);
  m_Classifier.buildClassifier(train);
  evaluation.setPriors(train);
  evaluation.evaluateModel(m_Classifier, test);
      }
      double error = evaluation.errorRate();
      if (m_Debug) {
  System.err.println("Cross-validated error rate: "
         + Utils.doubleToString(error, 6, 4));
View Full Code Here

    o_Evaluation = new Evaluation(trainCopy);
    String [] oneROpts = { "-B", ""+getMinimumBucketSize()};
    Classifier oneR = AbstractClassifier.forName("weka.classifiers.rules.OneR", oneROpts);
    if (m_evalUsingTrainingData) {
      oneR.buildClassifier(trainCopy);
      o_Evaluation.evaluateModel(oneR, trainCopy);
    } else {
      /*      o_Evaluation.crossValidateModel("weka.classifiers.rules.OneR",
              trainCopy, 10,
              null, new Random(m_randomSeed)); */
      o_Evaluation.crossValidateModel(oneR, trainCopy, m_folds, new Random(m_randomSeed));
View Full Code Here

    m_NumGenerated = 0;
    double sumOfWeights = train.sumOfWeights();
    for (int j = 0; j < getNumIterations(); j++) {
      performIteration(trainYs, trainFs, probs, trainN, sumOfWeights);
      Evaluation eval = new Evaluation(train);
      eval.evaluateModel(this, test);
      results[j] += eval.correct();
    }
  }
      }
     
View Full Code Here

  sample = trainData.resampleWithWeights(randomInstance, weights);

  // Build and evaluate classifier
  m_Classifiers[m_NumIterationsPerformed].buildClassifier(sample);
  evaluation = new Evaluation(data);
  evaluation.evaluateModel(m_Classifiers[m_NumIterationsPerformed],
         training);
  epsilon = evaluation.errorRate();
  resamplingIterations++;
      } while (Utils.eq(epsilon, 0) &&
        (resamplingIterations < MAX_NUM_RESAMPLING_ITERATIONS));
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.