Package weka.classifiers

Examples of weka.classifiers.Evaluation
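The snippets below are excerpts from several Weka classes that use weka.classifiers.Evaluation. For orientation, here is a minimal, self-contained sketch of the basic pattern they share; the ARFF file name and the choice of J48 are illustrative and not taken from the excerpts.

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Random;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class EvaluationBasics {

  public static void main(String[] args) throws Exception {
    // load a dataset and declare the last attribute as the class
    Instances data = new Instances(new BufferedReader(new FileReader("iris.arff")));
    data.setClassIndex(data.numAttributes() - 1);

    // simple 2/3 vs. 1/3 train/test split for illustration
    data.randomize(new Random(1));
    int trainSize = (int) Math.round(data.numInstances() * 2.0 / 3.0);
    Instances train = new Instances(data, 0, trainSize);
    Instances test = new Instances(data, trainSize, data.numInstances() - trainSize);

    // train on the training split, evaluate on the held-out split
    Classifier cls = new J48();
    cls.buildClassifier(train);
    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(cls, test);
    System.out.println(eval.toSummaryString());
    System.out.println("Error rate: " + eval.errorRate());

    // alternatively, 10-fold cross-validation on the full dataset
    Evaluation cv = new Evaluation(data);
    cv.crossValidateModel(new J48(), data, 10, new Random(1));
    System.out.println(cv.toSummaryString());
  }
}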


   * @throws Exception if subset can't be evaluated
   */
  protected double estimatePerformance(BitSet feature_set, int num_atts)
  throws Exception {

    m_evaluation = new Evaluation(m_theInstances);
    int i;
    int [] fs = new int [num_atts];

    double [] instA = new double [num_atts];
    int classI = m_theInstances.classIndex();


    delTransform.setInputFormat(trainCopy);
    trainCopy = Filter.useFilter(trainCopy, delTransform);

    // max of 5 repetitions of cross validation
    for (i = 0; i < 5; i++) {
      m_Evaluation = new Evaluation(trainCopy);
      m_Evaluation.crossValidateModel(m_BaseClassifier, trainCopy, m_folds, Rnd);
      repError[i] = m_Evaluation.errorRate();

      // check on the standard deviation
      if (!repeat(repError, i + 1)) {
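The fragment above breaks off just after the standard-deviation check. A compact, stand-alone sketch of the same idea (repeat cross-validation several times and average the error rates) could look like the following; the early-stopping repeat(...) test used in the original is omitted, and the names are illustrative.

import java.util.Random;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

public class RepeatedCVSketch {

  // average error rate over several cross-validation repetitions,
  // each with a different fold split
  public static double repeatedCVError(Classifier base, Instances data,
                                       int folds, int repetitions) throws Exception {
    double sum = 0;
    for (int i = 0; i < repetitions; i++) {
      Evaluation evaluation = new Evaluation(data);
      evaluation.crossValidateModel(base, data, folds, new Random(i + 1));
      sum += evaluation.errorRate();
    }
    return sum / repetitions;
  }
}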

      throw new Exception("On-demand cost file doesn't exist: " + costFile);
    }
    CostMatrix costMatrix = new CostMatrix(new BufferedReader(
        new FileReader(costFile)));
   
    Evaluation eval = new Evaluation(train, costMatrix);   
    m_Classifier = Classifier.makeCopy(m_Template);
   
    trainTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    m_Classifier.buildClassifier(train);
    if(canMeasureCPUTime)
      trainCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
    testTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    eval.evaluateModel(m_Classifier, test);
    if(canMeasureCPUTime)
      testCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    testTimeElapsed = System.currentTimeMillis() - testTimeStart;
    thMonitor = null;
   
    m_result = eval.toSummaryString();
    // The results stored are all per instance -- can be multiplied by the
    // number of instances to get absolute numbers
    int current = 0;
    result[current++] = new Double(train.numInstances());
    result[current++] = new Double(eval.numInstances());
   
    result[current++] = new Double(eval.correct());
    result[current++] = new Double(eval.incorrect());
    result[current++] = new Double(eval.unclassified());
    result[current++] = new Double(eval.pctCorrect());
    result[current++] = new Double(eval.pctIncorrect());
    result[current++] = new Double(eval.pctUnclassified());
    result[current++] = new Double(eval.totalCost());
    result[current++] = new Double(eval.avgCost());
   
    result[current++] = new Double(eval.meanAbsoluteError());
    result[current++] = new Double(eval.rootMeanSquaredError());
    result[current++] = new Double(eval.relativeAbsoluteError());
    result[current++] = new Double(eval.rootRelativeSquaredError());
   
    result[current++] = new Double(eval.SFPriorEntropy());
    result[current++] = new Double(eval.SFSchemeEntropy());
    result[current++] = new Double(eval.SFEntropyGain());
    result[current++] = new Double(eval.SFMeanPriorEntropy());
    result[current++] = new Double(eval.SFMeanSchemeEntropy());
    result[current++] = new Double(eval.SFMeanEntropyGain());
   
    // K&B stats
    result[current++] = new Double(eval.KBInformation());
    result[current++] = new Double(eval.KBMeanInformation());
    result[current++] = new Double(eval.KBRelativeInformation());
   
    // Timing stats
    result[current++] = new Double(trainTimeElapsed / 1000.0);
    result[current++] = new Double(testTimeElapsed / 1000.0);
    if(canMeasureCPUTime) {
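The cost-sensitive part of the excerpt above reduces to the pattern sketched below: build a CostMatrix from a reader, hand it to the Evaluation constructor, and read the cost statistics afterwards. The cost file name and the J48 classifier are illustrative, and the timing code from the original is left out.

import java.io.BufferedReader;
import java.io.FileReader;

import weka.classifiers.Classifier;
import weka.classifiers.CostMatrix;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class CostSensitiveEvalSketch {

  public static void evaluateWithCosts(Instances train, Instances test,
                                       String costFile) throws Exception {
    // read the misclassification cost matrix from a file
    CostMatrix costMatrix = new CostMatrix(new BufferedReader(new FileReader(costFile)));

    Classifier cls = new J48();
    cls.buildClassifier(train);

    // an Evaluation constructed with a cost matrix also tracks cost statistics
    Evaluation eval = new Evaluation(train, costMatrix);
    eval.evaluateModel(cls, test);

    System.out.println(eval.toSummaryString());
    System.out.println("Total cost:   " + eval.totalCost());
    System.out.println("Average cost: " + eval.avgCost());
  }
}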

    boolean prune = (nodeList.size() > 0);

    //alpha_0 is always zero (unpruned tree)
    alphas[0] = 0;

    Evaluation eval;

    // error of unpruned tree
    if (errors != null) {
      eval = new Evaluation(test);
      eval.evaluateModel(this, test);
      errors[0] = eval.errorRate();
    }

    int iteration = 0;
    double preAlpha = Double.MAX_VALUE;
    while (prune) {

      iteration++;

      // get node with minimum alpha
      SimpleCart nodeToPrune = nodeToPrune(nodeList);

      // do not set the successors to null; we may want to unprune later
      nodeToPrune.m_isLeaf = true;

      // normally would not happen
      if (nodeToPrune.m_Alpha == preAlpha) {
        iteration--;
        treeErrors();
        calculateAlphas();
        nodeList = getInnerNodes();
        prune = (nodeList.size() > 0);
        continue;
      }

      // get alpha-value of node
      alphas[iteration] = nodeToPrune.m_Alpha;

      // log error
      if (errors != null) {
        eval = new Evaluation(test);
        eval.evaluateModel(this, test);
        errors[iteration] = eval.errorRate();
      }
      preAlpha = nodeToPrune.m_Alpha;

      //update errors/alphas
      treeErrors();

   * pruned) is rooted. This is needed for calculating the alpha-values.
   *
   * @throws Exception   if something goes wrong
   */
  public void modelErrors() throws Exception{
    Evaluation eval = new Evaluation(m_train);

    if (!m_isLeaf) {
      m_isLeaf = true; //temporarily make leaf

      // calculate distribution for evaluation
      eval.evaluateModel(this, m_train);
      m_numIncorrectModel = eval.incorrect();

      m_isLeaf = false;

      for (int i = 0; i < m_Successors.length; i++)
        m_Successors[i].modelErrors();

    } else {
      eval.evaluateModel(this, m_train);
      m_numIncorrectModel = eval.incorrect();
    }      
  }

    }

    // build the classifier
    m_Classifier.buildClassifier(trainCopy);

    m_Evaluation = new Evaluation(trainCopy);
    if (!m_useTraining) {
      m_Evaluation.evaluateModel(m_Classifier, testCopy);
    } else {
      m_Evaluation.evaluateModel(m_Classifier, trainCopy);
    }

    testCopy = Filter.useFilter(testCopy, delTransform);

    // build the classifier
    m_Classifier.buildClassifier(trainCopy);

    m_Evaluation = new Evaluation(trainCopy);
    m_Evaluation.evaluateModel(m_Classifier, testCopy);

    if (m_trainingInstances.classAttribute().isNominal()) {
      errorRate = m_Evaluation.errorRate();
    } else {

    int bestIndex = -1;
    double bestPerformance = Double.NaN;
    int numClassifiers = m_Classifiers.length;
    for (int i = 0; i < numClassifiers; i++) {
      Classifier currentClassifier = getClassifier(i);
      Evaluation evaluation;
      if (m_NumXValFolds > 1) {
        evaluation = new Evaluation(newData);
        for (int j = 0; j < m_NumXValFolds; j++) {

          // We want to randomize the data the same way for every
          // learning scheme.
          train = newData.trainCV(m_NumXValFolds, j, new Random(1));
          test = newData.testCV(m_NumXValFolds, j);
          currentClassifier.buildClassifier(train);
          evaluation.setPriors(train);
          evaluation.evaluateModel(currentClassifier, test);
        }
      } else {
        currentClassifier.buildClassifier(train);
        evaluation = new Evaluation(train);
        evaluation.evaluateModel(currentClassifier, test);
      }

      double error = evaluation.errorRate();
      if (m_Debug) {
        System.err.println("Error rate: " + Utils.doubleToString(error, 6, 4)
                           + " for classifier "
                           + currentClassifier.getClass().getName());
      }
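The selection loop above boils down to estimating each candidate's error rate with an Evaluation and keeping the classifier with the lowest estimate. The condensed sketch below uses crossValidateModel instead of the manual fold loop with setPriors shown in the excerpt; the class and method names are illustrative.

import java.util.Random;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

public class SchemeSelectionSketch {

  // index of the candidate with the lowest cross-validated error rate
  public static int selectBest(Classifier[] candidates, Instances data,
                               int folds) throws Exception {
    int bestIndex = -1;
    double bestError = Double.MAX_VALUE;
    for (int i = 0; i < candidates.length; i++) {
      Evaluation evaluation = new Evaluation(data);
      evaluation.crossValidateModel(candidates[i], data, folds, new Random(1));
      double error = evaluation.errorRate();
      if (error < bestError) {
        bestError = error;
        bestIndex = i;
      }
    }
    return bestIndex;
  }
}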

      throw new Exception("On-demand cost file doesn't exist: " + costFile);
    }
    CostMatrix costMatrix = new CostMatrix(new BufferedReader(
    new FileReader(costFile)));
   
    Evaluation eval = new Evaluation(train, costMatrix);   
    m_Classifier = Classifier.makeCopy(m_Template);
   
    trainTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    m_Classifier.buildClassifier(train);
    if(canMeasureCPUTime)
      trainCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
    testTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    eval.evaluateModel(m_Classifier, test);
    if(canMeasureCPUTime)
      testCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    testTimeElapsed = System.currentTimeMillis() - testTimeStart;
    thMonitor = null;
   
    m_result = eval.toSummaryString();
    // The results stored are all per instance -- can be multiplied by the
    // number of instances to get absolute numbers
    int current = 0;
    result[current++] = new Double(train.numInstances());
    result[current++] = new Double(eval.numInstances());
   
    result[current++] = new Double(eval.correct());
    result[current++] = new Double(eval.incorrect());
    result[current++] = new Double(eval.unclassified());
    result[current++] = new Double(eval.pctCorrect());
    result[current++] = new Double(eval.pctIncorrect());
    result[current++] = new Double(eval.pctUnclassified());
    result[current++] = new Double(eval.totalCost());
    result[current++] = new Double(eval.avgCost());
   
    result[current++] = new Double(eval.meanAbsoluteError());
    result[current++] = new Double(eval.rootMeanSquaredError());
    result[current++] = new Double(eval.relativeAbsoluteError());
    result[current++] = new Double(eval.rootRelativeSquaredError());
   
    result[current++] = new Double(eval.SFPriorEntropy());
    result[current++] = new Double(eval.SFSchemeEntropy());
    result[current++] = new Double(eval.SFEntropyGain());
    result[current++] = new Double(eval.SFMeanPriorEntropy());
    result[current++] = new Double(eval.SFMeanSchemeEntropy());
    result[current++] = new Double(eval.SFMeanEntropyGain());
   
    // K&B stats
    result[current++] = new Double(eval.KBInformation());
    result[current++] = new Double(eval.KBMeanInformation());
    result[current++] = new Double(eval.KBRelativeInformation());
   
    // Timing stats
    result[current++] = new Double(trainTimeElapsed / 1000.0);
    result[current++] = new Double(testTimeElapsed / 1000.0);
    if(canMeasureCPUTime) {
View Full Code Here

    double[][] probs = initialProbs(numInstances);
    m_NumGenerated = 0;
    double sumOfWeights = train.sumOfWeights();
    for (int j = 0; j < getNumIterations(); j++) {
      performIteration(trainYs, trainFs, probs, trainN, sumOfWeights);
      Evaluation eval = new Evaluation(train);
      eval.evaluateModel(this, test);
      results[j] += eval.correct();
    }
  }
      }
     
      // Find the number of iterations with the lowest error
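The last fragment records eval.correct() after each boosting iteration so that the best-performing iteration count can be chosen afterwards. A hypothetical stand-alone version of that idea is sketched below; it rebuilds a LogitBoost model for each setting rather than updating one incrementally as the original does, and the names are illustrative.

import weka.classifiers.Evaluation;
import weka.classifiers.meta.LogitBoost;
import weka.core.Instances;

public class IterationSelectionSketch {

  // number of iterations that classified the most test instances correctly
  public static int bestIterations(Instances train, Instances test,
                                   int maxIterations) throws Exception {
    double bestCorrect = -1;
    int best = 1;
    for (int its = 1; its <= maxIterations; its++) {
      LogitBoost booster = new LogitBoost();
      booster.setNumIterations(its);
      booster.buildClassifier(train);
      Evaluation eval = new Evaluation(train);
      eval.evaluateModel(booster, test);
      if (eval.correct() > bestCorrect) {
        bestCorrect = eval.correct();
        best = its;
      }
    }
    return best;
  }
}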
