Package weka.classifiers

Examples of weka.classifiers.Evaluation
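The fragments below are drawn from the Weka codebase and its Explorer GUI. Together they show the typical life cycle of an Evaluation object: construct it from a dataset (optionally with a CostMatrix), feed it a built classifier via evaluateModel() or let it drive cross-validation via crossValidateModel(), then read results back with errorRate(), incorrect(), rootMeanSquaredError(), or the toSummaryString()/toClassDetailsString()/toMatrixString() reports. Each fragment is followed by a short, self-contained sketch that restates the pattern with the class's member fields replaced by plain parameters; the sketches are illustrative, written against the Weka 3.x API used here, and are not themselves taken from Weka.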


  /**
   * Evaluates an individual attribute by building a OneR classifier from
   * just that attribute and the class, and measuring how often it is right.
   *
   * @param attribute the index of the attribute to be evaluated
   * @return the percentage of correctly classified instances, used as
   *         the attribute's merit
   * @throws Exception if the attribute could not be evaluated
   */
  public double evaluateAttribute (int attribute)
    throws Exception {
    int[] featArray = new int[2]; // feat + class
    double errorRate;
    Evaluation o_Evaluation;
    Remove delTransform = new Remove();
    delTransform.setInvertSelection(true);
    // copy the instances
    Instances trainCopy = new Instances(m_trainInstances);
    featArray[0] = attribute;
    featArray[1] = trainCopy.classIndex();
    delTransform.setAttributeIndicesArray(featArray);
    delTransform.setInputFormat(trainCopy);
    trainCopy = Filter.useFilter(trainCopy, delTransform);
    o_Evaluation = new Evaluation(trainCopy);
    String [] oneROpts = { "-B", ""+getMinimumBucketSize()};
    Classifier oneR = Classifier.forName("weka.classifiers.rules.OneR", oneROpts);
    if (m_evalUsingTrainingData) {
      oneR.buildClassifier(trainCopy);
      o_Evaluation.evaluateModel(oneR, trainCopy);
    } else {
      /*      o_Evaluation.crossValidateModel("weka.classifiers.rules.OneR",
              trainCopy, 10,
              null, new Random(m_randomSeed)); */
      o_Evaluation.crossValidateModel(oneR, trainCopy, m_folds, new Random(m_randomSeed));
    }
    errorRate = o_Evaluation.errorRate();
    return (1 - errorRate) * 100.0;
  }
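A minimal, self-contained sketch of the same pattern, with the m_trainInstances, m_folds and m_randomSeed fields replaced by plain arguments. It assumes a loaded Instances with a nominal class and its class index set; the 10-fold/seed-1 choices are illustrative.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.rules.OneR;
import weka.core.Instances;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.Remove;

public class OneRAttributeScore {
  public static double score(Instances data, int attribute) throws Exception {
    // Keep only the candidate attribute and the class attribute.
    Remove keep = new Remove();
    keep.setInvertSelection(true);   // invert: the listed indices are kept
    keep.setAttributeIndicesArray(new int[] {attribute, data.classIndex()});
    keep.setInputFormat(data);
    Instances view = Filter.useFilter(data, keep);

    // Cross-validate OneR on the two-attribute view.
    Evaluation eval = new Evaluation(view);
    eval.crossValidateModel(new OneR(), view, 10, new Random(1));
    return (1 - eval.errorRate()) * 100.0;   // percent correct = merit
  }
}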


         cvParam.m_ParamValue += increment) {
        findParamsByCrossValidation(depth + 1, trainData, random);
      }
    } else {

      Evaluation evaluation = new Evaluation(trainData);

      // Set the classifier options
      String[] options = createOptions();
      if (m_Debug) {
        System.err.print("Setting options for "
                         + m_Classifier.getClass().getName() + ":");
        for (int i = 0; i < options.length; i++) {
          System.err.print(" " + options[i]);
        }
        System.err.println("");
      }
      ((OptionHandler) m_Classifier).setOptions(options);
      for (int j = 0; j < m_NumFolds; j++) {

        // We want to randomize the data the same way for every
        // learning scheme.
        Instances train = trainData.trainCV(m_NumFolds, j, new Random(1));
        Instances test = trainData.testCV(m_NumFolds, j);
        m_Classifier.buildClassifier(train);
        evaluation.setPriors(train);
        evaluation.evaluateModel(m_Classifier, test);
      }
      double error = evaluation.errorRate();
      if (m_Debug) {
        System.err.println("Cross-validated error rate: "
                           + Utils.doubleToString(error, 6, 4));
      }
      if ((m_BestPerformance == -99) || (error < m_BestPerformance)) {
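The fold loop above, extracted into a self-contained sketch (m_NumFolds and m_Classifier become parameters; the fixed Random(1) mirrors the snippet's comment about shuffling identically for every scheme being compared). One Evaluation accumulates statistics over all folds.

import java.util.Random;
import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

public class ManualCrossValidation {
  public static double cvErrorRate(Classifier classifier, Instances data,
                                   int numFolds) throws Exception {
    Evaluation evaluation = new Evaluation(data);
    for (int j = 0; j < numFolds; j++) {
      // trainCV shuffles with the supplied Random; the same seed keeps
      // the folds identical across different classifiers.
      Instances train = data.trainCV(numFolds, j, new Random(1));
      Instances test = data.testCV(numFolds, j);
      classifier.buildClassifier(train);
      evaluation.setPriors(train);   // class priors from this fold's training data
      evaluation.evaluateModel(classifier, test);
    }
    return evaluation.errorRate();   // error accumulated over all folds
  }
}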

    m_attribute = attribute;
    m_cuts = cuts;
    m_values = values;
   
    // Compute the root mean squared error
    Evaluation eval = new Evaluation(insts);
    eval.evaluateModel(this, insts);
    double msq = eval.rootMeanSquaredError();
   
    // Check whether this is the best attribute
    if (msq < m_minMsq) {
      m_minMsq = msq;
    } else {
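The same resubstitution-RMSE measurement as a standalone sketch. LinearRegression stands in for the snippet's `this`; any scheme that handles a numeric class works, and `data` is assumed loaded with its class index set.

import weka.classifiers.Evaluation;
import weka.classifiers.functions.LinearRegression;
import weka.core.Instances;

public class TrainingRmse {
  public static double rmse(Instances data) throws Exception {
    LinearRegression model = new LinearRegression();
    model.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(model, data);   // evaluate on the training data itself
    return eval.rootMeanSquaredError();
  }
}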

    boolean prune = (nodeList.size() > 0);

    //alpha_0 is always zero (unpruned tree)
    alphas[0] = 0;

    Evaluation eval;

    // error of unpruned tree
    if (errors != null) {
      eval = new Evaluation(test);
      eval.evaluateModel(this, test);
      errors[0] = eval.errorRate();
    }

    int iteration = 0;
    double preAlpha = Double.MAX_VALUE;
    while (prune) {

      iteration++;

      // get node with minimum alpha
      SimpleCart nodeToPrune = nodeToPrune(nodeList);

      // do not set m_sons null, want to unprune
      nodeToPrune.m_isLeaf = true;

      // normally would not happen
      if (nodeToPrune.m_Alpha == preAlpha) {
        iteration--;
        treeErrors();
        calculateAlphas();
        nodeList = getInnerNodes();
        prune = (nodeList.size() > 0);
        continue;
      }

      // get alpha-value of node
      alphas[iteration] = nodeToPrune.m_Alpha;

      // log error
      if (errors != null) {
        eval = new Evaluation(test);
        eval.evaluateModel(this, test);
        errors[iteration] = eval.errorRate();
      }
      preAlpha = nodeToPrune.m_Alpha;

      // update errors/alphas
      treeErrors();
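The error-logging step inside this pruning loop reduces to: re-evaluate the current (partially pruned) tree on a fixed test set and record errorRate(). A sketch, with the tree generalized to any Classifier:

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

public class HoldOutError {
  // Called once per pruning iteration in the snippet above; a fresh
  // Evaluation is built each time because the model has changed.
  public static double errorOn(Classifier model, Instances test)
      throws Exception {
    Evaluation eval = new Evaluation(test);
    eval.evaluateModel(model, test);
    return eval.errorRate();
  }
}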

   * pruned) is rooted. This is needed for calculating the alpha-values.
   *
   * @throws Exception   if something goes wrong
   */
  public void modelErrors() throws Exception {
    Evaluation eval = new Evaluation(m_train);

    if (!m_isLeaf) {
      m_isLeaf = true; //temporarily make leaf

      // calculate distribution for evaluation
      eval.evaluateModel(this, m_train);
      m_numIncorrectModel = eval.incorrect();

      m_isLeaf = false;

      for (int i = 0; i < m_Successors.length; i++)
        m_Successors[i].modelErrors();

    } else {
      eval.evaluateModel(this, m_train);
      m_numIncorrectModel = eval.incorrect();
    }      
  }
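modelErrors() uses incorrect(), which returns the weight-summed count of misclassified instances rather than a rate. A standalone sketch of that idiom (J48 is illustrative; the snippet evaluates the tree node itself):

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class TrainingErrors {
  public static double numIncorrect(Instances data) throws Exception {
    J48 tree = new J48();
    tree.buildClassifier(data);
    Evaluation eval = new Evaluation(data);
    eval.evaluateModel(tree, data);
    return eval.incorrect();   // sum of weights of wrongly classified instances
  }
}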

  public static double crossValidate(NaiveBayesUpdateable fullModel,
                                     Instances trainingSet,
                                     Random r) throws Exception {
    // make some copies for fast evaluation of 5-fold xval
    Classifier[] copies = Classifier.makeCopies(fullModel, 5);
    Evaluation eval = new Evaluation(trainingSet);
    // make some splits
    for (int j = 0; j < 5; j++) {
      Instances test = trainingSet.testCV(5, j);
      // unlearn these test instances
      for (int k = 0; k < test.numInstances(); k++) {
        test.instance(k).setWeight(-test.instance(k).weight());
        ((NaiveBayesUpdateable) copies[j]).updateClassifier(test.instance(k));
        // reset the weight back to its original value
        test.instance(k).setWeight(-test.instance(k).weight());
      }
      eval.evaluateModel(copies[j], test);
    }
    return eval.incorrect();
  }
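This snippet avoids rebuilding NaiveBayes five times by "unlearning" each test fold from a copy of the full model (an updateable classifier can subtract an instance by re-adding it with negated weight). The straightforward, slower equivalent is Evaluation's built-in cross-validation:

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.bayes.NaiveBayesUpdateable;
import weka.core.Instances;

public class PlainFiveFold {
  public static double incorrectCount(Instances data, Random r)
      throws Exception {
    Evaluation eval = new Evaluation(data);
    // Builds a fresh model per fold instead of decrementally updating one.
    eval.crossValidateModel(new NaiveBayesUpdateable(), data, 5, r);
    return eval.incorrect();
  }
}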

   * This method must be called if pruning is not to be performed.
   *
   * @throws Exception if an error occurs
   */
  public void installLinearModels() throws Exception {
    Evaluation nodeModelEval;
    if (m_isLeaf) {
      buildLinearModel(m_indices);
    } else {
      if (m_left != null) {
        m_left.installLinearModels();
      }

      if (m_right != null) {
        m_right.installLinearModels();
      }
      buildLinearModel(m_indices);
    }
    nodeModelEval = new Evaluation(m_instances);
    nodeModelEval.evaluateModel(m_nodeModel, m_instances);
    m_rootMeanSquaredError = nodeModelEval.rootMeanSquaredError();
    // save space
    if (!m_saveInstances) {
      m_instances = new Instances(m_instances, 0);
    }
  }
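The space-saving step at the end of installLinearModels() relies on the Instances copy constructor with a capacity argument, which copies only the header (attribute definitions), not the data:

import weka.core.Instances;

public class HeaderOnlyCopy {
  public static Instances headerOf(Instances data) {
    return new Instances(data, 0);   // same attributes, zero instances retained
  }
}

After this, the node keeps its RMSE statistic but no longer pins the training data in memory.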

   * Recursively prune the tree
   *
   * @throws Exception if an error occurs
   */
  public void prune() throws Exception {
    Evaluation nodeModelEval = null;

    if (m_isLeaf) {
      buildLinearModel(m_indices);
      nodeModelEval = new Evaluation(m_instances);

      // count the constant term as a parameter for a leaf
      // Evaluate the model
      nodeModelEval.evaluateModel(m_nodeModel, m_instances);

      m_rootMeanSquaredError = nodeModelEval.rootMeanSquaredError();
    } else {

      // Prune the left and right subtrees
      if (m_left != null) {
        m_left.prune();
      }

      if (m_right != null) {
        m_right.prune();
      }
     
      buildLinearModel(m_indices);
      nodeModelEval = new Evaluation(m_instances);

      double rmsModel;
      double adjustedErrorModel;

      nodeModelEval.evaluateModel(m_nodeModel, m_instances);

      rmsModel = nodeModelEval.rootMeanSquaredError();
      adjustedErrorModel = rmsModel
          * pruningFactor(m_numInstances,
                          m_nodeModel.numParameters() + 1);

      // Evaluate this node (ie its left and right subtrees)
      Evaluation nodeEval = new Evaluation(m_instances);
      double     rmsSubTree;
      double     adjustedErrorNode;
      int   l_params = 0, r_params = 0;

      nodeEval.evaluateModel(this, m_instances);

      rmsSubTree = nodeEval.rootMeanSquaredError();

      if (m_left != null) {
        l_params = m_left.numParameters();
      }

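The pruning decision being set up here comes down to comparing two RMSE figures on the same data: the node's own linear model versus the whole subtree rooted at the node, each inflated by a model-size penalty (pruningFactor(), specific to this tree class). Stripped of the penalty bookkeeping, the comparison looks like this sketch:

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

public class CompareRmse {
  // True if the simpler model does at least as well as the more complex
  // one on the given instances; the caller would then prune.
  public static boolean preferSimpler(Classifier simpler, Classifier complex,
                                      Instances insts) throws Exception {
    Evaluation evalSimple = new Evaluation(insts);
    evalSimple.evaluateModel(simpler, insts);
    Evaluation evalComplex = new Evaluation(insts);
    evalComplex.evaluateModel(complex, insts);
    return evalSimple.rootMeanSquaredError()
        <= evalComplex.rootMeanSquaredError();
  }
}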

      name += cname;
    }
    String cmd = m_ClassifierEditor.getValue().getClass().getName();
    if (m_ClassifierEditor.getValue() instanceof OptionHandler)
      cmd += " " + Utils.joinOptions(
          ((OptionHandler) m_ClassifierEditor.getValue()).getOptions());
    Evaluation eval = null;
    try {
      if (m_CVBut.isSelected()) {
        testMode = 1;
        numFolds = Integer.parseInt(m_CVText.getText());
        if (numFolds <= 1) {
          throw new Exception("Number of folds must be greater than 1");
        }
      } else if (m_PercentBut.isSelected()) {
        testMode = 2;
        percent = Double.parseDouble(m_PercentText.getText());
        if ((percent <= 0) || (percent >= 100)) {
          throw new Exception("Percentage must be between 0 and 100");
        }
      } else if (m_TrainBut.isSelected()) {
        testMode = 3;
      } else if (m_TestSplitBut.isSelected()) {
        testMode = 4;
        // Check the test instance compatibility
        if (source == null) {
          throw new Exception("No user test set has been specified");
        }
        if (!inst.equalHeaders(userTestStructure)) {
          throw new Exception("Train and test set are not compatible");
        }
        userTestStructure.setClassIndex(classIndex);
      } else {
        throw new Exception("Unknown test mode");
      }
      inst.setClassIndex(classIndex);

      // set up the structure of the plottable instances for
      // visualization
      if (saveVis) {
        predInstances = setUpVisualizableInstances(inst);
        predInstances.setClassIndex(inst.classIndex() + 1);
      }

      // Output some header information
      m_Log.logMessage("Started " + cname);
      m_Log.logMessage("Command: " + cmd);
      if (m_Log instanceof TaskLogger) {
        ((TaskLogger)m_Log).taskStarted();
      }
      outBuff.append("=== Run information ===\n\n");
      outBuff.append("Scheme:       " + cname);
      if (classifier instanceof OptionHandler) {
        String [] o = ((OptionHandler) classifier).getOptions();
        outBuff.append(" " + Utils.joinOptions(o));
      }
      outBuff.append("\n");
      outBuff.append("Relation:     " + inst.relationName() + '\n');
      outBuff.append("Instances:    " + inst.numInstances() + '\n');
      outBuff.append("Attributes:   " + inst.numAttributes() + '\n');
      if (inst.numAttributes() < 100) {
        for (int i = 0; i < inst.numAttributes(); i++) {
          outBuff.append("              " + inst.attribute(i).name() + '\n');
        }
      } else {
        outBuff.append("              [list of attributes omitted]\n");
      }

      outBuff.append("Test mode:    ");
      switch (testMode) {
        case 3: // Test on training
          outBuff.append("evaluate on training data\n");
          break;
        case 1: // CV mode
          outBuff.append("" + numFolds + "-fold cross-validation\n");
          break;
        case 2: // Percent split
          outBuff.append("split " + percent + "% train, remainder test\n");
          break;
        case 4: // Test on user split
          if (source.isIncremental())
            outBuff.append("user supplied test set: "
                + " size unknown (reading incrementally)\n");
          else
            outBuff.append("user supplied test set: "
                + source.getDataSet().numInstances() + " instances\n");
          break;
      }
      if (costMatrix != null) {
        outBuff.append("Evaluation cost matrix:\n")
            .append(costMatrix.toString()).append("\n");
      }
      outBuff.append("\n");
      m_History.addResult(name, outBuff);
      m_History.setSingle(name);
     
      // Build the model and output it.
      if (outputModel || (testMode == 3) || (testMode == 4)) {
        m_Log.statusMessage("Building model on training data...");

        trainTimeStart = System.currentTimeMillis();
        classifier.buildClassifier(inst);
        trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
      }

      if (outputModel) {
        outBuff.append("=== Classifier model (full training set) ===\n\n");
        outBuff.append(classifier.toString() + "\n");
        outBuff.append("\nTime taken to build model: "
            + Utils.doubleToString(trainTimeElapsed / 1000.0, 2)
            + " seconds\n\n");
        m_History.updateResult(name);
        if (classifier instanceof Drawable) {
          grph = null;
          try {
            grph = ((Drawable) classifier).graph();
          } catch (Exception ex) {
          }
        }
        // copy full model for output
        SerializedObject so = new SerializedObject(classifier);
        fullClassifier = (Classifier) so.getObject();
      }
     
      switch (testMode) {
        case 3: // Test on training
          m_Log.statusMessage("Evaluating on training data...");
          eval = new Evaluation(inst, costMatrix);

          if (outputPredictionsText) {
            printPredictionsHeader(outBuff, inst, "training set");
          }

          for (int jj = 0; jj < inst.numInstances(); jj++) {
            processClassifierPrediction(inst.instance(jj), classifier,
                eval, predInstances, plotShape, plotSize);

            if (outputPredictionsText) {
              outBuff.append(predictionText(classifier, inst.instance(jj), jj + 1));
            }
            if ((jj % 100) == 0) {
              m_Log.statusMessage("Evaluating on training data. Processed "
                  + jj + " instances...");
            }
          }
          if (outputPredictionsText) {
            outBuff.append("\n");
          }
          outBuff.append("=== Evaluation on training set ===\n");
          break;

        case 1: // CV mode
          m_Log.statusMessage("Randomizing instances...");
          int rnd = 1;
          try {
            rnd = Integer.parseInt(m_RandomSeedText.getText().trim());
            // System.err.println("Using random seed " + rnd);
          } catch (Exception ex) {
            m_Log.logMessage("Trouble parsing random seed value");
            rnd = 1;
          }
          Random random = new Random(rnd);
          inst.randomize(random);
          if (inst.attribute(classIndex).isNominal()) {
            m_Log.statusMessage("Stratifying instances...");
            inst.stratify(numFolds);
          }
          eval = new Evaluation(inst, costMatrix);

          if (outputPredictionsText) {
            printPredictionsHeader(outBuff, inst, "test data");
          }

          // Make some splits and do a CV
          for (int fold = 0; fold < numFolds; fold++) {
            m_Log.statusMessage("Creating splits for fold "
                + (fold + 1) + "...");
            Instances train = inst.trainCV(numFolds, fold, random);
            eval.setPriors(train);
            m_Log.statusMessage("Building model for fold "
                + (fold + 1) + "...");
            Classifier current = null;
            try {
              current = Classifier.makeCopy(template);
            } catch (Exception ex) {
              m_Log.logMessage("Problem copying classifier: " + ex.getMessage());
            }
            current.buildClassifier(train);
            Instances test = inst.testCV(numFolds, fold);
            m_Log.statusMessage("Evaluating model for fold "
                + (fold + 1) + "...");
            for (int jj = 0; jj < test.numInstances(); jj++) {
              processClassifierPrediction(test.instance(jj), current,
                  eval, predInstances, plotShape, plotSize);
              if (outputPredictionsText) {
                outBuff.append(predictionText(current, test.instance(jj), jj + 1));
              }
            }
          }
          if (outputPredictionsText) {
            outBuff.append("\n");
          }
          if (inst.attribute(classIndex).isNominal()) {
            outBuff.append("=== Stratified cross-validation ===\n");
          } else {
            outBuff.append("=== Cross-validation ===\n");
          }
          break;

        case 2: // Percent split
          if (!m_PreserveOrderBut.isSelected()) {
            m_Log.statusMessage("Randomizing instances...");
            try {
              rnd = Integer.parseInt(m_RandomSeedText.getText().trim());
            } catch (Exception ex) {
              m_Log.logMessage("Trouble parsing random seed value");
              rnd = 1;
            }
            inst.randomize(new Random(rnd));
          }
          int trainSize = (int) Math.round(inst.numInstances() * percent / 100);
          int testSize = inst.numInstances() - trainSize;
          Instances train = new Instances(inst, 0, trainSize);
          Instances test = new Instances(inst, trainSize, testSize);
          m_Log.statusMessage("Building model on training split ("
              + trainSize + " instances)...");
          Classifier current = null;
          try {
            current = Classifier.makeCopy(template);
          } catch (Exception ex) {
            m_Log.logMessage("Problem copying classifier: " + ex.getMessage());
          }
          current.buildClassifier(train);
          eval = new Evaluation(train, costMatrix);
          m_Log.statusMessage("Evaluating on test split...");

          if (outputPredictionsText) {
            printPredictionsHeader(outBuff, inst, "test split");
          }

          for (int jj = 0; jj < test.numInstances(); jj++) {
            processClassifierPrediction(test.instance(jj), current,
                eval, predInstances, plotShape, plotSize);
            if (outputPredictionsText) {
              outBuff.append(predictionText(current, test.instance(jj), jj + 1));
            }
            if ((jj % 100) == 0) {
              m_Log.statusMessage("Evaluating on test split. Processed "
                  + jj + " instances...");
            }
          }
          if (outputPredictionsText) {
            outBuff.append("\n");
          }
          outBuff.append("=== Evaluation on test split ===\n");
          break;

        case 4: // Test on user split
          m_Log.statusMessage("Evaluating on test data...");
          eval = new Evaluation(inst, costMatrix);

          if (outputPredictionsText) {
            printPredictionsHeader(outBuff, inst, "test set");
          }

          Instance instance;
          int jj = 0;
          while (source.hasMoreElements(userTestStructure)) {
            instance = source.nextElement(userTestStructure);
            processClassifierPrediction(instance, classifier,
                eval, predInstances, plotShape, plotSize);
            if (outputPredictionsText) {
              outBuff.append(predictionText(classifier, instance, jj + 1));
            }
            if ((++jj % 100) == 0) {
              m_Log.statusMessage("Evaluating on test data. Processed "
                  + jj + " instances...");
            }
          }

          if (outputPredictionsText) {
            outBuff.append("\n");
          }
          outBuff.append("=== Evaluation on test set ===\n");
          break;

        default:
          throw new Exception("Test mode not implemented");
      }
     
      if (outputSummary) {
        outBuff.append(eval.toSummaryString(outputEntropy) + "\n");
      }

      if (inst.attribute(classIndex).isNominal()) {

        if (outputPerClass) {
          outBuff.append(eval.toClassDetailsString() + "\n");
        }

        if (outputConfusion) {
          outBuff.append(eval.toMatrixString() + "\n");
        }
      }

      if ((fullClassifier instanceof Sourcable)
          && m_OutputSourceCode.isSelected()) {
        outBuff.append("=== Source code ===\n\n");
        outBuff.append(Evaluation.wekaStaticWrapper(
            ((Sourcable) fullClassifier),
            m_SourceCodeClass.getText()));
      }

      m_History.updateResult(name);
      m_Log.logMessage("Finished " + cname);
      m_Log.statusMessage("OK");
    } catch (Exception ex) {
      ex.printStackTrace();
      m_Log.logMessage(ex.getMessage());
      JOptionPane.showMessageDialog(ClassifierPanel.this,
          "Problem evaluating classifier:\n" + ex.getMessage(),
          "Evaluate classifier",
          JOptionPane.ERROR_MESSAGE);
      m_Log.statusMessage("Problem evaluating classifier");
    } finally {
      try {
        if (!saveVis && outputModel) {
          FastVector vv = new FastVector();
          vv.addElement(fullClassifier);
          Instances trainHeader = new Instances(m_Instances, 0);
          trainHeader.setClassIndex(classIndex);
          vv.addElement(trainHeader);
          if (grph != null) {
            vv.addElement(grph);
          }
          m_History.addObject(name, vv);
        } else if (saveVis && predInstances != null
            && predInstances.numInstances() > 0) {
          if (predInstances.attribute(predInstances.classIndex()).isNumeric()) {
            postProcessPlotInfo(plotSize);
          }
          m_CurrentVis = new VisualizePanel();
          m_CurrentVis.setName(name + " (" + inst.relationName() + ")");
          m_CurrentVis.setLog(m_Log);
          PlotData2D tempd = new PlotData2D(predInstances);
          tempd.setShapeSize(plotSize);
          tempd.setShapeType(plotShape);
          tempd.setPlotName(name + " (" + inst.relationName() + ")");
          tempd.addInstanceNumberAttribute();

          m_CurrentVis.addPlot(tempd);
          m_CurrentVis.setColourIndex(predInstances.classIndex() + 1);

          FastVector vv = new FastVector();
          if (outputModel) {
            vv.addElement(fullClassifier);
            Instances trainHeader = new Instances(m_Instances, 0);
            trainHeader.setClassIndex(classIndex);
            vv.addElement(trainHeader);
            if (grph != null) {
              vv.addElement(grph);
            }
          }
          vv.addElement(m_CurrentVis);

          if ((eval != null) && (eval.predictions() != null)) {
            vv.addElement(eval.predictions());
            vv.addElement(inst.classAttribute());
          }
          m_History.addObject(name, vv);
        }
      } catch (Exception ex) {
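Stripped of the GUI logging and plotting, the percent-split branch above reduces to the following sketch. J48 and the seed are illustrative; note that the Evaluation is constructed from the training split so the class priors come from it, and `new Evaluation(train, costMatrix)` would make the reports cost-sensitive as in the panel.

import java.util.Random;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;

public class PercentSplitEval {
  public static String evaluate(Instances data, double percent)
      throws Exception {
    Instances copy = new Instances(data);
    copy.randomize(new Random(1));              // shuffle before splitting
    int trainSize = (int) Math.round(copy.numInstances() * percent / 100);
    int testSize = copy.numInstances() - trainSize;
    Instances train = new Instances(copy, 0, trainSize);
    Instances test = new Instances(copy, trainSize, testSize);

    J48 classifier = new J48();
    classifier.buildClassifier(train);

    Evaluation eval = new Evaluation(train);    // priors from the training split
    eval.evaluateModel(classifier, test);
    return eval.toSummaryString();
  }
}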

            boolean outputEntropy = m_OutputEntropyBut.isSelected();
            boolean saveVis = m_StorePredictionsBut.isSelected();
            boolean outputPredictionsText =
              m_OutputPredictionsTextBut.isSelected();
            String grph = null;   
            Evaluation eval = null;

            try {

              boolean incrementalLoader = (m_TestLoader instanceof IncrementalConverter);
              if (m_TestLoader != null && m_TestLoader.getStructure() != null) {
                m_TestLoader.reset();
                source = new DataSource(m_TestLoader);
                userTestStructure = source.getStructure();
              }
              // Check the test instance compatibility
              if (source == null) {
                throw new Exception("No user test set has been specified");
              }
              if (trainHeader != null) {
                if (trainHeader.classIndex() >
                    userTestStructure.numAttributes()-1)
                  throw new Exception("Train and test set are not compatible");
                userTestStructure.setClassIndex(trainHeader.classIndex());
                if (!trainHeader.equalHeaders(userTestStructure)) {
                  throw new Exception("Train and test set are not compatible");
                }
              } else {
                userTestStructure.
                  setClassIndex(userTestStructure.numAttributes()-1);
              }
              if (m_Log instanceof TaskLogger) {
                ((TaskLogger)m_Log).taskStarted();
              }
              m_Log.statusMessage("Evaluating on test data...");
              m_Log.logMessage("Re-evaluating classifier (" + name
                               + ") on test set");
              eval = new Evaluation(userTestStructure, costMatrix);
              eval.useNoPriors();
     
              // set up the structure of the plottable instances for
              // visualization if selected
              if (saveVis) {
                predInstances = setUpVisualizableInstances(userTestStructure);
                predInstances.setClassIndex(userTestStructure.classIndex()+1);
              }
     
              outBuff.append("\n=== Re-evaluation on test set ===\n\n");
              outBuff.append("User supplied test set\n")
              outBuff.append("Relation:     "
                             + userTestStructure.relationName() + '\n');
              if (incrementalLoader)
          outBuff.append("Instances:     unknown (yet). Reading incrementally\n");
              else
          outBuff.append("Instances:    " + source.getDataSet().numInstances() + "\n");
              outBuff.append("Attributes:   "
            + userTestStructure.numAttributes()
            + "\n\n");
              if (trainHeader == null)
                outBuff.append("NOTE - if test set is not compatible then results are "
                               + "unpredictable\n\n");

              if (outputPredictionsText) {
                outBuff.append("=== Predictions on test set ===\n\n");
                outBuff.append(" inst#,    actual, predicted, error");
                if (userTestStructure.classAttribute().isNominal()) {
                  outBuff.append(", probability distribution");
                }
                outBuff.append("\n");
              }

              Instance instance;
              int jj = 0;
              while (source.hasMoreElements(userTestStructure)) {
                instance = source.nextElement(userTestStructure);
                processClassifierPrediction(instance, classifier,
                    eval, predInstances, plotShape, plotSize);
                if (outputPredictionsText) {
                  outBuff.append(predictionText(classifier, instance, jj + 1));
                }
                if ((++jj % 100) == 0) {
                  m_Log.statusMessage("Evaluating on test data. Processed "
                      + jj + " instances...");
                }
              }

              if (outputPredictionsText) {
                outBuff.append("\n");
              }
     
              if (outputSummary) {
                outBuff.append(eval.toSummaryString(outputEntropy) + "\n");
              }
     
              if (userTestStructure.classAttribute().isNominal()) {
 
                if (outputPerClass) {
                  outBuff.append(eval.toClassDetailsString() + "\n");
                }
 
                if (outputConfusion) {
                  outBuff.append(eval.toMatrixString() + "\n");
                }
              }
     
              m_History.updateResult(name);
              m_Log.logMessage("Finished re-evaluation");
              m_Log.statusMessage("OK");
            } catch (Exception ex) {
              ex.printStackTrace();
              m_Log.logMessage(ex.getMessage());
              JOptionPane.showMessageDialog(ClassifierPanel.this,
                                            "Problem evaluating classifier:\n"
                                            + ex.getMessage(),
                                            "Evaluate classifier",
                                            JOptionPane.ERROR_MESSAGE);
              m_Log.statusMessage("Problem evaluating classifier");
            } finally {
              try {
                if (predInstances != null && predInstances.numInstances() > 0) {
                  if (predInstances.attribute(predInstances.classIndex())
                      .isNumeric()) {
                    postProcessPlotInfo(plotSize);
                  }
                  m_CurrentVis = new VisualizePanel();
                  m_CurrentVis.setName(name+" ("
                                       +userTestStructure.relationName()+")");
                  m_CurrentVis.setLog(m_Log);
                  PlotData2D tempd = new PlotData2D(predInstances);
                  tempd.setShapeSize(plotSize);
                  tempd.setShapeType(plotShape);
                  tempd.setPlotName(name+" ("+userTestStructure.relationName()
                                    +")");
                  tempd.addInstanceNumberAttribute();
   
                  m_CurrentVis.addPlot(tempd);
                  m_CurrentVis.setColourIndex(predInstances.classIndex()+1);
   
                  if (classifier instanceof Drawable) {
                    try {
                      grph = ((Drawable)classifier).graph();
                    } catch (Exception ex) {
                    }
                  }

                  if (saveVis) {
                    FastVector vv = new FastVector();
                    vv.addElement(classifier);
                    if (trainHeader != null) vv.addElement(trainHeader);
                    vv.addElement(m_CurrentVis);
                    if (grph != null) {
                      vv.addElement(grph);
                    }
                    if ((eval != null) && (eval.predictions() != null)) {
                      vv.addElement(eval.predictions());
                      vv.addElement(userTestStructure.classAttribute());
                    }
                    m_History.addObject(name, vv);
                  } else {
                    FastVector vv = new FastVector();
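The re-evaluation branch, minus the GUI plumbing: evaluate an already-built classifier on a separate test file, calling useNoPriors() because no training data is available to estimate class priors from. The last-attribute class index and the file-based loading are assumptions; DataSource here is weka.core.converters.ConverterUtils.DataSource, the same loader wrapper the panel uses.

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ReEvaluate {
  public static String onTestFile(Classifier built, String testFile)
      throws Exception {
    DataSource source = new DataSource(testFile);
    Instances test = source.getDataSet();
    test.setClassIndex(test.numAttributes() - 1);  // assume class is last

    Evaluation eval = new Evaluation(test);
    eval.useNoPriors();               // priors unknown: no training set here
    eval.evaluateModel(built, test);
    return eval.toSummaryString();
  }
}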
