Package weka.classifiers.trees

Examples of weka.classifiers.trees.J48
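The fragments below come from the J48 implementation and from related tree classes that reuse its model-selection machinery. For context, here is a minimal, self-contained sketch of training and cross-validating a J48 tree; the ARFF path is a placeholder and the options shown are the scheme defaults.

import java.util.Random;

import weka.classifiers.Evaluation;
import weka.classifiers.trees.J48;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class J48Example {
  public static void main(String[] args) throws Exception {
    // load an ARFF file (placeholder path) and use the last attribute as the class
    Instances data = DataSource.read("weather.nominal.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // J48 with default pruning: confidence factor 0.25, at least 2 instances per leaf
    J48 tree = new J48();
    tree.setConfidenceFactor(0.25f);
    tree.setMinNumObj(2);
    tree.buildClassifier(data);

    // 10-fold cross-validation, then print the tree and a summary of the results
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(tree, data, 10, new Random(1));
    System.out.println(tree);
    System.out.println(eval.toSummaryString());
  }
}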


    } else {
      getSplitPoint();
      m_numSubsets = 2;
    }
    //create distribution for data
    m_distribution = new Distribution(data, this);
  }


      double[][] dataZs, double[][] dataWs) throws Exception {

    int numAttributes = data.numAttributes();

    if (numAttributes < 2)
      throw new Exception("Can't select Model without non-class attribute");
    if (data.numInstances() < m_minNumInstances)
      return new NoSplit(new Distribution(data));

    double bestGain = -Double.MAX_VALUE;
    int bestAttribute = -1;

    // try a split on every attribute except the class attribute
    for (int i = 0; i < numAttributes; i++) {
      if (i != data.classIndex()) {

        // build a split on attribute i
        ResidualSplit split = new ResidualSplit(i);
        split.buildClassifier(data, dataZs, dataWs);

        if (split.checkModel(m_minNumInstances)) {

          // evaluate the split and remember the best attribute so far
          double gain = split.entropyGain();
          if (gain > bestGain) {
            bestGain = gain;
            bestAttribute = i;
          }
        }
      }
    }

    if (bestGain >= m_minInfoGain) {
      // return the best split found
      ResidualSplit split = new ResidualSplit(bestAttribute);
      split.buildClassifier(data, dataZs, dataWs);
      return split;
    } else {
      // could not find any split with enough information gain
      return new NoSplit(new Distribution(data));
    }
  }
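The selection loop above builds a ResidualSplit for every non-class attribute, keeps the one with the largest entropy gain, and falls back to a NoSplit when no candidate reaches m_minInfoGain. The gain itself is computed inside ResidualSplit; purely as an illustration (not Weka's internal code), an information-gain calculation over class counts looks roughly like this:

// Illustrative only: information gain from class counts, not Weka's ResidualSplit code.
public final class InfoGain {

  /** Entropy (in bits) of a vector of class counts. */
  static double entropy(double[] counts) {
    double total = 0, e = 0;
    for (double c : counts) total += c;
    for (double c : counts)
      if (c > 0) e -= (c / total) * (Math.log(c / total) / Math.log(2));
    return e;
  }

  /**
   * Gain of a split: entropy of the parent counts minus the weighted
   * entropy of the per-branch counts (branchCounts[branch][class]).
   */
  static double infoGain(double[] parentCounts, double[][] branchCounts) {
    double total = 0;
    for (double c : parentCounts) total += c;
    double after = 0;
    for (double[] branch : branchCounts) {
      double branchTotal = 0;
      for (double c : branch) branchTotal += c;
      after += (branchTotal / total) * entropy(branch);
    }
    return entropy(parentCounts) - after;
  }
}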

    Instances[] localInstances;
    double errors = 0;
    int i;

    if (m_isLeaf)
      return getEstimatedErrorsForDistribution(new Distribution(data));
    else {
      Distribution savedDist = m_localModel.distribution();
      m_localModel.resetDistribution(data);
      localInstances = (Instances[]) m_localModel.split(data);
      //m_localModel.m_distribution = savedDist;
      // sum the estimated errors over all subtrees
      for (i = 0; i < m_sons.length; i++)
        errors = errors +
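The estimated-error recursion above feeds C4.5's pessimistic pruning, which J48 exposes through its confidence factor (a lower value generally prunes more). A small sketch, assuming a variable named data already holds a loaded training set, that compares the resulting tree sizes:

// Sketch: effect of the pruning confidence factor on tree size ("data" assumed loaded).
J48 lenient = new J48();
lenient.setConfidenceFactor(0.5f);    // higher confidence factor, less pruning
lenient.buildClassifier(data);

J48 aggressive = new J48();
aggressive.setConfidenceFactor(0.1f); // lower confidence factor, more pruning
aggressive.buildClassifier(data);

System.out.println("tree size (C = 0.5): " + lenient.measureTreeSize());
System.out.println("tree size (C = 0.1): " + aggressive.measureTreeSize());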

   *
   * @param instances the data to train the classifier with
   * @throws Exception if classifier can't be built successfully
   */
  public void buildClassifier( Instances instances ) throws Exception {
    ModelSelection modSelection = new HDIGModelSelection( m_minNumObj, instances );

    if( !m_reducedErrorPruning ) {
      m_root = new C45PruneableClassifierTree( modSelection, !m_unpruned, m_CF, m_subtreeRaising, !m_noCleanup );
    } else {
      m_root = new PruneableClassifierTree( modSelection, !m_unpruned, m_numFolds, !m_noCleanup, m_Seed );

   *
   * @param instances the data to train the classifier with
   * @throws Exception if classifier can't be built successfully
   */
  public void buildClassifier( Instances instances ) throws Exception {
    ModelSelection modSelection = new HTreeModelSelection( m_minNumObj, instances );

    if( !m_reducedErrorPruning ) {
      m_root = new C45PruneableClassifierTree( modSelection, !m_unpruned, m_CF, m_subtreeRaising, !m_noCleanup );
    } else {
      m_root = new PruneableClassifierTree( modSelection, !m_unpruned, m_numFolds, !m_noCleanup, m_Seed );
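Both buildClassifier fragments above hand pruning-related fields (m_unpruned, m_CF, m_subtreeRaising, m_reducedErrorPruning, m_numFolds, m_Seed, m_noCleanup) straight to the underlying ClassifierTree. On a stock J48 the same behaviour is configured through its public setters; a brief sketch:

J48 tree = new J48();

// C4.5-style pruning (the default): confidence factor plus subtree raising
tree.setUnpruned(false);
tree.setConfidenceFactor(0.25f);
tree.setSubtreeRaising(true);

// or switch to reduced-error pruning with internal folds and a seed
// tree.setReducedErrorPruning(true);
// tree.setNumFolds(3);
// tree.setSeed(1);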

   * @throws Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances instances)
       throws Exception {

    ModelSelection modSelection;  

    if (m_binarySplits)
      modSelection = new BinC45ModelSelection(m_minNumObj, instances);
    else
      modSelection = new C45ModelSelection(m_minNumObj, instances);
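The m_binarySplits flag above selects BinC45ModelSelection, which restricts nominal attributes to two-way splits instead of one branch per value. On J48 this corresponds to the binary-splits property (command-line flag -B); for example:

J48 tree = new J48();
tree.setBinarySplits(true);  // two-way splits on nominal attributes
// equivalently via options:
// tree.setOptions(weka.core.Utils.splitOptions("-B -C 0.25 -M 2"));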


    }

    int minNumInstances = 2;
 
    //create ModelSelection object, either for splits on the residuals or for splits on the class value
    ModelSelection modSelection; 
    if (m_splitOnResiduals) {
      modSelection = new ResidualModelSelection(minNumInstances);
    } else {
      modSelection = new C45ModelSelection(minNumInstances, filteredData);
    }
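The m_splitOnResiduals flag appears in LMT (logistic model trees), which reuses the same C45ModelSelection as J48 when splitting on class values and switches to ResidualModelSelection otherwise. A brief sketch using the corresponding setters on weka.classifiers.trees.LMT, assuming data is an already-loaded training set:

// Sketch: choosing the split model on LMT ("data" assumed to be loaded).
weka.classifiers.trees.LMT lmt = new weka.classifiers.trees.LMT();
lmt.setSplitOnResiduals(true);   // ResidualModelSelection instead of C45ModelSelection
lmt.setMinNumInstances(15);      // minimum number of instances at which a node is split
lmt.buildClassifier(data);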


