Package weka.classifiers.meta.ensembleSelection

Examples of weka.classifiers.meta.ensembleSelection.EnsembleSelectionLibraryModel


   
    for (int j = 0; j < subDirectoryFiles.length; j++) {
     
      if (subDirectoryFiles[j].getName().matches(".*.elm")) {
       
        EnsembleSelectionLibraryModel model = EnsembleSelectionLibraryModel
        .loadModel(subDirectoryFiles[j].getPath());
       
        // get those Classifier[] objects garbage collected!
        model.releaseModel();
       
        if (!dataModel.contains(model)) {
   
    modelCount++;
    dataModel.add(model);
View Full Code Here


     
      for (int i = 0; i < 10; i++) {
 
  REPTree tree = new REPTree();
  tree.setSeed(i);
  m_library.addModel(new EnsembleSelectionLibraryModel(tree));
 
      }
     
    }
   
    if (m_library == null) {
      m_library = new EnsembleSelectionLibrary();
      m_library.setDebug(m_Debug);
    }
   
    m_library.setNumFolds(getNumFolds());
    m_library.setValidationRatio(getValidationRatio());
    // train all untrained models, and set "data" to the hillclimbing set.
    Instances data = m_library.trainAll(trainData, m_workingDirectory.getAbsolutePath(),
  m_algorithm);
    // We cache the hillclimb predictions from all of the models in
    // the library so that we can evaluate their performances when we
    // combine them
    // in various ways (without needing to keep the classifiers in memory).
    double predictions[][][] = m_library.getHillclimbPredictions();
    int numModels = predictions.length;
    int modelWeights[] = new int[numModels];
    m_total_weight = 0;
    Random rand = new Random(m_Seed);
   
    if (m_algorithm == ALGORITHM_BUILD_LIBRARY) {
      return;
     
    } else if (m_algorithm == ALGORITHM_BEST) {
      // If we want to choose the best model, just make a model bag that
      // includes all the models, then sort initialize to find the 1 that
      // performs best.
      ModelBag model_bag = new ModelBag(predictions, 1.0, m_Debug);
      int[] modelPicked = model_bag.sortInitialize(1, false, data,
    m_hillclimbMetric);
      // Then give it a weight of 1, while all others remain 0.
      modelWeights[modelPicked[0]] = 1;
    } else {
     
      if (m_Debug)
  System.out.println("Starting hillclimbing algorithm: "
      + m_algorithm);
     
      for (int i = 0; i < getNumModelBags(); ++i) {
  // For the number of bags,
  if (m_Debug)
    System.out.println("Starting on ensemble bag: " + i);
  // Create a new bag of the appropriate size
  ModelBag modelBag = new ModelBag(predictions, getModelRatio(),
      m_Debug);
  // And shuffle it.
  modelBag.shuffle(rand);
  if (getSortInitializationRatio() > 0.0) {
    // Sort initialize, if the ratio greater than 0.
    modelBag.sortInitialize((int) (getSortInitializationRatio()
        * getModelRatio() * numModels),
        getGreedySortInitialization(), data,
        m_hillclimbMetric);
  }
 
  if (m_algorithm == ALGORITHM_BACKWARD) {
    // If we're doing backwards elimination, we just give all
    // models
    // a weight of 1 initially. If the # of hillclimb iterations
    // is too high, we'll end up with just one model in the end
    // (we never delete all models from a bag). TODO - it might
    // be
    // smarter to base this weight off of how many models we
    // have.
    modelBag.weightAll(1); // for now at least, I'm just
    // assuming 1.
  }
  // Now the bag is initialized, and we're ready to hillclimb.
  for (int j = 0; j < getHillclimbIterations(); ++j) {
    if (m_algorithm == ALGORITHM_FORWARD) {
      modelBag.forwardSelect(getReplacement(), data,
    m_hillclimbMetric);
    } else if (m_algorithm == ALGORITHM_BACKWARD) {
      modelBag.backwardEliminate(data, m_hillclimbMetric);
    } else if (m_algorithm == ALGORITHM_FORWARD_BACKWARD) {
      modelBag.forwardSelectOrBackwardEliminate(
    getReplacement(), data, m_hillclimbMetric);
    }
  }
  // Now that we've done all the hillclimbing steps, we can just
  // get
  // the model weights that the bag determined, and add them to
  // our
  // running total.
  int[] bagWeights = modelBag.getModelWeights();
  for (int j = 0; j < bagWeights.length; ++j) {
    modelWeights[j] += bagWeights[j];
  }
      }
    }
    // Now we've done the hard work of actually learning the ensemble. Now
    // we set up the appropriate data structures so that Ensemble Selection
    // can
    // make predictions for future test examples.
    Set modelNames = m_library.getModelNames();
    String[] modelNamesArray = new String[m_library.size()];
    Iterator iter = modelNames.iterator();
    // libraryIndex indexes over all the models in the library (not just
    // those
    // which we chose for the ensemble).
    int libraryIndex = 0;
    // chosenModels will count the total number of models which were
    // selected
    // by EnsembleSelection (those that have non-zero weight).
    int chosenModels = 0;
    while (iter.hasNext()) {
      // Note that we have to be careful of order. Our model_weights array
      // is in the same order as our list of models in m_library.
     
      // Get the name of the model,
      modelNamesArray[libraryIndex] = (String) iter.next();
      // and its weight.
      int weightOfModel = modelWeights[libraryIndex++];
      m_total_weight += weightOfModel;
      if (weightOfModel > 0) {
  // If the model was chosen at least once, increment the
  // number of chosen models.
  ++chosenModels;
      }
    }
    if (m_verboseOutput) {
      // Output every model and its performance with respect to the
      // validation
      // data.
      ModelBag bag = new ModelBag(predictions, 1.0, m_Debug);
      int modelIndexes[] = bag.sortInitialize(modelNamesArray.length,
    false, data, m_hillclimbMetric);
      double modelPerformance[] = bag.getIndividualPerformance(data,
    m_hillclimbMetric);
      for (int i = 0; i < modelIndexes.length; ++i) {
  // TODO - Could do this in a more readable way.
  System.out.println("" + modelPerformance[i] + " "
      + modelNamesArray[modelIndexes[i]]);
      }
    }
    // We're now ready to build our array of the models which were chosen
    // and their associated weights.
    m_chosen_models = new EnsembleSelectionLibraryModel[chosenModels];
    m_chosen_model_weights = new int[chosenModels];
   
    libraryIndex = 0;
    // chosenIndex indexes over the models which were chosen by
    // EnsembleSelection
    // (those which have non-zero weight).
    int chosenIndex = 0;
    iter = m_library.getModels().iterator();
    while (iter.hasNext()) {
      int weightOfModel = modelWeights[libraryIndex++];
     
      EnsembleSelectionLibraryModel model = (EnsembleSelectionLibraryModel) iter
      .next();
     
      if (weightOfModel > 0) {
  // If the model was chosen at least once, add it to our array
  // of chosen models and weights.
View Full Code Here

TOP

Related Classes of weka.classifiers.meta.ensembleSelection.EnsembleSelectionLibraryModel

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.