Package de.bwaldvogel.liblinear

Examples of de.bwaldvogel.liblinear.Problem


      throw new LiblinearException("The parent guide model cannot be found. ");
    }
//    cardinalities = getCardinalities(featureVector);
    if (pathExternalLiblinearTrain == null) {
      try {
        Problem problem = null;
        if (featurePruning) {
          problem = readLibLinearProblemWithFeaturePruning(getInstanceInputStreamReader(".ins"));
        } else {
//          problem = readLibLinearProblem(getInstanceInputStreamReader(".ins"), cardinalities);
        }
View Full Code Here


    return getConfiguration().getConfigurationDir().getConfigFileEntry(owner.getModelName()+getLearningMethodName()+suffix);
  }
 
 
  public Problem readLibLinearProblemWithFeaturePruning(InputStreamReader isr) throws MaltChainedException {
    Problem problem = new Problem();
   
    try {
      final BufferedReader fp = new BufferedReader(isr);
     
      problem.bias = -1;
View Full Code Here

   * @param isr  the instance stream reader for the instance file
   * @param cardinalities  an array containing the number of distinct values for a particular column.
   * @throws LiblinearException
   */
  public Problem readLibLinearProblem(InputStreamReader isr, int[] cardinalities) throws MaltChainedException {
    Problem problem = new Problem();

    try {
      final BufferedReader fp = new BufferedReader(isr);
      int max_index = 0;
      if (xlist == null) {
View Full Code Here

    int[] labels = null;
    int nr_class = 0;
    int nr_feature = 0;
    Parameter parameter = getLiblinearParameters();
    try
      Problem problem = readProblem(getInstanceInputStreamReader(".ins"));
      boolean res = checkProblem(problem);
      if (res == false) {
        throw new LibException("Abort (The number of training instances * the number of classes) > "+Integer.MAX_VALUE+" and this is not supported by LibLinear. ");
      }
      if (configLogger.isInfoEnabled()) {
View Full Code Here

  /**
   * Initializes the set of LibLinear option flags that this learning method
   * accepts. Each character is one allowed single-letter option — presumably
   * s (solver type), c (cost C), e (epsilon tolerance) and B (bias term),
   * matching liblinear's command-line options; TODO(review): confirm against
   * the option-parsing code elsewhere in this class.
   */
  public void initAllowedLibOptionFlags() {
    allowedLibOptionFlags = "sceB";
  }
 
  private Problem readProblem(InputStreamReader isr) throws MaltChainedException {
    Problem problem = new Problem();
    final FeatureList featureList = new FeatureList();
    if (configLogger.isInfoEnabled()) {
      owner.getGuide().getConfiguration().getConfigLogger().info("- Read all training instances.\n");
    }
    try {
View Full Code Here

*
*/
public class LibLINEAR {

  public static LibLINEARModel trainLinearModel(SparseVector[] featureVectors, double[] target, LibLINEARParameters params) {
    Problem prob = createLinearProblem(featureVectors, target, params.getBias());
    return trainLinearModel(prob, params);
  }
View Full Code Here

    System.out.println("#instances:" + prob.l + ", #features: " + prob.n + ", #avg-non-zero: " + avg);

    Prediction[] prediction;

    double[] target = null;
    Problem trainProb = null;
    Problem testProb = null;

    Map<Double, Double> cc = null;

    Parameter linearParams = params.getParamsCopy();
View Full Code Here

    return pred;
  }

  public static Prediction[] crossValidate(SparseVector[] featureVectors, double[] target, LibLINEARParameters params, int numberOfFolds) {
    Prediction[] pred = new Prediction[target.length];
    Problem trainP;
    Feature[][] testP;
    Problem prob = createLinearProblem(featureVectors, target, params.getBias());

    for (int fold = 1; fold <= numberOfFolds; fold++) {
      if (featureVectors.length >=  5000) {
        System.out.println("CV fold: " + fold);
      }
View Full Code Here

    }
    return pred2;
  }

  public static Prediction[] trainTestSplit(SparseVector[] featureVectors, double[] target, LibLINEARParameters params, float splitFraction) {
    Problem total  = createLinearProblem(featureVectors, target, params.getBias());
    Problem trainP = createProblemTrainSplit(total, splitFraction);   
    Problem testP  = createProblemTestSplit(total, splitFraction);

    return testLinearModel(trainLinearModel(trainP, params), testP.x);
  }
View Full Code Here

    return nodes; 
  }


  private static Problem createLinearProblem(SparseVector[] featureVectors, double[] target, double bias) {
    Problem prob = new Problem();
    prob.y = target;
    prob.x = new FeatureNode[featureVectors.length][]
    prob.l = featureVectors.length;

    int maxIndex = 0;
View Full Code Here

TOP

Related Classes of de.bwaldvogel.liblinear.Problem

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.