Package org.data2semantics.proppred.learners.liblinear

Examples of org.data2semantics.proppred.learners.liblinear.LibLINEARParameters


    List<EvaluationFunction> evalFuncs = new ArrayList<EvaluationFunction>();
    evalFuncs.add(new Error());
    evalFuncs.add(new F1());

    List<Double> target = EvaluationUtils.createTarget(labels);

    LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
    linParms.setEvalFunction(new Error());
    linParms.setDoCrossValidation(false);
    linParms.setNumFolds(10);

    Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
    int[] wLabels = new int[counts.size()];
    double[] weights = new double[counts.size()];

    // Weight each class by its inverse frequency; the indexing assumes consecutive 1-based labels.
    for (double label : counts.keySet()) {
      wLabels[(int) label - 1] = (int) label;
      weights[(int) label - 1] = 1 / counts.get(label);
    }
    linParms.setWeightLabels(wLabels);
    linParms.setWeights(weights);

    LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    svmParms.setNumFolds(10);

    svmParms.setWeightLabels(EvaluationUtils.computeWeightLabels(target));
View Full Code Here
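
The class-weight block above recurs in every example on this page; the sketch below factors it out. It is a minimal sketch, not part of the library: the class and method names are hypothetical, the import path for EvaluationUtils is assumed, and it relies on EvaluationUtils.createTarget producing consecutive 1-based labels, which is what the (int) label - 1 indexing in these snippets already assumes.

    import java.util.List;
    import java.util.Map;

    import org.data2semantics.proppred.learners.evaluation.EvaluationUtils; // package path assumed
    import org.data2semantics.proppred.learners.liblinear.LibLINEARParameters;

    // Hypothetical helper, not part of the library.
    public class ClassWeightUtils {

      // Installs inverse-class-frequency weights on a LibLINEARParameters instance.
      // Assumes the labels in 'target' are the consecutive values 1..k.
      public static void setInverseClassWeights(LibLINEARParameters linParms, List<Double> target) {
        Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
        int[] wLabels = new int[counts.size()];
        double[] weights = new double[counts.size()];

        for (double label : counts.keySet()) {
          wLabels[(int) label - 1] = (int) label;           // class label
          weights[(int) label - 1] = 1 / counts.get(label); // inverse class frequency
        }
        linParms.setWeightLabels(wLabels);
        linParms.setWeights(weights);
      }
    }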


     
      double[] cs = new double[csArray.size()];
      for (int i = 0; i < cs.length; i++) {
        cs[i] = csArray.get(i);
      }
      target = EvaluationUtils.createTarget(labels);

      linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
      linParms.setDoCrossValidation(true);
      linParms.setNumFolds(5);

      Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
      wLabels = new int[counts.size()];
View Full Code Here

    List<EvaluationFunction> evalFuncs = new ArrayList<EvaluationFunction>();
    evalFuncs.add(new Accuracy());
    evalFuncs.add(new F1());
    List<Double> targets = EvaluationUtils.createTarget(labels);

    LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
    linParms.setEvalFunction(new Accuracy());
    linParms.setDoCrossValidation(true);
    linParms.setSplitFraction((float) 0.8);
    linParms.setEps(0.1);

    Map<Double, Double> counts = EvaluationUtils.computeClassCounts(targets);
    int[] wLabels = new int[counts.size()];
    double[] weights = new double[counts.size()];

    for (double label : counts.keySet()) {
      wLabels[(int) label - 1] = (int) label;
      weights[(int) label - 1] = 1 / counts.get(label);
    }
    linParms.setWeightLabels(wLabels);
    linParms.setWeights(weights);

    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(3);

View Full Code Here

    for (double frac : fractions) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
      List<Double> target = EvaluationUtils.createTarget(labels);
     
      LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
      linParms.setDoCrossValidation(false);
      linParms.setNumFolds(0);
      linParms.setSplitFraction((float) 0.7);
     
      Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
      int[] wLabels = new int[counts.size()];
      double[] weights = new double[counts.size()];

      for (double label : counts.keySet()) {
        wLabels[(int) label - 1] = (int) label;
        weights[(int) label - 1] = 1 / counts.get(label);
      }
      linParms.setWeightLabels(wLabels);
      linParms.setWeights(weights);
     

      System.out.println("Running fraction: " + frac);

     
View Full Code Here

          loadDataSet(fraction, seed);

          List<Double> targets = EvaluationUtils.createTarget(labels);

          LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
          linParms.setEvalFunction(new Accuracy());
          linParms.setDoCrossValidation(false);
          linParms.setSplitFraction((float) 0.8);
          linParms.setEps(0.1);

          Map<Double, Double> counts = EvaluationUtils.computeClassCounts(targets);
          int[] wLabels = new int[counts.size()];
          double[] weights = new double[counts.size()];

          for (double label : counts.keySet()) {
            wLabels[(int) label - 1] = (int) label;
            weights[(int) label - 1] = 1 / counts.get(label);
          }
          linParms.setWeightLabels(wLabels);
          linParms.setWeights(weights);


          RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, d, inference, true), s2, linParms, dataset, instances, targets, blackList, evalFuncs);
          res.add(exp.getResults());
View Full Code Here

    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("");

        LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
        KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running WL RDF: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

    for (int i : depths) {     
      resTable.newRow("");

      LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
      KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(i, false, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

      System.out.println("Running EVP: " + i);
      exp.run();
View Full Code Here

    Map<Value, Double> labelMap = new HashMap<Value, Double>();
    List<Double> target = EvaluationUtils.createTarget(labels, labelMap);

    // Initialize parameters object for LibLINEAR
    double[] cs = {0.0001, 0.001, 0.01, 0.1, 1, 10, 100, 1000, 10000}; // C values to optimize over.
    LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
    linParms.setDoCrossValidation(true);
   
    // Set the weights of the different classes, for the first 100 instances
    Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target.subList(0,100));
    int[] wLabels = new int[counts.size()];
    double[] weights = new double[counts.size()];

    for (double label : counts.keySet()) {
      wLabels[(int) label - 1] = (int) label;
      weights[(int) label - 1] = 1 / counts.get(label);
    }
    linParms.setWeightLabels(wLabels);
    linParms.setWeights(weights);
   
    // Train model on the first 100 instances.
    LibLINEARModel model = LibLINEAR.trainLinearModel(Arrays.copyOfRange(featureVectors, 0, 100), EvaluationUtils.target2Doubles(target.subList(0, 100)), linParms);

    // Test on the rest of the data
View Full Code Here
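
The excerpt stops just before the test step. Below is a minimal sketch of scoring the held-out instances; it assumes LibLINEAR exposes a testLinearModel counterpart to trainLinearModel that takes the trained model and an array of feature vectors and returns one predicted label per vector. That method name and signature are inferred, not confirmed by this page, so check them against the library before use.

    // Assumption: LibLINEAR.testLinearModel(model, vectors) mirrors trainLinearModel
    // and returns a double[] of predicted labels.
    double[] predictions = LibLINEAR.testLinearModel(model,
        Arrays.copyOfRange(featureVectors, 100, featureVectors.length));

    // True labels for the held-out part, converted the same way as for training.
    double[] trueLabels = EvaluationUtils.target2Doubles(target.subList(100, target.size()));

    // Plain accuracy over the held-out instances.
    int correct = 0;
    for (int i = 0; i < predictions.length; i++) {
      if (predictions[i] == trueLabels[i]) {
        correct++;
      }
    }
    System.out.println("Held-out accuracy: " + ((double) correct / predictions.length));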
