Package org.data2semantics.exp.utils

Examples of org.data2semantics.exp.utils.ResultsTable
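Every snippet on this page follows the same reporting pattern around ResultsTable: open a row per experiment setting with newRow, add one Result per evaluation measure, optionally mark the best results per column with addCompResults(getBestResults()), and print the table. A minimal, self-contained sketch of that lifecycle follows; it assumes the data2semantics classes ResultsTable and Result are on the classpath, the row label and scores are made-up illustrations, and setManWU presumably sets the significance level of the Mann-Whitney U comparison used when marking best results.

    import org.data2semantics.exp.utils.ResultsTable;
    import org.data2semantics.exp.utils.Result; // assumption: Result lives alongside ResultsTable

    public class ResultsTableSketch {
      public static void main(String[] args) {
        ResultsTable resTable = new ResultsTable();
        resTable.setDigits(2);   // decimals shown when the table is printed
        resTable.setManWU(0.05); // presumably the Mann-Whitney U significance level

        // One row per experiment setting; each added Result fills an entry in that row.
        resTable.newRow("WL RDF, depth=1");
        Result res = new Result();
        res.setLabel("accuracy");
        res.setScores(new double[] {0.81, 0.79, 0.84}); // hypothetical per-seed scores
        res.setHigherIsBetter(true);
        resTable.addResult(res);

        // Mark the best result per column and print both the formatted table
        // and the raw per-seed scores.
        resTable.addCompResults(resTable.getBestResults());
        System.out.println(resTable);
        System.out.println(resTable.allScoresToString());
      }
    }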




    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);

    boolean inference = true;

    List<EvaluationFunction> evalFuncs1 = new ArrayList<EvaluationFunction>();
    evalFuncs1.add(new Task1ScoreForBins(bins));

    List<EvaluationFunction> evalFuncs2 = new ArrayList<EvaluationFunction>();
    evalFuncs2.add(new Task1Score());
    evalFuncs2.add(new MeanSquaredError());
    evalFuncs2.add(new MeanAbsoluteError());


    LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
    linParms.setEvalFunction(new Task1ScoreForBothBins(bins));
    linParms.setDoCrossValidation(false);
    linParms.setSplitFraction((float) 0.8);
    linParms.setEps(0.1);
    linParms.setPs(ps1);

    // Weight each class by the inverse of its frequency so that under-represented
    // bins count as much as frequent ones in the LibLINEAR objective.
    Map<Double, Double> counts = EvaluationUtils.computeClassCounts(targetBins);
    int[] wLabels = new int[counts.size()];
    double[] weights = new double[counts.size()];

    for (double label : counts.keySet()) {
      wLabels[(int) label - 1] = (int) label;
      weights[(int) label - 1] = 1 / counts.get(label);
    }
    linParms.setWeightLabels(wLabels);
    linParms.setWeights(weights);



    LibLINEARParameters linParms2 = new LibLINEARParameters(LibLINEARParameters.SVR_DUAL, cs);
    linParms2.setEvalFunction(new Task1Score());
    linParms2.setDoCrossValidation(false);
    linParms2.setSplitFraction((float) 0.8);
    linParms2.setEps(0.1);
    linParms2.setPs(ps2);
    linParms2.setBias(1);


    for (int d : depths) {     
      resTable.newRow("ITP BoW, depth="+d);

      RDFFeatureVectorKernel kernel = new RDFIntersectionTreeEdgeVertexPathWithTextKernel(d, false, inference, false);

      System.out.println("Running PathWithText kernel: " + d );

      Map<EvaluationFunction, double[]> resultMap = new HashMap<EvaluationFunction,double[]>();
      Map<EvaluationFunction, double[]> resultMap2 = new HashMap<EvaluationFunction,double[]>();

      List<Result> results = new ArrayList<Result>();

      for (EvaluationFunction evalFunc : evalFuncs1) {
        Result res = new Result();
        double[] resA = new double[seeds.length];
        res.setLabel(evalFunc.getLabel());
        res.setScores(resA);
        res.setHigherIsBetter(evalFunc.isHigherIsBetter());
        results.add(res);
        resultMap.put(evalFunc, resA);
      }

      for (EvaluationFunction evalFunc : evalFuncs2) {
        Result res = new Result();
        double[] resA = new double[seeds.length];
        res.setLabel(evalFunc.getLabel());
        res.setScores(resA);
        res.setHigherIsBetter(evalFunc.isHigherIsBetter());
        results.add(res);
        resultMap2.put(evalFunc, resA);
      }

      Result compR = new Result();
      results.add(compR);


      long tic, toc;

      List<Double> tempLabels = new ArrayList<Double>();
      List<Double> tempLabelsBins = new ArrayList<Double>();
      tempLabels.addAll(target);
      tempLabelsBins.addAll(targetBins);

      tic = System.currentTimeMillis();
      SparseVector[] fv = kernel.computeFeatureVectors(dataset, instances, blackList);
      toc = System.currentTimeMillis();

      // Reweight the bag-of-words features with TF-IDF and normalize the feature vectors.
      fv = TextUtils.computeTFIDF(Arrays.asList(fv)).toArray(new SparseVector[1]);
      fv = KernelUtils.normalize(fv);


      List<SparseVector> fvList = Arrays.asList(fv);


      compR.setLabel("kernel comp time");

      // One run per seed: shuffle the feature vectors and both label lists with the
      // same Random seed so that instances and labels stay aligned across splits.
      for (int j = 0; j < seeds.length; j++) {
        Collections.shuffle(fvList, new Random(seeds[j]));
        Collections.shuffle(tempLabels, new Random(seeds[j]));
        Collections.shuffle(tempLabelsBins, new Random(seeds[j]));

        fv = fvList.toArray(new SparseVector[1]);
        double[] targetA = new double[tempLabels.size()];
        double[] targetABins = new double[tempLabelsBins.size()];
        for (int i = 0; i < targetA.length; i++) {
          targetA[i] = tempLabels.get(i);
          targetABins[i] = tempLabelsBins.get(i);
        }

        Prediction[] pred = LibLINEAR.trainTestSplit(fv, targetABins, linParms, linParms.getSplitFraction());     
        Prediction[] pred2 = LibLINEAR.trainTestSplit(fv, targetA, linParms2, linParms2.getSplitFraction());       

        double[] targetSplit = LibLINEAR.splitTestTarget(targetA, linParms.getSplitFraction());

        for (EvaluationFunction ef : evalFuncs1) {
          resultMap.get(ef)[j] = ef.computeScore(targetSplit, pred);
        }       
        for (EvaluationFunction ef : evalFuncs2) {
          resultMap2.get(ef)[j] = ef.computeScore(targetSplit, pred2);
        }
      }
      double[] comp = {toc - tic};
      compR.setScores(comp);

      for (Result res : results) {
        resTable.addResult(res);
      }     
    }


    for (int d : depths) {     
      for (int it : iterations) {
        resTable.newRow("RDF WL BoW, depth="+d);

        /*
        List<RDFFeatureVectorKernel> kernels = new ArrayList<RDFFeatureVectorKernel>();
        kernels.add(new RDFWLSubTreeKernel(it,d, inference, false));
        kernels.add(new RDFSimpleTextKernel(d, inference, false));
        RDFFeatureVectorKernel kernel = new RDFCombinedKernel(kernels, true);
        */
       
        RDFFeatureVectorKernel kernel = new RDFWLSubTreeWithTextKernel(it, d, inference, false);
       
       
       
        System.out.println("Running RDFWL + text kernel: " + d + " " + it);

        Map<EvaluationFunction, double[]> resultMap = new HashMap<EvaluationFunction,double[]>();
        Map<EvaluationFunction, double[]> resultMap2 = new HashMap<EvaluationFunction,double[]>();

        List<Result> results = new ArrayList<Result>();

        for (EvaluationFunction evalFunc : evalFuncs1) {
          Result res = new Result();
          double[] resA = new double[seeds.length];
          res.setLabel(evalFunc.getLabel());
          res.setScores(resA);
          res.setHigherIsBetter(evalFunc.isHigherIsBetter());
          results.add(res);
          resultMap.put(evalFunc, resA);
        }

        for (EvaluationFunction evalFunc : evalFuncs2) {
          Result res = new Result();
          double[] resA = new double[seeds.length];
          res.setLabel(evalFunc.getLabel());
          res.setScores(resA);
          res.setHigherIsBetter(evalFunc.isHigherIsBetter());
          results.add(res);
          resultMap2.put(evalFunc, resA);
        }

        Result compR = new Result();
        results.add(compR);


        long tic, toc;

        List<Double> tempLabels = new ArrayList<Double>();
        List<Double> tempLabelsBins = new ArrayList<Double>();
        tempLabels.addAll(target);
        tempLabelsBins.addAll(targetBins);

        tic = System.currentTimeMillis();
        SparseVector[] fv = kernel.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();

        fv = TextUtils.computeTFIDF(Arrays.asList(fv)).toArray(new SparseVector[1]);
        fv = KernelUtils.normalize(fv);


        List<SparseVector> fvList = Arrays.asList(fv);


        compR.setLabel("kernel comp time");

        for (int j = 0; j < seeds.length; j++) {
          Collections.shuffle(fvList, new Random(seeds[j]));
          Collections.shuffle(tempLabels, new Random(seeds[j]));
          Collections.shuffle(tempLabelsBins, new Random(seeds[j]));

          fv = fvList.toArray(new SparseVector[1]);
          double[] targetA = new double[tempLabels.size()];
          double[] targetABins = new double[tempLabelsBins.size()];
          for (int i = 0; i < targetA.length; i++) {
            targetA[i] = tempLabels.get(i);
            targetABins[i] = tempLabelsBins.get(i);
          }

          Prediction[] pred = LibLINEAR.trainTestSplit(fv, targetABins, linParms, linParms.getSplitFraction());     
          Prediction[] pred2 = LibLINEAR.trainTestSplit(fv, targetA, linParms2, linParms2.getSplitFraction());       

          double[] targetSplit = LibLINEAR.splitTestTarget(targetA, linParms.getSplitFraction());

          for (EvaluationFunction ef : evalFuncs1) {
            resultMap.get(ef)[j] = ef.computeScore(targetSplit, pred);
          }       
          for (EvaluationFunction ef : evalFuncs2) {
            resultMap2.get(ef)[j] = ef.computeScore(targetSplit, pred2);
          }
        }
        double[] comp = {toc - tic};
        compR.setScores(comp);

        for (Result res : results) {
          resTable.addResult(res);
        }
      }
    }



    saveResults(resTable, "task1_" + seed + ".ser");
    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "task1_" + seed + ".txt");



  }
View Full Code Here


    LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    svmParms.setNumFolds(10);
   
   

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);
    resTable.setDigits(2);

    boolean inference = false;
    boolean forward = true;



   
    DTGraph<String,String> sGraph = org.nodes.data.RDF.createDirectedGraph(dataset.getStatements(null, null, null, inference), null, null);
    List<DTNode<String,String>> hubs = SlashBurn.getHubs(sGraph, 1, true);
   
    // For comparison, also collect the top nodes ranked by signature degree.
    Comparator<DTNode<String,String>> comp = new SlashBurn.SignatureComparator<String,String>();
    MaxObserver<DTNode<String,String>> obs = new MaxObserver<DTNode<String,String>>(hubs.size(), comp);   
    obs.observe(sGraph.nodes());
   
    List<DTNode<String,String>> degreeHubs = new ArrayList<DTNode<String,String>>(obs.elements());
   
    // Remove hubs from list that are root nodes
    List<DTNode<String,String>> rn = new ArrayList<DTNode<String,String>>();
    Set<String> is = new HashSet<String>();
    for (Resource r : instances) {
      is.add(r.toString());
    }
    for (DTNode<String,String> hub : hubs) {
      if (is.contains(hub.label())) {
        rn.add(hub);
      }
    }
    hubs.removeAll(rn);       
    degreeHubs.removeAll(rn);
   
    System.out.println("Total SB hubs: " + hubs.size());
    System.out.println(hubs);
    System.out.println(degreeHubs);
   
    for (int i = 0; i < degreeHubs.size() && i < hubs.size(); i++) {
      if (!hubs.get(i).equals(degreeHubs.get(i))) {
        System.out.println(i + " " + hubs.get(i).label() + " " + degreeHubs.get(i).label());
      }
    }
   
   
    /*
    Map<String,Integer> dMap  = GraphUtils.createDegreeHubMap(degreeHubs, 300);
    Map<String,Integer> sbMap = GraphUtils.createHubMap(hubs, 300);
   
    for (String k : dMap.keySet()) {
      int l = dMap.get(k);
      if (sbMap.get(k) != l) {
        System.out.println("fail in level: " + l + " " + sbMap.get(k));
      }
     
    }
    */
   
   
    //int[] hf = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20};
   
    int[] hf = {1,10};

   
   
   
    ///*
    for (int i : depths) {     
      resTable.newRow("RDF WL forward");
      for (int it : iterations) {
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, i, inference, true, forward, false);
       
        //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
        KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


        System.out.println("Running WL RDF fwd: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    resTable.addCompResults(resTable.getBestResults());
    //resTable.addCompResults(table2.getBestResults());
    System.out.println(resTable);
   
   
    //*/

    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL forward Degree " + h);
        for (int it : iterations) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(degreeHubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


          System.out.println("Running WL RDF fwd Degree: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          }
        }
      }
      resTable.addCompResults(resTable.getBestResults());
      //resTable.addCompResults(table2.getBestResults());
      System.out.println(resTable);
    }
   
   

    ///*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL forward SB " + h);
        for (int it : iterations) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(hubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


          System.out.println("Running WL RDF fwd SB: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          }
        }
      }
      resTable.addCompResults(resTable.getBestResults());
      //resTable.addCompResults(table2.getBestResults());
      System.out.println(resTable);
    }
   
    //*/


    /*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF IST SB " + h);
          RDFIntersectionSubTreeSlashBurnKernel k = new RDFIntersectionSubTreeSlashBurnKernel(i, 1, inference, true);
          k.setHubThreshold(h);

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


          System.out.println("Running RDF IST SB: " + i + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          } 
        }
    }
    System.out.println(resTable);
    //*/

   
   
    /*
    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("RDF WL reverse");

        KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true, true, false), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running WL RDF rev: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        } 
      }
    }
    System.out.println(resTable);

    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("RDF WL Bi");

        KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFWLBiSubTreeKernel(it, i, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running WL RDF Bi: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        } 
      }
    }
    System.out.println(resTable);
  //*/



    resTable.addCompResults(resTable.getBestResults());
    //resTable.addCompResults(table2.getBestResults());
    System.out.println(resTable);



 
View Full Code Here

   

    PropertyPredictionDataSet dataset;
    PropertyPredictionExperiment exp;

    ResultsTable resultsWL  = new ResultsTable();
    ResultsTable resultsSTF = new ResultsTable();
    ResultsTable resultsSTP = new ResultsTable();
    ResultsTable resultsIGW = new ResultsTable();
    ResultsTable resultsIGP = new ResultsTable();

    Experimenter experimenter = new Experimenter(NUMBER_OF_PROC);
    Thread expT = new Thread(experimenter);
    expT.setDaemon(true);
    expT.start();



    try {
      for (GeneralPredictionDataSetParameters params : dataSetsParams) {
        dataset = DataSetFactory.createPropertyPredictionDataSet(params);
        //dataset.removeSmallClasses(5);
        dataset.setLabels(labels);
        //dataset.removeVertexAndEdgeLabels();

       
       
        resultsWL.newRow(dataset.getLabel() + " WLSubTreeKernel");
        for (int i = 0; i < 4; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "WL" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new WLSubTreeKernel(i), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsWL.addResult(exp.getResults().getAccuracy());
            resultsWL.addResult(exp.getResults().getF1());
          }
        }


       
        resultsSTF.newRow(dataset.getLabel() + " IntersectionFullSubTree");
        for (int i = 0; i < 4; i++) {

          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionFullSubTree" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionSubTreeKernel(i, 1), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTF.addResult(exp.getResults().getAccuracy());
            resultsSTF.addResult(exp.getResults().getF1());
          }
        }

        resultsSTP.newRow(dataset.getLabel() + " IntersectionPartialSubTree");
        for (int i = 0; i < 4; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionPartialSubTree" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionPartialSubTreeKernel(i, 0.01), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTP.addResult(exp.getResults().getAccuracy());
            resultsSTP.addResult(exp.getResults().getF1());
          }
        }

        //*/


       
        resultsIGP.newRow(dataset.getLabel() + " IntersectionGraphPath");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionGraphPath" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionGraphPathKernel(i, 1), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsIGP.addResult(exp.getResults().getAccuracy());
            resultsIGP.addResult(exp.getResults().getF1());
          }
        }       

        resultsIGW.newRow(dataset.getLabel() + " IntersectionGraphWalk");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionGraphWalk" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionGraphWalkKernel(i, 1), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsIGW.addResult(exp.getResults().getAccuracy());
            resultsIGW.addResult(exp.getResults().getF1());
          }
        }
        //*/       

      }
    } catch (Exception e) {
      e.printStackTrace();
    }

    experimenter.stop();

    while (expT.isAlive()) {
      try {
        Thread.sleep(1000);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }

    try {
      int fileId = (int) (Math.random() * 100000000);
      File file = new File(DATA_DIR + fileId + "_" + "all_results" + ".txt");
      PrintWriter fileOut = new PrintWriter(new FileOutputStream(file));

      List<Result> bestResults = new ArrayList<Result>();
     
      bestResults = resultsWL.getBestResults(bestResults);
      bestResults = resultsSTF.getBestResults(bestResults);
      bestResults = resultsSTP.getBestResults(bestResults);
      bestResults = resultsIGW.getBestResults(bestResults);
      bestResults = resultsIGP.getBestResults(bestResults);
     
      resultsWL.addCompResults(bestResults);
      resultsSTF.addCompResults(bestResults);
      resultsSTP.addCompResults(bestResults);
      resultsIGW.addCompResults(bestResults);
      resultsIGP.addCompResults(bestResults);
           
     
      fileOut.println(resultsWL);
      fileOut.println(resultsSTF);
      fileOut.println(resultsSTP);
      fileOut.println(resultsIGW);
      fileOut.println(resultsIGP);

      fileOut.println(resultsWL.allScoresToString());
      fileOut.println(resultsSTF.allScoresToString());
      fileOut.println(resultsSTP.allScoresToString());
      fileOut.println(resultsIGW.allScoresToString());
      fileOut.println(resultsIGP.allScoresToString());

      System.out.println(resultsWL);
      System.out.println(resultsSTF);
      System.out.println(resultsSTP);
      System.out.println(resultsIGW);
      System.out.println(resultsIGP);

      System.out.println(resultsWL.allScoresToString());
      System.out.println(resultsSTF.allScoresToString());
      System.out.println(resultsSTP.allScoresToString());
      System.out.println(resultsIGW.allScoresToString());
      System.out.println(resultsIGP.allScoresToString());
     
      fileOut.close();


    } catch (Exception e) {
View Full Code Here

    int[] depths = {1, 2, 3};
    int[] iterations = {0, 2, 4, 6};
    dataset = new RDFFileDataSet(dataDir, RDFFormat.NTRIPLES);

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);
    resTable.setDigits(3);

    boolean inference = true;


    List<EvaluationFunction> evalFuncs = new ArrayList<EvaluationFunction>();
    evalFuncs.add(new Accuracy());
    evalFuncs.add(new F1());

   
   
    for (int i : depths) { 
      resTable.newRow("WL RDF, depth="+i)
      for (int it : iterations) {

        List<List<Result>> res = new ArrayList<List<Result>>();
        for (long seed : seeds) {
          long[] seeds2 = {seed};
         
          createGeoDataSet((int)(1000 * fraction), fraction, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
          List<Double> target = EvaluationUtils.createTarget(labels);

          LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
          linParms.setDoCrossValidation(true);
          linParms.setNumFolds(5);

          Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
          int[] wLabels = new int[counts.size()];
          double[] weights = new double[counts.size()];

          for (double label : counts.keySet()) {
            wLabels[(int) label - 1] = (int) label;
            weights[(int) label - 1] = 1 / counts.get(label);
          }
          linParms.setWeightLabels(wLabels);
          linParms.setWeights(weights);

          RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true), seeds2, linParms, dataset, instances, target, blackList, evalFuncs);

          System.out.println("Running WL RDF: " + i + " " + it);
          exp.setDoCV(true);
          exp.run();
          res.add(exp.getResults());
        }

        for (Result res2 : Result.mergeResultLists(res)) {
          resTable.addResult(res2);
        }
      }
    }
    System.out.println(resTable);


    for (int i : depths) { 
      resTable.newRow("WL RDF BoW, depth="+i)
      for (int it : iterations) {

        List<List<Result>> res = new ArrayList<List<Result>>();
        for (long seed : seeds) {
          long[] seeds2 = {seed};
         
          createGeoDataSet((int)(1000 * fraction), fraction, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
          List<Double> target = EvaluationUtils.createTarget(labels);

          LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
          linParms.setDoCrossValidation(true);
          linParms.setNumFolds(5);

          Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
          int[] wLabels = new int[counts.size()];
          double[] weights = new double[counts.size()];

          for (double label : counts.keySet()) {
            wLabels[(int) label - 1] = (int) label;
            weights[(int) label - 1] = 1 / counts.get(label);
          }
          linParms.setWeightLabels(wLabels);
          linParms.setWeights(weights);

          RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFWLSubTreeWithTextKernel(it, i, inference, true), seeds2, linParms, dataset, instances, target, blackList, evalFuncs);

          System.out.println("Running WL RDF with text: " + i + " " + it);
          exp.setDoCV(true);
          exp.run();
          res.add(exp.getResults());
        }

        for (Result res2 : Result.mergeResultLists(res)) {
          resTable.addResult(res2);
        }
      }
    }
    System.out.println(resTable);
   
   
    for (int i : depths) { 
      resTable.newRow("ITP, depth="+i)

      List<List<Result>> res = new ArrayList<List<Result>>();
      for (long seed : seeds) {
        long[] seeds2 = {seed};
        createGeoDataSet((int)(1000 * fraction), fraction, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
        List<Double> target = EvaluationUtils.createTarget(labels);

        LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
        linParms.setDoCrossValidation(true);
        linParms.setNumFolds(5);

        Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
        int[] wLabels = new int[counts.size()];
        double[] weights = new double[counts.size()];

        for (double label : counts.keySet()) {
          wLabels[(int) label - 1] = (int) label;
          weights[(int) label - 1] = 1 / counts.get(label);
        }
        linParms.setWeightLabels(wLabels);
        linParms.setWeights(weights);

        RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(i, false, inference, true), seeds2, linParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running EVP: " + i);
        exp.setDoCV(true);
        exp.run();
        res.add(exp.getResults());
      }

      for (Result res2 : Result.mergeResultLists(res)) {
        resTable.addResult(res2);
      }
    }
    System.out.println(resTable);
   
    for (int i : depths) { 
      resTable.newRow("ITP BoW, depth="+i)

      List<List<Result>> res = new ArrayList<List<Result>>();
      for (long seed : seeds) {
        long[] seeds2 = {seed};
        createGeoDataSet((int)(1000 * fraction), fraction, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
        List<Double> target = EvaluationUtils.createTarget(labels);

        LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
        linParms.setDoCrossValidation(true);
        linParms.setNumFolds(5);

        Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
        int[] wLabels = new int[counts.size()];
        double[] weights = new double[counts.size()];

        for (double label : counts.keySet()) {
          wLabels[(int) label - 1] = (int) label;
          weights[(int) label - 1] = 1 / counts.get(label);
        }
        linParms.setWeightLabels(wLabels);
        linParms.setWeights(weights);

        RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathWithTextKernel(i, false, inference, false), seeds2, linParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running EVP with text: " + i);
        exp.setDoCV(true);
        exp.setDoTFIDF(true);
        exp.run();
        res.add(exp.getResults());
      }

      for (Result res2 : Result.mergeResultLists(res)) {
        resTable.addResult(res2);
      }
    }
    System.out.println(resTable);

   

    for (int i : depths) { 
      resTable.newRow("IST, depth="+i)

      List<List<Result>> res = new ArrayList<List<Result>>();
      for (long seed : seeds) {
        long[] seeds2 = {seed};
        createGeoDataSet((int)(1000 * fraction), fraction, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
        List<Double> target = EvaluationUtils.createTarget(labels);

        LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
        svmParms.setNumFolds(5);


        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(i, 1, inference, true), seeds2, svmParms, dataset, instances, labels, blackList);

        System.out.println("Running IST: " + i);
        exp.run();
        res.add(exp.getResults());
      }

      for (Result res2 : Result.mergeResultLists(res)) {
        resTable.addResult(res2);
      }
    }
    System.out.println(resTable);

    saveResults(resTable, "geo_theme_DMoLD.ser");

    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "geo_theme_DMoLD_.txt");
  }
View Full Code Here

    int depth = 3;
    int iteration = 6;
    boolean inference = true;

    LibSVMParameters parms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    ResultsTable resTable = new ResultsTable();

    resTable.newRow("WL RDF");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {

        createAffiliationPredictionDataSet(frac, seed);

        KernelExperiment<RDFGraphKernel> exp = new RDFKernelRunTimeExperiment(new ECML2013RDFWLSubTreeKernel(iteration, depth, inference, true, false), seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running WL RDF: " + frac);
        exp.run();
        res.addResult(exp.getResults().get(0));
      }
      resTable.addResult(res);
    }

    resTable.newRow("IST");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {

        createAffiliationPredictionDataSet(frac, seed);

        KernelExperiment<RDFGraphKernel> exp = new RDFKernelRunTimeExperiment(new RDFIntersectionSubTreeKernel(depth, 1, inference, true, false), seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running IST: " + frac);
        exp.run();
        res.addResult(exp.getResults().get(0));
      }

      resTable.addResult(res);
    }


    long tic, toc;



    resTable.newRow("WL");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {

        createAffiliationPredictionDataSet(frac,seed);
        tic = System.currentTimeMillis();
        PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
        toc = System.currentTimeMillis();

        KernelExperiment<GraphKernel> exp = new GraphKernelRunTimeExperiment(new ECML2013WLSubTreeKernel(iteration), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running WL: " + frac);
        exp.run();
        res.addResult(exp.getResults().get(0));

        double[] comps = {2 * (toc-tic) + res.getScore()};
        Result resC = new Result(comps,"comp time 2");
        res.addResult(resC);
      }

      resTable.addResult(res);
    }
    /*
    resTable.newRow("");
    for (double frac : fractions) {
      createAffiliationPredictionDataSet(frac);
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
      toc = System.currentTimeMillis();


      KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphPathKernel(2,1), seeds, parms, ds.getGraphs(), labels);

      System.out.println("Running IGP: " + frac);
      exp.run();

      double[] comps =  {0,0};
  comps[0] = 2*(toc-tic) + exp.getResults().get(exp.getResults().size()-1).getScore();
      comps[1] = 2*(toc-tic) + exp.getResults().get(exp.getResults().size()-1).getScore();
          Result resC = new Result(comps,"comp time 2"); 
      exp.getResults().get(exp.getResults().size()-1).addResult(resC);

      resTable.addResult(exp.getResults().get(exp.getResults().size()-1));
    }*/


    resTable.newRow("IGW");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {
        createAffiliationPredictionDataSet(frac,seed);
        tic = System.currentTimeMillis();
        PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
        toc = System.currentTimeMillis();

        KernelExperiment<GraphKernel> exp = new GraphKernelRunTimeExperiment(new ECML2013IntersectionGraphWalkKernel(2,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGW: " + frac);
        exp.run();

        res.addResult(exp.getResults().get(0));

        double[] comps = {2 * (toc-tic) + res.getScore()};
        Result resC = new Result(comps,"comp time 2");
        res.addResult(resC);
      }

      resTable.addResult(res);
    }


    //resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "affiliation_runningtime.txt");


  }
View Full Code Here

    createGeoDataSet(1, 1, 10, "http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");

    LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    svmParms.setNumFolds(10);
   
    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(2);

    for (int depth : depths) {
      resTable.newRow("WL RDF, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLSubTreeKernel(it, depth, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

       
        System.out.println("Running WL RDF: " + depth + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

    for (int depth : depths) {
      resTable.newRow("WL RDF BoW, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLSubTreeWithTextKernel(it, depth, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

       
        System.out.println("Running WL RDF with Text: " + depth + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);
   
   
    ResultsTable table2 = new ResultsTable();
   
    for (int depth : depths) {
      resTable.newRow("ITP, depth="+depth);
      table2.newRow("");
     
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(depth, false, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running ITP: " + depth);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
        table2.addResult(res);
      }
    }
    System.out.println(resTable);
   
    for (int depth : depths) {
      resTable.newRow("ITP BoW, depth="+depth);
      table2.newRow("");
     
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexPathWithTextKernel(depth, false, inference, false), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running ITP with Text: " + depth);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
        table2.addResult(res);
      }
    }
    System.out.println(resTable);
   
    for (int depth : depths) {
      resTable.newRow("IST, depth="+depth);
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(depth, 1, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running IST: " + depth);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
   
    resTable.addCompResults(resTable.getBestResults());
    resTable.addCompResults(table2.getBestResults());
    System.out.println(resTable);

  }
View Full Code Here


    LibSVMParameters parms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    //parms.setEvalFunction(LibSVMParameters.F1);

    ResultsTable resTable = new ResultsTable();

   

    boolean inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("WL RDF, no inference, depth=" + i);
      for (int it : iterations) {
        ECML2013RDFWLSubTreeKernel k = new ECML2013RDFWLSubTreeKernel(it, i, inference, true, blankLabels);
               
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(k, seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running WL RDF: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    saveResults(resTable, "affiliation.ser");



    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("WL RDF, inference, depth=" + i);
      for (int it : iterations) {
        ECML2013RDFWLSubTreeKernel k = new ECML2013RDFWLSubTreeKernel(it, i, inference, true, blankLabels);
               
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(k, seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running WL RDF: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    saveResults(resTable, "affiliation.ser");


    inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IST, no inference, depth=" + i);
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(i, 1, inference, true, blankLabels), seeds, parms, dataset, instances, labels, blackList);

      System.out.println("Running IST: " + i + " ");
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    saveResults(resTable, "affiliation.ser");

    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IST, inference, depth=" + i);
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(i, 1, inference, true, blankLabels), seeds, parms, dataset, instances, labels, blackList);

      System.out.println("Running IST: " + i + " ");
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    saveResults(resTable, "affiliation.ser");


    inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IPST, no inference, depth=" + i);
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionPartialSubTreeKernel(i, 0.01, inference, true, blankLabels), seeds, parms, dataset, instances, labels, blackList);

      System.out.println("Running IPST: " + i + " ");
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    saveResults(resTable, "affiliation.ser");

    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IPST, inference, depth=" + i);
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionPartialSubTreeKernel(i, 0.01, inference, true, blankLabels), seeds, parms, dataset, instances, labels, blackList);

      System.out.println("Running IPST: " + i + " ");
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    saveResults(resTable, "affiliation.ser");




    List<GeneralPredictionDataSetParameters> dataSetsParams = new ArrayList<GeneralPredictionDataSetParameters>();

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, false));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, false));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, false));

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, true));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, true));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));


    int[] iterationsIG = {1,2};
    long tic, toc;

    for (GeneralPredictionDataSetParameters params : dataSetsParams) {
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(params);
      toc = System.currentTimeMillis();

      if (blankLabels) {
        ds.removeVertexAndEdgeLabels();
      }

      resTable.newRow("WL");
      for (int it : iterations) {
        KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013WLSubTreeKernel(it), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running WL: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);

      }
    }
    saveResults(resTable, "affiliation.ser");



    /*
    dataSetsParams = new ArrayList<GeneralPredictionDataSetParameters>();

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, false));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, false));

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, true));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, true));
     */


    for (GeneralPredictionDataSetParameters params : dataSetsParams) {
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(params);
      toc = System.currentTimeMillis();

      if (blankLabels) {
        ds.removeVertexAndEdgeLabels();
      }

      resTable.newRow("IGP");
      for (int it : iterationsIG) {
        KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphPathKernel(it,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGP: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);
      }
    }
    saveResults(resTable, "affiliation.ser");


   
    for (GeneralPredictionDataSetParameters params : dataSetsParams) {
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(params);
      toc = System.currentTimeMillis();

      if (blankLabels) {
        ds.removeVertexAndEdgeLabels();
      }

      resTable.newRow("IGW");
      for (int it : iterationsIG) {
        KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphWalkKernel(it,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGW: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);

      }
    }
   
   
    saveResults(resTable, "affiliation.ser");


    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "affiliation" + blankLabels + ".txt");

  }
View Full Code Here

    List<EvaluationFunction> evalFuncs = new ArrayList<EvaluationFunction>();
    evalFuncs.add(new Accuracy());
    evalFuncs.add(new F1());


    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(2);
    //resTable.setManWU(0.05);
   
   
    ///*
    for (int it : iterations) {
      resTable.newRow("WL, it: " + it);
      MoleculeGraphExperiment<UGraph<String>> exp = new MoleculeGraphExperiment<UGraph<String>>(new WLUSubTreeKernel(it, true), seeds, svmParms, graphs, labels, evalFuncs);

      System.out.println("Running WL, it: " + it);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
    //*/

    /*
    for (int it : iterations2) {
      resTable.newRow("WL separate RDF, it: " + it);
      MoleculeLinearGraphExperiment<DTGraph<String,String>> exp = new MoleculeLinearGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, true), seeds, linParms, rdfGraphs, labels, evalFuncs);

      System.out.println("Running WL separate RDF, it: " + it);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
    //*/

   
    for (int d = 1; d < 4; d++) {
      for (int it : iterations2) {   
        resTable.newRow("WL RDF, " + d + ", " + it);
       
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, d, false, true, true, false);
       
        RDFGraphKernelExperiment exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);

        System.out.println("Running WL RDF, it: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
      System.out.println(resTable);
    }


    for (int d = 1; d < 4; d++) {
      resTable.newRow("WL RDF type, d " + d);
      for (int it : iterations2) {   

        RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, d, false, true, true);
        k.setHubMap(GraphUtils.createRDFTypeHubMap(ts, false));

        RDFGraphKernelExperiment exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);

        System.out.println("Running WL RDF, type it: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
      System.out.println(resTable);
    }
    //*/

    int[] hf = {0,1,2,3,4,5,6,7};

    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL NonSig " + h);
        for (int it : iterations2) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, false, true, true);
          k.setHubMap(GraphUtils.createNonSigHubMap(nonSigDegreeHubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);


          System.out.println("Running WL RDF NonSig: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          }
        }
      }
      System.out.println(resTable);
    }
   
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL SB " + h);
        for (int it : iterations2) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, false, true, true);
          k.setHubMap(GraphUtils.createHubMap(hubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);


          System.out.println("Running WL RDF SB: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          }
        }
      }
      System.out.println(resTable);
    }


    /*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF IST SB " + h);
          RDFIntersectionSubTreeSlashBurnKernel k = new RDFIntersectionSubTreeSlashBurnKernel(i, 1, false, true);
          k.setHubThreshold(h);

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);


          System.out.println("Running RDF IST SB: " + i + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          } 
        }
    }
    System.out.println(resTable);
     */



    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);

  }
 
View Full Code Here

    svmParms.setWeights(EvaluationUtils.computeWeights(target));
    //---------

    //---------
    // Results Table
    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(3);
    //---------

    //-------
    //Data graph, with the label information
    List<Statement> allStmts3 = GraphUtils.getStatements4Depth(dataset, instances, 4, false);
    List<Statement> allStmts4;
    if (fullGraph) {
      allStmts4 = dataset.getStatements(null, null, null, false);
    } else {
      allStmts4 = GraphUtils.getStatements4Depth(dataset, instances, 5, false);
    }

    allStmts3.removeAll(blackList);
    allStmts4.removeAll(blackList);
    DTGraph<String,String> graph3 = org.nodes.data.RDF.createDirectedGraph(allStmts3, null, null); //used to generate instances
    DTGraph<String,String> graph4 = org.nodes.data.RDF.createDirectedGraph(allStmts4, null, null); //Used to find hubs
    System.out.println("Total nodes d3: " + graph3.nodes().size() + ", total nodes d4: " + graph4.nodes().size());

    List<DTNode<String,String>> instanceNodes3 = new ArrayList<DTNode<String,String>>();
    List<DTNode<String,String>> instanceNodes4 = new ArrayList<DTNode<String,String>>();
    for (Resource i : instances) {
      instanceNodes3.add(graph3.node(i.toString()));
      instanceNodes4.add(graph4.node(i.toString()));
    }
    //--------


    //--------
    // Get the different hub lists
    int maxHubs = 1000;

    // RDF.Type hubs
    List<DTNode<String,String>> RDFTypeHubs = GraphUtils.getTypeHubs(graph4);

    // Regular Degree
    Comparator<Node<String>> compRegDeg = new DegreeComparator<String>();
    MaxObserver<Node<String>> obsRegDeg = new MaxObserver<Node<String>>(maxHubs + instances.size(), compRegDeg);
    obsRegDeg.observe(graph4.nodes());
    List<DTNode<String,String>> regDegreeHubs = new ArrayList<DTNode<String,String>>();
    for (Node<String> n : obsRegDeg.elements()) {
      regDegreeHubs.add((DTNode<String,String>) n);
    }

    // Signature Degree
    Comparator<DTNode<String,String>> compSigDeg = new SlashBurn.SignatureComparator<String,String>();
    MaxObserver<DTNode<String,String>> obsSigDeg = new MaxObserver<DTNode<String,String>>(maxHubs + instances.size(), compSigDeg);       
    obsSigDeg.observe(graph4.nodes());
    List<DTNode<String,String>> sigDegreeHubs = new ArrayList<DTNode<String,String>>(obsSigDeg.elements());

    // Informed Degree
    List<Integer> classes = new ArrayList<Integer>();
    for (double d : target) {
      classes.add((int) d);
    }
    Classified<DTNode<String, String>> classified = Classification.combine(instanceNodes4, classes);

    InformedAvoidance ia = new InformedAvoidance(graph4, classified, 4);

    // Rank hubs with the uninformed/informed degree comparators and keep only nodes
    // that InformedAvoidance considers viable hubs at depth 4.
    Comparator<DTNode<String, String>> compUnInformed = ia.uninformedComparator(4);
    MaxObserver<DTNode<String,String>> obsUnInformed = new MaxObserver<DTNode<String,String>>(maxHubs + instances.size(), compUnInformed);
    obsUnInformed.observe(graph4.nodes());
    List<DTNode<String,String>> unInformedDegreeHubs = new ArrayList<DTNode<String,String>>(obsUnInformed.elements());

    Iterator<DTNode<String, String>> ite = unInformedDegreeHubs.iterator();
    while(ite.hasNext())
      if(! ia.viableHub(ite.next(), 4, 4))
        ite.remove();

    Comparator<DTNode<String, String>> compInformed = ia.informedComparator(4);
    MaxObserver<DTNode<String,String>> obsInformed = new MaxObserver<DTNode<String,String>>(maxHubs + instances.size(), compInformed);
    obsInformed.observe(graph4.nodes());
    List<DTNode<String,String>> informedDegreeHubs = new ArrayList<DTNode<String,String>>(obsInformed.elements());

    ite = informedDegreeHubs.iterator();
    while(ite.hasNext())
      if(! ia.viableHub(ite.next(), 4, 4))
        ite.remove();

    // Remove hubs from list that are root nodes
    List<DTNode<String,String>> rn = new ArrayList<DTNode<String,String>>();
    Set<String> is = new HashSet<String>();
    for (Resource r : instances) {
      is.add(r.toString());
    }
    for (DTNode<String,String> n : graph4.nodes()) {
      if (is.contains(n.label())) {
        rn.add(n);
      }
    }
    RDFTypeHubs.removeAll(rn);
    regDegreeHubs.removeAll(rn);
    sigDegreeHubs.removeAll(rn);
    unInformedDegreeHubs.removeAll(rn);
    informedDegreeHubs.removeAll(rn);

    List<List<DTNode<String,String>>> hubLists = new ArrayList<List<DTNode<String,String>>>();
    hubLists.add(RDFTypeHubs);
    hubLists.add(regDegreeHubs);
    hubLists.add(sigDegreeHubs);
    //hubLists.add(unInformedDegreeHubs);
    //hubLists.add(informedDegreeHubs);


    boolean forward = true;
    int it = 6;
    int depth = 3;
    int[] hubThs = {0,1,2,3,4,5,10,20,30,40,50,100};
    //int[] hubThs = {};

    MoleculeGraphExperiment<DTGraph<String,String>> exp;

    /*
    int[] iterations = {0,1,2,3,4,5,6};

    for (int i : iterations) {
      resTable.newRow("Baseline: " + i);
      List<DTNode<String,String>> newIN = new ArrayList<DTNode<String,String>>(instanceNodes3);

      exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(i, true, forward),
          seeds, svmParms, GraphUtils.getSubGraphs(graph3, newIN, depth), target, evalFuncs);

      System.out.println("running baseline, it: " + i);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
     */

    for (int th : hubThs) {
      resTable.newRow("Hub Threshold: " + th);

      for (List<DTNode<String,String>> hubList : hubLists) {

        // simplifyGraph3Way yields three simplified copies of the graph (hubs removed,
        // hubs relabelled, and relabelled + removed) with matching instance-node lists.
        List<List<DTNode<String,String>>> newIN = new ArrayList<List<DTNode<String,String>>>();
        List<DTGraph<String,String>> newGs = GraphUtils.simplifyGraph3Way(graph3, GraphUtils.createHubMap(hubList, th), instanceNodes3, newIN);

        ///*
        //List<DTNode<String,String>> newIN = new ArrayList<DTNode<String,String>>(instanceNodes3);
        //DTGraph<String,String> newG = GraphUtils.simplifyGraph(graph3, GraphUtils.createHubMap(hubList, th), newIN, false, true);
        //System.out.println("New #links: "+ newG.numLinks() + ", old #links: " + graph3.numLinks());

        exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, forward),
            seeds, svmParms, GraphUtils.getSubGraphs(newGs.get(0), newIN.get(0), depth), target, evalFuncs);

        System.out.println("running, remove hubs, th: " + th);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        //newIN = new ArrayList<DTNode<String,String>>(instanceNodes3);
        //newG = GraphUtils.simplifyGraph(graph3, GraphUtils.createHubMap(hubList, th), newIN, true, false);
        //System.out.println("New #links: "+ newG.numLinks() + ", old #links: " + graph3.numLinks());

        exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, forward),
            seeds, svmParms, GraphUtils.getSubGraphs(newGs.get(1), newIN.get(1), depth), target, evalFuncs);

        System.out.println("running, relabel hubs, th: " + th);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        //newIN = new ArrayList<DTNode<String,String>>(instanceNodes3);
        //newG = GraphUtils.simplifyGraph(graph3, GraphUtils.createHubMap(hubList, th), newIN, true, true);
        //System.out.println("New #links: "+ newG.numLinks() + ", old #links: " + graph3.numLinks());

        exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, forward),
            seeds, svmParms, GraphUtils.getSubGraphs(newGs.get(2), newIN.get(2), depth), target, evalFuncs);

        System.out.println("running, relabel+remove hubs, th: " + th);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
        //*/

      }
      System.out.println(resTable);
    }

    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);   
    System.out.println(resTable.allScoresToString());

    saveResults(resTable.toString(), "results_simp_" + System.currentTimeMillis() + ".txt");
    saveResults(resTable.allScoresToString(), "results_full_simp_" + System.currentTimeMillis() + ".txt");

    /*
     * Instance extraction experiments
     */
   
    // Discover average size
    List<DTGraph<String,String>> sg = GraphUtils.getSubGraphs(graph3, new ArrayList<DTNode<String,String>>(instanceNodes3), 3);
    double avg = 0;
    for (DTGraph<String,String> sgp : sg) {
      avg += sgp.size();
    }
    avg /= sg.size();
    System.out.println("Average Number of nodes: " + avg);
   
    // Results Table
    ResultsTable resTable2 = new ResultsTable();
    resTable2.setDigits(3);

    double[] fracs = {0.25, 0.5, 0.75, 1.0, 1.5, 2.0};

    // Compare instance extraction strategies (DEPTH, UNINFORMED, INFORMED) at several fractions of the average subgraph size
    for (double frac : fracs) {
      resTable2.newRow("Fraction: " + frac);
     
      List<DTGraph<String,String>> ihDepth = InstanceHelper.getInstances(graph4, instanceNodes4, target, InstanceHelper.Method.DEPTH, (int) Math.round(frac*avg), 4, true);
     
      exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, forward), seeds, svmParms, ihDepth, target, evalFuncs);
     
      System.out.println("running, Depth: " + frac);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable2.addResult(res);
      }
     
      List<DTGraph<String,String>> ihUnInformed = InstanceHelper.getInstances(graph4, instanceNodes4, target, InstanceHelper.Method.UNINFORMED, (int) Math.round(frac*avg), 4, true);
     
      exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, forward), seeds, svmParms, ihUnInformed, target, evalFuncs);
     
      System.out.println("running, UnInformed: " + frac);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable2.addResult(res);
      }
     
      List<DTGraph<String,String>> ihInformed = InstanceHelper.getInstances(graph4, instanceNodes4, target, InstanceHelper.Method.INFORMED, (int) Math.round(frac*avg), 4, true);

      exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, forward), seeds, svmParms, ihInformed, target, evalFuncs);
     
      System.out.println("running, Informed: " + frac);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable2.addResult(res);
      }
      System.out.println(resTable2);
    }
   
    resTable2.addCompResults(resTable2.getBestResults());
    System.out.println(resTable2);   
    System.out.println(resTable2.allScoresToString());

    saveResults(resTable2.toString(), "results_ie_" + System.currentTimeMillis() + ".txt");
    saveResults(resTable2.allScoresToString(), "results_full_ie_" + System.currentTimeMillis() + ".txt");
  }
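
The snippet above exercises most of the ResultsTable API: one row per experimental setting, one result per evaluation function, and a final comparison row derived from the best scores. Stripped of the experiment-specific code, the reporting pattern can be sketched as below. This is a minimal, hypothetical illustration rather than code from the repository: the runExperimentFor helper is a placeholder standing in for any routine that returns a List<Result>, and only ResultsTable and Result calls that already appear above are used.

    // Minimal sketch of the reporting pattern (runExperimentFor is a hypothetical placeholder).
    ResultsTable table = new ResultsTable();
    table.setDigits(3);                              // digits shown per reported score

    for (int th : new int[] {0, 5, 10}) {            // one row per experimental setting
      table.newRow("Hub Threshold: " + th);
      for (Result res : runExperimentFor(th)) {      // placeholder returning List<Result>
        table.addResult(res);                        // one column per evaluation function
      }
    }

    table.addCompResults(table.getBestResults());    // attach the best scores as comparison results
    System.out.println(table);                       // formatted summary
    System.out.println(table.allScoresToString());   // all individual scores

The next example uses ResultsTable in the same way for a set of property-prediction experiments, collecting accuracy and F1 per kernel and iteration.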


    PropertyPredictionDataSet dataset;
    PropertyPredictionExperiment exp;

    ResultsTable resultsWL  = new ResultsTable();
    ResultsTable resultsSTF = new ResultsTable();
    ResultsTable resultsSTP = new ResultsTable();
    ResultsTable resultsIGW = new ResultsTable();
    ResultsTable resultsIGP = new ResultsTable();

    Experimenter experimenter = new Experimenter(NUMBER_OF_PROC);
    Thread expT = new Thread(experimenter);
    expT.setDaemon(true);
    expT.start();



    try {
      for (BinaryPropertyPredictionDataSetParameters params : dataSetsParams) {
        dataset = DataSetFactory.createPropertyPredictionDataSet(params);
        dataset.removeSmallClasses(5);
        dataset.setLabels(labels);
        //dataset.removeVertexAndEdgeLabels();

        resultsWL.newRow(dataset.getLabel() + " WLSubTreeKernel");

        for (int i = 0; i < 4; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "WL" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new WLSubTreeKernel(i), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsWL.addResult(exp.getResults().getAccuracy());
            resultsWL.addResult(exp.getResults().getF1());
           
            System.out.println("Running WL, it " + i + " on " + dataset.getLabel());
          }
        }

       
        resultsSTF.newRow(dataset.getLabel() + " IntersectionFullSubTree");
        for (int i = 0; i < 3; i++) {

          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionFullSubTree" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionSubTreeKernel(i, 1), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTF.addResult(exp.getResults().getAccuracy());
            resultsSTF.addResult(exp.getResults().getF1());
           
            System.out.println("Running STF, it " + i + " on " + dataset.getLabel());
          }
        }

        resultsSTP.newRow(dataset.getLabel() + " IntersectionPartialSubTree");
        for (int i = 0; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionPartialSubTree" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionPartialSubTreeKernel(i, 0.01), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTP.addResult(exp.getResults().getAccuracy());
            resultsSTP.addResult(exp.getResults().getF1());
           
            System.out.println("Running STP, it " + i + " on " + dataset.getLabel());
          }
        }


       
        resultsIGP.newRow(dataset.getLabel() + " IntersectionGraphPath");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionGraphPath" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionGraphPathKernel(i, 1), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsIGP.addResult(exp.getResults().getAccuracy());
            resultsIGP.addResult(exp.getResults().getF1());
           
            System.out.println("Running IGP, it " + i + " on " + dataset.getLabel());
          }
        }       

        resultsIGW.newRow(dataset.getLabel() + " IntersectionGraphWalk");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionGraphWalk" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionGraphWalkKernel(i, 1), seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsIGW.addResult(exp.getResults().getAccuracy());
            resultsIGW.addResult(exp.getResults().getF1());
           
            System.out.println("Running IGW, it " + i + " on " + dataset.getLabel());
          }
        }
        //*/


      }
    } catch (Exception e) {
      e.printStackTrace();
    }

    experimenter.stop();

    while (expT.isAlive()) {
      try {
        Thread.sleep(1000);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }

    try {
      int fileId = (int) (Math.random() * 100000000);
      File file = new File(DATA_DIR + fileId + "_" + "all_results" + ".txt");
      PrintWriter fileOut = new PrintWriter(new FileOutputStream(file));

      List<Result> bestResults = new ArrayList<Result>();
     
      bestResults = resultsWL.getBestResults(bestResults);
      bestResults = resultsSTF.getBestResults(bestResults);
      bestResults = resultsSTP.getBestResults(bestResults);
      bestResults = resultsIGW.getBestResults(bestResults);
      bestResults = resultsIGP.getBestResults(bestResults);
     
      resultsWL.addCompResults(bestResults);
      resultsSTF.addCompResults(bestResults);
      resultsSTP.addCompResults(bestResults);
      resultsIGW.addCompResults(bestResults);
      resultsIGP.addCompResults(bestResults);
           
     
      fileOut.println(resultsWL);
      fileOut.println(resultsSTF);
      fileOut.println(resultsSTP);
      fileOut.println(resultsIGW);
      fileOut.println(resultsIGP);

      fileOut.println(resultsWL.allScoresToString());
      fileOut.println(resultsSTF.allScoresToString());
      fileOut.println(resultsSTP.allScoresToString());
      fileOut.println(resultsIGW.allScoresToString());
      fileOut.println(resultsIGP.allScoresToString());

      System.out.println(resultsWL);
      System.out.println(resultsSTF);
      System.out.println(resultsSTP);
      System.out.println(resultsIGW);
      System.out.println(resultsIGP);

      System.out.println(resultsWL.allScoresToString());
      System.out.println(resultsSTF.allScoresToString());
      System.out.println(resultsSTP.allScoresToString());
      System.out.println(resultsIGW.allScoresToString());
      System.out.println(resultsIGP.allScoresToString());


    } catch (Exception e) {
      e.printStackTrace();
    }
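
As in the snippet above, several ResultsTable instances (one per kernel) can share a single comparison baseline by threading one list of best results through getBestResults and feeding it back with addCompResults. A condensed, hypothetical sketch of that pattern, with tableA and tableB standing in for the per-kernel tables built above:

    List<Result> best = new ArrayList<Result>();
    best = tableA.getBestResults(best);   // fold in the per-column best scores of tableA
    best = tableB.getBestResults(best);   // ... and of tableB

    tableA.addCompResults(best);          // attach the combined best results to each table
    tableB.addCompResults(best);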