Package org.data2semantics.exp.utils

Examples of org.data2semantics.exp.utils.ResultsTable
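The fragments below all come from experiment classes that use ResultsTable the same way: start a labelled row per experiment configuration, add one Result per evaluation score, and print the table at the end, optionally after marking the best results for comparison. The following minimal sketch is assembled from the calls that occur in the fragments; it is shown fragment-style like the other examples, the score arrays are placeholders, and the comments on setDigits and setManWU only reflect what the method names suggest.

    // Minimal ResultsTable usage sketch (placeholder scores; in the real
    // experiments each Result is produced by a KernelExperiment and holds
    // one score per seed/fold).
    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(3);   // decimals shown when printing the table
    resTable.setManWU(0.05); // presumably the Mann-Whitney U significance level

    for (int depth : new int[]{1, 2, 3}) {
      resTable.newRow("WL RDF, depth " + depth); // one labelled row per setting
      resTable.addResult(new Result(new double[]{0.81, 0.84, 0.79}, "accuracy"));
      resTable.addResult(new Result(new double[]{0.77, 0.80, 0.75}, "F1"));
    }

    resTable.addCompResults(resTable.getBestResults()); // compare rows against the best results
    System.out.println(resTable);                       // compact table
    System.out.println(resTable.allScoresToString());   // every individual score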


    svmParms.setNumFolds(10);

    svmParms.setWeightLabels(EvaluationUtils.computeWeightLabels(target));
    svmParms.setWeights(EvaluationUtils.computeWeights(target));

    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(2);

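    // Build a single directed graph over all non-blacklisted statements and extract its SlashBurn hubs.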
    List<Statement> allStmts = dataset.getStatements(null, null, null, inference);
    allStmts.removeAll(blackList);
    DTGraph<String,String> sGraph = org.nodes.data.RDF.createDirectedGraph(allStmts, null, null);
    System.out.println("Total nodes: " + sGraph.nodes().size());
    List<DTNode<String,String>> hubs = SlashBurn.getHubs(sGraph, 1, true);

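    // Collect the top-ranked nodes under the SlashBurn signature comparator (degreeHubs).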
    Comparator<DTNode<String,String>> comp = new SlashBurn.SignatureComparator<String,String>();
    MaxObserver<DTNode<String,String>> obs = new MaxObserver<DTNode<String,String>>(hubs.size() + instances.size(), comp);   
    obs.observe(sGraph.nodes());
    List<DTNode<String,String>> degreeHubs = new ArrayList<DTNode<String,String>>(obs.elements());

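    // Collect the top-ranked nodes by plain degree, without the signature (nonSigDegreeHubs).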
    Comparator<Node<String>> comp2 = new DegreeComparator<String>();
    MaxObserver<Node<String>> obs2 = new MaxObserver<Node<String>>(hubs.size() + instances.size(), comp2);
    obs2.observe(sGraph.nodes());
    List<DTNode<String,String>> nonSigDegreeHubs = new ArrayList<DTNode<String,String>>();
    for (Node<String> n : obs2.elements()) {
      nonSigDegreeHubs.add((DTNode<String,String>) n);
    }

    List<DTNode<String,String>> in = new ArrayList<DTNode<String,String>>();
   
    for (Resource i : instances) {
      in.add(sGraph.node(i.toString()));
    }
   
   
    /*
    List<DTGraph<String,String>> ihDepth = InstanceHelper.getInstances(sGraph, in, target, InstanceHelper.Method.DEPTH, 200, 4); 
    List<DTGraph<String,String>> ihUnInformed = InstanceHelper.getInstances(sGraph, in, target, InstanceHelper.Method.UNINFORMED, 200, 4);
    List<DTGraph<String,String>> ihInformed = InstanceHelper.getInstances(sGraph, in, target, InstanceHelper.Method.INFORMED, 200, 4);
    //*/

    // Remove hubs from list that are root nodes
    List<DTNode<String,String>> rn = new ArrayList<DTNode<String,String>>();
    Set<String> is = new HashSet<String>();
    for (Resource r : instances) {
      is.add(r.toString());
    }
    for (DTNode<String,String> n : sGraph.nodes()) {
      if (is.contains(n.label())) {
        rn.add(n);
      }
    }
    hubs.removeAll(rn);       
    degreeHubs.removeAll(rn);
    nonSigDegreeHubs.removeAll(rn);

    System.out.println("Total SB hubs: " + hubs.size());
    System.out.println(hubs);
    System.out.println(degreeHubs);
    System.out.println(nonSigDegreeHubs);

    /*
    for (int i = 0; i < degreeHubs.size() && i < nonSigDegreeHubs.size(); i++) {
      if (!nonSigDegreeHubs.get(i).equals(degreeHubs.get(i))) {
        System.out.println(i + " " + nonSigDegreeHubs.get(i).label() + " " + degreeHubs.get(i).label());
      }
    }
     */


    /*
    Map<String,Integer> dMap  = GraphUtils.createDegreeHubMap(degreeHubs, 300);
    Map<String,Integer> sbMap = GraphUtils.createHubMap(hubs, 300);

    for (String k : dMap.keySet()) {
      int l = dMap.get(k);
      if (sbMap.get(k) != l) {
        System.out.println("fail in level: " + l + " " + sbMap.get(k));
      }

    }
     */


    //int[] hf = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20};

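    // Hub parameter values passed to the hub-map builders below; currently only {21}, the commented-out line above covers 0-20.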
    int[] hf = new int[1];
    for (int i = 0; i < hf.length; i++) {
      hf[i] = i+21;
    }

    /*
    for (int it : iterations) {
      resTable.newRow("WL DEPTH, it: " + it);
      MoleculeGraphExperiment<DTGraph<String,String>> exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, true), seeds, svmParms, ihDepth, target, evalFuncs);

      System.out.println("Running WL DEPTH, it: " + it);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
   
    for (int it : iterations) {
      resTable.newRow("WL UN IN, it: " + it);
      MoleculeGraphExperiment<DTGraph<String,String>> exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, true), seeds, svmParms, ihUnInformed, target, evalFuncs);

      System.out.println("Running WL UN IN, it: " + it);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
   
    for (int it : iterations) {
      resTable.newRow("WL INF, it: " + it);
      MoleculeGraphExperiment<DTGraph<String,String>> exp = new MoleculeGraphExperiment<DTGraph<String,String>>(new WLSubTreeKernel(it, true, true), seeds, svmParms, ihInformed, target, evalFuncs);

      System.out.println("Running WL INF, it: " + it);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
    //*/
   
   
   
   
    ///* 
    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("RDF WL, " + i + ", " + it);

        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, i, inference, true, forward, false);

        KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running WL RDF: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);
    //*/


    ///*
    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("RDF WL TYPE, " + i + ", " + it);

        RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
        k.setHubMap(GraphUtils.createRDFTypeHubMap(dataset, inference));
        k.setRelabel(relabel);
       
        KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running WL RDF TYPE: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);


    ///*
    for (int h : hf) {
      for (int i : depths) {     
        for (int it : iterations) {
          resTable.newRow("RDF WL Regular Degree, " + h + ", " + i + ", " + it);

          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createNonSigHubMap(nonSigDegreeHubs, h));
          k.setRelabel(relabel);

          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);

          System.out.println("Running WL RDF Regular Degree: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          }
        }
      }
    }
    System.out.println(resTable);
    //*/


    ///*
    for (int h : hf) {
      for (int i : depths) {     
        for (int it : iterations) {
          resTable.newRow("RDF WL Signature Degree (SB), " + h + ", " + i + ", " + it);

          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(hubs, h));
          k.setRelabel(relabel);

          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);

          System.out.println("Running WL RDF fwd SB: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          }
        }
      }
    }
    System.out.println(resTable);
    //*/

    ///*
    for (int i : depths) {     
      resTable.newRow("RDF IST, " + i);
      RDFIntersectionSubTreeKernel k = new RDFIntersectionSubTreeKernel(i, 1, inference, true);
     
      KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);

      System.out.println("Running RDF IST: " + i);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
    //*/

    ///*
    for (int i : depths) {     
      resTable.newRow("RDF IST TYPE, " + i);
      RDFIntersectionSubTreeSlashBurnKernel k = new RDFIntersectionSubTreeSlashBurnKernel(i, 1, inference, true);
      k.setHubMap(GraphUtils.createRDFTypeHubMap(dataset, inference));

      KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);

      System.out.println("Running RDF IST TYPE: " + i);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
    //*/


    ///*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF IST Regular Degree, " + h + ", " + i);
        RDFIntersectionSubTreeSlashBurnKernel k = new RDFIntersectionSubTreeSlashBurnKernel(i, 1, inference, true);
        k.setHubMap(GraphUtils.createNonSigHubMap(nonSigDegreeHubs, h));

        KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running RDF IST Regular Degree: " + i + " " + h);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);
    //*/


    ///*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF IST Signature Degree (SB), " + h + ", " + i);
        RDFIntersectionSubTreeSlashBurnKernel k = new RDFIntersectionSubTreeSlashBurnKernel(i, 1, inference, true);
        k.setHubMap(GraphUtils.createHubMap(hubs, h));

        KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running RDF IST Signature Degree (SB): " + i + " " + h);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);
    //*/


 

    resTable.addCompResults(resTable.getBestResults());
    //resTable.addCompResults(table2.getBestResults());
    System.out.println(resTable);
   
    System.out.println(resTable.allScoresToString());
  }
 


    int[] iterations = {0,2,4,6};
   
    createCommitteeMemberPredictionDataSet();
   

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);
    resTable.setDigits(3);
   
    boolean inference = true;
    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("");
       
       
        LibSVMParameters parms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true), seeds, parms, dataset, instances, labels, blackList);
       
        System.out.println("Running WL RDF: " + i + " " + it);
        exp.run();
       
        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
   
    saveResults(resTable, "cmp.ser");
   
    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "cmp_full.txt");
   
  }

    double[] fractions = {0.2, 0.4, 0.6, 0.8, 1};
    double[] fractionsSlow = {0.2, 0.4, 0.6, 0.8, 1};
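    // Each block below times one kernel or feature-vector computation on growing dataset fractions and stores the wall-clock time (ms) as a "comp time" Result.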

   
    ResultsTable resTable = new ResultsTable();
   
   
    resTable.newRow("WLRDF FV");
    for (double frac : fractions) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasUnitClass");   

      RDFFeatureVectorKernel k = new RDFWLSubTreeKernel(6,3,false, true);
     
      System.out.println("RDF WL FV: " + frac);
      tic = System.currentTimeMillis();
      k.computeFeatureVectors(dataset, instances, blackList);
      toc = System.currentTimeMillis();
      double[] comp = {toc-tic};
      Result res = new Result(comp, "comp time");
      resTable.addResult(res);
    }
    System.out.println(resTable);
   
    resTable.newRow("WLRDF Kernel");
    for (double frac : fractions) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasUnitClass");   

      RDFGraphKernel k = new RDFWLSubTreeKernel(6,3,false, true);
     
      System.out.println("RDF WL Kernel: " + frac);
      tic = System.currentTimeMillis();
      k.compute(dataset, instances, blackList);
      toc = System.currentTimeMillis();
      double[] comp = {toc-tic};
      Result res = new Result(comp, "comp time");
      resTable.addResult(res);
    }
    System.out.println(resTable);
   
    resTable.newRow("WLRDF String FV");
    for (double frac : fractions) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasUnitClass");   
      RDFFeatureVectorKernel k = new RDFWLSubTreeKernelString(6,3, false, true);
 
     
      System.out.println("RDF WL String FV: " + frac);
      tic = System.currentTimeMillis();
      k.computeFeatureVectors(dataset, instances, blackList);
      toc = System.currentTimeMillis();
      double[] comp = {toc-tic};
      Result res = new Result(comp, "comp time");
      resTable.addResult(res);
    }
    System.out.println(resTable);
   
    resTable.newRow("WLRDF String Kernel");
    for (double frac : fractions) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasUnitClass");   
      RDFGraphKernel k = new RDFWLSubTreeKernelString(6,3, false, true);
 
     
      System.out.println("RDF WL String: " + frac);
      tic = System.currentTimeMillis();
      k.compute(dataset, instances, blackList);
      toc = System.currentTimeMillis();
      double[] comp = {toc-tic};
      Result res = new Result(comp, "comp time");
      resTable.addResult(res);
    }
    System.out.println(resTable);
 
   
   
    resTable.newRow("RDF IST");
    for (double frac : fractions) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasUnitClass");   
      RDFGraphKernel k = new RDFIntersectionSubTreeKernel(3,1, false, true);
 
     
      System.out.println("RDF IST: " + frac);
      tic = System.currentTimeMillis();
      k.compute(dataset, instances, blackList);
      toc = System.currentTimeMillis();
      double[] comp = {toc-tic};
      Result res = new Result(comp, "comp time");
      resTable.addResult(res);
    }
    System.out.println(resTable);
   
   
   
   
    resTable.newRow("WL FV");
    for (double frac : fractionsSlow) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasUnitClass");   
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
      toc = System.currentTimeMillis();
      double dsComp = toc-tic;
     
      FeatureVectorKernel k = new WLSubTreeKernel(6,true);
     
      System.out.println("WL: " + frac);
      tic = System.currentTimeMillis();
      k.computeFeatureVectors(ds.getGraphs());
      toc = System.currentTimeMillis();
      double[] comp = {(toc-tic) + dsComp};
      Result res = new Result(comp, "comp time");
      resTable.addResult(res);
    }   
    System.out.println(resTable);
   
   
    resTable.newRow("WL Kernel");
    for (double frac : fractionsSlow) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasUnitClass");   
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
      toc = System.currentTimeMillis();
      double dsComp = toc-tic;
     
      GraphKernel k = new WLSubTreeKernel(6,true);
     
      System.out.println("WL: " + frac);
      tic = System.currentTimeMillis();
      k.compute(ds.getGraphs());
      toc = System.currentTimeMillis();
      double[] comp = {(toc-tic) + dsComp};
      Result res = new Result(comp, "comp time");
      resTable.addResult(res);
    }   
    System.out.println(resTable);
   
   
   

    System.out.println(resTable);
     */

    //---------
    // Results Tables
    ResultsTable resTableWL = new ResultsTable();
    resTableWL.setDigits(3);
    ResultsTable resTableIST = new ResultsTable();
    resTableIST.setDigits(3);
    //---------
     
    MoleculeListMultiGraphExperiment<DTGraph<String,String>> expWL;
    MoleculeListSingleGraphExperiment expIST;
   
   
    ///*
   
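    // For each hub threshold, simplifyGraph3Way produces three graph variants (hubs removed, hubs relabelled, relabelled and removed); the WL and IST kernels are evaluated on each variant.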
    for (int th : hubThs) {
      resTableWL.newRow("Hub Threshold: " + th);
      resTableIST.newRow("Hub Threshold: " + th);

      for (List<DTNode<String,String>> hubList : hubLists) {
        boolean regDegree = false;
        int maxSize = hubList.size();
        if (hubList == regDegreeHubs) {
          regDegree = true;
        }

        List<List<DTNode<String,String>>> newIN = new ArrayList<List<DTNode<String,String>>>();
        List<DTGraph<String,String>> newGs = GraphUtils.simplifyGraph3Way(graph3, GraphUtils.createHubMap(hubList.subList(0, Math.min(maxSize, th)), 10000, regDegree), instanceNodes3, newIN);

        ///*
       
        // 1
        List<WLSubTreeKernel> kernelsWL = new ArrayList<WLSubTreeKernel>();
       
        for (int iti : it) {
          kernelsWL.add(new WLSubTreeKernel(iti, true, forward));
        }   
       
        expWL = new MoleculeListMultiGraphExperiment<DTGraph<String,String>>(kernelsWL,  seeds, svmParms, GraphUtils.getSubGraphs(newGs.get(0), newIN.get(0), depth), target, evalFuncs);

        System.out.println("WL running, remove hubs, th: " + th);
        expWL.run();

        for (Result res : expWL.getResults()) {
          resTableWL.addResult(res);
        }

        // 2
        kernelsWL = new ArrayList<WLSubTreeKernel>();
        for (int iti : it) {
          kernelsWL.add(new WLSubTreeKernel(iti, true, forward));
        }
 
        expWL = new MoleculeListMultiGraphExperiment<DTGraph<String,String>>(kernelsWL,
            seeds, svmParms, GraphUtils.getSubGraphs(newGs.get(1), newIN.get(1), depth), target, evalFuncs);

        System.out.println("WL running, relabel hubs, th: " + th);
        expWL.run();

        for (Result res : expWL.getResults()) {
          resTableWL.addResult(res);
        }
       
       

        // 3
        kernelsWL = new ArrayList<WLSubTreeKernel>();
        for (int iti : it) {
          kernelsWL.add(new WLSubTreeKernel(iti, true, forward));
        }
       
        expWL = new MoleculeListMultiGraphExperiment<DTGraph<String,String>>(kernelsWL,
            seeds, svmParms, GraphUtils.getSubGraphs(newGs.get(2), newIN.get(2), depth), target, evalFuncs);

        System.out.println("WL running, relabel+remove hubs, th: " + th);
        expWL.run();

        for (Result res : expWL.getResults()) {
          resTableWL.addResult(res);
        }
       
        //*/
       
        ///*
        //-------
        // IST
        //-------
       
        // 1
        List<RDFDTGraphIntersectionSubTreeKernel> kernelsIST = new ArrayList<RDFDTGraphIntersectionSubTreeKernel>();
        kernelsIST.add(new RDFDTGraphIntersectionSubTreeKernel(depth,1,true));

        expIST = new MoleculeListSingleGraphExperiment(kernelsIST, seeds, svmParms, newGs.get(0), newIN.get(0), target, evalFuncs);

        System.out.println("IST running, remove hubs, th: " + th);
        expIST.run();

        for (Result res : expIST.getResults()) {
          resTableIST.addResult(res);
        }

        // 2
        kernelsIST = new ArrayList<RDFDTGraphIntersectionSubTreeKernel>();
        kernelsIST.add(new RDFDTGraphIntersectionSubTreeKernel(depth,1,true));
 
        expIST = new MoleculeListSingleGraphExperiment(kernelsIST, seeds, svmParms, newGs.get(1), newIN.get(1), target, evalFuncs);

        System.out.println("IST running, relabel hubs, th: " + th);
        expIST.run();

        for (Result res : expIST.getResults()) {
          resTableIST.addResult(res);
        }

        // 3
        kernelsIST = new ArrayList<RDFDTGraphIntersectionSubTreeKernel>();
        kernelsIST.add(new RDFDTGraphIntersectionSubTreeKernel(depth,1,true));
       
        expIST = new MoleculeListSingleGraphExperiment(kernelsIST, seeds, svmParms, newGs.get(2), newIN.get(2), target, evalFuncs);

        System.out.println("IST running, relabel+remove hubs, th: " + th);
        expIST.run();

        for (Result res : expIST.getResults()) {
          resTableIST.addResult(res);
        }
       
        //*/

      }
      System.out.println(resTableWL);
      System.out.println(resTableIST);
    }

    resTableWL.addCompResults(resTableWL.getBestResults(resTableIST.getBestResults()));
    resTableIST.addCompResults(resTableIST.getBestResults(resTableWL.getBestResults()));
    System.out.println(resTableWL);   
    System.out.println(resTableWL.allScoresToString());
    System.out.println(resTableIST);   
    System.out.println(resTableIST.allScoresToString());

    saveResults(resTableWL.toString() + "\n" + resTableIST.toString(), "results_simp_" + System.currentTimeMillis() + ".txt");
    saveResults(resTableWL.allScoresToString() + "\n" + resTableIST.allScoresToString(), "results_full_simp_" + System.currentTimeMillis() + ".txt");
   
    //*/

  }
 

      weights[(int) label - 1] = 1 / counts.get(label);
    }
    linParms.setWeightLabels(wLabels);
    linParms.setWeights(weights);

    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(3);

    /*
    for (int depth : depths2) {
      resTable.newRow("");
     
     
      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFSimpleTextKernel(depth, inference, normalize), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

      System.out.println("Running Simple Text RDF: " + depth);
      exp.setDoCV(true);
      exp.setDoTFIDF(tfidf);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);

   
    for (int depth : depths2) {
      resTable.newRow("");
     
     
      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(depth, false, inference, normalize), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

      System.out.println("Running EdgeVertex RDF: " + depth);
      exp.setDoCV(true);
      exp.setDoTFIDF(tfidf);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
   
   
    for (int depth : depths2) {
      resTable.newRow("");
     
     
      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathWithTextKernel(depth, false, inference, normalize), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

      System.out.println("Running EdgeVertex with Text RDF: " + depth);
      exp.setDoCV(true);
      exp.setDoTFIDF(tfidf);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
    */
   
    for (int depth : depths) {
      resTable.newRow("");
      for (int it : iterations) {
        RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, depth, inference, normalize), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

        System.out.println("Running WL RDF: " + depth + " " + it);
        exp.setDoCV(true);
        exp.setDoTFIDF(tfidf);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

   
   
    for (int depth : depths) {
      resTable.newRow("");
      for (int it : iterations) {
        List<RDFFeatureVectorKernel> kernels = new ArrayList<RDFFeatureVectorKernel>();
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, depth, inference, normalize);
       
        kernels.add(k);
        kernels.add(new RDFSimpleTextKernel(depth, inference, normalize));

        RDFFeatureVectorKernel kernel = new RDFCombinedKernel(kernels, normalize);

       
        RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(kernel, seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

        System.out.println("Running WL RDF  + text: " + depth + " " + it);
        exp.setDoCV(true);
        exp.setDoTFIDF(tfidf);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

    /*

    for (int depth : depths2) {
      resTable.newRow("");
      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgePathKernel(depth, inference, true), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

      System.out.println("Running ITEP RDF: " + depth);
      exp.setDoCV(true);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);

    for (int depth : depths2) {
      resTable.newRow("");
      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(depth, inference, true), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

      System.out.println("Running ITEVP RDF: " + depth);
      exp.setDoCV(true);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
   
   
    for (int depth : depths) {
      resTable.newRow("");
      for (int it : iterations) {
        RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, depth, inference, true), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

        System.out.println("Running WL RDF: " + depth + " " + it);
        exp.setDoCV(true);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }

  */
   
    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);


  }
 

    double[] fractions = {0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1};
    double[] fractionsSlow = {0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1};
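    // Like the timing blocks above, but every measurement is repeated per seed and the per-seed runtimes are stored together in one "comp time" Result.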


    ResultsTable resTable = new ResultsTable();

    for (double frac : fractions) {
      resTable.newRow("");

      //resTable.newRow("WLRDF FV");
      //for (double frac : fractions) {
      double[] comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFFeatureVectorKernel k = new RDFWLSubTreeKernel(6,3,false, true);

        System.out.println("RDF WL FV: " + frac);
        tic = System.currentTimeMillis();
        k.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      Result res = new Result(comp, "comp time");
      resTable.addResult(res);
      //}
      //System.out.println(resTable);

      //resTable.newRow("WLRDF Kernel");
      //for (double frac : fractions) {
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFGraphKernel k = new RDFWLSubTreeKernel(6,3,false, true);

        System.out.println("RDF WL Kernel: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "comp time");
      resTable.addResult(res);
      //} 
      //System.out.println(resTable);

      //resTable.newRow("WLRDF text FV");
      //for (double frac : fractions) {
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFFeatureVectorKernel k = new RDFWLSubTreeWithTextKernel(6,3,false, false);

        System.out.println("RDF WL text FV: " + frac);
        tic = System.currentTimeMillis();
        TextUtils.computeTFIDF(Arrays.asList(k.computeFeatureVectors(dataset, instances, blackList)));       
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "comp time");
      resTable.addResult(res);
      //}
      //System.out.println(resTable);


      //resTable.newRow("EVP FV");
      //for (double frac : fractions) {
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFFeatureVectorKernel k = new RDFIntersectionTreeEdgeVertexPathKernel(3,false, false, true);

        System.out.println("RDF EVP FV: " + frac);
        tic = System.currentTimeMillis();
        k.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "comp time");
      resTable.addResult(res);
      //}
      //System.out.println(resTable);

      //resTable.newRow("EVP Kernel");
      //for (double frac : fractions) {
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFGraphKernel k = new RDFIntersectionTreeEdgeVertexPathKernel(3,false, false, true);

        System.out.println("RDF EVP Kernel: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "comp time");
      resTable.addResult(res);
      //}
      //System.out.println(resTable);

      //resTable.newRow("EVP text FV");
      //for (double frac : fractions) {
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFFeatureVectorKernel k = new RDFIntersectionTreeEdgeVertexPathWithTextKernel(3,false, false, false);

        System.out.println("EVP text FV: " + frac);
        tic = System.currentTimeMillis();
        TextUtils.computeTFIDF(Arrays.asList(k.computeFeatureVectors(dataset, instances, blackList)));       
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "comp time");
      resTable.addResult(res);
      //}
      //System.out.println(resTable);




      //resTable.newRow("RDF IST");
      //for (double frac : fractions) {
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   
        RDFGraphKernel k = new RDFIntersectionSubTreeKernel(3,1, false, true);


        System.out.println("RDF IST: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "comp time");
      resTable.addResult(res);
      //}
      //System.out.println(resTable);



     
    //resTable.newRow("WL FV");
    //for (double frac : fractionsSlow) {
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   
        tic = System.currentTimeMillis();
        PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
        toc = System.currentTimeMillis();
        double dsComp = toc-tic;

        FeatureVectorKernel k = new WLSubTreeKernel(6,true);

        System.out.println("WL: " + frac);
        tic = System.currentTimeMillis();
        k.computeFeatureVectors(ds.getGraphs());
        toc = System.currentTimeMillis();
        comp[i] = (toc-tic) + dsComp;
      }
      res = new Result(comp, "comp time");
      resTable.addResult(res);
    //}   
    //System.out.println(resTable);


    //resTable.newRow("WL Kernel");
    //for (double frac : fractionsSlow) {
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   
        tic = System.currentTimeMillis();
        PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
        toc = System.currentTimeMillis();
        double dsComp = toc-tic;

        GraphKernel k = new WLSubTreeKernel(6,true);

        System.out.println("WL: " + frac);
        tic = System.currentTimeMillis();
        k.compute(ds.getGraphs());
        toc = System.currentTimeMillis();
        comp[i] = (toc-tic) + dsComp;
      }
      res = new Result(comp, "comp time");
      resTable.addResult(res);
    }   
    //}
    System.out.println(resTable);
  }

    int[] depths = {1, 2, 3};
    int[] iterations = {0, 2, 4, 6};
    dataset = new RDFFileDataSet(dataDir, RDFFormat.NTRIPLES);

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);

    boolean inference = false;
         

    List<EvaluationFunction> evalFuncs = new ArrayList<EvaluationFunction>();
    evalFuncs.add(new Accuracy());
    evalFuncs.add(new F1());

    for (double frac : fractions) {
      createGeoDataSet((int)(1000 * frac), frac, seed, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
      List<Double> target = EvaluationUtils.createTarget(labels);
     
      LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
      linParms.setDoCrossValidation(false);
      linParms.setNumFolds(0);
      linParms.setSplitFraction((float) 0.7);
     
      Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
      int[] wLabels = new int[counts.size()];
      double[] weights = new double[counts.size()];

      for (double label : counts.keySet()) {
        wLabels[(int) label - 1] = (int) label;
        weights[(int) label - 1] = 1 / counts.get(label);
      }
      linParms.setWeightLabels(wLabels);
      linParms.setWeights(weights);
     

      System.out.println("Running fraction: " + frac);

     
      for (int i : depths) {     
        for (int it : iterations) {
          resTable.newRow("")

          KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);
       
          System.out.println("Running WL RDF: " + i + " " + it);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          }
        }
      }
     

      for (int i : depths) {     
        //for (int it : iterations) {
          resTable.newRow("")

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

          KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(i, false, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

         
          System.out.println("Running EVP: " + i);
          //System.out.println("Running WL RDF: " + i + " " + it);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          }
        }
      //}
    }

    saveResults(resTable, "geo_theme_" + seed + ".ser");

    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "geo_theme_full_" + seed + ".txt");
  }

    evalFuncs.add(new F1());




    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);
    resTable.setDigits(3);

    boolean inference = true;
    for (int d : depths) {
      resTable.newRow("");
      for (int it : iterations) {

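        // Run the experiment once per seed and merge the per-seed result lists afterwards via Result.mergeResultLists.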
        List<List<Result>> res = new ArrayList<List<Result>>();
        for (long seed : seeds) {
          long[] s2 = {seed};

          loadDataSet(fraction, seed);

          List<Double> targets = EvaluationUtils.createTarget(labels);

          LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
          linParms.setEvalFunction(new Accuracy());
          linParms.setDoCrossValidation(false);
          linParms.setSplitFraction((float) 0.8);
          linParms.setEps(0.1);

          Map<Double, Double> counts = EvaluationUtils.computeClassCounts(targets);
          int[] wLabels = new int[counts.size()];
          double[] weights = new double[counts.size()];

          for (double label : counts.keySet()) {
            wLabels[(int) label - 1] = (int) label;
            weights[(int) label - 1] = 1 / counts.get(label);
          }
          linParms.setWeightLabels(wLabels);
          linParms.setWeights(weights);


          RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, d, inference, true), s2, linParms, dataset, instances, targets, blackList, evalFuncs);
          res.add(exp.getResults());

          System.out.println("Running WL RDF: " + d + " " + it);
          exp.run();
        }
        for (Result res2 : Result.mergeResultLists(res)) {
          resTable.addResult(res2);
        }
      }
    }
    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);


  }

    List<EvaluationFunction> evalFuncs = new ArrayList<EvaluationFunction>();
    evalFuncs.add(new Accuracy());
    evalFuncs.add(new F1());

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);




    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("");

        LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
        KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running WL RDF: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

    for (int i : depths) {     
      resTable.newRow("");

      LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
      KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(i, false, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

      System.out.println("Running EVP: " + i);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    System.out.println(resTable);
   
    for (int i : depths) {     
      resTable.newRow("");

      LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
   
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(i, 1, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running IST: " + i);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }


    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
  }

    Experimenter experimenter = new Experimenter(NUMBER_OF_PROC);
    Thread expT = new Thread(experimenter);
    expT.setDaemon(true);
    expT.start();
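    // Experiments are submitted to the background Experimenter thread; each kernel family collects its scores in its own ResultsTable.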

    ResultsTable resultsWL = new ResultsTable();
    ResultsTable resultsSTF = new ResultsTable();
    ResultsTable resultsSTP = new ResultsTable();
    ResultsTable resultsIGW = new ResultsTable();
    ResultsTable resultsIGP = new ResultsTable();

    ResultsTable resultsWLadd = new ResultsTable();
    ResultsTable resultsSTFadd = new ResultsTable();
    ResultsTable resultsSTPadd = new ResultsTable();
    ResultsTable resultsIGWadd = new ResultsTable();
    ResultsTable resultsIGPadd = new ResultsTable();

   
    /** 
     * FIRST EXPERIMENT, STANDARD SETTINGS
     *
     */   
    List<PropertyPredictionDataSetParameters> dataSetsParams = new ArrayList<PropertyPredictionDataSetParameters>();

   
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, false, false));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, false, false));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, false, true));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, false, true));
   
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, true, false));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, true, false));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, true, true));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, true, true));
   
    /*
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetB, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, false, false));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetB, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, false, false));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetB, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, false, true));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetB, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, false, true));
   
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetB, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, true, false));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetB, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, true, false));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetB, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, true, true));
    dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetB, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, true, true));
    //*/   
 

    try {
     
     
 
 
      for (PropertyPredictionDataSetParameters params : dataSetsParams) {
        dataset = DataSetFactory.createPropertyPredictionDataSet(params);
        dataset.removeSmallClasses(5);
        dataset.removeVertexAndEdgeLabels();

        resultsWL.newRow(dataset.getLabel() + " WLSubTreeKernel");
        for (int i = 0; i < 3; i++) {
          if (experimenter.hasSpace()) { 
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "WL" + "_" + i + ".txt");
            WLSubTreeKernel kernel = new WLSubTreeKernel(i, true);
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), kernel, seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsWL.addResult(exp.getResults().getAccuracy());
            resultsWL.addResult(exp.getResults().getF1());
           
            System.out.println("Running WL, it " + i + " on " + dataset.getLabel());
          }
        }

       
        resultsSTF.newRow(dataset.getLabel() + " IntersectionFullSubTree");
        for (int i = 0; i < 3; i++) {

          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionFullSubTree" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionSubTreeKernel(i, 1), seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTF.addResult(exp.getResults().getAccuracy());
            resultsSTF.addResult(exp.getResults().getF1());
           
            System.out.println("Running STF, it " + i + " on " + dataset.getLabel());
          }

        }

        resultsSTP.newRow(dataset.getLabel() + " IntersectionPartialSubTree");
        for (int i = 0; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionPartialSubTree" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionPartialSubTreeKernel(i, 0.01), seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTP.addResult(exp.getResults().getAccuracy());
            resultsSTP.addResult(exp.getResults().getF1());
           
            System.out.println("Running STP, it " + i + " on " + dataset.getLabel());
          }
        }


       
        resultsIGP.newRow(dataset.getLabel() + " IntersectionGraphPath");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionGraphPath" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionGraphPathKernel(i, 1), seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsIGP.addResult(exp.getResults().getAccuracy());
            resultsIGP.addResult(exp.getResults().getF1());
           
            System.out.println("Running IGP, it " + i + " on " + dataset.getLabel());
          }
        }       

        resultsIGW.newRow(dataset.getLabel() + " IntersectionGraphWalk");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionGraphWalk" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionGraphWalkKernel(i, 1), seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsIGW.addResult(exp.getResults().getAccuracy());
            resultsIGW.addResult(exp.getResults().getF1());
           
            System.out.println("Running IGW, it " + i + " on " + dataset.getLabel());
          }
        }
       
      }
     
      //*/
     

      /******
       * ADDITIONAL EXPERIMENTS
       */
      dataSetsParams = new ArrayList<PropertyPredictionDataSetParameters>();
     
     
     
      dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, false, false));
      dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, false, false));
      dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 3, false, false));
      //dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 4, false, false));

     
      dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 1, false, true));
      dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 2, false, true));
      dataSetsParams.add(new PropertyPredictionDataSetParameters(testSetA, "http://swrc.ontoware.org/ontology#affiliation", "http://swrc.ontoware.org/ontology#employs", 3, false, true));
     

     
      for (PropertyPredictionDataSetParameters params : dataSetsParams) {
        dataset = DataSetFactory.createPropertyPredictionDataSet(params);
        dataset.removeSmallClasses(5);
        dataset.removeVertexAndEdgeLabels();

        resultsWLadd.newRow(dataset.getLabel() + " WLSubTreeKernel");
        for (int i = 0; i < 4; i++) {
          if (experimenter.hasSpace()) { 
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "WL" + "_" + i + ".txt");
            WLSubTreeKernel kernel = new WLSubTreeKernel(i, true);
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), kernel, seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsWLadd.addResult(exp.getResults().getAccuracy());
            resultsWLadd.addResult(exp.getResults().getF1());
           
            System.out.println("Running WL, it " + i + " on " + dataset.getLabel());
          }
        }

       
        resultsSTFadd.newRow(dataset.getLabel() + " IntersectionFullSubTree");
        for (int i = 0; i < 4; i++) {

          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionFullSubTree" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionSubTreeKernel(i, 1), seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTFadd.addResult(exp.getResults().getAccuracy());
            resultsSTFadd.addResult(exp.getResults().getF1());
           
            System.out.println("Running STF, it " + i + " on " + dataset.getLabel());
          }

        }

        resultsSTPadd.newRow(dataset.getLabel() + " IntersectionPartialSubTree");
        for (int i = 0; i < 4; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionPartialSubTree" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionPartialSubTreeKernel(i, 0.01), seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTPadd.addResult(exp.getResults().getAccuracy());
            resultsSTPadd.addResult(exp.getResults().getF1());
           
            System.out.println("Running STP, it " + i + " on " + dataset.getLabel());
          }
        }
       
        resultsIGPadd.newRow(dataset.getLabel() + " IntersectionGraphPath");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionGraphPath" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionGraphPathKernel(i, 1), seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsIGPadd.addResult(exp.getResults().getAccuracy());
            resultsIGPadd.addResult(exp.getResults().getF1());
           
            System.out.println("Running IGP, it " + i + " on " + dataset.getLabel());
          }
        }       

        resultsIGWadd.newRow(dataset.getLabel() + " IntersectionGraphWalk");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000);
            File file = new File(DATA_DIR + fileId + "_" + "IntersectionGraphWalk" + "_" + i + ".txt");
            exp = new PropertyPredictionExperiment(new PropertyPredictionDataSet(dataset), new IntersectionGraphWalkKernel(i, 1), seeds, cs, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsIGWadd.addResult(exp.getResults().getAccuracy());
            resultsIGWadd.addResult(exp.getResults().getF1());
           
            System.out.println("Running IGW, it " + i + " on " + dataset.getLabel());
          }
        }
      }
     

     
     
     
    /***********
     * END OF EXPERIMENTER
     *
     * 
     */
    } catch (Exception e) {
      e.printStackTrace();
    }

    experimenter.stop();

    while (expT.isAlive()) {
      try {
        Thread.sleep(1000);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }

    /********************************
     * PRINT OUT OF RESULTS
     *
     **/
    try {
      int fileId = (int) (Math.random() * 100000000);
      File file = new File(DATA_DIR + fileId + "_" + "all_results" + ".txt");
      PrintWriter fileOut = new PrintWriter(new FileOutputStream(file));

      List<Result> bestResults = new ArrayList<Result>();
     
      bestResults = resultsWL.getBestResults(bestResults);
      bestResults = resultsSTF.getBestResults(bestResults);
      bestResults = resultsSTP.getBestResults(bestResults);
      bestResults = resultsIGW.getBestResults(bestResults);
      bestResults = resultsIGP.getBestResults(bestResults);
     
      bestResults = resultsWLadd.getBestResults(bestResults);
      bestResults = resultsSTFadd.getBestResults(bestResults);
      bestResults = resultsSTPadd.getBestResults(bestResults);
      bestResults = resultsIGWadd.getBestResults(bestResults);
      bestResults = resultsIGPadd.getBestResults(bestResults);
     
     
      resultsWL.addCompResults(bestResults);
      resultsSTF.addCompResults(bestResults);
      resultsSTP.addCompResults(bestResults);
      resultsIGW.addCompResults(bestResults);
      resultsIGP.addCompResults(bestResults);
     
      resultsWLadd.addCompResults(bestResults);
      resultsSTFadd.addCompResults(bestResults);
      resultsSTPadd.addCompResults(bestResults);
      resultsIGWadd.addCompResults(bestResults);
      resultsIGPadd.addCompResults(bestResults);
     
     
      fileOut.println(resultsWL);
      fileOut.println(resultsSTF);
      fileOut.println(resultsSTP);
      fileOut.println(resultsIGW);
      fileOut.println(resultsIGP);

      fileOut.println(resultsWLadd);
      fileOut.println(resultsSTFadd);
      fileOut.println(resultsSTPadd);
      fileOut.println(resultsIGWadd);
      fileOut.println(resultsIGPadd);
     
     
      fileOut.println(resultsWL.allScoresToString());
      fileOut.println(resultsSTF.allScoresToString());
      fileOut.println(resultsSTP.allScoresToString());
      fileOut.println(resultsIGW.allScoresToString());
      fileOut.println(resultsIGP.allScoresToString());

      fileOut.println(resultsWLadd.allScoresToString());
      fileOut.println(resultsSTFadd.allScoresToString());
      fileOut.println(resultsSTPadd.allScoresToString());
      fileOut.println(resultsIGWadd.allScoresToString());
      fileOut.println(resultsIGPadd.allScoresToString());
     
     
      fileOut.close();

      System.out.println(resultsWL);
      System.out.println(resultsSTF);
      System.out.println(resultsSTP);
      System.out.println(resultsIGW);
      System.out.println(resultsIGP);

      System.out.println(resultsWLadd);
      System.out.println(resultsSTFadd);
      System.out.println(resultsSTPadd);
      System.out.println(resultsIGWadd);
      System.out.println(resultsIGPadd);
     
      System.out.println(resultsWL.allScoresToString());
      System.out.println(resultsSTF.allScoresToString());
      System.out.println(resultsSTP.allScoresToString());
      System.out.println(resultsIGW.allScoresToString());
      System.out.println(resultsIGP.allScoresToString());

      System.out.println(resultsWLadd.allScoresToString());
      System.out.println(resultsSTFadd.allScoresToString());
      System.out.println(resultsSTPadd.allScoresToString());
      System.out.println(resultsIGWadd.allScoresToString());
      System.out.println(resultsIGPadd.allScoresToString());
     

    } catch (Exception e) {
      e.printStackTrace();
    }