Package: org.data2semantics.proppred.kernels.rdfgraphkernels

Examples of org.data2semantics.proppred.kernels.rdfgraphkernels.RDFWLSubTreeKernel


            weights[(int) label - 1] = 1 / counts.get(label);
          }
          linParms.setWeightLabels(wLabels);
          linParms.setWeights(weights);

          RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true), seeds2, linParms, dataset, instances, target, blackList, evalFuncs);

          System.out.println("Running WL RDF: " + i + " " + it);
          exp.setDoCV(true);
          exp.run();
          res.add(exp.getResults());
View Full Code Here


   
    ///*
    for (int i : depths) {     
      resTable.newRow("RDF WL forward");
      for (int it : iterations) {
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, i, inference, true, forward, false);
       
        //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
        KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


        System.out.println("Running WL RDF fwd: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
       
      }
    }
    //*/

    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL forward Degree " + h);
        for (int it : iterations) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(degreeHubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


          System.out.println("Running WL RDF fwd Degree: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
         
        }
      }
    }
    System.out.println(resTable);
   

    ///*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL forward SB " + h);
        for (int it : iterations) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(hubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


View Full Code Here

        pred = LibLINEAR.trainTestSplit(fv, target, linearParms, linearParms.getSplitFraction());
        targetSplit = LibLINEAR.splitTestTarget(target, linearParms.getSplitFraction());
       
        // If we deal with on RDFWLSubTree, then we show the most used features, based on the featuremap created
        if (kernel instanceof RDFWLSubTreeKernel) {
          RDFWLSubTreeKernel k = (RDFWLSubTreeKernel) kernel;
          LibLINEARModel model = LibLINEAR.trainLinearModel(fv, target, linearParms);
          LibLINEARModel.WeightIndexPair[][] fw = model.getFeatureWeights();
         
          Map<String, String> lm = k.getInverseLabelMap();
         
          System.out.println("Map size: " + lm.size() + " fw length: " + fw[0].length + " fv max index: " + fv[0].getLastIndex());
         
          for (LibLINEARModel.WeightIndexPair[] fwc : fw) {
            Arrays.sort(fwc);
View Full Code Here

   

    for (int d : depths) {
      resTable.newRow("WL RDF, depth="+d);
      for (int it : iterations) {
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, d, inference, true);
       
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);
       
 
        System.out.println("Running WL RDF: " + d + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);
   


    for (int d : depths) {
      resTable.newRow("WL RDF BoW, depth="+d);
      for (int it : iterations) {
        RDFWLSubTreeWithTextKernel k = new RDFWLSubTreeWithTextKernel(it, d, inference, false);
        k.setDoTFIDFkernel(true);
       
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);
   
        System.out.println("Running WL RDF text: " + d + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

 
   
   
    for (int d : depths) {
      resTable.newRow("ITP, depth="+d);

      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(d, false, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running Edge Vertex Tree Path: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);
   


    for (int d : depths) {
      resTable.newRow("ITP BoW, depth="+d);

     
      RDFIntersectionTreeEdgeVertexPathWithTextKernel k = new RDFIntersectionTreeEdgeVertexPathWithTextKernel(d, false, inference, false);
      k.setDoTFIDFkernel(true);
     
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running Edge Vertex Tree Path with Text: " + d);
      exp.run();
View Full Code Here

  }
 
  // Computes feature vectors for the configured dataset via the superclass,
  // caching the result in {@code fv} before returning it.
  // NOTE(review): two identically-configured RDFWLSubTreeKernel instances are
  // passed — presumably the superclass needs separate kernel objects (e.g. one
  // per pass); confirm the duplication is intentional and not a copy-paste slip.
  @Main
  public SparseVector[] computeFeatureVectors() {
    fv = super.computeFeatureVectors(dataset, instances, blacklist,
        new RDFWLSubTreeKernel(iterations, depth, inference, normalize, false, reverse),
        new RDFWLSubTreeKernel(iterations, depth, inference, normalize, false, reverse)
          );
    return fv;
  }
View Full Code Here

   
   
    for (int depth : depths) {
      resTable.newRow("WL RDF forward, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLSubTreeKernel(it, depth, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

       
        System.out.println("Running WL RDF Fwd: " + depth + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

    for (int depth : depths) {
      resTable.newRow("WL RDF reverse, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLSubTreeKernel(it, depth, inference, true, true, false), seeds, svmParms, dataset, instances, labels, blackList);

       
        System.out.println("Running WL RDF Rev: " + depth + " " + it);
        exp.run();
View Full Code Here

      // Benchmark: wall-clock time (ms) per seed for each kernel variant on a
      // freshly sampled geo dataset; each section records one Result row.
      // NOTE(review): `frac`, `tic`, `toc` and the data-set fields are defined
      // in enclosing scope not visible here.

      // --- RDF WL feature-vector computation ---
      double[] comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        // Re-sample the dataset per seed so every timing run is independent.
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFFeatureVectorKernel k = new RDFWLSubTreeKernel(6,3,false, true);

        System.out.println("RDF WL FV: " + frac);
        tic = System.currentTimeMillis();
        k.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic; // elapsed milliseconds for this seed
      }
      Result res = new Result(comp, "RDF WL FV");
      resTable.addResult(res);
 
      // --- RDF WL full kernel-matrix computation ---
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFGraphKernel k = new RDFWLSubTreeKernel(6,3,false, true);

        System.out.println("RDF WL Kernel: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF WL Kernel");
      resTable.addResult(res);
 
      // --- Intersection Tree Path feature-vector computation ---
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFFeatureVectorKernel k = new RDFIntersectionTreeEdgeVertexPathKernel(3,false, false, true);

        System.out.println("RDF ITP FV: " + frac);
        tic = System.currentTimeMillis();
        k.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF ITP FV");
      resTable.addResult(res);
 
      // --- Intersection Tree Path full kernel-matrix computation ---
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFGraphKernel k = new RDFIntersectionTreeEdgeVertexPathKernel(3,false, false, true);

        System.out.println("RDF ITP Kernel: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF ITP Kernel");
      resTable.addResult(res);
   
     
      // --- Intersection SubTree kernel computation ---
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   
        RDFGraphKernel k = new RDFIntersectionSubTreeKernel(3,1, false, true);


        System.out.println("RDF IST: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF IST");
      resTable.addResult(res);
View Full Code Here

   
    ///*
    for (int i : depths) {     
      resTable.newRow("RDF WL forward");
      for (int it : iterations) {
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, i, inference, true, forward, false);
       
        //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
        KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


        System.out.println("Running WL RDF fwd: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
       
      }
    }
    resTable.addCompResults(resTable.getBestResults());
    //resTable.addCompResults(table2.getBestResults());
    System.out.println(resTable);
   
   
    //*/

    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL forward Degree " + h);
        for (int it : iterations) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(degreeHubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


          System.out.println("Running WL RDF fwd Degree: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
         
        }
      }
      resTable.addCompResults(resTable.getBestResults());
      //resTable.addCompResults(table2.getBestResults());
      System.out.println(resTable);
    }
   
   

    ///*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL forward SB " + h);
        for (int it : iterations) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(hubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


View Full Code Here

    resTable.setDigits(2);

    for (int depth : depths) {
      resTable.newRow("WL RDF, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLSubTreeKernel(it, depth, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

       
        System.out.println("Running WL RDF: " + depth + " " + it);
        exp.run();
View Full Code Here

   
    for (int d = 1; d < 4; d++) {
      for (int it : iterations2) {   
        resTable.newRow("WL RDF, " + d + ", " + it);
       
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, d, false, true, true, false);
       
        RDFGraphKernelExperiment exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);

        System.out.println("Running WL RDF, it: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
      System.out.println(resTable);
    }


    for (int d = 1; d < 4; d++) {
      resTable.newRow("WL RDF type, d " + d);
      for (int it : iterations2) {   

        RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, d, false, true, true);
        k.setHubMap(GraphUtils.createRDFTypeHubMap(ts, false));

        RDFGraphKernelExperiment exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);

        System.out.println("Running WL RDF, type it: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
      System.out.println(resTable);
    }
    //*/

    int[] hf = {0,1,2,3,4,5,6,7};

    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL NonSig " + h);
        for (int it : iterations2) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, false, true, true);
          k.setHubMap(GraphUtils.createNonSigHubMap(nonSigDegreeHubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);


          System.out.println("Running WL RDF NonSig: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
         
        }
      }
      System.out.println(resTable);
    }
   
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL SB " + h);
        for (int it : iterations2) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, false, true, true);
          k.setHubMap(GraphUtils.createHubMap(hubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, ts, instances, labels, new ArrayList<Statement>(), evalFuncs);


View Full Code Here

TOP

Related Classes of org.data2semantics.proppred.kernels.rdfgraphkernels.RDFWLSubTreeKernel

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.