Package: org.data2semantics.exp.utils

Usage examples of org.data2semantics.exp.utils.ResultsTable


    int depth = 3;
    int iteration = 6;
    boolean inference = true;

    LibSVMParameters parms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    ResultsTable resTable = new ResultsTable();

    resTable.newRow("WL RDF");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {

        createGeoDataSet(seed,frac,6,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");

        KernelExperiment<RDFGraphKernel> exp = new RDFKernelRunTimeExperiment(new ECML2013RDFWLSubTreeKernel(iteration, depth, inference, true, false), seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running WL RDF: " + frac);
        exp.run();
        res.addResult(exp.getResults().get(0));
      }
      resTable.addResult(res);
    }

    resTable.newRow("IST");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {
        createGeoDataSet(seed,frac,6,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");

        KernelExperiment<RDFGraphKernel> exp = new RDFKernelRunTimeExperiment(new RDFIntersectionSubTreeKernel(depth, 1, inference, true, false), seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running IST: " + frac);
        exp.run();
        res.addResult(exp.getResults().get(0));
      }
      resTable.addResult(res);
    }


    long tic, toc;



    resTable.newRow("WL");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {
        createGeoDataSet(seed,frac,6,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");
        tic = System.currentTimeMillis();
        PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
        toc = System.currentTimeMillis();

        KernelExperiment<GraphKernel> exp = new GraphKernelRunTimeExperiment(new ECML2013WLSubTreeKernel(iteration), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running WL: " + frac);
        exp.run();

        res.addResult(exp.getResults().get(0));

        double[] comps = {2 * (toc-tic) + res.getScore()};
        Result resC = new Result(comps,"comp time 2")
        res.addResult(resC);
      }

      resTable.addResult(res)
    }

    /*
    resTable.newRow("");
    for (double frac : fractions) {
      createGeoDataSet(11,frac,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
      toc = System.currentTimeMillis();


      KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphPathKernel(2,1), seeds, parms, ds.getGraphs(), labels);

      System.out.println("Running IGP: " + frac);
      exp.run();

      double[] comps =  {0,0};
      comps[0] = 2*(toc-tic) + exp.getResults().get(exp.getResults().size()-1).getScore();
      comps[1] = 2*(toc-tic) + exp.getResults().get(exp.getResults().size()-1).getScore();
      Result resC = new Result(comps,"comp time 2"); 
      exp.getResults().get(exp.getResults().size()-1).addResult(resC);


      resTable.addResult(exp.getResults().get(exp.getResults().size()-1)); 
    }
     */

    resTable.newRow("IGW");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {
        createGeoDataSet(seed,frac,6,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");
        tic = System.currentTimeMillis();
        PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
        toc = System.currentTimeMillis();


        KernelExperiment<GraphKernel> exp = new GraphKernelRunTimeExperiment(new ECML2013IntersectionGraphWalkKernel(2,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGW: " + frac);
        exp.run();

        res.addResult(exp.getResults().get(0));

        double[] comps = {2 * (toc-tic) + res.getScore()};
        Result resC = new Result(comps,"comp time 2")
        res.addResult(resC);
      }

      resTable.addResult(res);
    }

    //resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "lithogenesis_runningtime.txt");


  }
View Full Code Here


    dataset = new RDFFileDataSet(dataDir, RDFFormat.NTRIPLES);

    LibSVMParameters parms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);

    boolean inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("WL RDF, no inference, depth="+i);
      for (int it : iterations) {
        Experimenter experimenter = new Experimenter(2);
        Thread expT = new Thread(experimenter);
        expT.setDaemon(true);
        expT.start();       

        List<List<Result>> res = new ArrayList<List<Result>>();
        for (long seed : seeds) {
          long[] s2 = new long[1];
          s2[0] = seed;
          createGeoDataSet(seed, fraction, minSize, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
          KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new ECML2013RDFWLSubTreeKernel(it, i, inference, true, false), s2, parms, dataset, instances, labels, blackList);
          res.add(exp.getResults());

          System.out.println("Running WL RDF: " + i + " " + it);
          if (experimenter.hasSpace()) {
            experimenter.addExperiment(exp);
          }


        }

        experimenter.stop();
        try {
          while (expT.isAlive()) {
            Thread.sleep(1000);
          }
        } catch (Exception e) {
          e.printStackTrace();
        }

        for (Result res2 : Result.mergeResultLists(res)) {
          resTable.addResult(res2);
        }
      }
    }
    saveResults(resTable, "geo_theme.ser");


    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("WL RDF, inference, depth="+i);
      for (int it : iterations) {
        Experimenter experimenter = new Experimenter(2);
        Thread expT = new Thread(experimenter);
        expT.setDaemon(true);
        expT.start();


        List<List<Result>> res = new ArrayList<List<Result>>();
        for (long seed : seeds) {
          long[] s2 = new long[1];
          s2[0] = seed;
          createGeoDataSet(seed, fraction, minSize, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
          KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new ECML2013RDFWLSubTreeKernel(it, i, inference, true, false), s2, parms, dataset, instances, labels, blackList);
          res.add(exp.getResults());

          System.out.println("Running WL RDF: " + i + " " + it);
          if (experimenter.hasSpace()) {
            experimenter.addExperiment(exp);
          }


        }

        experimenter.stop();

        while (expT.isAlive()) {
          try {
            Thread.sleep(1000);
          } catch (Exception e) {
            e.printStackTrace();
          }
        }

        for (Result res2 : Result.mergeResultLists(res)) {
          resTable.addResult(res2);
        }
      }
    }
    saveResults(resTable, "geo_theme.ser");


    inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IST, no inference, depth="+i);

      Experimenter experimenter = new Experimenter(2);
      Thread expT = new Thread(experimenter);
      expT.setDaemon(true);
      expT.start();

      List<List<Result>> res = new ArrayList<List<Result>>();
      for (long seed : seeds) {
        long[] s2 = new long[1];
        s2[0] = seed;
        createGeoDataSet(seed, fraction,  minSize, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(i, 1, inference, true, false), s2, parms, dataset, instances, labels, blackList);
        res.add(exp.getResults());

        System.out.println("Running IST: " + i);
        if (experimenter.hasSpace()) {
          experimenter.addExperiment(exp);
        }
      }

      experimenter.stop();

      while (expT.isAlive()) {
        try {
          Thread.sleep(1000);
        } catch (Exception e) {
          e.printStackTrace();
        }
      }

      for (Result res2 : Result.mergeResultLists(res)) {
        resTable.addResult(res2);
      }
    }
    saveResults(resTable, "geo_theme.ser");


    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IST, inference, depth="+i);

      Experimenter experimenter = new Experimenter(2);
      Thread expT = new Thread(experimenter);
      expT.setDaemon(true);
      expT.start();

      List<List<Result>> res = new ArrayList<List<Result>>();
      for (long seed : seeds) {
        long[] s2 = new long[1];
        s2[0] = seed;
        createGeoDataSet(seed, fraction,  minSize, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(i, 1, inference, true, false), s2, parms, dataset, instances, labels, blackList);
        res.add(exp.getResults());


        System.out.println("Running IST: " + i);
        if (experimenter.hasSpace()) {
          experimenter.addExperiment(exp);
        }

      }

      experimenter.stop();

      while (expT.isAlive()) {
        try {
          Thread.sleep(1000);
        } catch (Exception e) {
          e.printStackTrace();
        }
      }

      for (Result res2 : Result.mergeResultLists(res)) {
        resTable.addResult(res2);
      }
    }
    saveResults(resTable, "geo_theme.ser");


    inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IPST, no inference, depth="+i);

      Experimenter experimenter = new Experimenter(2);
      Thread expT = new Thread(experimenter);
      expT.setDaemon(true);
      expT.start();

      List<List<Result>> res = new ArrayList<List<Result>>();
      for (long seed : seeds) {
        long[] s2 = new long[1];
        s2[0] = seed;
        createGeoDataSet(seed, fraction,  minSize, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionPartialSubTreeKernel(i, 0.01, inference, true, false), s2, parms, dataset, instances, labels, blackList);
        res.add(exp.getResults());

        System.out.println("Running IPST: " + i);
        if (experimenter.hasSpace()) {
          experimenter.addExperiment(exp);
        }
      }

      experimenter.stop();

      while (expT.isAlive()) {
        try {
          Thread.sleep(1000);
        } catch (Exception e) {
          e.printStackTrace();
        }
      }

      for (Result res2 : Result.mergeResultLists(res)) {
        resTable.addResult(res2);
      }
    }
    saveResults(resTable, "geo_theme.ser");



    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IPST, inference, depth="+i);

      Experimenter experimenter = new Experimenter(2);
      Thread expT = new Thread(experimenter);
      expT.setDaemon(true);
      expT.start();

      List<List<Result>> res = new ArrayList<List<Result>>();
      for (long seed : seeds) {
        long[] s2 = new long[1];
        s2[0] = seed;
        createGeoDataSet(seed, fraction,  minSize, "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionPartialSubTreeKernel(i, 0.01, inference, true, false), s2, parms, dataset, instances, labels, blackList);
        res.add(exp.getResults());

        System.out.println("Running IPST: " + i);
        if (experimenter.hasSpace()) {
          experimenter.addExperiment(exp);
        }
      }

      experimenter.stop();

      while (expT.isAlive()) {
        try {
          Thread.sleep(1000);
        } catch (Exception e) {
          e.printStackTrace();
        }
      }

      for (Result res2 : Result.mergeResultLists(res)) {
        resTable.addResult(res2);
      }
    }
    saveResults(resTable, "geo_theme.ser");


    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "geo_theme" + fraction + ".txt");
  }
View Full Code Here


    LibSVMParameters parms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    //parms.setEvalFunction(LibSVM.F1);

    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(2);

    boolean inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("WL RDF, no inference, depth="+i);
      for (int it : iterations) {
        ECML2013RDFWLSubTreeKernel k = new ECML2013RDFWLSubTreeKernel(it, i, inference, true, blankLabels);
       
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(k, seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running WL RDF: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    saveResults(resTable, "geo_litho.ser");



    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("WL RDF, inference, depth="+i);
      for (int it : iterations) {
        ECML2013RDFWLSubTreeKernel k = new ECML2013RDFWLSubTreeKernel(it, i, inference, true, blankLabels)
       
        KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(k, seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running WL RDF: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    saveResults(resTable, "geo_litho.ser");




    inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IST, no inference, depth="+i);
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(i, 1, inference, true, blankLabels), seeds, parms, dataset, instances, labels, blackList);

      System.out.println("Running IST: " + i + " ");
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    saveResults(resTable, "geo_litho.ser");

    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IST, inference, depth="+i);
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(i, 1, inference, true, blankLabels), seeds, parms, dataset, instances, labels, blackList);

      System.out.println("Running IST: " + i + " ");
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    saveResults(resTable, "geo_litho.ser");


    inference = false;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IPST, no inference, depth="+i);
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionPartialSubTreeKernel(i, 0.01, inference, true, blankLabels), seeds, parms, dataset, instances, labels, blackList);

      System.out.println("Running IPST: " + i + " ");
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    saveResults(resTable, "geo_litho.ser");

    inference = true;
    for (int i = 1; i <= depth; i++) {
      resTable.newRow("IPST, inference, depth="+i);
      KernelExperiment<RDFGraphKernel> exp = new RDFOldKernelExperiment(new RDFIntersectionPartialSubTreeKernel(i, 0.01, inference, true, blankLabels), seeds, parms, dataset, instances, labels, blackList);

      System.out.println("Running IPST: " + i + " ");
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
    saveResults(resTable, "geo_litho.ser");



    List<GeneralPredictionDataSetParameters> dataSetsParams = new ArrayList<GeneralPredictionDataSetParameters>();

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, false));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, false));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, false));

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, true));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, true));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));


    int[] iterationsIG = {1,2};
    long tic, toc;

    for (GeneralPredictionDataSetParameters params : dataSetsParams) {
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(params);
      toc = System.currentTimeMillis();

      if (blankLabels) {
        ds.removeVertexAndEdgeLabels();
      }

      resTable.newRow("WL");
      for (int it : iterations) {
        KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013WLSubTreeKernel(it), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running WL: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);

      }
    }
    saveResults(resTable, "geo_litho.ser");


    /*
    dataSetsParams = new ArrayList<GeneralPredictionDataSetParameters>();

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, false));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, false));

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, true));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, true));
     */




    for (GeneralPredictionDataSetParameters params : dataSetsParams) {
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(params);
      toc = System.currentTimeMillis();

      if (blankLabels) {
        ds.removeVertexAndEdgeLabels();
      }

      resTable.newRow("IGP");
      for (int it : iterationsIG) {
        KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphPathKernel(it,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGP: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);
      }
    }
    saveResults(resTable, "geo_litho.ser");


    for (GeneralPredictionDataSetParameters params : dataSetsParams) {
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(params);
      toc = System.currentTimeMillis();

      if (blankLabels) {
        ds.removeVertexAndEdgeLabels();
      }

      resTable.newRow("IGW");
      for (int it : iterationsIG) {
        KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphWalkKernel(it,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGW: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);

      }
    }
    saveResults(resTable, "geo_litho.ser");




    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    saveResults(resTable.toString(), "geo_litho.txt");

  }
View Full Code Here

    boolean inference = false
    boolean forward = true;
   
    createCommitteeMemberPredictionDataSet();

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);
    resTable.setDigits(3);
   
   
    List<EvaluationFunction> evalFuncs = new ArrayList<EvaluationFunction>();
    evalFuncs.add(new Accuracy());
    evalFuncs.add(new F1());

    List<Double> target = EvaluationUtils.createTarget(labels);

    LibLINEARParameters linParms = new LibLINEARParameters(LibLINEARParameters.SVC_DUAL, cs);
    linParms.setEvalFunction(new Accuracy());
    linParms.setDoCrossValidation(false);
    linParms.setNumFolds(10);

    Map<Double, Double> counts = EvaluationUtils.computeClassCounts(target);
    int[] wLabels = new int[counts.size()];
    double[] weights = new double[counts.size()];

    for (double label : counts.keySet()) {
      wLabels[(int) label - 1] = (int) label;
      weights[(int) label - 1] = 1 / counts.get(label);
    }
    linParms.setWeightLabels(wLabels);
    linParms.setWeights(weights);

    LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    svmParms.setNumFolds(10);

    svmParms.setWeightLabels(EvaluationUtils.computeWeightLabels(target));
    svmParms.setWeights(EvaluationUtils.computeWeights(target));

 
   
    DTGraph<String,String> sGraph = org.nodes.data.RDF.createDirectedGraph(dataset.getStatements(null, null, null, inference), null, null);
    List<DTNode<String,String>> hubs = SlashBurn.getHubs(sGraph, 1, true);
   
    Comparator<DTNode<String,String>> comp = new SlashBurn.SignatureComparator<String,String>();
    MaxObserver<DTNode<String,String>> obs = new MaxObserver<DTNode<String,String>>(hubs.size(), comp);   
    obs.observe(sGraph.nodes());
   
    List<DTNode<String,String>> degreeHubs = new ArrayList<DTNode<String,String>>(obs.elements());
   
    // Remove hubs from list that are root nodes
    List<DTNode<String,String>> rn = new ArrayList<DTNode<String,String>>();
    Set<String> is = new HashSet<String>();
    for (Resource r : instances) {
      is.add(r.toString());
    }
    for (DTNode<String,String> hub : hubs) {
      if (is.contains(hub.label())) {
        rn.add(hub);
      }
    }
    hubs.removeAll(rn);       
    degreeHubs.removeAll(rn);
   
    System.out.println("Total SB hubs: " + hubs.size());
    System.out.println(hubs)
    System.out.println(degreeHubs);
   
    for (int i = 0; i < degreeHubs.size() && i < hubs.size(); i++) {
      if (!hubs.get(i).equals(degreeHubs.get(i))) {
        System.out.println(i + " " + hubs.get(i).label() + " " + degreeHubs.get(i).label());
      }
    }
   
   
    /*
    Map<String,Integer> dMap  = GraphUtils.createDegreeHubMap(degreeHubs, 300);
    Map<String,Integer> sbMap = GraphUtils.createHubMap(hubs, 300);
   
    for (String k : dMap.keySet()) {
      int l = dMap.get(k);
      if (sbMap.get(k) != l) {
        System.out.println("fail in level: " + l + " " + sbMap.get(k));
      }
     
    }
    */
   
   
    //int[] hf = {0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20};
   
    int[] hf = {1,2,3,4,5,10,20,30,40,50,60,70,80,90,100};

   
   
   
    ///*
    for (int i : depths) {     
      resTable.newRow("RDF WL forward");
      for (int it : iterations) {
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, i, inference, true, forward, false);
       
        //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
        KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


        System.out.println("Running WL RDF fwd: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
       
      }
    }
    //*/

    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL forward Degree " + h);
        for (int it : iterations) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(degreeHubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


          System.out.println("Running WL RDF fwd Degree: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
         
        }
      }
    }
    System.out.println(resTable);
   

    ///*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF WL forward SB " + h);
        for (int it : iterations) {
          RDFWLSubTreeSlashBurnKernel k = new RDFWLSubTreeSlashBurnKernel(it, i, inference, true, forward);
          k.setHubMap(GraphUtils.createHubMap(hubs, h));

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


          System.out.println("Running WL RDF fwd SB: " + i + " " + it + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
         
        }
      }
    }
    System.out.println(resTable);
    //*/


    /*
    for (int h : hf) {
      for (int i : depths) {     
        resTable.newRow("RDF IST SB " + h);
          RDFIntersectionSubTreeSlashBurnKernel k = new RDFIntersectionSubTreeSlashBurnKernel(i, 1, inference, true);
          k.setHubThreshold(h);

          //KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, target, blackList, evalFuncs);
          KernelExperiment<RDFGraphKernel> exp = new RDFGraphKernelExperiment(k, seeds, svmParms, dataset, instances, target, blackList, evalFuncs);


          System.out.println("Running RDF IST SB: " + i + " " + h);
          exp.run();

          for (Result res : exp.getResults()) {
            resTable.addResult(res);
          } 
        }
    }
    System.out.println(resTable);
    //*/

   
   
    /*
    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("RDF WL reverse");

        KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, i, inference, true, true, false), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running WL RDF rev: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        } 
      }
    }
    System.out.println(resTable);

    for (int i : depths) {     
      for (int it : iterations) {
        resTable.newRow("RDF WL Bi");

        KernelExperiment<RDFFeatureVectorKernel> exp = new RDFLinearKernelExperiment(new RDFWLBiSubTreeKernel(it, i, inference, true), seeds, linParms, dataset, instances, target, blackList, evalFuncs);

        System.out.println("Running WL RDF Bi: " + i + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        } 
      }
    }
    System.out.println(resTable);
  //*/



    resTable.addCompResults(resTable.getBestResults());
    //resTable.addCompResults(table2.getBestResults());
    System.out.println(resTable);
   
  }
 
View Full Code Here

    LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    svmParms.setNumFolds(10);
   
   

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);
    resTable.setDigits(2);

    boolean inference = true;



   

    for (int d : depths) {
      resTable.newRow("WL RDF, depth="+d);
      for (int it : iterations) {
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, d, inference, true);
       
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);
       
 
        System.out.println("Running WL RDF: " + d + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);
   


    for (int d : depths) {
      resTable.newRow("WL RDF BoW, depth="+d);
      for (int it : iterations) {
        RDFWLSubTreeWithTextKernel k = new RDFWLSubTreeWithTextKernel(it, d, inference, false);
        k.setDoTFIDFkernel(true);
       
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);
   
        System.out.println("Running WL RDF text: " + d + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

 
   
   
    for (int d : depths) {
      resTable.newRow("ITP, depth="+d);

      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(d, false, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running Edge Vertex Tree Path: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);
   


    for (int d : depths) {
      resTable.newRow("ITP BoW, depth="+d);

     
      RDFIntersectionTreeEdgeVertexPathWithTextKernel k = new RDFIntersectionTreeEdgeVertexPathWithTextKernel(d, false, inference, false);
      k.setDoTFIDFkernel(true);
     
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running Edge Vertex Tree Path with Text: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);

   
    for (int d : depths) {
      resTable.newRow("IST, depth="+d);

      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(d, 1, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running IST: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);
   

   
   
    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);


  }
View Full Code Here

    LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    svmParms.setNumFolds(10);
   
   

    ResultsTable resTable = new ResultsTable();
    resTable.setManWU(0.05);
    resTable.setDigits(2);



    boolean tfidf = false;
    boolean normalize = true;
    boolean inference = true;
   
   
    /*
    for (int d : depths) {
      resTable.newRow("");

      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgePathKernel(d, false, inference, true), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

      exp.setDoCV(true);
      exp.setDoTFIDF(false);

      System.out.println("Running Edge Path: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);
    */


    /*

    for (int d : depths) {
      resTable.newRow("");
      List<RDFFeatureVectorKernel> kernels = new ArrayList<RDFFeatureVectorKernel>();
      kernels.add(new RDFIntersectionTreeEdgeVertexPathKernel(d, false, inference, false));
      kernels.add(new RDFSimpleTextKernel(d, inference, false));

      RDFFeatureVectorKernel kernel = new RDFCombinedKernel(kernels, false);


      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(kernel, seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

      exp.setDoCV(true);
      exp.setDoTFIDF(true);

      System.out.println("Running Edge Vertex Path with Simple Text: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);

    for (int d : depths) {
      resTable.newRow("");

      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFSimpleTextKernel(d, inference, false), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);

      //RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFWLSubTreeKernel(it, d, inference, true), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);
      exp.setDoCV(true);
      exp.setDoTFIDF(true);

      System.out.println("Running Simple Text Kernel: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);
    */

    /*

    for (int d : depths) {
      resTable.newRow("");
      for (int it : iterations) {
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, d, inference, true);
        //k.setIgnoreLiterals(false);
       
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);
       
        //RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, targets, blackList, evalFuncs);   
        //exp.setDoCV(true);
        //exp.setDoTFIDF(false);

        System.out.println("Running WL RDF: " + d + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);
    */


    // WL RDF subtree kernel extended with literal text, per depth and iteration count.
    for (int d : depths) {
      resTable.newRow("WL RDF, depth="+d);
      for (int it : iterations) {
        RDFWLSubTreeWithTextKernel k = new RDFWLSubTreeWithTextKernel(it, d, inference, false);
        //k.setIgnoreLiterals(false);

        //RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);


        // Linear (feature-vector) experiment with cross-validation and TF-IDF weighting.
        RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(k, seeds, linParms, dataset, instances, targets, blackList, evalFuncs);
        exp.setDoCV(true);
        exp.setDoTFIDF(true);

        System.out.println("Running WL RDF text: " + d + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

    /*
    for (int d : depths) {
      resTable.newRow("");
      for (int it : iterations) {
        List<RDFFeatureVectorKernel> kernels = new ArrayList<RDFFeatureVectorKernel>();
        RDFWLSubTreeKernel k = new RDFWLSubTreeKernel(it, d, inference, false);
        k.setIgnoreLiterals(false);
       
        kernels.add(k);
        kernels.add(new RDFSimpleTextKernel(d, inference, false));

        RDFFeatureVectorKernel kernel = new RDFCombinedKernel(kernels, false);

        RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(kernel, seeds, linParms, dataset, instances, targets, blackList, evalFuncs);
        exp.setDoCV(true);
        exp.setDoTFIDF(true);

        System.out.println("Running Text + WL RDF: " + d + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    */

   
    /*
    for (int d : depths) {
      resTable.newRow("");

      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(d, false, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

     
      //RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(d, false, inference, true), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);
      //exp.setDoCV(true);
      //exp.setDoTFIDF(false);

      System.out.println("Running Edge Vertex Path: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);
    */


    // ITP bag-of-words kernel via the linear experiment (CV + TF-IDF), per depth.
    for (int d : depths) {
      resTable.newRow("ITP BoW, depth="+d);

      //RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexPathWithTextKernel(d, false, inference, false), seeds, svmParms, dataset, instances, labels, blackList);


      RDFLinearKernelExperiment exp = new RDFLinearKernelExperiment(new RDFIntersectionTreeEdgeVertexPathWithTextKernel(d, false, inference, false), seeds, linParms, dataset, instances, targets, blackList, evalFuncs);
      exp.setDoCV(true);
//      exp.setDoBinary(true);
      exp.setDoTFIDF(true);

      System.out.println("Running Edge Vertex Path with Text: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);

    /*
    for (int d : depths) {
      resTable.newRow("");

      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(d, 1, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      //exp.setDoCV(true);
      //exp.setDoTFIDF(false);

      System.out.println("Running IST: " + d);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }

    }
    System.out.println(resTable);
    */

   
   
    // Highlight the best result per column before the final print.
    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);


  }
 
View Full Code Here

    // Build the affiliation-prediction dataset (fixed seed 1 for reproducibility).
    createAffiliationPredictionDataSet(1);

    LibSVMParameters svmParms = new LibSVMParameters(LibSVMParameters.C_SVC, cs);
    svmParms.setNumFolds(10); // 10-fold cross-validation

    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(2); // decimals shown when printing

    // WL RDF kernel, bidirectional variant, per depth and iteration count.
    for (int depth : depths) {
      resTable.newRow("WL RDF Bi, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLBiSubTreeKernel(it, depth, inference, true), seeds, svmParms, dataset, instances, labels, blackList);


        System.out.println("Running WL RDF Bi: " + depth + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);



    // WL RDF kernel, forward-edge (default) variant.
    for (int depth : depths) {
      resTable.newRow("WL RDF forward, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLSubTreeKernel(it, depth, inference, true), seeds, svmParms, dataset, instances, labels, blackList);


        System.out.println("Running WL RDF Fwd: " + depth + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

    // WL RDF kernel, reverse-edge variant (extra boolean flags select direction).
    for (int depth : depths) {
      resTable.newRow("WL RDF reverse, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLSubTreeKernel(it, depth, inference, true, true, false), seeds, svmParms, dataset, instances, labels, blackList);


        System.out.println("Running WL RDF Rev: " + depth + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);

   
    /*

    for (int depth : depths) {
      resTable.newRow("WL RDF BoW, depth="+depth);
      for (int it : iterations) {
        RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFWLSubTreeWithTextKernel(it, depth, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

       
        System.out.println("Running WL RDF with Text: " + depth + " " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }
      }
    }
    System.out.println(resTable);
   
    */
   
    /*
    ResultsTable table2 = new ResultsTable();
   
    for (int depth : depths) {
      resTable.newRow("ITP, depth="+depth);
      table2.newRow("");
     
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexPathKernel(depth, false, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running EVP: " + depth);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
        table2.addResult(res);
      }
    }
    System.out.println(resTable);
 
     
    for (int depth : depths) {
      resTable.newRow("ITP with ST, depth="+depth);
      table2.newRow("");
     
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexWithSuperTypesPathKernel(depth, false, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running EVP with ST: " + depth);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
        table2.addResult(res);
      }
    }
    System.out.println(resTable);
   
   
    for (int depth : depths) {
      resTable.newRow("ITP + BoL, depth="+depth);
      table2.newRow("");
     
      List<RDFFeatureVectorKernel> kernels = new ArrayList<RDFFeatureVectorKernel>();
      kernels.add(new RDFIntersectionTreeEdgeVertexPathKernel(depth, false, inference, false));
      kernels.add(new RDFWLSubTreeKernel(0, depth, inference, false));
           
      RDFGraphKernel k = new RDFCombinedKernel(kernels, true);
     
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(k, seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running EVP + BoL: " + depth);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
        table2.addResult(res);
      }
    }
    System.out.println(resTable);
   
   
    for (int depth : depths) {
      resTable.newRow("ITP BoW, depth="+depth);
      table2.newRow("");
     
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionTreeEdgeVertexPathWithTextKernel(depth, false, inference, false), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running EVP with Text: " + depth);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
        table2.addResult(res);
      }
    }
    System.out.println(resTable);
   
    */
   
    /*
   
    for (int depth : depths) {
      resTable.newRow("IST, depth="+depth);
      RDFOldKernelExperiment exp = new RDFOldKernelExperiment(new RDFIntersectionSubTreeKernel(depth, 1, inference, true), seeds, svmParms, dataset, instances, labels, blackList);

      System.out.println("Running IST: " + depth);
      exp.run();

      for (Result res : exp.getResults()) {
        resTable.addResult(res);
      }
    }
   
    */
   
    // Mark best scores; the table2 comparison stays disabled with the block above.
    resTable.addCompResults(resTable.getBestResults());
    //resTable.addCompResults(table2.getBestResults());
    System.out.println(resTable);

  }
 
View Full Code Here

    long[] seeds = {11,21,31}; // one timing run per seed
    long tic, toc;             // wall-clock timestamps in milliseconds

    // Dataset fractions used to scale the runtime benchmark.
    double[] fractions = {0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1};

    ResultsTable resTable = new ResultsTable();

    // Runtime benchmark: for each dataset fraction, time five kernel variants
    // over all seeds and record per-seed wall-clock times (ms) as Results.
    for (double frac : fractions) {
      resTable.newRow("Fraction: " + frac);

      // WL RDF kernel, feature-vector computation.
      double[] comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");

        RDFFeatureVectorKernel k = new RDFWLSubTreeKernel(6,3,false, true);

        System.out.println("RDF WL FV: " + frac);
        tic = System.currentTimeMillis();
        k.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      Result res = new Result(comp, "RDF WL FV");
      resTable.addResult(res);

      // WL RDF kernel, full kernel-matrix computation.
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");

        RDFGraphKernel k = new RDFWLSubTreeKernel(6,3,false, true);

        System.out.println("RDF WL Kernel: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF WL Kernel");
      resTable.addResult(res);

      // ITP kernel, feature-vector computation.
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");

        RDFFeatureVectorKernel k = new RDFIntersectionTreeEdgeVertexPathKernel(3,false, false, true);

        System.out.println("RDF ITP FV: " + frac);
        tic = System.currentTimeMillis();
        k.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF ITP FV");
      resTable.addResult(res);

      // ITP kernel, full kernel-matrix computation.
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");

        RDFGraphKernel k = new RDFIntersectionTreeEdgeVertexPathKernel(3,false, false, true);

        System.out.println("RDF ITP Kernel: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF ITP Kernel");
      resTable.addResult(res);

      // IST kernel, full kernel-matrix computation.
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");
        RDFGraphKernel k = new RDFIntersectionSubTreeKernel(3,1, false, true);


        System.out.println("RDF IST: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF IST");
      resTable.addResult(res);

      // Print intermediate state after each fraction so long runs show progress.
      System.out.println(resTable);
    }
    System.out.println(resTable);
  }
View Full Code Here

    */

    LinkPredictionDataSet dataset;
    LinkPredictionExperiment exp;

    // One results table per kernel family.
    ResultsTable resultsWL  = new ResultsTable();
    ResultsTable resultsSTF = new ResultsTable();
    ResultsTable resultsSTP = new ResultsTable();
    ResultsTable resultsIGW = new ResultsTable();
    ResultsTable resultsIGP = new ResultsTable();

    // Background worker that executes queued experiments on NUMBER_OF_PROC threads.
    Experimenter experimenter = new Experimenter(NUMBER_OF_PROC);
    Thread expT = new Thread(experimenter);
    expT.setDaemon(true); // daemon so the worker never blocks JVM exit
    expT.start();



    try {
      for (LinkPredictionDataSetParameters params : dataSetsParams) {
        dataset = DataSetFactory.createLinkPredictonDataSet(params);
        //dataset.removeSmallClasses(5);

        resultsWL.newRow(dataset.getLabel() + " WLSubTreeKernel");
        for (int i = 0; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000)
            File file = new File(DATA_DIR + "_" + "WL" + fileId + "_" + i + ".txt");
            exp = new LinkPredictionExperiment(new LinkPredictionDataSet(dataset), new WLSubTreeKernel(i), new WLSubTreeKernel(i), 3.0/6.0, 3.0/6.0, seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsWL.addResult(exp.getResults().getAccuracy());
            resultsWL.addResult(exp.getResults().getF1());
            resultsWL.addResult(exp.getResults().getrPrecision());
            resultsWL.addResult(exp.getResults().getAveragePrecision());
            resultsWL.addResult(exp.getResults().getNdcg());
           
            System.out.println("Running WL, it " + i + " on " + dataset.getLabel());
          }
        }


        resultsSTF.newRow(dataset.getLabel() + " IntersectionFullSubTreeKernel");
        for (int i = 0; i < 3; i++) {

          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000)
            File file = new File(DATA_DIR + "_" + "IntersectionFullSubTree" + fileId + "_" + i + ".txt");
            exp = new LinkPredictionExperiment(new LinkPredictionDataSet(dataset), new IntersectionSubTreeKernel(i,1), new IntersectionSubTreeKernel(i,1), 3.0/6.0, 3.0/6.0, seeds, cs, maxClassSize, new FileOutputStream(file));
            experimenter.addExperiment(exp);
            resultsSTF.addResult(exp.getResults().getAccuracy());
            resultsSTF.addResult(exp.getResults().getF1());
            resultsSTF.addResult(exp.getResults().getrPrecision());
            resultsSTF.addResult(exp.getResults().getAveragePrecision());
            resultsSTF.addResult(exp.getResults().getNdcg());
           
            System.out.println("Running STF, it " + i + " on " + dataset.getLabel());
          }
        }

        resultsSTP.newRow(dataset.getLabel() + " IntersectionPartialSubTreeKernel");
        for (int i = 0; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000)
            File file = new File(DATA_DIR + "_" + "IntersectionPartialSubTree" + fileId + "_" + i + ".txt");
            exp = new LinkPredictionExperiment(new LinkPredictionDataSet(dataset), new IntersectionPartialSubTreeKernel(i,0.01), new IntersectionPartialSubTreeKernel(i,0.01), 3.0/6.0, 3.0/6.0, seeds, cs, maxClassSize, new FileOutputStream(file))
            experimenter.addExperiment(exp);
            resultsSTP.addResult(exp.getResults().getAccuracy());
            resultsSTP.addResult(exp.getResults().getF1());
            resultsSTP.addResult(exp.getResults().getrPrecision());
            resultsSTP.addResult(exp.getResults().getAveragePrecision());
            resultsSTP.addResult(exp.getResults().getNdcg());
           
            System.out.println("Running STP, it " + i + " on " + dataset.getLabel());
          }
        }


        /*
        resultsIGW.newRow(dataset.getLabel() + " IntersectionGraphWalkKernel");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000); 
            File file = new File(DATA_DIR + "_" + "IntersectionGraphWalk" + fileId + "_" + i + ".txt");
            exp = new LinkPredictionExperiment(new LinkPredictionDataSet(dataset), new IntersectionGraphWalkKernel(i,1), new IntersectionGraphWalkKernel(i,1), 0.5, 0.5, seeds, cs, maxClassSize, new FileOutputStream(file)); 
            experimenter.addExperiment(exp);
            resultsIGW.addResult(exp.getResults().getAccuracy());
            resultsIGW.addResult(exp.getResults().getF1());
            resultsIGW.addResult(exp.getResults().getrPrecision());
            resultsIGW.addResult(exp.getResults().getAveragePrecision());
            resultsIGW.addResult(exp.getResults().getNdcg());
           
            System.out.println("Running IGW, it " + i + " on " + dataset.getLabel());
          }
        }       

        resultsIGP.newRow(dataset.getLabel() + " IntersectionGraphPathKernel");
        for (int i = 1; i < 3; i++) {
          if (experimenter.hasSpace()) {   
            int fileId = (int) (Math.random() * 100000000); 
            File file = new File(DATA_DIR + "_" + "IntersectionGraphPath" + fileId + "_" + i + ".txt");
            exp = new LinkPredictionExperiment(new LinkPredictionDataSet(dataset), new IntersectionGraphPathKernel(i,1), new IntersectionGraphPathKernel(i,1), 0.5, 0.5, seeds, cs, maxClassSize, new FileOutputStream(file)); 
            experimenter.addExperiment(exp);
            resultsIGP.addResult(exp.getResults().getAccuracy());
            resultsIGP.addResult(exp.getResults().getF1());
            resultsIGP.addResult(exp.getResults().getrPrecision());
            resultsIGP.addResult(exp.getResults().getAveragePrecision());
            resultsIGP.addResult(exp.getResults().getNdcg());
           
            System.out.println("Running IGP, it " + i + " on " + dataset.getLabel());
          }
        }

        //*/

      }
    } catch (Exception e) {
      e.printStackTrace();
    }

    // Signal the worker to stop accepting work, then poll until its thread
    // has drained the queue and terminated.
    experimenter.stop();

    while (expT.isAlive()) {
      try {
        Thread.sleep(1000);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }

    try {
      int fileId = (int) (Math.random() * 100000000)
      File file = new File(DATA_DIR + fileId + "_" + "all_results" + ".txt");
      PrintWriter fileOut = new PrintWriter(new FileOutputStream(file));

     
      List<Result> bestResults = new ArrayList<Result>();
     
      bestResults = resultsWL.getBestResults(bestResults);
      bestResults = resultsSTF.getBestResults(bestResults);
      bestResults = resultsSTP.getBestResults(bestResults);
      bestResults = resultsIGW.getBestResults(bestResults);
      bestResults = resultsIGP.getBestResults(bestResults);
     
      resultsWL.addCompResults(bestResults);
      resultsSTF.addCompResults(bestResults);
      resultsSTP.addCompResults(bestResults);
      resultsIGW.addCompResults(bestResults);
      resultsIGP.addCompResults(bestResults);

     
      fileOut.println(resultsWL);
      fileOut.println(resultsSTF);
      fileOut.println(resultsSTP);
      fileOut.println(resultsIGW);
      fileOut.println(resultsIGP);

      fileOut.println(resultsWL.allScoresToString());
      fileOut.println(resultsSTF.allScoresToString());
      fileOut.println(resultsSTP.allScoresToString());
      fileOut.println(resultsIGW.allScoresToString());
      fileOut.println(resultsIGP.allScoresToString());

      System.out.println(resultsWL);
      System.out.println(resultsSTF);
      System.out.println(resultsSTP);
      System.out.println(resultsIGW);
      System.out.println(resultsIGP);

      System.out.println(resultsWL.allScoresToString());
      System.out.println(resultsSTF.allScoresToString());
      System.out.println(resultsSTP.allScoresToString());
      System.out.println(resultsIGW.allScoresToString());
      System.out.println(resultsIGP.allScoresToString());


    } catch (Exception e) {
      e.printStackTrace();
    }
View Full Code Here

    // Data graph built from all statements in the triple store.
    DTGraph<String,String> sGraph = org.nodes.data.RDF.createDirectedGraph(ts.getStatements(null, null, null, false), null, null);

    //---------
    // Results Table
    ResultsTable resTable = new ResultsTable();
    resTable.setDigits(3); // decimals shown when printing
    //---------

    //--- instance nodes: graph nodes whose label matches an instance resource
    List<DTNode<String,String>> instanceNodes = new ArrayList<DTNode<String,String>>();
    for (Resource i : instances) {
      instanceNodes.add(sGraph.node(i.toString()));
    }
    //--------


    //--------
    // Get the different hub lists
    int maxHubs = 1000; // cap on hubs kept per ranking (instances added on top)

    // RDF.Type hubs
    List<DTNode<String,String>> RDFTypeHubs = GraphUtils.getTypeHubs(sGraph);

    // Regular Degree: top nodes by plain degree
    Comparator<Node<String>> compRegDeg = new DegreeComparator<String>();
    MaxObserver<Node<String>> obsRegDeg = new MaxObserver<Node<String>>(maxHubs + instances.size(), compRegDeg);
    obsRegDeg.observe(sGraph.nodes());
    List<DTNode<String,String>> regDegreeHubs = new ArrayList<DTNode<String,String>>();
    for (Node<String> n : obsRegDeg.elements()) {
      regDegreeHubs.add((DTNode<String,String>) n);
    }

    // Signature Degree: top nodes by SlashBurn signature comparison
    Comparator<DTNode<String,String>> compSigDeg = new SlashBurn.SignatureComparator<String,String>();
    MaxObserver<DTNode<String,String>> obsSigDeg = new MaxObserver<DTNode<String,String>>(maxHubs + instances.size(), compSigDeg);
    obsSigDeg.observe(sGraph.nodes());
    List<DTNode<String,String>> sigDegreeHubs = new ArrayList<DTNode<String,String>>(obsSigDeg.elements());

    // Informed Degree
    List<Integer> classes = new ArrayList<Integer>();
    for (double d : target) {
      classes.add((int) d);
    }
    Classified<DTNode<String, String>> classified = Classification.combine(instanceNodes, classes);

    InformedAvoidance ia = new InformedAvoidance(sGraph, classified, 4)

    Comparator<DTNode<String, String>> compUnInformed = ia.uninformedComparator(4);
    MaxObserver<DTNode<String,String>> obsUnInformed = new MaxObserver<DTNode<String,String>>(maxHubs + instances.size(), compUnInformed);
    obsUnInformed.observe(sGraph.nodes());
    List<DTNode<String,String>> unInformedDegreeHubs = new ArrayList<DTNode<String,String>>(obsUnInformed.elements());

    Iterator<DTNode<String, String>> ite = unInformedDegreeHubs.iterator();
    while(ite.hasNext())
      if(! ia.viableHub(ite.next(), 4, 4))
        ite.remove();

    Comparator<DTNode<String, String>> compInformed = ia.informedComparator(4);
    MaxObserver<DTNode<String,String>> obsInformed = new MaxObserver<DTNode<String,String>>(maxHubs + instances.size(), compInformed);
    obsInformed.observe(sGraph.nodes());
    List<DTNode<String,String>> informedDegreeHubs = new ArrayList<DTNode<String,String>>(obsInformed.elements());

    ite = informedDegreeHubs.iterator();
    while(ite.hasNext())
      if(! ia.viableHub(ite.next(), 4, 4))
        ite.remove();

    // Remove hubs from list that are root nodes
    List<DTNode<String,String>> rn = new ArrayList<DTNode<String,String>>();
    Set<String> is = new HashSet<String>();
    for (Resource r : instances) {
      is.add(r.toString());
    }
    for (DTNode<String,String> n : sGraph.nodes()) {
      if (is.contains(n.label())) {
        rn.add(n);
      }
    }
    RDFTypeHubs.removeAll(rn);
    regDegreeHubs.removeAll(rn);
    sigDegreeHubs.removeAll(rn);
    unInformedDegreeHubs.removeAll(rn);
    informedDegreeHubs.removeAll(rn);

    // Only the signature-degree ranking is active; others are kept for reference.
    List<List<DTNode<String,String>>> hubLists = new ArrayList<List<DTNode<String,String>>>();
    //hubLists.add(RDFTypeHubs);
    //hubLists.add(regDegreeHubs);
    hubLists.add(sigDegreeHubs);
    //hubLists.add(unInformedDegreeHubs);
    //hubLists.add(informedDegreeHubs);

    boolean forward = true;         // WL kernel edge direction flag
    int it = 6;
    int depth = 3;                  // subgraph extraction depth
    int[] hubThs = {0,1,2,3,4,5,6}; // hub-removal thresholds to sweep
    //  int[] hubThs = {100};

    int[] iterations  =   {0,1,2,3,4,5,6};
    int[] iterations2 = {0,2,4,6,8,10,12};

    ///*
    // Baseline: undirected WL subtree kernels on the unmodified graphs.
    List<WLUSubTreeKernel> kernelsUWL = new ArrayList<WLUSubTreeKernel>();
    for (int i : iterations) {
      kernelsUWL.add(new WLUSubTreeKernel(i,true));
    }

    MoleculeListMultiGraphExperiment<UGraph<String>> exp = new MoleculeListMultiGraphExperiment<UGraph<String>>(kernelsUWL, seeds, svmParms, graphs, labels, evalFuncs);

    System.out.println("Running UWL");
    exp.run();

    resTable.newRow("UWL - baseline");
    for (Result res : exp.getResults()) {
      resTable.addResult(res);
    }

    System.out.println(resTable);
    //*/
    //*/


    // For each hub threshold and hub list, compare three graph simplifications:
    // remove hubs, relabel hubs, and relabel+remove, each with WL subtree kernels.
    MoleculeListMultiGraphExperiment<DTGraph<String,String>> exp2;
    for (int th : hubThs) {
      resTable.newRow("Hub Threshold: " + th);

      for (List<DTNode<String,String>> hubList : hubLists) {

        boolean regDegree = false;
        int maxSize = hubList.size();
        if (hubList == regDegreeHubs) {
          regDegree = true;
        }

        List<WLSubTreeKernel> kernelsWL = new ArrayList<WLSubTreeKernel>();
        for (int i : iterations2) {
          kernelsWL.add(new WLSubTreeKernel(i,true, forward));
        }

        ///*
        // Variant 1: remove hubs (simplifyGraph flags: remove=false, relabel=true per signature order — TODO confirm flag meaning against GraphUtils).
        List<DTNode<String,String>> newIN = new ArrayList<DTNode<String,String>>(instanceNodes);
        DTGraph<String,String> newG = GraphUtils.simplifyGraph(sGraph, GraphUtils.createHubMap(hubList.subList(0, Math.min(maxSize, th)), 10000, regDegree), newIN, false, true);

        exp2 = new MoleculeListMultiGraphExperiment<DTGraph<String,String>>(kernelsWL, seeds, svmParms, GraphUtils.getSubGraphs(newG, newIN, depth), target, evalFuncs);

        System.out.println("running, remove hubs, th: " + th);
        exp2.run();

        for (Result res : exp2.getResults()) {
          resTable.addResult(res);
        }

        // Variant 2: relabel hubs. Kernels are rebuilt because they keep state.
        kernelsWL = new ArrayList<WLSubTreeKernel>();
        for (int i : iterations2) {
          kernelsWL.add(new WLSubTreeKernel(i,true, forward));
        }

        newIN = new ArrayList<DTNode<String,String>>(instanceNodes);
        newG = GraphUtils.simplifyGraph(sGraph, GraphUtils.createHubMap(hubList.subList(0, Math.min(maxSize, th)), 10000, regDegree), newIN, true, false);

        exp2 = new MoleculeListMultiGraphExperiment<DTGraph<String,String>>(kernelsWL, seeds, svmParms, GraphUtils.getSubGraphs(newG, newIN, depth), target, evalFuncs);

        System.out.println("running, relabel hubs, th: " + th);
        exp2.run();

        for (Result res : exp2.getResults()) {
          resTable.addResult(res);
        }

        // Variant 3: relabel and remove hubs.
        kernelsWL = new ArrayList<WLSubTreeKernel>();
        for (int i : iterations2) {
          kernelsWL.add(new WLSubTreeKernel(i,true, forward));
        }

        newIN = new ArrayList<DTNode<String,String>>(instanceNodes);
        newG = GraphUtils.simplifyGraph(sGraph, GraphUtils.createHubMap(hubList.subList(0, Math.min(maxSize, th)), 10000, regDegree), newIN, true, true);

        exp2 = new MoleculeListMultiGraphExperiment<DTGraph<String,String>>(kernelsWL,
            seeds, svmParms, GraphUtils.getSubGraphs(newG, newIN, depth), target, evalFuncs);

        System.out.println("running, relabel+remove hubs, th: " + th);
        exp2.run();

        for (Result res : exp2.getResults()) {
          resTable.addResult(res);
        }
        //*/

      }
      System.out.println(resTable);
    }

    // Mark best scores, then print the table and the raw per-metric scores.
    resTable.addCompResults(resTable.getBestResults());
    System.out.println(resTable);
    System.out.println(resTable.allScoresToString());

  }
 
View Full Code Here

TOP

Related Classes of org.data2semantics.exp.utils.ResultsTable

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.