Package org.data2semantics.exp.utils

Examples of org.data2semantics.exp.utils.Result


    this.seeds = seeds;
    this.maxClassSize = maxClassSize;
    this.cs = cs;
    output = new PrintWriter(outputStream);
    results = new ExperimentResults();
    results.setAccuracy(new Result());
    results.setF1(new Result());
    results.setAveragePrecision(new Result());
    results.setrPrecision(new Result());
    results.setNdcg(new Result());
  }
View Full Code Here


    this.seeds = seeds;
    this.cs = cs;
    this.maxClassSize = maxClassSize;
    output = new PrintWriter(outputStream);
    results = new ExperimentResults();
    results.setAccuracy(new Result());
    results.setF1(new Result());
  }
View Full Code Here

     
      accScores[i] = LibSVM.computeAccuracy(target, prediction);
      fScores[i]   = LibSVM.computeF1(target, prediction);   
    }
   
    Result accRes = results.getAccuracy();
    Result fRes   = results.getF1();
    accRes.setLabel("Accuracy");
    fRes.setLabel("F1");
    accRes.setScores(accScores);
    fRes.setScores(fScores);
   
    output.println(dataSet.getLabel());
    output.println(kernel.getLabel() + ", Seeds=" + Arrays.toString(seeds) + ", C=" + Arrays.toString(cs));
    output.print("Overall Accuracy: " + accRes.getScore());
    output.print(", Average F1: " + fRes.getScore());
    output.println("");
    output.print("All acc: " + Arrays.toString(accScores));
    output.print(", All f1: " + Arrays.toString(fScores));
    output.println("");
   
View Full Code Here

    ResultsTable resTable = new ResultsTable();

    resTable.newRow("WL RDF");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {

        createGeoDataSet(seed,frac,6,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");

        KernelExperiment<RDFGraphKernel> exp = new RDFKernelRunTimeExperiment(new ECML2013RDFWLSubTreeKernel(iteration, depth, inference, true, false), seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running WL RDF: " + frac);
        exp.run();
        res.addResult(exp.getResults().get(0));
      }
      resTable.addResult(res);
    }

    resTable.newRow("IST");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {
        createGeoDataSet(seed,frac,6,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");

        KernelExperiment<RDFGraphKernel> exp = new RDFKernelRunTimeExperiment(new RDFIntersectionSubTreeKernel(depth, 1, inference, true, false), seeds, parms, dataset, instances, labels, blackList);

        System.out.println("Running IST: " + frac);
        exp.run();
        res.addResult(exp.getResults().get(0));
      }
      resTable.addResult(res);
    }


    long tic, toc;



    resTable.newRow("WL");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {
        createGeoDataSet(seed,frac,6,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");
        tic = System.currentTimeMillis();
        PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
        toc = System.currentTimeMillis();

        KernelExperiment<GraphKernel> exp = new GraphKernelRunTimeExperiment(new ECML2013WLSubTreeKernel(iteration), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running WL: " + frac);
        exp.run();

        res.addResult(exp.getResults().get(0));

        double[] comps = {2 * (toc-tic) + res.getScore()};
        Result resC = new Result(comps,"comp time 2");
        res.addResult(resC);
      }

      resTable.addResult(res);
    }

    /*
    resTable.newRow("");
    for (double frac : fractions) {
      createGeoDataSet(11,frac,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
      toc = System.currentTimeMillis();


      KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphPathKernel(2,1), seeds, parms, ds.getGraphs(), labels);

      System.out.println("Running IGP: " + frac);
      exp.run();

      double[] comps =  {0,0};
      comps[0] = 2*(toc-tic) + exp.getResults().get(exp.getResults().size()-1).getScore();
      comps[1] = 2*(toc-tic) + exp.getResults().get(exp.getResults().size()-1).getScore();
      Result resC = new Result(comps,"comp time 2"); 
      exp.getResults().get(exp.getResults().size()-1).addResult(resC);


      resTable.addResult(exp.getResults().get(exp.getResults().size()-1)); 
    }
     */

    resTable.newRow("IGW");
    for (double frac : fractions) {

      Result res = new Result();
      res.setLabel("runtime");
      for (long seed : seeds) {
        createGeoDataSet(seed,frac,6,"http://data.bgs.ac.uk/ref/Lexicon/hasLithogenesis");
        tic = System.currentTimeMillis();
        PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 3, false, true));
        toc = System.currentTimeMillis();


        KernelExperiment<GraphKernel> exp = new GraphKernelRunTimeExperiment(new ECML2013IntersectionGraphWalkKernel(2,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGW: " + frac);
        exp.run();

        res.addResult(exp.getResults().get(0));

        double[] comps = {2 * (toc-tic) + res.getScore()};
        Result resC = new Result(comps,"comp time 2");
        res.addResult(resC);
      }

      resTable.addResult(res);
    }
View Full Code Here

        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);

      }
    }
    saveResults(resTable, "geo_litho.ser");


    /*
    dataSetsParams = new ArrayList<GeneralPredictionDataSetParameters>();

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, false));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, false));

    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 1, false, true));
    dataSetsParams.add(new GeneralPredictionDataSetParameters(dataset, blackLists, instances, 2, false, true));
     */




    for (GeneralPredictionDataSetParameters params : dataSetsParams) {
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(params);
      toc = System.currentTimeMillis();

      if (blankLabels) {
        ds.removeVertexAndEdgeLabels();
      }

      resTable.newRow("IGP");
      for (int it : iterationsIG) {
        KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphPathKernel(it,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGP: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);
      }
    }
    saveResults(resTable, "geo_litho.ser");


    for (GeneralPredictionDataSetParameters params : dataSetsParams) {
      tic = System.currentTimeMillis();
      PropertyPredictionDataSet ds = DataSetFactory.createPropertyPredictionDataSet(params);
      toc = System.currentTimeMillis();

      if (blankLabels) {
        ds.removeVertexAndEdgeLabels();
      }

      resTable.newRow("IGW");
      for (int it : iterationsIG) {
        KernelExperiment<GraphKernel> exp = new GraphKernelExperiment(new ECML2013IntersectionGraphWalkKernel(it,1), seeds, parms, ds.getGraphs(), labels);

        System.out.println("Running IGW: " + it);
        exp.run();

        for (Result res : exp.getResults()) {
          resTable.addResult(res);
        }

        double[] comps =  {0,0};
        comps[0] = toc-tic;
        comps[1] = toc-tic;
        Result resC = new Result(comps,"comp time 2");
        resTable.addResult(resC);

      }
    }
    saveResults(resTable, "geo_litho.ser");
View Full Code Here

        tic = System.currentTimeMillis();
        k.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      Result res = new Result(comp, "RDF WL FV");
      resTable.addResult(res);
 
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFGraphKernel k = new RDFWLSubTreeKernel(6,3,false, true);

        System.out.println("RDF WL Kernel: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF WL Kernel");
      resTable.addResult(res);
 
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFFeatureVectorKernel k = new RDFIntersectionTreeEdgeVertexPathKernel(3,false, false, true);

        System.out.println("RDF ITP FV: " + frac);
        tic = System.currentTimeMillis();
        k.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF ITP FV");
      resTable.addResult(res);
 
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   

        RDFGraphKernel k = new RDFIntersectionTreeEdgeVertexPathKernel(3,false, false, true);

        System.out.println("RDF ITP Kernel: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF ITP Kernel");
      resTable.addResult(res);
   
     
      comp = new double[seeds.length];
      for (int i = 0; i < seeds.length; i++) {
        createGeoDataSet((int)(1000 * frac), frac, seeds[i], "http://data.bgs.ac.uk/ref/Lexicon/hasTheme");   
        RDFGraphKernel k = new RDFIntersectionSubTreeKernel(3,1, false, true);


        System.out.println("RDF IST: " + frac);
        tic = System.currentTimeMillis();
        k.compute(dataset, instances, blackList);
        toc = System.currentTimeMillis();
        comp[i] = toc-tic;
      }
      res = new Result(comp, "RDF IST");
      resTable.addResult(res);
 
      System.out.println(resTable);
    }
    System.out.println(resTable);
View Full Code Here

    this.kernels = kernels;
   
    resultMap = new HashMap<EvaluationFunction,double[]>();
   
    for (EvaluationFunction evalFunc : evalFunctions) {
      Result res = new Result();
      double[] resA = new double[seeds.length];
      res.setLabel(evalFunc.getLabel());
      res.setScores(resA);
      res.setHigherIsBetter(evalFunc.isHigherIsBetter());
      results.add(res);
      resultMap.put(evalFunc, resA);
    }
   
    compR = new Result();
    results.add(compR);
  }
View Full Code Here

    this.evalFunctions = evalFunctions;
   
    resultMap = new HashMap<EvaluationFunction,double[]>();
   
    for (EvaluationFunction evalFunc : evalFunctions) {
      Result res = new Result();
      double[] resA = new double[seeds.length];
      res.setLabel(evalFunc.getLabel());
      res.setScores(resA);
      res.setHigherIsBetter(evalFunc.isHigherIsBetter());
      results.add(res);
      resultMap.put(evalFunc, resA);
    }
   
    compR = new Result();
    results.add(compR);
  }
View Full Code Here

      Map<EvaluationFunction, double[]> resultMap2 = new HashMap<EvaluationFunction,double[]>();

      List<Result> results = new ArrayList<Result>();

      for (EvaluationFunction evalFunc : evalFuncs1) {
        Result res = new Result();
        double[] resA = new double[seeds.length];
        res.setLabel(evalFunc.getLabel());
        res.setScores(resA);
        res.setHigherIsBetter(evalFunc.isHigherIsBetter());
        results.add(res);
        resultMap.put(evalFunc, resA);
      }

      for (EvaluationFunction evalFunc : evalFuncs2) {
        Result res = new Result();
        double[] resA = new double[seeds.length];
        res.setLabel(evalFunc.getLabel());
        res.setScores(resA);
        res.setHigherIsBetter(evalFunc.isHigherIsBetter());
        results.add(res);
        resultMap2.put(evalFunc, resA);
      }

      Result compR = new Result();
      results.add(compR);


      long tic, toc;

      List<Double> tempLabels = new ArrayList<Double>();
      List<Double> tempLabelsBins = new ArrayList<Double>();
      tempLabels.addAll(target);
      tempLabelsBins.addAll(targetBins);

      tic = System.currentTimeMillis();
      SparseVector[] fv = kernel.computeFeatureVectors(dataset, instances, blackList);
      toc = System.currentTimeMillis();

      fv = TextUtils.computeTFIDF(Arrays.asList(fv)).toArray(new SparseVector[1]);
      fv = KernelUtils.normalize(fv);


      List<SparseVector> fvList = Arrays.asList(fv);


      compR.setLabel("kernel comp time");

      for (int j = 0; j < seeds.length; j++) {
        Collections.shuffle(fvList, new Random(seeds[j]));
        Collections.shuffle(tempLabels, new Random(seeds[j]));
        Collections.shuffle(tempLabelsBins, new Random(seeds[j]));

        fv = fvList.toArray(new SparseVector[1]);
        double[] targetA = new double[tempLabels.size()];
        double[] targetABins = new double[tempLabelsBins.size()];
        for (int i = 0; i < targetA.length; i++) {
          targetA[i] = tempLabels.get(i);
          targetABins[i] = tempLabelsBins.get(i);
        }

        Prediction[] pred = LibLINEAR.trainTestSplit(fv, targetABins, linParms, linParms.getSplitFraction());     
        Prediction[] pred2 = LibLINEAR.trainTestSplit(fv, targetA, linParms2, linParms2.getSplitFraction());       

        double[] targetSplit = LibLINEAR.splitTestTarget(targetA, linParms.getSplitFraction());

        for (EvaluationFunction ef : evalFuncs1) {
          resultMap.get(ef)[j] = ef.computeScore(targetSplit, pred);
        }       
        for (EvaluationFunction ef : evalFuncs2) {
          resultMap2.get(ef)[j] = ef.computeScore(targetSplit, pred2);
        }
      }
      double[] comp = {toc - tic};
      compR.setScores(comp);

      for (Result res : results) {
        resTable.addResult(res);
      }     
    }


    for (int d : depths) {     
      for (int it : iterations) {
        resTable.newRow("RDF WL BoW, depth="+d);

        /*
        List<RDFFeatureVectorKernel> kernels = new ArrayList<RDFFeatureVectorKernel>();
        kernels.add(new RDFWLSubTreeKernel(it,d, inference, false));
        kernels.add(new RDFSimpleTextKernel(d, inference, false));
        RDFFeatureVectorKernel kernel = new RDFCombinedKernel(kernels, true);
        */
       
        RDFFeatureVectorKernel kernel = new RDFWLSubTreeWithTextKernel(it, d, inference, false);
       
       
       
        System.out.println("Running RDFWL + text kernel: " + d + " " + it);

        Map<EvaluationFunction, double[]> resultMap = new HashMap<EvaluationFunction,double[]>();
        Map<EvaluationFunction, double[]> resultMap2 = new HashMap<EvaluationFunction,double[]>();

        List<Result> results = new ArrayList<Result>();

        for (EvaluationFunction evalFunc : evalFuncs1) {
          Result res = new Result();
          double[] resA = new double[seeds.length];
          res.setLabel(evalFunc.getLabel());
          res.setScores(resA);
          res.setHigherIsBetter(evalFunc.isHigherIsBetter());
          results.add(res);
          resultMap.put(evalFunc, resA);
        }

        for (EvaluationFunction evalFunc : evalFuncs2) {
          Result res = new Result();
          double[] resA = new double[seeds.length];
          res.setLabel(evalFunc.getLabel());
          res.setScores(resA);
          res.setHigherIsBetter(evalFunc.isHigherIsBetter());
          results.add(res);
          resultMap2.put(evalFunc, resA);
        }

        Result compR = new Result();
        results.add(compR);


        long tic, toc;

        List<Double> tempLabels = new ArrayList<Double>();
        List<Double> tempLabelsBins = new ArrayList<Double>();
        tempLabels.addAll(target);
        tempLabelsBins.addAll(targetBins);

        tic = System.currentTimeMillis();
        SparseVector[] fv = kernel.computeFeatureVectors(dataset, instances, blackList);
        toc = System.currentTimeMillis();

        fv = TextUtils.computeTFIDF(Arrays.asList(fv)).toArray(new SparseVector[1]);
        fv = KernelUtils.normalize(fv);


        List<SparseVector> fvList = Arrays.asList(fv);


        compR.setLabel("kernel comp time");

        for (int j = 0; j < seeds.length; j++) {
          Collections.shuffle(fvList, new Random(seeds[j]));
          Collections.shuffle(tempLabels, new Random(seeds[j]));
          Collections.shuffle(tempLabelsBins, new Random(seeds[j]));

          fv = fvList.toArray(new SparseVector[1]);
          double[] targetA = new double[tempLabels.size()];
          double[] targetABins = new double[tempLabelsBins.size()];
          for (int i = 0; i < targetA.length; i++) {
            targetA[i] = tempLabels.get(i);
            targetABins[i] = tempLabelsBins.get(i);
          }

          Prediction[] pred = LibLINEAR.trainTestSplit(fv, targetABins, linParms, linParms.getSplitFraction());     
          Prediction[] pred2 = LibLINEAR.trainTestSplit(fv, targetA, linParms2, linParms2.getSplitFraction());       

          double[] targetSplit = LibLINEAR.splitTestTarget(targetA, linParms.getSplitFraction());

          for (EvaluationFunction ef : evalFuncs1) {
            resultMap.get(ef)[j] = ef.computeScore(targetSplit, pred);
          }       
          for (EvaluationFunction ef : evalFuncs2) {
            resultMap2.get(ef)[j] = ef.computeScore(targetSplit, pred2);
          }
        }
        double[] comp = {toc - tic};
        compR.setScores(comp);

        for (Result res : results) {
          resTable.addResult(res);
       
      }
View Full Code Here

    this.iNodes = iNodes;
   
    resultMap = new HashMap<EvaluationFunction,double[]>();
   
    for (EvaluationFunction evalFunc : evalFunctions) {
      Result res = new Result();
      double[] resA = new double[seeds.length];
      res.setLabel(evalFunc.getLabel());
      res.setScores(resA);
      res.setHigherIsBetter(evalFunc.isHigherIsBetter());
      results.add(res);
      resultMap.put(evalFunc, resA);
    }
   
    compR = new Result();
    results.add(compR);
  }
View Full Code Here

TOP

Related Classes of org.data2semantics.exp.utils.Result

Copyright © 2018 www.massapi.com. All rights reserved.
All source code are property of their respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.