Examples of ConfusionMatrix


Examples of org.apache.mahout.classifier.ConfusionMatrix

      Integer count = Double.valueOf(value.get()).intValue();
      rowMatrix.put(classifiedLabel, count);
      confusionMatrix.put(correctLabel, rowMatrix);
    }

    ConfusionMatrix matrix = new ConfusionMatrix(confusionMatrix.keySet(), defaultLabel);
    for (Map.Entry<String,Map<String,Integer>> correctLabelSet : confusionMatrix.entrySet()) {
      Map<String,Integer> rowMatrix = correctLabelSet.getValue();
      for (Map.Entry<String,Integer> classifiedLabelSet : rowMatrix.entrySet()) {
        matrix.addInstance(correctLabelSet.getKey(), classifiedLabelSet.getKey());
        matrix.putCount(correctLabelSet.getKey(), classifiedLabelSet.getKey(), classifiedLabelSet.getValue());
      }
    }
    return matrix;
  }
View Full Code Here
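
The snippet above rebuilds a ConfusionMatrix from a nested map of per-label counts. As a minimal, self-contained sketch of the same API, using only the calls that appear above (the labels-plus-default-label constructor, addInstance, putCount and toString), with hypothetical labels and counts:

import java.util.Arrays;

import org.apache.mahout.classifier.ConfusionMatrix;

// Minimal sketch, assuming Mahout's classifier module is on the classpath.
// The labels ("spam", "ham", "unknown") and counts are hypothetical.
public class ConfusionMatrixSketch {
  public static void main(String[] args) {
    // Known labels plus a default label for anything unrecognised.
    ConfusionMatrix cm = new ConfusionMatrix(Arrays.asList("spam", "ham"), "unknown");

    // Record one classified instance: correct label first, classified label second.
    cm.addInstance("spam", "ham");

    // Or set an aggregate count for a (correct, classified) cell directly,
    // as the map-driven loop above does via putCount.
    cm.putCount("spam", "spam", 42);

    // toString() renders the matrix as text.
    System.out.println(cm);
  }
}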

Examples of org.apache.mahout.classifier.ConfusionMatrix

      if (lmp.getTargetCategories().size() <= 2) {
        collector = new Auc();
      }
     
      OnlineSummarizer slh = new OnlineSummarizer();
      ConfusionMatrix cm = new ConfusionMatrix(lmp.getTargetCategories(), defaultCategory);

      State<Wrapper, CrossFoldLearner> best = lr.getBest();
      if (best == null) {
        output.printf("%s\n",
            "AdaptiveLogisticRegression has not be trained probably.");
        return;
      }
      CrossFoldLearner learner = best.getPayload().getLearner();

      BufferedReader in = TrainLogistic.open(inputFile);
      String line = in.readLine();
      csv.firstLine(line);
      line = in.readLine();
      if (showScores) {
        output.printf(Locale.ENGLISH, "\"%s\", \"%s\", \"%s\", \"%s\"\n",
            "target", "model-output", "log-likelihood", "average-likelihood");
      }
      while (line != null) {
        Vector v = new SequentialAccessSparseVector(lmp.getNumFeatures());
        // TODO: handle target values that were not seen during training.
        int target = csv.processLine(line, v);
        double likelihood = learner.logLikelihood(target, v);
        double score = learner.classifyFull(v).maxValue();
       
        slh.add(likelihood);
        cm.addInstance(csv.getTargetString(line), csv.getTargetLabel(target));       
       
        if (showScores) {
          output.printf(Locale.ENGLISH, "%8d, %.12f, %.13f, %.13f\n", target,
              score, learner.logLikelihood(target, v), slh.getMean());
        }
        if (collector != null) {
          collector.add(target, score);
        }
        line = in.readLine();
      }
     
      output.printf(Locale.ENGLISH,"\nLog-likelihood:");
      output.printf(Locale.ENGLISH, "Min=%.2f, Max=%.2f, Mean=%.2f, Median=%.2f\n",
          slh.getMin(), slh.getMax(), slh.getMean(), slh.getMedian());

      if (collector != null) {       
        output.printf(Locale.ENGLISH, "\nAUC = %.2f\n", collector.auc());       
      }
     
      if (showConfusion) {
        output.printf(Locale.ENGLISH, "\n%s\n\n", cm.toString());
       
        if (collector != null){
          Matrix m = collector.entropy();
          output.printf(Locale.ENGLISH,
              "Entropy Matrix: [[%.1f, %.1f], [%.1f, %.1f]]\n", m.get(0, 0),
View Full Code Here
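
The evaluation loop above pairs the ConfusionMatrix with an Auc collector for the binary case and reports AUC and an entropy matrix at the end. Below is a minimal sketch of just that collector, restricted to the calls shown above (add, auc, entropy); the scores are random stand-ins rather than real model output, and the import paths are assumed to be Mahout's evaluation and math packages:

import java.util.Locale;
import java.util.Random;

import org.apache.mahout.classifier.evaluation.Auc; // assumed location of the Auc used above
import org.apache.mahout.math.Matrix;               // assumed location of the Matrix returned by entropy()

// Minimal sketch: feed (true target, model score) pairs into an Auc collector and
// read back the area under the ROC curve plus the 2x2 entropy matrix, mirroring the
// reporting at the end of the snippet above. The data is random stand-in, not the
// output of a trained model.
public class AucSketch {
  public static void main(String[] args) {
    Auc collector = new Auc();
    Random rand = new Random(42);
    for (int i = 0; i < 1000; i++) {
      int target = rand.nextInt(2);                            // true label: 0 or 1
      double score = 0.6 * rand.nextDouble() + 0.4 * target;   // class 1 tends to score higher
      collector.add(target, score);
    }
    System.out.printf(Locale.ENGLISH, "AUC = %.2f%n", collector.auc());
    Matrix m = collector.entropy();
    System.out.printf(Locale.ENGLISH, "Entropy Matrix: [[%.1f, %.1f], [%.1f, %.1f]]%n",
        m.get(0, 0), m.get(0, 1), m.get(1, 0), m.get(1, 1));
  }
}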

Examples of statechum.analysis.learning.PrecisionRecall.ConfusionMatrix

     * @param cpuNumber the number of processors to use. Usually set to 1 because we run as many experiments as there are CPUs, so an individual experiment should not consume more computational power than is available to it.
     */
    public static DifferenceToReferenceFMeasure estimationOfDifference(LearnerGraph referenceGraph, LearnerGraph actualAutomaton, Collection<List<Label>> testSet)
    {
           LearnerGraph learntGraph = new LearnerGraph(actualAutomaton.config);
           AbstractPathRoutines.removeRejectStates(actualAutomaton, learntGraph);
           ConfusionMatrix mat = DiffExperiments.classify(testSet, referenceGraph, learntGraph);
      return new DifferenceToReferenceFMeasure(mat);
    }
View Full Code Here

Examples of statechum.analysis.learning.PrecisionRecall.ConfusionMatrix

     * @param cpuNumber the number of processors to use. Usually set to 1 because we run as many experiments as there are CPUs, so an individual experiment should not consume more computational power than is available to it.
     */
    public static DifferenceToReferenceLanguageBCR estimationOfDifference(LearnerGraph referenceGraph, LearnerGraph actualAutomaton, Collection<List<Label>> testSet)
    {
           LearnerGraph learntGraph = new LearnerGraph(actualAutomaton.config);
           AbstractPathRoutines.removeRejectStates(actualAutomaton, learntGraph);
           ConfusionMatrix mat = DiffExperiments.classify(testSet, referenceGraph, learntGraph);
      return new DifferenceToReferenceLanguageBCR(mat);
    }
View Full Code Here

Examples of statechum.analysis.learning.PrecisionRecall.ConfusionMatrix

  private Pair<Double,Long> compareLang(LearnerGraph from, LearnerGraph to,
      Collection<List<Label>> sequences)
  {
   
    final long startTime = System.nanoTime();
    ConfusionMatrix matrix = classify(sequences, from,to);
    final long duration = System.nanoTime() - startTime;
    double result = matrix.fMeasure();
    assert !Double.isNaN(result);
    return new Pair<Double,Long>(result,duration);
  }
View Full Code Here

Examples of statechum.analysis.learning.PrecisionRecall.ConfusionMatrix

      else if(!inTarget && inMutated)
        fp++;
      else if(!inTarget && !inMutated)
        tn++;
    }
    return new ConfusionMatrix(tp,tn,fp,fn);
  }
View Full Code Here

Examples of statechum.analysis.learning.PrecisionRecall.ConfusionMatrix

    set.clear();
    set.addAll(expected);
    set.removeAll(detected);
    fn = set.size();
   
    ConfusionMatrix conf = new ConfusionMatrix(tp, tn, fp, fn);
    return conf.fMeasure();
  }
View Full Code Here
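
Both statechum snippets above reduce a comparison to true/false positive and negative counts before wrapping them in a ConfusionMatrix. A minimal, self-contained sketch using only the four-argument constructor and fMeasure() seen above, with hypothetical counts:

import statechum.analysis.learning.PrecisionRecall.ConfusionMatrix;

// Minimal sketch, assuming the statechum classes are on the classpath.
// The counts are hypothetical: 80 true positives, 90 true negatives,
// 10 false positives and 20 false negatives.
public class FMeasureSketch {
  public static void main(String[] args) {
    ConfusionMatrix conf = new ConfusionMatrix(80, 90, 10, 20); // (tp, tn, fp, fn)
    // fMeasure() is the harmonic mean of precision tp/(tp+fp) and recall tp/(tp+fn),
    // the value returned by compareLang and the set-based helper above.
    System.out.println("F-measure: " + conf.fMeasure());
  }
}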

Examples of statechum.analysis.learning.PrecisionRecall.ConfusionMatrix

     * @param cpuNumber the number of processors to use. Usually set to 1 because we run as many experiments as there are CPUs, so an individual experiment should not consume more computational power than is available to it.
     */
    public static DifferenceToReferenceLanguage estimationOfDifference(LearnerGraph referenceGraph, LearnerGraph actualAutomaton, Collection<List<Label>> testSet)
    {
           LearnerGraph learntGraph = new LearnerGraph(actualAutomaton.config);
           AbstractPathRoutines.removeRejectStates(actualAutomaton, learntGraph);
           ConfusionMatrix mat = DiffExperiments.classify(testSet, referenceGraph, learntGraph);
      return new DifferenceToReferenceLanguage(mat);
    }
View Full Code Here