Package weka.classifiers

Examples of weka.classifiers.Evaluation


    if (classifier instanceof weka.classifiers.misc.InputMappedClassifier) {
      Instances mappedClassifierHeader =
        ((weka.classifiers.misc.InputMappedClassifier)classifier).
        getModelHeader(new Instances(inst, 0));

      eval = new Evaluation(new Instances(mappedClassifierHeader, 0));

      if (!eval.getHeader().equalHeaders(inst)) {
        // When the InputMappedClassifier is loading a model,
        // we need to make a new dataset that maps the test instances to
        // the structure expected by the mapped classifier - this is only
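
The snippet is cut off above, but the key point survives: when the classifier is an InputMappedClassifier (typically because a saved model is being applied to data with a different header), the Evaluation must be constructed from the header the underlying model expects, obtained via getModelHeader(), not from the test data's own header. A minimal, self-contained sketch of the pattern; the file names and the J48 base learner are illustrative assumptions, not part of the original code:

    import weka.classifiers.Evaluation;
    import weka.classifiers.misc.InputMappedClassifier;
    import weka.classifiers.trees.J48;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class InputMappedEvalSketch {
      public static void main(String[] args) throws Exception {
        Instances train = DataSource.read("train.arff"); // assumed path
        Instances test = DataSource.read("test.arff");   // assumed path
        train.setClassIndex(train.numAttributes() - 1);
        test.setClassIndex(test.numAttributes() - 1);

        // The wrapper maps incoming instances onto the header the model
        // was built with, even if attributes were added or reordered.
        InputMappedClassifier imc = new InputMappedClassifier();
        imc.setClassifier(new J48());
        imc.buildClassifier(train);

        // Prime the Evaluation with the model's header, as in the snippet above.
        Instances modelHeader = imc.getModelHeader(new Instances(test, 0));
        Evaluation eval = new Evaluation(new Instances(modelHeader, 0));
        eval.evaluateModel(imc, test);
        System.out.println(eval.toSummaryString());
      }
    }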


        }
        if (ce.getTrainSet().getDataSet() == null ||
            ce.getTrainSet().getDataSet().numInstances() == 0) {
          // we have no training set to estimate majority class
          // or mean of target from
          Evaluation eval = new Evaluation(ce.getTestSet().getDataSet());
          m_PlotInstances = ExplorerDefaults.getClassifierErrorsPlotInstances();
          m_PlotInstances.setInstances(ce.getTestSet().getDataSet());
          m_PlotInstances.setClassifier(ce.getClassifier());
          m_PlotInstances.setClassIndex(ce.getTestSet().getDataSet().classIndex());
          m_PlotInstances.setEvaluation(eval);

          eval = adjustForInputMappedClassifier(eval, ce.getClassifier(),
              ce.getTestSet().getDataSet(), m_PlotInstances);
          eval.useNoPriors();
          m_eval = new AggregateableEvaluation(eval);
        } else {
          // we can set up with the training set here
          Evaluation eval = new Evaluation(ce.getTrainSet().getDataSet());
          m_PlotInstances = ExplorerDefaults.getClassifierErrorsPlotInstances();
          m_PlotInstances.setInstances(ce.getTrainSet().getDataSet());
          m_PlotInstances.setClassifier(ce.getClassifier());
          m_PlotInstances.setClassIndex(ce.getTrainSet().getDataSet().classIndex());
          m_PlotInstances.setEvaluation(eval);
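
The first branch above covers the case where no training set is available: class priors (the majority class, or the mean of a numeric target) cannot be estimated, so useNoPriors() disables them, and the Evaluation is wrapped in an AggregateableEvaluation so that results from further batches can be merged. A stripped-down sketch of that pattern with assumed variable names (in recent Weka versions AggregateableEvaluation may live in weka.classifiers.evaluation instead):

    import weka.classifiers.AggregateableEvaluation;
    import weka.classifiers.Classifier;
    import weka.classifiers.Evaluation;
    import weka.core.Instances;

    // classifier is already trained; testData has its class index set.
    static AggregateableEvaluation evaluateWithoutPriors(
        Classifier classifier, Instances testData) throws Exception {
      Evaluation eval = new Evaluation(testData);
      eval.useNoPriors(); // no training set to estimate priors from
      eval.evaluateModel(classifier, testData);
      // Wrap so evaluations of later batches can be merged via aggregate().
      return new AggregateableEvaluation(eval);
    }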

      }
      try {

        ClassifierErrorsPlotInstances plotInstances =
          ExplorerDefaults.getClassifierErrorsPlotInstances();
        Evaluation eval = null;

        if (m_trainData == null || m_trainData.numInstances() == 0) {
          eval = new Evaluation(m_testData);
          plotInstances.setInstances(m_testData);
          plotInstances.setClassifier(m_classifier);
          plotInstances.setClassIndex(m_testData.classIndex());
          plotInstances.setEvaluation(eval);
          eval = adjustForInputMappedClassifier(eval, m_classifier,
              m_testData, plotInstances);
         
          eval.useNoPriors();
        } else {
          eval = new Evaluation(m_trainData);
          plotInstances.setInstances(m_trainData);
          plotInstances.setClassifier(m_classifier);
          plotInstances.setClassIndex(m_trainData.classIndex());
          plotInstances.setEvaluation(eval);
          eval = adjustForInputMappedClassifier(eval, m_classifier,

        for (int curCfr = 0; curCfr < classifiers.length; curCfr++ ) {

          AbstractClassifier aClassifier = classifiers[curCfr];

          Evaluation eval = new Evaluation(data);

          long millis = System.currentTimeMillis();
          eval.crossValidateModel(aClassifier, data, numFolds, new Random(curRun));
          long elapsedTime = System.currentTimeMillis() - millis;

          double aucSum = 0.0;
          double sumClassProps = 0;
          for (int c = 0; c < data.numClasses(); c++) {
            if (Double.isNaN(eval.areaUnderROC(c)))
              continue;
            aucSum += eval.areaUnderROC(c) * classProps[c];
            // this should sum to 1.0 in the end, as all the classes with AUC==NaN should have weight 0
            sumClassProps += classProps[c];
          }


          aucScore[curCfr][curRun-1] = aucSum / sumClassProps;
          accyScore[curCfr][curRun-1] = eval.pctCorrect();
          timeScore[curCfr][curRun-1] = elapsedTime;

          s.append(String.format( Locale.US, "%02d|%02d\t%.5f\t%.2f\t%6d\t",
                  curCfr, curRun, aucSum / sumClassProps,
                  eval.pctCorrect(), elapsedTime));

          System.gc();

        } // classifier by classifier
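
This loop cross-validates each classifier and scores it with a class-frequency-weighted mean of the per-class AUC, skipping classes whose AUC comes back as NaN (classes with no instances) and renormalizing by the weight that remains. A compact, runnable version of the same measurement for a single classifier; the dataset path, J48, and the fold count are placeholders:

    import java.util.Random;
    import weka.classifiers.Evaluation;
    import weka.classifiers.trees.J48;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class WeightedAucSketch {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("data.arff"); // assumed path
        data.setClassIndex(data.numAttributes() - 1);

        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // Weight each class's AUC by its relative frequency; skip classes
        // whose AUC is NaN and renormalize by the remaining weight.
        int[] counts = data.attributeStats(data.classIndex()).nominalCounts;
        double aucSum = 0.0, weightSum = 0.0;
        for (int c = 0; c < data.numClasses(); c++) {
          double auc = eval.areaUnderROC(c);
          if (Double.isNaN(auc)) continue;
          double prop = counts[c] / (double) data.numInstances();
          aucSum += auc * prop;
          weightSum += prop;
        }
        System.out.printf("weighted AUC %.5f, accuracy %.2f%%%n",
            aucSum / weightSum, eval.pctCorrect());
      }
    }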


    @Override
    public void evaluateClassifier(String resultFile, ArrayList<SupportedFeature> neededFeats) {
        SupervisedPanel svmClassifier = new SupervisedPanel(neededFeats);
        svmClassifier.setCorpus(annotatedCorpus);
        Evaluation evaluator = svmClassifier.kFoldValidate();
        writeStatsToFile(evaluator, resultFile);

    }
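
SupervisedPanel and writeStatsToFile are project-specific, but the interesting part for Evaluation users is what such a helper typically writes out. A hypothetical sketch of a writeStatsToFile-style method using only standard Evaluation report calls (this is not the project's actual implementation):

    import java.io.PrintWriter;
    import weka.classifiers.Evaluation;

    // Hypothetical helper: dump the standard reports to a results file.
    static void writeStatsToFile(Evaluation eval, String resultFile)
        throws Exception {
      try (PrintWriter out = new PrintWriter(resultFile)) {
        out.println(eval.toSummaryString("=== Summary ===", false));
        out.println(eval.toClassDetailsString()); // precision/recall/F-measure/AUC
        out.println(eval.toMatrixString());       // confusion matrix
      }
    }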

    }

    public Evaluation crossValidate(ArrayList<LinkedHashMap<Integer, Double>> trainingSamples, ArrayList<Integer> labels, int numFolds, int numFeats) {
        Instances trainingSet = fillDataSet(trainingSamples, labels, numFeats);
        Instances initial = trainingSet.stringFreeStructure();
        Evaluation eTest = null;
        SPegasos cModel = new SPegasos();
        try {
            cModel.buildClassifier(initial);
            for (int i = 0; i < trainingSet.numInstances(); i++) {
                cModel.updateClassifier(trainingSet.instance(i));
            }
            eTest = new Evaluation(trainingSet);
            if (numFolds > trainingSet.numInstances()) {
                numFolds = trainingSet.numInstances();
            }
            eTest.crossValidateModel(cModel, trainingSet, numFolds, new Random(1));
        } catch (Exception ex) {
            Logger.getLogger(WekaWrapper.class.getName()).log(Level.SEVERE, null, ex);
            System.out.println(ex.toString());
        }
        return eTest;
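
One caveat about the method above: crossValidateModel copies the classifier and calls buildClassifier on each training fold, so the incremental updateClassifier loop that precedes it has no effect on the cross-validation figures; it only matters if cModel itself is used afterwards. The incremental pattern in isolation looks like this (the ARFF path is an assumption; SPegasos is an updateable learner for binary class problems):

    import weka.classifiers.functions.SPegasos;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class IncrementalTrainSketch {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("train.arff"); // assumed path
        data.setClassIndex(data.numAttributes() - 1);

        // Initialize on the header only, then feed instances one at a time.
        SPegasos model = new SPegasos();
        model.buildClassifier(data.stringFreeStructure());
        for (int i = 0; i < data.numInstances(); i++) {
          model.updateClassifier(data.instance(i));
        }
        System.out.println(model);
      }
    }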

                mSummaryOutput.write(("DESCRIPTION: \n" + description + classifier.toString() + "\n" + columns ).getBytes());
              mSummaryOutput.write(ControlBlock.LIST_END);
            }
            if(mFoldsInput != null) {
              Integer folds = TupleUtilities.getNumber(readBlock(mFoldsInput), INPUT_FOLDS);
              Evaluation ev = new Evaluation(dataset);
              if(mSeedInput != null) {
                Integer seed = TupleUtilities.getNumber(readBlock(mSeedInput), INPUT_SEED);
                ev.crossValidateModel(classifier, dataset, folds, new Random(seed));
              }
              else
                ev.crossValidateModel(classifier, dataset, folds, new Random(123));
              if(mEvaluationOutput != null) {
                mEvaluationOutput.write(ev.toSummaryString());
                LOG.debug("BuildClassifier evaluation: " + ev.toSummaryString());
              }
            }
        }
        catch (InvalidWekaOptionsException e) {
          throw new ActivityUserException(e);

    m_attribute = attribute;
    m_cuts = cuts;
    m_values = values;
   
    // Compute root mean squared error
    Evaluation eval = new Evaluation(insts);
    eval.evaluateModel(this, insts);
    double msq = eval.rootMeanSquaredError();
   
    // Check whether this is the best attribute
    if (msq < m_minMsq) {
      m_minMsq = msq;
    } else {
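
Evaluation handles regression as well: with a numeric class, evaluateModel populates error statistics such as rootMeanSquaredError, which the snippet above uses to rank candidate attributes (note the local variable is named msq but actually holds the root mean squared error). In isolation, measuring resubstitution RMSE looks like this; the dataset path and LinearRegression are illustrative assumptions:

    import weka.classifiers.Evaluation;
    import weka.classifiers.functions.LinearRegression;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class RegressionEvalSketch {
      public static void main(String[] args) throws Exception {
        Instances insts = DataSource.read("cpu.arff");  // assumed path
        insts.setClassIndex(insts.numAttributes() - 1); // numeric class

        LinearRegression model = new LinearRegression();
        model.buildClassifier(insts);

        // Resubstitution error: evaluate the model on its own training data.
        Evaluation eval = new Evaluation(insts);
        eval.evaluateModel(model, insts);
        System.out.println("RMSE: " + eval.rootMeanSquaredError());
      }
    }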

   */
  public void acceptClassifier(final IncrementalClassifierEvent ce) {
    try {
      if (ce.getStatus() == IncrementalClassifierEvent.NEW_BATCH) {
        // m_eval = new Evaluation(ce.getCurrentInstance().dataset());
        m_eval = new Evaluation(ce.getStructure());
        m_eval.useNoPriors();

        m_dataLegend = new Vector();
        m_reset = true;
        m_dataPoint = new double[0];
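
For an incremental classifier there is no training set to take priors from, so the Evaluation is built from the stream's structure (a header-only Instances) and useNoPriors() is required; instances are then scored one at a time as they arrive. A runnable sketch of that test-then-train loop, simulating the stream from a file (the path and NaiveBayesUpdateable are assumptions):

    import weka.classifiers.Evaluation;
    import weka.classifiers.bayes.NaiveBayesUpdateable;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class PrequentialSketch {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("stream.arff"); // assumed path
        data.setClassIndex(data.numAttributes() - 1);

        NaiveBayesUpdateable model = new NaiveBayesUpdateable();
        model.buildClassifier(data.stringFreeStructure()); // header only

        // As above: construct from the structure alone and disable priors.
        Evaluation eval = new Evaluation(data.stringFreeStructure());
        eval.useNoPriors();

        // Test-then-train: score each instance before learning from it.
        for (int i = 0; i < data.numInstances(); i++) {
          eval.evaluateModelOnce(model, data.instance(i));
          model.updateClassifier(data.instance(i));
        }
        System.out.println(eval.toSummaryString());
      }
    }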

           cvParam.m_ParamValue += increment) {
        findParamsByCrossValidation(depth + 1, trainData, random);
      }
    } else {

      Evaluation evaluation = new Evaluation(trainData);

      // Set the classifier options
      String[] options = createOptions();
      if (m_Debug) {
        System.err.print("Setting options for "
            + m_Classifier.getClass().getName() + ":");
        for (int i = 0; i < options.length; i++) {
          System.err.print(" " + options[i]);
        }
        System.err.println("");
      }
      ((OptionHandler) m_Classifier).setOptions(options);
      for (int j = 0; j < m_NumFolds; j++) {

        // We want to randomize the data the same way for every
        // learning scheme.
        Instances train = trainData.trainCV(m_NumFolds, j, new Random(1));
        Instances test = trainData.testCV(m_NumFolds, j);
        m_Classifier.buildClassifier(train);
        evaluation.setPriors(train);
        evaluation.evaluateModel(m_Classifier, test);
      }
      double error = evaluation.errorRate();
      if (m_Debug) {
        System.err.println("Cross-validated error rate: "
            + Utils.doubleToString(error, 6, 4));
      }
      if ((m_BestPerformance == -99) || (error < m_BestPerformance)) {
