Package weka.classifiers

Examples of weka.classifiers.Evaluation
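
The excerpts on this page are drawn from different parts of the Weka code base (the experiment framework, classifiers that tune themselves by internal cross-validation, attribute selection, and the Explorer GUI). They all follow the same pattern: build an Evaluation from a training set (which fixes the header and the class priors), push test data through evaluateModel() or crossValidateModel(), and then read statistics off the Evaluation object. The following minimal sketch illustrates that pattern; the file name iris.arff and the choice of J48 are placeholders, not part of the excerpts below.

    import java.util.Random;

    import weka.classifiers.Evaluation;
    import weka.classifiers.trees.J48;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class BasicEvaluationExample {
      public static void main(String[] args) throws Exception {
        // load a data set and use the last attribute as the class (file name is a placeholder)
        Instances data = DataSource.read("iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // 10-fold cross-validation of a J48 decision tree
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new J48(), data, 10, new Random(1));

        // standard summary, per-class details and confusion matrix
        System.out.println(eval.toSummaryString());
        System.out.println(eval.toClassDetailsString());
        System.out.println(eval.toMatrixString());
      }
    }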


    int addm = (m_AdditionalMeasures != null) ? m_AdditionalMeasures.length : 0;
    Object [] result = new Object[RESULT_SIZE+addm];
    long thID = Thread.currentThread().getId();
    long CPUStartTime=-1, trainCPUTimeElapsed=-1, testCPUTimeElapsed=-1,
         trainTimeStart, trainTimeElapsed, testTimeStart, testTimeElapsed;   
    Evaluation eval = new Evaluation(train);
    m_Classifier = Classifier.makeCopy(m_Template);

    trainTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    m_Classifier.buildClassifier(train);
    if(canMeasureCPUTime)
      trainCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
    testTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    eval.evaluateModel(m_Classifier, test);
    if(canMeasureCPUTime)
      testCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    testTimeElapsed = System.currentTimeMillis() - testTimeStart;
    thMonitor = null;
   
    m_result = eval.toSummaryString();
    // The results stored are all per instance -- can be multiplied by the
    // number of instances to get absolute numbers
    int current = 0;
    result[current++] = new Double(train.numInstances());
    result[current++] = new Double(eval.numInstances());

    result[current++] = new Double(eval.meanAbsoluteError());
    result[current++] = new Double(eval.rootMeanSquaredError());
    result[current++] = new Double(eval.relativeAbsoluteError());
    result[current++] = new Double(eval.rootRelativeSquaredError());
    result[current++] = new Double(eval.correlationCoefficient());

    result[current++] = new Double(eval.SFPriorEntropy());
    result[current++] = new Double(eval.SFSchemeEntropy());
    result[current++] = new Double(eval.SFEntropyGain());
    result[current++] = new Double(eval.SFMeanPriorEntropy());
    result[current++] = new Double(eval.SFMeanSchemeEntropy());
    result[current++] = new Double(eval.SFMeanEntropyGain());
   
    // Timing stats
    result[current++] = new Double(trainTimeElapsed / 1000.0);
    result[current++] = new Double(testTimeElapsed / 1000.0);
    if(canMeasureCPUTime) {
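
The excerpt above, from an experiment-framework split evaluator, wraps the build and test calls in both wall-clock and per-thread CPU timing via java.lang.management before storing the regression statistics. The sketch below shows the same timing idiom in a self-contained form; the data set, the simple 2/3 - 1/3 split and the J48 classifier are placeholders.

    import java.lang.management.ManagementFactory;
    import java.lang.management.ThreadMXBean;
    import java.util.Random;

    import weka.classifiers.Classifier;
    import weka.classifiers.Evaluation;
    import weka.classifiers.trees.J48;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class TimedEvaluationExample {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff");     // placeholder file name
        data.setClassIndex(data.numAttributes() - 1);
        data.randomize(new Random(1));
        Instances train = data.trainCV(3, 0);               // a simple 2/3 - 1/3 split
        Instances test = data.testCV(3, 0);

        ThreadMXBean thMonitor = ManagementFactory.getThreadMXBean();
        boolean canMeasureCPUTime = thMonitor.isThreadCpuTimeSupported();
        if (canMeasureCPUTime && !thMonitor.isThreadCpuTimeEnabled())
          thMonitor.setThreadCpuTimeEnabled(true);
        long thID = Thread.currentThread().getId();

        Classifier classifier = new J48();
        Evaluation eval = new Evaluation(train);

        long trainTimeStart = System.currentTimeMillis();
        long CPUStartTime = canMeasureCPUTime ? thMonitor.getThreadUserTime(thID) : -1;
        classifier.buildClassifier(train);
        long trainCPUTimeElapsed =
          canMeasureCPUTime ? thMonitor.getThreadUserTime(thID) - CPUStartTime : -1;
        long trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;

        eval.evaluateModel(classifier, test);

        System.out.println("training time: " + (trainTimeElapsed / 1000.0) + " s (wall clock)");
        if (canMeasureCPUTime)   // getThreadUserTime() reports nanoseconds
          System.out.println("training time: " + (trainCPUTimeElapsed / 1.0E9) + " s (CPU)");
        System.out.println(eval.toSummaryString());
      }
    }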


    double[][] probs = initialProbs(numInstances);
    m_NumGenerated = 0;
    double sumOfWeights = train.sumOfWeights();
    for (int j = 0; j < getNumIterations(); j++) {
      performIteration(trainYs, trainFs, probs, trainN, sumOfWeights);
      Evaluation eval = new Evaluation(train);
      eval.evaluateModel(this, test);
      results[j] += eval.correct();
    }
  }
      }
     
      // Find the number of iterations with the lowest error
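
This excerpt grows a boosted model one iteration at a time and records eval.correct() on held-out data after every iteration in order to find the best number of iterations. The original updates the model incrementally inside performIteration(); the sketch below simply rebuilds a LogitBoost model for each candidate iteration count, which is slower but selects on the same criterion. The data set, the split and the range 1..20 are placeholders.

    import java.util.Random;

    import weka.classifiers.Evaluation;
    import weka.classifiers.meta.LogitBoost;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class BestIterationsExample {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff");   // placeholder file name
        data.setClassIndex(data.numAttributes() - 1);
        data.randomize(new Random(1));
        Instances train = data.trainCV(3, 0);
        Instances test = data.testCV(3, 0);

        int bestIterations = 1;
        double bestCorrect = -1;
        for (int j = 1; j <= 20; j++) {
          LogitBoost booster = new LogitBoost();
          booster.setNumIterations(j);        // rebuild with j boosting iterations
          booster.buildClassifier(train);

          Evaluation eval = new Evaluation(train);
          eval.evaluateModel(booster, test);
          if (eval.correct() > bestCorrect) {
            bestCorrect = eval.correct();
            bestIterations = j;
          }
        }
        System.out.println("best number of iterations: " + bestIterations
            + " (" + bestCorrect + " correct)");
      }
    }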

  // compute average error
  double expansionError = 0;
  int count = 0;

  for (int i=0; i<m_numFoldsPruning; i++) {
    Evaluation eval;

    // calculate error rate if only root node
    if (expansion==0) {
      m_roots[i].m_isLeaf = true;
      eval = new Evaluation(test[i]);
      eval.evaluateModel(m_roots[i], test[i]);
      if (m_UseErrorRate) expansionError += eval.errorRate();
      else expansionError += eval.rootMeanSquaredError();
      count ++;
    }

    // make tree - expand one node at a time
    else {
      if (m_roots[i] == null) continue; // if the tree cannot be expanded, go to next fold
      m_roots[i].m_isLeaf = false;
      BFTree nodeToSplit = (BFTree)
      (((FastVector)(parallelBFElements[i].elementAt(0))).elementAt(0));
      if (!m_roots[i].makeTree(parallelBFElements[i], m_roots[i], train[i],
    nodeToSplit.m_SortedIndices, nodeToSplit.m_Weights,
    nodeToSplit.m_Dists, nodeToSplit.m_ClassProbs,
    nodeToSplit.m_TotalWeight, nodeToSplit.m_Props, m_minNumObj,
    m_Heuristic, m_UseGini)) {
        m_roots[i] = null; // cannot be expanded
        continue;
      }
      eval = new Evaluation(test[i]);
      eval.evaluateModel(m_roots[i], test[i]);
      if (m_UseErrorRate) expansionError += eval.errorRate();
      else expansionError += eval.rootMeanSquaredError();
      count ++;
    }
  }

  // no tree can be expanded any more
  if (count==0) break;

  expansionError /=count;
  errorList.addElement(new Double(expansionError));
  currentError = expansionError;

  if (!m_UseOneSE) {
    if (currentError>previousError)
      break;
  }

  else {
    if (expansionError < minError) {
      minError = expansionError;
      minExpansion = expansion;
    }

    if (currentError>previousError) {
      double oneSE = Math.sqrt(minError*(1-minError)/data.numInstances());
      if (currentError > minError + oneSE) {
        break;
      }
    }
  }

  expansion ++;
  previousError = currentError;
      }

      if (!m_UseOneSE) expansion = expansion - 1;
      else {
  double oneSE = Math.sqrt(minError*(1-minError)/data.numInstances());
  for (int i=0; i<errorList.size(); i++) {
    double error = ((Double)(errorList.elementAt(i))).doubleValue();
    if (error<=minError + oneSE) { // && counts[i]>=m_numFoldsPruning/2) {
      expansion = i;
      break;
    }
  }
      }
    }

    // build a postpruned tree
    else {
      FastVector[] modelError = new FastVector[m_numFoldsPruning];

      // calculate error of each expansion for each fold
      for (int i = 0; i < m_numFoldsPruning; i++) {
  modelError[i] = new FastVector();

  m_roots[i].m_isLeaf = true;
  Evaluation eval = new Evaluation(test[i]);
  eval.evaluateModel(m_roots[i], test[i]);
  double error;
  if (m_UseErrorRate) error = eval.errorRate();
  else error = eval.rootMeanSquaredError();
  modelError[i].addElement(new Double(error));

  m_roots[i].m_isLeaf = false;
  BFTree nodeToSplit = (BFTree)
  (((FastVector)(parallelBFElements[i].elementAt(0))).elementAt(0));
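
The pruning loop above uses the one-standard-error rule: it records the cross-validated error for every expansion size and later picks the smallest expansion whose error lies within one standard error of the minimum, where the standard error is computed as sqrt(minError * (1 - minError) / n). A small stand-alone version of that selection step, with made-up error values, might look like this:

    public class OneSERule {

      /** Picks the smallest expansion whose error is within one standard error of the minimum. */
      public static int selectExpansion(double[] errorList, int numInstances) {
        double minError = Double.MAX_VALUE;
        for (double error : errorList)
          minError = Math.min(minError, error);

        // standard error of the minimum error rate, treated as a binomial proportion
        double oneSE = Math.sqrt(minError * (1 - minError) / numInstances);

        for (int i = 0; i < errorList.length; i++)
          if (errorList[i] <= minError + oneSE)
            return i;
        return errorList.length - 1;
      }

      public static void main(String[] args) {
        double[] errors = {0.30, 0.22, 0.18, 0.17, 0.19};   // made-up per-expansion errors
        System.out.println("chosen expansion: " + selectExpansion(errors, 150));
      }
    }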

      subsetWeights,dists, m_Attribute, useHeuristic, useGini);
  for (int i=0; i<2; i++){
    m_Successors[i].makeLeaf(train);
  }

  Evaluation eval = new Evaluation(test);
  eval.evaluateModel(root, test);
  double error;
  if (useErrorRate) error = eval.errorRate();
  else error = eval.rootMeanSquaredError();
  modelError.addElement(new Double(error));
      }

      if (BestFirstElements.size()!=0) {
  FastVector nextSplitElement = (FastVector)BestFirstElements.elementAt(0);

  /**
   * Evaluates an individual attribute by building a OneR classifier on just
   * that attribute and measuring its accuracy, either on the training data or
   * by cross-validation.
   *
   * @param attribute the index of the attribute to be evaluated
   * @return the merit of the attribute as (1 - error rate) * 100
   * @throws Exception if the attribute could not be evaluated
   */
  public double evaluateAttribute (int attribute)
    throws Exception {
    int[] featArray = new int[2]; // feat + class
    double errorRate;
    Evaluation o_Evaluation;
    Remove delTransform = new Remove();
    delTransform.setInvertSelection(true);
    // copy the instances
    Instances trainCopy = new Instances(m_trainInstances);
    featArray[0] = attribute;
    featArray[1] = trainCopy.classIndex();
    delTransform.setAttributeIndicesArray(featArray);
    delTransform.setInputFormat(trainCopy);
    trainCopy = Filter.useFilter(trainCopy, delTransform);
    o_Evaluation = new Evaluation(trainCopy);
    String [] oneROpts = { "-B", ""+getMinimumBucketSize()};
    Classifier oneR = Classifier.forName("weka.classifiers.rules.OneR", oneROpts);
    if (m_evalUsingTrainingData) {
      oneR.buildClassifier(trainCopy);
      o_Evaluation.evaluateModel(oneR, trainCopy);
    } else {
      /*      o_Evaluation.crossValidateModel("weka.classifiers.rules.OneR",
              trainCopy, 10,
              null, new Random(m_randomSeed)); */
      o_Evaluation.crossValidateModel(oneR, trainCopy, m_folds, new Random(m_randomSeed));
    }
    errorRate = o_Evaluation.errorRate();
    return  (1 - errorRate)*100.0;
  }
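
The method above scores a single attribute by keeping only that attribute and the class, building a OneR rule on it, and returning (1 - error rate) * 100, with the error taken either from the training data or from cross-validation. The same measure can be obtained without writing the filtering by hand by going through the attribute-selection API; a minimal sketch (the file name is a placeholder):

    import weka.attributeSelection.OneRAttributeEval;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class OneRAttributeScores {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff");   // placeholder file name
        data.setClassIndex(data.numAttributes() - 1);

        OneRAttributeEval evaluator = new OneRAttributeEval();
        evaluator.buildEvaluator(data);

        // score each non-class attribute; higher means more predictive on its own
        for (int i = 0; i < data.numAttributes(); i++) {
          if (i == data.classIndex()) continue;
          System.out.println(data.attribute(i).name() + ": "
              + evaluator.evaluateAttribute(i));
        }
      }
    }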

    boolean canMeasureCPUTime = thMonitor.isThreadCpuTimeSupported();
    if(canMeasureCPUTime && !thMonitor.isThreadCpuTimeEnabled())
      thMonitor.setThreadCpuTimeEnabled(true);
   
    Object [] result = new Object[overall_length];
    Evaluation eval = new Evaluation(train);
    m_Classifier = Classifier.makeCopy(m_Template);
    double [] predictions;
    long thID = Thread.currentThread().getId();
    long CPUStartTime=-1, trainCPUTimeElapsed=-1, testCPUTimeElapsed=-1,
         trainTimeStart, trainTimeElapsed, testTimeStart, testTimeElapsed;   

    //training classifier
    trainTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    m_Classifier.buildClassifier(train);   
    if(canMeasureCPUTime)
      trainCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
   
    //testing classifier
    testTimeStart = System.currentTimeMillis();
    if(canMeasureCPUTime)
      CPUStartTime = thMonitor.getThreadUserTime(thID);
    predictions = eval.evaluateModel(m_Classifier, test);
    if(canMeasureCPUTime)
      testCPUTimeElapsed = thMonitor.getThreadUserTime(thID) - CPUStartTime;
    testTimeElapsed = System.currentTimeMillis() - testTimeStart;
    thMonitor = null;
   
    m_result = eval.toSummaryString();
    // The results stored are all per instance -- can be multiplied by the
    // number of instances to get absolute numbers
    int current = 0;
    result[current++] = new Double(train.numInstances());
    result[current++] = new Double(eval.numInstances());
    result[current++] = new Double(eval.correct());
    result[current++] = new Double(eval.incorrect());
    result[current++] = new Double(eval.unclassified());
    result[current++] = new Double(eval.pctCorrect());
    result[current++] = new Double(eval.pctIncorrect());
    result[current++] = new Double(eval.pctUnclassified());
    result[current++] = new Double(eval.kappa());
   
    result[current++] = new Double(eval.meanAbsoluteError());
    result[current++] = new Double(eval.rootMeanSquaredError());
    result[current++] = new Double(eval.relativeAbsoluteError());
    result[current++] = new Double(eval.rootRelativeSquaredError());
   
    result[current++] = new Double(eval.SFPriorEntropy());
    result[current++] = new Double(eval.SFSchemeEntropy());
    result[current++] = new Double(eval.SFEntropyGain());
    result[current++] = new Double(eval.SFMeanPriorEntropy());
    result[current++] = new Double(eval.SFMeanSchemeEntropy());
    result[current++] = new Double(eval.SFMeanEntropyGain());
   
    // K&B stats
    result[current++] = new Double(eval.KBInformation());
    result[current++] = new Double(eval.KBMeanInformation());
    result[current++] = new Double(eval.KBRelativeInformation());
   
    // IR stats
    result[current++] = new Double(eval.truePositiveRate(m_IRclass));
    result[current++] = new Double(eval.numTruePositives(m_IRclass));
    result[current++] = new Double(eval.falsePositiveRate(m_IRclass));
    result[current++] = new Double(eval.numFalsePositives(m_IRclass));
    result[current++] = new Double(eval.trueNegativeRate(m_IRclass));
    result[current++] = new Double(eval.numTrueNegatives(m_IRclass));
    result[current++] = new Double(eval.falseNegativeRate(m_IRclass));
    result[current++] = new Double(eval.numFalseNegatives(m_IRclass));
    result[current++] = new Double(eval.precision(m_IRclass));
    result[current++] = new Double(eval.recall(m_IRclass));
    result[current++] = new Double(eval.fMeasure(m_IRclass));
    result[current++] = new Double(eval.areaUnderROC(m_IRclass));
   
    // Weighted IR stats
    result[current++] = new Double(eval.weightedTruePositiveRate());
    result[current++] = new Double(eval.weightedFalsePositiveRate());
    result[current++] = new Double(eval.weightedTrueNegativeRate());
    result[current++] = new Double(eval.weightedFalseNegativeRate());
    result[current++] = new Double(eval.weightedPrecision());
    result[current++] = new Double(eval.weightedRecall());
    result[current++] = new Double(eval.weightedFMeasure());
    result[current++] = new Double(eval.weightedAreaUnderROC());
   
    // Timing stats
    result[current++] = new Double(trainTimeElapsed / 1000.0);
    result[current++] = new Double(testTimeElapsed / 1000.0);
    if(canMeasureCPUTime) {
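
Besides the error and entropy measures, the excerpt above reads the information-retrieval statistics for one class of interest (m_IRclass) as well as their weighted averages. The short sketch below pulls the same per-class numbers from an Evaluation after cross-validation; the data set, the NaiveBayes classifier and the class index 0 are placeholders.

    import java.util.Random;

    import weka.classifiers.Evaluation;
    import weka.classifiers.bayes.NaiveBayes;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class PerClassStatsExample {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff");   // placeholder file name
        data.setClassIndex(data.numAttributes() - 1);

        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(new NaiveBayes(), data, 10, new Random(1));

        int irClass = 0;   // index of the class value to report on
        System.out.println("kappa:      " + eval.kappa());
        System.out.println("TP rate:    " + eval.truePositiveRate(irClass));
        System.out.println("FP rate:    " + eval.falsePositiveRate(irClass));
        System.out.println("precision:  " + eval.precision(irClass));
        System.out.println("recall:     " + eval.recall(irClass));
        System.out.println("F-measure:  " + eval.fMeasure(irClass));
        System.out.println("ROC area:   " + eval.areaUnderROC(irClass));
        System.out.println("weighted F: " + eval.weightedFMeasure());
      }
    }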

    int        i;
    Enumeration<PointDouble>  enm;
    Vector<Performance>    performances;
    PointDouble      values;
    Instances      data;
    Evaluation      eval;
    PointDouble      result;
    Classifier      classifier;
    Filter      filter;
    int        size;
    boolean      cached;
    boolean      allCached;
    Performance      p1;
    Performance      p2;
    double      x;
    double      y;
   
    performances = new Vector<Performance>();
   
    log("Determining best pair with " + cv + "-fold CV in Grid:\n" + grid + "\n");
   
    if (m_Traversal == TRAVERSAL_BY_COLUMN)
      size = grid.width();
    else
      size = grid.height();
   
    allCached = true;

    for (i = 0; i < size; i++) {
      if (m_Traversal == TRAVERSAL_BY_COLUMN)
  enm = grid.column(i);
      else
  enm = grid.row(i);
     
      filter = null;
      data   = null;
     
      while (enm.hasMoreElements()) {
  values = enm.nextElement();
 
  // already calculated?
  cached = m_Cache.isCached(cv, values);
  if (cached) {
    performances.add(m_Cache.get(cv, values));
  }
  else {
    allCached = false;
   
    x = evaluate(values.getX(), true);
    y = evaluate(values.getY(), false);
   
    // data pass through filter
    if (filter == null) {
      filter = (Filter) setup(getFilter(), x, y);
      filter.setInputFormat(inst);
      data = Filter.useFilter(inst, filter);
      // make sure that the numbers don't get too small - otherwise NaNs!
      Filter cleaner = new NumericCleaner();
      cleaner.setInputFormat(data);
      data = Filter.useFilter(data, cleaner);
    }

    // setup classifier
    classifier = (Classifier) setup(getClassifier(), x, y);

    // evaluate
    eval = new Evaluation(data);
    eval.crossValidateModel(classifier, data, cv, new Random(getSeed()));
    performances.add(new Performance(values, eval));
   
    // add to cache
    m_Cache.add(cv, new Performance(values, eval));
  }
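
GridSearch above cross-validates a classifier/filter pair for every point of a two-dimensional parameter grid, caches the resulting Performance objects, and keeps the best pair. A much-reduced one-dimensional analogue, tuning only J48's pruning confidence by 10-fold cross-validation, might look like this (the candidate values and the data set are placeholders):

    import java.util.Random;

    import weka.classifiers.Evaluation;
    import weka.classifiers.trees.J48;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class SimpleParameterSearch {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff");     // placeholder file name
        data.setClassIndex(data.numAttributes() - 1);

        float[] candidates = {0.05f, 0.1f, 0.25f, 0.5f};   // candidate pruning confidences
        float best = candidates[0];
        double bestPct = -1;

        for (float c : candidates) {
          J48 tree = new J48();
          tree.setConfidenceFactor(c);

          Evaluation eval = new Evaluation(data);
          eval.crossValidateModel(tree, data, 10, new Random(1));
          System.out.println("C = " + c + " -> " + eval.pctCorrect() + "% correct");
          if (eval.pctCorrect() > bestPct) {
            bestPct = eval.pctCorrect();
            best = c;
          }
        }
        System.out.println("best confidence factor: " + best);
      }
    }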

          if (ce.getGroupIdentifier() != m_currentBatchIdentifier) {
                  if (ce.getTrainSet().getDataSet() == null ||
                      ce.getTrainSet().getDataSet().numInstances() == 0) {
                    // we have no training set to estimate majority class
                    // or mean of target from
                    m_eval = new Evaluation(ce.getTestSet().getDataSet());
                    m_eval.useNoPriors();
                  } else {
                    m_eval = new Evaluation(ce.getTrainSet().getDataSet());
                  }

//      m_classifier = ce.getClassifier();
      if (m_visualizableErrorListeners.size() > 0) {
        m_predInstances =
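
The KnowledgeFlow evaluator above falls back to building the Evaluation from the test set and calling useNoPriors() whenever no training data is available to estimate the class priors (or the mean of the target) from. A small helper capturing just that decision, under the assumption that the two Instances objects are supplied by the caller:

    import weka.classifiers.Evaluation;
    import weka.core.Instances;

    public class EvaluationSetup {

      /**
       * Builds an Evaluation the way the event handler above does: take the class
       * priors from the training set when one exists, otherwise fall back to the
       * test set's header and disable the prior-based statistics.
       */
      public static Evaluation forBatch(Instances trainSet, Instances testSet) throws Exception {
        if (trainSet == null || trainSet.numInstances() == 0) {
          Evaluation eval = new Evaluation(testSet);
          eval.useNoPriors();
          return eval;
        }
        return new Evaluation(trainSet);
      }
    }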

      name += cname;
    }
          String cmd = m_ClassifierEditor.getValue().getClass().getName();
          if (m_ClassifierEditor.getValue() instanceof OptionHandler)
            cmd += " " + Utils.joinOptions(((OptionHandler) m_ClassifierEditor.getValue()).getOptions());
    Evaluation eval = null;
    try {
      if (m_CVBut.isSelected()) {
        testMode = 1;
        numFolds = Integer.parseInt(m_CVText.getText());
        if (numFolds <= 1) {
    throw new Exception(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Exception_Text_First"));
        }
      } else if (m_PercentBut.isSelected()) {
        testMode = 2;
        percent = Double.parseDouble(m_PercentText.getText());
        if ((percent <= 0) || (percent >= 100)) {
    throw new Exception(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Exception_Text_Second"));
        }
      } else if (m_TrainBut.isSelected()) {
        testMode = 3;
      } else if (m_TestSplitBut.isSelected()) {
        testMode = 4;
        // Check the test instance compatibility
        if (source == null) {
    throw new Exception(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Exception_Text_Third"));
        }
        if (!inst.equalHeaders(userTestStructure)) {
    throw new Exception(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Exception_Text_Fourth"));
        }
              userTestStructure.setClassIndex(classIndex);
      } else {
        throw new Exception(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Exception_Text_Fifth"));
      }
      inst.setClassIndex(classIndex);

      // set up the structure of the plottable instances for
      // visualization
            if (saveVis) {
              predInstances = setUpVisualizableInstances(inst);
              predInstances.setClassIndex(inst.classIndex()+1);
            }

      // Output some header information
      m_Log.logMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_LogMessage_Text_Second") + cname);
      m_Log.logMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_LogMessage_Text_Third") + cmd);
      if (m_Log instanceof TaskLogger) {
        ((TaskLogger)m_Log).taskStarted();
      }
      outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_First"));
      outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Second") + cname);
      if (classifier instanceof OptionHandler) {
        String [] o = ((OptionHandler) classifier).getOptions();
        outBuff.append(" " + Utils.joinOptions(o));
      }
      outBuff.append("\n");
      outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Fourth") + inst.relationName() + '\n');
      outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Sixth") + inst.numInstances() + '\n');
      outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Eigth") + inst.numAttributes() + '\n');
      if (inst.numAttributes() < 100) {
        for (int i = 0; i < inst.numAttributes(); i++) {
    outBuff.append("              " + inst.attribute(i).name()
             + '\n');
        }
      } else {
        outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Twelveth"));
      }

      outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Thirteenth"));
      switch (testMode) {
        case 3: // Test on training
    outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Fourteenth"));
    break;
        case 1: // CV mode
    outBuff.append("" + numFolds + Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Sixteenth"));
    break;
        case 2: // Percent split
    outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Seventeenth") + percent
        + Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Eighteenth"));
    break;
        case 4: // Test on user split
    if (source.isIncremental())
      outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Nineteenth"));
    else
      outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_Twentyth")
          + source.getDataSet().numInstances() + Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_TwentyFirst"));
    break;
      }
            if (costMatrix != null) {
               outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_TwentySecond"))
               .append(costMatrix.toString()).append("\n");
            }
      outBuff.append("\n");
      m_History.addResult(name, outBuff);
      m_History.setSingle(name);
     
      // Build the model and output it.
      if (outputModel || (testMode == 3) || (testMode == 4)) {
        m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Second"));

        trainTimeStart = System.currentTimeMillis();
        classifier.buildClassifier(inst);
        trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
      }

      if (outputModel) {
        outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_TwentySixth"));
        outBuff.append(classifier.toString() + "\n");
        outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_TwentyEighth") +
           Utils.doubleToString(trainTimeElapsed / 1000.0,2)
           + " " + Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_TwentyNineth"));
        m_History.updateResult(name);
        if (classifier instanceof Drawable) {
    grph = null;
    try {
      grph = ((Drawable)classifier).graph();
    } catch (Exception ex) {
    }
        }
        // copy full model for output
        SerializedObject so = new SerializedObject(classifier);
        fullClassifier = (Classifier) so.getObject();
      }
     
      switch (testMode) {
        case 3: // Test on training
        m_Log.statusMessage("Evaluating on training data...");
        eval = new Evaluation(inst, costMatrix);
       
        if (outputPredictionsText) {
    printPredictionsHeader(outBuff, inst, "training set");
        }

        for (int jj=0;jj<inst.numInstances();jj++) {
    processClassifierPrediction(inst.instance(jj), classifier,
              eval, predInstances, plotShape,
              plotSize);
   
    if (outputPredictionsText) {
      outBuff.append(predictionText(classifier, inst.instance(jj), jj+1));
    }
    if ((jj % 100) == 0) {
      m_Log.statusMessage("Evaluating on training data. Processed "
              +jj+" instances...");
    }
        }
        if (outputPredictionsText) {
    outBuff.append("\n");
        }
        outBuff.append("=== Evaluation on training set ===\n");
        break;

        case 1: // CV mode
        m_Log.statusMessage("Randomizing instances...");
        int rnd = 1;
        try {
    rnd = Integer.parseInt(m_RandomSeedText.getText().trim());
    // System.err.println("Using random seed "+rnd);
        } catch (Exception ex) {
    m_Log.logMessage("Trouble parsing random seed value");
    rnd = 1;
        }
        Random random = new Random(rnd);
        inst.randomize(random);
        if (inst.attribute(classIndex).isNominal()) {
    m_Log.statusMessage("Stratifying instances...");
    inst.stratify(numFolds);
        }
        eval = new Evaluation(inst, costMatrix);
     
        if (outputPredictionsText) {
    printPredictionsHeader(outBuff, inst, "test data");
        }

        // Make some splits and do a CV
        for (int fold = 0; fold < numFolds; fold++) {
    m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Eighth")
            + (fold + 1) + Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Nineth"));
    Instances train = inst.trainCV(numFolds, fold, random);
    eval.setPriors(train);
    m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Tenth")
            + (fold + 1) + Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Twelveth"));
    Classifier current = null;
    try {
      current = Classifier.makeCopy(template);
    } catch (Exception ex) {
      m_Log.logMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_LogMessage_Text_Fifth") + ex.getMessage());
    }
    current.buildClassifier(train);
    Instances test = inst.testCV(numFolds, fold);
    m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Eleventh")
            + (fold + 1) + Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Twelveth"));
    for (int jj=0;jj<test.numInstances();jj++) {
      processClassifierPrediction(test.instance(jj), current,
                eval, predInstances, plotShape,
                plotSize);
      if (outputPredictionsText) {
        outBuff.append(predictionText(current, test.instance(jj), jj+1));
      }
    }
        }
        if (outputPredictionsText) {
    outBuff.append("\n");
        }
        if (inst.attribute(classIndex).isNominal()) {
    outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_ThirtyThird"));
        } else {
    outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_ThirtyFourth"));
        }
        break;
   
        case 2: // Percent split
        if (!m_PreserveOrderBut.isSelected()) {
    m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Thirteenth"));
    try {
      rnd = Integer.parseInt(m_RandomSeedText.getText().trim());
    } catch (Exception ex) {
      m_Log.logMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Fourteenth"));
      rnd = 1;
    }
    inst.randomize(new Random(rnd));
        }
        int trainSize = (int) Math.round(inst.numInstances() * percent / 100);
        int testSize = inst.numInstances() - trainSize;
        Instances train = new Instances(inst, 0, trainSize);
        Instances test = new Instances(inst, trainSize, testSize);
        m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Fifteenth") + trainSize+ Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Sixteenth"));
        Classifier current = null;
        try {
    current = Classifier.makeCopy(template);
        } catch (Exception ex) {
    m_Log.logMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_LogMessage_Text_Sixth") + ex.getMessage());
        }
        current.buildClassifier(train);
        eval = new Evaluation(train, costMatrix);
        m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Seventeenth"));
      
        if (outputPredictionsText) {
    printPredictionsHeader(outBuff, inst, Messages.getInstance().getString("ClassifierPanel_StartClassifier_PrintPredictionsHeader_Text_First"));
        }
    
        for (int jj=0;jj<test.numInstances();jj++) {
    processClassifierPrediction(test.instance(jj), current,
              eval, predInstances, plotShape,
              plotSize);
    if (outputPredictionsText) {
        outBuff.append(predictionText(current, test.instance(jj), jj+1));
    }
    if ((jj % 100) == 0) {
      m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Eighteenth")
              +jj + Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Nineteenth"));
    }
        }
        if (outputPredictionsText) {
    outBuff.append("\n");
        }
        outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_ThirtySixth"));
        break;
   
        case 4: // Test on user split
        m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_Twentyth"));
        eval = new Evaluation(inst, costMatrix);
       
        if (outputPredictionsText) {
    printPredictionsHeader(outBuff, inst, Messages.getInstance().getString("ClassifierPanel_StartClassifier_PrintPredictionsHeader_Text_Second"));
        }

        Instance instance;
        int jj = 0;
        while (source.hasMoreElements(userTestStructure)) {
    instance = source.nextElement(userTestStructure);
    processClassifierPrediction(instance, classifier,
        eval, predInstances, plotShape,
        plotSize);
    if (outputPredictionsText) {
      outBuff.append(predictionText(classifier, instance, jj+1));
    }
    if ((++jj % 100) == 0) {
      m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_TwentyFirst")
          + jj + Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_TwentySecond"));
    }
        }

        if (outputPredictionsText) {
    outBuff.append("\n");
        }
        outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_ThirtyEighth"));
        break;

        default:
        throw new Exception(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Exception_Text"));
      }
     
      if (outputSummary) {
        outBuff.append(eval.toSummaryString(outputEntropy) + "\n");
      }

      if (inst.attribute(classIndex).isNominal()) {

        if (outputPerClass) {
    outBuff.append(eval.toClassDetailsString() + "\n");
        }

        if (outputConfusion) {
    outBuff.append(eval.toMatrixString() + "\n");
        }
      }

            if (   (fullClassifier instanceof Sourcable)
                 && m_OutputSourceCode.isSelected()) {
              outBuff.append(Messages.getInstance().getString("ClassifierPanel_StartClassifier_OutBuffer_Text_FourtySecond"));
              outBuff.append(
                Evaluation.wekaStaticWrapper(
                    ((Sourcable) fullClassifier),
                    m_SourceCodeClass.getText()));
            }

      m_History.updateResult(name);
      m_Log.logMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_LogMessage_Text_Seventh") + cname);
      m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_TwentyThird"));
    } catch (Exception ex) {
      ex.printStackTrace();
      m_Log.logMessage(ex.getMessage());
      JOptionPane.showMessageDialog(ClassifierPanel.this,
            Messages.getInstance().getString("ClassifierPanel_StartClassifier_JOptionPaneShowMessageDialog_Text_First")
            + ex.getMessage(),
            Messages.getInstance().getString("ClassifierPanel_StartClassifier_JOptionPaneShowMessageDialog_Text_Second"),
            JOptionPane.ERROR_MESSAGE);
      m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_StartClassifier_Log_StatusMessage_Text_TwentyFourth"));
    } finally {
      try {
              if (!saveVis && outputModel) {
      FastVector vv = new FastVector();
      vv.addElement(fullClassifier);
      Instances trainHeader = new Instances(m_Instances, 0);
      trainHeader.setClassIndex(classIndex);
      vv.addElement(trainHeader);
                  if (grph != null) {
        vv.addElement(grph);
      }
      m_History.addObject(name, vv);
              } else if (saveVis && predInstances != null &&
                  predInstances.numInstances() > 0) {
    if (predInstances.attribute(predInstances.classIndex())
        .isNumeric()) {
      postProcessPlotInfo(plotSize);
    }
    m_CurrentVis = new VisualizePanel();
    m_CurrentVis.setName(name+" ("+inst.relationName()+")");
    m_CurrentVis.setLog(m_Log);
    PlotData2D tempd = new PlotData2D(predInstances);
    tempd.setShapeSize(plotSize);
    tempd.setShapeType(plotShape);
    tempd.setPlotName(name+" ("+inst.relationName()+")");
    //tempd.addInstanceNumberAttribute();
   
    m_CurrentVis.addPlot(tempd);
    //m_CurrentVis.setColourIndex(predInstances.classIndex()+1);
    m_CurrentVis.setColourIndex(predInstances.classIndex());
     
                FastVector vv = new FastVector();
                if (outputModel) {
                  vv.addElement(fullClassifier);
                  Instances trainHeader = new Instances(m_Instances, 0);
                  trainHeader.setClassIndex(classIndex);
                  vv.addElement(trainHeader);
                  if (grph != null) {
                    vv.addElement(grph);
                  }
                }
                vv.addElement(m_CurrentVis);
               
                if ((eval != null) && (eval.predictions() != null)) {
                  vv.addElement(eval.predictions());
                  vv.addElement(inst.classAttribute());
                }
                m_History.addObject(name, vv);
        }
      } catch (Exception ex) {
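
The Explorer panel above supports several test modes; the percentage-split branch (testMode 2) randomizes the data, trains on the first part and evaluates on the remainder, taking the class priors from the training part. A stand-alone sketch of that mode follows; the 66% split, the seed and the data set are placeholders.

    import java.util.Random;

    import weka.classifiers.Classifier;
    import weka.classifiers.Evaluation;
    import weka.classifiers.trees.J48;
    import weka.core.Instances;
    import weka.core.converters.ConverterUtils.DataSource;

    public class PercentageSplitExample {
      public static void main(String[] args) throws Exception {
        Instances data = DataSource.read("iris.arff");   // placeholder file name
        data.setClassIndex(data.numAttributes() - 1);

        double percent = 66;                              // train on 66%, test on the rest
        data.randomize(new Random(1));
        int trainSize = (int) Math.round(data.numInstances() * percent / 100);
        int testSize = data.numInstances() - trainSize;
        Instances train = new Instances(data, 0, trainSize);
        Instances test = new Instances(data, trainSize, testSize);

        Classifier classifier = new J48();
        classifier.buildClassifier(train);

        // priors come from the training split, predictions from the held-out split
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(classifier, test);
        System.out.println(eval.toSummaryString("=== Evaluation on test split ===\n", false));
      }
    }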

            boolean outputEntropy = m_OutputEntropyBut.isSelected();
            boolean saveVis = m_StorePredictionsBut.isSelected();
            boolean outputPredictionsText =
              m_OutputPredictionsTextBut.isSelected();
            String grph = null;   
            Evaluation eval = null;

            try {

              boolean incrementalLoader = (m_TestLoader instanceof IncrementalConverter);
              if (m_TestLoader != null && m_TestLoader.getStructure() != null) {
                m_TestLoader.reset();
                source = new DataSource(m_TestLoader);
                userTestStructure = source.getStructure();
              }
              // Check the test instance compatibility
              if (source == null) {
                throw new Exception(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Exception_Text_First"));
              }
              if (trainHeader != null) {
                if (trainHeader.classIndex() >
                    userTestStructure.numAttributes()-1)
                  throw new Exception(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Exception_Text_Second"));
                userTestStructure.setClassIndex(trainHeader.classIndex());
                if (!trainHeader.equalHeaders(userTestStructure)) {
                  throw new Exception(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Exception_Text_Third"));
                }
              } else {
          if (classifier instanceof PMMLClassifier) {
            // set the class based on information in the mining schema
            Instances miningSchemaStructure =
              ((PMMLClassifier)classifier).getMiningSchema().getMiningSchemaAsInstances();
            String className = miningSchemaStructure.classAttribute().name();
            Attribute classMatch = userTestStructure.attribute(className);
            if (classMatch == null) {
              throw new Exception(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Exception_Text_Fourth")
            + className + Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Exception_Text_Fifth"));
            }
            userTestStructure.setClass(classMatch);
          } else {
            userTestStructure.
              setClassIndex(userTestStructure.numAttributes()-1);
          }
              }
              if (m_Log instanceof TaskLogger) {
                ((TaskLogger)m_Log).taskStarted();
              }
              m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_StatusMessage_Text_Second"));
              m_Log.logMessage(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_LogMessage_Text_First") + name
                               + Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_LogMessage_Text_Second"));
              eval = new Evaluation(userTestStructure, costMatrix);
              eval.useNoPriors();
     
              // set up the structure of the plottable instances for
              // visualization if selected
              if (saveVis) {
                predInstances = setUpVisualizableInstances(userTestStructure);
                predInstances.setClassIndex(userTestStructure.classIndex()+1);
              }
     
              outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_Twelveth"));
              outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_Thirteenth"))
              outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_Fourteenth")
                             + userTestStructure.relationName() + '\n');
              if (incrementalLoader)
          outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_Sixteenth"));
              else
          outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_Seventeenth") + source.getDataSet().numInstances() + "\n");
              outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_Nineteenth")
            + userTestStructure.numAttributes() + "\n\n");
              if (trainHeader == null &&
                  !(classifier instanceof
                      weka.classifiers.pmml.consumer.PMMLClassifier)) {

                outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_TwentyFirst"));

              }

              if (outputPredictionsText) {
                outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_TwentySecond"));
                outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_TwentyThird"));
                if (userTestStructure.classAttribute().isNominal()) {
                  outBuff.append(Messages.getInstance().getString("ClassifierPanel_SaveClassifier_OutBuffer_Text_TwentyFourth"));
                }
                outBuff.append("\n");
              }

        Instance instance;
        int jj = 0;
        while (source.hasMoreElements(userTestStructure)) {
    instance = source.nextElement(userTestStructure);
    processClassifierPrediction(instance, classifier,
        eval, predInstances, plotShape,
        plotSize);
    if (outputPredictionsText) {
      outBuff.append(predictionText(classifier, instance, jj+1));
    }
    if ((++jj % 100) == 0) {
      m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_StatusMessage_Text_Third")
          +jj + Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_StatusMessage_Text_Fourth"));
    }
        }

              if (outputPredictionsText) {
                outBuff.append("\n");
              }
     
              if (outputSummary) {
                outBuff.append(eval.toSummaryString(outputEntropy) + "\n");
              }
     
              if (userTestStructure.classAttribute().isNominal()) {
 
                if (outputPerClass) {
                  outBuff.append(eval.toClassDetailsString() + "\n");
                }
 
                if (outputConfusion) {
                  outBuff.append(eval.toMatrixString() + "\n");
                }
              }
     
              m_History.updateResult(name);
              m_Log.logMessage(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_LogMessage_Text_Third"));
              m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_StatusMessage_Text_Fifth"));
            } catch (Exception ex) {
              ex.printStackTrace();
              m_Log.logMessage(ex.getMessage());
              m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_StatusMessage_Text_Sixth"));

              JOptionPane.showMessageDialog(ClassifierPanel.this,
                  Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_JOptionPaneShowMessageDialog_Text_First")
                                            + ex.getMessage(),
                                            Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_JOptionPaneShowMessageDialog_Text_Second"),
                                            JOptionPane.ERROR_MESSAGE);
              m_Log.statusMessage(Messages.getInstance().getString("ClassifierPanel_ReEvaluateModel_Log_StatusMessage_Text_Seventh"));
            } finally {
              try {
          if (classifier instanceof PMMLClassifier) {
            // signal the end of the scoring run so
            // that the initialized state can be reset
            // (forces the field mapping to be recomputed
            // for the next scoring run).
            ((PMMLClassifier)classifier).done();
          }
         
                if (predInstances != null && predInstances.numInstances() > 0) {
                  if (predInstances.attribute(predInstances.classIndex())
                      .isNumeric()) {
                    postProcessPlotInfo(plotSize);
                  }
                  m_CurrentVis = new VisualizePanel();
                  m_CurrentVis.setName(name+" ("
                                       +userTestStructure.relationName()+")");
                  m_CurrentVis.setLog(m_Log);
                  PlotData2D tempd = new PlotData2D(predInstances);
                  tempd.setShapeSize(plotSize);
                  tempd.setShapeType(plotShape);
                  tempd.setPlotName(name+" ("+userTestStructure.relationName()
                                    +")");
                  //tempd.addInstanceNumberAttribute();
   
                  m_CurrentVis.addPlot(tempd);
                  m_CurrentVis.setColourIndex(predInstances.classIndex());
                  //m_CurrentVis.setColourIndex(predInstances.classIndex()+1);
   
                  if (classifier instanceof Drawable) {
                    try {
                      grph = ((Drawable)classifier).graph();
                    } catch (Exception ex) {
                    }
                  }

                  if (saveVis) {
                    FastVector vv = new FastVector();
                    vv.addElement(classifier);
                    if (trainHeader != null) vv.addElement(trainHeader);
                    vv.addElement(m_CurrentVis);
                    if (grph != null) {
                      vv.addElement(grph);
                    }
                    if ((eval != null) && (eval.predictions() != null)) {
                      vv.addElement(eval.predictions());
                      vv.addElement(userTestStructure.classAttribute());
                    }
                    m_History.addObject(name, vv);
                  } else {
                    FastVector vv = new FastVector();
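
This last excerpt re-evaluates an already trained model on a user-supplied test set, constructing the Evaluation from the test structure and calling useNoPriors() because no training data is present. Assuming the model was previously saved with weka.core.SerializationHelper (the file names are placeholders), the same re-evaluation can be sketched as follows:

    import weka.classifiers.Classifier;
    import weka.classifiers.Evaluation;
    import weka.core.Instances;
    import weka.core.SerializationHelper;
    import weka.core.converters.ConverterUtils.DataSource;

    public class ReEvaluateSavedModel {
      public static void main(String[] args) throws Exception {
        // load a previously trained model and a new test set (placeholder file names)
        Classifier classifier = (Classifier) SerializationHelper.read("j48.model");
        Instances test = DataSource.read("new-batch.arff");
        test.setClassIndex(test.numAttributes() - 1);

        // no training data is available here, so skip the prior-based statistics
        Evaluation eval = new Evaluation(test);
        eval.useNoPriors();
        eval.evaluateModel(classifier, test);

        System.out.println(eval.toSummaryString("=== Re-evaluation on supplied test set ===\n", false));
        System.out.println(eval.toClassDetailsString());
        System.out.println(eval.toMatrixString());
      }
    }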
