Package weka.classifiers

Examples of weka.classifiers.Classifier


      if (m_evaluateThread == null) {
  m_evaluateThread = new Thread() {
      public void run() {
        boolean errorOccurred = false;
//        final String oldText = m_visual.getText();
        Classifier classifier = ce.getClassifier();
        try {
    // if (ce.getSetNumber() == 1) {
          if (ce.getGroupIdentifier() != m_currentBatchIdentifier) {
     
      if (ce.getTrainSet().getDataSet() == null ||
          ce.getTrainSet().getDataSet().numInstances() == 0) {
        // we have no training set to estimate majority class
        // or mean of target from
        m_eval = new Evaluation(ce.getTestSet().getDataSet());
        m_PlotInstances = ExplorerDefaults.getClassifierErrorsPlotInstances();
        m_PlotInstances.setInstances(ce.getTestSet().getDataSet());
        m_PlotInstances.setClassifier(ce.getClassifier());
        m_PlotInstances.setClassIndex(ce.getTestSet().getDataSet().classIndex());
        m_PlotInstances.setEvaluation(m_eval);

        m_eval = adjustForInputMappedClassifier(m_eval, ce.getClassifier(),
            ce.getTestSet().getDataSet(), m_PlotInstances);
        m_eval.useNoPriors();
      } else {
        // we can set up with the training set here
        m_eval = new Evaluation(ce.getTrainSet().getDataSet());
        m_PlotInstances = ExplorerDefaults.getClassifierErrorsPlotInstances();
        m_PlotInstances.setInstances(ce.getTrainSet().getDataSet());
        m_PlotInstances.setClassifier(ce.getClassifier());
        m_PlotInstances.setClassIndex(ce.getTestSet().getDataSet().classIndex());
        m_PlotInstances.setEvaluation(m_eval);
       
        m_eval = adjustForInputMappedClassifier(m_eval, ce.getClassifier(),
                        ce.getTrainSet().getDataSet(), m_PlotInstances);
      }
//      m_classifier = ce.getClassifier();

      m_PlotInstances.setUp();
     
      m_currentBatchIdentifier = ce.getGroupIdentifier();
      m_setsComplete = 0;
    }
//    if (ce.getSetNumber() <= ce.getMaxSetNumber()) {
          if (m_setsComplete < ce.getMaxSetNumber()) {
     
      /*if (ce.getTrainSet().getDataSet() != null &&
          ce.getTrainSet().getDataSet().numInstances() > 0) {
        // set the priors
        m_eval.setPriors(ce.getTrainSet().getDataSet());
      } */
     
//      m_visual.setText("Evaluating ("+ce.getSetNumber()+")...");
      if (m_logger != null) {
        m_logger.statusMessage(statusMessagePrefix()
             +"Evaluating ("+ce.getSetNumber()
             +")...");
      }
      m_visual.setAnimated();
      /*
      m_eval.evaluateModel(ce.getClassifier(),
      ce.getTestSet().getDataSet()); */
      for (int i = 0; i < ce.getTestSet().getDataSet().numInstances(); i++) {
        Instance temp = ce.getTestSet().getDataSet().instance(i);
        m_PlotInstances.process(temp, ce.getClassifier(), m_eval);
      }
     
      m_setsComplete++;
    }
   
//    if (ce.getSetNumber() == ce.getMaxSetNumber()) {
          if (m_setsComplete == ce.getMaxSetNumber()) {
                  //      System.err.println(m_eval.toSummaryString());
      // m_resultsString.append(m_eval.toSummaryString());
      // m_outText.setText(m_resultsString.toString());
      String textTitle = classifier.getClass().getName();
      String textOptions = "";
      if (classifier instanceof OptionHandler) {
               textOptions =
                 Utils.joinOptions(((OptionHandler)classifier).getOptions());
      }
View Full Code Here


    m_ClassifierEditor.addPropertyChangeListener(new PropertyChangeListener() {
      public void propertyChange(PropertyChangeEvent e) {
        m_StartBut.setEnabled(true);
        // Check capabilities
        Capabilities currentFilter = m_ClassifierEditor.getCapabilitiesFilter();
        Classifier classifier = (Classifier) m_ClassifierEditor.getValue();
        Capabilities currentSchemeCapabilities =  null;
        if (classifier != null && currentFilter != null &&
            (classifier instanceof CapabilitiesHandler)) {
          currentSchemeCapabilities = ((CapabilitiesHandler)classifier).getCapabilities();
         
View Full Code Here

    int testMode = 0;
    int numFolds = 10;
          double percent = 66;
    int classIndex = m_ClassCombo.getSelectedIndex();
    inst.setClassIndex(classIndex);
    Classifier classifier = (Classifier) m_ClassifierEditor.getValue();
    Classifier template = null;
    try {
      template = AbstractClassifier.makeCopy(classifier);
    } catch (Exception ex) {
      m_Log.logMessage("Problem copying classifier: " + ex.getMessage());
    }
    Classifier fullClassifier = null;
    StringBuffer outBuff = new StringBuffer();
    AbstractOutput classificationOutput = null;
    if (outputPredictionsText) {
      classificationOutput = (AbstractOutput) m_ClassificationOutputEditor.getValue();
      Instances header = new Instances(inst, 0);
      header.setClassIndex(classIndex);
      classificationOutput.setHeader(header);
      classificationOutput.setBuffer(outBuff);
    }
    String name = (new SimpleDateFormat("HH:mm:ss - ")).format(new Date());
    String cname = "";
          String cmd = "";
    Evaluation eval = null;
    try {
      if (m_CVBut.isSelected()) {
        testMode = 1;
        numFolds = Integer.parseInt(m_CVText.getText());
        if (numFolds <= 1) {
    throw new Exception("Number of folds must be greater than 1");
        }
      } else if (m_PercentBut.isSelected()) {
        testMode = 2;
        percent = Double.parseDouble(m_PercentText.getText());
        if ((percent <= 0) || (percent >= 100)) {
    throw new Exception("Percentage must be between 0 and 100");
        }
      } else if (m_TrainBut.isSelected()) {
        testMode = 3;
      } else if (m_TestSplitBut.isSelected()) {
        testMode = 4;
        // Check the test instance compatibility
        if (source == null) {
          throw new Exception("No user test set has been specified");
        }
       
        if (!(classifier instanceof weka.classifiers.misc.InputMappedClassifier)) {
          if (!inst.equalHeaders(userTestStructure)) {
            boolean wrapClassifier = false;
            if (!Utils.
                getDontShowDialog("weka.gui.explorer.ClassifierPanel.AutoWrapInInputMappedClassifier")) {
              JCheckBox dontShow = new JCheckBox("Do not show this message again");
              Object[] stuff = new Object[2];
              stuff[0] = "Train and test set are not compatible.\n" +
              "Would you like to automatically wrap the classifier in\n" +
              "an \"InputMappedClassifier\" before proceeding?.\n";
              stuff[1] = dontShow;

              int result = JOptionPane.showConfirmDialog(ClassifierPanel.this, stuff,
                  "ClassifierPanel", JOptionPane.YES_OPTION);
             
              if (result == JOptionPane.YES_OPTION) {
                wrapClassifier = true;
              }
             
              if (dontShow.isSelected()) {
                String response = (wrapClassifier) ? "yes" : "no";
                Utils.
                  setDontShowDialogResponse("weka.gui.explorer.ClassifierPanel.AutoWrapInInputMappedClassifier",
                      response);
              }

            } else {
              // What did the user say - do they want to autowrap or not?
              String response =
                Utils.getDontShowDialogResponse("weka.gui.explorer.ClassifierPanel.AutoWrapInInputMappedClassifier");
              if (response != null && response.equalsIgnoreCase("yes")) {
                wrapClassifier = true;
              }
            }

            if (wrapClassifier) {
              weka.classifiers.misc.InputMappedClassifier temp =
                new weka.classifiers.misc.InputMappedClassifier();

              // pass on the known test structure so that we get the
              // correct mapping report from the toString() method
              // of InputMappedClassifier
              temp.setClassifier(classifier);
              temp.setTestStructure(userTestStructure);
              classifier = temp;
            } else {
              throw new Exception("Train and test set are not compatible\n" + inst.equalHeadersMsg(userTestStructure));
            }
          }
        }
             
      } else {
        throw new Exception("Unknown test mode");
      }

      cname = classifier.getClass().getName();
      if (cname.startsWith("weka.classifiers.")) {
        name += cname.substring("weka.classifiers.".length());
      } else {
        name += cname;
      }
      cmd = classifier.getClass().getName();
      if (classifier instanceof OptionHandler)
        cmd += " " + Utils.joinOptions(((OptionHandler) classifier).getOptions());
     
      // set up the structure of the plottable instances for
      // visualization
      plotInstances = ExplorerDefaults.getClassifierErrorsPlotInstances();
      plotInstances.setInstances(inst);
      plotInstances.setClassifier(classifier);
      plotInstances.setClassIndex(inst.classIndex());
      plotInstances.setSaveForVisualization(saveVis);

      // Output some header information
      m_Log.logMessage("Started " + cname);
      m_Log.logMessage("Command: " + cmd);
      if (m_Log instanceof TaskLogger) {
        ((TaskLogger)m_Log).taskStarted();
      }
      outBuff.append("=== Run information ===\n\n");
      outBuff.append("Scheme:       " + cname);
      if (classifier instanceof OptionHandler) {
        String [] o = ((OptionHandler) classifier).getOptions();
        outBuff.append(" " + Utils.joinOptions(o));
      }
      outBuff.append("\n");
      outBuff.append("Relation:     " + inst.relationName() + '\n');
      outBuff.append("Instances:    " + inst.numInstances() + '\n');
      outBuff.append("Attributes:   " + inst.numAttributes() + '\n');
      if (inst.numAttributes() < 100) {
        for (int i = 0; i < inst.numAttributes(); i++) {
    outBuff.append("              " + inst.attribute(i).name()
             + '\n');
        }
      } else {
        outBuff.append("              [list of attributes omitted]\n");
      }

      outBuff.append("Test mode:    ");
      switch (testMode) {
        case 3: // Test on training
    outBuff.append("evaluate on training data\n");
    break;
        case 1: // CV mode
    outBuff.append("" + numFolds + "-fold cross-validation\n");
    break;
        case 2: // Percent split
    outBuff.append("split " + percent
        + "% train, remainder test\n");
    break;
        case 4: // Test on user split
    if (source.isIncremental())
      outBuff.append("user supplied test set: "
          + " size unknown (reading incrementally)\n");
    else
      outBuff.append("user supplied test set: "
          + source.getDataSet().numInstances() + " instances\n");
    break;
      }
            if (costMatrix != null) {
               outBuff.append("Evaluation cost matrix:\n")
               .append(costMatrix.toString()).append("\n");
            }
      outBuff.append("\n");
      m_History.addResult(name, outBuff);
      m_History.setSingle(name);
     
      // Build the model and output it.
      if (outputModel || (testMode == 3) || (testMode == 4)) {
        m_Log.statusMessage("Building model on training data...");

        trainTimeStart = System.currentTimeMillis();
        classifier.buildClassifier(inst);
        trainTimeElapsed = System.currentTimeMillis() - trainTimeStart;
      }

      if (outputModel) {
        outBuff.append("=== Classifier model (full training set) ===\n\n");
        outBuff.append(classifier.toString() + "\n");
        outBuff.append("\nTime taken to build model: " +
           Utils.doubleToString(trainTimeElapsed / 1000.0,2)
           + " seconds\n\n");
        m_History.updateResult(name);
        if (classifier instanceof Drawable) {
    grph = null;
    try {
      grph = ((Drawable)classifier).graph();
    } catch (Exception ex) {
    }
        }
        // copy full model for output
        SerializedObject so = new SerializedObject(classifier);
        fullClassifier = (Classifier) so.getObject();
      }
     
      switch (testMode) {
        case 3: // Test on training
        m_Log.statusMessage("Evaluating on training data...");
        eval = new Evaluation(inst, costMatrix);
       
        // make adjustments if the classifier is an InputMappedClassifier
        eval = setupEval(eval, classifier, inst, costMatrix,
            plotInstances, classificationOutput, false);
       
        //plotInstances.setEvaluation(eval);
              plotInstances.setUp();
       
        if (outputPredictionsText) {
    printPredictionsHeader(outBuff, classificationOutput, "training set");
        }

        for (int jj=0;jj<inst.numInstances();jj++) {
    plotInstances.process(inst.instance(jj), classifier, eval);
   
    if (outputPredictionsText) {
      classificationOutput.printClassification(classifier, inst.instance(jj), jj);
    }
    if ((jj % 100) == 0) {
      m_Log.statusMessage("Evaluating on training data. Processed "
              +jj+" instances...");
    }
        }
        if (outputPredictionsText)
    classificationOutput.printFooter();
        if (outputPredictionsText && classificationOutput.generatesOutput()) {
    outBuff.append("\n");
        }
        outBuff.append("=== Evaluation on training set ===\n");
        break;

        case 1: // CV mode
        m_Log.statusMessage("Randomizing instances...");
        int rnd = 1;
        try {
    rnd = Integer.parseInt(m_RandomSeedText.getText().trim());
    // System.err.println("Using random seed "+rnd);
        } catch (Exception ex) {
    m_Log.logMessage("Trouble parsing random seed value");
    rnd = 1;
        }
        Random random = new Random(rnd);
        inst.randomize(random);
        if (inst.attribute(classIndex).isNominal()) {
    m_Log.statusMessage("Stratifying instances...");
    inst.stratify(numFolds);
        }
        eval = new Evaluation(inst, costMatrix);
       
         // make adjustments if the classifier is an InputMappedClassifier
              eval = setupEval(eval, classifier, inst, costMatrix,
                  plotInstances, classificationOutput, false);
       
//        plotInstances.setEvaluation(eval);
              plotInstances.setUp();
     
        if (outputPredictionsText) {
    printPredictionsHeader(outBuff, classificationOutput, "test data");
        }

        // Make some splits and do a CV
        for (int fold = 0; fold < numFolds; fold++) {
    m_Log.statusMessage("Creating splits for fold "
            + (fold + 1) + "...");
    Instances train = inst.trainCV(numFolds, fold, random);
   
    // make adjustments if the classifier is an InputMappedClassifier
          eval = setupEval(eval, classifier, train, costMatrix,
              plotInstances, classificationOutput, true);
         
//    eval.setPriors(train);
    m_Log.statusMessage("Building model for fold "
            + (fold + 1) + "...");
    Classifier current = null;
    try {
      current = AbstractClassifier.makeCopy(template);
    } catch (Exception ex) {
      m_Log.logMessage("Problem copying classifier: " + ex.getMessage());
    }
    current.buildClassifier(train);
    Instances test = inst.testCV(numFolds, fold);
    m_Log.statusMessage("Evaluating model for fold "
            + (fold + 1) + "...");
    for (int jj=0;jj<test.numInstances();jj++) {
      plotInstances.process(test.instance(jj), current, eval);
      if (outputPredictionsText) {
        classificationOutput.printClassification(current, test.instance(jj), jj);
      }
    }
        }
        if (outputPredictionsText)
    classificationOutput.printFooter();
        if (outputPredictionsText) {
    outBuff.append("\n");
        }
        if (inst.attribute(classIndex).isNominal()) {
    outBuff.append("=== Stratified cross-validation ===\n");
        } else {
    outBuff.append("=== Cross-validation ===\n");
        }
        break;
   
        case 2: // Percent split
        if (!m_PreserveOrderBut.isSelected()) {
    m_Log.statusMessage("Randomizing instances...");
    try {
      rnd = Integer.parseInt(m_RandomSeedText.getText().trim());
    } catch (Exception ex) {
      m_Log.logMessage("Trouble parsing random seed value");
      rnd = 1;
    }
    inst.randomize(new Random(rnd));
        }
        int trainSize = (int) Math.round(inst.numInstances() * percent / 100);
        int testSize = inst.numInstances() - trainSize;
        Instances train = new Instances(inst, 0, trainSize);
        Instances test = new Instances(inst, trainSize, testSize);
        m_Log.statusMessage("Building model on training split ("+trainSize+" instances)...");
        Classifier current = null;
        try {
    current = AbstractClassifier.makeCopy(template);
        } catch (Exception ex) {
    m_Log.logMessage("Problem copying classifier: " + ex.getMessage());
        }
        current.buildClassifier(train);
        eval = new Evaluation(train, costMatrix);
       
        // make adjustments if the classifier is an InputMappedClassifier
              eval = setupEval(eval, classifier, train, costMatrix,
                  plotInstances, classificationOutput, false);
View Full Code Here

    VisualizePanel temp_vp = null;
    String temp_grph = null;
    FastVector temp_preds = null;
    Attribute temp_classAtt = null;
    Classifier temp_classifier = null;
    Instances temp_trainHeader = null;
     
    if (o != null) {
      for (int i = 0; i < o.size(); i++) {
  Object temp = o.elementAt(i);
  if (temp instanceof Classifier) {
    temp_classifier = (Classifier)temp;
  } else if (temp instanceof Instances) { // training header
    temp_trainHeader = (Instances)temp;
  } else if (temp instanceof VisualizePanel) { // normal errors
    temp_vp = (VisualizePanel)temp;
  } else if (temp instanceof String) { // graphable output
    temp_grph = (String)temp;
  } else if (temp instanceof FastVector) { // predictions
    temp_preds = (FastVector)temp;
  } else if (temp instanceof Attribute) { // class attribute
    temp_classAtt = (Attribute)temp;
  }
      }
    }

    final VisualizePanel vp = temp_vp;
    final String grph = temp_grph;
    final FastVector preds = temp_preds;
    final Attribute classAtt = temp_classAtt;
    final Classifier classifier = temp_classifier;
    final Instances trainHeader = temp_trainHeader;
   
    JMenuItem saveModel = new JMenuItem("Save model");
    if (classifier != null) {
      saveModel.addActionListener(new ActionListener() {
    public void actionPerformed(ActionEvent e) {
      saveClassifier(selectedName, classifier, trainHeader);
    }
  });
    } else {
      saveModel.setEnabled(false);
    }
    resultListMenu.add(saveModel);

    JMenuItem reEvaluate =
      new JMenuItem("Re-evaluate model on current test set");
    if (classifier != null && m_TestLoader != null) {
      reEvaluate.addActionListener(new ActionListener() {
    public void actionPerformed(ActionEvent e) {
      reevaluateModel(selectedName, classifier, trainHeader);
    }
  });
    } else {
      reEvaluate.setEnabled(false);
    }
    resultListMenu.add(reEvaluate);
   
    resultListMenu.addSeparator();
   
    JMenuItem visErrors = new JMenuItem("Visualize classifier errors");
    if (vp != null) {
      if ((vp.getXIndex() == 0) && (vp.getYIndex() == 1)) {
  try {
    vp.setXIndex(vp.getInstances().classIndex())// class
    vp.setYIndex(vp.getInstances().classIndex() - 1)// predicted class
  }
  catch (Exception e) {
    // ignored
  }
      }
      visErrors.addActionListener(new ActionListener() {
    public void actionPerformed(ActionEvent e) {
      visualizeClassifierErrors(vp);
    }
  });
    } else {
      visErrors.setEnabled(false);
    }
    resultListMenu.add(visErrors);

    JMenuItem visGrph = new JMenuItem("Visualize tree");
    if (grph != null) {
  if(((Drawable)temp_classifier).graphType()==Drawable.TREE) {
      visGrph.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
      String title;
      if (vp != null) title = vp.getName();
      else title = selectedName;
      visualizeTree(grph, title);
        }
    });
  }
  else if(((Drawable)temp_classifier).graphType()==Drawable.BayesNet) {
      visGrph.setText("Visualize graph");
      visGrph.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
      Thread th = new Thread() {
        public void run() {
        visualizeBayesNet(grph, selectedName);
        }
          };
      th.start();
        }
    });
  }
  else
      visGrph.setEnabled(false);
    } else {
      visGrph.setEnabled(false);
    }
    resultListMenu.add(visGrph);

    JMenuItem visMargin = new JMenuItem("Visualize margin curve");
    if ((preds != null) && (classAtt != null) && (classAtt.isNominal())) {
      visMargin.addActionListener(new ActionListener() {
    public void actionPerformed(ActionEvent e) {
      try {
        MarginCurve tc = new MarginCurve();
        Instances result = tc.getCurve(preds);
        VisualizePanel vmc = new VisualizePanel();
        vmc.setName(result.relationName());
        vmc.setLog(m_Log);
        PlotData2D tempd = new PlotData2D(result);
        tempd.setPlotName(result.relationName());
        tempd.addInstanceNumberAttribute();
        vmc.addPlot(tempd);
        visualizeClassifierErrors(vmc);
      } catch (Exception ex) {
        ex.printStackTrace();
      }
    }
  });
    } else {
      visMargin.setEnabled(false);
    }
    resultListMenu.add(visMargin);

    JMenu visThreshold = new JMenu("Visualize threshold curve");
    if ((preds != null) && (classAtt != null) && (classAtt.isNominal())) {
      for (int i = 0; i < classAtt.numValues(); i++) {
  JMenuItem clv = new JMenuItem(classAtt.value(i));
  final int classValue = i;
  clv.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        try {
    ThresholdCurve tc = new ThresholdCurve();
    Instances result = tc.getCurve(preds, classValue);
    //VisualizePanel vmc = new VisualizePanel();
    ThresholdVisualizePanel vmc = new ThresholdVisualizePanel();
    vmc.setROCString("(Area under ROC = " +
         Utils.doubleToString(ThresholdCurve.getROCArea(result), 4) + ")");
    vmc.setLog(m_Log);
    vmc.setName(result.relationName()+". (Class value "+
          classAtt.value(classValue)+")");
    PlotData2D tempd = new PlotData2D(result);
    tempd.setPlotName(result.relationName());
    tempd.addInstanceNumberAttribute();
    // specify which points are connected
    boolean[] cp = new boolean[result.numInstances()];
    for (int n = 1; n < cp.length; n++)
      cp[n] = true;
    tempd.setConnectPoints(cp);
    // add plot
    vmc.addPlot(tempd);
    visualizeClassifierErrors(vmc);
        } catch (Exception ex) {
    ex.printStackTrace();
        }
        }
    });
    visThreshold.add(clv);
      }
    } else {
      visThreshold.setEnabled(false);
    }
    resultListMenu.add(visThreshold);
   
    JMenu visCostBenefit = new JMenu("Cost/Benefit analysis");
    if ((preds != null) && (classAtt != null) && (classAtt.isNominal())) {
      for (int i = 0; i < classAtt.numValues(); i++) {
        JMenuItem clv = new JMenuItem(classAtt.value(i));
        final int classValue = i;
        clv.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
              try {
                ThresholdCurve tc = new ThresholdCurve();
                Instances result = tc.getCurve(preds, classValue);

                // Create a dummy class attribute with the chosen
                // class value as index 0 (if necessary).
                Attribute classAttToUse = classAtt;
                if (classValue != 0) {
                  FastVector newNames = new FastVector();
                  newNames.addElement(classAtt.value(classValue));
                  for (int k = 0; k < classAtt.numValues(); k++) {
                    if (k != classValue) {
                      newNames.addElement(classAtt.value(k));
                    }
                  }
                  classAttToUse = new Attribute(classAtt.name(), newNames);
                }
               
                CostBenefitAnalysis cbAnalysis = new CostBenefitAnalysis();
               
                PlotData2D tempd = new PlotData2D(result);
                tempd.setPlotName(result.relationName());
                tempd.m_alwaysDisplayPointsOfThisSize = 10;
                // specify which points are connected
                boolean[] cp = new boolean[result.numInstances()];
                for (int n = 1; n < cp.length; n++)
                  cp[n] = true;
                tempd.setConnectPoints(cp);
               
                String windowTitle = "";
                if (classifier != null) {
                  String cname = classifier.getClass().getName();
                  if (cname.startsWith("weka.classifiers.")) {
                    windowTitle = "" + cname.substring("weka.classifiers.".length()) + " ";
                  }
                }
                windowTitle += " (class = " + classAttToUse.value(0) + ")";               
View Full Code Here

  protected void loadClassifier() {

    int returnVal = m_FileChooser.showOpenDialog(this);
    if (returnVal == JFileChooser.APPROVE_OPTION) {
      File selected = m_FileChooser.getSelectedFile();
      Classifier classifier = null;
      Instances trainHeader = null;

      m_Log.statusMessage("Loading model from file...");

      try {
  InputStream is = new FileInputStream(selected);
  if (selected.getName().endsWith(PMML_FILE_EXTENSION)) {
    PMMLModel model = PMMLFactory.getPMMLModel(is, m_Log);
    if (model instanceof PMMLClassifier) {
      classifier = (PMMLClassifier)model;
      /*trainHeader =
        ((PMMLClassifier)classifier).getMiningSchema().getMiningSchemaAsInstances(); */
    } else {
      throw new Exception("PMML model is not a classification/regression model!");
    }
  } else {
  if (selected.getName().endsWith(".gz")) {
    is = new GZIPInputStream(is);
  }
  ObjectInputStream objectInputStream = new ObjectInputStream(is);
  classifier = (Classifier) objectInputStream.readObject();
  try { // see if we can load the header
    trainHeader = (Instances) objectInputStream.readObject();
  } catch (Exception e) {} // don't fuss if we can't
  objectInputStream.close();
  }
      } catch (Exception e) {
 
  JOptionPane.showMessageDialog(null, e, "Load Failed",
              JOptionPane.ERROR_MESSAGE);
     

      m_Log.statusMessage("OK");
     
      if (classifier != null) {
  m_Log.logMessage("Loaded model from file '" + selected.getName()+ "'");
  String name = (new SimpleDateFormat("HH:mm:ss - ")).format(new Date());
  String cname = classifier.getClass().getName();
  if (cname.startsWith("weka.classifiers."))
    cname = cname.substring("weka.classifiers.".length());
  name += cname + " from file '" + selected.getName() + "'";
  StringBuffer outBuff = new StringBuffer();

  outBuff.append("=== Model information ===\n\n");
  outBuff.append("Filename:     " + selected.getName() + "\n");
  outBuff.append("Scheme:       " + classifier.getClass().getName());
  if (classifier instanceof OptionHandler) {
    String [] o = ((OptionHandler) classifier).getOptions();
    outBuff.append(" " + Utils.joinOptions(o));
  }
  outBuff.append("\n");
  if (trainHeader != null) {
    outBuff.append("Relation:     " + trainHeader.relationName() + '\n');
    outBuff.append("Attributes:   " + trainHeader.numAttributes() + '\n');
    if (trainHeader.numAttributes() < 100) {
      for (int i = 0; i < trainHeader.numAttributes(); i++) {
        outBuff.append("              " + trainHeader.attribute(i).name()
           + '\n');
      }
    } else {
      outBuff.append("              [list of attributes omitted]\n");
    }
  } else {
    outBuff.append("\nTraining data unknown\n");
  }

  outBuff.append("\n=== Classifier model ===\n\n");
  outBuff.append(classifier.toString() + "\n");
 
  m_History.addResult(name, outBuff);
  m_History.setSingle(name);
  FastVector vv = new FastVector();
  vv.addElement(classifier);
View Full Code Here

      }
      m_RunThread = new Thread() {
          public void run() {
            // Copy the current state of things
            m_Log.statusMessage("Setting up...");
            Classifier classifierToUse = classifier;

            StringBuffer outBuff = m_History.getNamedBuffer(name);
            DataSource source = null;
            Instances userTestStructure = null;
            ClassifierErrorsPlotInstances plotInstances = null;
View Full Code Here

    m_ClassifierEditor.setCapabilitiesFilter(filterClass);
   
    // Check capabilities
    m_StartBut.setEnabled(true);
    Capabilities currentFilter = m_ClassifierEditor.getCapabilitiesFilter();
    Classifier classifier = (Classifier) m_ClassifierEditor.getValue();
    Capabilities currentSchemeCapabilities =  null;
    if (classifier != null && currentFilter != null &&
        (classifier instanceof CapabilitiesHandler)) {
      currentSchemeCapabilities = ((CapabilitiesHandler)classifier).getCapabilities();
     
View Full Code Here

  }

  private List<Object> createLagFiller(Instances insts, String targetName)
      throws Exception {
    // Classifier lagFiller = new weka.classifiers.functions.LeastMedSq();
    Classifier lagFiller = new weka.classifiers.functions.LinearRegression();

    ArrayList<Attribute> atts = new ArrayList<Attribute>();
    atts.add(new Attribute("time"));
    atts.add(new Attribute("target"));
    Instances simple = new Instances("simple", atts, insts.numInstances());
    int targetIndex = insts.attribute(targetName).index();
    for (int i = 0; i < insts.numInstances(); i++) {
      double targetValue = insts.instance(i).value(targetIndex);
      double time = i;
      double[] vals = new double[2];
      vals[0] = time;
      vals[1] = targetValue;
      DenseInstance d = new DenseInstance(1.0, vals);
      simple.add(d);
    }

    simple.setClassIndex(1);
    lagFiller.buildClassifier(simple);
    System.err.println(lagFiller);
    simple = new Instances(simple, 0);

    List<Object> results = new ArrayList<Object>();
    results.add(lagFiller);
View Full Code Here

  public void acceptClassifier(BatchClassifierEvent ce) {
    if (ce.getTestSet() == null || ce.getTestSet().isStructureOnly()) {
      return; // can't evaluate empty/non-existent test instances
    }

    Classifier classifier = ce.getClassifier();

    try {
      if (ce.getGroupIdentifier() != m_currentBatchIdentifier) {
        if (m_setsComplete > 0) {
          if (m_logger != null) {
View Full Code Here

        System.out.println(ThresholdCurve.getNPointPrecision(inst, 11));
      } else {
        inst.setClassIndex(inst.numAttributes() - 1);
        ThresholdCurve tc = new ThresholdCurve();
        EvaluationUtils eu = new EvaluationUtils();
        Classifier classifier = new weka.classifiers.functions.Logistic();
        FastVector predictions = new FastVector();
        for (int i = 0; i < 2; i++) { // Do two runs.
          eu.setSeed(i);
          predictions.appendElements(eu.getCVPredictions(classifier, inst, 10));
          //System.out.println("\n\n\n");
View Full Code Here

TOP

Related Classes of weka.classifiers.Classifier

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware#gmail.com.