Package weka.classifiers

Examples of weka.classifiers.Classifier
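
The fragments below come from the Weka source tree and show typical uses of the abstract weka.classifiers.Classifier class. As a point of reference, here is a minimal, self-contained sketch of building a classifier and querying it for a prediction. It assumes the older 3.x API used in the fragments (Classifier as an abstract class, FastVector-era releases) and a hypothetical ARFF file name:

import java.io.BufferedReader;
import java.io.FileReader;

import weka.classifiers.Classifier;
import weka.classifiers.trees.J48;
import weka.core.Instance;
import weka.core.Instances;

public class ClassifierSketch {

  public static void main(String[] args) throws Exception {
    // Load a data set; the file name is a placeholder.
    Instances data = new Instances(new BufferedReader(new FileReader("iris.arff")));
    data.setClassIndex(data.numAttributes() - 1);    // last attribute is the class

    // Any concrete subclass can be used through the Classifier type.
    Classifier classifier = new J48();
    classifier.buildClassifier(data);

    // Predict the class of the first instance.
    Instance first = data.instance(0);
    double predicted = classifier.classifyInstance(first);
    System.out.println("Predicted: " + data.classAttribute().value((int) predicted));
  }
}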


      // Any options after the classifier name (args[9]) are passed on to it.
      String[] argsR = null;
      if (args.length > 10) {
        argsR = new String[args.length - 10];
        for (int j = 10; j < args.length; j++) {
          argsR[j - 10] = args[j];
        }
      }
      // Instantiate the named classifier with those options.
      Classifier c = Classifier.forName(args[9], argsR);

      // Configure the data generator used by the boundary visualizer (bv).
      KDDataGenerator dataGen = new KDDataGenerator();
      dataGen.setKernelBandwidth(bandWidth);
      bv.setDataGenerator(dataGen);
      bv.setNumSamplesPerRegion(loc);
      bv.setGeneratorSamplesBase(base);


    // Only draw the region shapes if the focused node is not a leaf.
    if (((Double) ((FastVector) m_focus.m_ranges.elementAt(0)).
         elementAt(0)).intValue() != LEAF) {
      m_iView.setShapes(m_focus.m_ranges);
    }

    // If a classifier is already attached to this node, show it in the editor.
    Classifier classifierAtNode = m_focus.getClassifier();
    if (classifierAtNode != null) {
      m_classifiers.setValue(classifierAtNode);
    }
    m_propertyDialog = new PropertyDialog((Frame) null, m_classifiers,
                                          m_mainWin.getLocationOnScreen().x,

        System.out.println(ThresholdCurve.getNPointPrecision(inst, 11));
      } else {
        // Treat the last attribute as the class and collect cross-validated
        // predictions from a logistic regression model.
        inst.setClassIndex(inst.numAttributes() - 1);
        ThresholdCurve tc = new ThresholdCurve();
        EvaluationUtils eu = new EvaluationUtils();
        Classifier classifier = new weka.classifiers.functions.Logistic();
        FastVector predictions = new FastVector();
        for (int i = 0; i < 2; i++) { // Do two runs.
          eu.setSeed(i);
          predictions.appendElements(eu.getCVPredictions(classifier, inst, 10));
          //System.out.println("\n\n\n");
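
Continuing the fragment above, a brief sketch of what can follow the prediction-gathering loop: build the threshold curve from the collected predictions and read off the area under the ROC curve (tc and predictions refer to the variables in the fragment):

        // After the loop: build the threshold curve and report the ROC area.
        Instances curve = tc.getCurve(predictions);
        System.out.println("ROC area: " + ThresholdCurve.getROCArea(curve));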

        repaint();
      }
    });

    // When OK is pressed in the classifier editor, add a copy of the
    // configured classifier to the list of algorithms.
    ((GenericObjectEditor.GOEPanel) m_ClassifierEditor.getCustomEditor()).addOkListener(
      new ActionListener() {
        public void actionPerformed(ActionEvent e) {
          Classifier newCopy =
            (Classifier) copyObject(m_ClassifierEditor.getValue());
          addNewAlgorithm(newCopy);
        }
      });
   

          try {
            // Make sure the chosen file has a .xml extension, then load the
            // classifier stored in it and replace the selected list entry.
            File file = m_FileChooser.getSelectedFile();
            if (!file.getAbsolutePath().toLowerCase().endsWith(".xml"))
              file = new File(file.getAbsolutePath() + ".xml");
            XMLClassifier xmlcls = new XMLClassifier();
            Classifier c = (Classifier) xmlcls.read(file);
            m_AlgorithmListModel.setElementAt(c, m_List.getSelectedIndex());
            updateExperiment();
          }
          catch (Exception ex) {
            ex.printStackTrace();
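
For reference, the matching save/load round trip as a minimal sketch. The file name is a placeholder, and write()/read() are assumed to be the methods XMLClassifier inherits from Weka's XML serialization support:

import java.io.File;

import weka.classifiers.Classifier;
import weka.classifiers.trees.J48;
import weka.classifiers.xml.XMLClassifier;

public class XmlRoundTrip {

  public static void main(String[] args) throws Exception {
    // Serialize a classifier to XML and read it back; the file name is a placeholder.
    File file = new File("j48.xml");
    XMLClassifier xml = new XMLClassifier();
    xml.write(file, new J48());
    Classifier restored = (Classifier) xml.read(file);
    System.out.println("Restored: " + restored.getClass().getName());
  }
}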

   *
   * @return the classifier string.
   */
  protected String getClassifierSpec() {

    Classifier c = getClassifier();
    if (c instanceof OptionHandler) {
      return c.getClass().getName() + " "
        + Utils.joinOptions(((OptionHandler) c).getOptions());
    }
    return c.getClass().getName();
  }
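
getClassifierSpec() returns the class name followed by the classifier's options. A small sketch of the reverse direction, rebuilding a classifier from such a spec string with Utils.splitOptions and Classifier.forName (the spec string used here is only an illustrative example):

import weka.classifiers.Classifier;
import weka.core.Utils;

public class SpecRoundTrip {

  public static void main(String[] args) throws Exception {
    // A spec as produced by getClassifierSpec(): class name followed by options.
    String spec = "weka.classifiers.trees.J48 -C 0.25 -M 2";
    String[] parts = Utils.splitOptions(spec);
    String className = parts[0];
    parts[0] = "";                       // forName expects only the options
    Classifier c = Classifier.forName(className, parts);
    System.out.println(c.getClass().getName());
  }
}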

   *
   * @param args optional commandline parameters
   */
  public static void main(String[] args) {
    Instances  inst;
    Classifier classifier;
    int        runs;
    int        folds;
    String     tmpStr;
    boolean    compute;
    Instances  result;

      m_Exp.setRunLower(1);
      m_Exp.setRunUpper(m_numRepetitions);
    }

    // Choose the split evaluator (and its underlying classifier) according to
    // whether a classification or a regression experiment was selected.
    SplitEvaluator se = null;
    Classifier sec = null;
    if (m_ExpClassificationRBut.isSelected()) {
      se = new ClassifierSplitEvaluator();
      sec = ((ClassifierSplitEvaluator) se).getClassifier();
    } else {
      se = new RegressionSplitEvaluator();

  protected String getClassifierSpec(int index) {

    // Guard against an index that is out of range.
    if (index >= m_Classifiers.length) {
      return "";
    }
    Classifier c = getClassifier(index);
    if (c instanceof OptionHandler) {
      return c.getClass().getName() + " "
        + Utils.joinOptions(((OptionHandler) c).getOptions());
    }
    return c.getClass().getName();
  }

    // Stratify nominal-class data so each fold has a similar class distribution.
    if (newData.classAttribute().isNominal() && (m_NumXValFolds > 1)) {
      newData.stratify(m_NumXValFolds);
    }
    Instances train = newData;    // train on all data by default
    Instances test = newData;     // test on training data by default
    Classifier bestClassifier = null;
    int bestIndex = -1;
    double bestPerformance = Double.NaN;
    int numClassifiers = m_Classifiers.length;
    for (int i = 0; i < numClassifiers; i++) {
      Classifier currentClassifier = getClassifier(i);
      Evaluation evaluation;
      if (m_NumXValFolds > 1) {
        // Estimate this classifier's error by cross-validation.
        evaluation = new Evaluation(newData);
        for (int j = 0; j < m_NumXValFolds; j++) {

          // We want to randomize the data the same way for every
          // learning scheme.
          train = newData.trainCV(m_NumXValFolds, j, new Random(1));
          test = newData.testCV(m_NumXValFolds, j);
          currentClassifier.buildClassifier(train);
          evaluation.setPriors(train);
          evaluation.evaluateModel(currentClassifier, test);
        }
      } else {
        // No cross-validation: train and test on the full data set.
        currentClassifier.buildClassifier(train);
        evaluation = new Evaluation(train);
        evaluation.evaluateModel(currentClassifier, test);
      }

      double error = evaluation.errorRate();
      if (m_Debug) {
        System.err.println("Error rate: " + Utils.doubleToString(error, 6, 4)
            + " for classifier "
            + currentClassifier.getClass().getName());
      }

      // Keep the classifier with the lowest error rate seen so far.
      if ((i == 0) || (error < bestPerformance)) {
        bestClassifier = currentClassifier;
        bestPerformance = error;
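
The fragment above (a scheme that picks the best of several base classifiers) drives the cross-validation folds by hand. The same selection idea, condensed into a sketch that relies on Evaluation.crossValidateModel instead (the helper class and candidate array are illustrative, not part of the original code):

import java.util.Random;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.core.Instances;

public class PickBestByCV {

  // Returns the candidate with the lowest 10-fold cross-validated error rate.
  public static Classifier pickBest(Classifier[] candidates, Instances data) throws Exception {
    Classifier best = null;
    double bestError = Double.MAX_VALUE;
    for (Classifier c : candidates) {
      Evaluation eval = new Evaluation(data);
      eval.crossValidateModel(c, data, 10, new Random(1));   // same folds for every scheme
      if (eval.errorRate() < bestError) {
        bestError = eval.errorRate();
        best = c;
      }
    }
    // The chosen classifier still has to be built on the full data before use.
    best.buildClassifier(data);
    return best;
  }
}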
