Package weka.classifiers

Examples of weka.classifiers.Classifier


   * Weka classifier.
   *
   * @return the scheme name and options of the underlying Weka classifier
   */
  protected String getForecasterSpec() {
    Classifier c = getBaseForecaster();

    if (c instanceof OptionHandler) {
      return c.getClass().getName() + " "
          + Utils.joinOptions(((OptionHandler) c).getOptions());
    } else {
      return c.getClass().getName();
    }
  }
View Full Code Here


    // m_lastHistoricInstance = insts.instance(insts.numInstances() - 1);

    m_singleTargetForecasters = new ArrayList<SingleTargetForecaster>();
    for (int i = 0; i < m_fieldsToForecast.size(); i++) {
      SingleTargetForecaster f = new SingleTargetForecaster();
      Classifier c = AbstractClassifier.makeCopy(m_forecaster);
      f.setClassifier(c);
      f.buildForecaster(trainingData, m_fieldsToForecast.get(i));
      m_singleTargetForecasters.add(f);
    }
View Full Code Here

  private final static Logger logger = Logger
  .getLogger(WekaRegressionModelFactory.class);

  public static WekaRegressionModelFactory createForDir(String models, WekaOptions config, OpponentModelListener... listeners) throws IOException, ClassNotFoundException {
    Classifier preBetModel, preFoldModel, preCallModel, preRaiseModel, postBetModel, postFoldModel, postCallModel, postRaiseModel,
    showdown0Model, showdown1Model, showdown2Model, showdown3Model, showdown4Model, showdown5Model;
    ClassLoader classLoader = WekaRegressionModelFactory.class.getClassLoader();
    ObjectInputStream in = new ObjectInputStream(classLoader.getResourceAsStream(models+"preBet.model"));
    preBetModel = (Classifier)in.readObject();
    in.close();
View Full Code Here

        return trainingSet;
    }

    public double classify(LinkedHashMap<Integer, Double> sample) {
        try {
            Classifier cModel = (LibSVM) loadClassifier();
            ArffLoader loader = new ArffLoader();
            loader.setFile(new File(PropertiesGetter.getProperty("TrainingDataSetObject")));
            Instances structure = loader.getStructure();
            structure.setClassIndex(structure.numAttributes() - 1);
            int numAtts = structure.numAttributes();
            Instance instanceToClassify = getNewInstance(numAtts);
            Iterator<Entry<Integer, Double>> currentIt = sample.entrySet().iterator();
            Entry<Integer, Double> tempEntry;
            while (currentIt.hasNext()) {
                tempEntry = currentIt.next();
                instanceToClassify.setValue(tempEntry.getKey(), tempEntry.getValue());
            }
            instanceToClassify.setDataset(structure);
            instanceToClassify.setClassMissing();
            try {
                return cModel.classifyInstance(instanceToClassify);
            } catch (Exception ex) {
                Logger.getLogger(WekaWrapper.class.getName()).log(Level.SEVERE, null, ex);
                System.out.println(ex.toString());
                return -2;
            }
View Full Code Here

        }
        return newInst;
    }

    private Classifier loadClassifier() {
        Classifier cModel = null;
        InputStream is = null;
        try {
            ObjectInputStream objectInputStream = null;
            is = new FileInputStream(PropertiesGetter.getProperty("SVMModelFile"));
            objectInputStream = new ObjectInputStream(is);
View Full Code Here

    this.classifier = loadClassifier();
    translator = Translator.getInstance();
  }
 
  private Classifier loadClassifier(){
    Classifier classifier = null;
    Properties configProperties = loadConfigProperties();
    try {
      classifier = (Classifier) SerializationHelper.read(configProperties.getProperty("modelPath"));
    } catch (Exception e) {
      // TODO Auto-generated catch block
View Full Code Here

          //repTree.setNoPruning(true);// since we only use the tree as a classifier (as a conservative extension of what is currently done) and do not actually look at it, elimination of pruning is not a problem.
          // As part of learning, we also prune some of the nodes where the ratio of correctly-classified pairs to those incorrectly classified is comparable.
          // The significant advantage of not pruning is that the result is no longer sensitive to the order of elements in the tree and hence does not depend on the order in which elements have been obtained by concurrent threads.
          //final weka.classifiers.lazy.IB1 ib1 = new weka.classifiers.lazy.IB1();
          //final weka.classifiers.trees.J48 classifier = new weka.classifiers.trees.J48();
          final Classifier classifier = repTree;
          classifier.buildClassifier(dataCollector.trainingData);
          System.out.println("Entries in the classifier: "+dataCollector.trainingData.numInstances());
          System.out.println(classifier);
          dataCollector=null;// throw all the training data away.
         
          {// serialise the classifier, this is the only way to store it.
View Full Code Here

          //repTree.setNoPruning(true);// since we only use the tree as a classifier (as a conservative extension of what is currently done) and do not actually look at it, elimination of pruning is not a problem.
          // As part of learning, we also prune some of the nodes where the ratio of correctly-classified pairs to those incorrectly classified is comparable.
          // The significant advantage of not pruning is that the result is no longer sensitive to the order of elements in the tree and hence does not depend on the order in which elements have been obtained by concurrent threads.
          //final weka.classifiers.lazy.IB1 ib1 = new weka.classifiers.lazy.IB1();
          //final weka.classifiers.trees.J48 classifier = new weka.classifiers.trees.J48();
          final Classifier classifier = repTree;
          classifier.buildClassifier(dataCollector.trainingData);
          System.out.println("Entries in the classifier: "+dataCollector.trainingData.numInstances());
          System.out.println(classifier);
          dataCollector=null;// throw all the training data away.
         
          {// serialise the classifier, this is the only way to store it.
View Full Code Here

      uas_F=new RBoxPlot<String>("Time","F-measure",new File("time_"+name+"_f.pdf")),
      uas_Diff=new RBoxPlot<String>("Time","Diff-measure",new File("time_"+name+"_Diff.pdf"));
    SquareBagPlot gr_diff_to_f = new SquareBagPlot("f-measure","diff-based measure",new File("diff-to-f.pdf"),0,1,true);

    Set<Integer> allFrames = collectionOfTraces.get(UAVAllSeeds).tracesForUAVandFrame.get(UAVAllSeeds).keySet();
    Classifier classifiers[] = loadClassifierFromArff(arffName);
    ProgressIndicator progress = new ProgressIndicator("UAS", allFrames.size()*classifiers.length);
    LearnerEvaluationConfiguration initConfiguration = new LearnerEvaluationConfiguration(learnerInitConfiguration.config);
    initConfiguration.setLabelConverter(learnerInitConfiguration.getLabelConverter());// we do not copy if-then automata here because we do not wish to augment from if-then on every iteration because our properties are pairwise and this permits augmentation to be carried out first thing and not any more.
    initConfiguration.config.setUseConstraints(false);// do not use if-then during learning (refer to the explanation above)
   
View Full Code Here

          //repTree.setNoPruning(true);// since we only use the tree as a classifier (as a conservative extension of what is currently done) and do not actually look at it, elimination of pruning is not a problem.
          // As part of learning, we also prune some of the nodes where the ratio of correctly-classified pairs to those incorrectly classified is comparable.
          // The significant advantage of not pruning is that the result is no longer sensitive to the order of elements in the tree and hence does not depend on the order in which elements have been obtained by concurrent threads.
          //final weka.classifiers.lazy.IB1 ib1 = new weka.classifiers.lazy.IB1();
          //final weka.classifiers.trees.J48 classifier = new weka.classifiers.trees.J48();
          final Classifier classifier = repTree;
          classifier.buildClassifier(dataCollector.trainingData);
          System.out.println("Entries in the classifier: "+dataCollector.trainingData.numInstances());
          System.out.println(classifier);
          dataCollector=null;// throw all the training data away.
         
          {// serialise the classifier, this is the only way to store it.
View Full Code Here

TOP

Related Classes of weka.classifiers.Classifier

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact coftware#gmail.com.