Package: cc.mallet.types

Usage examples of cc.mallet.types.FeatureSelection


          String wn = (String)outputAlphabet.lookupObject(i) + "->" + (String)outputAlphabet.lookupObject(j);
          weightNames[destinationIndex][1] = wn;
          int wi = getWeightsIndex (wn);
          // A new empty FeatureSelection won't allow any features here, so we only
          // get the default feature for transitions
          featureSelections[wi] = new FeatureSelection(trainingSet.getDataAlphabet());
          destinationIndex++;
        }
      addState ((String)outputAlphabet.lookupObject(i), 0.0, 0.0,
          destinationNames, destinationNames, weightNames);
    }
View Full Code Here


        String wn = (String)outputAlphabet.lookupObject(i) + "->" + (String)outputAlphabet.lookupObject(j);
        weightNames[j][1] = wn;
        int wi = getWeightsIndex (wn);
        // A new empty FeatureSelection won't allow any features here, so we only
        // get the default feature for transitions
        featureSelections[wi] = new FeatureSelection(trainingSet.getDataAlphabet());
      }
      addState ((String)outputAlphabet.lookupObject(i), 0.0, 0.0,
          destinationNames, destinationNames, weightNames);
    }
  }
View Full Code Here

                if (defaults != null && defaults[i]) {
                  int wi = getWeightsIndex (weightNames[nt][i]);
                  // Using empty feature selection gives us only the
                  // default features
                  featureSelections[wi] =
                    new FeatureSelection(trainingSet.getDataAlphabet());
                }
              }
              nt++;
            }
          }
View Full Code Here

        nfeatures = max;
        newWeights [i] = new SparseVector (null, new double [max],
            max, max, false, false, false);
      } else {
        // Respect the featureSelection
        FeatureSelection fs = featureSelections[i];
        nfeatures = fs.getBitSet ().cardinality ();
        int[] idxs = new int [nfeatures];
        int j = 0, thisIdx = -1;
        while ((thisIdx = fs.nextSelectedIndex (thisIdx + 1)) >= 0) {
          idxs[j++] = thisIdx;
        }
        newWeights[i] = new IndexedSparseVector (idxs, new double [nfeatures], nfeatures, nfeatures, false, false, false);
      }
      newWeights [i].plusEqualsSparse (parameters.weights [i]);
View Full Code Here

   * @param ilist Instance list to be trained on
   * @return Classifier object containing learned weights
   */
  public Winnow train (InstanceList trainingList)
  {
    FeatureSelection selectedFeatures = trainingList.getFeatureSelection();
    if (selectedFeatures != null)
      // xxx Attend to FeatureSelection!!!
      throw new UnsupportedOperationException ("FeatureSelection not yet implemented.");
    // if "train" is run more than once,
    // we will be reinitializing the weights
View Full Code Here

 
  /** Returns whether training has completed; {@code finished} is set to true at the end of {@code train(...)}. */
  public boolean isFinishedTraining() { return finished; }
  /** Returns the learned {@link DecisionTree}; presumably assigned during {@code train(...)} — confirm, assignment not visible here. May be null before training. */
  public DecisionTree getClassifier() { return classifier; }
 
  public DecisionTree train (InstanceList trainingList) {
    FeatureSelection selectedFeatures = trainingList.getFeatureSelection();
    DecisionTree.Node root = new DecisionTree.Node (trainingList, null, selectedFeatures);
    splitTree (root, selectedFeatures, 0);
    root.stopGrowth();
    finished = true;
    System.out.println ("DecisionTree learned:");
View Full Code Here

    int numLabels = crf.outputAlphabet.size();

    crf.globalFeatureSelection = trainingData.getFeatureSelection();
    if (crf.globalFeatureSelection == null) {
      // Mask out all features; some will be added later by FeatureInducer.induceFeaturesFor(.)
      crf.globalFeatureSelection = new FeatureSelection (trainingData.getDataAlphabet());
      trainingData.setFeatureSelection (crf.globalFeatureSelection);
    }
    // TODO Careful!  If validationData and testingData get removed as arguments to this method
    // then the next two lines of work will have to be done somewhere.
    if (validationData != null) validationData.setFeatureSelection (crf.globalFeatureSelection);
View Full Code Here

    splitTree(node.getRightChild(), depth+1);
  }
 
  public C45 train (InstanceList trainingList)
  {
    FeatureSelection selectedFeatures = trainingList.getFeatureSelection();
    if (selectedFeatures != null)
      // xxx Attend to FeatureSelection!!!
      throw new UnsupportedOperationException ("FeatureSelection not yet implemented.");
    C45.Node root = new C45.Node(trainingList, null, m_minNumInsts);
    splitTree(root, 0);
View Full Code Here

   * @param trainingList Instance list to be trained on
   * @return Classifier object containing learned weights
   */
  public BalancedWinnow train (InstanceList trainingList)
  {
    FeatureSelection selectedFeatures = trainingList.getFeatureSelection();
    if (selectedFeatures != null)
      // xxx Attend to FeatureSelection!!!
      throw new UnsupportedOperationException ("FeatureSelection not yet implemented.");

    double epsilon = m_epsilon;
View Full Code Here

          String wn = (String)outputAlphabet.lookupObject(i) + "->" + (String)outputAlphabet.lookupObject(j);
          weightNames[destinationIndex][1] = wn;
          int wi = getWeightsIndex (wn);
          // A new empty FeatureSelection won't allow any features here, so we only
          // get the default feature for transitions
          featureSelections[wi] = new FeatureSelection(trainingSet.getDataAlphabet());
          destinationIndex++;
        }
      addState ((String)outputAlphabet.lookupObject(i), 0.0, 0.0,
          destinationNames, destinationNames, weightNames);
    }
View Full Code Here

TOP

Related Classes of cc.mallet.types.FeatureSelection

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc., now owned by Oracle Inc. Contact coftware#gmail.com.