Examples of Lexicon


Examples of com.digitalpebble.classification.Lexicon

  public static void getAttributeScores(String modelPath, String lexiconF,
      int topAttributesNumber) throws IOException {
    // load the model + the lexicon
    // try to see if we can get a list of the best scores from the model
    // works only for liblinear
    Lexicon lexicon = new Lexicon(lexiconF);
    Model liblinearModel = Model.load(new File(modelPath));
    double[] weights = liblinearModel.getFeatureWeights();
    // dump all the weights
    int numClasses = liblinearModel.getNrClass();
    int numFeatures = liblinearModel.getNrFeature();

    Map<Integer, String> invertedAttributeIndex = lexicon
        .getInvertedIndex();

    Map<String, WeightedAttributeQueue> topAttributesPerLabel = new HashMap<String, WeightedAttributeQueue>(
        numClasses);

    for (int i = 0; i < weights.length; i++) {
      // get current class num
      int classNum = i / numFeatures;
      int featNum = i % numFeatures;
      String classLabel = lexicon.getLabel(classNum);
      String attLabel = invertedAttributeIndex.get(featNum + 1);

      // display the values between -0.001 and +0.001 as 0
      if (weights[i] < 0.001 && weights[i] > -0.001)
        weights[i] = 0;

Examples of com.digitalpebble.classification.Lexicon

    public static void dumpBestAttributes(String raw, String lexiconF)
            throws IOException {
        // load the corpus + the lexicon
        // load the lexicon and the raw file
        Lexicon lexicon = new Lexicon(lexiconF);
        FileTrainingCorpus corpus = new FileTrainingCorpus(new File(raw));
        AttributeScorer scorer = logLikelihoodAttributeScorer.getScorer(corpus,
                lexicon);
    }
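
A hedged continuation of this example: once the scorer has been computed, it could be applied to the lexicon with the same filter calls that appear in the compaction example further down this page; the attribute count here is an illustrative value, not taken from the original code.

        // Illustrative follow-up (not part of the original excerpt): attach the
        // scorer to the lexicon and keep only the N best-scoring attributes.
        int keepNBestAttributes = 100; // hypothetical value
        lexicon.setAttributeScorer(scorer);
        lexicon.applyAttributeFilter(scorer, keepNBestAttributes);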

Examples of com.digitalpebble.classification.Lexicon

                .getProperty("compact.attribute.nums"));

        String format = props.getProperty("format");

        // load the lexicon and the raw file
        Lexicon lexicon = new Lexicon(lexiconF);

        String weightingScheme = props.getProperty(
                "classification_weight_scheme", "tfidf");
        WeightingMethod method = WeightingMethod
                .methodFromString(weightingScheme);
        lexicon.setMethod(method);

        // get the raw file
        FileTrainingCorpus ftc = new FileTrainingCorpus(new File(raw));

        int keepNBestAttributes = Integer.parseInt(props.getProperty(
                "keepNBestAttributes", "-1"));

        if (keepNBestAttributes != -1) {
            // double scores[] = logLikelihoodAttributeFilter.getScores(ftc,
            // lexicon);
            // lexicon.setLogLikelihoodRatio(scores);
            // lexicon.keepTopNAttributesLLR(keepNBestAttributes);
            AttributeScorer scorer = logLikelihoodAttributeScorer.getScorer(
                    ftc, lexicon);
            lexicon.setAttributeScorer(scorer);
            lexicon.applyAttributeFilter(scorer, keepNBestAttributes);
        } else {
            // apply the filters on the Lexicon
            int minFreq = Integer.parseInt(props
                    .getProperty("classification_minFreq"));
            int maxFreq = Integer.MAX_VALUE;

            lexicon.pruneTermsDocFreq(minFreq, maxFreq);
        }

        // change the indices of the attributes to remove
        // gaps between them
        Map<Integer, Integer> equiv = null;
        if (compact) {
            // create a new Lexicon object
            equiv = lexicon.compact();
        }

        // save the modified lexicon file
        if (newLexicon != null)
            lexicon.saveToFile(newLexicon);

        // dump a new vector file
        Utils.writeExamples(ftc, lexicon, true, vector_location, equiv, format);
    }
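
For reference, these are the property keys the snippet above reads whose use is visible in the excerpt (format, classification_weight_scheme, keepNBestAttributes, classification_minFreq); compact.attribute.nums is read on the truncated first line and is omitted here. The values below are placeholders chosen for illustration, assuming a java.util.Properties instance.

        // Hypothetical configuration; only the key names come from the code above,
        // the values are placeholders.
        Properties props = new Properties();
        props.setProperty("format", "libsvm");                       // placeholder format
        props.setProperty("classification_weight_scheme", "tfidf");  // default used above
        props.setProperty("keepNBestAttributes", "500");
        props.setProperty("classification_minFreq", "2");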

Examples of com.digitalpebble.classification.Lexicon

  public static void getAttributeScores(String modelPath, String lexiconF,
      int topAttributesNumber) throws IOException {
    // load the model + the lexicon
    // try to see if we can get a list of the best scores from the model
    // works only for liblinear
    Lexicon lexicon = new Lexicon(lexiconF);
    Model liblinearModel = Model.load(new File(modelPath));
    double[] weights = liblinearModel.getFeatureWeights();
    // dump all the weights
    int numClasses = liblinearModel.getNrClass();
    int numFeatures = liblinearModel.getNrFeature();

    Map<Integer, String> invertedAttributeIndex = lexicon
        .getInvertedIndex();

    Map<String, WeightedAttributeQueue> topAttributesPerLabel = new HashMap<String, WeightedAttributeQueue>(
        numClasses);

    // for (int i = 0; i < nr_w; i++) {
    // double contrib = w[(idx - 1) * nr_w + i] * lx.getValue();
    // }
    //
    // idx 1 in class 1 -> 0 x 22 + 0 = 0
    // idx 2 in class 1 -> 1 x 22 + 0 = 22
    // idx 1 in class 2 -> 0 x 22 + 1 = 1
    // idx 2 in class 2 -> 1 x 22 + 1 = 23

    // initialise the queues
    if (topAttributesNumber != -1) {
      for (int classNum = 0; classNum < numClasses; classNum++) {
        String classLabel = lexicon.getLabel(classNum);
        WeightedAttributeQueue queue = new WeightedAttributeQueue(
            topAttributesNumber);
        topAttributesPerLabel.put(classLabel, queue);
      }
    }

    for (int classNum = 0; classNum < numClasses; classNum++) {
      String classLabel = lexicon.getLabel(classNum);
      WeightedAttributeQueue queue = topAttributesPerLabel
          .get(classLabel);
      for (int featNum = 0; featNum < numFeatures; featNum++) {
        int pos = featNum * numClasses + classNum;
        double featWeight = weights[pos];
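
One way the truncated inner loop might continue, sketched only from calls that already appear in the excerpts on this page (the inverted-index lookup with liblinear's 1-based feature indices, and the small-weight threshold from the first excerpt); this is an illustration, not the original code.

      // Illustrative sketch (not the original continuation):
      String attLabel = invertedAttributeIndex.get(featNum + 1); // 1-based index
      // ignore weights in (-0.001, 0.001), as in the first excerpt above
      if (featWeight > 0.001 || featWeight < -0.001) {
          System.out.println(classLabel + "\t" + attLabel + "\t" + featWeight);
      }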

Examples of com.digitalpebble.classification.Lexicon

  private boolean cross_validation = false;

  public LibSVMModelCreator(String lexicon_location, String model_location,
      String vectorFile) {
    lexicon = new Lexicon();
    this.model_file_name = model_location;
    this.lexiconLocation = lexicon_location;
    this.vector_location = vectorFile;
  }

Examples of com.digitalpebble.classification.Lexicon

  private String outputLearner;

  public LibLinearModelCreator(String lexicon_location,
      String model_location, String vector_location) {
    lexicon = new Lexicon();
    this.SVM_Model_location = model_location;
    this.lexiconLocation = lexicon_location;
    this.vector_location = vector_location;

    learner_filename = System.getProperty("liblinear_train",

Examples of com.sun.speech.freetts.lexicon.Lexicon

    String stress = NO_STRESS;
    Relation syl = utterance.createRelation(Relation.SYLLABLE);
    Relation sylstructure =
            utterance.createRelation(Relation.SYLLABLE_STRUCTURE);
    Relation seg = utterance.createRelation(Relation.SEGMENT);
    Lexicon lex = utterance.getVoice().getLexicon();
    List syllableList = null;

    for (Item word = utterance.getRelation(Relation.WORD).getHead();
            word != null; word = word.getNext()) {
        Item ssword = sylstructure.appendItem(word);
        Item sylItem = null;   // item denoting syllable boundaries
        Item segItem = null;   // item denoting phonelist (segments)
        Item sssyl = null;     // item denoting syl in word

        String[] phones = null;

        Item token = word.getItemAs("Token");
        FeatureSet featureSet = null;

        if (token != null) {
            Item parent = token.getParent();
            featureSet = parent.getFeatures();
        }

        if (featureSet != null && featureSet.isPresent("phones")) {
            phones = (String[]) featureSet.getObject("phones");
        } else {
            phones = lex.getPhones(word.toString(), null);
        }

        for (int j = 0; j < phones.length; j++) {
            if (sylItem == null) {
                sylItem = syl.appendItem();
                sssyl = ssword.addDaughter(sylItem);
                stress = NO_STRESS;
                syllableList = new ArrayList();
            }
            segItem = seg.appendItem();
            if (isStressed(phones[j])) {
                stress = STRESS;
                phones[j] = deStress(phones[j]);
            }
            segItem.getFeatures().setString("name", phones[j]);
            sssyl.addDaughter(segItem);
            syllableList.add(phones[j]);
            if (lex.isSyllableBoundary(syllableList, phones, j + 1)) {
                sylItem = null;
                if (sssyl != null) {
                    sssyl.getFeatures().setString("stress", stress);
                }
            }
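
A minimal standalone sketch of the two Lexicon calls this loop depends on, getPhones and isSyllableBoundary, assuming a Lexicon instance lex has already been obtained (for example via utterance.getVoice().getLexicon() as above); the word and the printing are illustrative.

    // Illustrative sketch: group a word's phones into syllables using only
    // lex.getPhones(...) and lex.isSyllableBoundary(...) as used above.
    String[] phones = lex.getPhones("hello", null);
    if (phones != null) {
        List syllable = new ArrayList();
        for (int j = 0; j < phones.length; j++) {
            syllable.add(phones[j]);
            if (lex.isSyllableBoundary(syllable, phones, j + 1)) {
                System.out.println(syllable); // phones of one syllable
                syllable = new ArrayList();
            }
        }
    }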

Examples of edu.stanford.nlp.parser.lexparser.Lexicon

      lexOptions.unknownPrefixSize = 1;
      lexOptions.unknownSuffixSize = 1;
    }
    Index<String> wordIndex = new HashIndex<String>();
    Index<String> tagIndex = new HashIndex<String>();
    Lexicon lex = tlpp.lex(op, wordIndex, tagIndex);
   
    int computeAfter = (int) (0.50 * tb.size());
    Counter<String> vocab = new ClassicCounter<String>();
    Counter<String> unkCounter = new ClassicCounter<String>();
    int treeId = 0;
    for(Tree t : tb) {
      List<Label> yield = t.yield();
      int posId = 0;
      for(Label word : yield) {
        vocab.incrementCount(word.value());
        if(treeId > computeAfter && vocab.getCount(word.value()) < 2.0)
//          if(lex.getUnknownWordModel().getSignature(word.value(), posId++).equals("UNK"))
//            pw.println(word.value());
          unkCounter.incrementCount(lex.getUnknownWordModel().getSignature(word.value(), posId++));
      }
      treeId++;
    }
   
    List<String> biggestKeys = new ArrayList<String>(unkCounter.keySet());
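
The signature counts collected above can then be inspected; a small illustrative follow-up using only Counter.getCount, which already appears in the excerpt.

    // Illustrative follow-up (not part of the original excerpt): report the
    // most frequent unknown-word signature.
    String mostFrequent = null;
    for (String sig : biggestKeys) {
      if (mostFrequent == null
          || unkCounter.getCount(sig) > unkCounter.getCount(mostFrequent)) {
        mostFrequent = sig;
      }
    }
    if (mostFrequent != null) {
      System.out.println(mostFrequent + "\t" + unkCounter.getCount(mostFrequent));
    }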

Examples of lexicon.jaxb.Lexicon

  public static ElementaryTree buildTree(String word) throws
  FileNotFoundException, XMLStreamException, JAXBException,java.lang.NullPointerException{
   
    JAXBContext jc = JAXBContext.newInstance(Lexicon.class);
    Unmarshaller u = jc.createUnmarshaller();
    Lexicon lex =(Lexicon) (u.unmarshal(new File("binding\\En-Fr.xml")));
    System.out.println(lex.getTgt_language());
    System.out.println( "D�but: " );
    System.out.println(lex.getEntryCount());
    System.out.println(lex.getEntry(1).getSource().getForm());
   
    for (int i = 0; i < lex.getEntryCount(); i++) {
     
      if(lex.getEntry(i).getSource().getForm().equals("John"))
      {
        System.out.println(lex.getEntry(i).getSource().getLexicalisation().getTree_id());
      }
     
    }
          
    return null;

Examples of net.fortytwo.ripple.model.Lexicon

    private void updateCompletors() {
        logger.fine("updating completors");
        List<Completer> completors = new ArrayList<Completer>();

        try {
            Lexicon lex = queryEngine.getLexicon();

            synchronized (lex) {
                completors.add(lex.getCompletor());
            }

            ArrayList<String> directives = new ArrayList<String>();
            directives.add("@help");
            directives.add("@list");