Examples of opennlp.tools.cmdline.PerformanceMonitor
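
The excerpts below show PerformanceMonitor as it is used in the OpenNLP command line tools. They all follow the same pattern: construct a PerformanceMonitor with an output stream and a unit label, call start(), call incrementCounter() once per processed item, and finish with stopAndPrintFinalResult(). Here is a minimal, self-contained sketch of that pattern (assuming the OpenNLP 1.5-era API and package layout used in these excerpts; the per-line processing itself is only a placeholder comment):

import java.io.IOException;
import java.io.InputStreamReader;

import opennlp.tools.cmdline.CmdLineUtil;
import opennlp.tools.cmdline.PerformanceMonitor;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.PlainTextByLineStream;

public class PerformanceMonitorSketch {

  public static void main(String[] args) {
    ObjectStream<String> lineStream =
        new PlainTextByLineStream(new InputStreamReader(System.in));

    // Report throughput on stderr, measured in "sent" (sentences).
    PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "sent");
    perfMon.start();

    try {
      String line;
      while ((line = lineStream.read()) != null) {
        // ... process the line here (tag, parse, detect sentences, etc.) ...

        // Count one processed item; incrementCounter(n) counts several at once.
        perfMon.incrementCounter();
      }
    }
    catch (IOException e) {
      CmdLineUtil.handleStdinIoError(e);
    }

    perfMon.stopAndPrintFinalResult();
  }
}

The evaluator excerpts instead use the single-argument constructor, new PerformanceMonitor("sent"), together with startAndPrintThroughput(), and drive the counter by wrapping the sample ObjectStream so that every read() also calls incrementCounter().
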


        ParserFactory.create(model, beamSize, advancePercentage);

    ObjectStream<String> lineStream =
      new PlainTextByLineStream(new InputStreamReader(System.in));
   
    PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "sent");
    perfMon.start();
   
    try {
      String line;
      while ((line = lineStream.read()) != null) {
        if (line.length() == 0) {
          System.out.println();
        }
        else {
          Parse[] parses = parseLine(line, parser, numParses);
         
          for (int pi=0,pn=parses.length;pi<pn;pi++) {
            if (showTopK) {
              System.out.print(pi+" "+parses[pi].getProb()+" ");
            }
           
            parses[pi].show();
           
            perfMon.incrementCounter();
          }
        }
      }
    }
    catch (IOException e) {
      CmdLineUtil.handleStdinIoError(e);
    }
   
    perfMon.stopAndPrintFinalResult();
  }
View Full Code Here


      TokenNameFinderEvaluator evaluator = new TokenNameFinderEvaluator(nameFinder);
     
      final NameSampleDataStream sampleStream = new NameSampleDataStream(
          new PlainTextByLineStream(new InputStreamReader(new FileInputStream(args[2]), args[1])));
     
      final PerformanceMonitor monitor = new PerformanceMonitor("sent");
     
      monitor.startAndPrintThroughput();
     
      // Wrap the sample stream so that every sample handed to the evaluator
      // also increments the performance monitor's counter.
      ObjectStream<NameSample> iterator = new ObjectStream<NameSample>() {

        public NameSample read() throws IOException {
          monitor.incrementCounter();
          return sampleStream.read();
        }
       
        public void reset() throws IOException {
          sampleStream.reset();
        }
       
        public void close() throws IOException {
          sampleStream.close();
        }
      };
     
      evaluator.evaluate(iterator);
     
      monitor.stopAndPrintFinalResult();
     
      System.out.println();
      System.out.println("F-Measure: " + evaluator.getFMeasure().getFMeasure());
      System.out.println("Recall: " + evaluator.getFMeasure().getRecallScore());
      System.out.println("Precision: " + evaluator.getFMeasure().getPrecisionScore());
View Full Code Here

    TokenNameFinderEvaluator evaluator = new TokenNameFinderEvaluator(
        new NameFinderME(model),
        listeners.toArray(new TokenNameFinderEvaluationMonitor[listeners.size()]));

    final PerformanceMonitor monitor = new PerformanceMonitor("sent");

    ObjectStream<NameSample> measuredSampleStream = new ObjectStream<NameSample>() {

      public NameSample read() throws IOException {
        monitor.incrementCounter();
        return sampleStream.read();
      }

      public void reset() throws IOException {
        sampleStream.reset();
      }

      public void close() throws IOException {
        sampleStream.close();
      }
    };

    monitor.startAndPrintThroughput();

    try {
      evaluator.evaluate(measuredSampleStream);
    } catch (IOException e) {
      System.err.println("failed");
      throw new TerminateToolException(-1, "IO error while reading test data: " + e.getMessage(), e);
    } finally {
      try {
        measuredSampleStream.close();
      } catch (IOException e) {
        // closing the stream failed; nothing to do about it here
      }
    }

    monitor.stopAndPrintFinalResult();

    System.out.println();

    if (detailedFListener == null) {
      System.out.println(evaluator.getFMeasure());
View Full Code Here

      POSTaggerME tagger = new POSTaggerME(model);

      ObjectStream<String> lineStream =
        new PlainTextByLineStream(new InputStreamReader(System.in));

      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "sent");
      perfMon.start();

      try {
        String line;
        while ((line = lineStream.read()) != null) {

          String whitespaceTokenizerLine[] = WhitespaceTokenizer.INSTANCE.tokenize(line);
          String[] tags = tagger.tag(whitespaceTokenizerLine);

          POSSample sample = new POSSample(whitespaceTokenizerLine, tags);
          System.out.println(sample.toString());

          perfMon.incrementCounter();
        }
      }
      catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }

      perfMon.stopAndPrintFinalResult();
    }
  }
View Full Code Here

      }

      ObjectStream<String> untokenizedLineStream =
          new PlainTextByLineStream(new InputStreamReader(System.in));

      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "sent");
      perfMon.start();

      try {
        String line;
        while((line = untokenizedLineStream.read()) != null) {
          String whitespaceTokenizerLine[] = WhitespaceTokenizer.INSTANCE.tokenize(line);

          // An empty line indicates a new document;
          // adaptive data must be cleared for a new document

          if (whitespaceTokenizerLine.length == 0) {
            for (NameFinderME nameFinder : nameFinders) {
              nameFinder.clearAdaptiveData();
            }
          }

          List<Span> names = new ArrayList<Span>();

          for (TokenNameFinder nameFinder : nameFinders) {
            Collections.addAll(names, nameFinder.find(whitespaceTokenizerLine));
          }

          // Simple way to drop intersecting spans, otherwise the
          // NameSample is invalid
          Span reducedNames[] = NameFinderME.dropOverlappingSpans(
              names.toArray(new Span[names.size()]));

          NameSample nameSample = new NameSample(whitespaceTokenizerLine,
              reducedNames, false);

          System.out.println(nameSample.toString());

          perfMon.incrementCounter();
        }
      }
      catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }

      perfMon.stopAndPrintFinalResult();
    }
  }
View Full Code Here

      }
     
      ObjectStream<String> lineStream =
          new PlainTextByLineStream(new InputStreamReader(System.in));
     
      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "parses");
      perfMon.start();
     
      try {
       
        int sentenceNumber = 0;
        List<Mention> document = new ArrayList<Mention>();
        List<Parse> parses = new ArrayList<Parse>();
       
        String line;
        while ((line = lineStream.read()) != null) {

          if (line.equals("")) {
            DiscourseEntity[] entities = treebankLinker.getEntities(document.toArray(new Mention[document.size()]));
            //showEntities(entities);
            new CorefParse(parses,entities).show();
            sentenceNumber=0;
            document.clear();
            parses.clear();
          }
          else {
            Parse p = Parse.parseParse(line);
            parses.add(p);
            Mention[] extents = treebankLinker.getMentionFinder().getMentions(new DefaultParse(p,sentenceNumber));
            //construct new parses for mentions which don't have constituents.
            for (int ei=0,en=extents.length;ei<en;ei++) {
              //System.err.println("PennTreebankLiner.main: "+ei+" "+extents[ei]);

              if (extents[ei].getParse() == null) {
                // not sure how to get the head index, but it's not used at this point.
                Parse snp = new Parse(p.getText(),extents[ei].getSpan(),"NML",1.0,0);
                p.insert(snp);
                extents[ei].setParse(new DefaultParse(snp,sentenceNumber));
              }

            }
            document.addAll(Arrays.asList(extents));
            sentenceNumber++;
          }
         
          perfMon.incrementCounter();
        }
      }
      catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }
     
      perfMon.stopAndPrintFinalResult();
    }
  }
View Full Code Here

      SentenceDetectorME sdetector = new SentenceDetectorME(model);

      ObjectStream<String> paraStream =
        new ParagraphStream(new PlainTextByLineStream(new InputStreamReader(System.in)));

      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "sent");
      perfMon.start();

      try {
        String para;
        while ((para = paraStream.read()) != null) {

          String[] sents = sdetector.sentDetect(para);
          for (String sentence : sents) {
            System.out.println(sentence);
          }

          perfMon.incrementCounter(sents.length);

          System.out.println();
        }
      }
      catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }

      perfMon.stopAndPrintFinalResult();
    }
  }
View Full Code Here

          new DetokenizationDictionaryLoader().load(new File(args[0])));

      ObjectStream<String> tokenizedLineStream =
        new PlainTextByLineStream(new InputStreamReader(System.in));

      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "sent");
      perfMon.start();

      try {
        String tokenizedLine;
        while ((tokenizedLine = tokenizedLineStream.read()) != null) {

          // whitespace-tokenize the line
          String tokens[] = WhitespaceTokenizer.INSTANCE.tokenize(tokenizedLine);

          System.out.println(detokenizer.detokenize(tokens, null));

          perfMon.incrementCounter();
        }
      }
      catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }

      perfMon.stopAndPrintFinalResult();
    }
  }
View Full Code Here

      DocumentCategorizerME doccat = new DocumentCategorizerME(model);

      ObjectStream<String> documentStream = new ParagraphStream(
          new PlainTextByLineStream(new InputStreamReader(System.in)));

      PerformanceMonitor perfMon = new PerformanceMonitor(System.err, "doc");
      perfMon.start();

      try {
        String document;
        while ((document = documentStream.read()) != null) {
          double prob[] = doccat.categorize(WhitespaceTokenizer.INSTANCE.tokenize(document));
          String category = doccat.getBestCategory(prob);

          DocumentSample sample = new DocumentSample(category, document);
          System.out.println(sample.toString());

          perfMon.incrementCounter();
        }
      }
      catch (IOException e) {
        CmdLineUtil.handleStdinIoError(e);
      }

      perfMon.stopAndPrintFinalResult();
    }
  }
View Full Code Here

    ChunkerEvaluator evaluator = new ChunkerEvaluator(new ChunkerME(model,
        ChunkerME.DEFAULT_BEAM_SIZE),
        listeners.toArray(new ChunkerEvaluationMonitor[listeners.size()]));
   
    final PerformanceMonitor monitor = new PerformanceMonitor("sent");

    ObjectStream<ChunkSample> measuredSampleStream = new ObjectStream<ChunkSample>() {

      public ChunkSample read() throws IOException {
        monitor.incrementCounter();
        return sampleStream.read();
      }

      public void reset() throws IOException {
        sampleStream.reset();
      }

      public void close() throws IOException {
        sampleStream.close();
      }
    };

    monitor.startAndPrintThroughput();

    try {
      evaluator.evaluate(measuredSampleStream);
    } catch (IOException e) {
      System.err.println("failed");
      throw new TerminateToolException(-1, "IO error while reading test data: " + e.getMessage(), e);
    } finally {
      try {
        measuredSampleStream.close();
      } catch (IOException e) {
        // closing the stream failed; nothing to do about it here
      }
    }

    monitor.stopAndPrintFinalResult();

    System.out.println();

    if (detailedFMeasureListener == null) {
      System.out.println(evaluator.getFMeasure());
View Full Code Here
