}
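// Build the n-gram dictionary from the training parses; the build step below depends on it.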
if (dict || all) {
  System.err.println("Building dictionary");
  ObjectStream<Parse> data = new ParseSampleStream(new PlainTextByLineStream(new FileReader(inFile)));
  Dictionary mdict = buildDictionary(data, rules, cutoff);
  System.out.println("Saving the dictionary as: " + dictFile);
  // Close the stream explicitly so the dictionary file is flushed to disk.
  OutputStream dictOutputStream = new FileOutputStream(dictFile);
  mdict.serialize(dictOutputStream);
  dictOutputStream.close();
}
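// Train the POS tagger on tag sequences extracted from the same training parses.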
if (tag || all) {
  System.err.println("Training tagger");
  ObjectStream<POSSample> tes = new PosSampleStream(new ParseSampleStream(new PlainTextByLineStream(new FileReader(inFile))));
  // Use the configured iteration count rather than a hardcoded 100, matching the other training steps.
  POSModel posModel = POSTaggerME.train("en", tes, ModelType.MAXENT, null, null, cutoff, iterations);
  System.out.println("Saving the tagger model as: " + tagFile);
  OutputStream posOutputStream = new FileOutputStream(tagFile);
  posModel.serialize(posOutputStream);
  posOutputStream.close();
}
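// Train the chunker on chunk sequences derived from the training parses.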
if (chunk || all) {
  System.err.println("Training chunker");
  ObjectStream<ChunkSample> ces = new ChunkSampleStream(new ParseSampleStream(new PlainTextByLineStream(new FileReader(inFile))));
  ChunkerModel chunkModel = ChunkerME.train("en", ces, cutoff, iterations,
      new ChunkContextGenerator());
  System.out.println("Saving the chunker model as: " + chunkFile);
  OutputStream chunkOutputStream = new FileOutputStream(chunkFile);
  chunkModel.serialize(chunkOutputStream);
  chunkOutputStream.close();
}
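// Train the build model, which proposes new constituents during parsing, from BUILD events.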
if (build || all) {
  System.err.println("Loading Dictionary");
  // Close the dictionary stream once the dictionary has been read in.
  FileInputStream dictInputStream = new FileInputStream(dictFile);
  Dictionary tridict = new Dictionary(dictInputStream, true);
  dictInputStream.close();
  System.err.println("Training builder");
  opennlp.model.EventStream bes = new ParserEventStream(new ParseSampleStream(
      new PlainTextByLineStream(new FileReader(inFile))), rules, ParserEventTypeEnum.BUILD, tridict);
  AbstractModel buildModel = train(bes, iterations, cutoff);
  System.out.println("Saving the build model as: " + buildFile);
  new opennlp.maxent.io.SuffixSensitiveGISModelWriter(buildModel, buildFile).persist();