 * @param first the index of the first file
 */
private static void profile(Grammar grammar, String[] files, int first) {
    File file = new File(files[first]);
    Tokenizer tokenizer;
    Parser parser;
    Node node;
    int fileCount = files.length - first;
    long time;
    int counter;

    // Profile tokenizer
    try {
        System.out.println("Tokenizing " + fileCount + " file(s)...");
        tokenizer = grammar.createTokenizer(new FileReader(file));
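        // Start timing only after the tokenizer has been constructed,
        // so setup cost is excluded from the measurement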
        time = System.currentTimeMillis();
        counter = 0;
        for (int i = first; i < files.length; i++) {
            if (i > first) {
                file = new File(files[i]);
                tokenizer.reset(new FileReader(file));
            }
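            // Read tokens until next() returns null at end of input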
            while (tokenizer.next() != null) {
                counter++;
            }
        }
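        // Add one millisecond so the elapsed time is never zero; it is
        // used as a divisor in the average speed calculation below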
        time = System.currentTimeMillis() - time + 1;
        System.out.println(" Time elapsed: " + time + " millisec");
        System.out.println(" Tokens found: " + counter);
        System.out.println(" Average speed: " + (counter / time) +
                           " tokens/millisec");
        System.out.println();
    } catch (FileNotFoundException e) {
        printError(file.toString(), e);
        System.exit(1);
    } catch (GrammarException e) {
        printInternalError(e);
        System.exit(2);
    } catch (ParseException e) {
        printError(file.toString(), e);
        System.exit(1);
    }

    // Profile parser
    try {
        System.out.println("Parsing " + fileCount + " file(s)...");
        file = new File(files[first]);
        tokenizer = grammar.createTokenizer(new FileReader(file));
        parser = grammar.createParser(tokenizer);
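        // Start timing only after the tokenizer and parser have been
        // constructed, mirroring the tokenizer measurement above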
        time = System.currentTimeMillis();
        counter = 0;
        for (int i = first; i < files.length; i++) {
            if (i > first) {
                file = new File(files[i]);
                parser.reset(new FileReader(file));
            }
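            // Parse the file and count the root node plus all of its
            // descendants in the resulting parse tree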
            node = parser.parse();
            counter += 1 + node.getDescendantCount();
        }
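        // As above, add one millisecond so the elapsed time is never zero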
        time = System.currentTimeMillis() - time + 1;
        System.out.println(" Time elapsed: " + time + " millisec");
        System.out.println(" Nodes found: " + counter);