// Body of a token-vocabulary import routine (enclosing method signature and the
// remainder of the catch clause lie outside this chunk).  Reads a generated
// ".tokens" vocabulary file whose lines have the form:
//     TOKENNAME=<int>      or      'literal'=<int>
// and registers each (name, type) pair via defineToken(), tracking the maximum
// token type seen in composite.maxTokenType.
File fullFile = tool.getImportedVocabFile(vocabName);
try {
// NOTE(review): FileReader uses the platform default charset (pre-Java 18);
// if tokens files are guaranteed ASCII this is harmless, otherwise an
// explicit charset would be safer -- confirm the file format's encoding.
FileReader fr = new FileReader(fullFile);
BufferedReader br = new BufferedReader(fr);
// Configure the tokenizer for the ".tokens" syntax:
StreamTokenizer tokenizer = new StreamTokenizer(br);
tokenizer.parseNumbers();              // token-type values parse as TT_NUMBER
tokenizer.wordChars('_', '_');         // allow '_' inside token names
tokenizer.eolIsSignificant(true);      // one definition per line; EOL is a token
tokenizer.slashSlashComments(true);    // permit // comments in the file
tokenizer.slashStarComments(true);     // permit /* */ comments in the file
tokenizer.ordinaryChar('=');           // '=' separates name from value
tokenizer.quoteChar('\'');             // 'literal' tokens come back as quoted strings
tokenizer.whitespaceChars(' ',' ');
tokenizer.whitespaceChars('\t','\t');
// NOTE(review): lineNum is only incremented on the success path below, so
// line numbers reported after any recovered syntax error are stale.
int lineNum = 1;
int token = tokenizer.nextToken();
while (token != StreamTokenizer.TT_EOF) {
String tokenID;
if ( token == StreamTokenizer.TT_WORD ) {
// Plain token name, e.g. ID
tokenID = tokenizer.sval;
}
else if ( token == '\'' ) {
// Quoted literal; re-wrap in quotes so the stored name is '...'
tokenID = "'"+tokenizer.sval+"'";
}
else {
// Line does not start with a name or literal: report and resync to EOL.
ErrorManager.error(ErrorManager.MSG_TOKENS_FILE_SYNTAX_ERROR,
vocabName+CodeGenerator.VOCAB_FILE_EXTENSION,
Utils.integer(lineNum));
// NOTE(review): this recovery loop (and the three identical ones below)
// never tests for TT_EOF; StreamTokenizer.nextToken() returns TT_EOF
// forever at end of input, so a syntax error on the last line of a file
// with no trailing newline loops here indefinitely.
while ( tokenizer.nextToken() != StreamTokenizer.TT_EOL ) {}
token = tokenizer.nextToken();
continue;
}
token = tokenizer.nextToken();
if ( token != '=' ) {
// Expected '=' after the token name: report and resync to EOL.
ErrorManager.error(ErrorManager.MSG_TOKENS_FILE_SYNTAX_ERROR,
vocabName+CodeGenerator.VOCAB_FILE_EXTENSION,
Utils.integer(lineNum));
while ( tokenizer.nextToken() != StreamTokenizer.TT_EOL ) {}
token = tokenizer.nextToken();
continue;
}
token = tokenizer.nextToken(); // skip '='
if ( token != StreamTokenizer.TT_NUMBER ) {
// Expected an integer token type after '=': report and resync to EOL.
ErrorManager.error(ErrorManager.MSG_TOKENS_FILE_SYNTAX_ERROR,
vocabName+CodeGenerator.VOCAB_FILE_EXTENSION,
Utils.integer(lineNum));
while ( tokenizer.nextToken() != StreamTokenizer.TT_EOL ) {}
token = tokenizer.nextToken();
continue;
}
// Successful parse of one "name=type" line: record the definition.
int tokenType = (int)tokenizer.nval;
token = tokenizer.nextToken();
//System.out.println("import "+tokenID+"="+tokenType);
composite.maxTokenType = Math.max(composite.maxTokenType,tokenType);
defineToken(tokenID, tokenType);
// NOTE(review): lineNum is bumped BEFORE the EOL check below, so the
// trailing-garbage error on this line reports the NEXT line's number.
lineNum++;
if ( token != StreamTokenizer.TT_EOL ) {
// Extra tokens after the value: report and resync to EOL.
ErrorManager.error(ErrorManager.MSG_TOKENS_FILE_SYNTAX_ERROR,
vocabName+CodeGenerator.VOCAB_FILE_EXTENSION,
Utils.integer(lineNum));
while ( tokenizer.nextToken() != StreamTokenizer.TT_EOL ) {}
token = tokenizer.nextToken();
continue;
}
token = tokenizer.nextToken(); // skip newline
}
// NOTE(review): close() is not in a finally block, so an IOException
// thrown mid-parse leaks the reader (try-with-resources would fix this).
br.close();
}
catch (FileNotFoundException fnfe) {
ErrorManager.error(ErrorManager.MSG_CANNOT_FIND_TOKENS_FILE,