*/
public class AntlrGeneratorFragmentWithCustomLexer extends BaseAntlrGeneratorFragmentEx {
@Override
public void generate(final Grammar grammar, final XpandExecutionContext ctx) {
	// KeywordHelper is registered against the grammar here and must be discarded
	// at the end of this method (see discardHelper below); it answers keyword-rule
	// queries while post-processing the generated token definitions.
	KeywordHelper helper = new KeywordHelper(grammar, getOptions().isIgnoreCase());
	super.generate(grammar, ctx);
	final String srcGenPath = ctx.getOutput().getOutlet(Generator.SRC_GEN).getPath();
	final String encoding = getEncoding(ctx, Generator.SRC_GEN);
	// Base file names (no extension) for the generated lexer and parser grammars.
	final String lexerBaseFileName = srcGenPath + "/" + getFragmentHelper().getLexerGrammarFileName(grammar).replace('.', '/');
	final String parserBaseFileName = srcGenPath + "/" + getFragmentHelper().getParserGrammarFileName(grammar).replace('.', '/');
	String libPath = lexerBaseFileName;
	libPath = libPath.substring(0, libPath.lastIndexOf('/'));
	String absoluteLexerFileName = lexerBaseFileName + ".g";
	String absoluteParserFileName = parserBaseFileName + ".g";
	// "-fo <dir>" tells the ANTLR tool where to place generated sources.
	// The params are first set up for the parser run ...
	addAntlrParam("-fo");
	addAntlrParam(absoluteParserFileName.substring(0, absoluteParserFileName.lastIndexOf('/')));
	// ... then a copy is taken for the lexer run, with the last slot (the -fo
	// argument value) swapped to the lexer's output directory instead.
	String[] lexerAntlrParams = getAntlrParams();
	lexerAntlrParams[lexerAntlrParams.length - 1] = absoluteLexerFileName.substring(0, absoluteLexerFileName.lastIndexOf('/'));
	// Keep the generated lexer grammar around as *.gxtext, then overwrite the
	// *.g file with the version found under the 'java' folder — presumably the
	// hand-crafted custom lexer this fragment exists for (TODO confirm).
	copy(new File(absoluteLexerFileName), new File(lexerBaseFileName + ".gxtext"));
	writeFile(absoluteLexerFileName, readFile(absoluteLexerFileName.replace("src-gen", "java")));
	getAntlrTool().runWithEncodingAndParams(absoluteLexerFileName.replace("//", "/"), encoding, lexerAntlrParams);
	cleanupLexerTokensFile(lexerBaseFileName, helper, encoding);
	// The parser run additionally needs "-lib" so ANTLR can resolve the lexer
	// grammar's token vocabulary from the lexer's output directory.
	addAntlrParam("-lib");
	addAntlrParam(libPath);
	getAntlrTool().runWithEncodingAndParams(absoluteParserFileName, encoding, getAntlrParams());
	Charset charset = Charset.forName(encoding);
	simplifyUnorderedGroupPredicatesIfRequired(grammar, absoluteParserFileName, charset);
	splitParserAndLexerIfEnabled(absoluteLexerFileName, absoluteParserFileName, charset);
	suppressWarnings(absoluteLexerFileName, absoluteParserFileName, charset);
	normalizeLineDelimiters(absoluteLexerFileName, absoluteParserFileName, charset);
	normalizeTokens(absoluteParserFileName, charset);
	// Rewrite the token definitions: keyword rules become their quoted keyword
	// value; already-quoted literals are re-escaped to valid ANTLR strings.
	MutableTokenDefProvider provider = createLexerTokensProvider(lexerBaseFileName, helper, charset);
	for (Map.Entry<Integer, String> entry : provider.getTokenDefMap().entrySet()) {
		String value = entry.getValue();
		if (helper.isKeywordRule(value)) {
			String keywordAsAntlrString = AntlrGrammarGenUtil.toAntlrString(helper.getKeywordValue(value));
			entry.setValue("'" + keywordAsAntlrString + "'");
		} else if (value.startsWith("'")) {
			value = AntlrGrammarGenUtil.toAntlrString(value);
			entry.setValue("'" + value + "'");
		}
	}
	try {
		// Close the writer even if writeTokenFile throws; otherwise the file
		// handle leaks and buffered output may never be flushed to disk.
		PrintWriter tokenWriter = new PrintWriter(new File(parserBaseFileName + ".tokens"), encoding);
		try {
			provider.writeTokenFile(tokenWriter);
		} finally {
			tokenWriter.close();
		}
	} catch (IOException e) {
		throw new RuntimeException(e);
	}
	normalizeTokens(absoluteLexerFileName, charset);
	// Unregister the helper so it does not leak across generator invocations.
	helper.discardHelper(grammar);
}