// $ANTLR 2.7.4: "template.g" -> "BasicTemplateAntlrParser.java"$
package org.jostraca;
import org.jostraca.util.Standard;
import org.jostraca.unit.UnitList;
import org.jostraca.unit.BasicUnitList;
import org.jostraca.unit.BasicUnit;
import org.jostraca.unit.BasicUnitOrigin;
import org.jostraca.unit.BasicUnitProcessor;
import org.jostraca.comp.antlr.CommonToken;
import org.jostraca.comp.antlr.TokenBuffer;
import org.jostraca.comp.antlr.TokenStreamException;
import org.jostraca.comp.antlr.TokenStreamIOException;
import org.jostraca.comp.antlr.ANTLRException;
import org.jostraca.comp.antlr.LLkParser;
import org.jostraca.comp.antlr.Token;
import org.jostraca.comp.antlr.TokenStream;
import org.jostraca.comp.antlr.RecognitionException;
import org.jostraca.comp.antlr.NoViableAltException;
import org.jostraca.comp.antlr.MismatchedTokenException;
import org.jostraca.comp.antlr.SemanticException;
import org.jostraca.comp.antlr.ParserSharedInputState;
import org.jostraca.comp.antlr.collections.impl.BitSet;
/** Note: origin end points are calculated later using start points and text, see BasicUnitOrigin.resolveEnds() */
public class BasicTemplateAntlrParser extends org.jostraca.comp.antlr.LLkParser implements BasicTemplateAntlrLexerTokenTypes
{
  /** Blocks (text or script) collected while parsing, in document order. */
  private BlockList iBlocks = new BlockList();

  public BlockList getBlockList() {
    return iBlocks;
  }

  /** reference identifier for the source being parsed */
  private String iReference = Standard.EMPTY;

  public void setReference( String pReference ) {
    iReference = pReference;
  }

  /** Units (text or script) collected while parsing, each with origin information. */
  private BasicUnitList iUnitList = new BasicUnitList();

  public UnitList getUnitList() {
    return iUnitList;
  }
  protected BasicTemplateAntlrParser(TokenBuffer tokenBuf, int k) {
    super(tokenBuf,k);
    tokenNames = _tokenNames;
  }

  public BasicTemplateAntlrParser(TokenBuffer tokenBuf) {
    this(tokenBuf,1);
  }

  protected BasicTemplateAntlrParser(TokenStream lexer, int k) {
    super(lexer,k);
    tokenNames = _tokenNames;
  }

  public BasicTemplateAntlrParser(TokenStream lexer) {
    this(lexer,1);
  }

  public BasicTemplateAntlrParser(ParserSharedInputState state) {
    super(state,1);
    tokenNames = _tokenNames;
  }
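  /**
   * Grammar rule: template : ( text | script )* ;
   * Consumes TEXT and SCRIPT tokens in any order until neither is next (normally EOF).
   */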
  public final void template() throws RecognitionException, TokenStreamException {
    try { // for error handling
      {
        _loop14:
        do {
          switch ( LA(1)) {
          case TEXT:
            {
              text();
              break;
            }
          case SCRIPT:
            {
              script();
              break;
            }
          default:
            {
              break _loop14;
            }
          }
        } while (true);
      }
    }
    catch (RecognitionException ex) {
      reportError(ex);
      consume();
      consumeUntil(_tokenSet_0);
    }
  }
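  /**
   * Grammar rule: text : TEXT ;
   * Records the matched token as both a Block and a BasicUnit carrying its origin.
   */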
  public final void text() throws RecognitionException, TokenStreamException {
    Token t = null;
    try { // for error handling
      t = LT(1);
      match(TEXT);
      iBlocks.add( new Block( Block.TYPE_text, t.getText() ) );
      //System.out.println( "t:"+t.getText() );
      iUnitList.add( new BasicUnit( BasicUnitProcessor.TYPE_text, t.getText(),
                                    new BasicUnitOrigin( iReference, t.getLine(), t.getColumn(), 0, 0 ) ) );
    }
    catch (RecognitionException ex) {
      reportError(ex);
      consume();
      consumeUntil(_tokenSet_1);
    }
  }
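  /**
   * Grammar rule: script : SCRIPT ;
   * Records the matched token as both a Block and a BasicUnit carrying its origin.
   */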
  public final void script() throws RecognitionException, TokenStreamException {
    Token t = null;
    try { // for error handling
      t = LT(1);
      match(SCRIPT);
      iBlocks.add( new Block( Block.TYPE_script, t.getText() ) );
      //System.out.println( "s:"+t.getText() );
      iUnitList.add( new BasicUnit( BasicUnitProcessor.TYPE_script, t.getText(),
                                    new BasicUnitOrigin( iReference, t.getLine(), t.getColumn(), 0, 0 ) ) );
    }
    catch (RecognitionException ex) {
      reportError(ex);
      consume();
      consumeUntil(_tokenSet_1);
    }
  }
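  /** Token names indexed by token type (see BasicTemplateAntlrLexerTokenTypes), used in error messages. */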
  public static final String[] _tokenNames = {
    "<0>",
    "EOF",
    "<2>",
    "NULL_TREE_LOOKAHEAD",
    "COMMENT",
    "SCRIPT",
    "TEXT"
  };
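  /*
   * Follow sets used for error recovery (bit i set => token type i is in the set):
   *   _tokenSet_0 = { EOF }                (2L  = 1<<1)
   *   _tokenSet_1 = { EOF, SCRIPT, TEXT }  (98L = 1<<1 | 1<<5 | 1<<6)
   */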
  private static final long[] mk_tokenSet_0() {
    long[] data = { 2L, 0L};
    return data;
  }
  public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());

  private static final long[] mk_tokenSet_1() {
    long[] data = { 98L, 0L};
    return data;
  }
  public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
}