// $ANTLR 2.7.4: "template.g" -> "BasicTemplateAntlrLexer.java"$
package org.jostraca;
import org.jostraca.util.Standard;
import org.jostraca.unit.UnitList;
import org.jostraca.unit.BasicUnitList;
import org.jostraca.unit.BasicUnit;
import org.jostraca.unit.BasicUnitOrigin;
import org.jostraca.unit.BasicUnitProcessor;
import org.jostraca.comp.antlr.CommonToken;
import java.io.InputStream;
import org.jostraca.comp.antlr.TokenStreamException;
import org.jostraca.comp.antlr.TokenStreamIOException;
import org.jostraca.comp.antlr.TokenStreamRecognitionException;
import org.jostraca.comp.antlr.CharStreamException;
import org.jostraca.comp.antlr.CharStreamIOException;
import org.jostraca.comp.antlr.ANTLRException;
import java.io.Reader;
import java.util.Hashtable;
import org.jostraca.comp.antlr.CharScanner;
import org.jostraca.comp.antlr.InputBuffer;
import org.jostraca.comp.antlr.ByteBuffer;
import org.jostraca.comp.antlr.CharBuffer;
import org.jostraca.comp.antlr.Token;
import org.jostraca.comp.antlr.RecognitionException;
import org.jostraca.comp.antlr.NoViableAltForCharException;
import org.jostraca.comp.antlr.MismatchedCharException;
import org.jostraca.comp.antlr.TokenStream;
import org.jostraca.comp.antlr.ANTLRHashString;
import org.jostraca.comp.antlr.LexerSharedInputState;
import org.jostraca.comp.antlr.collections.impl.BitSet;
import org.jostraca.comp.antlr.SemanticException;
public class BasicTemplateAntlrLexer extends org.jostraca.comp.antlr.CharScanner implements BasicTemplateAntlrLexerTokenTypes, TokenStream
{
/* The lexing rules have been carefully constructed so that
 * char literals can be globally substituted out. This allows
 * the code markers to be changed at runtime.
 * To facilitate automatic substitution, the four marker positions
 * use the distinct chars < % # > in the grammar, whereas the actual
 * defaults are < % % > (i.e. <% ... %>).
 * Substitution is performed by Ant; the default marker fields below
 * are therefore initialised via String.charAt(0) rather than char
 * literals, so they are not rewritten by the substitution.
 * Note: lexing rules of the form ~('a'|'b') are thus not allowed.
 */
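/* A minimal usage sketch (illustration only, not part of the generated grammar):
 * switching the outer markers at runtime so the lexer recognises [% ... %]
 * instead of the <% ... %> defaults.
 *
 *   BasicTemplateAntlrLexer lexer =
 *     new BasicTemplateAntlrLexer( new java.io.StringReader( "text [% code %] more" ) );
 *   lexer.setOpenOuterChar(  '[' );
 *   lexer.setCloseOuterChar( ']' );
 *   Token tok = lexer.nextToken();   // TEXT   token: "text "
 *   tok       = lexer.nextToken();   // SCRIPT token: " code " (markers trimmed)
 */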
private char iOpenOuterChar = "<".charAt(0);
private char iOpenInnerChar = "%".charAt(0);
private char iCloseInnerChar = "%".charAt(0);
private char iCloseOuterChar = ">".charAt(0);
public void setOpenOuterChar( char pOpenOuterChar ) { iOpenOuterChar = pOpenOuterChar; }
public void setOpenInnerChar( char pOpenInnerChar ) { iOpenInnerChar = pOpenInnerChar; }
public void setCloseInnerChar( char pCloseInnerChar ) { iCloseInnerChar = pCloseInnerChar; }
public void setCloseOuterChar( char pCloseOuterChar ) { iCloseOuterChar = pCloseOuterChar; }
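// Standard ANTLR constructor chain: an InputStream is wrapped in a ByteBuffer and a
// Reader in a CharBuffer; both ultimately feed a LexerSharedInputState.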
public BasicTemplateAntlrLexer(InputStream in) {
this(new ByteBuffer(in));
}
public BasicTemplateAntlrLexer(Reader in) {
this(new CharBuffer(in));
}
public BasicTemplateAntlrLexer(InputBuffer ib) {
this(new LexerSharedInputState(ib));
}
public BasicTemplateAntlrLexer(LexerSharedInputState state) {
super(state);
caseSensitiveLiterals = true;
setCaseSensitive(true);
literals = new Hashtable();
}
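// Main dispatch: uses up to four characters of lookahead against the configurable
// marker chars to choose between COMMENT (<%-- ... --%>), SCRIPT (<% ... %>) and
// plain TEXT, returning EOF when the input is exhausted.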
public Token nextToken() throws TokenStreamException {
Token theRetToken=null;
tryAgain:
for (;;) {
Token _token = null;
int _ttype = Token.INVALID_TYPE;
resetText();
try { // for char stream error handling
try { // for lexical error handling
if ((LA(1)==iOpenOuterChar) && (LA(2)==iOpenInnerChar) && (LA(3)=='-') && (LA(4)=='-')) {
mCOMMENT(true);
theRetToken=_returnToken;
}
else if ((LA(1)==iOpenOuterChar) && (LA(2)==iOpenInnerChar) && ((LA(3) >= '\u0000' && LA(3) <= '\ufffe')) && ((LA(4) >= '\u0000' && LA(4) <= '\ufffe'))) {
mSCRIPT(true);
theRetToken=_returnToken;
}
else if (((LA(1) >= '\u0000' && LA(1) <= '\ufffe')) && (true) && (true) && (true)) {
mTEXT(true);
theRetToken=_returnToken;
}
else {
if (LA(1)==EOF_CHAR) {uponEOF(); _returnToken = makeToken(Token.EOF_TYPE);}
else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
}
if ( _returnToken==null ) continue tryAgain; // found SKIP token
_ttype = _returnToken.getType();
_ttype = testLiteralsTable(_ttype);
_returnToken.setType(_ttype);
return _returnToken;
}
catch (RecognitionException e) {
throw new TokenStreamRecognitionException(e);
}
}
catch (CharStreamException cse) {
if ( cse instanceof CharStreamIOException ) {
throw new TokenStreamIOException(((CharStreamIOException)cse).io);
}
else {
throw new TokenStreamException(cse.getMessage());
}
}
}
}
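// COMMENT rule: matches <%-- ... --%> (using the configured marker chars) and marks
// the result as Token.SKIP, so template comments never produce a token.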
public final void mCOMMENT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
int _ttype; Token _token=null; int _begin=text.length();
_ttype = COMMENT;
int _saveIndex;
match(iOpenOuterChar);
match(iOpenInnerChar);
match('-');
match('-');
{
_loop3:
do {
// nongreedy exit test
if ((LA(1)=='-') && (LA(2)=='-') && (LA(3)==iCloseInnerChar) && (LA(4)==iCloseOuterChar)) break _loop3;
if (((LA(1) >= '\u0000' && LA(1) <= '\ufffe')) && ((LA(2) >= '\u0000' && LA(2) <= '\ufffe')) && ((LA(3) >= '\u0000' && LA(3) <= '\ufffe')) && ((LA(4) >= '\u0000' && LA(4) <= '\ufffe'))) {
matchNot(EOF_CHAR);
}
else {
break _loop3;
}
} while (true);
}
match('-');
match('-');
match(iCloseInnerChar);
match(iCloseOuterChar);
_ttype = Token.SKIP;
if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
_token = makeToken(_ttype);
_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
}
_returnToken = _token;
}
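// SCRIPT rule: matches <% ... %> (using the configured marker chars). The opening and
// closing markers are trimmed from the token text, and embedded newlines call
// newline() so that line numbers stay accurate.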
public final void mSCRIPT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
int _ttype; Token _token=null; int _begin=text.length();
_ttype = SCRIPT;
int _saveIndex;
_saveIndex=text.length();
match(iOpenOuterChar);
text.setLength(_saveIndex);
_saveIndex=text.length();
match(iOpenInnerChar);
text.setLength(_saveIndex);
{
_loop7:
do {
// nongreedy exit test
if ((LA(1)==iCloseInnerChar) && (LA(2)==iCloseOuterChar) && (true)) break _loop7;
if ((_tokenSet_0.member(LA(1))) && ((LA(2) >= '\u0000' && LA(2) <= '\ufffe')) && ((LA(3) >= '\u0000' && LA(3) <= '\ufffe'))) {
{
match(_tokenSet_0);
}
}
else if ((LA(1)=='\n')) {
match('\n');
newline();
}
else {
break _loop7;
}
} while (true);
}
_saveIndex=text.length();
match(iCloseInnerChar);
text.setLength(_saveIndex);
_saveIndex=text.length();
match(iCloseOuterChar);
text.setLength(_saveIndex);
if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
_token = makeToken(_ttype);
_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
}
_returnToken = _token;
}
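// TEXT rule: matches one or more plain template characters, stopping at an opening
// marker pair; a lone open-outer char not followed by the open-inner char is kept
// as ordinary text.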
public final void mTEXT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
int _ttype; Token _token=null; int _begin=text.length();
_ttype = TEXT;
int _saveIndex;
{
int _cnt11=0;
_loop11:
do {
if (((LA(1)==iOpenOuterChar) && (true) && (true) && (true))&&(LA(2)!=iOpenInnerChar)) {
match(iOpenOuterChar);
}
else if (((_tokenSet_0.member(LA(1))) && (true) && (true) && (true))&&(LA(1)!=iOpenOuterChar)) {
{
match(_tokenSet_0);
}
}
else if ((LA(1)=='\n')) {
match('\n');
newline();
}
else {
if ( _cnt11>=1 ) { break _loop11; } else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
}
_cnt11++;
} while (true);
}
if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
_token = makeToken(_ttype);
_token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
}
_returnToken = _token;
}
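// _tokenSet_0: every char in '\u0000'..'\ufffe' except '\n', which the rules match
// separately so that newline() can be called.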
private static final long[] mk_tokenSet_0() {
long[] data = new long[2048];
data[0]=-1025L;
for (int i = 1; i<=1022; i++) { data[i]=-1L; }
data[1023]=9223372036854775807L;
return data;
}
public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());
}