// $ANTLR 2.7.7 (2006-11-01): "XQDocParser.g" -> "XQDocLexer.java"$
package org.exist.xquery.xqdoc.parser;
import org.exist.xquery.xqdoc.XQDocHelper;
import java.io.InputStream;
import antlr.TokenStreamException;
import antlr.TokenStreamIOException;
import antlr.TokenStreamRecognitionException;
import antlr.CharStreamException;
import antlr.CharStreamIOException;
import antlr.ANTLRException;
import java.io.Reader;
import java.util.Hashtable;
import antlr.CharScanner;
import antlr.InputBuffer;
import antlr.ByteBuffer;
import antlr.CharBuffer;
import antlr.Token;
import antlr.CommonToken;
import antlr.RecognitionException;
import antlr.NoViableAltForCharException;
import antlr.MismatchedCharException;
import antlr.TokenStream;
import antlr.ANTLRHashString;
import antlr.LexerSharedInputState;
import antlr.collections.impl.BitSet;
import antlr.SemanticException;
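
/**
 * ANTLR 2.7.7 generated lexer for xqDoc comments. It splits the text of an
 * "(:~ ... :)" comment into XQDOC_START, XQDOC_END, TAG, AT, CHARS, TRIM and
 * SIMPLE_COLON tokens for the companion xqDoc parser.
 *
 * A minimal, hypothetical usage sketch (assuming the enclosing method declares
 * throws TokenStreamException):
 *
 * <pre>
 * XQDocLexer lexer = new XQDocLexer(new java.io.StringReader("(:~ @author someone :)"));
 * for (Token t = lexer.nextToken(); t.getType() != Token.EOF_TYPE; t = lexer.nextToken()) {
 *     System.out.println(t.getType() + " -> " + t.getText());
 * }
 * </pre>
 */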
public class XQDocLexer extends antlr.CharScanner implements XQDocParserTokenTypes, TokenStream
{
    public XQDocLexer(InputStream in) {
        this(new ByteBuffer(in));
    }

    public XQDocLexer(Reader in) {
        this(new CharBuffer(in));
    }

    public XQDocLexer(InputBuffer ib) {
        this(new LexerSharedInputState(ib));
    }

    public XQDocLexer(LexerSharedInputState state) {
        super(state);
        caseSensitiveLiterals = true;
        setCaseSensitive(true);
        literals = new Hashtable();
    }
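
    /**
     * Returns the next token from the input. The dispatch below uses one or
     * two characters of lookahead: "(:" selects the XQDOC_START rule, ":)"
     * selects XQDOC_END, '@' followed by an alphanumeric character selects
     * TAG, a bare '@' selects AT, a newline selects TRIM, a lone ':' selects
     * SIMPLE_COLON, and any other run of characters becomes CHARS.
     */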
    public Token nextToken() throws TokenStreamException {
        Token theRetToken = null;
        tryAgain:
        for (;;) {
            Token _token = null;
            int _ttype = Token.INVALID_TYPE;
            resetText();
            try { // for char stream error handling
                try { // for lexical error handling
                    if ((LA(1)=='(') && (LA(2)==':')) {
                        mXQDOC_START(true);
                        theRetToken = _returnToken;
                    }
                    else if ((LA(1)==':') && (LA(2)==')')) {
                        mXQDOC_END(true);
                        theRetToken = _returnToken;
                    }
                    else if ((LA(1)=='@') && (_tokenSet_0.member(LA(2)))) {
                        mTAG(true);
                        theRetToken = _returnToken;
                    }
                    else if ((LA(1)=='@') && (true)) {
                        mAT(true);
                        theRetToken = _returnToken;
                    }
                    else if ((_tokenSet_1.member(LA(1))) && (true)) {
                        mCHARS(true);
                        theRetToken = _returnToken;
                    }
                    else if ((LA(1)=='\n')) {
                        mTRIM(true);
                        theRetToken = _returnToken;
                    }
                    else if ((LA(1)==':') && (true)) {
                        mSIMPLE_COLON(true);
                        theRetToken = _returnToken;
                    }
                    else {
                        if (LA(1)==EOF_CHAR) { uponEOF(); _returnToken = makeToken(Token.EOF_TYPE); }
                        else { throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn()); }
                    }
                    if ( _returnToken==null ) continue tryAgain; // found SKIP token
                    _ttype = _returnToken.getType();
                    _ttype = testLiteralsTable(_ttype);
                    _returnToken.setType(_ttype);
                    return _returnToken;
                }
                catch (RecognitionException e) {
                    throw new TokenStreamRecognitionException(e);
                }
            }
            catch (CharStreamException cse) {
                if ( cse instanceof CharStreamIOException ) {
                    throw new TokenStreamIOException(((CharStreamIOException)cse).io);
                }
                else {
                    throw new TokenStreamException(cse.getMessage());
                }
            }
        }
    }
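
    /** Matches the xqDoc comment opener "(:~" and emits an XQDOC_START token. */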
    public final void mXQDOC_START(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
        int _ttype; Token _token = null; int _begin = text.length();
        _ttype = XQDOC_START;
        int _saveIndex;
        match("(:~");
        if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
            _token = makeToken(_ttype);
            _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
        }
        _returnToken = _token;
    }
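
    /** Matches the comment closer ":)" and emits an XQDOC_END token. */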
    public final void mXQDOC_END(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
        int _ttype; Token _token = null; int _begin = text.length();
        _ttype = XQDOC_END;
        int _saveIndex;
        match(":)");
        if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
            _token = makeToken(_ttype);
            _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
        }
        _returnToken = _token;
    }
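
    /** Matches a bare '@' (one that does not start a recognized tag, see nextToken) and emits an AT token. */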
    public final void mAT(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
        int _ttype; Token _token = null; int _begin = text.length();
        _ttype = AT;
        int _saveIndex;
        match('@');
        if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
            _token = makeToken(_ttype);
            _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
        }
        _returnToken = _token;
    }
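
    /**
     * Matches one or more ordinary content characters, essentially anything
     * except newline, ':', '@' and end of input (see _tokenSet_1), and emits
     * a CHARS token.
     */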
    public final void mCHARS(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
        int _ttype; Token _token = null; int _begin = text.length();
        _ttype = CHARS;
        int _saveIndex;
        {
            int _cnt16 = 0;
            _loop16:
            do {
                if ((_tokenSet_1.member(LA(1)))) {
                    {
                        match(_tokenSet_1);
                    }
                }
                else {
                    if ( _cnt16>=1 ) { break _loop16; } else { throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn()); }
                }
                _cnt16++;
            } while (true);
        }
        if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
            _token = makeToken(_ttype);
            _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
        }
        _returnToken = _token;
    }
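
    /** Matches a newline and any immediately following run of spaces or tabs, and emits a TRIM token. */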
    public final void mTRIM(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
        int _ttype; Token _token = null; int _begin = text.length();
        _ttype = TRIM;
        int _saveIndex;
        match('\n');
        {
            _loop19:
            do {
                switch ( LA(1)) {
                case '\t':
                {
                    match('\t');
                    break;
                }
                case ' ':
                {
                    match(' ');
                    break;
                }
                default:
                {
                    break _loop19;
                }
                }
            } while (true);
        }
        if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
            _token = makeToken(_ttype);
            _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
        }
        _returnToken = _token;
    }
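
    /** Matches a single ':' that does not close the comment and emits a SIMPLE_COLON token. */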
    public final void mSIMPLE_COLON(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
        int _ttype; Token _token = null; int _begin = text.length();
        _ttype = SIMPLE_COLON;
        int _saveIndex;
        match(':');
        if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
            _token = makeToken(_ttype);
            _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
        }
        _returnToken = _token;
    }
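
    /** Matches '@' followed by one or more ASCII letters or digits (for example "@param" or "@author") and emits a TAG token. */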
    public final void mTAG(boolean _createToken) throws RecognitionException, CharStreamException, TokenStreamException {
        int _ttype; Token _token = null; int _begin = text.length();
        _ttype = TAG;
        int _saveIndex;
        match('@');
        {
            int _cnt26 = 0;
            _loop26:
            do {
                switch ( LA(1)) {
                case 'A': case 'B': case 'C': case 'D':
                case 'E': case 'F': case 'G': case 'H':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                case 'Q': case 'R': case 'S': case 'T':
                case 'U': case 'V': case 'W': case 'X':
                case 'Y': case 'Z':
                {
                    matchRange('A','Z');
                    break;
                }
                case 'a': case 'b': case 'c': case 'd':
                case 'e': case 'f': case 'g': case 'h':
                case 'i': case 'j': case 'k': case 'l':
                case 'm': case 'n': case 'o': case 'p':
                case 'q': case 'r': case 's': case 't':
                case 'u': case 'v': case 'w': case 'x':
                case 'y': case 'z':
                {
                    matchRange('a','z');
                    break;
                }
                case '0': case '1': case '2': case '3':
                case '4': case '5': case '6': case '7':
                case '8': case '9':
                {
                    matchRange('0','9');
                    break;
                }
                default:
                {
                    if ( _cnt26>=1 ) { break _loop26; } else { throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn()); }
                }
                }
                _cnt26++;
            } while (true);
        }
        if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
            _token = makeToken(_ttype);
            _token.setText(new String(text.getBuffer(), _begin, text.length()-_begin));
        }
        _returnToken = _token;
    }
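
    /*
     * Character classes used by the lookahead decisions above (decoded from
     * the bit sets): _tokenSet_0 covers the ASCII letters and digits that may
     * follow '@' in a tag name; _tokenSet_1 covers every character except
     * '\n', ':', '@', end of input and a few low control characters, i.e. the
     * CHARS alternative.
     */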
    private static final long[] mk_tokenSet_0() {
        long[] data = new long[1025];
        data[0] = 287948901175001088L;
        data[1] = 576460743847706622L;
        return data;
    }

    public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());

    private static final long[] mk_tokenSet_1() {
        long[] data = new long[2048];
        data[0] = -288230376151712776L;
        data[1] = -2L;
        for (int i = 2; i <= 1022; i++) { data[i] = -1L; }
        data[1023] = 9223372036854775807L;
        return data;
    }

    public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
}