throws IOException {
// Accumulates per-term data for this document (term text -> count).
Map<String,Integer> tokenMap = new HashMap<String,Integer>();
// NOTE(review): cap on tokens per field — enforcement is not visible in this
// fragment; confirm it is applied inside the token loop below.
final int maxFieldLength = 10000;
Analyzer analyzer = createAnalyzer();
// Walk every field of the document; only indexed fields contribute tokens.
for (Fieldable field : doc.getFields()) {
String fieldName = field.name();
if (field.isIndexed()) {
if (field.isTokenized()) { // tokenized field (original comment said "un-tokenized" — stale: the condition has no negation)
Reader reader; // find or make Reader
if (field.readerValue() != null)
reader = field.readerValue();
else if (field.stringValue() != null)
reader = new StringReader(field.stringValue());
else
// A field with neither a Reader nor a String value cannot be analyzed.
throw new IllegalArgumentException
("field must have either String or Reader value");
int position = 0;
// Tokenize field and add to postingTable
TokenStream stream = analyzer.tokenStream(fieldName, reader);
// Attribute views onto the stream (pre-Lucene-4.0 attribute API):
// term text and the position increment of each token.
TermAttribute termAtt = stream.addAttribute(TermAttribute.class);
PositionIncrementAttribute posIncrAtt = stream.addAttribute(PositionIncrementAttribute.class);
try {
// Consume tokens until the stream is exhausted; the loop body
// (and the matching finally, presumably closing the stream)
// continues past this fragment.
while (stream.incrementToken()) {