@Override
public FacetDataCache load(BoboIndexReader reader) throws IOException {
int maxDoc = reader.maxDoc();
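// per-document packed bitmask: bit (t-1) is set when the document contains term t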
BigIntArray order = new BigIntArray(maxDoc);
TermValueList mterms = _termListFactory == null ? new TermStringList() : _termListFactory.createTermList();
IntArrayList minIDList = new IntArrayList();
IntArrayList maxIDList = new IntArrayList();
IntArrayList freqList = new IntArrayList();
TermDocs termDocs = null;
TermEnum termEnum = null;
int t = 0; // current term number
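// reserve slot 0 as a dummy entry for "no value"; real terms start at t = 1,
// which keeps each term's bit position (t-1) within the packed int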
mterms.add(null);
minIDList.add(-1);
maxIDList.add(-1);
freqList.add(0);
t++;
try {
termDocs = reader.termDocs();
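// seek to the first term of the facet field; the enum walks terms in sorted order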
termEnum = reader.terms(new Term(_indexFieldName, ""));
do {
if (termEnum == null)
break;
Term term = termEnum.term();
if (term == null || !_indexFieldName.equals(term.field()))
break;
// store term text
// the compact encoding packs each value into one bit of an int, so a document
// may carry multiple terms but the field may hold at most MAX_VAL_COUNT distinct values
if (t > MAX_VAL_COUNT) {
throw new IOException("maximum number of values cannot exceed: "
+ MAX_VAL_COUNT);
}
String val = term.text();
mterms.add(val);
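// each term claims the next bit position in the packed int: term t maps to bit (t-1)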
int bit = (0x00000001 << (t-1));
termDocs.seek(termEnum);
// freqList.add(termEnum.docFreq()); // not used: docFreq() does not account for deleted documents, so df is counted manually below
int df = 0;
int minID = -1;
int maxID = -1;
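// TermDocs yields docids in increasing order, so the first match is minID and the last is maxID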
if(termDocs.next())
{
df++;
int docid = termDocs.doc();
order.add(docid, order.get(docid) | bit);
minID = docid;
while (termDocs.next())
{
df++;
docid = termDocs.doc();
order.add(docid, order.get(docid) | bit);
}
maxID = docid;
}
freqList.add(df);
minIDList.add(minID);