* Returns a DocIdSet with documents that should be permitted in search
* results.
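* Returns {@code null} when none of the query's terms occur in the field,
* i.e. no document can match.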
*/
@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, final Bits acceptDocs) throws IOException {
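// Per-segment terms index from the FieldCache: maps each doc to the ord of its
// value, and each ord back to its term, in sorted order.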
final SortedDocValues fcsi = FieldCache.DEFAULT.getTermsIndex(context.reader(), query.field);
// Cannot use FixedBitSet because we require long index (ord):
final OpenBitSet termSet = new OpenBitSet(fcsi.getValueCount());
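// Expose the cached term dictionary as a Terms instance so the query can
// enumerate only the terms it accepts.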
TermsEnum termsEnum = query.getTermsEnum(new Terms() {
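// The cached terms are sorted in unsigned byte order, which for UTF-8 bytes
// equals Unicode code point order.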
@Override
public Comparator<BytesRef> getComparator() {
return BytesRef.getUTF8SortedAsUnicodeComparator();
}
@Override
public TermsEnum iterator(TermsEnum reuse) {
return fcsi.termsEnum();
}
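// Index statistics are not tracked by the FieldCache, so report them as unknown (-1).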
@Override
public long getSumTotalTermFreq() {
return -1;
}
@Override
public long getSumDocFreq() {
return -1;
}
@Override
public int getDocCount() {
return -1;
}
@Override
public long size() {
return -1;
}
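// The cached view carries no postings data, so offsets, positions and payloads are unavailable.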
@Override
public boolean hasOffsets() {
return false;
}
@Override
public boolean hasPositions() {
return false;
}
@Override
public boolean hasPayloads() {
return false;
}
});
assert termsEnum != null;
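// Walk the terms accepted by the query; if there are none, no document can match
// and returning null signals an empty result.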
if (termsEnum.next() != null) {
// fill the ords of all accepted terms into an OpenBitSet
do {
long ord = termsEnum.ord();
if (ord >= 0) {
termSet.set(ord);
}
} while (termsEnum.next() != null);
} else {
return null;
}
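// A document matches iff the ord of its value is in the accepted set;
// acceptDocs filtering is handled by FieldCacheDocIdSet.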
return new FieldCacheDocIdSet(context.reader().maxDoc(), acceptDocs) {
@Override
protected final boolean matchDoc(int doc) {
int ord = fcsi.getOrd(doc);
if (ord == -1) {
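// document has no value for this field, so it cannot match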
return false;
}
return termSet.get(ord);
}