if (origQuery != null) {
  if (!(origQuery instanceof SpanNearQuery)) {
    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
        "Illegal query type. The incoming query must be a Lucene SpanNearQuery, but it was a " + origQuery.getClass().getName());
  }
  SpanNearQuery sQuery = (SpanNearQuery) origQuery;
  SolrIndexSearcher searcher = rb.req.getSearcher();
  IndexReader reader = searcher.getIndexReader();
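  // getSpans (available on span queries only) enumerates each (doc, start, end) position where the query matches.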
  Spans spans = sQuery.getSpans(reader);
  //Build up the query term weight map and the bi-gram weight map
  Map<String, Float> termWeights = new HashMap<String, Float>();
  Map<String, Float> bigramWeights = new HashMap<String, Float>();
  createWeights(params.get(CommonParams.Q), sQuery, termWeights, bigramWeights, reader);
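  // Read the ranking weights from the request parameters; the defaults apply when a parameter is absent.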
  float adjWeight = params.getFloat(ADJACENT_WEIGHT, DEFAULT_ADJACENT_WEIGHT);
  float secondAdjWeight = params.getFloat(SECOND_ADJ_WEIGHT, DEFAULT_SECOND_ADJACENT_WEIGHT);
  float bigramWeight = params.getFloat(BIGRAM_WEIGHT, DEFAULT_BIGRAM_WEIGHT);
  //get the passages
  int primaryWindowSize = params.getInt(QAParams.PRIMARY_WINDOW_SIZE, DEFAULT_PRIMARY_WINDOW_SIZE);
  int adjacentWindowSize = params.getInt(QAParams.ADJACENT_WINDOW_SIZE, DEFAULT_ADJACENT_WINDOW_SIZE);
  int secondaryWindowSize = params.getInt(QAParams.SECONDARY_WINDOW_SIZE, DEFAULT_SECONDARY_WINDOW_SIZE);
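  // WindowBuildingTVM is a TermVectorMapper that collects the terms falling within the configured windows around each span match.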
  WindowBuildingTVM tvm = new WindowBuildingTVM(primaryWindowSize, adjacentWindowSize, secondaryWindowSize);
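  // Priority queue that accumulates the scored candidate passages.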
  PassagePriorityQueue rankedPassages = new PassagePriorityQueue();
  //intersect w/ doclist
  DocList docList = rb.getResults().docList;
  while (spans.next()) {
    //build up the window
    if (docList.exists(spans.doc())) {
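      // Record the span boundaries so the mapper knows where this match lies in the document.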
      tvm.spanStart = spans.start();
      tvm.spanEnd = spans.end();
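      // Stream the document's term vector for the query field through the window-building mapper.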
      reader.getTermFreqVector(spans.doc(), sQuery.getField(), tvm);
      //the mapper has now filled tvm.passage with the window terms; rank it
      if (!tvm.passage.terms.isEmpty()) {
        log.debug("Candidate: Doc: {} Start: {} End: {}",
            new Object[]{spans.doc(), spans.start(), spans.end()});
      }
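      // Stamp the passage with its document and field before scoring.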
      tvm.passage.lDocId = spans.doc();
      tvm.passage.field = sQuery.getField();
      //score this window
      try {
        addPassage(tvm.passage, rankedPassages, termWeights, bigramWeights, adjWeight, secondAdjWeight, bigramWeight);
      } catch (CloneNotSupportedException e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Internal error cloning Passage", e);
      }
      //clear out the entries for the next round
      tvm.passage.clear();
    }
  }
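  // Build the response: one "answer" entry per returned passage.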
  NamedList<Object> qaResp = new NamedList<Object>();
  rb.rsp.add("qaResponse", qaResp);
  int rows = params.getInt(QA_ROWS, 5);
  SchemaField uniqField = rb.req.getSchema().getUniqueKeyField();
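  // Pop the ranked passages off the queue and emit up to "rows" answers.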
  if (rankedPassages.size() > 0) {
    int size = Math.min(rows, rankedPassages.size());
    Set<String> fields = new HashSet<String>();
    for (int i = size - 1; i >= 0; i--) {
      Passage passage = rankedPassages.pop();
      if (passage != null) {
        NamedList<Object> passNL = new NamedList<Object>();
        qaResp.add("answer", passNL);
        String idName;
        String idValue;
        if (uniqField != null) {
          idName = uniqField.getName();
          fields.add(idName);
          fields.add(passage.field);//prefetch this now, so that it is cached
          idValue = searcher.doc(passage.lDocId, fields).get(idName);
        } else {
          idName = "luceneDocId";
          idValue = String.valueOf(passage.lDocId);
        }
        passNL.add(idName, idValue);
        passNL.add("field", passage.field);
        //get the window
        String fldValue = searcher.doc(passage.lDocId, fields).get(passage.field);
        if (fldValue != null) {
          //get the window of words to display; we don't use the passage window, as that is based on the term vector
          int start = passage.terms.first().start;//use the offsets
          int end = passage.terms.last().end;
          if (start >= 0 && start < fldValue.length() &&