// Timing markers: 'start' for the whole request, 'startQuery' for the Lucene query phase.
long start = System.currentTimeMillis();
long startQuery = System.currentTimeMillis();
// Build an access-control filter for the search:
//  - anonymous callers (uak == null) only see documents indexed as accesorid:public
//  - authenticated callers additionally see documents matching their own user id
//    and each of their group ids (OR'ed together).
Filter filter;
if (uak == null) {
filter = new QueryFilter(queryParser.parse(
"accesorid:public"
));
LOGGER.debug("filtering query with accesorid:public");
} else {
// NOTE(review): user/group ids are concatenated directly into the filter query.
// Group ids are Longs (cast below) and the user id is presumably numeric as well,
// which would make this safe from query-syntax injection — confirm getUserId()'s type.
StringBuffer f = new StringBuffer("accesorid:public OR accesorid:").append(uak.getUserId());
for (Iterator iter = uak.getGroups().iterator(); iter.hasNext();) {
Long gid = (Long) iter.next();
f.append(" OR accesorid:").append(gid);
}
filter = new QueryFilter(queryParser.parse(
f.toString()
));
LOGGER.debug("filtering query with "+f);
}
// Execute the (pre-built) query restricted by the access filter and record timing.
Hits hits = is.search(query, filter);
long endQuery = System.currentTimeMillis();
int hitsCount = hits.length();
long queryProcessTime = endQuery - startQuery;
// Collapse raw Lucene hits into one Hit per module/class, keyed by
// visibility/organisation/module/fqcn. LinkedHashMap keeps entries in the order of
// their first (highest-scoring) occurrence, i.e. relevance order.
Map h = new LinkedHashMap();
List results = new ArrayList(RESULTS_PER_PAGE);
// first we only look for the accurate hits
int i;
// countModuleHit counts DISTINCT keys seen so far; it is compared against startIndex
// to skip entries belonging to earlier result pages.
int countModuleHit = 0;
// Stop as soon as the current page is full; 'i' is read after the loop to decide
// whether there are more hits beyond this page.
for (i = 0; i < hits.length() && results.size() < RESULTS_PER_PAGE; i++) {
Document doc = hits.doc(i);
String key = doc.get("visibility")+"/"+doc.get("organisation")+"/"+doc.get("module")+"/"+doc.get("fqcn");
Hit hit = (Hit) h.get(key);
if (hit == null) {
// First occurrence of this module/class: the score of this (best) hit is used.
hit = new Hit(doc.get("visibility"), doc.get("organisation"), doc.get("module"), doc.get("fqcn"), hits.score(i));
h.put(key, hit);
// Only entries at or past startIndex belong to the requested page.
if (countModuleHit >= startIndex) {
results.add(hit);
}
countModuleHit++;
}
// not stricly necessary, but sometimes lucene doesn't find the revisions in the second step...
hit.addRevision(doc.get("path"), doc.get("revision"));
}
// Pagination links: nextResults is null when the loop consumed every hit (no further
// page); previousResults is null on the first page.
Long nextResults = i == hits.length()?null:new Long(startIndex+RESULTS_PER_PAGE);
Long previousResults = startIndex == 0?null:new Long(startIndex-RESULTS_PER_PAGE);
// then we fill in revisions
// Second pass: for each Hit on the page, re-run the search narrowed to that exact
// organisation/module/fqcn so every matching revision can be attached to the Hit.
// (Loop body continues beyond this view.)
for (Iterator iter = results.iterator(); iter.hasNext();) {
Hit hit = (Hit) iter.next();
// NOTE(review): these values come back from indexed documents and are concatenated
// unescaped into a new query string — if an organisation/module/fqcn ever contains
// Lucene query syntax characters (':', '-', whitespace, ...), parse() will misbehave
// or throw. Consider QueryParser.escape(...) on each value; confirm against the index.
hits = is.search(query, new QueryFilter(queryParser.parse(
"organisation:"+hit.getOrganisation()
+" AND module:"+hit.getModule()
+" AND fqcn:"+hit.getClassname()
)));
for (i = 0; i < hits.length(); i++) {