// NOTE(review): fragment of a larger index-job method (signature and the tail of the
// finally block are outside this view). Overall flow visible here: acquire the index
// lock, determine batch size and filter rule, compute the set of objects needing
// (re)indexing via LuceneIndexUpdateChecker, obtain Lucene writer/reader (and taxonomy
// writer when facets are enabled), index the objects in batches ("slices"), optionally
// optimize, then release resources in the finally block.
// Load per-attribute boost factors from configuration.
fillBoostValues(config.getString(BOOSTED_ATTRIBUTES_KEY));
// Resource handles; kept null until acquired so the finally block can release
// only what was actually obtained.
IndexAccessor indexAccessor = null;
IndexWriter indexWriter = null;
IndexReader indexReader = null;
TaxonomyAccessor taxonomyAccessor = null;
TaxonomyWriter taxonomyWriter = null;
LuceneIndexUpdateChecker luceneIndexUpdateChecker = null;
// Completion flags: if neither is set when we reach finally, an unexpected
// Error/RuntimeException escaped the inner try and is logged as fatal.
boolean finishedIndexJobSuccessfull = false;
boolean finishedIndexJobWithError = false;
try {
// Ensure no other job holds the index lock before starting.
indexLocation.checkLock();
// Current batch of objects being indexed; declared here so the finally block
// can report the last slice on abnormal termination.
Collection<CRResolvableBean> slice = null;
try {
// NOTE(review): renders as "Writer accquired. Startingindex job." — missing
// space between the concatenated literals, and "accquired" is a typo.
status.setCurrentStatusString("Writer accquired. Starting" + "index job.");
if (rp == null) {
throw new CRException("FATAL ERROR", "RequestProcessor not available");
}
// Batch size: configured value overrides the default; on a malformed value we
// log and keep the default rather than aborting the job.
String bsString = (String) config.get(BATCH_SIZE_KEY);
int crBatchSize = batchSize;
if (bsString != null) {
try {
crBatchSize = Integer.parseInt(bsString);
} catch (NumberFormatException e) {
log.error("The configured " + BATCH_SIZE_KEY + " for the Current CR" + " did not contain a parsable integer. ", e);
}
}
// and get the current rule
// An empty/missing rule becomes the match-all expression "(1 == 1)"; otherwise
// the configured rule is parenthesized so it composes safely with other filters.
String rule = (String) config.get(RULE_KEY);
if (rule == null) {
rule = "";
}
if (rule.length() == 0) {
rule = "(1 == 1)";
} else {
rule = "(" + rule + ")";
}
// Content transformers configured for this index (applied per object downstream).
List<ContentTransformer> transformerlist = ContentTransformer.getTransformerList(config);
// create == true means a fresh index will be built; an existing index is updated.
boolean create = true;
if (indexLocation.isContainingIndex()) {
create = false;
log.debug("Index already exists.");
}
// The update checker requires a Lucene-specific index location; anything else
// is a misconfiguration and aborts the job.
if (indexLocation instanceof LuceneIndexLocation) {
luceneIndexUpdateChecker = new LuceneIndexUpdateChecker((LuceneIndexLocation) indexLocation, CR_FIELD_KEY, crid,
idAttribute);
} else {
log.error("IndexLocation is not created for Lucene. " + "Using the " + CRLuceneIndexJob.class.getName()
+ " requires that you use the " + LuceneIndexLocation.class.getName()
+ ". You can configure another Job by setting the " + IndexLocation.UPDATEJOBCLASS_KEY + " key in your config.");
throw new CRException(new CRError("Error", "IndexLocation is not created for Lucene."));
}
Collection<CRResolvableBean> objectsToIndex = null;
//Clear Index and remove stale Documents
//if (!create) {
log.debug("Will do differential index.");
// Query the RequestProcessor for objects matching the rule; the update checker
// filters this down to objects that actually need (re)indexing and removes
// stale documents. Failures here are logged but do not abort the job.
try {
CRRequest req = new CRRequest();
req.setRequestFilter(rule);
req.set(CR_FIELD_KEY, crid);
status.setCurrentStatusString("Get objects to update " + "in the index ...");
objectsToIndex = getObjectsToUpdate(req, rp, false, luceneIndexUpdateChecker);
} catch (Exception e) {
log.error("ERROR while cleaning index", e);
}
//}
//Obtain accessor and writer after clean
// Writer/reader are acquired only now, after the stale-document cleanup above.
// Taxonomy writer is only needed when faceted search is enabled for this location.
if (indexLocation instanceof LuceneIndexLocation) {
indexAccessor = ((LuceneIndexLocation) indexLocation).getAccessor();
indexWriter = indexAccessor.getWriter();
indexReader = indexAccessor.getReader(false);
useFacets = ((LuceneIndexLocation) indexLocation).useFacets();
if (useFacets) {
taxonomyAccessor = ((LuceneIndexLocation) indexLocation).getTaxonomyAccessor();
taxonomyWriter = taxonomyAccessor.getTaxonomyWriter();
}
} else {
// NOTE(review): unreachable in practice — the same instanceof check above
// already threw for non-Lucene locations; kept for safety.
log.error("IndexLocation is not created for Lucene. " + "Using the " + CRLuceneIndexJob.class.getName()
+ " requires that you use the " + LuceneIndexLocation.class.getName()
+ ". You can configure another Job by setting the " + IndexLocation.UPDATEJOBCLASS_KEY + " key in your config.");
throw new CRException(new CRError("Error", "IndexLocation is not created for Lucene."));
}
log.debug("Using rule: " + rule);
// prepare the map of indexed/stored attributes
// Map value semantics (per the loops below): FALSE = indexed only,
// TRUE = contained/stored. Contained attributes overwrite indexed entries,
// and the id attribute is always stored.
Map<String, Boolean> attributes = new HashMap<String, Boolean>();
List<String> containedAttributes = IndexerUtil.getListFromString(config.getString(CONTAINED_ATTRIBUTES_KEY), ",");
List<String> indexedAttributes = IndexerUtil.getListFromString(config.getString(INDEXED_ATTRIBUTES_KEY), ",");
List<String> reverseAttributes = ((LuceneIndexLocation) indexLocation).getReverseAttributes();
// first put all indexed attributes into the map
for (String name : indexedAttributes) {
attributes.put(name, Boolean.FALSE);
}
// now put all contained attributes
for (String name : containedAttributes) {
attributes.put(name, Boolean.TRUE);
}
// finally, put the "contentid" (always contained)
attributes.put(idAttribute, Boolean.TRUE);
// getObjectsToUpdate may have failed (caught above) or returned nothing.
if (objectsToIndex == null) {
log.debug("Rule returned no objects to index. Skipping...");
return;
}
status.setObjectCount(objectsToIndex.size());
log.debug(" index job with " + objectsToIndex.size() + " objects to index.");
// now get the first batch of objects from the collection
// (remove them from the original collection) and index them
// NOTE(review): raw type — should be new Vector<CRResolvableBean>(crBatchSize),
// or an ArrayList, since no synchronization need is evident here.
slice = new Vector(crBatchSize);
int sliceCounter = 0;
status.setCurrentStatusString("Starting to index slices.");
boolean interrupted = Thread.currentThread().isInterrupted();
// Drain objectsToIndex in slices of crBatchSize. Objects are removed from the
// source collection as they are added to the slice, so on interruption the
// remaining (un-indexed) objects are picked up by the next run.
for (Iterator<CRResolvableBean> iterator = objectsToIndex.iterator(); iterator.hasNext();) {
CRResolvableBean obj = iterator.next();
slice.add(obj);
iterator.remove();
sliceCounter++;
// Stop batching promptly if the job thread was interrupted; the partial
// slice (if any) is still indexed below before shutdown.
if (Thread.currentThread().isInterrupted()) {
interrupted = true;
break;
}
if (sliceCounter == crBatchSize) {
// index the current slice
log.debug("Indexing slice with " + slice.size() + " objects.");
indexSlice(
crid,
indexWriter,
indexReader,
slice,
attributes,
rp,
create,
config,
transformerlist,
reverseAttributes,
taxonomyWriter,
taxonomyAccessor);
// clear the slice and reset the counter
slice.clear();
sliceCounter = 0;
}
}
// Index the trailing partial slice (fewer than crBatchSize objects).
if (!slice.isEmpty()) {
// index the last slice
indexSlice(
crid,
indexWriter,
indexReader,
slice,
attributes,
rp,
create,
config,
transformerlist,
reverseAttributes,
taxonomyWriter,
taxonomyAccessor);
}
if (!interrupted) {
// Only Optimize the Index if the thread
// has not been interrupted
if (optimize) {
log.debug("Executing optimize command.");
// UseCase monitors the duration of the optimize call.
UseCase uc = MonitorFactory.startUseCase("optimize(" + crid + ")");
try {
indexWriter.optimize();
} finally {
uc.stop();
}
} else if (maxSegmentsString != null) {
log.debug("Executing optimize command with max" + " segments: " + maxSegmentsString);
// NOTE(review): parseInt may throw NumberFormatException here and abort the
// job via the outer catch — consider validating like BATCH_SIZE_KEY above.
int maxs = Integer.parseInt(maxSegmentsString);
UseCase uc = MonitorFactory.startUseCase("optimize(" + crid + ")");
try {
indexWriter.optimize(maxs);
} finally {
uc.stop();
}
}
} else {
log.debug("Job has been interrupted and will now be closed." + " Missing objects " + "will be reindexed next run.");
}
finishedIndexJobSuccessfull = true;
} catch (Exception ex) {
// Indexing failed part-way through; record how far we got and fall through to
// the finally block, which releases the writer and removes the lock.
log.error("Could not complete index run... indexed Objects: " + status.getObjectsDone()
+ ", trying to close index and remove lock.", ex);
finishedIndexJobWithError = true;
status.setError("Could not complete index run... indexed " + "Objects: " + status.getObjectsDone()
+ ", trying to close index and remove lock.");
} finally {
// Neither flag set means a non-Exception throwable (Error/RuntimeException
// rethrown) escaped — log the last slice to aid diagnosis.
if (!finishedIndexJobSuccessfull && !finishedIndexJobWithError) {
log.fatal("There seems to be a run time exception from this" + " index job.\nLast slice was: " + slice);
}
//Set status for job if it was not locked
status.setCurrentStatusString("Finished job.");
int objectCount = status.getObjectsDone();
log.debug("Indexed " + objectCount + " objects...");
// Release taxonomy writer before the index writer; each release is guarded so
// resources never acquired (nulls) are skipped.
if (taxonomyAccessor != null && taxonomyWriter != null) {
taxonomyAccessor.release(taxonomyWriter);
}
if (indexAccessor != null && indexWriter != null) {
indexAccessor.release(indexWriter);
}