// (then can store feeds - doesn't matter that the event/entities have been modified by the aggregation)
// 3. (Scheduled for efficiency) Update all documents' frequencies based on new entities and events
// 4. (Scheduled for efficiency) Synchronize with index [after this, queries can find them - so (2) must have happened]
// (Synchronization currently "corrupts" the entities so needs to be run last)
// Aggregation is optional: perSourceAggregation remains null when disabled via the harvester
// properties, and every aggregation step below is guarded by a null check.
AggregationManager perSourceAggregation = null;
if (!props.getAggregationDisabled()) {
perSourceAggregation = new AggregationManager();
}
// Steps 1+2 (from the list above): aggregate the added/deleted docs and persist the feature entries
if (null != perSourceAggregation) {
perSourceAggregation.doAggregation(toAdd, toDelete);
perSourceAggregation.createOrUpdateFeatureEntries();
}
// Write the aggregated statistics back into the docs' entity/event instances
// before the docs themselves are persisted below
if (null != perSourceAggregation) {
perSourceAggregation.applyAggregationToDocs(toAdd);
}
// Save feeds to the feeds collection in MongoDB
// (second argument determines whether the content gets saved - skipped for DATABASE-type harvests)
storeFeeds(toAdd, (harvestType != InfiniteEnums.DATABASE), source);
// Then finish aggregation:
if (null != perSourceAggregation) {
// Step 3: batched per-document frequency updates (deferred for efficiency)
perSourceAggregation.runScheduledDocumentUpdates();
// Step 4: index synchronization - must run last because it "corrupts" the entities and events
perSourceAggregation.runScheduledSynchronization();
}
}//TESTED (by eye - logic is v simple)