// NOTE(review): this span is the interior of a test method — the
// enclosing signature precedes it and the try block opened below is
// closed after this view (presumably with a finally that removes the
// listener/worker; confirm against the full file).
//
// Purpose: verify that a SegmentCacheListener registered on the
// composite segment cache observes ENTRY_CREATED events when a query
// populates segments, and ENTRY_DELETED events when the measures
// region is flushed.

// Wrap the mock cache in a worker and (below) register it with the
// server's cache manager so segment traffic goes through mockCache.
SegmentCacheWorker testWorker =
new SegmentCacheWorker(mockCache, null);
// Flush the cache before we start. Wait a second for the cache
// flush to propagate.
final CacheControl cc =
getTestContext().getConnection().getCacheControl(null);
Cube salesCube = getCube("Sales");
cc.flush(cc.createMeasuresRegion(salesCube));
// NOTE(review): fixed-duration sleeps for async propagation are a
// known flakiness source; a latch/polling wait would be more robust.
Thread.sleep(1000);
MondrianServer.forConnection(getTestContext().getConnection())
.getAggregationManager().cacheMgr.segmentCacheWorkers
.add(testWorker);
// Accumulators for the events seen by the listener.
// NOTE(review): plain ArrayLists mutated from the listener callback —
// assumes events are delivered on (or synchronized with) this thread;
// confirm, otherwise these need synchronized/concurrent collections.
final List<SegmentHeader> createdHeaders =
new ArrayList<SegmentHeader>();
final List<SegmentHeader> deletedHeaders =
new ArrayList<SegmentHeader>();
// Listener that records each event's segment header by event type.
// Any event type other than created/deleted is unexpected and fails
// the test via UnsupportedOperationException.
final SegmentCache.SegmentCacheListener listener =
new SegmentCache.SegmentCacheListener() {
public void handle(SegmentCacheEvent e) {
switch (e.getEventType()) {
case ENTRY_CREATED:
createdHeaders.add(e.getSource());
break;
case ENTRY_DELETED:
deletedHeaders.add(e.getSource());
break;
default:
throw new UnsupportedOperationException();
}
}
};
try {
// Register our custom listener.
((CompositeSegmentCache)MondrianServer
.forConnection(getTestContext().getConnection())
.getAggregationManager().cacheMgr.compositeCache)
.addListener(listener);
// Now execute a query and check the events
executeQuery(
"select {[Measures].[Unit Sales]} on columns from [Sales]");
// Wait for propagation.
Thread.sleep(2000);
// Expect exactly two segments created by the query (presumably one
// per cache layer or rollup — TODO confirm why the count is 2),
// and no deletions yet.
assertEquals(2, createdHeaders.size());
assertEquals(0, deletedHeaders.size());
// The created segment belongs to the Unit Sales measure of
// FoodMart's Sales cube.
assertEquals("Sales", createdHeaders.get(0).cubeName);
assertEquals("FoodMart", createdHeaders.get(0).schemaName);
assertEquals("Unit Sales", createdHeaders.get(0).measureName);
// Reset accumulators before exercising the flush path.
createdHeaders.clear();
deletedHeaders.clear();
// Now flush the segment and check the events.
cc.flush(cc.createMeasuresRegion(salesCube));
// Wait for propagation.
Thread.sleep(2000);
// The flush must delete the same two segments and create none.
assertEquals(0, createdHeaders.size());
assertEquals(2, deletedHeaders.size());