package org.jboss.cache.loader;
import junit.framework.TestCase;
import org.jboss.cache.CacheException;
import org.jboss.cache.Fqn;
import org.jboss.cache.TreeCache;
import java.util.*;
/**
* Test case that proves that the TreeCache is serialized inside the CacheLoaderInterceptor
* when the node is empty. ANY call to retrieve a node that is not loaded will lock
* up inside CacheLoaderInterceptor while the node is loaded and any other thread
* that wants a node out of the cache will wait for the first one to finish before it can even _start_ loading.
*
* @author paulsmith
*
*/
public class InterceptorSynchronizationTest extends TestCase {

    /** Simulated cost (ms) of loading a single node from the virtual backing store. */
    static final int CACHELOADER_WAITTIME = 2000;

    /** Number of concurrent requests issued per top-level node ("/Moo", "/Meow"). */
    static final int numThreadsPerTopLevelNode = 5;

    /**
     * Spawns 2 * numThreadsPerTopLevelNode threads, each fetching a DIFFERENT fqn
     * through a deliberately slow cache loader, and asserts that the total elapsed
     * time is consistent with the loads happening in parallel rather than being
     * serialized inside CacheLoaderInterceptor.
     *
     * @throws Exception if the cache fails to start or a join is interrupted
     */
    public void testBlockingProblem() throws Exception {
        TreeCache cache = new TreeCache();
        cache.setCacheLoader(new TestSlowCacheLoader());
        cache.startService();

        long begin = System.currentTimeMillis();
        Collection threads = new ArrayList();

        /*
         * Create lots of threads all trying to load DIFFERENT fqn's, spread across
         * two distinct top-level nodes so even branch-level parallelism would help.
         */
        for (int i = 0; i < numThreadsPerTopLevelNode; i++) {
            threads.add(new Thread(new Retriever(cache, "/Moo/" + i)));
            threads.add(new Thread(new Retriever(cache, "/Meow/" + i)));
        }
        // Start every thread before joining any, so the loads overlap if they can.
        for (Iterator iter = threads.iterator(); iter.hasNext();) {
            ((Thread) iter.next()).start();
        }
        for (Iterator iter = threads.iterator(); iter.hasNext();) {
            ((Thread) iter.next()).join();
        }
        long end = System.currentTimeMillis();
        long timeTaken = (end - begin);

        /*
         * Expectation: the 2 top-level branches should load in parallel at the very
         * least, and ideally distinct bottom-level nodes too. If the interceptor
         * serializes them, total time approaches 10 * CACHELOADER_WAITTIME; three
         * times a single load is a very generous bound for the parallel case.
         */
        int totalTimeExpectedToWaitIfNotSerialized = 3 * CACHELOADER_WAITTIME;
        assertTrue("If it was parallel, it should have finished quicker than this:" + timeTaken,
                timeTaken < totalTimeExpectedToWaitIfNotSerialized);
    }

    /**
     * Dummy cache loader that emulates a slow load of any node from a virtual
     * backing store: get(Fqn) sleeps CACHELOADER_WAITTIME ms, then returns a
     * single-entry map. All other operations are no-ops. Declared static so it
     * carries no hidden reference to the enclosing test instance.
     *
     * @author paulsmith
     */
    public static class TestSlowCacheLoader implements CacheLoader {
        public void setConfig(Properties arg0) {
        }

        public void setCache(TreeCache arg0) {
        }

        public Set getChildrenNames(Fqn arg0) throws Exception {
            return null;
        }

        public Object get(Fqn arg0, Object arg1) throws Exception {
            return null;
        }

        /**
         * Simulates the slow backing store: every node "exists" and takes
         * CACHELOADER_WAITTIME ms to materialize.
         */
        public Map get(Fqn arg0) throws Exception {
            Thread.sleep(CACHELOADER_WAITTIME);
            return Collections.singletonMap("foo", "bar");
        }

        // Always claim the node exists so the interceptor attempts a load.
        public boolean exists(Fqn arg0) throws Exception {
            return true;
        }

        public Object put(Fqn arg0, Object arg1, Object arg2) throws Exception {
            return null;
        }

        public void put(Fqn arg0, Map arg1) throws Exception {
        }

        public void put(List arg0) throws Exception {
        }

        public Object remove(Fqn arg0, Object arg1) throws Exception {
            return null;
        }

        public void remove(Fqn arg0) throws Exception {
        }

        public void removeData(Fqn arg0) throws Exception {
        }

        public void prepare(Object arg0, List arg1, boolean arg2) throws Exception {
        }

        public void commit(Object arg0) throws Exception {
        }

        public void rollback(Object arg0) {
        }

        public byte[] loadEntireState() throws Exception {
            return null;
        }

        public void storeEntireState(byte[] arg0) throws Exception {
        }

        public void create() throws Exception {
        }

        public void start() throws Exception {
        }

        public void stop() {
        }

        public void destroy() {
        }
    }

    /**
     * Runnable that performs a single get() of one attribute under the given fqn,
     * which forces the cache loader to load that node.
     */
    private static class Retriever implements Runnable {
        private final String fqn;
        private final TreeCache cache;

        private Retriever(TreeCache cache, String fqn) {
            this.fqn = fqn;
            this.cache = cache;
        }

        public void run() {
            try {
                cache.get(fqn, "foo");
            } catch (CacheException e) {
                // Surface as unchecked so the failure is visible on this thread.
                throw new RuntimeException("Unexpected", e);
            }
        }
    }
}