// NOTE(review): fragment of a purge routine — the enclosing method, the result-set
// loop header, and the declarations of ps, rs, expiredBuckets, emptyBuckets,
// taskCount, batchSize, ecs, conn, task and ctx are outside this view.
// Column 2 of the result set carries the bucket id.
Integer bucketId = rs.getInt(2);
// Only purge buckets whose write lock can be acquired without blocking;
// contended buckets are skipped (see the else branch below).
if (immediateLockForWriting(bucketId)) {
if (log.isTraceEnabled()) {
log.tracef("Adding bucket keyed %s for purging.", bucketId);
}
Bucket bucket;
// Column 1 holds the serialized bucket payload; unmarshal it and tag it with its id.
InputStream binaryStream = rs.getBinaryStream(1);
bucket = unmarshallBucket(binaryStream);
bucket.setBucketId(bucketId);
expiredBuckets.add(bucket);
// Once a full batch has accumulated, hand it to an async BucketPurger and start
// a fresh set — the submitted task takes ownership of the old set.
if (expiredBuckets.size() == batchSize) {
ecs.submit(new BucketPurger(expiredBuckets, task, ctx.getMarshaller(), conn, emptyBuckets));
taskCount++;
expiredBuckets = new HashSet<Bucket>(batchSize);
}
} else {
if (log.isTraceEnabled()) {
log.tracef("Could not acquire write lock for %s, this won't be purged even though it has expired elements", bucketId);
}
}
}
// Flush the final, partially filled batch of expired buckets.
if (!expiredBuckets.isEmpty())
ecs.submit(new BucketPurger(expiredBuckets, task, ctx.getMarshaller(), conn, emptyBuckets));
} catch (Exception ex) {
// If anything fails, make sure the bucket locks acquired above are released
// before wrapping and rethrowing.
releaseLocks(expiredBuckets);
log.failedClearingJdbcCacheStore(ex);
throw new CacheLoaderException("Failed clearing JdbcBinaryStore", ex);
} finally {
JdbcUtil.safeClose(ps);
JdbcUtil.safeClose(rs);
}
// Wait for every submitted BucketPurger task to finish before deleting
// the buckets they reported as empty.
try {
PersistenceUtil.waitForAllTasksToComplete(ecs, taskCount);
} catch (CacheLoaderException e) {
// NOTE(review): this exception is swallowed — the locks on emptyBuckets are
// released, yet execution falls through and still attempts to delete those
// (now unlocked) buckets below. Confirm this best-effort behavior is
// intentional, and consider at least logging e.
releaseLocks(emptyBuckets);
}
if (emptyBuckets.isEmpty())
return;
if (log.isTraceEnabled()) {
log.tracef("About to remove empty buckets %s", emptyBuckets);
}
// then remove the empty buckets
try {
String sql = tableManipulation.getDeleteRowSql();
// NOTE(review): ps is reassigned here after being closed in the finally block
// above; its corresponding close is presumably past the end of this view —
// TODO confirm it is also released on the exception path.
ps = conn.prepareStatement(sql);
int deletionCount = 0;
for (Bucket bucket : emptyBuckets) {
ps.setString(1, bucket.getBucketIdAsString());
ps.addBatch();
deletionCount++;
// Flush the JDBC delete batch every batchSize rows to bound memory use.
if (deletionCount % batchSize == 0) {
if (log.isTraceEnabled()) {
log.tracef("Flushing deletion batch, total deletion count so far is %d", deletionCount