}
} catch (Exception ex) {
// if something happens make sure buckets locks are being release
releaseLocks(expiredBuckets);
log.failedClearingJdbcCacheStore(ex);
throw new CacheLoaderException("Failed clearing JdbcBinaryCacheStore", ex);
} finally {
JdbcUtil.safeClose(ps);
JdbcUtil.safeClose(rs);
}
// Log which buckets were found to contain expired entries before touching the DB.
if (log.isTraceEnabled()) {
log.tracef("Found following buckets: %s which are about to be expired", expiredBuckets);
}
// Nothing expired: no rows to update or delete, bail out early.
if (expiredBuckets.isEmpty()) {
return;
}
// Buckets that become completely empty after removeExpiredEntries() are moved
// into this set; they get a DELETE (not an UPDATE) in the final batch below.
Set<Bucket> emptyBuckets = new HashSet<Bucket>();
// Now rewrite each still-non-empty bucket in one JDBC batch, flushing every
// batchSize statements. Buckets left empty by removeExpiredEntries() are
// migrated from expiredBuckets to emptyBuckets and deleted further below.
try {
String sql = tableManipulation.getUpdateRowSql();
ps = conn.prepareStatement(sql);
int updateCount = 0;
Iterator<Bucket> it = expiredBuckets.iterator();
while (it.hasNext()) {
Bucket bucket = it.next();
// Drop the expired entries from the in-memory bucket before persisting it.
bucket.removeExpiredEntries();
if (!bucket.isEmpty()) {
// Marshalling can throw InterruptedException — handled in the catch below.
ByteBuffer byteBuffer = JdbcUtil.marshall(getMarshaller(), bucket);
ps.setBinaryStream(1, byteBuffer.getStream(), byteBuffer.getLength());
ps.setLong(2, bucket.timestampOfFirstEntryToExpire());
ps.setString(3, bucket.getBucketIdAsString());
ps.addBatch();
updateCount++;
if (updateCount % batchSize == 0) {
ps.executeBatch();
if (log.isTraceEnabled()) {
log.tracef("Flushing batch, update count is: %d", updateCount);
}
}
} else {
// Bucket is now empty: remove it from expiredBuckets via the iterator
// (so the finally below no longer releases its lock) and queue it for
// deletion; its lock is released by the deletion block's finally, or by
// the Exception catch below if the batch fails first.
it.remove();
emptyBuckets.add(bucket);
}
}
// Flush whatever is left over from the last partial batch.
if (updateCount % batchSize != 0) {
if (log.isTraceEnabled()) {
log.tracef("Flushing batch, update count is: %d", updateCount);
}
ps.executeBatch();
}
if (log.isTraceEnabled()) {
log.tracef("Updated %d buckets.", updateCount);
}
} catch (InterruptedException ie) {
// Restore the interrupt flag and fall through; execution continues to the
// deletion of emptyBuckets below, and any updates not yet flushed are
// silently dropped. NOTE(review): confirm this best-effort-on-interrupt
// behaviour is intended — buckets still in expiredBuckets keep their
// expired-entry removal unpersisted.
if (log.isTraceEnabled()) {
log.trace("Interrupted while marshalling to purge expired entries");
}
Thread.currentThread().interrupt();
} catch (Exception ex) {
// If something goes wrong make sure bucket locks are released: locks of
// buckets already migrated to emptyBuckets are released here, the rest
// (still in expiredBuckets) by the finally below.
releaseLocks(emptyBuckets);
// NOTE(review): log call and exception message say "clearing" but this is
// the expiry-purge path — looks copy-pasted from clear(); confirm/fix.
log.failedClearingJdbcCacheStore(ex);
throw new CacheLoaderException("Failed clearing JdbcBinaryCacheStore", ex);
} finally {
// Release locks for the updated buckets. This won't include empty
// buckets, as these were migrated to emptyBuckets.
releaseLocks(expiredBuckets);
JdbcUtil.safeClose(ps);
}
// Log and short-circuit: if no buckets were emptied there are no rows to delete.
if (log.isTraceEnabled()) {
log.tracef("About to remove empty buckets %s", emptyBuckets);
}
if (emptyBuckets.isEmpty()) {
return;
}
// Then remove the now-empty buckets' rows in a single batched DELETE.
try {
String sql = tableManipulation.getDeleteRowSql();
ps = conn.prepareStatement(sql);
int deletionCount = 0;
for (Bucket bucket : emptyBuckets) {
ps.setString(1, bucket.getBucketIdAsString());
ps.addBatch();
deletionCount++;
// Flush every batchSize deletions, same cadence as the update loop.
if (deletionCount % batchSize == 0) {
if (log.isTraceEnabled()) {
log.tracef("Flushing deletion batch, total deletion count so far is %d", deletionCount);
}
ps.executeBatch();
}
}
// Flush the trailing partial batch, if any.
if (deletionCount % batchSize != 0) {
int[] batchResult = ps.executeBatch();
if (log.isTraceEnabled()) {
log.tracef("Flushed the batch and received following results: %s", Arrays.toString(batchResult));
}
}
} catch (Exception ex) {
// If something goes wrong the bucket locks are still released — the
// finally below always runs before the rethrow.
// NOTE(review): "clearing" message on the purge path — likely copy-paste.
log.failedClearingJdbcCacheStore(ex);
throw new CacheLoaderException("Failed clearing JdbcBinaryCacheStore", ex);
} finally {
// Always release the emptyBuckets locks and close the statement, on both
// the success and the failure path.
releaseLocks(emptyBuckets);
JdbcUtil.safeClose(ps);
}
} finally {