// --- Pass 1: collect expired buckets ------------------------------------
// Iterate the (externally prepared) ResultSet of candidate rows. Column 2
// is the bucket key, column 1 the serialized bucket payload. Each bucket is
// write-locked BEFORE it is read/unmarshalled; buckets whose lock cannot be
// acquired immediately are skipped this cycle rather than blocking the purge.
while (rs.next()) {
String key = rs.getString(2);
if (immediateLockForWriting(key)) {
if (log.isTraceEnabled()) log.trace("Adding bucket keyed " + key + " for purging.");
// NOTE(review): binaryStream is never explicitly closed here — presumably
// JdbcUtil.unmarshall or ResultSet closure handles it; confirm no leak.
InputStream binaryStream = rs.getBinaryStream(1);
// NOTE(review): if unmarshalling throws, the write lock taken for `key`
// above is not yet tracked in expiredBuckets, so the catch block's
// releaseLocks(expiredBuckets) would not release it — verify against
// the lock-manager semantics.
Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
bucket.setBucketName(key);
expiredBuckets.add(bucket);
} else {
if (log.isTraceEnabled())
log.trace("Could not acquire write lock for " + key + ", this won't be purged even though it has expired elements");
}
}
} catch (SQLException ex) {
// if something happens make sure bucket locks are released
releaseLocks(expiredBuckets);
connectionFactory.releaseConnection(conn);
// NOTE(review): code below this catch continues to use `conn`, so this
// relies on logAndThrow always throwing; confirm that contract.
logAndThrow(ex, "Failed clearing JdbcBinaryCacheStore");
} finally {
JdbcUtil.safeClose(ps);
JdbcUtil.safeClose(rs);
}
if (log.isTraceEnabled())
log.trace("Found following buckets: " + expiredBuckets + " which are about to be expired");
if (expiredBuckets.isEmpty()) return;
// Buckets that become empty after removing expired entries are moved here;
// they are deleted (not updated) in pass 3 below.
Set<Bucket> emptyBuckets = new HashSet<Bucket>();
// --- Pass 2: batch-update surviving buckets -----------------------------
// Remove expired entries from each locked bucket; re-marshal and batch an
// UPDATE for non-empty buckets, flushing every `batchSize` statements.
//now update all the buckets in batch
try {
String sql = tableManipulation.getUpdateRowSql();
ps = conn.prepareStatement(sql);
int updateCount = 0;
// Iterator used (rather than for-each) so empty buckets can be removed
// from expiredBuckets in place via it.remove().
Iterator<Bucket> it = expiredBuckets.iterator();
while (it.hasNext()) {
Bucket bucket = it.next();
bucket.removeExpiredEntries();
if (!bucket.isEmpty()) {
ByteBuffer byteBuffer = JdbcUtil.marshall(getMarshaller(), bucket);
ps.setBinaryStream(1, byteBuffer.getStream(), byteBuffer.getLength());
ps.setLong(2, bucket.timestampOfFirstEntryToExpire());
ps.setString(3, bucket.getBucketName());
ps.addBatch();
updateCount++;
if (updateCount % batchSize == 0) {
ps.executeBatch();
if (log.isTraceEnabled()) log.trace("Flushing batch, update count is: " + updateCount);
}
} else {
// Bucket has no live entries left: exclude from the UPDATE batch and
// queue it for deletion. Its lock is intentionally NOT released yet
// (see the finally comment below).
it.remove();
emptyBuckets.add(bucket);
}
}
//flush the batch
// Final partial flush: only needed when the count is not an exact multiple
// of batchSize (a multiple means the loop already flushed everything; a
// count of 0 also correctly skips, since 0 % batchSize == 0).
if (updateCount % batchSize != 0) {
if (log.isTraceEnabled()) log.trace("Flushing batch, update count is: " + updateCount);
ps.executeBatch();
}
if (log.isTraceEnabled()) log.trace("Updated " + updateCount + " buckets.");
} catch (SQLException ex) {
// if something happens make sure bucket locks are released
// (expiredBuckets locks are covered by the finally below; emptyBuckets
// must be released explicitly here since the deletion pass won't run)
releaseLocks(emptyBuckets);
connectionFactory.releaseConnection(conn);
logAndThrow(ex, "Failed clearing JdbcBinaryCacheStore");
} catch (InterruptedException ie) {
// Marshalling was interrupted: restore the interrupt flag per convention
// and abandon this purge cycle.
if (log.isTraceEnabled()) log.trace("Interrupted while marshalling to purge expired entries");
Thread.currentThread().interrupt();
} finally {
// release locks for the updated buckets. This won't include empty buckets, as these were migrated to emptyBuckets
releaseLocks(expiredBuckets);
JdbcUtil.safeClose(ps);
}
if (log.isTraceEnabled()) log.trace("About to remove empty buckets " + emptyBuckets);
if (emptyBuckets.isEmpty()) return;
// --- Pass 3: batch-delete empty buckets ---------------------------------
// Buckets drained by pass 2 are still write-locked here; delete their rows
// in batches. (Lock release for emptyBuckets presumably happens after this
// fragment — confirm in the continuation of the method.)
//then remove the empty buckets
try {
String sql = tableManipulation.getDeleteRowSql();
ps = conn.prepareStatement(sql);
int deletionCount = 0;
for (Bucket bucket : emptyBuckets) {
ps.setString(1, bucket.getBucketName());
ps.addBatch();
deletionCount++;
if (deletionCount % batchSize == 0) {
if (log.isTraceEnabled())
log.trace("Flushing deletion batch, total deletion count so far is " + deletionCount);