// NOTE(review): this is the tail of a larger method — the enclosing try and the
// declarations of conn/ps/rs/ecs/tasksScheduled/tasksCompleted/emptyBuckets start
// before this view. Comments below describe only what is visible here.
// Drain the completion service until every scheduled purge task has finished;
// unlockCompleted presumably releases the bucket locks held by finished tasks
// (TODO confirm against its definition).
while (tasksCompleted < tasksScheduled) {
tasksCompleted += unlockCompleted(ecs, true);
}
} catch (InterruptedException e) {
// Restore the thread's interrupt status before surfacing the failure.
Thread.currentThread().interrupt();
throw new PersistenceException("Interrupted purging JdbcBinaryStore", e);
}
// when all tasks have completed, we may have up to BATCH_SIZE empty buckets waiting to be deleted
PreparedStatement deletePs = null;
try {
deletePs = conn.prepareStatement(tableManipulation.getDeleteRowSql());
Bucket bucket;
// Queue a batched DELETE for every empty bucket collected by the purge tasks,
// unlocking each bucket id as soon as it has been added to the batch.
while ((bucket = emptyBuckets.poll()) != null) {
deletePs.setString(1, bucket.getBucketIdAsString());
deletePs.addBatch();
unlock(bucket.getBucketId());
}
log.tracef("Flushing deletion batch");
deletePs.executeBatch();
log.tracef("Flushed deletion batch");
} catch (Exception ex) {
// NOTE(review): failures here are logged but deliberately not rethrown, so a
// failed batch delete does not abort the purge. However, if setString/addBatch
// throws mid-loop, buckets still sitting in emptyBuckets never reach unlock()
// in this catch — despite what the comment below says, no locks are released
// here; verify whether those locks are freed elsewhere.
// if something happens make sure bucket locks are being released
log.failedClearingJdbcCacheStore(ex);
} finally {
// safeClose tolerates a null statement if prepareStatement itself failed.
JdbcUtil.safeClose(deletePs);
}
} catch (Exception ex) {
// if something happens make sure buckets locks are released
log.failedClearingJdbcCacheStore(ex);
throw new PersistenceException("Failed clearing JdbcBinaryStore", ex);
} finally {
// Always close the outer statement/result set, then wait for any still-running
// tasks so their bucket locks get released before the connection goes back to
// the pool.
JdbcUtil.safeClose(ps);
JdbcUtil.safeClose(rs);
try {
while (tasksCompleted < tasksScheduled) {
tasksCompleted += unlockCompleted(ecs, true);
}
} catch (InterruptedException e) {
// Re-assert interrupt status before converting to PersistenceException.
// NOTE(review): throwing from this finally block will mask any exception
// already propagating from the try/catch above — confirm this is intended.
Thread.currentThread().interrupt();
throw new PersistenceException("Interrupted purging JdbcBinaryStore", e);
} finally {
connectionFactory.releaseConnection(conn);
}
}
}