Package org.infinispan.loaders.bucket

Examples of org.infinispan.loaders.bucket.Bucket
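The fragments below are collected from Infinispan cache store implementations that persist groups of cache entries as Bucket instances: a compressed cloud blob store and the JDBC binary cache store. Every fragment is truncated, so identifiers such as conn, ps, rs, log, marshaller, and batchSize are fields or locals of the enclosing classes.

The first fragment, apparently from the cloud store, is the start of a helper that buffers a compressed blob into a byte array before BZip2-decompressing it into a Bucket: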



   private Bucket uncompress(Blob blob, String bucketName, InputStream content) throws IOException, CacheLoaderException, ClassNotFoundException {
      //TODO go back to fully streamed version and get rid of the byte buffers
      BZip2CompressorInputStream is;
      Bucket bucket;
      ByteArrayOutputStream bos = new ByteArrayOutputStream();

      Streams.copy(content, bos);
      final byte[] compressedByteArray = bos.toByteArray();
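Next, a key-loading fragment from the JDBC binary cache store: it reads one row, unmarshalls the BLOB column into a Bucket, and restores the transient bucket id before returning: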


         if (!rs.next()) {
            return null;
         }
         String bucketName = rs.getString(1);
         InputStream inputStream = rs.getBinaryStream(2);
         Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), inputStream);
         bucket.setBucketId(bucketName); // bucket name is volatile, so not persisted
         return bucket;
      } catch (SQLException e) {
         log.sqlFailureLoadingKey(String.valueOf(keyHashCode), e);
         throw new CacheLoaderException(String.format(
               "Sql failure while loading key: %s", keyHashCode), e);
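The longest fragment is from the store's expiry purge. Expired buckets are write-locked and collected, their expired entries are stripped, the surviving buckets are written back in JDBC batches, and buckets left empty are queued for deletion: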

               Integer key = rs.getInt(2);
               if (immediateLockForWriting(key)) {
                  if (log.isTraceEnabled()) {
                     log.tracef("Adding bucket keyed %s for purging.", key);
                  }
                  Bucket bucket = null;
                  try {
                     InputStream binaryStream = rs.getBinaryStream(1);
                     bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
                  } catch (Exception ex) {
                     // If something goes wrong during unmarshalling, unlock the
                     // key before rethrowing
                     unlock(key);
                     throw ex;
                  }
                  bucket.setBucketId(key);
                  expiredBuckets.add(bucket);
               } else {
                  if (log.isTraceEnabled()) {
                     log.tracef("Could not acquire write lock for %s, this won't be purged even though it has expired elements", key);
                  }
               }
            }
         } catch (Exception ex) {
            // if anything fails, make sure the bucket locks are released
            releaseLocks(expiredBuckets);
            log.failedClearingJdbcCacheStore(ex);
            throw new CacheLoaderException("Failed clearing JdbcBinaryCacheStore", ex);
         } finally {
            JdbcUtil.safeClose(ps);
            JdbcUtil.safeClose(rs);
         }

         if (log.isTraceEnabled()) {
            log.tracef("Found following buckets: %s which are about to be expired", expiredBuckets);
         }

         if (expiredBuckets.isEmpty()) {
            return;
         }

         Set<Bucket> emptyBuckets = new HashSet<Bucket>();
         // now update all the buckets in batch
         try {
            String sql = tableManipulation.getUpdateRowSql();
            ps = conn.prepareStatement(sql);
            int updateCount = 0;
            Iterator<Bucket> it = expiredBuckets.iterator();
            while (it.hasNext()) {
               Bucket bucket = it.next();
               bucket.removeExpiredEntries();
               if (!bucket.isEmpty()) {
                  ByteBuffer byteBuffer = JdbcUtil.marshall(getMarshaller(), bucket);
                  ps.setBinaryStream(1, byteBuffer.getStream(), byteBuffer.getLength());
                  ps.setLong(2, bucket.timestampOfFirstEntryToExpire());
                  ps.setString(3, bucket.getBucketIdAsString());
                  ps.addBatch();
                  updateCount++;
                  if (updateCount % batchSize == 0) {
                     ps.executeBatch();
                     if (log.isTraceEnabled()) {
                        log.tracef("Flushing batch, update count is: %d", updateCount);
                     }
                  }
               } else {
                  it.remove();
                  emptyBuckets.add(bucket);
               }
            }
            // flush the batch
            if (updateCount % batchSize != 0) {
               if (log.isTraceEnabled()) {
                  log.tracef("Flushing batch, update count is: %d", updateCount);
               }
               ps.executeBatch();
            }
            if (log.isTraceEnabled()) {
               log.tracef("Updated %d buckets.", updateCount);
            }
         } catch (InterruptedException ie) {
            if (log.isTraceEnabled()) {
               log.trace("Interrupted while marshalling to purge expired entries");
            }
            Thread.currentThread().interrupt();
         } catch (Exception ex) {
            // if anything fails, make sure the bucket locks are released
            releaseLocks(emptyBuckets);
            log.failedClearingJdbcCacheStore(ex);
            throw new CacheLoaderException("Failed clearing JdbcBinaryCacheStore", ex);
         } finally {
            // release locks for the updated buckets. This won't include empty
            // buckets, as these were migrated to emptyBuckets
            releaseLocks(expiredBuckets);
            JdbcUtil.safeClose(ps);
         }

         if (log.isTraceEnabled()) {
            log.tracef("About to remove empty buckets %s", emptyBuckets);
         }

         if (emptyBuckets.isEmpty()) {
            return;
         }
         // then remove the empty buckets
         try {
            String sql = tableManipulation.getDeleteRowSql();
            ps = conn.prepareStatement(sql);
            int deletionCount = 0;
            for (Bucket bucket : emptyBuckets) {
               ps.setString(1, bucket.getBucketIdAsString());
               ps.addBatch();
               deletionCount++;
               if (deletionCount % batchSize == 0) {
                  if (log.isTraceEnabled()) {
                     log.tracef("Flushing deletion batch, total deletion count so far is %d", deletionCount);
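The purge relies on a standard JDBC batching idiom: queue rows with addBatch(), flush every batchSize rows, and flush once more for any remainder. Below is a minimal, self-contained sketch of that idiom against a hypothetical buckets(id, data) table, not Infinispan's actual schema:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;

public final class BatchUpdateSketch {
   // Writes each (id, payload) pair, executing the batch every batchSize rows
   // and once more for the remainder, mirroring the purge fragment above.
   static void updateAll(Connection conn, Map<String, byte[]> rows, int batchSize) throws SQLException {
      try (PreparedStatement ps = conn.prepareStatement(
            "UPDATE buckets SET data = ? WHERE id = ?")) { // hypothetical table
         int count = 0;
         for (Map.Entry<String, byte[]> row : rows.entrySet()) {
            ps.setBytes(1, row.getValue());
            ps.setString(2, row.getKey());
            ps.addBatch();
            if (++count % batchSize == 0) {
               ps.executeBatch(); // flush a full batch
            }
         }
         if (count % batchSize != 0) {
            ps.executeBatch(); // flush the remainder
         }
      }
   }
}

The next fragment is a set of ResultSet callbacks from the same store. Each callback unmarshalls a row's Bucket and then walks its stored entries, skipping anything already expired: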


         @Override
         public void loadAllProcess(ResultSet rs, Set<InternalCacheEntry> result) throws SQLException, CacheLoaderException {
            InputStream binaryStream = rs.getBinaryStream(1);
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
            long currentTimeMillis = timeService.wallClockTime();
            for (InternalCacheEntry ice: bucket.getStoredEntries()) {
               if (!ice.isExpired(currentTimeMillis)) {
                  result.add(ice);
               }
            }
         }

         @Override
         public void loadAllProcess(ResultSet rs, Set<InternalCacheEntry> result, int maxEntries) throws SQLException, CacheLoaderException {
            InputStream binaryStream = rs.getBinaryStream(1);
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
            long currentTimeMillis = timeService.wallClockTime();
            for (InternalCacheEntry ice: bucket.getStoredEntries()) {
               if (!ice.isExpired(currentTimeMillis))
                  result.add(ice);

               if (result.size() >= maxEntries) // >= so the cap cannot be overshot if result was pre-filled
                  break;
            }
         }

         @Override
         public void loadAllKeysProcess(ResultSet rs, Set<Object> keys, Set<Object> keysToExclude) throws SQLException, CacheLoaderException {
            InputStream binaryStream = rs.getBinaryStream(1);
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
            long currentTimeMillis = timeService.wallClockTime();
            for (InternalCacheEntry ice: bucket.getStoredEntries()) {
               if (!ice.isExpired(currentTimeMillis) && includeKey(ice.getKey(), keysToExclude)) {
                  keys.add(ice.getKey());
               }
            }
         }

         @Override
         public void toStreamProcess(ResultSet rs, InputStream is, ObjectOutput objectOutput) throws CacheLoaderException, SQLException, IOException {
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), is);
            String bucketName = rs.getString(2);
            marshaller.objectToObjectStream(bucketName, objectOutput);
            marshaller.objectToObjectStream(bucket, objectOutput);
         }

         @Override
         public boolean fromStreamProcess(Object bucketName, PreparedStatement ps, ObjectInput objectInput)
               throws SQLException, CacheLoaderException, IOException, ClassNotFoundException, InterruptedException {
            if (bucketName instanceof String) {
               Bucket bucket = (Bucket) marshaller.objectFromObjectStream(objectInput);
               ByteBuffer buffer = JdbcUtil.marshall(getMarshaller(), bucket);
               ps.setBinaryStream(1, buffer.getStream(), buffer.getLength());
               ps.setLong(2, bucket.timestampOfFirstEntryToExpire());
               ps.setString(3, (String) bucketName);
               return true;
            } else {
               return false;
            }
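Taken together, the callbacks show the minimal read path for a persisted bucket. Here is a short sketch of that path; the import locations are as they stand in Infinispan 5.x, and liveEntries is a made-up helper name:

import java.io.InputStream;
import java.util.HashSet;
import java.util.Set;

import org.infinispan.container.entries.InternalCacheEntry;
import org.infinispan.loaders.bucket.Bucket;
import org.infinispan.loaders.jdbc.JdbcUtil;
import org.infinispan.marshall.StreamingMarshaller;

final class BucketReadSketch {
   // Unmarshalls a persisted bucket and returns only its non-expired entries,
   // the same filtering the loadAllProcess callbacks above perform.
   static Set<InternalCacheEntry> liveEntries(StreamingMarshaller marshaller,
                                              InputStream persistedForm,
                                              String bucketId,
                                              long now) throws Exception {
      Bucket bucket = (Bucket) JdbcUtil.unmarshall(marshaller, persistedForm);
      bucket.setBucketId(bucketId); // the id is transient, so restore it after loading
      Set<InternalCacheEntry> live = new HashSet<InternalCacheEntry>();
      for (InternalCacheEntry ice : bucket.getStoredEntries()) {
         if (!ice.isExpired(now)) {
            live.add(ice);
         }
      }
      return live;
   }
}

An earlier revision of the key loader is nearly identical to the one above, except that the id setter is still called setBucketName and failures are logged directly: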


         ps.setString(1, keyHashCode);
         rs = ps.executeQuery();
         if (!rs.next()) return null;
         String bucketName = rs.getString(1);
         InputStream inputStream = rs.getBinaryStream(2);
         Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), inputStream);
         bucket.setBucketName(bucketName); // bucket name is volatile, so not persisted
         return bucket;
      } catch (SQLException e) {
         String message = "sql failure while loading key: " + keyHashCode;
         log.error(message, e);
         throw new CacheLoaderException(message, e);
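A loadAll() fragment gathers every stored entry. Unlike the callbacks above, it does not filter expired entries; it simply copies each bucket's contents into the result set: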

         rs = ps.executeQuery();
         rs.setFetchSize(config.getFetchSize());
         Set<InternalCacheEntry> result = new HashSet<InternalCacheEntry>();
         while (rs.next()) {
            InputStream binaryStream = rs.getBinaryStream(1);
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
            result.addAll(bucket.getStoredEntries());
         }
         return result;
      } catch (SQLException e) {
         String message = "sql failure while loading data";
         log.error(message, e);
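Finally, a fromStream() fragment rebuilds buckets from a state-transfer stream, inserting them in batches. The stream alternates bucket names and buckets, and the read loop stops at the first non-String value: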

         int readBuckets = 0;
         int batchSize = config.getBatchSize();
         Object bucketName = marshaller.objectFromObjectStream(objectInput);
         while (bucketName instanceof String) {
            Bucket bucket = (Bucket) marshaller.objectFromObjectStream(objectInput);
            readBuckets++;
            ByteBuffer buffer = JdbcUtil.marshall(getMarshaller(), bucket);
            ps.setBinaryStream(1, buffer.getStream(), buffer.getLength());
            ps.setLong(2, bucket.timestampOfFirstEntryToExpire());
            ps.setString(3, (String) bucketName);
            ps.addBatch(); // queue the row; without this, executeBatch() below would flush nothing
            if (readBuckets % batchSize == 0) {
               ps.executeBatch();
               if (log.isTraceEnabled())
                  log.trace("Executing batch " + (readBuckets / batchSize) + ", batch size is " + batchSize);
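A sketch of the writer side implied by that layout; the terminator below is a guess, since the real end-of-stream marker is not visible in these fragments:

import java.io.ObjectOutput;
import java.util.Map;

import org.infinispan.loaders.bucket.Bucket;
import org.infinispan.marshall.StreamingMarshaller;

final class BucketStreamSketch {
   // Writes (name, bucket) pairs in the order fromStreamProcess expects,
   // then a non-String object so the reader's instanceof check terminates.
   static void writeBuckets(StreamingMarshaller marshaller, ObjectOutput out,
                            Map<String, Bucket> buckets) throws Exception {
      for (Map.Entry<String, Bucket> e : buckets.entrySet()) {
         marshaller.objectToObjectStream(e.getKey(), out);   // bucket name first...
         marshaller.objectToObjectStream(e.getValue(), out); // ...then the bucket itself
      }
      marshaller.objectToObjectStream(Integer.valueOf(0), out); // hypothetical terminator
   }
}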
