Package org.infinispan.loaders.bucket

Examples of org.infinispan.loaders.bucket.Bucket$Externalizer
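The snippets below come from Infinispan's bucket-based cache stores (the JDBC binary store and the file store). They never show the externalizer itself; they only marshall and unmarshall Bucket instances through it (JdbcUtil.marshall/unmarshall, objectFromInputStreamInReentrantMode) and then restore the transient bucket id. For orientation, here is a minimal sketch of what such an externalizer could look like, written against the generic writeObject/readObject contract of Infinispan 5.x's org.infinispan.marshall.Externalizer. The no-arg Bucket constructor and the addEntry(...) method are assumptions inferred from the surrounding code, not taken from the real Bucket$Externalizer; only getStoredEntries() and the setter for the bucket id actually appear in the snippets below.

   package org.infinispan.loaders.bucket;

   import java.io.IOException;
   import java.io.ObjectInput;
   import java.io.ObjectOutput;
   import java.util.ArrayList;
   import java.util.List;

   import org.infinispan.container.entries.InternalCacheEntry;
   import org.infinispan.marshall.Externalizer;

   // Sketch only: Bucket's no-arg constructor and addEntry(...) are assumed here.
   public class BucketExternalizerSketch implements Externalizer<Bucket> {

      @Override
      public void writeObject(ObjectOutput output, Bucket bucket) throws IOException {
         // Write only the stored entries. The bucket id is transient: the cache stores
         // re-attach it after unmarshalling via setBucketId(...)/setBucketName(...),
         // as the snippets below show.
         List<InternalCacheEntry> entries = new ArrayList<InternalCacheEntry>();
         for (InternalCacheEntry entry : bucket.getStoredEntries()) {
            entries.add(entry);
         }
         output.writeInt(entries.size());
         for (InternalCacheEntry entry : entries) {
            output.writeObject(entry);
         }
      }

      @Override
      public Bucket readObject(ObjectInput input) throws IOException, ClassNotFoundException {
         Bucket bucket = new Bucket();
         int numEntries = input.readInt();
         for (int i = 0; i < numEntries; i++) {
            bucket.addEntry((InternalCacheEntry) input.readObject());
         }
         return bucket;
      }
   }

Keeping the bucket id out of the serialized form is what forces every caller below to call setBucketId(...) (or setBucketName(...) in older revisions) right after unmarshalling, using the JDBC row key or the bucket file name as the source of truth.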


         // JDBC binary cache store, bucket lookup: column 1 holds the bucket id,
         // column 2 the marshalled bucket, which is unmarshalled through the
         // configured marshaller (and thus through Bucket's externalizer).
         if (!rs.next()) {
            return null;
         }
         String bucketName = rs.getString(1);
         InputStream inputStream = rs.getBinaryStream(2);
         Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), inputStream);
         bucket.setBucketId(bucketName); // the bucket id is transient (not persisted), so restore it from the row
         return bucket;
      } catch (SQLException e) {
         log.sqlFailureLoadingKey(String.valueOf(keyHashCode), e);
         throw new CacheLoaderException(String.format(
               "Sql failure while loading key: %s", keyHashCode), e);


            // JDBC binary cache store, expired-entry purge: lock each expired bucket's key,
            // unmarshall the bucket, then update or delete its row in a batch further below.
            Integer key = rs.getInt(2);
            if (immediateLockForWriting(key)) {
               if (log.isTraceEnabled()) {
                  log.tracef("Adding bucket keyed %s for purging.", key);
               }
               Bucket bucket = null;
               try {
                  InputStream binaryStream = rs.getBinaryStream(1);
                  bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
               } catch (Exception ex) {
                  // If something goes wrong during unmarshalling, unlock the key before rethrowing
                  unlock(key);
                  throw ex;
               }
               bucket.setBucketId(key);
               expiredBuckets.add(bucket);
            } else {
               if (log.isTraceEnabled()) {
                  log.tracef("Could not acquire write lock for %s, this won't be purged even though it has expired elements", key);
               }
            }
         }
      } catch (Exception ex) {
         // if something goes wrong, make sure the bucket locks are released
         releaseLocks(expiredBuckets);
         connectionFactory.releaseConnection(conn);
         log.failedClearingJdbcCacheStore(ex);
         throw new CacheLoaderException("Failed clearing JdbcBinaryCacheStore", ex);
      } finally {
         JdbcUtil.safeClose(ps);
         JdbcUtil.safeClose(rs);
      }

      if (log.isTraceEnabled()) {
         log.tracef("Found following buckets: %s which are about to be expired", expiredBuckets);
      }

      if (expiredBuckets.isEmpty()) {
         return;
      }
      Set<Bucket> emptyBuckets = new HashSet<Bucket>();
      // now update all the non-empty buckets in batches
      try {
         String sql = tableManipulation.getUpdateRowSql();
         ps = conn.prepareStatement(sql);
         int updateCount = 0;
         Iterator<Bucket> it = expiredBuckets.iterator();
         while (it.hasNext()) {
            Bucket bucket = it.next();
            bucket.removeExpiredEntries();
            if (!bucket.isEmpty()) {
               ByteBuffer byteBuffer = JdbcUtil.marshall(getMarshaller(), bucket);
               ps.setBinaryStream(1, byteBuffer.getStream(), byteBuffer.getLength());
               ps.setLong(2, bucket.timestampOfFirstEntryToExpire());
               ps.setString(3, bucket.getBucketIdAsString());
               ps.addBatch();
               updateCount++;
               if (updateCount % batchSize == 0) {
                  ps.executeBatch();
                  if (log.isTraceEnabled()) {
                     log.tracef("Flushing batch, update count is: %d", updateCount);
                  }
               }
            } else {
               it.remove();
               emptyBuckets.add(bucket);
            }
         }
         // flush whatever is left in the batch
         if (updateCount % batchSize != 0) {
            if (log.isTraceEnabled()) {
               log.tracef("Flushing batch, update count is: %d", updateCount);
            }
            ps.executeBatch();
         }
         if (log.isTraceEnabled()) {
            log.tracef("Updated %d buckets.", updateCount);
         }
      } catch (SQLException ex) {
         // if something goes wrong, make sure the bucket locks are released
         releaseLocks(emptyBuckets);
         connectionFactory.releaseConnection(conn);
         log.failedClearingJdbcCacheStore(ex);
         throw new CacheLoaderException("Failed clearing JdbcBinaryCacheStore", ex);
      } catch (InterruptedException ie) {
         if (log.isTraceEnabled()) {
            log.trace("Interrupted while marshalling to purge expired entries");
         }
         Thread.currentThread().interrupt();
      } finally {
         // release locks for the updated buckets. This won't include the empty buckets, as those were moved to emptyBuckets.
         releaseLocks(expiredBuckets);
         JdbcUtil.safeClose(ps);
      }


      if (log.isTraceEnabled()) {
         log.tracef("About to remove empty buckets %s", emptyBuckets);
      }

      if (emptyBuckets.isEmpty()) {
         return;
      }
      //then remove the empty buckets
      try {
         String sql = tableManipulation.getDeleteRowSql();
         ps = conn.prepareStatement(sql);
         int deletionCount = 0;
         for (Bucket bucket : emptyBuckets) {
            ps.setString(1, bucket.getBucketIdAsString());
            ps.addBatch();
            deletionCount++;
            if (deletionCount % batchSize == 0) {
               if (log.isTraceEnabled()) {
                  log.tracef("Flushing deletion batch, total deletion count so far is %d", deletionCount);

   // FileCacheStore: iterate over all bucket files on disk, load each one and hand it
   // to the handler until the handler signals that it is done.
   protected void loopOverBuckets(BucketHandler handler) throws CacheLoaderException {
      try {
         File[] listFiles;
         if (root != null && (listFiles = root.listFiles()) != null) {
            for (File bucketFile : listFiles) {
               Bucket bucket = loadBucket(bucketFile);
               if (handler.handle(bucket)) {
                  break;
               }
            }
         }

               // FileCacheStore purge (multi-threaded path): each bucket file is purged on the
               // purger service; the else branch below does the same work inline.
               purgerService.execute(new Runnable() {
                  @Override
                  public void run() {
                     Integer bucketKey = Integer.valueOf(bucketFile.getName());
                     try {
                        Bucket bucket = loadBucket(bucketFile);

                        if (bucket != null) {
                           if (bucket.removeExpiredEntries())
                              lockForWriting(bucketKey);
                           updateBucket(bucket);
                        }
                     } catch (InterruptedException ie) {
                        if (log.isDebugEnabled()) {
                           log.debug("Interrupted, so finish work.");
                        }
                     } catch (CacheLoaderException e) {
                        log.problemsPurgingFile(bucketFile, e);
                     } finally {
                        unlock(bucketKey);
                     }
                  }
               });
            } else {
               Integer bucketKey = Integer.valueOf(bucketFile.getName());
               try {
                  Bucket bucket = loadBucket(bucketFile);

                  if (bucket != null) {
                     if (bucket.removeExpiredEntries())
                        lockForWriting(bucketKey);
                     updateBucket(bucket);
                  }
               } finally {
                  unlock(bucketKey);

         return null;
      }
   }

   // FileCacheStore.loadBucket: flush any pending writes, read the bucket file,
   // unmarshall the Bucket and restore its id from the file name.
   protected Bucket loadBucket(File bucketFile) throws CacheLoaderException, InterruptedException {
      Bucket bucket = null;
      if (bucketFile.exists()) {
         if (trace) {
            log.trace("Found bucket file: '" + bucketFile + "'");
         }
         InputStream is = null;
         try {
            // It could happen that the output buffer might not have been
            // flushed, so just in case, flush it to be able to read it.
            fileSync.flush(bucketFile);
            if (bucketFile.length() == 0) {
               // short circuit
               return null;
            }
            is = new FileInputStream(bucketFile);
            bucket = (Bucket) objectFromInputStreamInReentrantMode(is);
         } catch (InterruptedException ie) {
            throw ie;
         } catch (Exception e) {
            log.errorReadingFromFile(bucketFile.getAbsoluteFile(), e);
            throw new CacheLoaderException("Error while reading from file", e);
         } finally {
            safeClose(is);
         }
      }
      if (bucket != null) {
         bucket.setBucketId(bucketFile.getName());
      }
      return bucket;
   }

   // Another revision of loopOverBuckets, this time including the handling that stops
   // the iteration when the loading thread is interrupted.
   protected void loopOverBuckets(BucketHandler handler) throws CacheLoaderException {
      try {
         File[] listFiles;
         if (root != null && (listFiles = root.listFiles()) != null) {
            for (File bucketFile : listFiles) {
               Bucket bucket = loadBucket(bucketFile);
               if (handler.handle(bucket)) break;
            }
         }
      } catch (InterruptedException ie) {
         if (log.isDebugEnabled()) log.debug("Interrupted, so stop looping over buckets.");

            // Purge loop from an older FileCacheStore revision: load each bucket file, drop its
            // expired entries and write the bucket back, optionally on the purger service.
            for (final File bucketFile : root.listFiles()) {
               if (multiThreadedPurge) {
                  purgerService.execute(new Runnable() {
                     @Override
                     public void run() {
                        Bucket bucket;
                        try {
                           if ((bucket = loadBucket(bucketFile)) != null && bucket.removeExpiredEntries())
                              updateBucket(bucket);
                        } catch (InterruptedException ie) {
                           if (log.isDebugEnabled()) log.debug("Interrupted, so finish work.");
                        } catch (CacheLoaderException e) {
                           log.warn("Problems purging file " + bucketFile, e);
                        }
                     }
                  });
               } else {
                  Bucket bucket;
                  if ((bucket = loadBucket(bucketFile)) != null && bucket.removeExpiredEntries()) updateBucket(bucket);
               }
            }
         } catch (InterruptedException ie) {
            if (log.isDebugEnabled()) log.debug("Interrupted, so stop loading and finish with purging.");
            Thread.currentThread().interrupt();

         return null;
      }
   }

   // Older loadBucket variant: same flow as above, but the bucket name (rather than the
   // bucket id) is restored from the file name, and there is no pre-read flush.
   protected Bucket loadBucket(File bucketFile) throws CacheLoaderException, InterruptedException {
      Bucket bucket = null;
      if (bucketFile.exists()) {
         if (log.isTraceEnabled()) log.trace("Found bucket file: '" + bucketFile + "'");
         FileInputStream is = null;
         try {
            is = new FileInputStream(bucketFile);
            bucket = (Bucket) objectFromInputStreamInReentrantMode(is);
         } catch (InterruptedException ie) {
            throw ie;
         } catch (Exception e) {
            String message = "Error while reading from file: " + bucketFile.getAbsoluteFile();
            log.error(message, e);
            throw new CacheLoaderException(message, e);
         } finally {
            safeClose(is);
         }
      }
      if (bucket != null) {
         bucket.setBucketName(bucketFile.getName());
      }
      return bucket;
   }

         }

         // Result-set callbacks of the JDBC binary store: each row carries a marshalled bucket
         // in column 1, which is unmarshalled through Bucket's externalizer before its
         // non-expired entries (or their keys) are collected.
         @Override
         public void loadAllProcess(ResultSet rs, Set<InternalCacheEntry> result) throws SQLException, CacheLoaderException {
            InputStream binaryStream = rs.getBinaryStream(1);
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
            long currentTimeMillis = System.currentTimeMillis();
            for (InternalCacheEntry ice: bucket.getStoredEntries()) {
               if (!ice.isExpired(currentTimeMillis)) {
                  result.add(ice);
               }
            }
         }

         @Override
         public void loadAllProcess(ResultSet rs, Set<InternalCacheEntry> result, int maxEntries) throws SQLException, CacheLoaderException {
            InputStream binaryStream = rs.getBinaryStream(1);
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
            long currentTimeMillis = System.currentTimeMillis();
            for (InternalCacheEntry ice: bucket.getStoredEntries()) {
               if (!ice.isExpired(currentTimeMillis))
                  result.add(ice);

               if (result.size() == maxEntries)
                  break;
            }
         }

         @Override
         public void loadAllKeysProcess(ResultSet rs, Set<Object> keys, Set<Object> keysToExclude) throws SQLException, CacheLoaderException {
            InputStream binaryStream = rs.getBinaryStream(1);
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
            long currentTimeMillis = System.currentTimeMillis();
            for (InternalCacheEntry ice: bucket.getStoredEntries()) {
               if (!ice.isExpired(currentTimeMillis) && includeKey(ice.getKey(), keysToExclude)) {
                  keys.add(ice.getKey());
               }
            }
         }

         @Override
         public void toStreamProcess(ResultSet rs, InputStream is, ObjectOutput objectOutput) throws CacheLoaderException, SQLException, IOException {
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), is);
            String bucketName = rs.getString(2);
            marshaller.objectToObjectStream(bucketName, objectOutput);
            marshaller.objectToObjectStream(bucket, objectOutput);
         }

         @Override
         public boolean fromStreamProcess(Object bucketName, PreparedStatement ps, ObjectInput objectInput)
               throws SQLException, CacheLoaderException, IOException, ClassNotFoundException, InterruptedException {
            if (bucketName instanceof String) {
               Bucket bucket = (Bucket) marshaller.objectFromObjectStream(objectInput);
               ByteBuffer buffer = JdbcUtil.marshall(getMarshaller(), bucket);
               ps.setBinaryStream(1, buffer.getStream(), buffer.getLength());
               ps.setLong(2, bucket.timestampOfFirstEntryToExpire());
               ps.setString(3, (String) bucketName);
               return true;
            } else {
               return false;
            }

