Package org.infinispan.loaders.bucket

Examples of org.infinispan.loaders.bucket.Bucket$Externalizer
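
None of the excerpts below show the externalizer itself; they only marshal and unmarshal Bucket instances through the store's marshaller (JdbcUtil.unmarshall, JdbcUtil.marshall, objectFromInputStreamInReentrantMode and friends). For orientation, here is a minimal sketch of what such an externalizer could look like, written as a standalone class rather than the static nested Bucket.Externalizer the title refers to. It is an assumption, not the project's code: the Externalizer interface and its package, the no-arg Bucket constructor and the addEntry(...) helper are unverified here; only getStoredEntries() appears in the excerpts. Note that the excerpts keep the bucket name outside the marshalled form (it is written separately and re-attached via setBucketName or setBucketId after unmarshalling), so the sketch serializes only the stored entries.

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

import org.infinispan.container.entries.InternalCacheEntry;
import org.infinispan.loaders.bucket.Bucket;

//Sketch only: the interface name/package, the Bucket() constructor and addEntry(...) are assumptions.
public class BucketExternalizerSketch implements org.infinispan.marshall.Externalizer<Bucket> {

   @Override
   public void writeObject(ObjectOutput output, Bucket bucket) throws IOException {
      //write the number of stored entries, then each entry; each entry carries its own key
      output.writeInt(bucket.getStoredEntries().size());
      for (InternalCacheEntry entry : bucket.getStoredEntries()) {
         output.writeObject(entry);
      }
   }

   @Override
   public Bucket readObject(ObjectInput input) throws IOException, ClassNotFoundException {
      Bucket bucket = new Bucket();                                   //assumed constructor
      int size = input.readInt();
      for (int i = 0; i < size; i++) {
         bucket.addEntry((InternalCacheEntry) input.readObject());    //assumed helper
      }
      return bucket;
   }
}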


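Streaming the store's contents to an object stream: each row's binary column is unmarshalled into a Bucket, the bucket name and the bucket are written as a pair, and a delimiter is written last to mark the end of the stream.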
         ps = conn.prepareStatement(sql);
         rs = ps.executeQuery();
         rs.setFetchSize(config.getFetchSize());
         while (rs.next()) {
            InputStream inputStream = rs.getBinaryStream(1);
            Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), inputStream);
            String bucketName = rs.getString(2);
            marshaller.objectToObjectStream(bucketName, objectOutput);
            marshaller.objectToObjectStream(bucket, objectOutput);
         }
         marshaller.objectToObjectStream(BINARY_STREAM_DELIMITER, objectOutput);


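Purging expired entries from a JDBC-backed bucket store: expired buckets are collected first (buckets whose lock cannot be acquired immediately are skipped), the survivors are rewritten in batches once their expired entries are removed, and buckets left empty are deleted in a second batch.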
         while (rs.next()) {
            String key = rs.getString(2);
            if (immediateLockForWriting(key)) {
               if (log.isTraceEnabled()) log.trace("Adding bucket keyed " + key + " for purging.");
               InputStream binaryStream = rs.getBinaryStream(1);
               Bucket bucket = (Bucket) JdbcUtil.unmarshall(getMarshaller(), binaryStream);
               bucket.setBucketName(key);
               expiredBuckets.add(bucket);
            } else {
               if (log.isTraceEnabled())
                  log.trace("Could not acquire write lock for " + key + ", this won't be purged even though it has expired elements");
            }
         }
      } catch (SQLException ex) {
         //if something goes wrong, make sure the bucket locks are released
         releaseLocks(expiredBuckets);
         connectionFactory.releaseConnection(conn);
         logAndThrow(ex, "Failed clearing JdbcBinaryCacheStore");
      } finally {
         JdbcUtil.safeClose(ps);
         JdbcUtil.safeClose(rs);
      }

      if (log.isTraceEnabled())
         log.trace("Found following buckets: " + expiredBuckets + " which are about to be expired");

      if (expiredBuckets.isEmpty()) return;
      Set<Bucket> emptyBuckets = new HashSet<Bucket>();
      //now update all the buckets in batch
      try {
         String sql = tableManipulation.getUpdateRowSql();
         ps = conn.prepareStatement(sql);
         int updateCount = 0;
         Iterator<Bucket> it = expiredBuckets.iterator();
         while (it.hasNext()) {
            Bucket bucket = it.next();
            bucket.removeExpiredEntries();
            if (!bucket.isEmpty()) {
               ByteBuffer byteBuffer = JdbcUtil.marshall(getMarshaller(), bucket);
               ps.setBinaryStream(1, byteBuffer.getStream(), byteBuffer.getLength());
               ps.setLong(2, bucket.timestampOfFirstEntryToExpire());
               ps.setString(3, bucket.getBucketName()); //the update SQL also needs the bucket key bound for its WHERE clause
               ps.addBatch();
               updateCount++;
               if (updateCount % batchSize == 0) {
                  ps.executeBatch();
                  if (log.isTraceEnabled()) log.trace("Flushing batch, update count is: " + updateCount);
               }
            } else {
               it.remove();
               emptyBuckets.add(bucket);
            }
         }
         //flush the batch
         if (updateCount % batchSize != 0) {
            ps.executeBatch();
         }
         if (log.isTraceEnabled()) log.trace("Updated " + updateCount + " buckets.");
      } catch (SQLException ex) {
         //if something goes wrong, make sure the bucket locks are released
         releaseLocks(emptyBuckets);
         connectionFactory.releaseConnection(conn);
         logAndThrow(ex, "Failed clearing JdbcBinaryCacheStore");
      } finally {
         //release locks for the updated buckets. This won't include the empty buckets, as those were moved to the emptyBuckets set
         releaseLocks(expiredBuckets);
         JdbcUtil.safeClose(ps);
      }


      if (log.isTraceEnabled()) log.trace("About to remove empty buckets " + emptyBuckets);

      if (emptyBuckets.isEmpty()) return;
      //then remove the empty buckets
      try {
         String sql = tableManipulation.getDeleteRowSql();
         ps = conn.prepareStatement(sql);
         int deletionCount = 0;
         for (Bucket bucket : emptyBuckets) {
            ps.setString(1, bucket.getBucketName());
            ps.addBatch();
            deletionCount++;
            if (deletionCount % batchSize == 0) {
               if (log.isTraceEnabled())
                  log.trace("Flushing deletion batch, total deletion count so far is " + deletionCount);

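Iterating over the numerically named bucket files under the store's root, handing each loaded Bucket to a handler; the loop stops as soon as the handler returns true.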
   protected void loopOverBuckets(BucketHandler handler) throws CacheLoaderException {
      try {
         File[] listFiles;
         if (root != null && (listFiles = root.listFiles(NUMERIC_NAMED_FILES_FILTER)) != null) {
            for (File bucketFile : listFiles) {
               Bucket bucket = loadBucket(bucketFile);
               if (handler.handle(bucket)) {
                  break;
               }
            }
         }
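
The BucketHandler callback used above is not shown in these excerpts. Below is a minimal sketch under the assumption that it is a single-method interface whose handle(Bucket) returns true to stop the iteration, which is what the break in the loop implies; the interface definition and its throws clause are assumptions.

import java.util.HashSet;
import java.util.Set;

import org.infinispan.container.entries.InternalCacheEntry;
import org.infinispan.loaders.CacheLoaderException;
import org.infinispan.loaders.bucket.Bucket;

//assumed shape of the callback; the real interface may differ
interface BucketHandler {
   boolean handle(Bucket bucket) throws CacheLoaderException;
}

//hypothetical handler that collects every stored entry and never aborts the loop
class CollectingBucketHandler implements BucketHandler {

   private final Set<InternalCacheEntry> entries = new HashSet<InternalCacheEntry>();

   @Override
   public boolean handle(Bucket bucket) throws CacheLoaderException {
      if (bucket != null) {
         entries.addAll(bucket.getStoredEntries());
      }
      return false; //false keeps the iteration going; true would make loopOverBuckets break
   }

   Set<InternalCacheEntry> getEntries() {
      return entries;
   }
}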

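Purging a single bucket file: the bucket is loaded under a read lock, and only when removeExpiredEntries() actually removed something is the lock upgraded and the bucket written back.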
   private boolean doPurge(File bucketFile) {
      Integer bucketKey = Integer.valueOf(bucketFile.getName());
      boolean interrupted = false;
      try {
         lockForReading(bucketKey);
         Bucket bucket = loadBucket(bucketFile);

         if (bucket != null) {
            if (bucket.removeExpiredEntries()) {
               upgradeLock(bucketKey);
               updateBucket(bucket);
            }
         } else {
            // Bucket may be an empty 0-length file

         return null;
      }
   }

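Loading a bucket from its file: any pending output is flushed first, zero-length files short-circuit to null, and a successfully unmarshalled bucket is tagged with the file name as its bucket id.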
   protected Bucket loadBucket(File bucketFile) throws CacheLoaderException, InterruptedException {
      Bucket bucket = null;
      if (bucketFile.exists()) {
         if (trace) {
            log.trace("Found bucket file: '" + bucketFile + "'");
         }
         InputStream is = null;
         try {
            // It could happen that the output buffer might not have been
            // flushed, so just in case, flush it to be able to read it.
            fileSync.flush(bucketFile);
            if (bucketFile.length() == 0) {
               // short circuit
               return null;
            }
            is = new FileInputStream(bucketFile);
            bucket = (Bucket) objectFromInputStreamInReentrantMode(is);
         } catch (InterruptedException ie) {
            throw ie;
         } catch (Exception e) {
            log.errorReadingFromFile(bucketFile.getAbsoluteFile(), e);
            throw new CacheLoaderException("Error while reading from file", e);
         } finally {
            safeClose(is);
         }
      }
      if (bucket != null) {
         bucket.setBucketId(bucketFile.getName());
      }
      return bucket;
   }

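A loopOverBuckets variant that walks every file under the store's root directory instead of only the numerically named ones.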
   protected void loopOverBuckets(BucketHandler handler) throws CacheLoaderException {
      try {
         File[] listFiles;
         if (root != null && (listFiles = root.listFiles()) != null) {
            for (File bucketFile : listFiles) {
               Bucket bucket = loadBucket(bucketFile);
               if (handler.handle(bucket)) {
                  break;
               }
            }
         }

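A purge task submitted to an executor, followed by the equivalent inline path: each bucket file is loaded, expired entries are removed, and the bucket is written back, with the write lock taken only when something was actually removed.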
                  @Override
                  public void run() {
                     Integer bucketKey = Integer.valueOf(bucketFile.getName());
                     boolean lockAcquired = false;
                     try {
                        Bucket bucket = loadBucket(bucketFile);

                        if (bucket != null) {
                           if (bucket.removeExpiredEntries()) {
                              lockForWriting(bucketKey);
                              lockAcquired = true;
                           }
                           updateBucket(bucket);
                        }
                     } catch (InterruptedException ie) {
                        log.debug("Interrupted, so finish work.");
                     } catch (CacheLoaderException e) {
                        log.problemsPurgingFile(bucketFile, e);
                     } finally {
                        if (lockAcquired)
                           unlock(bucketKey);
                     }
                  }
               });
            } else {
               Integer bucketKey = Integer.valueOf(bucketFile.getName());
               boolean lockAcquired = false;
               try {
                  Bucket bucket = loadBucket(bucketFile);

                  if (bucket != null) {
                     if (bucket.removeExpiredEntries()) {
                        lockForWriting(bucketKey);
                        lockAcquired = true;
                     }
                     updateBucket(bucket);
                  }


   }

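Loading every entry in the store: each bucket is loaded, expired entries are purged (saving the bucket back if anything was removed), and the remaining stored entries are collected into the result set.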
   protected Set<InternalCacheEntry> loadAllLockSafe() throws CacheLoaderException {
      Set<InternalCacheEntry> result = new HashSet<InternalCacheEntry>();
      for (File bucketFile : root.listFiles()) {
         Bucket bucket = loadBucket(bucketFile);
         if (bucket != null) {
            if (bucket.removeExpiredEntries()) {
               saveBucket(bucket);
            }
            result.addAll(bucket.getStoredEntries());
         }
      }
      return result;
   }

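Two loadBucket overloads from another store variant: the String overload resolves the bucket file under the root directory, and the File overload unmarshals the bucket with the configured marshaller and records the file name as the bucket name.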
   protected Bucket loadBucket(String bucketName) throws CacheLoaderException {
      return loadBucket(new File(root, bucketName));
   }

   protected Bucket loadBucket(File bucketFile) throws CacheLoaderException {
      Bucket bucket = null;
      if (bucketFile.exists()) {
         if (log.isTraceEnabled()) log.trace("Found bucket file: '" + bucketFile + "'");
         FileInputStream is = null;
         try {
            is = new FileInputStream(bucketFile);
            bucket = (Bucket) marshaller.objectFromInputStream(is);
         } catch (Exception e) {
            String message = "Error while reading from file: " + bucketFile.getAbsoluteFile();
            log.error(message, e);
            throw new CacheLoaderException(message, e);
         } finally {
            safeClose(is);
         }
      }
      if (bucket != null) {
         bucket.setBucketName(bucketFile.getName());
      }
      return bucket;
   }
