Package: org.infinispan.persistence

Usage examples of org.infinispan.persistence.TaskContextImpl


   @Override
   public void process(KeyFilter keyFilter, CacheLoaderTask cacheLoaderTask, Executor executor, boolean loadValues, boolean loadMetadata) {

      int batchSize = 100;
      ExecutorAllCompletionService eacs = new ExecutorAllCompletionService(executor);
      final TaskContext taskContext = new TaskContextImpl();

      List<Map.Entry<byte[], byte[]>> entries = new ArrayList<Map.Entry<byte[], byte[]>>(batchSize);
      DBIterator it = db.iterator(new ReadOptions().fillCache(false));
      try {
         for (it.seekToFirst(); it.hasNext();) {
View Full Code Here


               ps = conn.prepareStatement(sql);
               ps.setLong(1, ctx.getTimeService().wallClockTime());
               rs = ps.executeQuery();
               rs.setFetchSize(tableManipulation.getFetchSize());

               TaskContext taskContext = new TaskContextImpl();
               while (rs.next()) {
                  String keyStr = rs.getString(2);
                  Object key = ((TwoWayKey2StringMapper) key2StringMapper).getKeyMapping(keyStr);
                  if (taskContext.isStopped()) break;
                  if (filter != null && !filter.shouldLoadKey(key))
                     continue;
                  InputStream inputStream = rs.getBinaryStream(1);
                  MarshalledEntry entry;
                  if (fetchValue || fetchMetadata) {
View Full Code Here

         ps = conn.prepareStatement(sql);
         ps.setLong(1, ctx.getTimeService().wallClockTime());
         rs = ps.executeQuery();
         rs.setFetchSize(tableManipulation.getFetchSize());
         ExecutorAllCompletionService ecs = new ExecutorAllCompletionService(executor);
         final TaskContextImpl taskContext = new TaskContextImpl();
         //we can do better here: ATM we load the entries in the caller's thread and process them in parallel
         // we can do the loading (expensive operation) in parallel as well.
         while (rs.next()) {
            InputStream binaryStream = rs.getBinaryStream(1);
            final Bucket bucket = unmarshallBucket(binaryStream);
            ecs.submit(new Callable<Void>() {
               @Override
               public Void call() throws Exception {
                  try {
                     for (MarshalledEntry me : bucket.getStoredEntries(filter, ctx.getTimeService()).values()) {
                        if (!taskContext.isStopped()) {
                           task.processEntry(me, taskContext);
                        }
                     }
                     return null;
                  } catch (Exception e) {
View Full Code Here

   public void process(KeyFilter keyFilter, CacheLoaderTask cacheLoaderTask, Executor executor, boolean loadValues, boolean loadMetadata) {

      int batchSize = 100;
      ExecutorCompletionService ecs = new ExecutorCompletionService(executor);
      int tasks = 0;
      final TaskContext taskContext = new TaskContextImpl();

      Set<Object> allKeys = new HashSet<Object>(batchSize);
      Set<Object> batch = new HashSet<Object>();
      loadAllKeys(state.get(), allKeys, keyFilter, executor);
      for (Iterator it = allKeys.iterator(); it.hasNext(); ) {
View Full Code Here

         }
      }

      ExecutorCompletionService<Void> ecs = new ExecutorCompletionService<Void>(executor);

      final TaskContextImpl taskContext = new TaskContextImpl();
      int taskCount = 0;
      for (Object k : keysToLoad) {
         if (taskContext.isStopped())
            break;

         taskCount++;
         final Object key = k;
         ecs.submit(new Callable<Void>() {
View Full Code Here

   @Override
   public void process(KeyFilter keyFilter, CacheLoaderTask cacheLoaderTask, Executor executor, boolean loadValues, boolean loadMetadata) {

      int batchSize = 100;
      ExecutorAllCompletionService eacs = new ExecutorAllCompletionService(executor);
      final TaskContext taskContext = new TaskContextImpl();

      List<Map.Entry<byte[], byte[]>> entries = new ArrayList<Map.Entry<byte[], byte[]>>(batchSize);
      DBIterator it = db.iterator(new ReadOptions().fillCache(false));
      try {
         for (it.seekToFirst(); it.hasNext();) {
View Full Code Here

   public void process(KeyFilter keyFilter, CacheLoaderTask cacheLoaderTask, Executor executor, boolean loadValues, boolean loadMetadata) {

      int batchSize = 100;
      ExecutorCompletionService ecs = new ExecutorCompletionService(executor);
      int tasks = 0;
      final TaskContext taskContext = new TaskContextImpl();

      Set<Object> allKeys = new HashSet<Object>(batchSize);
      Set<Object> batch = new HashSet<Object>();
      loadAllKeys(state.get(), allKeys, keyFilter, executor);
      for (Iterator it = allKeys.iterator(); it.hasNext(); ) {
View Full Code Here

   @Override
   public void process(KeyFilter keyFilter, CacheLoaderTask cacheLoaderTask, Executor executor, boolean loadValues, boolean loadMetadata) {

      int batchSize = 100;
      ExecutorAllCompletionService eacs = new ExecutorAllCompletionService(executor);
      final TaskContext taskContext = new TaskContextImpl();

      List<Map.Entry<byte[], byte[]>> entries = new ArrayList<Map.Entry<byte[], byte[]>>(batchSize);
      DBIterator it = db.iterator(new ReadOptions().fillCache(false));
      try {
         for (it.seekToFirst(); it.hasNext();) {
View Full Code Here

   @Override
   public void process(KeyFilter filter, CacheLoaderTask task, Executor executor, boolean fetchValue, boolean fetchMetadata) {
      record("process");
      final long currentTimeMillis = System.currentTimeMillis();
      TaskContext tx = new TaskContextImpl();
      for (Iterator<Map.Entry<Object, byte[]>> i = store.entrySet().iterator(); i.hasNext();) {
         Map.Entry<Object, byte[]> entry = i.next();
         if (tx.isStopped()) break;
         if (filter == null || filter.shouldLoadKey(entry.getKey())) {
            MarshalledEntry se = deserialize(entry.getKey(), entry.getValue());
            if (isExpired(se, currentTimeMillis)) {
               log.debugf("Key %s exists, but has expired.  Entry is %s", entry.getKey(), se);
               i.remove();
View Full Code Here

      }
   }

   @Override
   public void process(KeyFilter filter, final CacheLoaderTask task, final Executor executor, final boolean fetchValue, final boolean fetchMetadata) {
      final TaskContext context = new TaskContextImpl();
      final KeyFilter notNullFilter = PersistenceUtil.notNull(filter);
      final AtomicLong tasksSubmitted = new AtomicLong();
      final AtomicLong tasksFinished = new AtomicLong();
      forEachOnDisk(fetchMetadata, fetchValue, new EntryFunctor() {
         @Override
         public boolean apply(int file, int offset, int size,
                              final byte[] serializedKey, final byte[] serializedMetadata, final byte[] serializedValue,
                              long seqId, long expiration) throws IOException, ClassNotFoundException {
            if (context.isStopped()) {
               return false;
            }
            final Object key = marshaller.objectFromByteBuffer(serializedKey);
            if (!notNullFilter.accept(key)) {
               return true;
            }
            EntryPosition entry = temporaryTable.get(key);
            if (entry == null) {
               entry = index.getPosition(key, serializedKey);
            }
            if (entry != null) {
               FileProvider.Handle handle = fileProvider.getFile(entry.file);
               try {
                  EntryHeader header = EntryRecord.readEntryHeader(handle, entry.offset);
                  if (header == null) {
                     throw new IllegalStateException("Cannot read " + entry.file + ":" + entry.offset);
                  }
                  if (seqId < header.seqId()) {
                     return true;
                  }
               } finally {
                  handle.close();
               }
            } else {
               // entry is not in index = it was deleted
               return true;
            }
            if (serializedValue != null && (expiration < 0 || expiration > timeService.wallClockTime())) {
               executor.execute(new Runnable() {
                  @Override
                  public void run() {
                     try {
                        task.processEntry(marshalledEntryFactory.newMarshalledEntry(key,
                              serializedValue == null ? null : marshaller.objectFromByteBuffer(serializedValue),
                              serializedMetadata == null ? null : (InternalMetadata) marshaller.objectFromByteBuffer(serializedMetadata)),
                              context);
                     } catch (Exception e) {
                        log.error("Failed to process task for key " + key, e);
                     } finally {
                        long finished = tasksFinished.incrementAndGet();
                        if (finished == tasksSubmitted.longValue()) {
                           synchronized (context) {
                              context.notifyAll();
                           }
                        }
                     }
                  }
               });
               tasksSubmitted.incrementAndGet();
               return !context.isStopped();
            }
            return true;
         }
      }, new FileFunctor() {
         @Override
         public void afterFile(int file) {
            // noop
         }
      });
      while (tasksSubmitted.longValue() > tasksFinished.longValue()) {
         synchronized (context) {
            try {
               context.wait(100);
            } catch (InterruptedException e) {
               log.error("Iteration was interrupted", e);
               Thread.currentThread().interrupt();
               return;
            }
View Full Code Here

TOP

Related Classes of org.infinispan.persistence.TaskContextImpl

Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and is owned by Oracle Inc. Contact coftware#gmail.com.