Package org.hibernate.search.backend

Examples of org.hibernate.search.backend.Work
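
All of the examples below follow the same pattern: build a Work that describes one index operation (an entity instance or entity class, its identifier, and a WorkType such as ADD, DELETE, INDEX, PURGE or PURGE_ALL) and hand it to the Worker obtained from the SearchFactoryImplementor, together with a transaction context. Some excerpts pass a TransactionContext, others the event's session, depending on the Hibernate Search version. Below is a minimal sketch of that pattern, assuming Hibernate Search 3.x-era packages; the class name WorkSketch and its add method are made up for illustration.

import java.io.Serializable;

import org.hibernate.search.backend.TransactionContext;
import org.hibernate.search.backend.Work;
import org.hibernate.search.backend.WorkType;
import org.hibernate.search.engine.SearchFactoryImplementor;

// Sketch only: WorkSketch is not a Hibernate Search class.
public class WorkSketch {

  private final SearchFactoryImplementor searchFactoryImplementor;

  public WorkSketch(SearchFactoryImplementor searchFactoryImplementor) {
    this.searchFactoryImplementor = searchFactoryImplementor;
  }

  public void add(Object entity, Serializable id, TransactionContext transactionContext) {
    // one unit of index work: which entity, which identifier, which operation
    Work work = new Work( entity, id, WorkType.ADD );
    // the Worker registers the work against the current transaction and applies it
    // to the Lucene index, typically when the transaction completes
    searchFactoryImplementor.getWorker().performWork( work, transactionContext );
  }
}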


      searchFactoryImplementor.getWorker().performWork( work, transactionContext );

      // purge the subclasses
      Set<Class<?>> subClasses = builder.getMappedSubclasses();
      for ( Class<?> clazz : subClasses ) {
        @SuppressWarnings( "unchecked" )
        Work subClassWork = new Work( clazz, id, WorkType.PURGE_ALL );
        searchFactoryImplementor.getWorker().performWork( subClassWork, transactionContext );
      }
    }
    else {
      work = new Work<T>( entityType, id, WorkType.PURGE );
      searchFactoryImplementor.getWorker().performWork( work, transactionContext );
    }
  }


    if ( builder == null ) {
      String msg = "Entity to index is not an @Indexed entity: " + entityType.getName();
      throw new IllegalArgumentException( msg );
    }

    Work work;
    if ( id == null ) {
      // purge the main entity
      work = new Work( entityType, id, WorkType.PURGE_ALL );
      searchFactoryImplementor.getWorker().performWork( work, transactionContext );

      // purge the subclasses
      Set<Class<?>> subClasses = builder.getMappedSubclasses();
      for ( Class<?> clazz : subClasses ) {
        work = new Work( clazz, id, WorkType.PURGE_ALL );
        searchFactoryImplementor.getWorker().performWork( work, transactionContext );
      }
    }
    else {
      work = new Work( entityType, id, WorkType.PURGE );
      searchFactoryImplementor.getWorker().performWork( work, transactionContext );
    }
  }

    if ( searchFactoryImplementor.getDocumentBuilder( clazz ) == null ) {
      String msg = "Entity to index is not an @Indexed entity: " + entity.getClass().getName();
      throw new IllegalArgumentException( msg );
    }
    Serializable id = session.getIdentifier( entity );
    Work work = new Work( entity, id, WorkType.INDEX );
    searchFactoryImplementor.getWorker().performWork( work, transactionContext );

    //TODO
    //need to add elements in a queue kept at the Session level
    //the queue will be processed by a Lucene(Auto)FlushEventListener
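
The purge and index bodies above are the kind of internals that sit behind the public FullTextSession API. Below is a hedged usage sketch of that public side, assuming the standard org.hibernate.search.Search and FullTextSession entry points of the 3.1-era API (older releases used Search.createFullTextSession) and a made-up @Indexed entity called Book.

import java.io.Serializable;

import org.hibernate.Session;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

// Sketch only: Book is a hypothetical @Indexed entity, session an open Hibernate Session.
public class PurgeAndIndexSketch {

  public void reindexAndPurge(Session session, Book book, Serializable bookId) {
    FullTextSession fullTextSession = Search.getFullTextSession( session );
    fullTextSession.index( book );                // WorkType.INDEX for one managed instance
    fullTextSession.purge( Book.class, bookId );  // WorkType.PURGE for one identifier
    fullTextSession.purgeAll( Book.class );       // WorkType.PURGE_ALL, also covers mapped subclasses
  }
}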

      }
    }
  }

  protected void processWork(Object entity, Serializable id, WorkType workType, AbstractEvent event) {
    Work work = new Work( entity, id, workType );
    final EventSourceTransactionContext transactionContext = new EventSourceTransactionContext( event.getSession() );
    searchFactoryImplementor.getWorker().performWork( work, transactionContext );
  }
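
processWork is the funnel that the Hibernate event callbacks feed. Below is a hedged sketch of one such caller, assuming it lives in the same listener class as the excerpt above; the PostInsertEvent accessors are standard Hibernate event SPI, while the guard mirrors the getDocumentBuilders() lookup seen in the other excerpts.

// Sketch of a caller inside the same listener class; searchFactoryImplementor and
// processWork(...) are the field and method shown above.
public void onPostInsert(PostInsertEvent event) {
  final Object entity = event.getEntity();
  // only entities known to the search factory produce index work
  if ( searchFactoryImplementor.getDocumentBuilders().get( entity.getClass() ) != null ) {
    processWork( entity, event.getId(), WorkType.ADD, event );
  }
}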

    workQueue.setSealedQueue( luceneQueue );
  }

  private void processWorkByLayer(List<Work> queue, int initialSize, List<LuceneWork> luceneQueue, Layer layer) {
    for ( int i = 0 ; i < initialSize ; i++ ) {
      Work work = queue.get( i );
      if ( work != null) {
        if ( layer.isRightLayer( work.getType() ) ) {
          queue.set( i, null ); // help GC and avoid 2 loaded queues in memory
          addWorkToBuilderQueue( luceneQueue, work );
        }
      }
    }
  }

        type = WorkType.PURGE_ALL;
      }
      else {
        type = WorkType.PURGE;
      }
      Work work = new Work(entityType, id, type);
      searchFactoryImplementor.getWorker().performWork( work, eventSource );
    }
  }

    SearchFactoryImplementor searchFactoryImplementor = getSearchFactoryImplementor();
    //not strictly necessary but a small optimization
    DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( clazz );
    if ( builder != null ) {
      Serializable id = session.getIdentifier( entity );
      Work work = new Work(entity, id, WorkType.INDEX);
      searchFactoryImplementor.getWorker().performWork( work, eventSource );
    }
    //TODO
    //need to add elements in a queue kept at the Session level
    //the queue will be processed by a Lucene(Auto)FlushEventListener

      }
    }
  }

  protected void processWork(Object entity, Serializable id, WorkType workType, AbstractEvent event) {
    Work work = new Work(entity, id, workType);
    searchFactoryImplementor.getWorker().performWork( work, event.getSession() );
  }

    workQueue.setSealedQueue( luceneQueue );
  }

  private void processWorkByLayer(List<Work> queue, int initialSize, List<LuceneWork> luceneQueue, Layer layer) {
    for ( int i = 0 ; i < initialSize ; i++ ) {
      Work work = queue.get( i );
      if ( work != null) {
        if ( layer.isRightLayer( work.getType() ) ) {
          queue.set( i, null ); // help GC and avoid 2 loaded queues in memory
          Class entityClass = work.getEntityClass() != null ?
                work.getEntityClass() :
                Hibernate.getClass( work.getEntity() );
          DocumentBuilder<Object> builder = searchFactoryImplementor.getDocumentBuilders().get( entityClass );
          if ( builder == null ) continue; //or exception?
          builder.addWorkToQueue(entityClass, work.getEntity(), work.getId(), work.getType(), luceneQueue, searchFactoryImplementor );
        }
      }
    }
  }
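
The Layer argument itself is not part of these excerpts. A plausible minimal definition that satisfies the isRightLayer( WorkType ) contract used above, written here as an assumption rather than the library's own enum, would be a nested enum of the same processor class that splits the queue into two passes, entity work first and collection work second.

// Assumption: a two-pass split by WorkType; not the actual Hibernate Search enum.
private enum Layer {
  FIRST {
    public boolean isRightLayer(WorkType type) {
      return type != WorkType.COLLECTION;
    }
  },
  SECOND {
    public boolean isRightLayer(WorkType type) {
      return type == WorkType.COLLECTION;
    }
  };

  public abstract boolean isRightLayer(WorkType type);
}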

         String fqnString = cacheEntityId.getFqn().toString(); // Vars for logging
         String keyString = (String) key;


         searchFactory.getWorker().performWork(new Work(dataMap.get(key), cacheEntityId.getDocumentId(), WorkType.DELETE), ctx);
         searchFactory.getWorker().performWork(new Work(dataMap.get(key), cacheEntityId.getDocumentId(), WorkType.ADD), ctx);


         if (log.isTraceEnabled())
         {
            log.trace("Added object to the Lucene index with Fqn " + fqnString + " and key " + keyString);
         }
