Package: org.hibernate.search.backend

Usage examples of org.hibernate.search.backend.UpdateLuceneWork


//    field.setOmitNorms( true );
//    field.setOmitTermFreqAndPositions( true );
    field.setBoost( 3f );
    doc.add( field );

    works.add( new UpdateLuceneWork( 1234, "1234", RemoteEntity.class, doc ) );
    works.add( new AddLuceneWork( 125, "125", RemoteEntity.class, new Document() ) );
    return works;
  }
View Full Code Here


    CopyTokenStream tokenStream = new CopyTokenStream( tokens );
    field = new Field( "tokenstream", tokenStream, Field.TermVector.WITH_POSITIONS_OFFSETS );
    field.setBoost( 3f );
    doc.add( field );

    works.add( new UpdateLuceneWork( 1234, "1234", RemoteEntity.class, doc ) );
    works.add( new AddLuceneWork( 125, "125", RemoteEntity.class, new Document() ) );
    return works;
  }
View Full Code Here

         return;
      }
      ConversionContext conversionContext = new ContextualExceptionBridgeHelper();
      DocumentBuilderIndexedEntity docBuilder = entityIndexBinding.getDocumentBuilder();
      final String idInString = keyTransformationHandler.keyToString(key);
      UpdateLuceneWork updateTask = docBuilder.createUpdateWork(
            clazz,
            value,
            idInString,
            idInString,
            SimpleInitializer.INSTANCE,
View Full Code Here

  }

  public UpdateLuceneWork createUpdateWork(Class<T> entityClass, T entity, Serializable id, String idInString, InstanceInitializer sessionInitializer, ConversionContext contextualBridge) {
    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    Document doc = getDocument( entity, id, fieldToAnalyzerMap, sessionInitializer, contextualBridge );
    final UpdateLuceneWork addWork;
    if ( fieldToAnalyzerMap.isEmpty() ) {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc );
    }
    else {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap );
    }
    return addWork;
  }
View Full Code Here

  }
 
  public UpdateLuceneWork createUpdateWork(Class<T> entityClass, T entity, Serializable id, String idInString, EntityInitializer sessionInitializer) {
    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    Document doc = getDocument( entity, id, fieldToAnalyzerMap, sessionInitializer );
    final UpdateLuceneWork addWork;
    if ( fieldToAnalyzerMap.isEmpty() ) {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc );
    }
    else {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap );
    }
    return addWork;
  }
View Full Code Here

  }
 
  public UpdateLuceneWork createUpdateWork(Class<T> entityClass, T entity, Serializable id, String idInString, InstanceInitializer sessionInitializer, ConversionContext contextualBridge) {
    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    Document doc = getDocument( entity, id, fieldToAnalyzerMap, sessionInitializer, contextualBridge );
    final UpdateLuceneWork addWork;
    if ( fieldToAnalyzerMap.isEmpty() ) {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc );
    }
    else {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap );
    }
    return addWork;
  }
View Full Code Here

  }

  public UpdateLuceneWork createUpdateWork(Class<T> entityClass, T entity, Serializable id, String idInString, InstanceInitializer sessionInitializer, ConversionContext contextualBridge) {
    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    Document doc = getDocument( entity, id, fieldToAnalyzerMap, sessionInitializer, contextualBridge );
    final UpdateLuceneWork addWork;
    if ( fieldToAnalyzerMap.isEmpty() ) {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc );
    }
    else {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap );
    }
    return addWork;
  }
View Full Code Here

      public UpdateLuceneWork cloneOverridingIdString(final UpdateLuceneWork lw, final KeyTransformationHandler keyTransformationHandler) {
         final Serializable id = lw.getId();
         if (id == null) {
            //this is serialized work received from a remote node: take the getIdAsString instead
            final String idInString = lw.getIdInString();
            return new UpdateLuceneWork(idInString, idInString, lw.getEntityClass(), lw.getDocument(), lw.getFieldToAnalyzerMap());
         }
         else {
            return lw;
         }
      }
View Full Code Here

  }
 
  public UpdateLuceneWork createUpdateWork(Class<T> entityClass, T entity, Serializable id, String idInString, EntityInitializer sessionInitializer) {
    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    Document doc = getDocument( entity, id, fieldToAnalyzerMap, sessionInitializer );
    final UpdateLuceneWork addWork;
    if ( fieldToAnalyzerMap.isEmpty() ) {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc );
    }
    else {
      addWork = new UpdateLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap );
    }
    return addWork;
  }
View Full Code Here

      public UpdateLuceneWork cloneOverridingIdString(final UpdateLuceneWork lw, final KeyTransformationHandler keyTransformationHandler) {
         final Serializable id = lw.getId();
         if (id == null) {
            //this is serialized work received from a remote node: take the getIdAsString instead
            final String idInString = lw.getIdInString();
            return new UpdateLuceneWork(idInString, idInString, lw.getEntityClass(), lw.getDocument(), lw.getFieldToAnalyzerMap());
         }
         else {
            return lw;
         }
      }
View Full Code Here

TOP

Related Classes of org.hibernate.search.backend.UpdateLuceneWork

Copyright © 2018 www.massapicom. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc. and owned by Oracle Inc. Contact: coftware@gmail.com.