Package org.hibernate.search.backend

Examples of org.hibernate.search.backend.AddLuceneWork
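AddLuceneWork is the backend work unit that adds one Lucene Document to the index for a single entity instance. As the call sites below show, it carries the entity id (both as a Serializable and in string form), the entity class, the Document itself, and optionally a map of per-field analyzer overrides. A minimal sketch, not taken from the excerpts below ("MyEntity" is a hypothetical indexed entity class; the Lucene 3.x-style Field API matches the snippets on this page):

  // a hedged sketch: builds one Document and wraps it in an AddLuceneWork
  public List<LuceneWork> createSingleAddWork(Serializable id) {
    Document doc = new Document();
    doc.add( new Field( "title", "some analyzed text", Field.Store.YES, Field.Index.ANALYZED ) );
    // backends consume lists of LuceneWork rather than individual works
    List<LuceneWork> queue = new ArrayList<LuceneWork>();
    queue.add( new AddLuceneWork( id, String.valueOf( id ), MyEntity.class, doc ) );
    return queue;
  }

The first excerpt, which builds several field flavors into two documents, follows: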


    List<LuceneWork> works = new ArrayList<LuceneWork>();

    Document doc = new Document();
    LongField numField = new LongField( "long", 23L, Store.NO );
    doc.add( numField );

    // per-field analyzer overrides: field name -> analyzer definition name
    Map<String, String> analyzers = new HashMap<String, String>();
    analyzers.put( "godo", "ngram" );
    works.add( new AddLuceneWork( 123, "123", RemoteEntity.class, doc, analyzers ) );

    doc = new Document();
    Field field = new Field(
        "StringF",
        "String field",
        Field.Store.YES,
        Field.Index.ANALYZED,
        Field.TermVector.WITH_OFFSETS
    );
    field.setBoost( 3f );
    doc.add( field );

    field = new Field(
        "StringF2",
        "String field 2",
        Field.Store.YES,
        Field.Index.ANALYZED,
        Field.TermVector.WITH_OFFSETS
    );
    doc.add( field );

    byte[] array = new byte[] { 2, 5, 5, 8 };
    field = new Field( "binary", array, 0, array.length );
    doc.add( field );

    SerializableStringReader reader = new SerializableStringReader();
    field = new Field( "ReaderField", reader, Field.TermVector.WITH_OFFSETS );
    doc.add( field );

    // a pre-recorded token stream with custom attributes, wrapped so it can be serialized
    List<List<AttributeImpl>> tokens = AvroTestHelpers.buildTokenSteamWithAttributes();
    CopyTokenStream tokenStream = new CopyTokenStream( tokens );
    field = new Field( "tokenstream", tokenStream, Field.TermVector.WITH_POSITIONS_OFFSETS );
    field.setBoost( 3f );
    doc.add( field );

    works.add( new UpdateLuceneWork( 1234, "1234", RemoteEntity.class, doc ) );
    works.add( new AddLuceneWork( 125, "125", RemoteEntity.class, new Document() ) );
    return works;
  }
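This first excerpt is evidently from serialization test fixtures (note AvroTestHelpers, CopyTokenStream and SerializableStringReader): it covers numeric, string, binary, Reader-backed and TokenStream-backed fields, plus an analyzer-override map, i.e. everything an AddLuceneWork may need to carry to a remote backend.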


The next excerpt builds a two-field document for a shirt entity and returns it as a one-element work queue:

    Document doc = new Document();
    Field field = new Field( "logo", shirt.getLogo(), Field.Store.NO, Field.Index.ANALYZED );
    doc.add( field );
    DoubleField numField = new DoubleField( "length", shirt.getLength(), Field.Store.NO );
    doc.add( numField );
    LuceneWork luceneWork = new AddLuceneWork(
        shirt.getId(), String.valueOf( shirt.getId() ), shirt.getClass(), doc
    );
    List<LuceneWork> queue = new ArrayList<LuceneWork>();
    queue.add( luceneWork );
    return queue;
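The backend applies works in batches, which is why even this single AddLuceneWork travels inside a List<LuceneWork>.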

  public AddLuceneWork createAddWork(Class<T> entityClass, T entity, Serializable id, String idInString, InstanceInitializer sessionInitializer, ConversionContext conversionContext) {
    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    Document doc = getDocument( entity, id, fieldToAnalyzerMap, sessionInitializer, conversionContext );
    final AddLuceneWork addWork;
    if ( fieldToAnalyzerMap.isEmpty() ) {
      addWork = new AddLuceneWork( id, idInString, entityClass, doc );
    }
    else {
      addWork = new AddLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap );
    }
    return addWork;
  }
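The else branch above uses the constructor variant that also carries the collected per-field analyzer overrides (the first excerpt on this page builds such a map by hand). A direct-call sketch, where the field and analyzer names are hypothetical and id, idInString and doc are assumed in scope:

    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    // hypothetical mapping: field name -> analyzer definition name
    fieldToAnalyzerMap.put( "description", "ngram" );
    AddLuceneWork work = new AddLuceneWork( id, idInString, MyEntity.class, doc, fieldToAnalyzerMap );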

  public AddLuceneWork createAddWork(Class<T> entityClass, T entity, Serializable id, String idInString, EntityInitializer sessionInitializer) {
    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    Document doc = getDocument( entity, id, fieldToAnalyzerMap, sessionInitializer );
    final AddLuceneWork addWork;
    if ( fieldToAnalyzerMap.isEmpty() ) {
      addWork = new AddLuceneWork( id, idInString, entityClass, doc );
    }
    else {
      addWork = new AddLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap );
    }
    return addWork;
  }

    DocumentBuilderIndexedEntity docBuilder = documentBuilders.get( clazz );
    TwoWayFieldBridge idBridge = docBuilder.getIdBridge();
    String idInString = idBridge.objectToString( id );
    // Depending on the complexity of the object graph being indexed, it is possible
    // that we hit the database several times during work construction.
    AddLuceneWork addWork = docBuilder.createAddWork( clazz, entity, id, idInString, true );
    backend.enqueueAsyncWork( addWork );
  }
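The TwoWayFieldBridge produces the string form of the identifier that every LuceneWork carries next to the Serializable id; enqueueAsyncWork then hands the finished AddLuceneWork to the backend for asynchronous application.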

  public AddLuceneWork createAddWork(Class<T> entityClass, T entity, Serializable id, String idInString, boolean isBatch) {
    Map<String, String> fieldToAnalyzerMap = new HashMap<String, String>();
    Document doc = getDocument( entity, id, fieldToAnalyzerMap );
    AddLuceneWork addWork;
    if ( fieldToAnalyzerMap.isEmpty() ) {
      addWork = new AddLuceneWork( id, idInString, entityClass, doc, isBatch );
    }
    else {
      addWork = new AddLuceneWork( id, idInString, entityClass, doc, fieldToAnalyzerMap, isBatch );
    }
    return addWork;
  }
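This variant additionally records whether the work originates from batch indexing; the asynchronous excerpt above passes true for that argument. Older Hibernate Search versions used the flag to apply batch-tuned index-writer settings instead of the transactional ones.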

  public AddLuceneWork cloneOverridingIdString(final AddLuceneWork lw, final KeyTransformationHandler keyTransformationHandler) {
    final Serializable id = lw.getId();
    if ( id == null ) {
      // this is serialized work received from a remote node: use the string form of the id instead
      final String idInString = lw.getIdInString();
      return new AddLuceneWork( idInString, idInString, lw.getEntityClass(), lw.getDocument(), lw.getFieldToAnalyzerMap() );
    }
    else {
      return lw;
    }
  }
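The null check handles works deserialized from a remote node, where the original Serializable id is no longer available: the string form is substituted for both id arguments so the work can still be applied to the local index, while locally created works are returned untouched.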