// Numeric field (value 23) stored only in the index, not in the stored document.
numField = new LongField( "long", 23l, Store.NO );
doc.add( numField );
// Per-field analyzer overrides attached to the AddLuceneWork below;
// maps field name "godo" to the analyzer named "ngram".
Map<String, String> analyzers = new HashMap<String, String>();
analyzers.put( "godo", "ngram" );
works.add( new AddLuceneWork( 123, "123", RemoteEntity.class, doc, analyzers ) );
// Fresh document exercising the legacy Lucene Field API variants
// (string, binary, Reader-backed, and TokenStream-backed fields).
doc = new Document();
// Stored + analyzed string field with offset term vectors.
Field field = new Field(
"StringF",
"String field",
Field.Store.YES,
Field.Index.ANALYZED,
Field.TermVector.WITH_OFFSETS
);
// NOTE(review): norm/term-freq omission intentionally left disabled — presumably
// excluded from the serialization round-trip being tested; confirm before removing.
// field.setOmitNorms( true );
// field.setOmitTermFreqAndPositions( true );
// Non-default boost so boost serialization is exercised.
field.setBoost( 3f );
doc.add( field );
// Second analyzed string field, default boost (contrast with "StringF").
field = new Field(
"StringF2",
"String field 2",
Field.Store.YES,
Field.Index.ANALYZED,
Field.TermVector.WITH_OFFSETS
);
doc.add( field );
// Binary (byte[]) stored field covering the full array (offset 0, full length).
byte[] array = new byte[4];
array[0] = 2;
array[1] = 5;
array[2] = 5;
array[3] = 8;
field = new Field( "binary", array, 0, array.length );
doc.add( field );
// Reader-backed field; SerializableStringReader presumably supplies the content
// so the Reader itself can survive serialization — verify against its definition.
SerializableStringReader reader = new SerializableStringReader();
field = new Field( "ReaderField", reader, Field.TermVector.WITH_OFFSETS );
doc.add( field );
// TokenStream-backed field built from a pre-constructed attribute list,
// with positions + offsets term vectors and a non-default boost.
List<List<AttributeImpl>> tokens = AvroTestHelpers.buildTokenSteamWithAttributes();
CopyTokenStream tokenStream = new CopyTokenStream( tokens );
field = new Field( "tokenstream", tokenStream, Field.TermVector.WITH_POSITIONS_OFFSETS );
// NOTE(review): same intentionally-disabled flags as above — keep in sync.
// field.setOmitNorms( true );
// field.setOmitTermFreqAndPositions( true );
field.setBoost( 3f );
doc.add( field );
// An update work carrying the multi-field document, plus an add work with an
// empty document to cover the no-fields edge case.
works.add( new UpdateLuceneWork( 1234, "1234", RemoteEntity.class, doc ) );
works.add( new AddLuceneWork( 125, "125", RemoteEntity.class, new Document() ) );
return works;
}