new int[] { 1, 1 });
}
/** Test that offsets are correct when a MappingCharFilter is previously applied. */
public void testChangedOffsets() throws IOException {
final NormalizeCharMap norm = new NormalizeCharMap();
norm.add("a", "一二");
norm.add("b", "二三");
Analyzer analyzer = new ReusableAnalyzerBase() {
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
return new TokenStreamComponents(tokenizer, new CJKBigramFilter(tokenizer));