Package org.apache.lucene.analysis.util

Examples of org.apache.lucene.analysis.util.TokenizerFactory
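The snippets below are drawn from Lucene/Solr test code and from Hibernate Search, and show both configuration styles the factory API has had: the pre-4.3 style (no-arg construction or forName(name), then setLuceneMatchVersion() and init(Map)) and the 4.3+ style (a Map of parameters passed to the constructor or to forName(name, args)). As an orientation, here is a minimal sketch of the 4.x-style usage; the factory name "whitespace", the version string "4.4", and the sample text are illustrative, and imports are omitted as in the snippets below.

    // look a tokenizer factory up by its SPI name and tokenize some text
    Map<String, String> args = new HashMap<String, String>();
    args.put("luceneMatchVersion", "4.4");   // consumed by AbstractAnalysisFactory
    TokenizerFactory factory = TokenizerFactory.forName("whitespace", args);
    Tokenizer tokenizer = factory.create(new StringReader("hello TokenizerFactory"));
    CharTermAttribute term = tokenizer.addAttribute(CharTermAttribute.class);
    tokenizer.reset();
    while (tokenizer.incrementToken()) {
      System.out.println(term.toString());   // "hello", then "TokenizerFactory"
    }
    tokenizer.end();
    tokenizer.close();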


    // prepare bi-gram tokenizer factory
    Map<String, String> args = new HashMap<String, String>();
    args.put(AbstractAnalysisFactory.LUCENE_MATCH_VERSION_PARAM, "4.4");
    args.put("minGramSize","2");
    args.put("maxGramSize","2");
    TokenizerFactory tf = new NGramTokenizerFactory(args);
   
    // (ab)->(bc)->(cd)->[ef][fg][gh]
    List<String> rules = new ArrayList<String>();
    rules.add( "abcd=>efgh" );
    SlowSynonymMap synMap = new SlowSynonymMap( true );
View Full Code Here
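As a hedged follow-up that is not part of the original test: with minGramSize = maxGramSize = 2, the factory built above should emit the bigrams of its input, which is what the (ab)->(bc)->(cd) notation in the comment above refers to. This assumes the surrounding test extends BaseTokenStreamTestCase, whose assertTokenStreamContents helper also appears in the HMMChinese example further down.

    // sketch only: exercise the bigram factory built above
    Tokenizer bigrams = tf.create(new StringReader("abcd"));
    assertTokenStreamContents(bigrams, new String[] { "ab", "bc", "cd" });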


     
      if (Tokenizer.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("Tokenizer"));
        String simpleName = clazzName.substring(0, clazzName.length() - 9);
        TokenizerFactory instance = null;
        try {
          instance = TokenizerFactory.forName(simpleName, args);
          assertNotNull(instance);
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          assertSame(c, instance.create(new StringReader("")).getClass());
        } catch (IllegalArgumentException e) {
          if (!e.getMessage().contains("SPI")) {
            throw e;
          }
          // TODO: for now, pass because some factories do not yet have a default config that always works
        }
      } else if (TokenFilter.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("Filter"));
        String simpleName = clazzName.substring(0, clazzName.length() - (clazzName.endsWith("TokenFilter") ? 11 : 6));
        TokenFilterFactory instance = null;
        try {
          instance = TokenFilterFactory.forName(simpleName, args);
          assertNotNull(instance);
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          Class<? extends TokenStream> createdClazz = instance.create(new KeywordTokenizer(new StringReader(""))).getClass();
          // only check the instance's class if the factory wrapped the input at all
          if (KeywordTokenizer.class != createdClazz) {
            assertSame(c, createdClazz);
          }
        } catch (IllegalArgumentException e) {
          if (!e.getMessage().contains("SPI")) {
            throw e;
          }
          // TODO: for now, pass because some factories do not yet have a default config that always works
        }
      } else if (CharFilter.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("CharFilter"));
        String simpleName = clazzName.substring(0, clazzName.length() - 10);
        CharFilterFactory instance = null;
        try {
          instance = CharFilterFactory.forName(simpleName, args);
          assertNotNull(instance);
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          Class<? extends Reader> createdClazz = instance.create(new StringReader("")).getClass();
          // only check the instance's class if the factory wrapped the input at all
          if (StringReader.class != createdClazz) {
            assertSame(c, createdClazz);
          }
        } catch (IllegalArgumentException e) {
View Full Code Here
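The test above leans on the SPI naming convention: a factory's lookup name is the analysis class's simple name with its Tokenizer/Filter/CharFilter suffix stripped, and forName() matches it case-insensitively. A hedged illustration, with concrete names that are not taken from the snippet:

    // "LowerCaseFilter"     -> "LowerCase" -> LowerCaseFilterFactory
    // "HTMLStripCharFilter" -> "HTMLStrip" -> HTMLStripCharFilterFactory
    Map<String, String> args = new HashMap<String, String>();
    args.put("luceneMatchVersion", "4.4");
    TokenFilterFactory lowercase = TokenFilterFactory.forName("LowerCase", args);
    CharFilterFactory htmlStrip = CharFilterFactory.forName("HTMLStrip", new HashMap<String, String>());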

    }
  }
 
  private void doTestTokenizer(String tokenizer) throws IOException {
    Class<? extends TokenizerFactory> factoryClazz = TokenizerFactory.lookupClass(tokenizer);
    TokenizerFactory factory = (TokenizerFactory) initialize(factoryClazz);
    if (factory != null) {
      // we managed to fully create an instance. check a few more things:
     
      // if it implements MultiTermAware, sanity check its impl
      if (factory instanceof MultiTermAwareComponent) {
View Full Code Here

      Version luceneMatchVersion,
      ServiceManager serviceManager) throws IOException {
    ResourceLoader defaultResourceLoader = new HibernateSearchResourceLoader( serviceManager );
    TokenizerDef token = analyzerDef.tokenizer();
    final Map<String, String> tokenMapsOfParameters = getMapOfParameters( token.params(), luceneMatchVersion );
    TokenizerFactory tokenFactory = instanceFromClass(
        TokenizerFactory.class,
        token.factory(),
        "Tokenizer factory",
        tokenMapsOfParameters
    );
View Full Code Here
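For context, a hedged sketch of the Hibernate Search mapping that feeds the code above: an @AnalyzerDef (from org.hibernate.search.annotations) whose @TokenizerDef names a Lucene TokenizerFactory class and its parameters. The analyzer name and entity class are illustrative:

    @AnalyzerDef(name = "ngramAnalyzer",
        tokenizer = @TokenizerDef(
            factory = NGramTokenizerFactory.class,
            params = {
                @Parameter(name = "minGramSize", value = "2"),
                @Parameter(name = "maxGramSize", value = "2")
            }))
    public class SomeIndexedEntity { /* ... */ }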

 
  public void testBigramTokenizer() throws Exception {
    SlowSynonymMap synMap;

    // prepare bi-gram tokenizer factory
    TokenizerFactory tf = new NGramTokenizerFactory();
    Map<String, String> args = new HashMap<String, String>();
    args.put("minGramSize","2");
    args.put("maxGramSize","2");
    tf.init( args );

    // (ab)->(bc)->(cd)->[ef][fg][gh]
    List<String> rules = new ArrayList<String>();
    rules.add( "abcd=>efgh" );
    synMap = new SlowSynonymMap( true );
View Full Code Here

      doTestCharFilter(charFilter);
    }
  }
 
  private void doTestTokenizer(String tokenizer) throws IOException {
    TokenizerFactory factory = TokenizerFactory.forName(tokenizer);
    if (initialize(factory)) {
      // we managed to fully create an instance. check a few more things:
     
      // if it implements MultiTermAware, sanity check its impl
      if (factory instanceof MultiTermAwareComponent) {
View Full Code Here

     
      if (Tokenizer.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("Tokenizer"));
        String simpleName = clazzName.substring(0, clazzName.length() - 9);
        TokenizerFactory instance = TokenizerFactory.forName(simpleName);
        assertNotNull(instance);
        try {
          instance.setLuceneMatchVersion(TEST_VERSION_CURRENT);
          instance.init(Collections.<String,String>emptyMap());
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          assertSame(c, instance.create(new StringReader("")).getClass());
        } catch (IllegalArgumentException e) {
          // TODO: for now, pass because some factories do not yet have a default config that always works
        }
      } else if (TokenFilter.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("Filter"));
        String simpleName = clazzName.substring(0, clazzName.length() - (clazzName.endsWith("TokenFilter") ? 11 : 6));
        TokenFilterFactory instance = TokenFilterFactory.forName(simpleName);
        assertNotNull(instance);
        try {
          instance.setLuceneMatchVersion(TEST_VERSION_CURRENT);
          instance.init(Collections.<String,String>emptyMap());
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          Class<? extends TokenStream> createdClazz = instance.create(new KeywordTokenizer(new StringReader(""))).getClass();
          // only check the instance's class if the factory wrapped the input at all
          if (KeywordTokenizer.class != createdClazz) {
            assertSame(c, createdClazz);
          }
        } catch (IllegalArgumentException e) {
          // TODO: for now, pass because some factories do not yet have a default config that always works
        }
      } else if (CharFilter.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("CharFilter"));
        String simpleName = clazzName.substring(0, clazzName.length() - 10);
        CharFilterFactory instance = CharFilterFactory.forName(simpleName);
        assertNotNull(instance);
        try {
          instance.setLuceneMatchVersion(TEST_VERSION_CURRENT);
          instance.init(Collections.<String,String>emptyMap());
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          Class<? extends Reader> createdClazz = instance.create(new StringReader("")).getClass();
          // only check the instance's class if the factory wrapped the input at all
          if (StringReader.class != createdClazz) {
            assertSame(c, createdClazz);
          }
        } catch (IllegalArgumentException e) {
View Full Code Here
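Condensed into a hedged sketch, the pre-4.3 lifecycle that this loop exercises looks like the following; the factory name "standard" and the version constant are illustrative:

    // pre-4.3 style: no-arg construction via forName(name), then explicit configuration
    TokenizerFactory factory = TokenizerFactory.forName("standard");
    factory.setLuceneMatchVersion(Version.LUCENE_40);
    factory.init(Collections.<String,String>emptyMap());
    // a ResourceLoaderAware factory would additionally need inform(loader) here
    Tokenizer tokenizer = factory.create(new StringReader("some text"));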

public class TestHMMChineseTokenizerFactory extends BaseTokenStreamTestCase {
 
  /** Test showing the behavior */
  public void testSimple() throws Exception {
    Reader reader = new StringReader("我购买了道具和服装。");
    TokenizerFactory factory = new HMMChineseTokenizerFactory(new HashMap<String,String>());
    Tokenizer tokenizer = factory.create(newAttributeFactory(), reader);
    // TODO: fix smart chinese to not emit punctuation tokens
    // at the moment: you have to clean up with WDF, or use the stoplist, etc
    assertTokenStreamContents(tokenizer,
       new String[] { "我", "购买", "了", "道具", "和", "服装", "," });
  }
View Full Code Here

      if (Tokenizer.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("Tokenizer"));
        String simpleName = clazzName.substring(0, clazzName.length() - 9);
        assertNotNull(TokenizerFactory.lookupClass(simpleName));
        TokenizerFactory instance = null;
        try {
          instance = TokenizerFactory.forName(simpleName, args);
          assertNotNull(instance);
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          assertSame(c, instance.create(new StringReader("")).getClass());
        } catch (IllegalArgumentException e) {
          if (e.getCause() instanceof NoSuchMethodException) {
            // there is no corresponding ctor available
            throw e;
          }
          // TODO: for now, pass because some factories do not yet have a default config that always works
        }
      } else if (TokenFilter.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("Filter"));
        String simpleName = clazzName.substring(0, clazzName.length() - (clazzName.endsWith("TokenFilter") ? 11 : 6));
        assertNotNull(TokenFilterFactory.lookupClass(simpleName));
        TokenFilterFactory instance = null;
        try {
          instance = TokenFilterFactory.forName(simpleName, args);
          assertNotNull(instance);
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          Class<? extends TokenStream> createdClazz = instance.create(new KeywordTokenizer(new StringReader(""))).getClass();
          // only check the instance's class if the factory wrapped the input at all
          if (KeywordTokenizer.class != createdClazz) {
            assertSame(c, createdClazz);
          }
        } catch (IllegalArgumentException e) {
          if (e.getCause() instanceof NoSuchMethodException) {
            // there is no corresponding ctor available
            throw e;
          }
          // TODO: for now, pass because some factories do not yet have a default config that always works
        }
      } else if (CharFilter.class.isAssignableFrom(c)) {
        String clazzName = c.getSimpleName();
        assertTrue(clazzName.endsWith("CharFilter"));
        String simpleName = clazzName.substring(0, clazzName.length() - 10);
        assertNotNull(CharFilterFactory.lookupClass(simpleName));
        CharFilterFactory instance = null;
        try {
          instance = CharFilterFactory.forName(simpleName, args);
          assertNotNull(instance);
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          Class<? extends Reader> createdClazz = instance.create(new StringReader("")).getClass();
          // only check the instance's class if the factory wrapped the input at all
          if (StringReader.class != createdClazz) {
            assertSame(c, createdClazz);
          }
        } catch (IllegalArgumentException e) {
View Full Code Here
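As the assertions above suggest, lookupClass(name) only resolves the registered factory class, while forName(name, args) also instantiates it through its (Map<String,String>) constructor. A hedged side-by-side, with an illustrative factory name:

    Map<String, String> args = new HashMap<String, String>();
    args.put("luceneMatchVersion", "4.4");
    Class<? extends TokenizerFactory> clazz = TokenizerFactory.lookupClass("whitespace"); // no instantiation
    TokenizerFactory factory = TokenizerFactory.forName("whitespace", args);              // calls the (Map) ctor
    assertSame(clazz, factory.getClass());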

