Examples of create()


Examples of org.apache.lucene.analysis.core.WhitespaceTokenizerFactory.create()

    Reader reader = new StringReader("What's this thing do?");
    WhitespaceTokenizerFactory factory = new WhitespaceTokenizerFactory();
    factory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    Map<String, String> args = Collections.emptyMap();
    factory.init(args);
    Tokenizer stream = factory.create(reader);
    assertTokenStreamContents(stream,
        new String[] {"What's", "this", "thing", "do?"});
  }
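
The snippet above relies on the test helper assertTokenStreamContents. As a rough, self-contained sketch (not taken from the test suite), the Tokenizer returned by create() can also be consumed directly through the attribute API; this assumes the same Lucene 4.x-era init(Map)/create(Reader) factory API used throughout these examples:

    Reader reader = new StringReader("What's this thing do?");
    WhitespaceTokenizerFactory factory = new WhitespaceTokenizerFactory();
    factory.setLuceneMatchVersion(Version.LUCENE_43);     // any 4.x constant matching the init()-style API
    factory.init(Collections.<String, String>emptyMap());
    Tokenizer stream = factory.create(reader);
    CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
    stream.reset();
    while (stream.incrementToken()) {
      System.out.println(term.toString());                // prints: What's, this, thing, do?
    }
    stream.end();
    stream.close();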
 
Examples of org.apache.lucene.analysis.fa.PersianCharFilterFactory.create()

    Reader reader = new StringReader("می\u200Cخورد");  // assumed input: "می" + ZWNJ + "خورد", matching the expected tokens below
    PersianCharFilterFactory charfilterFactory = new PersianCharFilterFactory();
    StandardTokenizerFactory tokenizerFactory = new StandardTokenizerFactory();
    tokenizerFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    Map<String, String> args = Collections.emptyMap();
    tokenizerFactory.init(args);
    TokenStream stream = tokenizerFactory.create(charfilterFactory.create(reader));
    assertTokenStreamContents(stream, new String[] { "می", "خورد" });
  }
}

Examples of org.apache.lucene.analysis.in.IndicNormalizationFilterFactory.create()

    filterFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    Map<String, String> args = Collections.emptyMap();
    factory.init(args);
    filterFactory.init(args);
    Tokenizer tokenizer = factory.create(reader);
    TokenStream stream = filterFactory.create(tokenizer);
    assertTokenStreamContents(stream, new String[] { "ৎ", "और" });
  }
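
The fragment above omits both the reader and the StandardTokenizerFactory that feeds the normalization filter. A self-contained sketch of the wiring follows; since the original test's decomposed Indic input is not shown in the fragment, a stand-in string is used here and the tokens are printed rather than asserted:

    Reader reader = new StringReader("और");                       // stand-in input, not the original test data
    StandardTokenizerFactory factory = new StandardTokenizerFactory();
    IndicNormalizationFilterFactory filterFactory = new IndicNormalizationFilterFactory();
    factory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    filterFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    Map<String, String> args = Collections.emptyMap();
    factory.init(args);
    filterFactory.init(args);
    Tokenizer tokenizer = factory.create(reader);
    TokenStream stream = filterFactory.create(tokenizer);
    CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
    stream.reset();
    while (stream.incrementToken()) {
      System.out.println(term.toString());                        // already-normalized text should pass through unchanged
    }
    stream.end();
    stream.close();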
 
Examples of org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilterFactory.create()

    Reader reader = new StringReader("Česká");  // assumed accented input that folds to the expected "Ceska"
    Tokenizer tokenizer = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false);
    ASCIIFoldingFilterFactory factory = new ASCIIFoldingFilterFactory();
    factory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    Map<String, String> args = Collections.emptyMap();
    factory.init(args);
    TokenStream stream = factory.create(tokenizer);
    assertTokenStreamContents(stream, new String[] { "Ceska" });
  }
}

Examples of org.apache.lucene.analysis.standard.StandardTokenizerFactory.create()

    IndicNormalizationFilterFactory filterFactory = new IndicNormalizationFilterFactory();
    filterFactory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    Map<String, String> args = Collections.emptyMap();
    factory.init(args);
    filterFactory.init(args);
    Tokenizer tokenizer = factory.create(reader);
    TokenStream stream = filterFactory.create(tokenizer);
    assertTokenStreamContents(stream, new String[] { "ৎ", "और" });
  }
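
The snippet above uses StandardTokenizerFactory only as a source for the Indic filter. A minimal, hypothetical sketch of the factory on its own (same init()-style API): StandardTokenizer splits on Unicode word boundaries and does not lowercase or drop stop words, so plain words come back unchanged:

    Reader reader = new StringReader("The Quick Brown Fox");
    StandardTokenizerFactory factory = new StandardTokenizerFactory();
    factory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    factory.init(Collections.<String, String>emptyMap());
    Tokenizer stream = factory.create(reader);
    assertTokenStreamContents(stream, new String[] { "The", "Quick", "Brown", "Fox" });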
 
Examples of org.apache.lucene.analysis.synonym.SynonymFilterFactory.create()

    SynonymFilterFactory factory = new SynonymFilterFactory();
    Map<String, String> args = new HashMap<String, String>();
    args.put("synonyms", "synonyms.txt");
    factory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    factory.init(args);
    factory.inform(new StringMockResourceLoader("a b c,d"));
    TokenStream ts = factory.create(new MockTokenizer(new StringReader("a e"), MockTokenizer.WHITESPACE, false));
    // The partial match "a" must not trigger the multi-word synonym "a b c", so the stream should pass
    // through unchanged (an older version of this test failed here, producing ["e", "e"]).
    assertTokenStreamContents(ts, new String[] { "a", "e" });
  }
}
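
For contrast, a hypothetical variant (not part of the original test) in which the multi-word synonym does fire: feeding the full phrase "a b c" through the same factory should inject the single-token synonym "d" overlapping the start of the matched phrase, which the sketch below makes visible by printing each term with its position increment:

    SynonymFilterFactory factory = new SynonymFilterFactory();
    Map<String, String> args = new HashMap<String, String>();
    args.put("synonyms", "synonyms.txt");
    factory.setLuceneMatchVersion(TEST_VERSION_CURRENT);
    factory.init(args);
    factory.inform(new StringMockResourceLoader("a b c,d"));
    TokenStream ts = factory.create(new MockTokenizer(new StringReader("a b c"), MockTokenizer.WHITESPACE, false));
    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
    PositionIncrementAttribute posInc = ts.addAttribute(PositionIncrementAttribute.class);
    ts.reset();
    while (ts.incrementToken()) {
      System.out.println(term + " (posInc=" + posInc.getPositionIncrement() + ")");  // "d" is expected with posInc=0
    }
    ts.end();
    ts.close();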

Examples of org.apache.lucene.analysis.util.CharFilterFactory.create()

          instance = CharFilterFactory.forName(simpleName, args);
          assertNotNull(instance);
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          Class<? extends Reader> createdClazz = instance.create(new StringReader("")).getClass();
          // only check the created class if the factory actually wrapped the reader
          // (some char filters return the input unchanged)
          if (StringReader.class != createdClazz) {
            assertSame(c, createdClazz);
          }
        } catch (IllegalArgumentException e) {

Examples of org.apache.lucene.analysis.util.TokenFilterFactory.create()

    String turkishUpperCase = "I WİLL USE TURKİSH CASING";
    String turkishLowerCase = "ı will use turkish casıng";
    TokenFilterFactory factory = tokenFilterFactory("ICUCollationKey",
        "locale", "tr",
        "strength", "primary");
    TokenStream tsUpper = factory.create(
        new KeywordTokenizer(new StringReader(turkishUpperCase)));
    TokenStream tsLower = factory.create(
        new KeywordTokenizer(new StringReader(turkishLowerCase)));
    assertCollatesToSame(tsUpper, tsLower);
  }
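
Here tokenFilterFactory(...) and assertCollatesToSame(...) are helpers from the test base classes, not Lucene API. Roughly, the assertion pulls the single collation-key token out of each stream and checks that the two keys are equal; a hypothetical sketch of such a helper:

  // Hypothetical helper: extract the single term produced by a KeywordTokenizer-based stream.
  private static String singleTerm(TokenStream ts) throws IOException {
    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
    ts.reset();
    assertTrue(ts.incrementToken());
    String value = term.toString();
    assertFalse(ts.incrementToken());
    ts.end();
    ts.close();
    return value;
  }

  // the comparison then amounts to:
  // assertEquals(singleTerm(tsUpper), singleTerm(tsLower));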

Examples of org.apache.lucene.analysis.util.TokenizerFactory.create()

          instance = TokenizerFactory.forName(simpleName, args);
          assertNotNull(instance);
          if (instance instanceof ResourceLoaderAware) {
            ((ResourceLoaderAware) instance).inform(loader);
          }
          assertSame(c, instance.create(new StringReader("")).getClass());
        } catch (IllegalArgumentException e) {
          if (!e.getMessage().contains("SPI")) {
            throw e;
          }
          // TODO: For now pass, because some factories do not yet have a default config that always works

Examples of org.apache.lucene.benchmark.byTask.utils.AnalyzerFactory.create()

        analyzerName = "org.apache.lucene.analysis.standard.StandardAnalyzer";
      }
      // First, lookup analyzerName as a named analyzer factory
      AnalyzerFactory factory = getRunData().getAnalyzerFactories().get(analyzerName);
      if (null != factory) {
        analyzer = factory.create();
      } else {
        if (analyzerName.contains(".")) {
          if (analyzerName.startsWith("standard.")) {
            analyzerName = "org.apache.lucene.analysis." + analyzerName;
          }
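
Once the lookup succeeds, factory.create() returns a plain Analyzer, which the benchmark task then uses like any other. A hypothetical sketch of consuming it (the field name and text below are illustrative only):

    Analyzer analyzer = factory.create();
    TokenStream ts = analyzer.tokenStream("body", new StringReader("some benchmark text"));
    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
    ts.reset();
    while (ts.incrementToken()) {
      System.out.println(term.toString());
    }
    ts.end();
    ts.close();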