Package: org.apache.lucene.index

Examples of org.apache.lucene.index.IndexWriter.commit()


    writer.addDocument(doc("prop1=val2", "prop3=val1", "prop7=val7"));
    writer.addDocument(doc("prop1=val2", "prop3=val2", "prop3=val3"));
    writer.addDocument(doc("prop1=val1", "prop2=val1"));
    writer.addDocument(doc("prop1=val1", "prop2=val1"));
    writer.addDocument(doc("prop1=val1", "prop2=val1", "prop4=val2", "prop4=val3"));
    writer.commit();

    attributesFacetHandler = new AttributesFacetHandler(AttributeHandlerName, AttributeHandlerName,
        null, null, new HashMap<String, String>());
    facetHandlers.add(attributesFacetHandler);
    DirectoryReader reader = DirectoryReader.open(directory);
View Full Code Here


    writer.addDocument(doc("prop1=val2", "prop3=val1", "prop7=val7"));
    writer.addDocument(doc("prop1=val2", "prop3=val2", "prop3=val3"));
    writer.addDocument(doc("prop1=val1", "prop2=val1"));
    writer.addDocument(doc("prop1=val1", "prop2=val1"));
    writer.addDocument(doc("prop1=val1", "prop2=val1", "prop4=val2", "prop4=val3"));
    writer.commit();

    HashMap<String, String> facetProps = new HashMap<String, String>();
    facetProps.put(AttributesFacetHandler.MAX_FACETS_PER_KEY_PROP_NAME, "1");
    attributesFacetHandler = new AttributesFacetHandler(AttributeHandlerName, AttributeHandlerName,
        null, null, facetProps);
View Full Code Here

    config.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(directory, config);
    Document doc = new Document();
    addMetaDataField(doc, PathHandlerName, new String[] { "/a/b/c", "/a/b/d" });
    writer.addDocument(doc);
    writer.commit();

    PathFacetHandler pathHandler = new PathFacetHandler("path", true);
    facetHandlers.add(pathHandler);
  }
View Full Code Here

      Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_43);
      IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, analyzer);
      IndexWriter idxWriter = new IndexWriter(_indexDir, config);
      idxWriter.deleteDocuments(new Term("id", "1"));
      idxWriter.deleteDocuments(new Term("id", "2"));
      idxWriter.commit();
      reader = newIndexReader();

      br = new BrowseRequest();
      br.setCount(10);
      br.setOffset(0);
View Full Code Here

    for (long l = 0; l < 53; l++) {
      Document d = new Document();
      d.add(buildMetaField("timeinmillis", df.format(now - l * 3500000)));
      idxWriter.addDocument(d);
      idxWriter.forceMerge(1);
      idxWriter.commit();
    }
    idxWriter.close();
    DirectoryReader idxReader = DirectoryReader.open(idxDir);
    BoboMultiReader boboReader = BoboMultiReader.getInstance(idxReader, facetHandlers);
    BoboBrowser browser = new BoboBrowser(boboReader);
View Full Code Here

      _lock.writeLock().lock();
      try {
        IndexWriter indexWriter = _trackingWriter.getIndexWriter();
        for (HdfsDirectory directory : indexesToImport) {
          LOG.info("Starting import [{0}], commiting on [{1}/{2}]", directory, shard, table);
          indexWriter.commit();
          boolean isSuccess = true;
          boolean isRollbackDueToException = false;
          boolean emitDeletes = indexWriter.numDocs() != 0;
          try {
            isSuccess = applyDeletes(directory, indexWriter, shard, emitDeletes);
View Full Code Here

            LOG.info("Add index [{0}] [{1}/{2}]", directory, shard, table);
            indexWriter.addIndexes(directory);
            LOG.info("Removing delete markers [{0}] on [{1}/{2}]", directory, shard, table);
            indexWriter.deleteDocuments(new Term(BlurConstants.DELETE_MARKER, BlurConstants.DELETE_MARKER_VALUE));
            LOG.info("Finishing import [{0}], commiting on [{1}/{2}]", directory, shard, table);
            indexWriter.commit();
            indexWriter.maybeMerge();
            LOG.info("Cleaning up old directory [{0}] for [{1}/{2}]", dirPath, shard, table);
            fileSystem.delete(dirPath, true);
            LOG.info("Import complete on [{0}/{1}]", shard, table);
          } else {
View Full Code Here

                        TEST_VERSION_CURRENT, new MockAnalyzer(random)));
    Document doc = new Document();
    doc.add(newField("f", "", Field.Store.NO, Field.Index.NOT_ANALYZED));
    doc.add(newField("t", "1", Field.Store.NO, Field.Index.NOT_ANALYZED));
    w.addDocument(doc);
    w.commit();
    doc = new Document();
    doc.add(newField("t", "1", Field.Store.NO, Field.Index.NOT_ANALYZED));
    w.addDocument(doc);

    IndexReader r = IndexReader.open(w, true);
View Full Code Here

    Directory dir = new RAMDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
    conf.setMergeScheduler(new ReportingMergeScheduler());
    IndexWriter writer = new IndexWriter(dir, conf);
    writer.addDocument(new Document());
    writer.commit(); // trigger flush
    writer.addDocument(new Document());
    writer.commit(); // trigger flush
    writer.optimize();
    writer.close();
    dir.close();
View Full Code Here

        assertTrue("RAMDirectory.setLockFactory did not take",
                   NoLockFactory.class.isInstance(dir.getLockFactory()));

        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)));
        writer.commit(); // required so the second open succeed
        // Create a 2nd IndexWriter.  This is normally not allowed but it should run through since we're not
        // using any locks:
        IndexWriter writer2 = null;
        try {
            writer2 = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
View Full Code Here

TOP
Copyright © 2018 www.massapi.com. All rights reserved.
All source code is the property of its respective owners. Java is a trademark of Sun Microsystems, Inc and owned by ORACLE Inc. Contact coftware#gmail.com.