Examples of BaseDirectoryWrapper
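
All of the snippets on this page share the same small amount of BaseDirectoryWrapper-specific setup. Below is a minimal sketch of that pattern, assuming the Lucene test framework (LuceneTestCase and friends) is on the classpath; the class and method names are hypothetical, and imports are omitted as in the snippets that follow.

public class BaseDirectoryWrapperSetupExample extends LuceneTestCase {

  public void testSetupPattern() throws Exception {
    // newDirectory() (and newFSDirectory(...)) return a BaseDirectoryWrapper
    // around a randomly chosen Directory implementation
    BaseDirectoryWrapper dir = newDirectory();

    // skip the automatic CheckIndex pass when the directory is closed; the
    // corrupt-index tests below do this because they damage the index on purpose
    dir.setCheckIndexOnClose(false);

    // the wrapper is often a MockDirectoryWrapper; if so, extra knobs are
    // available, e.g. the @Monster tests below disable I/O throttling
    if (dir instanceof MockDirectoryWrapper) {
      ((MockDirectoryWrapper) dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }

    dir.close();
  }
}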


Examples of org.apache.lucene.store.BaseDirectoryWrapper

@Monster("takes ~ 45 minutes")
public class Test2BBinaryDocValues extends LuceneTestCase {
 
  // indexes Integer.MAX_VALUE docs with a fixed binary field
  public void testFixedBinary() throws Exception {
    BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BFixedBinary"));
    if (dir instanceof MockDirectoryWrapper) {
      ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
   
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
        .setRAMBufferSizeMB(256.0)
        .setMergeScheduler(new ConcurrentMergeScheduler())
        .setMergePolicy(newLogMergePolicy(false, 10))
        .setOpenMode(IndexWriterConfig.OpenMode.CREATE));

    Document doc = new Document();
    byte bytes[] = new byte[4];
    BytesRef data = new BytesRef(bytes);
    BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);
    doc.add(dvField);
   
    for (int i = 0; i < Integer.MAX_VALUE; i++) {
      bytes[0] = (byte)(i >> 24);
      bytes[1] = (byte)(i >> 16);
      bytes[2] = (byte)(i >> 8);
      bytes[3] = (byte) i;
      w.addDocument(doc);
      if (i % 100000 == 0) {
        System.out.println("indexed: " + i);
        System.out.flush();
      }
    }
   
    w.forceMerge(1);
    w.close();
   
    System.out.println("verifying...");
    System.out.flush();
   
    DirectoryReader r = DirectoryReader.open(dir);
    int expectedValue = 0;
    for (AtomicReaderContext context : r.leaves()) {
      AtomicReader reader = context.reader();
      BinaryDocValues dv = reader.getBinaryDocValues("dv");
      for (int i = 0; i < reader.maxDoc(); i++) {
        bytes[0] = (byte)(expectedValue >> 24);
        bytes[1] = (byte)(expectedValue >> 16);
        bytes[2] = (byte)(expectedValue >> 8);
        bytes[3] = (byte) expectedValue;
        final BytesRef term = dv.get(i);
        assertEquals(data, term);
        expectedValue++;
      }
    }
   
    r.close();
    dir.close();
  }

Examples of org.apache.lucene.store.BaseDirectoryWrapper

    dir.close();
  }
 
  // indexes Integer.MAX_VALUE docs with a variable binary field
  public void testVariableBinary() throws Exception {
    BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BVariableBinary"));
    if (dir instanceof MockDirectoryWrapper) {
      ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
   
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
        .setRAMBufferSizeMB(256.0)
        .setMergeScheduler(new ConcurrentMergeScheduler())
        .setMergePolicy(newLogMergePolicy(false, 10))
        .setOpenMode(IndexWriterConfig.OpenMode.CREATE));

    Document doc = new Document();
    byte bytes[] = new byte[4];
    ByteArrayDataOutput encoder = new ByteArrayDataOutput(bytes);
    BytesRef data = new BytesRef(bytes);
    BinaryDocValuesField dvField = new BinaryDocValuesField("dv", data);
    doc.add(dvField);
   
    for (int i = 0; i < Integer.MAX_VALUE; i++) {
      encoder.reset(bytes);
      encoder.writeVInt(i % 65535); // 1, 2, or 3 bytes
      data.length = encoder.getPosition();
      w.addDocument(doc);
      if (i % 100000 == 0) {
        System.out.println("indexed: " + i);
        System.out.flush();
      }
    }
   
    w.forceMerge(1);
    w.close();
   
    System.out.println("verifying...");
    System.out.flush();
   
    DirectoryReader r = DirectoryReader.open(dir);
    int expectedValue = 0;
    ByteArrayDataInput input = new ByteArrayDataInput();
    for (AtomicReaderContext context : r.leaves()) {
      AtomicReader reader = context.reader();
      BinaryDocValues dv = reader.getBinaryDocValues("dv");
      for (int i = 0; i < reader.maxDoc(); i++) {
        final BytesRef term = dv.get(i);
        input.reset(term.bytes, term.offset, term.length);
        assertEquals(expectedValue % 65535, input.readVInt());
        assertTrue(input.eof());
        expectedValue++;
      }
    }
   
    r.close();
    dir.close();
  }

Examples of org.apache.lucene.store.BaseDirectoryWrapper

  // Simulate a corrupt index by removing last byte of
  // latest segments file and make sure we get an
  // IOException trying to open the index:
  public void testSimulatedCorruptIndex1() throws IOException {
      BaseDirectoryWrapper dir = newDirectory();
      dir.setCheckIndexOnClose(false); // we are corrupting it!

      IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random())));

      // add 100 documents
      for (int i = 0; i < 100; i++) {
          addDoc(writer);
      }

      // close
      writer.close();

      long gen = SegmentInfos.getLastCommitGeneration(dir);
      assertTrue("segment generation should be > 0 but got " + gen, gen > 0);

      String fileNameIn = SegmentInfos.getLastCommitSegmentsFileName(dir);
      String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
                                                                 "",
                                                                 1+gen);
      IndexInput in = dir.openInput(fileNameIn, newIOContext(random()));
      IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random()));
      long length = in.length();
      for(int i=0;i<length-1;i++) {
        out.writeByte(in.readByte());
      }
      in.close();
      out.close();
      dir.deleteFile(fileNameIn);

      IndexReader reader = null;
      try {
        reader = DirectoryReader.open(dir);
        fail("reader did not hit IOException on opening a corrupt index");
      } catch (Exception e) {
        // expected: opening the corrupted index should fail
      }
      if (reader != null) {
        reader.close();
      }
      dir.close();
  }
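
Both testSimulatedCorruptIndex1 above and testSimulatedCorruptIndex2 below call an addDoc(writer) helper defined elsewhere in the full test class. A minimal sketch of such a helper, assuming a single unstored text field; the field name and value are assumptions:

  private void addDoc(IndexWriter writer) throws IOException {
    Document doc = new Document();
    // hypothetical field; the real helper lives in the full test class
    doc.add(newTextField("content", "aaa", Field.Store.NO));
    writer.addDocument(doc);
  }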

Examples of org.apache.lucene.store.BaseDirectoryWrapper

  // Simulate a corrupt index by removing one of the cfs
  // files and make sure we get an IOException trying to
  // open the index:
  public void testSimulatedCorruptIndex2() throws IOException {
    BaseDirectoryWrapper dir = newDirectory();
    dir.setCheckIndexOnClose(false); // we are corrupting it!
    IndexWriter writer = new IndexWriter(
        dir,
        newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
            .setMergePolicy(newLogMergePolicy(true))
            .setUseCompoundFile(true));
    MergePolicy lmp = writer.getConfig().getMergePolicy();
    // Force creation of CFS:
    lmp.setNoCFSRatio(1.0);
    lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);

    // add 100 documents
    for (int i = 0; i < 100; i++) {
      addDoc(writer);
    }

    // close
    writer.close();

    long gen = SegmentInfos.getLastCommitGeneration(dir);
    assertTrue("segment generation should be > 0 but got " + gen, gen > 0);

    String[] files = dir.listAll();
    boolean corrupted = false;
    for(int i=0;i<files.length;i++) {
      if (files[i].endsWith(".cfs")) {
        dir.deleteFile(files[i]);
        corrupted = true;
        break;
      }
    }
    assertTrue("failed to find cfs file to remove", corrupted);

    IndexReader reader = null;
    try {
      reader = DirectoryReader.open(dir);
      fail("reader did not hit IOException on opening a corrupt index");
    } catch (Exception e) {
      // expected: opening the corrupted index should fail
    }
    if (reader != null) {
      reader.close();
    }
    dir.close();
  }

Examples of org.apache.lucene.store.BaseDirectoryWrapper

    }
  }

  // LUCENE-4147
  public void testRollbackAndCommitWithThreads() throws Exception {
    final BaseDirectoryWrapper d = newDirectory();
    if (d instanceof MockDirectoryWrapper) {
      ((MockDirectoryWrapper)d).setPreventDoubleWrite(false);
    }

    final int threadCount = TestUtil.nextInt(random(), 2, 6);

    final AtomicReference<IndexWriter> writerRef = new AtomicReference<>();
    MockAnalyzer analyzer = new MockAnalyzer(random());
    analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH));

    writerRef.set(new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)));
    final LineFileDocs docs = new LineFileDocs(random());
    final Thread[] threads = new Thread[threadCount];
    final int iters = atLeast(100);
    final AtomicBoolean failed = new AtomicBoolean();
    final Lock rollbackLock = new ReentrantLock();
    final Lock commitLock = new ReentrantLock();
    for(int threadID=0;threadID<threadCount;threadID++) {
      threads[threadID] = new Thread() {
          @Override
          public void run() {
            for(int iter=0;iter<iters && !failed.get();iter++) {
              //final int x = random().nextInt(5);
              final int x = random().nextInt(3);
              try {
                switch(x) {
                case 0:
                  rollbackLock.lock();
                  if (VERBOSE) {
                    System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now rollback");
                  }
                  try {
                    writerRef.get().rollback();
                    if (VERBOSE) {
                      System.out.println("TEST: " + Thread.currentThread().getName() + ": rollback done; now open new writer");
                    }
                    writerRef.set(new IndexWriter(d, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))));
                  } finally {
                    rollbackLock.unlock();
                  }
                  break;
                case 1:
                  commitLock.lock();
                  if (VERBOSE) {
                    System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now commit");
                  }
                  try {
                    if (random().nextBoolean()) {
                      writerRef.get().prepareCommit();
                    }
                    writerRef.get().commit();
                  } catch (AlreadyClosedException ace) {
                    // ok
                  } catch (NullPointerException npe) {
                    // ok
                  } finally {
                    commitLock.unlock();
                  }
                  break;
                case 2:
                  if (VERBOSE) {
                    System.out.println("\nTEST: " + Thread.currentThread().getName() + ": now add");
                  }
                  try {
                    writerRef.get().addDocument(docs.nextDoc());
                  } catch (AlreadyClosedException ace) {
                    // ok
                  } catch (NullPointerException npe) {
                    // ok
                  } catch (AssertionError ae) {
                    // ok
                  }
                  break;
                }
              } catch (Throwable t) {
                failed.set(true);
                throw new RuntimeException(t);
              }
            }
          }
        };
      threads[threadID].start();
    }

    for(int threadID=0;threadID<threadCount;threadID++) {
      threads[threadID].join();
    }

    assertTrue(!failed.get());
    writerRef.get().close();
    d.close();
  }

Examples of org.apache.lucene.store.BaseDirectoryWrapper

      if (VERBOSE) {
        System.out.println("TEST: index " + unsupportedNames[i]);
      }
      File oldIndexDir = createTempDir(unsupportedNames[i]);
      TestUtil.unzip(getDataFile("unsupported." + unsupportedNames[i] + ".zip"), oldIndexDir);
      BaseDirectoryWrapper dir = newFSDirectory(oldIndexDir);
      // don't checkindex, these are intentionally not supported
      dir.setCheckIndexOnClose(false);

      IndexReader reader = null;
      IndexWriter writer = null;
      try {
        reader = DirectoryReader.open(dir);
        fail("DirectoryReader.open should not pass for "+unsupportedNames[i]);
      } catch (IndexFormatTooOldException e) {
        // pass
      } finally {
        if (reader != null) reader.close();
        reader = null;
      }

      try {
        writer = new IndexWriter(dir, newIndexWriterConfig(
          TEST_VERSION_CURRENT, new MockAnalyzer(random())));
        fail("IndexWriter creation should not pass for "+unsupportedNames[i]);
      } catch (IndexFormatTooOldException e) {
        // pass
        if (VERBOSE) {
          System.out.println("TEST: got expected exc:");
          e.printStackTrace(System.out);
        }
        // TODO: test *SOMEWHERE ELSE* that exc message includes a path=
      } finally {
        // we should fail to open IW, and so it should be null when we get here.
        // However, if the test fails (i.e., IW did not fail on open), we need
        // to close IW. However, if merges are run, IW may throw
        // IndexFormatTooOldException, and we don't want to mask the fail()
        // above, so close without waiting for merges.
        if (writer != null) {
          writer.close(false);
        }
        writer = null;
      }
     
      ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
      CheckIndex checker = new CheckIndex(dir);
      checker.setInfoStream(new PrintStream(bos, false, IOUtils.UTF_8));
      CheckIndex.Status indexStatus = checker.checkIndex();
      assertFalse(indexStatus.clean);
      assertTrue(bos.toString(IOUtils.UTF_8).contains(IndexFormatTooOldException.class.getName()));

      dir.close();
      TestUtil.rm(oldIndexDir);
    }
  }

Examples of org.apache.lucene.store.BaseDirectoryWrapper

    final int TERMS_PER_DOC = TestUtil.nextInt(random(), 100000, 1000000);

    List<BytesRef> savedTerms = null;

    BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BTerms"));
    //MockDirectoryWrapper dir = newFSDirectory(new File("/p/lucene/indices/2bindex"));
    if (dir instanceof MockDirectoryWrapper) {
      ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
    dir.setCheckIndexOnClose(false); // don't double-checkindex

    if (true) {

      IndexWriter w = new IndexWriter(dir,
                                      new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
                                      .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
                                      .setRAMBufferSizeMB(256.0)
                                      .setMergeScheduler(new ConcurrentMergeScheduler())
                                      .setMergePolicy(newLogMergePolicy(false, 10))
                                      .setOpenMode(IndexWriterConfig.OpenMode.CREATE));

      MergePolicy mp = w.getConfig().getMergePolicy();
      if (mp instanceof LogByteSizeMergePolicy) {
        // 1 petabyte:
        ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024*1024*1024);
      }

      Document doc = new Document();
      final MyTokenStream ts = new MyTokenStream(random(), TERMS_PER_DOC);

      FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
      customType.setIndexOptions(IndexOptions.DOCS_ONLY);
      customType.setOmitNorms(true);
      Field field = new Field("field", ts, customType);
      doc.add(field);
      //w.setInfoStream(System.out);
      final int numDocs = (int) (TERM_COUNT/TERMS_PER_DOC);

      System.out.println("TERMS_PER_DOC=" + TERMS_PER_DOC);
      System.out.println("numDocs=" + numDocs);

      for(int i=0;i<numDocs;i++) {
        final long t0 = System.currentTimeMillis();
        w.addDocument(doc);
        System.out.println(i + " of " + numDocs + " " + (System.currentTimeMillis()-t0) + " msec");
      }
      savedTerms = ts.savedTerms;

      System.out.println("TEST: full merge");
      w.forceMerge(1);
      System.out.println("TEST: close writer");
      w.close();
    }

    System.out.println("TEST: open reader");
    final IndexReader r = DirectoryReader.open(dir);
    if (savedTerms == null) {
      savedTerms = findTerms(r);
    }
    final int numSavedTerms = savedTerms.size();
    final List<BytesRef> bigOrdTerms = new ArrayList<>(savedTerms.subList(numSavedTerms-10, numSavedTerms));
    System.out.println("TEST: test big ord terms...");
    testSavedTerms(r, bigOrdTerms);
    System.out.println("TEST: test all saved terms...");
    testSavedTerms(r, savedTerms);
    r.close();

    System.out.println("TEST: now CheckIndex...");
    CheckIndex.Status status = TestUtil.checkIndex(dir);
    final long tc = status.segmentInfos.get(0).termIndexStatus.termCount;
    assertTrue("count " + tc + " is not > " + Integer.MAX_VALUE, tc > Integer.MAX_VALUE);

    dir.close();
    System.out.println("TEST: done!");
  }

Examples of org.apache.lucene.store.BaseDirectoryWrapper

@Monster("very slow")
public class Test2BSortedDocValues extends LuceneTestCase {
 
  // indexes Integer.MAX_VALUE docs with a fixed-length (2 byte) sorted doc values field
  public void testFixedSorted() throws Exception {
    BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BFixedSorted"));
    if (dir instanceof MockDirectoryWrapper) {
      ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
   
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
        .setRAMBufferSizeMB(256.0)
        .setMergeScheduler(new ConcurrentMergeScheduler())
        .setMergePolicy(newLogMergePolicy(false, 10))
        .setOpenMode(IndexWriterConfig.OpenMode.CREATE));

    Document doc = new Document();
    byte bytes[] = new byte[2];
    BytesRef data = new BytesRef(bytes);
    SortedDocValuesField dvField = new SortedDocValuesField("dv", data);
    doc.add(dvField);
   
    for (int i = 0; i < Integer.MAX_VALUE; i++) {
      bytes[0] = (byte)(i >> 8);
      bytes[1] = (byte) i;
      w.addDocument(doc);
      if (i % 100000 == 0) {
        System.out.println("indexed: " + i);
        System.out.flush();
      }
    }
   
    w.forceMerge(1);
    w.close();
   
    System.out.println("verifying...");
    System.out.flush();
   
    DirectoryReader r = DirectoryReader.open(dir);
    int expectedValue = 0;
    for (AtomicReaderContext context : r.leaves()) {
      AtomicReader reader = context.reader();
      BinaryDocValues dv = reader.getSortedDocValues("dv");
      for (int i = 0; i < reader.maxDoc(); i++) {
        bytes[0] = (byte)(expectedValue >> 8);
        bytes[1] = (byte) expectedValue;
        final BytesRef term = dv.get(i);
        assertEquals(data, term);
        expectedValue++;
      }
    }
   
    r.close();
    dir.close();
  }

Examples of org.apache.lucene.store.BaseDirectoryWrapper

    dir.close();
  }
 
  // indexes Integer.MAX_VALUE docs, each with a distinct 4-byte sorted doc values value (2B+ ords)
  public void test2BOrds() throws Exception {
    BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BOrds"));
    if (dir instanceof MockDirectoryWrapper) {
      ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
    }
   
    IndexWriter w = new IndexWriter(dir,
        new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random()))
        .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
        .setRAMBufferSizeMB(256.0)
        .setMergeScheduler(new ConcurrentMergeScheduler())
        .setMergePolicy(newLogMergePolicy(false, 10))
        .setOpenMode(IndexWriterConfig.OpenMode.CREATE));

    Document doc = new Document();
    byte bytes[] = new byte[4];
    BytesRef data = new BytesRef(bytes);
    SortedDocValuesField dvField = new SortedDocValuesField("dv", data);
    doc.add(dvField);
   
    for (int i = 0; i < Integer.MAX_VALUE; i++) {
      bytes[0] = (byte)(i >> 24);
      bytes[1] = (byte)(i >> 16);
      bytes[2] = (byte)(i >> 8);
      bytes[3] = (byte) i;
      w.addDocument(doc);
      if (i % 100000 == 0) {
        System.out.println("indexed: " + i);
        System.out.flush();
      }
    }
   
    w.forceMerge(1);
    w.close();
   
    System.out.println("verifying...");
    System.out.flush();
   
    DirectoryReader r = DirectoryReader.open(dir);
    int counter = 0;
    for (AtomicReaderContext context : r.leaves()) {
      AtomicReader reader = context.reader();
      BytesRef scratch = new BytesRef();
      BinaryDocValues dv = reader.getSortedDocValues("dv");
      for (int i = 0; i < reader.maxDoc(); i++) {
        bytes[0] = (byte) (counter >> 24);
        bytes[1] = (byte) (counter >> 16);
        bytes[2] = (byte) (counter >> 8);
        bytes[3] = (byte) counter;
        counter++;
        final BytesRef term = dv.get(i);
        assertEquals(data, term);
      }
    }
   
    r.close();
    dir.close();
  }

Examples of org.apache.lucene.store.BaseDirectoryWrapper

 
  /*
   * simple test that ensures we get the expected exceptions
   */
  public void testAddIndexMissingCodec() throws IOException {
    BaseDirectoryWrapper toAdd = newDirectory();
    // Disable checkIndex, else we get an exception because
    // of the unregistered codec:
    toAdd.setCheckIndexOnClose(false);
    {
      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
          new MockAnalyzer(random()));
      conf.setCodec(new UnRegisteredCodec());
      IndexWriter w = new IndexWriter(toAdd, conf);
      Document doc = new Document();
      FieldType customType = new FieldType();
      customType.setIndexed(true);
      doc.add(newField("foo", "bar", customType));
      w.addDocument(doc);
      w.close();
    }

    {
      Directory dir = newDirectory();
      IndexWriterConfig conf = newIndexWriterConfig(TEST_VERSION_CURRENT,
          new MockAnalyzer(random()));
      conf.setCodec(TestUtil.alwaysPostingsFormat(new Pulsing41PostingsFormat(1 + random().nextInt(20))));
      IndexWriter w = new IndexWriter(dir, conf);
      try {
        w.addIndexes(toAdd);
        fail("no such codec");
      } catch (IllegalArgumentException ex) {
        // expected
      }
      w.close();
      IndexReader open = DirectoryReader.open(dir);
      assertEquals(0, open.numDocs());
      open.close();
      dir.close();
    }

    try {
      DirectoryReader.open(toAdd);
      fail("no such codec");
    } catch (IllegalArgumentException ex) {
      // expected
    }
    toAdd.close();
  }
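
The UnRegisteredCodec used above is declared elsewhere in the full test class. A minimal sketch, assuming it is a FilterCodec registered under a name the codec SPI cannot resolve; the name and the delegate codec are assumptions:

  private static final class UnRegisteredCodec extends FilterCodec {
    public UnRegisteredCodec() {
      // "NotRegistered" cannot be resolved via Codec.forName(), which is why
      // addIndexes() and DirectoryReader.open() above fail with IllegalArgumentException
      super("NotRegistered", Codec.getDefault());
    }
  }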