Java Code Examples for org.apache.lucene.util.TestUtil#addIndexesSlowly()

The following examples show how to use org.apache.lucene.util.TestUtil#addIndexesSlowly(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: TestIndexWriterReader.java    From lucene-solr with Apache License 2.0 6 votes vote down vote up
void doBody(int j, Directory[] dirs) throws Throwable {
  // Rotate through four index-mutating operations based on the iteration number.
  final int op = j % 4;
  if (op == 0) {
    // addIndexes followed by a full merge down to a single segment.
    mainWriter.addIndexes(dirs);
    mainWriter.forceMerge(1);
  } else if (op == 1) {
    // Plain addIndexes; record that we performed one.
    mainWriter.addIndexes(dirs);
    numaddIndexes.incrementAndGet();
  } else if (op == 2) {
    // Add the same content via readers instead of directories.
    TestUtil.addIndexesSlowly(mainWriter, readers);
  } else if (op == 3) {
    mainWriter.commit();
  }
  // Track the expected running document count regardless of which path ran.
  count.addAndGet(dirs.length * NUM_INIT_DOCS);
}
 
Example 2
Source File: TestDocValuesIndexing.java    From lucene-solr with Apache License 2.0 6 votes vote down vote up
public void testTypeChangeViaAddIndexesIR2() throws Exception {
  // Build a one-document source index whose "dv" field uses NUMERIC doc values.
  Directory sourceDir = newDirectory();
  IndexWriterConfig sourceConf = newIndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter sourceWriter = new IndexWriter(sourceDir, sourceConf);
  Document numericDoc = new Document();
  numericDoc.add(new NumericDocValuesField("dv", 0L));
  sourceWriter.addDocument(numericDoc);
  sourceWriter.close();

  // Import that index into a fresh writer via a reader-based (slow) addIndexes.
  Directory targetDir = newDirectory();
  IndexWriterConfig targetConf = newIndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter targetWriter = new IndexWriter(targetDir, targetConf);
  DirectoryReader sourceReader = DirectoryReader.open(sourceDir);
  TestUtil.addIndexesSlowly(targetWriter, sourceReader);
  sourceReader.close();

  // The imported field type is sticky: adding a SORTED doc-values document
  // for the same field must now be rejected.
  Document sortedDoc = new Document();
  sortedDoc.add(new SortedDocValuesField("dv", new BytesRef("foo")));
  expectThrows(IllegalArgumentException.class, () -> {
    targetWriter.addDocument(sortedDoc);
  });

  targetWriter.close();
  targetDir.close();
  sourceDir.close();
}
 
Example 3
Source File: BaseIndexFileFormatTestCase.java    From lucene-solr with Apache License 2.0 5 votes vote down vote up
/** Verifies that bulk merge is stable: rebuilding the same index through a
 *  reader-based addIndexes must produce identical per-extension disk usage,
 *  i.e. merging does not accumulate useless data over runs. */
public void testMergeStability() throws Exception {
  assumeTrue("merge is not stable", mergeIsStable());
  Directory firstDir = applyCreatedVersionMajor(newDirectory());

  // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio
  // do not use RIW which will change things up!
  MergePolicy firstMp = newTieredMergePolicy();
  firstMp.setNoCFSRatio(0);
  IndexWriterConfig firstCfg = new IndexWriterConfig(new MockAnalyzer(random())).setUseCompoundFile(false).setMergePolicy(firstMp);
  IndexWriter firstWriter = new IndexWriter(firstDir, firstCfg);
  final int docCount = atLeast(500);
  int added = 0;
  while (added < docCount) {
    Document d = new Document();
    addRandomFields(d);
    firstWriter.addDocument(d);
    added++;
  }
  // Collapse to a single segment and persist it.
  firstWriter.forceMerge(1);
  firstWriter.commit();
  firstWriter.close();
  DirectoryReader firstReader = DirectoryReader.open(firstDir);

  // Rebuild the same content into a second directory via a slow addIndexes.
  Directory secondDir = applyCreatedVersionMajor(newDirectory());
  MergePolicy secondMp = newTieredMergePolicy();
  secondMp.setNoCFSRatio(0);
  IndexWriterConfig secondCfg = new IndexWriterConfig(new MockAnalyzer(random())).setUseCompoundFile(false).setMergePolicy(secondMp);
  IndexWriter secondWriter = new IndexWriter(secondDir, secondCfg);
  TestUtil.addIndexesSlowly(secondWriter, firstReader);

  secondWriter.commit();
  secondWriter.close();

  // Both directories must use exactly the same bytes per file extension.
  assertEquals(bytesUsedByExtension(firstDir), bytesUsedByExtension(secondDir));

  firstReader.close();
  firstDir.close();
  secondDir.close();
}
 
Example 4
Source File: BasePointsFormatTestCase.java    From lucene-solr with Apache License 2.0 5 votes vote down vote up
private void switchIndex(RandomIndexWriter w, Directory dir, RandomIndexWriter saveW) throws IOException {
  // Randomly choose how to transfer the current index into saveW.
  final boolean viaReaders = random().nextBoolean();
  if (viaReaders) {
    // Add via readers:
    try (DirectoryReader r = w.getReader()) {
      if (random().nextBoolean()) {
        // Add via CodecReaders:
        List<CodecReader> codecReaders = new ArrayList<>();
        for (LeafReaderContext ctx : r.leaves()) {
          codecReaders.add((CodecReader) ctx.reader());
        }
        if (VERBOSE) {
          System.out.println("TEST: now use addIndexes(CodecReader[]) to switch writers");
        }
        saveW.addIndexes(codecReaders.toArray(new CodecReader[codecReaders.size()]));
      } else {
        if (VERBOSE) {
          System.out.println("TEST: now use TestUtil.addIndexesSlowly(DirectoryReader[]) to switch writers");
        }
        TestUtil.addIndexesSlowly(saveW.w, r);
      }
    }
  } else {
    // Add via directory:
    if (VERBOSE) {
      System.out.println("TEST: now use addIndexes(Directory[]) to switch writers");
    }
    w.close();
    saveW.addIndexes(new Directory[] {dir});
  }
  // Second close in the reader path is a no-op; IndexWriter.close is idempotent.
  w.close();
  dir.close();
}
 
Example 5
Source File: TestIndexWriter.java    From lucene-solr with Apache License 2.0 5 votes vote down vote up
public void testDeadlock() throws Exception {
  Directory mainDir = newDirectory();
  // Tiny RAM buffer forces flushes so the main index ends up with 2 segments.
  IndexWriter mainWriter = new IndexWriter(mainDir, newIndexWriterConfig(new MockAnalyzer(random()))
                                              .setMaxBufferedDocs(2));
  Document doc = new Document();

  // Stored text field carrying full term vectors.
  FieldType vectorsType = new FieldType(TextField.TYPE_STORED);
  vectorsType.setStoreTermVectors(true);
  vectorsType.setStoreTermVectorPositions(true);
  vectorsType.setStoreTermVectorOffsets(true);

  doc.add(newField("content", "aaa bbb ccc ddd eee fff ggg hhh iii", vectorsType));
  for (int i = 0; i < 3; i++) {
    mainWriter.addDocument(doc);
  }
  mainWriter.commit();
  // index has 2 segments

  // Single-document secondary index.
  Directory otherDir = newDirectory();
  IndexWriter otherWriter = new IndexWriter(otherDir, newIndexWriterConfig(new MockAnalyzer(random())));
  otherWriter.addDocument(doc);
  otherWriter.close();

  // Add the same reader twice: 3 existing docs + 2 * 1 imported = 5.
  DirectoryReader otherReader = DirectoryReader.open(otherDir);
  TestUtil.addIndexesSlowly(mainWriter, otherReader, otherReader);
  mainWriter.close();

  IndexReader merged = DirectoryReader.open(mainDir);
  assertEquals(5, merged.numDocs());
  merged.close();

  otherReader.close();

  otherDir.close();
  mainDir.close();
}
 
Example 6
Source File: TestAddIndexes.java    From lucene-solr with Apache License 2.0 5 votes vote down vote up
@Override
void doBody(int j, Directory[] dirs) throws Throwable {
  // Cycle through five writer operations; hoist the thread name for logging.
  final String threadName = Thread.currentThread().getName();
  final int op = j % 5;
  if (op == 0) {
    // addIndexes then collapse everything to one segment.
    if (VERBOSE) {
      System.out.println("TEST: " + threadName + ": addIndexes + full merge");
    }
    writer2.addIndexes(dirs);
    writer2.forceMerge(1);
  } else if (op == 1) {
    if (VERBOSE) {
      System.out.println("TEST: " + threadName + ": addIndexes");
    }
    writer2.addIndexes(dirs);
  } else if (op == 2) {
    // Reader-based variant of addIndexes.
    if (VERBOSE) {
      System.out.println("TEST: " + threadName + ": addIndexes(LR[])");
    }
    TestUtil.addIndexesSlowly(writer2, readers);
  } else if (op == 3) {
    if (VERBOSE) {
      System.out.println("TEST: " + threadName + ": full merge");
    }
    writer2.forceMerge(1);
  } else if (op == 4) {
    if (VERBOSE) {
      System.out.println("TEST: " + threadName + ": commit");
    }
    writer2.commit();
  }
}
 
Example 7
Source File: TestAddIndexes.java    From lucene-solr with Apache License 2.0 5 votes vote down vote up
/** Ensures that after a reader-based addIndexes into a CFS-forcing writer,
 *  only one compound segment remains — no stray non-CFS leftovers. */
public void testNonCFSLeftovers() throws Exception {
  // Two tiny single-document source indexes (non-CFS by default).
  Directory[] dirs = new Directory[2];
  for (int i = 0; i < dirs.length; i++) {
    dirs[i] = new ByteBuffersDirectory();
    IndexWriter w = new IndexWriter(dirs[i], new IndexWriterConfig(new MockAnalyzer(random())));
    Document d = new Document();
    FieldType customType = new FieldType(TextField.TYPE_STORED);
    customType.setStoreTermVectors(true);
    d.add(new Field("c", "v", customType));
    w.addDocument(d);
    w.close();
  }
  
  DirectoryReader[] readers = new DirectoryReader[] { DirectoryReader.open(dirs[0]), DirectoryReader.open(dirs[1]) };
  
  MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory());
  IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(true));
  MergePolicy lmp = conf.getMergePolicy();
  // Force creation of CFS:
  lmp.setNoCFSRatio(1.0);
  lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY);
  IndexWriter w3 = new IndexWriter(dir, conf);
  TestUtil.addIndexesSlowly(w3, readers);
  w3.close();
  // we should now see segments_X,
  // _Y.cfs,_Y.cfe, _Z.si
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  assertEquals("Only one compound segment should exist", 1, sis.size());
  assertTrue(sis.info(0).info.getUseCompoundFile());
  // Fix: release the source readers and directories that were previously leaked.
  for (DirectoryReader r : readers) {
    r.close();
  }
  for (Directory d : dirs) {
    d.close();
  }
  dir.close();
}
 
Example 8
Source File: Test2BPostingsBytes.java    From lucene-solr with Apache License 2.0 4 votes vote down vote up
public void test() throws Exception {
  // Guard: with a CompressingCodec, require maxDocsPerChunk * blockSize >= 16,
  // otherwise this test can trigger OOM even with -Dtests.heapsize=30g.
  IndexWriterConfig defaultConfig = new IndexWriterConfig(null);
  Codec defaultCodec = defaultConfig.getCodec();
  if ((new IndexWriterConfig(null)).getCodec() instanceof CompressingCodec) {
    // Parse chunk/block parameters out of the codec's toString() output.
    Pattern regex = Pattern.compile("maxDocsPerChunk=(\\d+), blockSize=(\\d+)");
    Matcher matcher = regex.matcher(defaultCodec.toString());
    assertTrue("Unexpected CompressingCodec toString() output: " + defaultCodec.toString(), matcher.find());
    int maxDocsPerChunk = Integer.parseInt(matcher.group(1));
    int blockSize = Integer.parseInt(matcher.group(2));
    int product = maxDocsPerChunk * blockSize;
    assumeTrue(defaultCodec.getName() + " maxDocsPerChunk (" + maxDocsPerChunk
        + ") * blockSize (" + blockSize + ") < 16 - this can trigger OOM with -Dtests.heapsize=30g",
        product >= 16);
  }

  // Stage 1: build a 1000-doc index with huge per-doc postings.
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("2BPostingsBytes1"));
  if (dir instanceof MockDirectoryWrapper) {
    // Throttling would make this already-slow test unbearable.
    ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  
  IndexWriter w = new IndexWriter(dir,
      new IndexWriterConfig(new MockAnalyzer(random()))
      .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH)
      .setRAMBufferSizeMB(256.0)
      .setMergeScheduler(new ConcurrentMergeScheduler())
      .setMergePolicy(newLogMergePolicy(false, 10))
      .setOpenMode(IndexWriterConfig.OpenMode.CREATE)
      .setCodec(TestUtil.getDefaultCodec()));

  // Raise the max merged-segment size so forceMerge(1) is never refused.
  MergePolicy mp = w.getConfig().getMergePolicy();
  if (mp instanceof LogByteSizeMergePolicy) {
   // 1 petabyte:
   ((LogByteSizeMergePolicy) mp).setMaxMergeMB(1024*1024*1024);
  }

  Document doc = new Document();
  // Frequencies only, no norms — keeps the postings as small as possible
  // while still exercising the freq-encoding path.
  FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
  ft.setIndexOptions(IndexOptions.DOCS_AND_FREQS);
  ft.setOmitNorms(true);
  MyTokenStream tokenStream = new MyTokenStream();
  Field field = new Field("field", tokenStream, ft);
  doc.add(field);
  
  final int numDocs = 1000;
  for (int i = 0; i < numDocs; i++) {
    // Alternate the token count so the block postings format cannot
    // apply its same-length shortcut on every document.
    if (i % 2 == 1) { // trick blockPF's little optimization
      tokenStream.n = 65536;
    } else {
      tokenStream.n = 65537;
    }
    w.addDocument(doc);
  }
  w.forceMerge(1);
  w.close();
  
  // Stage 2: multiply to ~1M docs by adding the 1000-doc reader 1000 times.
  DirectoryReader oneThousand = DirectoryReader.open(dir);
  DirectoryReader subReaders[] = new DirectoryReader[1000];
  Arrays.fill(subReaders, oneThousand);
  BaseDirectoryWrapper dir2 = newFSDirectory(createTempDir("2BPostingsBytes2"));
  if (dir2 instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper)dir2).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w2 = new IndexWriter(dir2,
      new IndexWriterConfig(null));
  TestUtil.addIndexesSlowly(w2, subReaders);
  w2.forceMerge(1);
  w2.close();
  oneThousand.close();
  
  // Stage 3: multiply again (x2000) to push total postings bytes past 2B.
  DirectoryReader oneMillion = DirectoryReader.open(dir2);
  subReaders = new DirectoryReader[2000];
  Arrays.fill(subReaders, oneMillion);
  BaseDirectoryWrapper dir3 = newFSDirectory(createTempDir("2BPostingsBytes3"));
  if (dir3 instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper)dir3).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  IndexWriter w3 = new IndexWriter(dir3,
      new IndexWriterConfig(null));
  TestUtil.addIndexesSlowly(w3, subReaders);
  w3.forceMerge(1);
  w3.close();
  oneMillion.close();
  
  dir.close();
  dir2.close();
  dir3.close();
}
 
Example 9
Source File: TestAddIndexes.java    From lucene-solr with Apache License 2.0 4 votes vote down vote up
@Override
void doBody(int j, Directory[] dirs) throws Throwable {
  // Cycle through five writer operations; hoist the thread name for logging.
  final String threadName = Thread.currentThread().getName();
  final int op = j % 5;
  if (op == 0) {
    if (VERBOSE) {
      System.out.println(threadName + ": TEST: addIndexes(Dir[]) then full merge");
    }
    writer2.addIndexes(dirs);
    try {
      writer2.forceMerge(1);
    } catch (IOException ioe) {
      // An aborted merge is expected here; anything else is a real failure.
      if (!(ioe.getCause() instanceof MergePolicy.MergeAbortedException)) {
        throw ioe;
      }
    }
  } else if (op == 1) {
    if (VERBOSE) {
      System.out.println(threadName + ": TEST: addIndexes(Dir[])");
    }
    writer2.addIndexes(dirs);
  } else if (op == 2) {
    // Reader-based variant of addIndexes.
    if (VERBOSE) {
      System.out.println(threadName + ": TEST: addIndexes(LeafReader[])");
    }
    TestUtil.addIndexesSlowly(writer2, readers);
  } else if (op == 3) {
    if (VERBOSE) {
      System.out.println(threadName + ": TEST: addIndexes(Dir[]) then maybeMerge");
    }
    writer2.addIndexes(dirs);
    writer2.maybeMerge();
  } else if (op == 4) {
    if (VERBOSE) {
      System.out.println(threadName + ": TEST: commit");
    }
    writer2.commit();
  }
}