org.apache.lucene.util.InfoStream Java Examples

The following examples show how to use org.apache.lucene.util.InfoStream. They are drawn from open source projects; the source file and project for each example are noted in its header.
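
InfoStream is Lucene's hook for low-level diagnostic logging from IndexWriter and its helper components. As a minimal orientation sketch before the examples (not taken from any of the projects below; StandardAnalyzer is just a placeholder analyzer), enabling and suppressing it looks like this:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;

// Sketch: route IndexWriter diagnostics to System.out.
IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
config.setInfoStream(new PrintStreamInfoStream(System.out));
// setInfoStream rejects null (see Example #18); to silence output, pass the sentinel:
config.setInfoStream(InfoStream.NO_OUTPUT);
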
Example #1
Source File: ReaderPool.java    From lucene-solr with Apache License 2.0
ReaderPool(Directory directory, Directory originalDirectory, SegmentInfos segmentInfos,
           FieldInfos.FieldNumbers fieldNumbers, LongSupplier completedDelGenSupplier, InfoStream infoStream,
           String softDeletesField, StandardDirectoryReader reader) throws IOException {
  this.directory = directory;
  this.originalDirectory = originalDirectory;
  this.segmentInfos = segmentInfos;
  this.fieldNumbers = fieldNumbers;
  this.completedDelGenSupplier = completedDelGenSupplier;
  this.infoStream = infoStream;
  this.softDeletesField = softDeletesField;
  if (reader != null) {
    // Pre-enroll all segment readers into the reader pool; this is necessary so
    // any in-memory NRT live docs are correctly carried over, and so NRT readers
    // pulled from this IW share the same segment reader:
    List<LeafReaderContext> leaves = reader.leaves();
    assert segmentInfos.size() == leaves.size();
    for (int i=0;i<leaves.size();i++) {
      LeafReaderContext leaf = leaves.get(i);
      SegmentReader segReader = (SegmentReader) leaf.reader();
      SegmentReader newReader = new SegmentReader(segmentInfos.info(i), segReader, segReader.getLiveDocs(),
          segReader.getHardLiveDocs(), segReader.numDocs(), true);
      readerMap.put(newReader.getOriginalSegmentInfo(), new ReadersAndUpdates(segmentInfos.getIndexCreatedVersionMajor(),
          newReader, newPendingDeletes(newReader, newReader.getOriginalSegmentInfo())));
    }
  }
}
 
Example #2
Source File: LiveIndexWriterConfig.java    From lucene-solr with Apache License 2.0
LiveIndexWriterConfig(Analyzer analyzer) {
  this.analyzer = analyzer;
  ramBufferSizeMB = IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB;
  maxBufferedDocs = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
  mergedSegmentWarmer = null;
  delPolicy = new KeepOnlyLastCommitDeletionPolicy();
  commit = null;
  useCompoundFile = IndexWriterConfig.DEFAULT_USE_COMPOUND_FILE_SYSTEM;
  openMode = OpenMode.CREATE_OR_APPEND;
  similarity = IndexSearcher.getDefaultSimilarity();
  mergeScheduler = new ConcurrentMergeScheduler();
  indexingChain = DocumentsWriterPerThread.defaultIndexingChain;
  codec = Codec.getDefault();
  if (codec == null) {
    throw new NullPointerException();
  }
  infoStream = InfoStream.getDefault();
  mergePolicy = new TieredMergePolicy();
  flushPolicy = new FlushByRamOrCountsPolicy();
  readerPooling = IndexWriterConfig.DEFAULT_READER_POOLING;
  perThreadHardLimitMB = IndexWriterConfig.DEFAULT_RAM_PER_THREAD_HARD_LIMIT_MB;
  maxCommitMergeWaitMillis = IndexWriterConfig.DEFAULT_MAX_COMMIT_MERGE_WAIT_MILLIS;
}
 
Example #3
Source File: DocumentsWriterDeleteQueue.java    From lucene-solr with Apache License 2.0
private DocumentsWriterDeleteQueue(InfoStream infoStream, long generation, long startSeqNo, LongSupplier previousMaxSeqId) {
  this.infoStream = infoStream;
  this.globalBufferedUpdates = new BufferedUpdates("global");
  this.generation = generation;
  this.nextSeqNo = new AtomicLong(startSeqNo);
  this.startSeqNo = startSeqNo;
  this.previousMaxSeqId = previousMaxSeqId;
  long value = previousMaxSeqId.getAsLong();
  assert value <= startSeqNo : "illegal max sequence ID: " + value + " start was: " + startSeqNo;
  /*
   * we use a sentinel instance as our initial tail. No slice will ever try to
   * apply this tail since the head is always omitted.
   */
  tail = new Node<>(null); // sentinel
  globalSlice = new DeleteSlice(tail);
}
 
Example #4
Source File: TestIndexFileDeleter.java    From lucene-solr with Apache License 2.0
public void testSegmentsInflation() throws IOException {
  MockDirectoryWrapper dir = newMockDirectory();
  dir.setCheckIndexOnClose(false); // TODO: allow falling back more than one commit
  
  // empty commit
  new IndexWriter(dir, new IndexWriterConfig(null)).close();   
  
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  assertEquals(1, sis.getGeneration());
  
  // add trash commit
  dir.createOutput(IndexFileNames.SEGMENTS + "_2", IOContext.DEFAULT).close();
  
  // ensure inflation
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(2, sis.getGeneration());
  
  // add another trash commit
  dir.createOutput(IndexFileNames.SEGMENTS + "_4", IOContext.DEFAULT).close();
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(4, sis.getGeneration());

  dir.close();
}
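
This test passes InfoStream.getDefault() through to inflateGens. As a side note (a sketch, not part of the test), the process-wide default stream can be replaced, which is why getDefault() appears throughout these examples:

import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.PrintStreamInfoStream;

// Sketch: make every component that asks for the default InfoStream verbose.
// Out of the box the default is InfoStream.NO_OUTPUT, so getDefault() is silent.
InfoStream.setDefault(new PrintStreamInfoStream(System.out));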
 
Example #5
Source File: TestIndexFileDeleter.java    From lucene-solr with Apache License 2.0
public void testGenerationInflation() throws IOException {
  MockDirectoryWrapper dir = newMockDirectory();
  
  // initial commit
  IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
  iw.addDocument(new Document());
  iw.commit();
  iw.close();   
  
  // no deletes: start at 1
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  assertEquals(1, sis.info(0).getNextDelGen());
  
  // no inflation
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(1, sis.info(0).getNextDelGen());
  
  // add trash per-segment deletes file
  dir.createOutput(IndexFileNames.fileNameFromGeneration("_0", "del", 2), IOContext.DEFAULT).close();
  
  // ensure inflation
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(3, sis.info(0).getNextDelGen());
  
  dir.close();
}
 
Example #6
Source File: TestIndexFileDeleter.java    From lucene-solr with Apache License 2.0
public void testTrashyFile() throws IOException {
  MockDirectoryWrapper dir = newMockDirectory();
  dir.setCheckIndexOnClose(false); // TODO: maybe handle such trash better elsewhere...
  
  // empty commit
  new IndexWriter(dir, new IndexWriterConfig(null)).close();   
  
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  assertEquals(1, sis.getGeneration());
  
  // add trash file
  dir.createOutput(IndexFileNames.SEGMENTS + "_", IOContext.DEFAULT).close();
  
  // no inflation
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(1, sis.getGeneration());

  dir.close();
}
 
Example #7
Source File: TestIndexFileDeleter.java    From lucene-solr with Apache License 2.0
public void testTrashyGenFile() throws IOException {
  MockDirectoryWrapper dir = newMockDirectory();
  
  // initial commit
  IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
  iw.addDocument(new Document());
  iw.commit();
  iw.close();   
  
  // no deletes: start at 1
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  assertEquals(1, sis.info(0).getNextDelGen());
  
  // add trash file
  dir.createOutput("_1_A", IOContext.DEFAULT).close();
  
  // no inflation
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(1, sis.info(0).getNextDelGen());

  dir.close();
}
 
Example #8
Source File: IndexReplicationHandler.java    From lucene-solr with Apache License 2.0
/**
 * Constructor with the given index directory and callback to notify when the
 * indexes were updated.
 */
public IndexReplicationHandler(Directory indexDir, Callable<Boolean> callback) throws IOException {
  this.callback = callback;
  this.indexDir = indexDir;
  currentRevisionFiles = null;
  currentVersion = null;
  if (DirectoryReader.indexExists(indexDir)) {
    final List<IndexCommit> commits = DirectoryReader.listCommits(indexDir);
    final IndexCommit commit = commits.get(commits.size() - 1);
    currentRevisionFiles = IndexRevision.revisionFiles(commit);
    currentVersion = IndexRevision.revisionVersion(commit);
    final InfoStream infoStream = InfoStream.getDefault();
    if (infoStream.isEnabled(INFO_STREAM_COMPONENT)) {
      infoStream.message(INFO_STREAM_COMPONENT, "constructor(): currentVersion=" + currentVersion
          + " currentRevisionFiles=" + currentRevisionFiles);
      infoStream.message(INFO_STREAM_COMPONENT, "constructor(): commit=" + commit);
    }
  }
}
 
Example #9
Source File: SolrIndexConfig.java    From lucene-solr with Apache License 2.0
@Override
public Map<String, Object> toMap(Map<String, Object> map) {
  Map<String, Object> m = Utils.makeMap("useCompoundFile", useCompoundFile,
      "maxBufferedDocs", maxBufferedDocs,
      "ramBufferSizeMB", ramBufferSizeMB,
      "ramPerThreadHardLimitMB", ramPerThreadHardLimitMB,
      "writeLockTimeout", writeLockTimeout,
      "lockType", lockType,
      "infoStreamEnabled", infoStream != InfoStream.NO_OUTPUT);
  if (mergeSchedulerInfo != null) {
    m.put("mergeScheduler", mergeSchedulerInfo);
  }
  if (metricsInfo != null) {
    m.put("metrics", metricsInfo);
  }
  if (mergePolicyFactoryInfo != null) {
    m.put("mergePolicyFactory", mergePolicyFactoryInfo);
  }
  if (mergedSegmentWarmerInfo != null) {
    m.put("mergedSegmentWarmer", mergedSegmentWarmerInfo);
  }
  return m;
}
 
Example #10
Source File: CloseIndexTask.java    From lucene-solr with Apache License 2.0
@Override
public int doLogic() throws IOException {
  IndexWriter iw = getRunData().getIndexWriter();
  if (iw != null) {
    // If infoStream was set to output to a file, close it.
    InfoStream infoStream = iw.getConfig().getInfoStream();
    if (infoStream != null) {
      infoStream.close();
    }
    if (doWait == false) {
      iw.commit();
      iw.rollback();
    } else {
      iw.close();
    }
    getRunData().setIndexWriter(null);
  }
  return 1;
}
 
Example #11
Source File: IndexUpgrader.java    From lucene-solr with Apache License 2.0
/** Creates index upgrader on the given directory, using an {@link IndexWriter} using the given
 * {@code matchVersion}. You have the possibility to upgrade indexes with multiple commit points by removing
 * all older ones. If {@code infoStream} is not {@code null}, all logging output will be sent to this stream. */
public IndexUpgrader(Directory dir, InfoStream infoStream, boolean deletePriorCommits) {
  this(dir, new IndexWriterConfig(null), deletePriorCommits);
  if (null != infoStream) {
    this.iwc.setInfoStream(infoStream);
  }
}
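
A short usage sketch for this constructor follows; the index path is a placeholder, and upgrade() is the IndexWriter-driven rewrite this class exists to perform:

import java.nio.file.Paths;
import org.apache.lucene.index.IndexUpgrader;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.PrintStreamInfoStream;

// Sketch: upgrade an index in place, logging progress to System.out and
// deleting prior commits ("/path/to/index" is a placeholder path).
try (Directory dir = FSDirectory.open(Paths.get("/path/to/index"))) {
  new IndexUpgrader(dir, new PrintStreamInfoStream(System.out), true).upgrade();
}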
 
Example #12
Source File: SegmentWriteState.java    From lucene-solr with Apache License 2.0
/**
 * Constructor which takes segment suffix.
 * 
 * @see #SegmentWriteState(InfoStream, Directory, SegmentInfo, FieldInfos,
 *      BufferedUpdates, IOContext)
 */
public SegmentWriteState(InfoStream infoStream, Directory directory, SegmentInfo segmentInfo, FieldInfos fieldInfos,
    BufferedUpdates segUpdates, IOContext context, String segmentSuffix) {
  this.infoStream = infoStream;
  this.segUpdates = segUpdates;
  this.directory = directory;
  this.segmentInfo = segmentInfo;
  this.fieldInfos = fieldInfos;
  assert assertSegmentSuffix(segmentSuffix);
  this.segmentSuffix = segmentSuffix;
  this.context = context;
}
 
Example #13
Source File: IndexUpgrader.java    From lucene-solr with Apache License 2.0
@SuppressForbidden(reason = "System.out required: command line tool")
static IndexUpgrader parseArgs(String[] args) throws IOException {
  String path = null;
  boolean deletePriorCommits = false;
  InfoStream out = null;
  String dirImpl = null;
  int i = 0;
  while (i<args.length) {
    String arg = args[i];
    if ("-delete-prior-commits".equals(arg)) {
      deletePriorCommits = true;
    } else if ("-verbose".equals(arg)) {
      out = new PrintStreamInfoStream(System.out);
    } else if ("-dir-impl".equals(arg)) {
      if (i == args.length - 1) {
        System.out.println("ERROR: missing value for -dir-impl option");
        System.exit(1);
      }
      i++;
      dirImpl = args[i];
    } else if (path == null) {
      path = arg;
    } else {
      printUsage();
    }
    i++;
  }
  if (path == null) {
    printUsage();
  }
  
  Path p = Paths.get(path);
  Directory dir = null;
  if (dirImpl == null) {
    dir = FSDirectory.open(p);
  } else {
    dir = CommandLineUtil.newFSDirectory(dirImpl, p);
  }
  return new IndexUpgrader(dir, out, deletePriorCommits);
}
 
Example #14
Source File: DocumentsWriterPerThread.java    From lucene-solr with Apache License 2.0
private FlushedSegment(InfoStream infoStream, SegmentCommitInfo segmentInfo, FieldInfos fieldInfos,
                       BufferedUpdates segmentUpdates, FixedBitSet liveDocs, int delCount, Sorter.DocMap sortMap) {
  this.segmentInfo = segmentInfo;
  this.fieldInfos = fieldInfos;
  this.segmentUpdates = segmentUpdates != null && segmentUpdates.any() ? new FrozenBufferedUpdates(infoStream, segmentUpdates, segmentInfo) : null;
  this.liveDocs = liveDocs;
  this.delCount = delCount;
  this.sortMap = sortMap;
}
 
Example #15
Source File: FrozenBufferedUpdates.java    From lucene-solr with Apache License 2.0
public FrozenBufferedUpdates(InfoStream infoStream, BufferedUpdates updates, SegmentCommitInfo privateSegment) {
  this.infoStream = infoStream;
  this.privateSegment = privateSegment;
  assert privateSegment == null || updates.deleteTerms.isEmpty() : "segment private packet should only have del queries";
  Term[] termsArray = updates.deleteTerms.keySet().toArray(new Term[updates.deleteTerms.size()]);
  ArrayUtil.timSort(termsArray);
  PrefixCodedTerms.Builder builder = new PrefixCodedTerms.Builder();
  for (Term term : termsArray) {
    builder.add(term);
  }
  deleteTerms = builder.finish();
  
  deleteQueries = new Query[updates.deleteQueries.size()];
  deleteQueryLimits = new int[updates.deleteQueries.size()];
  int upto = 0;
  for(Map.Entry<Query,Integer> ent : updates.deleteQueries.entrySet()) {
    deleteQueries[upto] = ent.getKey();
    deleteQueryLimits[upto] = ent.getValue();
    upto++;
  }
  // TODO if a Term affects multiple fields, we could keep the updates key'd by Term
  // so that it maps to all fields it affects, sorted by their docUpto, and traverse
  // that Term only once, applying the update to all fields that still need to be
  // updated.
  updates.fieldUpdates.values().forEach(FieldUpdatesBuffer::finish);
  this.fieldUpdates = Map.copyOf(updates.fieldUpdates);
  this.fieldUpdatesCount = updates.numFieldUpdates.get();

  bytesUsed = (int) ((deleteTerms.ramBytesUsed() + deleteQueries.length * BYTES_PER_DEL_QUERY)
      + updates.fieldUpdatesBytesUsed.get());
  
  numTermDeletes = updates.numTermDeletes.get();
  if (infoStream != null && infoStream.isEnabled("BD")) {
    infoStream.message("BD", String.format(Locale.ROOT,
                                           "compressed %d to %d bytes (%.2f%%) for deletes/updates; private segment %s",
                                           updates.ramBytesUsed(), bytesUsed, 100.*bytesUsed/updates.ramBytesUsed(),
                                           privateSegment));
  }
}
 
Example #16
Source File: InternalEngine.java    From crate with Apache License 2.0
private IndexWriterConfig getIndexWriterConfig() {
    final IndexWriterConfig iwc = new IndexWriterConfig(engineConfig.getAnalyzer());
    iwc.setCommitOnClose(false); // we by default don't commit on close
    iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
    iwc.setReaderAttributes(getReaderAttributes(store.directory()));
    iwc.setIndexDeletionPolicy(combinedDeletionPolicy);
    // with tests.verbose, lucene sets this up: plumb to align with filesystem stream
    boolean verbose = false;
    try {
        verbose = Boolean.parseBoolean(System.getProperty("tests.verbose"));
    } catch (Exception ignore) {
        // ignored
    }
    iwc.setInfoStream(verbose ? InfoStream.getDefault() : new LoggerInfoStream(logger));
    iwc.setMergeScheduler(mergeScheduler);
    // Give us the opportunity to upgrade old segments while performing
    // background merges
    MergePolicy mergePolicy = config().getMergePolicy();
    // always configure soft-deletes field so an engine with soft-deletes disabled can open a Lucene index with soft-deletes.
    iwc.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD);
    if (softDeleteEnabled) {
        mergePolicy = new RecoverySourcePruneMergePolicy(SourceFieldMapper.RECOVERY_SOURCE_NAME, softDeletesPolicy::getRetentionQuery,
            new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD, softDeletesPolicy::getRetentionQuery, mergePolicy));
    }
    iwc.setMergePolicy(new ElasticsearchMergePolicy(mergePolicy));
    iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().getMbFrac());
    iwc.setCodec(engineConfig.getCodec());
    iwc.setUseCompoundFile(true); // always use compound on flush - reduces # of file-handles on refresh
    return iwc;
}
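
The LoggerInfoStream used above is host-project (CrateDB) code, not a Lucene class: it bridges InfoStream messages into the project's logger. A rough sketch of such an adapter, assuming SLF4J and using illustrative names (this is not the actual LoggerInfoStream source):

import org.apache.lucene.util.InfoStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Illustrative adapter: forward InfoStream messages to an SLF4J logger,
// keyed by Lucene component name (e.g. "IW", "MP", "SM").
final class Slf4jInfoStream extends InfoStream {
  private Logger logger(String component) {
    return LoggerFactory.getLogger("lucene.infostream." + component);
  }

  @Override
  public void message(String component, String message) {
    logger(component).trace(message);
  }

  @Override
  public boolean isEnabled(String component) {
    return logger(component).isTraceEnabled();
  }

  @Override
  public void close() {
    // Nothing to release; logger lifecycle is owned by the logging framework.
  }
}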
 
Example #17
Source File: SegmentMerger.java    From lucene-solr with Apache License 2.0
SegmentMerger(List<CodecReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir,
              FieldInfos.FieldNumbers fieldNumbers, IOContext context) throws IOException {
  if (context.context != IOContext.Context.MERGE) {
    throw new IllegalArgumentException("IOContext.context should be MERGE; got: " + context.context);
  }
  mergeState = new MergeState(readers, segmentInfo, infoStream);
  directory = dir;
  this.codec = segmentInfo.getCodec();
  this.context = context;
  this.fieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);
  Version minVersion = Version.LATEST;
  for (CodecReader reader : readers) {
    Version leafMinVersion = reader.getMetaData().getMinVersion();
    if (leafMinVersion == null) {
      minVersion = null;
      break;
    }
    if (minVersion.onOrAfter(leafMinVersion)) {
      minVersion = leafMinVersion;
    }
  }
  assert segmentInfo.minVersion == null : "The min version should be set by SegmentMerger for merged segments";
  segmentInfo.minVersion = minVersion;
  if (mergeState.infoStream.isEnabled("SM")) {
    if (segmentInfo.getIndexSort() != null) {
      mergeState.infoStream.message("SM", "index sort during merge: " + segmentInfo.getIndexSort());
    }
  }
}
 
Example #18
Source File: IndexWriterConfig.java    From lucene-solr with Apache License 2.0
/** 
 * Information about merges, deletes and a
 * message when maxFieldLength is reached will be printed
 * to this. Must not be null, but {@link InfoStream#NO_OUTPUT} 
 * may be used to suppress output.
 */
public IndexWriterConfig setInfoStream(InfoStream infoStream) {
  if (infoStream == null) {
    throw new IllegalArgumentException("Cannot set InfoStream implementation to null. "+
      "To disable logging use InfoStream.NO_OUTPUT");
  }
  this.infoStream = infoStream;
  return this;
}
 
Example #19
Source File: TestIndexFileDeleter.java    From lucene-solr with Apache License 2.0
public void testNoSegmentsDotGenInflation() throws IOException {
  Directory dir = newMockDirectory();
  
  // empty commit
  new IndexWriter(dir, new IndexWriterConfig(null)).close();   
  
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  assertEquals(1, sis.getGeneration());
  
  // no inflation
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(1, sis.getGeneration());

  dir.close();
}
 
Example #20
Source File: TestIndexFileDeleter.java    From lucene-solr with Apache License 2.0
public void testSegmentNameInflation() throws IOException {
  MockDirectoryWrapper dir = newMockDirectory();
  
  // empty commit
  new IndexWriter(dir, new IndexWriterConfig(null)).close();   
  
  SegmentInfos sis = SegmentInfos.readLatestCommit(dir);
  assertEquals(0, sis.counter);
  
  // no inflation
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(0, sis.counter);
  
  // add trash per-segment file
  dir.createOutput(IndexFileNames.segmentFileName("_0", "", "foo"), IOContext.DEFAULT).close();
  
  // ensure inflation
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(1, sis.counter);
  
  // add trash per-segment file
  dir.createOutput(IndexFileNames.segmentFileName("_3", "", "foo"), IOContext.DEFAULT).close();
  inflateGens(sis, Arrays.asList(dir.listAll()), InfoStream.getDefault());
  assertEquals(4, sis.counter);
  
  // ensure we write _4 segment next
  IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
  iw.addDocument(new Document());
  iw.commit();
  iw.close();
  sis = SegmentInfos.readLatestCommit(dir);
  assertEquals("_4", sis.info(0).info.name);
  assertEquals(5, sis.counter);
  
  dir.close();
}
 
Example #21
Source File: TestIndexFileDeleter.java    From lucene-solr with Apache License 2.0
static void inflateGens(SegmentInfos sis, Collection<String> files, InfoStream stream) {
  List<String> filtered = new ArrayList<>();
  for (String file : files) {
    if (IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches() ||
        file.startsWith(IndexFileNames.SEGMENTS) ||
        file.startsWith(IndexFileNames.PENDING_SEGMENTS)) {
      filtered.add(file);
    }
  }
  IndexFileDeleter.inflateGens(sis, filtered, stream);
}
 
Example #22
Source File: TestDoc.java    From lucene-solr with Apache License 2.0
private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
  throws Exception {
  IOContext context = newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1)));
  SegmentReader r1 = new SegmentReader(si1, Version.LATEST.major, context);
  SegmentReader r2 = new SegmentReader(si2, Version.LATEST.major, context);

  final Codec codec = Codec.getDefault();
  TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
  final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, null, merged, -1, false, codec, Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);

  SegmentMerger merger = new SegmentMerger(Arrays.<CodecReader>asList(r1, r2),
                                           si, InfoStream.getDefault(), trackingDir,
                                           new FieldInfos.FieldNumbers(null), context);

  MergeState mergeState = merger.merge();
  r1.close();
  r2.close();
  si.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
    
  if (useCompoundFile) {
    Collection<String> filesToDelete = si.files();
    codec.compoundFormat().write(dir, si, context);
    si.setUseCompoundFile(true);
    for(String name : filesToDelete) {
      si1.info.dir.deleteFile(name);
    }
  }

  return new SegmentCommitInfo(si, 0, 0, -1L, -1L, -1L, StringHelper.randomId());
}
 
Example #23
Source File: TestBackwardsCompatibility.java    From lucene-solr with Apache License 2.0
/**
 * Randomizes the use of some of the constructor variations
 */
private static IndexUpgrader newIndexUpgrader(Directory dir) {
  final boolean streamType = random().nextBoolean();
  final int choice = TestUtil.nextInt(random(), 0, 2);
  switch (choice) {
    case 0: return new IndexUpgrader(dir);
    case 1: return new IndexUpgrader(dir, streamType ? null : InfoStream.NO_OUTPUT, false);
    case 2: return new IndexUpgrader(dir, newIndexWriterConfig(null), false);
    default: fail("case statement didn't get updated when random bounds changed");
  }
  return null; // never get here
}
 
Example #24
Source File: SolrIndexConfig.java    From lucene-solr with Apache License 2.0
public IndexWriterConfig toIndexWriterConfig(SolrCore core) throws IOException {
  IndexSchema schema = core.getLatestSchema();
  IndexWriterConfig iwc = new IndexWriterConfig(new DelayedSchemaAnalyzer(core));
  if (maxBufferedDocs != -1)
    iwc.setMaxBufferedDocs(maxBufferedDocs);

  if (ramBufferSizeMB != -1)
    iwc.setRAMBufferSizeMB(ramBufferSizeMB);

  if (ramPerThreadHardLimitMB != -1) {
    iwc.setRAMPerThreadHardLimitMB(ramPerThreadHardLimitMB);
  }

  iwc.setSimilarity(schema.getSimilarity());
  MergePolicy mergePolicy = buildMergePolicy(core.getResourceLoader(), schema);
  iwc.setMergePolicy(mergePolicy);
  MergeScheduler mergeScheduler = buildMergeScheduler(core.getResourceLoader());
  iwc.setMergeScheduler(mergeScheduler);
  iwc.setInfoStream(infoStream);

  if (mergePolicy instanceof SortingMergePolicy) {
    Sort indexSort = ((SortingMergePolicy) mergePolicy).getSort();
    iwc.setIndexSort(indexSort);
  }

  iwc.setUseCompoundFile(useCompoundFile);

  if (mergedSegmentWarmerInfo != null) {
    // TODO: add infostream -> normal logging system (there is an issue somewhere)
    @SuppressWarnings({"rawtypes"})
    IndexReaderWarmer warmer = core.getResourceLoader().newInstance(mergedSegmentWarmerInfo.className,
                                                                      IndexReaderWarmer.class,
                                                                      null,
                                                                      new Class[] { InfoStream.class },
                                                                      new Object[] { iwc.getInfoStream() });
    iwc.setMergedSegmentWarmer(warmer);
  }

  return iwc;
}
 
Example #25
Source File: test.java    From vscode-extension with MIT License
private IndexWriterConfig getIndexWriterConfig() {
    final IndexWriterConfig iwc = new IndexWriterConfig(engineConfig.getAnalyzer());
    iwc.setCommitOnClose(false); // we by default don't commit on close
    iwc.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
    iwc.setIndexDeletionPolicy(combinedDeletionPolicy);
    // with tests.verbose, lucene sets this up: plumb to align with filesystem stream
    boolean verbose = false;
    try {
        verbose = Boolean.parseBoolean(System.getProperty("tests.verbose"));
    } catch (Exception ignore) {
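        // ignored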
    }
    iwc.setInfoStream(verbose ? InfoStream.getDefault() : new LoggerInfoStream(logger));
    iwc.setMergeScheduler(mergeScheduler);
    // Give us the opportunity to upgrade old segments while performing
    // background merges
    MergePolicy mergePolicy = config().getMergePolicy();
    // always configure soft-deletes field so an engine with soft-deletes disabled can open a Lucene index with soft-deletes.
    iwc.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD);
    if (softDeleteEnabled) {
        mergePolicy = new RecoverySourcePruneMergePolicy(SourceFieldMapper.RECOVERY_SOURCE_NAME, softDeletesPolicy::getRetentionQuery,
            new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD, softDeletesPolicy::getRetentionQuery, mergePolicy));
    }
    iwc.setMergePolicy(new ElasticsearchMergePolicy(mergePolicy));
    iwc.setSimilarity(engineConfig.getSimilarity());
    iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().getMbFrac());
    iwc.setCodec(engineConfig.getCodec());
    iwc.setUseCompoundFile(true); // always use compound on flush - reduces # of file-handles on refresh
    if (config().getIndexSort() != null) {
        iwc.setIndexSort(config().getIndexSort());
    }
    return iwc;
}
 
Example #26
Source File: DefaultIndexingChain.java    From lucene-solr with Apache License 2.0
PerField(int indexCreatedVersionMajor, FieldInfo fieldInfo, boolean invert, Similarity similarity, InfoStream infoStream, Analyzer analyzer) {
  this.indexCreatedVersionMajor = indexCreatedVersionMajor;
  this.fieldInfo = fieldInfo;
  this.similarity = similarity;
  this.infoStream = infoStream;
  this.analyzer = analyzer;
  if (invert) {
    setInvertState();
  }
}
 
Example #27
Source File: RollbackIndexTask.java    From lucene-solr with Apache License 2.0
@Override
public int doLogic() throws IOException {
  IndexWriter iw = getRunData().getIndexWriter();
  if (iw != null) {
    // If infoStream was set to output to a file, close it.
    InfoStream infoStream = iw.getConfig().getInfoStream();
    if (infoStream != null) {
      infoStream.close();
    }
    iw.rollback();
    getRunData().setIndexWriter(null);
  }
  return 1;
}
 
Example #28
Source File: IndexAndTaxonomyReplicationHandler.java    From lucene-solr with Apache License 2.0
/**
 * Constructor with the given index directory and callback to notify when the
 * indexes were updated.
 */
public IndexAndTaxonomyReplicationHandler(Directory indexDir, Directory taxoDir, Callable<Boolean> callback)
    throws IOException {
  this.callback = callback;
  this.indexDir = indexDir;
  this.taxoDir = taxoDir;
  currentRevisionFiles = null;
  currentVersion = null;
  final boolean indexExists = DirectoryReader.indexExists(indexDir);
  final boolean taxoExists = DirectoryReader.indexExists(taxoDir);
  if (indexExists != taxoExists) {
    throw new IllegalStateException("search and taxonomy indexes must either both exist or not: index=" + indexExists
        + " taxo=" + taxoExists);
  }
  if (indexExists) { // both indexes exist
    final IndexCommit indexCommit = IndexReplicationHandler.getLastCommit(indexDir);
    final IndexCommit taxoCommit = IndexReplicationHandler.getLastCommit(taxoDir);
    currentRevisionFiles = IndexAndTaxonomyRevision.revisionFiles(indexCommit, taxoCommit);
    currentVersion = IndexAndTaxonomyRevision.revisionVersion(indexCommit, taxoCommit);
    final InfoStream infoStream = InfoStream.getDefault();
    if (infoStream.isEnabled(INFO_STREAM_COMPONENT)) {
      infoStream.message(INFO_STREAM_COMPONENT, "constructor(): currentVersion=" + currentVersion
          + " currentRevisionFiles=" + currentRevisionFiles);
      infoStream.message(INFO_STREAM_COMPONENT, "constructor(): indexCommit=" + indexCommit
          + " taxoCommit=" + taxoCommit);
    }
  }
}
 
Example #29
Source File: IndexAndTaxonomyReplicationHandler.java    From lucene-solr with Apache License 2.0
/** Sets the {@link InfoStream} to use for logging messages. */
public void setInfoStream(InfoStream infoStream) {
  if (infoStream == null) {
    infoStream = InfoStream.NO_OUTPUT;
  }
  this.infoStream = infoStream;
}
 
Example #30
Source File: IndexReplicationHandler.java    From lucene-solr with Apache License 2.0
/**
 * Cleans up the index directory from old index files. This method uses the
 * last commit found by {@link #getLastCommit(Directory)}. If it matches the
 * expected segmentsFile, then all files not referenced by this commit point
 * are deleted.
 * <p>
 * <b>NOTE:</b> this method does a best effort attempt to clean the index
 * directory. It suppresses any exceptions that occur, as this can be retried
 * the next time.
 */
public static void cleanupOldIndexFiles(Directory dir, String segmentsFile, InfoStream infoStream) {
  try {
    IndexCommit commit = getLastCommit(dir);
    // commit == null means weird IO errors occurred; ignore them.
    // If there were any IO errors reading the expected commit point (i.e.
    // segments files mismatch), that commit is ignored as well.
    if (commit != null && commit.getSegmentsFileName().equals(segmentsFile)) {
      Set<String> commitFiles = new HashSet<>(commit.getFileNames());
      Matcher matcher = IndexFileNames.CODEC_FILE_PATTERN.matcher("");
      for (String file : dir.listAll()) {
        if (!commitFiles.contains(file)
            && (matcher.reset(file).matches() || file.startsWith(IndexFileNames.SEGMENTS))) {
          // suppress exceptions, it's just a best effort
          IOUtils.deleteFilesIgnoringExceptions(dir, file);
        }
      }
    }
  } catch (Throwable t) {
    // Ignore any errors that happen at this stage and only log them; this
    // cleanup will have a chance to succeed the next time we get a new
    // revision.
    if (infoStream.isEnabled(INFO_STREAM_COMPONENT)) {
      infoStream.message(INFO_STREAM_COMPONENT, "cleanupOldIndexFiles(): failed on error " + t.getMessage());
    }
  }
}