org.apache.lucene.store.IOContext Java Examples

The following examples show how to use org.apache.lucene.store.IOContext. They are drawn from open source projects; the originating project and source file are noted above each example.
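As a quick orientation before the examples, the sketch below shows the basic pattern: an IOContext is passed to Directory.createOutput and Directory.openInput to describe the intended I/O, and the built-in constants IOContext.DEFAULT, IOContext.READ, and IOContext.READONCE cover the common cases. This is a minimal sketch rather than code from any of the projects below; it assumes a Lucene 8.x classpath, and the file name "demo.bin" is illustrative.

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class IOContextSketch {
  public static void main(String[] args) throws Exception {
    try (Directory dir = new ByteBuffersDirectory()) {
      // IOContext.DEFAULT: the usual choice when no flush/merge statistics are available.
      try (IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT)) {
        out.writeByte((byte) 42);
      }
      // IOContext.READONCE: hints that the file is read sequentially exactly once,
      // which lets some Directory implementations skip caching.
      try (IndexInput in = dir.openInput("demo.bin", IOContext.READONCE)) {
        System.out.println(in.readByte()); // prints 42
      }
    }
  }
}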
Example #1
Source File: HdfsDirectoryManifestFileCacheTest.java    From incubator-retired-blur with Apache License 2.0
private void createFiles(Configuration configuration, int numberOfDirs, int numberOfFiles, Path path,
    HdfsDirectory mainDir) throws IOException {
  FileSystem fileSystem = path.getFileSystem(configuration);
  for (int d = 0; d < numberOfDirs; d++) {
    Path dir = new Path(path, "dir." + d);
    fileSystem.mkdirs(dir);
    for (int f = 0; f < numberOfFiles; f++) {
      Path p = new Path(dir, "file." + f);
      FSDataOutputStream outputStream = fileSystem.create(p);
      outputStream.write(1);
      outputStream.close();
    }
    HdfsDirectory subDir = new HdfsDirectory(configuration, dir);
    for (String file : subDir.listAll()) {
      subDir.copy(mainDir, file, UUID.randomUUID().toString(), IOContext.READ);
    }
    subDir.close();
  }
}
 
Example #2
Source File: TestBlockPostingsFormat.java    From lucene-solr with Apache License 2.0
private void doTestImpactSerialization(List<Impact> impacts) throws IOException {
  CompetitiveImpactAccumulator acc = new CompetitiveImpactAccumulator();
  for (Impact impact : impacts) {
    acc.add(impact.freq, impact.norm);
  }
  try (Directory dir = newDirectory()) {
    try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
      Lucene50SkipWriter.writeImpacts(acc, out);
    }
    try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
      byte[] b = new byte[Math.toIntExact(in.length())];
      in.readBytes(b, 0, b.length);
      List<Impact> impacts2 = Lucene50ScoreSkipReader.readImpacts(new ByteArrayDataInput(b), new MutableImpactList());
      assertEquals(impacts, impacts2);
    }
  }
}
 
Example #3
Source File: BaseCompoundFormatTestCase.java    From lucene-solr with Apache License 2.0
public void testMissingCodecHeadersAreCaught() throws Exception {
  Directory dir = newDirectory();
  String subFile = "_123.xyz";

  // missing codec header
  try (IndexOutput os = dir.createOutput(subFile, newIOContext(random()))) {
    for (int i = 0; i < 1024; i++) {
      os.writeByte((byte) i);
    }
  }

  SegmentInfo si = newSegmentInfo(dir, "_123");
  si.setFiles(Collections.singletonList(subFile));
  Exception e = expectThrows(CorruptIndexException.class, () -> si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT));
  assertTrue(e.getMessage().contains("codec header mismatch"));
  dir.close();
}
 
Example #4
Source File: SegmentDocValues.java    From lucene-solr with Apache License 2.0
private RefCount<DocValuesProducer> newDocValuesProducer(SegmentCommitInfo si, Directory dir, final Long gen, FieldInfos infos) throws IOException {
  Directory dvDir = dir;
  String segmentSuffix = "";
  if (gen.longValue() != -1) {
    dvDir = si.info.dir; // gen'd files are written outside CFS, so use SegInfo directory
    segmentSuffix = Long.toString(gen.longValue(), Character.MAX_RADIX);
  }

  // set SegmentReadState to list only the fields that are relevant to that gen
  SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, IOContext.READ, segmentSuffix);
  DocValuesFormat dvFormat = si.info.getCodec().docValuesFormat();
  return new RefCount<DocValuesProducer>(dvFormat.fieldsProducer(srs)) {
    @SuppressWarnings("synthetic-access")
    @Override
    protected void release() throws IOException {
      object.close();
      synchronized (SegmentDocValues.this) {
        genDVProducers.remove(gen);
      }
    }
  };
}
 
Example #5
Source File: TestOfflineSorter.java    From lucene-solr with Apache License 2.0
/**
 * Make sure two files are byte-for-byte identical.
 */
private void assertFilesIdentical(Directory dir, String golden, String sorted) throws IOException {
  long numBytes = dir.fileLength(golden);
  assertEquals(numBytes, dir.fileLength(sorted));

  byte[] buf1 = new byte[64 * 1024];
  byte[] buf2 = new byte[64 * 1024];
  try (
       IndexInput in1 = dir.openInput(golden, IOContext.READONCE);
       IndexInput in2 = dir.openInput(sorted, IOContext.READONCE)
       ) {
    long left = numBytes;
    while (left > 0) {
      int chunk = (int) Math.min(buf1.length, left);
      left -= chunk;
      in1.readBytes(buf1, 0, chunk);
      in2.readBytes(buf2, 0, chunk);
      for (int i = 0; i < chunk; i++) {
        assertEquals(buf1[i], buf2[i]);
      }
    }
  }
}
 
Example #6
Source File: CopyOneFile.java    From lucene-solr with Apache License 2.0
public CopyOneFile(DataInput in, ReplicaNode dest, String name, FileMetaData metaData, byte[] buffer) throws IOException {
  this.in = in;
  this.name = name;
  this.dest = dest;
  this.buffer = buffer;
  // TODO: pass correct IOCtx, e.g. seg total size
  out = dest.createTempOutput(name, "copy", IOContext.DEFAULT);
  tmpName = out.getName();

  // last 8 bytes are checksum, which we write ourselves after copying all bytes and confirming checksum:
  bytesToCopy = metaData.length - Long.BYTES;

  if (Node.VERBOSE_FILES) {
    dest.message("file " + name + ": start copying to tmp file " + tmpName + " length=" + (8+bytesToCopy));
  }

  copyStartNS = System.nanoTime();
  this.metaData = metaData;
  dest.startCopyFile(name);
}
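The TODO in this constructor marks IOContext.DEFAULT as a stand-in: if the replica knew the size of the incoming segment, it could describe the copy more precisely. Below is a hypothetical sketch using Lucene's FlushInfo-based IOContext constructor; the helper class and its numDocs/estimatedBytes parameters are illustrative and not part of the original CopyOneFile.

import org.apache.lucene.store.FlushInfo;
import org.apache.lucene.store.IOContext;

final class SizedContexts {
  // Hypothetical helper: builds a size-aware IOContext from replication
  // metadata so the Directory can tune buffering for the expected volume.
  static IOContext forCopy(int numDocs, long estimatedBytes) {
    return new IOContext(new FlushInfo(numDocs, estimatedBytes));
  }
}

With such a helper, the createTempOutput call above could pass SizedContexts.forCopy(...) instead of IOContext.DEFAULT.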
 
Example #7
Source File: TestDirectPacked.java    From lucene-solr with Apache License 2.0
private void doTestBpv(Directory directory, int bpv, long offset) throws Exception {
  MyRandom random = new MyRandom(random().nextLong());
  int numIters = TEST_NIGHTLY ? 100 : 10;
  for (int i = 0; i < numIters; i++) {
    long[] original = randomLongs(random, bpv);
    int bitsRequired = bpv == 64 ? 64 : DirectWriter.bitsRequired(1L << (bpv - 1));
    String name = "bpv" + bpv + "_" + i;
    IndexOutput output = directory.createOutput(name, IOContext.DEFAULT);
    for (long j = 0; j < offset; ++j) {
      output.writeByte((byte) random.nextInt());
    }
    DirectWriter writer = DirectWriter.getInstance(output, original.length, bitsRequired);
    for (int j = 0; j < original.length; j++) {
      writer.add(original[j]);
    }
    writer.finish();
    output.close();
    IndexInput input = directory.openInput(name, IOContext.DEFAULT);
    LongValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsRequired, offset);
    for (int j = 0; j < original.length; j++) {
      assertEquals("bpv=" + bpv, original[j], reader.get(j));
    }
    input.close();
  }
}
 
Example #8
Source File: HdfsDirectoryTest.java    From lucene-solr with Apache License 2.0
public void testCreateTempFiles() throws IOException {
  String file1;
  try (Directory dir = new HdfsDirectory(directoryPath, directoryConf);
      IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) {
    out.writeByte((byte) 42);
    file1 = out.getName();
  }
  assertTrue(file1.startsWith("foo_bar"));
  assertTrue(file1.endsWith(".tmp"));
  // Create the directory again to force the counter to be reset
  String file2;
  try (Directory dir = new HdfsDirectory(directoryPath, directoryConf);
      IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) {
    out.writeByte((byte) 42);
    file2 = out.getName();
  }
  assertTrue(file2.startsWith("foo_bar"));
  assertTrue(file2.endsWith(".tmp"));
  assertNotEquals(file1, file2);
}
 
Example #9
Source File: GridLuceneDirectory.java    From ignite with Apache License 2.0
/** {@inheritDoc} */
@Override public IndexInput openInput(final String name, final IOContext context) throws IOException {
    ensureOpen();

    GridLuceneFile file = fileMap.get(name);

    if (file == null)
        throw new FileNotFoundException(name);

    // Lock for use in the stream; it will be unlocked when the stream is closed.
    file.lockRef();

    if (!fileMap.containsKey(name)) {
        // Unblock for deferred delete.
        file.releaseRef();

        throw new FileNotFoundException(name);
    }

    return new GridLuceneInputStream(name, file);
}
 
Example #10
Source File: TestIndexedDISI.java    From lucene-solr with Apache License 2.0
public void testOneDocMissingFixed() throws IOException {
  int maxDoc = 9699;
  final byte denseRankPower = rarely() ? -1 : (byte) (random().nextInt(7)+7); // sane + chance of disable
  FixedBitSet set = new FixedBitSet(maxDoc);
  set.set(0, maxDoc);
  set.clear(1345);
  try (Directory dir = newDirectory()) {

    final int cardinality = set.cardinality();
    long length;
    int jumpTableEntryCount;
    try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
      jumpTableEntryCount = IndexedDISI.writeBitSet(new BitSetIterator(set, cardinality), out, denseRankPower);
      length = out.getFilePointer();
    }

    int step = 16000;
    try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
      IndexedDISI disi = new IndexedDISI(in, 0L, length, jumpTableEntryCount, denseRankPower, cardinality);
      BitSetIterator disi2 = new BitSetIterator(set, cardinality);
      assertAdvanceEquality(disi, disi2, step);
    }
  }
}
 
Example #11
Source File: HdfsDirectoryFactory.java    From lucene-solr with Apache License 2.0
@Override
public void move(Directory fromDir, Directory toDir, String fileName, IOContext ioContext) throws IOException {
  
  Directory baseFromDir = getBaseDir(fromDir);
  Directory baseToDir = getBaseDir(toDir);
  
  if (baseFromDir instanceof HdfsDirectory && baseToDir instanceof HdfsDirectory) {
    Path dir1 = ((HdfsDirectory) baseFromDir).getHdfsDirPath();
    Path dir2 = ((HdfsDirectory) baseToDir).getHdfsDirPath();
    Path file1 = new Path(dir1, fileName);
    Path file2 = new Path(dir2, fileName);
    FileContext fileContext = FileContext.getFileContext(getConf(dir1));
    fileContext.rename(file1, file2);
    return;
  }

  super.move(fromDir, toDir, fileName, ioContext);
}
 
Example #12
Source File: TestPendingDeletes.java    From lucene-solr with Apache License 2.0
public void testIsFullyDeleted() throws IOException {
  Directory dir = new ByteBuffersDirectory();
  SegmentInfo si = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 3, false, Codec.getDefault(),
      Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
  SegmentCommitInfo commitInfo = new SegmentCommitInfo(si, 0, 0, -1, -1, -1, StringHelper.randomId());
  FieldInfos fieldInfos = FieldInfos.EMPTY;
  si.getCodec().fieldInfosFormat().write(dir, si, "", fieldInfos, IOContext.DEFAULT);
  PendingDeletes deletes = newPendingDeletes(commitInfo);
  for (int i = 0; i < 3; i++) {
    assertTrue(deletes.delete(i));
    if (random().nextBoolean()) {
      assertTrue(deletes.writeLiveDocs(dir));
    }
    assertEquals(i == 2, deletes.isFullyDeleted(() -> null));
  }
}
 
Example #13
Source File: VectorStoreReaderLucene.java    From semanticvectors with BSD 3-Clause "New" or "Revised" License
public VectorStoreReaderLucene(String vectorFileName, FlagConfig flagConfig) throws IOException {
  this.flagConfig = flagConfig;
  this.vectorFileName = vectorFileName;
  this.vectorFile = new File(vectorFileName);
  try {
    String parentPath = this.vectorFile.getParent();
    if (parentPath == null) parentPath = "";
    this.directory = FSDirectory.open(FileSystems.getDefault().getPath(parentPath));  // Old from FSDirectory impl.
    // Read number of dimensions from header information.
    this.threadLocalIndexInput = new ThreadLocal<IndexInput>() {
      @Override
      protected IndexInput initialValue() {
        try {
          return directory.openInput(vectorFile.getName(), IOContext.READ);
        } catch (IOException e) {
          throw new RuntimeException(e.getMessage(), e);
        }
      }
    };
    readHeadersFromIndexInput(flagConfig);
  } catch (IOException e) {
    logger.warning("Cannot open file: " + this.vectorFileName + "\n" + e.getMessage());
    throw e;
  }
}
 
Example #14
Source File: BlobStoreIndexShardRepository.java    From Elasticsearch with Apache License 2.0
/**
 * Snapshots an individual file.
 * <p>
 * This is an asynchronous method. Upon completion of the operation, the latch is counted down and any
 * failures are added to the {@code failures} list.
 *
 * @param fileInfo file to be snapshotted
 */
private void snapshotFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo) throws IOException {
    final String file = fileInfo.physicalName();
    try (IndexInput indexInput = store.openVerifyingInput(file, IOContext.READONCE, fileInfo.metadata())) {
        for (int i = 0; i < fileInfo.numberOfParts(); i++) {
            final long partBytes = fileInfo.partBytes(i);

            final InputStreamIndexInput inputStreamIndexInput = new InputStreamIndexInput(indexInput, partBytes);
            InputStream inputStream = snapshotRateLimiter == null ? inputStreamIndexInput : new RateLimitingInputStream(inputStreamIndexInput, snapshotRateLimiter, snapshotThrottleListener);
            inputStream = new AbortableInputStream(inputStream, fileInfo.physicalName());
            blobContainer.writeBlob(fileInfo.partName(i), inputStream, partBytes);
        }
        Store.verify(indexInput);
        snapshotStatus.addProcessedFile(fileInfo.length());
    } catch (Throwable t) {
        failStoreIfCorrupted(t);
        snapshotStatus.addProcessedFile(0);
        throw t;
    }
}
 
Example #15
Source File: BaseFieldInfoFormatTestCase.java    From lucene-solr with Apache License 2.0
/** Tests that field info attributes coming back are not mutable. */
public void testImmutableAttributes() throws Exception {
  Directory dir = newDirectory();
  Codec codec = getCodec();
  SegmentInfo segmentInfo = newSegmentInfo(dir, "_123");
  FieldInfos.Builder builder = new FieldInfos.Builder(new FieldInfos.FieldNumbers(null));
  FieldInfo fi = builder.getOrAdd("field");
  fi.setIndexOptions(TextField.TYPE_STORED.indexOptions());
  addAttributes(fi);
  fi.putAttribute("foo", "bar");
  fi.putAttribute("bar", "baz");
  FieldInfos infos = builder.finish();
  codec.fieldInfosFormat().write(dir, segmentInfo, "", infos, IOContext.DEFAULT);
  FieldInfos infos2 = codec.fieldInfosFormat().read(dir, segmentInfo, "", IOContext.DEFAULT);
  assertEquals(1, infos2.size());
  assertNotNull(infos2.fieldInfo("field"));
  Map<String,String> attributes = infos2.fieldInfo("field").attributes();
  // shouldn't be able to modify attributes
  expectThrows(UnsupportedOperationException.class, () -> {
    attributes.put("bogus", "bogus");
  });

  dir.close();
}
 
Example #16
Source File: BackupManager.java    From lucene-solr with Apache License 2.0
/**
 * This method reads the meta-data information for the backed-up collection.
 *
 * @param backupLoc The base path used to store the backup data.
 * @param backupId The unique name for the backup.
 * @param collectionName The name of the collection whose meta-data is to be returned.
 * @return the meta-data information for the backed-up collection.
 * @throws IOException in case of errors.
 */
public DocCollection readCollectionState(URI backupLoc, String backupId, String collectionName) throws IOException {
  Objects.requireNonNull(collectionName);

  URI zkStateDir = repository.resolve(backupLoc, backupId, ZK_STATE_DIR);
  try (IndexInput is = repository.openInput(zkStateDir, COLLECTION_PROPS_FILE, IOContext.DEFAULT)) {
    byte[] arr = new byte[(int) is.length()]; // probably ok since the json file should be small.
    is.readBytes(arr, 0, (int) is.length());
    ClusterState c_state = ClusterState.createFromJson(-1, arr, Collections.emptySet());
    return c_state.getCollection(collectionName);
  }
}
 
Example #17
Source File: TestFailIfUnreferencedFiles.java    From lucene-solr with Apache License 2.0
public void testDummy() throws Exception {
  MockDirectoryWrapper dir = newMockDirectory();
  dir.setAssertNoUnrefencedFilesOnClose(true);
  IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(null));
  iw.addDocument(new Document());
  iw.close();
  IndexOutput output = dir.createOutput("_hello.world", IOContext.DEFAULT);
  output.writeString("i am unreferenced!");
  output.close();
  dir.sync(Collections.singleton("_hello.world"));
  dir.close();
}
 
Example #18
Source File: TestIndexedDISI.java    From lucene-solr with Apache License 2.0
private void doTestAllSingleJump(BitSet set, Directory dir) throws IOException {
  final int cardinality = set.cardinality();
  final byte denseRankPower = rarely() ? -1 : (byte) (random().nextInt(7)+7); // sane + chance of disable
  long length;
  int jumpTableEntryCount;
  try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
    jumpTableEntryCount = IndexedDISI.writeBitSet(new BitSetIterator(set, cardinality), out, denseRankPower);
    length = out.getFilePointer();
  }

  try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
    for (int i = 0; i < set.length(); i++) {
      IndexedDISI disi = new IndexedDISI(in, 0L, length, jumpTableEntryCount, denseRankPower, cardinality);
      assertEquals("The bit at " + i + " should be correct with advanceExact", set.get(i), disi.advanceExact(i));

      IndexedDISI disi2 = new IndexedDISI(in, 0L, length, jumpTableEntryCount, denseRankPower, cardinality);
      disi2.advance(i);
      // Proper sanity check with jump tables as an error could make them seek backwards
      assertTrue("The docID should at least be " + i + " after advance(" + i + ") but was " + disi2.docID(),
          i <= disi2.docID());
      if (set.get(i)) {
        assertEquals("The docID should be present with advance", i, disi2.docID());
      } else {
        assertNotSame("The docID should not be present with advance", i, disi2.docID());
      }
    }
  }
}
 
Example #19
Source File: BaseCompoundFormatTestCase.java    From lucene-solr with Apache License 2.0
public void testResourceNameInsideCompoundFile() throws Exception {
  Directory dir = newDirectory();
  String subFile = "_123.xyz";
  SegmentInfo si = newSegmentInfo(dir, "_123");
  createSequenceFile(dir, subFile, (byte) 0, 10, si.getId(), "suffix");
  
  si.setFiles(Collections.singletonList(subFile));
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
  IndexInput in = cfs.openInput(subFile, IOContext.DEFAULT);
  String desc = in.toString();
  assertTrue("resource description hides that it's inside a compound file: " + desc, desc.contains("[slice=" + subFile + "]"));
  cfs.close();
  dir.close();
}
 
Example #20
Source File: CrankyCompoundFormat.java    From lucene-solr with Apache License 2.0
@Override
public void write(Directory dir, SegmentInfo si, IOContext context) throws IOException {
  if (random.nextInt(100) == 0) {
    throw new IOException("Fake IOException from CompoundFormat.write()");
  }
  delegate.write(dir, si, context);
}
 
Example #21
Source File: VectorStoreWriter.java    From semanticvectors with BSD 3-Clause "New" or "Revised" License
/**
 * Outputs a vector store in Lucene binary format.
 * 
 * @param vectorFileName The name of the file to write to
 * @param flagConfig Configuration settings used when writing the vectors
 * @param objectVectors The vector store to be written to disk
 */
public static void writeVectorsInLuceneFormat(String vectorFileName, FlagConfig flagConfig, VectorStore objectVectors)
    throws IOException {
  VerbatimLogger.info("About to write " + objectVectors.getNumVectors() + " vectors of dimension "
      + flagConfig.dimension() + " to Lucene format file: " + vectorFileName + " ... ");
  File vectorFile = new File(vectorFileName);
  java.nio.file.Files.deleteIfExists(vectorFile.toPath());
  String parentPath = vectorFile.getParent();
  if (parentPath == null) parentPath = "";
  FSDirectory fsDirectory = FSDirectory.open(FileSystems.getDefault().getPath(parentPath));
  IndexOutput outputStream = fsDirectory.createOutput(vectorFile.getName(), IOContext.DEFAULT);
  writeToIndexOutput(objectVectors, flagConfig, outputStream);
  outputStream.close();
  fsDirectory.close();
}
 
Example #22
Source File: CrankyFieldInfosFormat.java    From lucene-solr with Apache License 2.0
@Override
public void write(Directory directory, SegmentInfo segmentInfo, String segmentSuffix, FieldInfos infos, IOContext context) throws IOException {
  if (random.nextInt(100) == 0) {
    throw new IOException("Fake IOException from FieldInfosFormat.getFieldInfosWriter()");
  }
  delegate.write(directory, segmentInfo, segmentSuffix, infos, context);
}
 
Example #23
Source File: BaseCompoundFormatTestCase.java    From lucene-solr with Apache License 2.0
public void testEmpty() throws IOException {
  Directory dir = newDirectory();
  
  SegmentInfo si = newSegmentInfo(dir, "_123");
  si.setFiles(Collections.emptySet());
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
  assertEquals(0, cfs.listAll().length);
  cfs.close();
  dir.close();
}
 
Example #24
Source File: SortingStoredFieldsConsumer.java    From lucene-solr with Apache License 2.0
@Override
void flush(SegmentWriteState state, Sorter.DocMap sortMap) throws IOException {
  super.flush(state, sortMap);
  if (sortMap == null) {
    // we're lucky the index is already sorted, just rename the temporary file and return
    for (Map.Entry<String, String> entry : tmpDirectory.getTemporaryFiles().entrySet()) {
      tmpDirectory.rename(entry.getValue(), entry.getKey());
    }
    return;
  }
  StoredFieldsReader reader = docWriter.codec.storedFieldsFormat()
      .fieldsReader(tmpDirectory, state.segmentInfo, state.fieldInfos, IOContext.DEFAULT);
  StoredFieldsReader mergeReader = reader.getMergeInstance();
  StoredFieldsWriter sortWriter = docWriter.codec.storedFieldsFormat()
      .fieldsWriter(state.directory, state.segmentInfo, IOContext.DEFAULT);
  try {
    reader.checkIntegrity();
    CopyVisitor visitor = new CopyVisitor(sortWriter);
    for (int docID = 0; docID < state.segmentInfo.maxDoc(); docID++) {
      sortWriter.startDocument();
      mergeReader.visitDocument(sortMap.newToOld(docID), visitor);
      sortWriter.finishDocument();
    }
    sortWriter.finish(state.fieldInfos, state.segmentInfo.maxDoc());
  } finally {
    IOUtils.close(reader, sortWriter);
    IOUtils.deleteFiles(tmpDirectory,
        tmpDirectory.getTemporaryFiles().values());
  }
}
 
Example #25
Source File: HdfsDirectory.java    From incubator-retired-blur with Apache License 2.0
@Override
public IndexInput openInput(String name, IOContext context) throws IOException {
  LOG.debug("openInput [{0}] [{1}] [{2}]", name, context, getPath());
  if (!fileExists(name)) {
    throw new FileNotFoundException("File [" + name + "] not found.");
  }
  long fileLength = fileLength(name);
  Path path = getPath(name);
  FSInputFileHandle fsInputFileHandle = new FSInputFileHandle(_fileSystem, path, fileLength, name, _resourceTracking,
      _asyncClosing && _useCache);
  HdfsIndexInput input = new HdfsIndexInput(this, fsInputFileHandle, fileLength, _metricsGroup, name,
      _sequentialReadControl.clone());
  return input;
}
 
Example #26
Source File: TestIndexWriterExceptions.java    From lucene-solr with Apache License 2.0
@Override
public IndexInput openInput(String name, IOContext context) throws IOException {
  if (doFail && name.startsWith("segments_")) {
    if (callStackContainsAnyOf("readCommit", "readLatestCommit")) {
      throw new UnsupportedOperationException("expected UOE");
    }
  }
  return super.openInput(name, context);
}
 
Example #27
Source File: Store.java    From crate with Apache License 2.0
/**
 * The returned IndexOutput validates the file's checksum.
 * <p>
 * Note: Checksums are calculated by default since version 4.8.0. This method only adds verification
 * against the checksum in the given metadata and does not add any significant overhead.
 */
public IndexOutput createVerifyingOutput(String fileName, final StoreFileMetaData metadata, final IOContext context) throws IOException {
    IndexOutput output = directory().createOutput(fileName, context);
    boolean success = false;
    try {
        assert metadata.writtenBy() != null;
        output = new LuceneVerifyingIndexOutput(metadata, output);
        success = true;
    } finally {
        if (success == false) {
            IOUtils.closeWhileHandlingException(output);
        }
    }
    return output;
}
 
Example #28
Source File: TestCrashCausesCorruptIndex.java    From lucene-solr with Apache License 2.0
@Override
public IndexOutput createOutput(String name, IOContext cxt) throws IOException {
  IndexOutput indexOutput = in.createOutput(name, cxt);
  if (null != crashAfterCreateOutput && name.equals(crashAfterCreateOutput)) {
    // CRASH!
    indexOutput.close();
    if (VERBOSE) {
      System.out.println("TEST: now crash");
      new Throwable().printStackTrace(System.out);
    }
    throw new CrashingException("crashAfterCreateOutput "+crashAfterCreateOutput);
  }
  return indexOutput;
}
 
Example #29
Source File: CompressingTermVectorsFormat.java    From lucene-solr with Apache License 2.0
@Override
public final TermVectorsReader vectorsReader(Directory directory,
    SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
    throws IOException {
  return new CompressingTermVectorsReader(directory, segmentInfo, segmentSuffix,
      fieldInfos, context, formatName, compressionMode);
}
 
Example #30
Source File: SimpleTextLiveDocsFormat.java    From lucene-solr with Apache License 2.0
@Override
public void writeLiveDocs(Bits bits, Directory dir, SegmentCommitInfo info, int newDelCount, IOContext context) throws IOException {
  int size = bits.length();
  BytesRefBuilder scratch = new BytesRefBuilder();
  
  String fileName = IndexFileNames.fileNameFromGeneration(info.info.name, LIVEDOCS_EXTENSION, info.getNextDelGen());
  IndexOutput out = null;
  boolean success = false;
  try {
    out = dir.createOutput(fileName, context);
    SimpleTextUtil.write(out, SIZE);
    SimpleTextUtil.write(out, Integer.toString(size), scratch);
    SimpleTextUtil.writeNewline(out);
    
    for (int i = 0; i < size; ++i) {
      if (bits.get(i)) {
        SimpleTextUtil.write(out, DOC);
        SimpleTextUtil.write(out, Integer.toString(i), scratch);
        SimpleTextUtil.writeNewline(out);
      }
    }
    
    SimpleTextUtil.write(out, END);
    SimpleTextUtil.writeNewline(out);
    SimpleTextUtil.writeChecksum(out, scratch);
    success = true;
  } finally {
    if (success) {
      IOUtils.close(out);
    } else {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}