Java Code Examples for org.apache.lucene.store.IOContext

The following examples show how to use org.apache.lucene.store.IOContext. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example 1
/**
 * Snapshots an individual file by streaming each of its parts to the blob container.
 * <p>
 * This is an asynchronous method. Upon completion of the operation the latch is counted down and any failures are
 * added to the {@code failures} list.
 *
 * @param fileInfo file to be snapshotted
 * @throws IOException if reading the store file or writing a blob part fails
 */
private void snapshotFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo) throws IOException {
    final String file = fileInfo.physicalName();
    // READONCE: the file is streamed sequentially exactly once, with checksum verification.
    try (IndexInput indexInput = store.openVerifyingInput(file, IOContext.READONCE, fileInfo.metadata())) {
        for (int i = 0; i < fileInfo.numberOfParts(); i++) {
            final long partBytes = fileInfo.partBytes(i);

            final InputStreamIndexInput inputStreamIndexInput = new InputStreamIndexInput(indexInput, partBytes);
            // Apply throttling only when a snapshot rate limiter is configured.
            InputStream inputStream = snapshotRateLimiter == null ? inputStreamIndexInput : new RateLimitingInputStream(inputStreamIndexInput, snapshotRateLimiter, snapshotThrottleListener);
            // Wrap so an aborted snapshot can interrupt the upload of this file.
            inputStream = new AbortableInputStream(inputStream, fileInfo.physicalName());
            blobContainer.writeBlob(fileInfo.partName(i), inputStream, partBytes);
        }
        // Verify the checksum accumulated while reading against the expected metadata.
        Store.verify(indexInput);
        snapshotStatus.addProcessedFile(fileInfo.length());
    } catch (Throwable t) {
        // Mark the store corrupted if the failure indicates index corruption, then rethrow.
        failStoreIfCorrupted(t);
        snapshotStatus.addProcessedFile(0);
        throw t;
    }
}
 
Example 2
Source Project: lucene-solr   Source File: HdfsDirectoryFactory.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Moves {@code fileName} between directories. When both directories are HDFS-backed,
 * the move is performed as an in-place HDFS rename; otherwise the generic
 * superclass implementation (copy + delete) is used.
 */
@Override
public void move(Directory fromDir, Directory toDir, String fileName, IOContext ioContext) throws IOException {

  Directory srcBase = getBaseDir(fromDir);
  Directory dstBase = getBaseDir(toDir);

  if (!(srcBase instanceof HdfsDirectory) || !(dstBase instanceof HdfsDirectory)) {
    // Mixed or non-HDFS directories: delegate to the generic implementation.
    super.move(fromDir, toDir, fileName, ioContext);
    return;
  }

  Path srcDir = ((HdfsDirectory) srcBase).getHdfsDirPath();
  Path dstDir = ((HdfsDirectory) dstBase).getHdfsDirPath();
  // Rename via FileContext using the source directory's configuration.
  FileContext fileContext = FileContext.getFileContext(getConf(srcDir));
  fileContext.rename(new Path(srcDir, fileName), new Path(dstDir, fileName));
}
 
Example 3
Source Project: lucene-solr   Source File: CopyOneFile.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Begins copying one file from the primary into a temporary output on the destination replica.
 * The bytes themselves are copied later; the final 8 bytes (the checksum) are written
 * by us only after all bytes are copied and the checksum is confirmed.
 *
 * @param in       source of the file's bytes
 * @param dest     replica node that receives the copy
 * @param name     name of the file being copied
 * @param metaData file metadata; {@code metaData.length} includes the trailing checksum
 * @param buffer   scratch buffer reused across copy operations
 */
public CopyOneFile(DataInput in, ReplicaNode dest, String name, FileMetaData metaData, byte[] buffer) throws IOException {
  this.in = in;
  this.name = name;
  this.dest = dest;
  this.buffer = buffer;
  // TODO: pass correct IOCtx, e.g. seg total size
  out = dest.createTempOutput(name, "copy", IOContext.DEFAULT);
  tmpName = out.getName();

  // last 8 bytes are checksum, which we write ourselves after copying all bytes and confirming checksum:
  bytesToCopy = metaData.length - Long.BYTES;

  if (Node.VERBOSE_FILES) {
    dest.message("file " + name + ": start copying to tmp file " + tmpName + " length=" + (8+bytesToCopy));
  }

  // Record the start time so copy throughput can be reported later.
  copyStartNS = System.nanoTime();
  this.metaData = metaData;
  dest.startCopyFile(name);
}
 
Example 4
Source Project: lucene-solr   Source File: TestIndexedDISI.java    License: Apache License 2.0 6 votes vote down vote up
/** Round-trips a dense bitset with exactly one missing doc through IndexedDISI. */
public void testOneDocMissingFixed() throws IOException {
  final int maxDoc = 9699;
  // sane + chance of disable
  final byte denseRankPower = rarely() ? -1 : (byte) (random().nextInt(7) + 7);
  FixedBitSet bits = new FixedBitSet(maxDoc);
  bits.set(0, maxDoc);
  bits.clear(1345);
  try (Directory dir = newDirectory()) {
    final int cardinality = bits.cardinality();
    final long length;
    final int jumpTableEntryCount;
    // Serialize the bitset to a file and remember where it ends.
    try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
      jumpTableEntryCount = IndexedDISI.writeBitSet(new BitSetIterator(bits, cardinality), out, denseRankPower);
      length = out.getFilePointer();
    }

    final int step = 16000;
    // Deserialize and check that advancing matches the in-memory bitset.
    try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
      IndexedDISI actual = new IndexedDISI(in, 0L, length, jumpTableEntryCount, denseRankPower, cardinality);
      BitSetIterator expected = new BitSetIterator(bits, cardinality);
      assertAdvanceEquality(actual, expected, step);
    }
  }
}
 
Example 5
Source Project: lucene-solr   Source File: TestBlockPostingsFormat.java    License: Apache License 2.0 6 votes vote down vote up
/** Serializes the accumulated impacts to a file, reads them back, and asserts equality. */
private void doTestImpactSerialization(List<Impact> impacts) throws IOException {
  CompetitiveImpactAccumulator accumulator = new CompetitiveImpactAccumulator();
  impacts.forEach(impact -> accumulator.add(impact.freq, impact.norm));

  try (Directory dir = newDirectory()) {
    // Write the impacts out.
    try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
      Lucene50SkipWriter.writeImpacts(accumulator, out);
    }
    // Read the raw bytes back and decode them.
    try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
      byte[] serialized = new byte[Math.toIntExact(in.length())];
      in.readBytes(serialized, 0, serialized.length);
      List<Impact> deserialized =
          Lucene50ScoreSkipReader.readImpacts(new ByteArrayDataInput(serialized), new MutableImpactList());
      assertEquals(impacts, deserialized);
    }
  }
}
 
Example 6
Source Project: lucene-solr   Source File: TestDirectPacked.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Writes random longs at the given bits-per-value with a leading byte offset,
 * then reads them back via DirectReader and verifies every value.
 *
 * Fix: the IndexOutput/IndexInput were opened without try-with-resources, so a
 * failed assertion (or any exception) mid-loop leaked the open file handles.
 * Also modernized the C-style array declaration.
 */
private void doTestBpv(Directory directory, int bpv, long offset) throws Exception {
  MyRandom random = new MyRandom(random().nextLong());
  int numIters = TEST_NIGHTLY ? 100 : 10;
  for (int i = 0; i < numIters; i++) {
    long[] original = randomLongs(random, bpv);
    int bitsRequired = bpv == 64 ? 64 : DirectWriter.bitsRequired(1L << (bpv - 1));
    String name = "bpv" + bpv + "_" + i;
    try (IndexOutput output = directory.createOutput(name, IOContext.DEFAULT)) {
      // Pad with `offset` junk bytes so the reader must honor the offset.
      for (long j = 0; j < offset; ++j) {
        output.writeByte((byte) random.nextInt());
      }
      DirectWriter writer = DirectWriter.getInstance(output, original.length, bitsRequired);
      for (int j = 0; j < original.length; j++) {
        writer.add(original[j]);
      }
      writer.finish();
    }
    try (IndexInput input = directory.openInput(name, IOContext.DEFAULT)) {
      LongValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsRequired, offset);
      for (int j = 0; j < original.length; j++) {
        assertEquals("bpv=" + bpv, original[j], reader.get(j));
      }
    }
  }
}
 
Example 7
Source Project: lucene-solr   Source File: HdfsDirectoryTest.java    License: Apache License 2.0 6 votes vote down vote up
/** Temp file names must be unique even when the directory (and its counter) is recreated. */
public void testCreateTempFiles() throws IOException {
  String firstTemp;
  try (Directory dir = new HdfsDirectory(directoryPath, directoryConf);
      IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) {
    out.writeByte((byte) 42);
    firstTemp = out.getName();
  }
  assertTrue(firstTemp.startsWith("foo_bar"));
  assertTrue(firstTemp.endsWith(".tmp"));

  // Create the directory again to force the counter to be reset.
  String secondTemp;
  try (Directory dir = new HdfsDirectory(directoryPath, directoryConf);
      IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) {
    out.writeByte((byte) 42);
    secondTemp = out.getName();
  }
  assertTrue(secondTemp.startsWith("foo_bar"));
  assertTrue(secondTemp.endsWith(".tmp"));

  // Even with a reset counter the two names must differ.
  assertNotEquals(firstTemp, secondTemp);
}
 
Example 8
/**
 * Creates {@code numberOfDirs} sub-directories each holding {@code numberOfFiles}
 * one-byte files, then copies every file into {@code mainDir} under a random name.
 */
private void createFiles(Configuration configuration, int numberOfDirs, int numberOfFiles, Path path,
    HdfsDirectory mainDir) throws IOException {
  FileSystem fs = path.getFileSystem(configuration);
  for (int dirIndex = 0; dirIndex < numberOfDirs; dirIndex++) {
    Path subPath = new Path(path, "dir." + dirIndex);
    fs.mkdirs(subPath);
    for (int fileIndex = 0; fileIndex < numberOfFiles; fileIndex++) {
      // Each file contains a single byte.
      FSDataOutputStream outputStream = fs.create(new Path(subPath, "file." + fileIndex));
      outputStream.write(1);
      outputStream.close();
    }
    HdfsDirectory subDir = new HdfsDirectory(configuration, subPath);
    for (String file : subDir.listAll()) {
      subDir.copy(mainDir, file, UUID.randomUUID().toString(), IOContext.READ);
    }
    subDir.close();
  }
}
 
Example 9
Source Project: ignite   Source File: GridLuceneDirectory.java    License: Apache License 2.0 6 votes vote down vote up
/** {@inheritDoc} */
@Override public IndexInput openInput(final String name, final IOContext context) throws IOException {
    ensureOpen();

    GridLuceneFile file = fileMap.get(name);

    if (file == null)
        throw new FileNotFoundException(name);

    // Lock for using in stream. Will be unlocked on stream closing.
    file.lockRef();

    // Re-check membership after taking the reference: a concurrent delete may have
    // removed the file between the get() above and lockRef().
    if (!fileMap.containsKey(name)) {
        // Unblock for deferred delete.
        file.releaseRef();

        throw new FileNotFoundException(name);
    }

    return new GridLuceneInputStream(name, file);
}
 
Example 10
Source Project: lucene-solr   Source File: TestOfflineSorter.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Make sure two files are byte-for-byte identical.
 */
private void assertFilesIdentical(Directory dir, String golden, String sorted) throws IOException {
  long expectedLength = dir.fileLength(golden);
  assertEquals(expectedLength, dir.fileLength(sorted));

  final int bufferSize = 64 * 1024;
  byte[] goldenBuf = new byte[bufferSize];
  byte[] sortedBuf = new byte[bufferSize];
  try (
       IndexInput goldenIn = dir.openInput(golden, IOContext.READONCE);
       IndexInput sortedIn = dir.openInput(sorted, IOContext.READONCE)
       ) {
    // Compare in fixed-size chunks until all bytes have been consumed.
    for (long remaining = expectedLength; remaining > 0; ) {
      int chunk = (int) Math.min(goldenBuf.length, remaining);
      remaining -= chunk;
      goldenIn.readBytes(goldenBuf, 0, chunk);
      sortedIn.readBytes(sortedBuf, 0, chunk);
      for (int i = 0; i < chunk; i++) {
        assertEquals(goldenBuf[i], sortedBuf[i]);
      }
    }
  }
}
 
Example 11
Source Project: lucene-solr   Source File: BaseCompoundFormatTestCase.java    License: Apache License 2.0 6 votes vote down vote up
/** Writing a compound file over a sub-file with no codec header must fail loudly. */
public void testMissingCodecHeadersAreCaught() throws Exception {
  Directory dir = newDirectory();
  String subFile = "_123.xyz";

  // Write 1024 arbitrary bytes with no codec header at the front.
  try (IndexOutput out = dir.createOutput(subFile, newIOContext(random()))) {
    for (int b = 0; b < 1024; b++) {
      out.writeByte((byte) b);
    }
  }

  SegmentInfo si = newSegmentInfo(dir, "_123");
  si.setFiles(Collections.singletonList(subFile));
  // Building the compound file must detect the missing header.
  Exception thrown = expectThrows(CorruptIndexException.class,
      () -> si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT));
  assertTrue(thrown.getMessage().contains("codec header mismatch"));
  dir.close();
}
 
Example 12
Source Project: lucene-solr   Source File: SegmentDocValues.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates a ref-counted DocValuesProducer for the given segment and doc-values generation.
 * When the ref count drops to zero the producer is closed and its entry is removed from
 * {@code genDVProducers}.
 *
 * @param si    segment commit info the producer belongs to
 * @param dir   default directory to read from (used when gen == -1)
 * @param gen   doc-values generation; -1 means the original (non-updated) files
 * @param infos field infos restricted to the fields relevant to this gen
 */
private RefCount<DocValuesProducer> newDocValuesProducer(SegmentCommitInfo si, Directory dir, final Long gen, FieldInfos infos) throws IOException {
  Directory dvDir = dir;
  String segmentSuffix = "";
  if (gen.longValue() != -1) {
    dvDir = si.info.dir; // gen'd files are written outside CFS, so use SegInfo directory
    segmentSuffix = Long.toString(gen.longValue(), Character.MAX_RADIX);
  }

  // set SegmentReadState to list only the fields that are relevant to that gen
  SegmentReadState srs = new SegmentReadState(dvDir, si.info, infos, IOContext.READ, segmentSuffix);
  DocValuesFormat dvFormat = si.info.getCodec().docValuesFormat();
  return new RefCount<DocValuesProducer>(dvFormat.fieldsProducer(srs)) {
    @SuppressWarnings("synthetic-access")
    @Override
    protected void release() throws IOException {
      // Close the producer and drop it from the per-gen map atomically w.r.t. other lookups.
      object.close();
      synchronized (SegmentDocValues.this) {
        genDVProducers.remove(gen);
      }
    }
  };
}
 
Example 13
Source Project: lucene-solr   Source File: TestPendingDeletes.java    License: Apache License 2.0 6 votes vote down vote up
/** A 3-doc segment becomes fully deleted only after its last live doc is deleted. */
public void testIsFullyDeleted() throws IOException {
  Directory dir = new ByteBuffersDirectory();
  SegmentInfo segmentInfo = new SegmentInfo(dir, Version.LATEST, Version.LATEST, "test", 3, false, Codec.getDefault(),
      Collections.emptyMap(), StringHelper.randomId(), new HashMap<>(), null);
  SegmentCommitInfo commit = new SegmentCommitInfo(segmentInfo, 0, 0, -1, -1, -1, StringHelper.randomId());
  FieldInfos fieldInfos = FieldInfos.EMPTY;
  segmentInfo.getCodec().fieldInfosFormat().write(dir, segmentInfo, "", fieldInfos, IOContext.DEFAULT);
  PendingDeletes pendingDeletes = newPendingDeletes(commit);
  for (int doc = 0; doc < 3; doc++) {
    assertTrue(pendingDeletes.delete(doc));
    // Randomly persist live docs mid-way; must not affect the fully-deleted computation.
    if (random().nextBoolean()) {
      assertTrue(pendingDeletes.writeLiveDocs(dir));
    }
    // Only after deleting the third (last) doc is the segment fully deleted.
    assertEquals(doc == 2, pendingDeletes.isFullyDeleted(() -> null));
  }
}
 
Example 14
/**
 * Opens a vector store file via a Lucene {@link FSDirectory} rooted at the file's parent
 * directory, and prepares a per-thread {@link IndexInput} for concurrent readers.
 *
 * @param vectorFileName path to the vector file (its parent becomes the directory root)
 * @param flagConfig     configuration flags used when reading the header
 * @throws IOException if the directory or file cannot be opened, or the header is unreadable
 */
public VectorStoreReaderLucene(String vectorFileName, FlagConfig flagConfig) throws IOException {
  this.flagConfig = flagConfig;
  this.vectorFileName = vectorFileName;
  this.vectorFile = new File(vectorFileName);
  try {
    // A file in the current working directory has a null parent; treat that as "".
    String parentPath = this.vectorFile.getParent();
    if (parentPath == null) parentPath = "";
    this.directory = FSDirectory.open(FileSystems.getDefault().getPath(parentPath));  // Old from FSDirectory impl.
    // Read number of dimension from header information.
    // Each thread lazily opens its own IndexInput, since IndexInput is not thread-safe.
    this.threadLocalIndexInput = new ThreadLocal<IndexInput>() {
      @Override
      protected IndexInput initialValue() {
        try {
          return directory.openInput(vectorFile.getName(), IOContext.READ);
        } catch (IOException e) {
          // ThreadLocal.initialValue cannot throw a checked exception; wrap it.
          throw new RuntimeException(e.getMessage(), e);
        }
      }
    };
    readHeadersFromIndexInput(flagConfig);
  } catch (IOException e) {
    logger.warning("Cannot open file: " + this.vectorFileName + "\n" + e.getMessage());
    throw e;
  }
}
 
Example 15
Source Project: lucene-solr   Source File: BaseFieldInfoFormatTestCase.java    License: Apache License 2.0 6 votes vote down vote up
/** Test that field info attributes coming back from the codec are not mutable. */
public void testImmutableAttributes() throws Exception {
  Directory dir = newDirectory();
  Codec codec = getCodec();
  SegmentInfo segmentInfo = newSegmentInfo(dir, "_123");

  // Build a single indexed field carrying a couple of attributes.
  FieldInfos.Builder builder = new FieldInfos.Builder(new FieldInfos.FieldNumbers(null));
  FieldInfo fieldInfo = builder.getOrAdd("field");
  fieldInfo.setIndexOptions(TextField.TYPE_STORED.indexOptions());
  addAttributes(fieldInfo);
  fieldInfo.putAttribute("foo", "bar");
  fieldInfo.putAttribute("bar", "baz");

  // Round-trip through the codec's field-infos format.
  codec.fieldInfosFormat().write(dir, segmentInfo, "", builder.finish(), IOContext.DEFAULT);
  FieldInfos roundTripped = codec.fieldInfosFormat().read(dir, segmentInfo, "", IOContext.DEFAULT);
  assertEquals(1, roundTripped.size());
  assertNotNull(roundTripped.fieldInfo("field"));

  // shouldn't be able to modify attributes
  Map<String,String> attributes = roundTripped.fieldInfo("field").attributes();
  expectThrows(UnsupportedOperationException.class, () -> {
    attributes.put("bogus", "bogus");
  });

  dir.close();
}
 
Example 16
Source Project: lucene-solr   Source File: Lucene50StoredFieldsFormat.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Opens a stored-fields reader, dispatching on the compression mode recorded in the
 * segment's attributes. Fails fast if the mode attribute was never written.
 */
@Override
public StoredFieldsReader fieldsReader(Directory directory, SegmentInfo si, FieldInfos fn, IOContext context) throws IOException {
  final String modeValue = si.getAttribute(MODE_KEY);
  if (modeValue == null) {
    throw new IllegalStateException("missing value for " + MODE_KEY + " for segment: " + si.name);
  }
  return impl(Mode.valueOf(modeValue)).fieldsReader(directory, si, fn, context);
}
 
Example 17
Source Project: Elasticsearch   Source File: RecoveryStatus.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates an {@link org.apache.lucene.store.IndexOutput} for the given file name. Note that the
 * IndexOutput actually points at a temporary file.
 * <p>
 * Note: You can use {@link #getOpenIndexOutput(String)} with the same filename to retrieve the same IndexOutput
 * at a later stage
 *
 * @param fileName the logical file name being recovered
 * @param metaData expected metadata used by the verifying output
 * @param store    store that creates the verifying output
 * @throws IllegalStateException if an output for this file was already created
 */
public IndexOutput openAndPutIndexOutput(String fileName, StoreFileMetaData metaData, Store store) throws IOException {
    ensureRefCount();
    String tempFileName = getTempNameForFile(fileName);
    if (tempFileNames.containsKey(tempFileName)) {
        throw new IllegalStateException("output for file [" + fileName + "] has already been created");
    }
    // add first, before it's created
    tempFileNames.put(tempFileName, fileName);
    IndexOutput indexOutput = store.createVerifyingOutput(tempFileName, metaData, IOContext.DEFAULT);
    openIndexOutputs.put(fileName, indexOutput);
    return indexOutput;
}
 
Example 18
Source Project: lucene-solr   Source File: TestSwappedIndexFiles.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds a copy of {@code dir1} in which the single file {@code victim} is taken
 * from {@code dir2} instead, then asserts that opening/checking the index fails.
 */
private void swapOneFile(Directory dir1, Directory dir2, String victim) throws IOException {
  if (VERBOSE) {
    System.out.println("TEST: swap file " + victim);
  }
  try (BaseDirectoryWrapper dirCopy = newDirectory()) {
    dirCopy.setCheckIndexOnClose(false);

    // Copy all files from dir1 to dirCopy, except victim which we copy from dir2:
    for (String name : dir1.listAll()) {
      Directory source = name.equals(victim) ? dir2 : dir1;
      dirCopy.copyFrom(source, name, name, IOContext.DEFAULT);
      dirCopy.sync(Collections.singleton(name));
    }

    // NOTE: we .close so that if the test fails (truncation not detected) we don't also get all these confusing errors about open files:
    expectThrowsAnyOf(Arrays.asList(CorruptIndexException.class, EOFException.class, IndexFormatTooOldException.class),
        () -> DirectoryReader.open(dirCopy).close()
    );

    // CheckIndex should also fail:
    expectThrowsAnyOf(Arrays.asList(CorruptIndexException.class, EOFException.class, IndexFormatTooOldException.class),
        () -> TestUtil.checkIndex(dirCopy, true, true, null)
    );
  }
}
 
Example 19
Source Project: lucene-solr   Source File: SegmentMerger.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a merger over the given readers. The target {@code segmentInfo}'s minVersion is
 * set here to the oldest minVersion across all readers (or null if any reader lacks one);
 * it must not have been set beforehand.
 *
 * @throws IllegalArgumentException if {@code context} is not a MERGE context
 */
SegmentMerger(List<CodecReader> readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir,
              FieldInfos.FieldNumbers fieldNumbers, IOContext context) throws IOException {
  if (context.context != IOContext.Context.MERGE) {
    throw new IllegalArgumentException("IOContext.context should be MERGE; got: " + context.context);
  }
  mergeState = new MergeState(readers, segmentInfo, infoStream);
  directory = dir;
  this.codec = segmentInfo.getCodec();
  this.context = context;
  this.fieldInfosBuilder = new FieldInfos.Builder(fieldNumbers);
  // Fold the readers' minVersions down to the oldest one; any null makes the result null.
  Version minVersion = Version.LATEST;
  for (CodecReader reader : readers) {
    Version leafMinVersion = reader.getMetaData().getMinVersion();
    if (leafMinVersion == null) {
      minVersion = null;
      break;
    }
    if (minVersion.onOrAfter(leafMinVersion)) {
      minVersion = leafMinVersion;
    }

  }
  assert segmentInfo.minVersion == null : "The min version should be set by SegmentMerger for merged segments";
  segmentInfo.minVersion = minVersion;
  if (mergeState.infoStream.isEnabled("SM")) {
    if (segmentInfo.getIndexSort() != null) {
      mergeState.infoStream.message("SM", "index sort during merge: " + segmentInfo.getIndexSort());
    }
  }
}
 
Example 20
Source Project: lumongo   Source File: DistributedDirectory.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates an output backed by the NoSQL directory; the supplied {@link IOContext}
 * is intentionally ignored.
 */
@Override
public IndexOutput createOutput(String name, IOContext context) throws IOException {
	ensureOpen();
	return new DistributedIndexOutput(nosqlDirectory.getFileHandle(name, true));
}
 
Example 21
Source Project: incubator-retired-blur   Source File: JoinDirectory.java    License: Apache License 2.0 5 votes vote down vote up
/** Prefers short-term storage when it holds the file; otherwise reads from long-term storage. */
@Override
public IndexInput openInput(String name, IOContext context) throws IOException {
  return _shortTermStorage.fileExists(name)
      ? _shortTermStorage.openInput(name, context)
      : _longTermStorage.openInput(name, context);
}
 
Example 22
Source Project: lucene-solr   Source File: TestUtil.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns a copy of the source directory, with file contents stored
 * in RAM. Only segments files and codec files are copied.
 */
public static Directory ramCopyOf(Directory dir) throws IOException {
  Directory ram = new ByteBuffersDirectory();
  for (String file : dir.listAll()) {
    boolean isSegmentsFile = file.startsWith(IndexFileNames.SEGMENTS);
    if (isSegmentsFile || IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
      ram.copyFrom(dir, file, file, IOContext.DEFAULT);
    }
  }
  return ram;
}
 
Example 23
Source Project: lucene-solr   Source File: IndexRevisionTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that {@code Revision.open} streams exactly the same bytes as reading the
 * revision's source files directly from the directory (with a random skip prefix).
 *
 * Fix: the original issued a single {@code in.read(buf, off, len)} call, but
 * {@link InputStream#read(byte[], int, int)} may return fewer bytes than requested,
 * which made the byte-array comparison flaky. The stream is now read fully.
 */
@Test
public void testOpen() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig conf = new IndexWriterConfig(null);
  conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
  IndexWriter writer = new IndexWriter(dir, conf);
  try {
    writer.addDocument(new Document());
    writer.commit();
    Revision rev = new IndexRevision(writer);
    @SuppressWarnings("unchecked")
    Map<String, List<RevisionFile>> sourceFiles = rev.getSourceFiles();
    String source = sourceFiles.keySet().iterator().next();
    for (RevisionFile file : sourceFiles.values().iterator().next()) {
      IndexInput src = dir.openInput(file.fileName, IOContext.READONCE);
      InputStream in = rev.open(source, file.fileName);
      assertEquals(src.length(), in.available());
      byte[] srcBytes = new byte[(int) src.length()];
      byte[] inBytes = new byte[(int) src.length()];
      int offset = 0;
      if (random().nextBoolean()) {
        // Randomly skip a small prefix in both sources before comparing.
        int skip = random().nextInt(10);
        if (skip >= src.length()) {
          skip = 0;
        }
        in.skip(skip);
        src.seek(skip);
        offset = skip;
      }
      src.readBytes(srcBytes, offset, srcBytes.length - offset);
      readFully(in, inBytes, offset, inBytes.length - offset);
      assertArrayEquals(srcBytes, inBytes);
      IOUtils.close(src, in);
    }
    writer.close();
  } finally {
    IOUtils.close(dir);
  }
}

/** Reads exactly {@code len} bytes; InputStream.read may return fewer per call. */
private static void readFully(InputStream in, byte[] buf, int off, int len) throws IOException {
  while (len > 0) {
    int n = in.read(buf, off, len);
    assertTrue("premature end of stream", n > 0);
    off += n;
    len -= n;
  }
}
 
Example 24
/**
 * Creates a block-based output for {@code name}, overwriting any existing file.
 *
 * @throws IOException if the directory was opened in read-only mode
 */
@Override
public IndexOutput createOutput(final String name, IOContext context) throws IOException {
  if (_readOnly) {
    throw new IOException("Directory is in read only mode.");
  }
  // Overwrite semantics: drop an existing file of the same name before creating the output.
  if (fileExists(name)) {
    deleteFile(name);
  }
  return new FastHdfsKeyValueIndexOutput(name, _blockSize, this);
}
 
Example 25
Source Project: crate   Source File: MetaDataStateFormat.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Reads the state from a given file and compares the expected version against the actual version of
 * the state.
 *
 * @param namedXContentRegistry registry used to create the content parser
 * @param file                  state file to read
 * @return the parsed state object
 * @throws CorruptStateException if the file is corrupted or written by an incompatible version
 */
public final T read(NamedXContentRegistry namedXContentRegistry, Path file) throws IOException {
    try (Directory dir = newDirectory(file.getParent())) {
        try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
            // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
            CodecUtil.checksumEntireFile(indexInput);
            CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, STATE_FILE_VERSION);
            // The content type is stored as an int right after the codec header.
            final XContentType xContentType = XContentType.values()[indexInput.readInt()];
            if (xContentType != FORMAT) {
                throw new IllegalStateException("expected state in " + file + " to be " + FORMAT + " format but was " + xContentType);
            }
            // The payload sits between the header (current file pointer) and the codec footer.
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
                try (InputStreamIndexInput in = new InputStreamIndexInput(slice, contentSize)) {
                    try (XContentParser parser = XContentFactory.xContent(FORMAT)
                            .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE,
                                in)) {
                        return fromXContent(parser);
                    }
                }
            }
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
 
Example 26
Source Project: incubator-retired-blur   Source File: CacheDirectory.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Opens the underlying input and wraps it in a caching input when either the cache
 * policy or the table's block-cache file-type configuration requests caching.
 */
public IndexInput openInput(String name, IOContext context) throws IOException {
  IndexInput raw = _internal.openInput(name, context);
  boolean shouldCache = _cache.cacheFileForReading(this, name, context)
      || (_tableBlockCacheFileTypes != null && isCachableFile(name));
  return shouldCache ? new CacheIndexInput(this, name, raw, _cache) : raw;
}
 
Example 27
Source Project: lucene-solr   Source File: CompressingStoredFieldsWriter.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sole constructor. Opens the fields data stream and its index writer, writing the
 * codec headers and the chunk-size/packed-ints version preamble. On any failure the
 * partially-opened outputs are closed so no file handles leak.
 */
CompressingStoredFieldsWriter(Directory directory, SegmentInfo si, String segmentSuffix, IOContext context,
    String formatName, CompressionMode compressionMode, int chunkSize, int maxDocsPerChunk, int blockShift) throws IOException {
  assert directory != null;
  this.segment = si.name;
  this.compressionMode = compressionMode;
  this.compressor = compressionMode.newCompressor();
  this.chunkSize = chunkSize;
  this.maxDocsPerChunk = maxDocsPerChunk;
  this.docBase = 0;
  this.bufferedDocs = ByteBuffersDataOutput.newResettableInstance();
  // Per-doc bookkeeping arrays; grown on demand as docs are buffered.
  this.numStoredFields = new int[16];
  this.endOffsets = new int[16];
  this.numBufferedDocs = 0;

  // success flag: if anything below throws, close what was opened so far.
  boolean success = false;
  try {
    fieldsStream = directory.createOutput(IndexFileNames.segmentFileName(segment, segmentSuffix, FIELDS_EXTENSION), context);
    CodecUtil.writeIndexHeader(fieldsStream, formatName, VERSION_CURRENT, si.getId(), segmentSuffix);
    assert CodecUtil.indexHeaderLength(formatName, segmentSuffix) == fieldsStream.getFilePointer();

    indexWriter = new FieldsIndexWriter(directory, segment, segmentSuffix, INDEX_EXTENSION_PREFIX, INDEX_CODEC_NAME, si.getId(), blockShift, context);

    fieldsStream.writeVInt(chunkSize);
    fieldsStream.writeVInt(PackedInts.VERSION_CURRENT);

    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(fieldsStream, indexWriter);
    }
  }
}
 
Example 28
Source Project: lucene-solr   Source File: SimpleTextLiveDocsFormat.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Reads the plain-text live-docs file for the given segment commit and returns the
 * live-doc bits. The file lists SIZE, then one DOC line per live doc, then END,
 * followed by a checksum footer.
 */
@Override
public Bits readLiveDocs(Directory dir, SegmentCommitInfo info, IOContext context) throws IOException {
  assert info.hasDeletions();
  BytesRefBuilder scratch = new BytesRefBuilder();
  CharsRefBuilder scratchUTF16 = new CharsRefBuilder();
  
  // Live docs files are per-generation; use the current delete generation.
  String fileName = IndexFileNames.fileNameFromGeneration(info.info.name, LIVEDOCS_EXTENSION, info.getDelGen());
  ChecksumIndexInput in = null;
  boolean success = false;
  try {
    in = dir.openChecksumInput(fileName, context);
    
    // First line: total size of the bit set.
    SimpleTextUtil.readLine(in, scratch);
    assert StringHelper.startsWith(scratch.get(), SIZE);
    int size = parseIntAt(scratch.get(), SIZE.length, scratchUTF16);
    
    BitSet bits = new BitSet(size);
    
    // One DOC line per live document, terminated by the END marker.
    SimpleTextUtil.readLine(in, scratch);
    while (!scratch.get().equals(END)) {
      assert StringHelper.startsWith(scratch.get(), DOC);
      int docid = parseIntAt(scratch.get(), DOC.length, scratchUTF16);
      bits.set(docid);
      SimpleTextUtil.readLine(in, scratch);
    }
    
    SimpleTextUtil.checkFooter(in);
    
    success = true;
    return new SimpleTextBits(bits, size);
  } finally {
    // Close normally on success; on failure suppress secondary close exceptions.
    if (success) {
      IOUtils.close(in);
    } else {
      IOUtils.closeWhileHandlingException(in);
    }
  }
}
 
Example 29
Source Project: lucene-solr   Source File: SimpleTextLiveDocsFormat.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Writes the live-docs bits as a plain-text file for the next delete generation:
 * a SIZE line, one DOC line per live doc, an END marker, and a checksum.
 */
@Override
public void writeLiveDocs(Bits bits, Directory dir, SegmentCommitInfo info, int newDelCount, IOContext context) throws IOException {
  int size = bits.length();
  BytesRefBuilder scratch = new BytesRefBuilder();
  
  // Write into the NEXT generation's file; the current gen stays untouched.
  String fileName = IndexFileNames.fileNameFromGeneration(info.info.name, LIVEDOCS_EXTENSION, info.getNextDelGen());
  IndexOutput out = null;
  boolean success = false;
  try {
    out = dir.createOutput(fileName, context);
    SimpleTextUtil.write(out, SIZE);
    SimpleTextUtil.write(out, Integer.toString(size), scratch);
    SimpleTextUtil.writeNewline(out);
    
    // Only live (set) docs are written; deleted docs are implied by absence.
    for (int i = 0; i < size; ++i) {
      if (bits.get(i)) {
        SimpleTextUtil.write(out, DOC);
        SimpleTextUtil.write(out, Integer.toString(i), scratch);
        SimpleTextUtil.writeNewline(out);
      }
    }
    
    SimpleTextUtil.write(out, END);
    SimpleTextUtil.writeNewline(out);
    SimpleTextUtil.writeChecksum(out, scratch);
    success = true;
  } finally {
    // Close normally on success; on failure suppress secondary close exceptions.
    if (success) {
      IOUtils.close(out);
    } else {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
 
Example 30
Source Project: lucene-solr   Source File: CompressingTermVectorsFormat.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Opens a term-vectors reader that delegates to the compressing implementation,
 * using this format's segment suffix, format name, and compression mode.
 */
@Override
public final TermVectorsReader vectorsReader(Directory directory,
    SegmentInfo segmentInfo, FieldInfos fieldInfos, IOContext context)
    throws IOException {
  return new CompressingTermVectorsReader(
      directory, segmentInfo, segmentSuffix, fieldInfos, context, formatName, compressionMode);
}