Java Code Examples for org.apache.lucene.store.IndexOutput

The following examples show how to use org.apache.lucene.store.IndexOutput. They are extracted from open source projects; the source project, file, and license are noted above each example where available.
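Before the examples, here is a minimal orientation sketch of the basic write/read round trip that most of them build on. This sketch is an illustrative assumption, not taken from any of the projects below, and it presumes Lucene 8.x, where ByteBuffersDirectory provides an in-memory Directory:

import java.io.IOException;

import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class IndexOutputRoundTrip {
  public static void main(String[] args) throws IOException {
    try (Directory dir = new ByteBuffersDirectory()) {
      // An IndexOutput is a write-once, append-only stream owned by its Directory.
      try (IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT)) {
        out.writeInt(42);
        out.writeString("hello");
      }
      // Read the file back with the matching IndexInput calls, in the same order.
      try (IndexInput in = dir.openInput("demo.bin", IOContext.DEFAULT)) {
        int value = in.readInt();      // 42
        String text = in.readString(); // "hello"
        System.out.println(value + " " + text);
      }
    }
  }
}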
Example 1
Source Project: lucene-solr   Source File: TestOfflineSorter.java    License: Apache License 2.0
@Nightly
public void testFixedLengthHeap() throws Exception {
  // Make sure the RAM accounting is correct, i.e. if we are sorting fixed width
  // ints (4 bytes) then the heap used is really only 4 bytes per value:
  Directory dir = newDirectory();
  IndexOutput out = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT);
  try (ByteSequencesWriter w = new OfflineSorter.ByteSequencesWriter(out)) {
    byte[] bytes = new byte[Integer.BYTES];
    for (int i=0;i<1024*1024;i++) {
      random().nextBytes(bytes);
      w.write(bytes);
    }
    CodecUtil.writeFooter(out);
  }

  ExecutorService exec = randomExecutorServiceOrNull();
  OfflineSorter sorter = new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(4), OfflineSorter.MAX_TEMPFILES, Integer.BYTES, exec, TestUtil.nextInt(random(), 1, 4));
  sorter.sort(out.getName());
  if (exec != null) {
    exec.shutdownNow();
  }
  // 1 MB of ints with 4 MB heap allowed should have been sorted in a single heap partition:
  assertEquals(0, sorter.sortInfo.mergeRounds);
  dir.close();
}
 
Example 2
private static Directory copyFilesLocally(Configuration configuration, Directory dir, String table, Path shardDir,
    Path localCachePath, Collection<String> files, String segmentName) throws IOException {
  LOG.info("Copying files need to local cache for faster reads [{0}].", shardDir);
  Path localShardPath = new Path(new Path(new Path(localCachePath, table), shardDir.getName()), segmentName);
  HdfsDirectory localDir = new HdfsDirectory(configuration, localShardPath, null);
  for (String name : files) {
    if (!isValidFileToCache(name)) {
      continue;
    }
    LOG.info("Valid file for local copy [{0}].", name);
    if (!isValid(localDir, dir, name)) {
      LastModified lastModified = (LastModified) dir;
      long fileModified = lastModified.getFileModified(name);

      IndexInput input = dir.openInput(name, IOContext.READONCE);
      IndexOutput output = localDir.createOutput(name, IOContext.READONCE);
      output.copyBytes(input, input.length());
      output.close();
      IndexOutput lastMod = localDir.createOutput(name + LASTMOD, IOContext.DEFAULT);
      lastMod.writeLong(fileModified);
      lastMod.close();
    }
  }
  return localDir;
}
 
Example 3
Source Project: lucene-solr   Source File: TestIndexWriter.java    License: Apache License 2.0
public void testLeftoverTempFiles() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
  IndexWriter w = new IndexWriter(dir, iwc);
  w.close();

  IndexOutput out = dir.createTempOutput("_0", "bkd", IOContext.DEFAULT);
  String tempName = out.getName();
  out.close();
  iwc = new IndexWriterConfig(new MockAnalyzer(random()));
  w = new IndexWriter(dir, iwc);

  // Make sure IW deleted the unref'd file:
  try {
    dir.openInput(tempName, IOContext.DEFAULT);
    fail("did not hit exception");
  } catch (FileNotFoundException | NoSuchFileException e) {
    // expected
  }
  w.close();
  dir.close();
}
 
Example 4
Source Project: lucene-solr   Source File: TestIndexInput.java    License: Apache License 2.0
public void testRawIndexInputRead() throws IOException {
  for (int i = 0; i < 10; i++) {
    Random random = random();
    final Directory dir = newDirectory();
    IndexOutput os = dir.createOutput("foo", newIOContext(random));
    os.writeBytes(READ_TEST_BYTES, READ_TEST_BYTES.length);
    os.close();
    IndexInput is = dir.openInput("foo", newIOContext(random));
    checkReads(is, IOException.class);
    is.close();
  
    os = dir.createOutput("bar", newIOContext(random));
    os.writeBytes(RANDOM_TEST_BYTES, RANDOM_TEST_BYTES.length);
    os.close();
    is = dir.openInput("bar", newIOContext(random));
    checkRandomReads(is);
    is.close();
    dir.close();
  }
}
 
Example 5
Source Project: lucene-solr   Source File: BKDWriter.java    License: Apache License 2.0
OneDimensionBKDWriter(IndexOutput metaOut, IndexOutput indexOut, IndexOutput dataOut) {
  if (numIndexDims != 1) {
    throw new UnsupportedOperationException("numIndexDims must be 1 but got " + numIndexDims);
  }
  if (pointCount != 0) {
    throw new IllegalStateException("cannot mix add and merge");
  }

  // Catch user silliness:
  if (finished == true) {
    throw new IllegalStateException("already finished");
  }

  // Mark that we already finished:
  finished = true;

  this.metaOut = metaOut;
  this.indexOut = indexOut;
  this.dataOut = dataOut;
  this.dataStartFP = dataOut.getFilePointer();

  lastPackedValue = new byte[packedBytesLength];
}
 
Example 6
/**
 * Transforms vector to cartesian form and writes vector out in dense format, truncating the
 * vectors to the assigned dimensionality
 */
public void writeToLuceneStream(IndexOutput outputStream, int k) {
  toCartesian();
  for (int i = 0; i < k * 2; ++i) {
    try {
      outputStream.writeInt(Float.floatToIntBits(coordinates[i]));
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
    
    
    /* DORMANT CODE!
  assert(opMode != MODE.POLAR_SPARSE);
  if (opMode == MODE.CARTESIAN) {
    cartesianToDensePolar();
  }
  for (int i = 0; i < dimension; ++i) {
    try {
      outputStream.writeInt((int)(phaseAngles[i]));
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
     */
}
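
Because the write path above stores each of the k*2 coordinates as raw int bits, a read-side counterpart simply reverses the conversion. The sketch below is hypothetical (the project's own readFromLuceneStream may differ) and mirrors the error handling of the surrounding code:

public void readFromLuceneStream(IndexInput inputStream, int k) {
  // Hypothetical read-side sketch: recover the k coordinate pairs written above.
  for (int i = 0; i < k * 2; ++i) {
    try {
      // Float.intBitsToFloat reverses the Float.floatToIntBits used when writing.
      coordinates[i] = Float.intBitsToFloat(inputStream.readInt());
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
}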
 
Example 7
@Override
/**
 * Writes vector out in dense format.
 */
public void writeToLuceneStream(IndexOutput outputStream) {
  int[] coordsToWrite = coordinates;

  for (int i = 0; i < dimension; ++i) {
    try {
      outputStream.writeInt((coordsToWrite[i]));
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
}
 
Example 8
@Test
public void test2() throws IOException {
  Cache cache = getCache();
  RAMDirectory directory = new RAMDirectory();
  Random random = new Random(seed);

  String name = "test2";
  long size = (10 * 1024 * 1024) + 13;

  IndexOutput output = directory.createOutput(name, IOContext.DEFAULT);
  writeRandomData(size, random, output);
  output.close();

  IndexInput input = directory.openInput(name, IOContext.DEFAULT);
  IndexInput testInput = new CacheIndexInput(null, name, input.clone(), cache);
  readRandomData(input, testInput, random, sampleSize, maxBufSize, maxOffset);
  readRandomDataShort(input, testInput, random, sampleSize);
  readRandomDataInt(input, testInput, random, sampleSize);
  readRandomDataLong(input, testInput, random, sampleSize);
  testInput.close();
  input.close();
  directory.close();
}
 
Example 9
Source Project: lucene-solr   Source File: BlockWriter.java    License: Apache License 2.0
protected BlockWriter(IndexOutput blockOutput, int targetNumBlockLines, int deltaNumLines, BlockEncoder blockEncoder) {
  assert blockOutput != null;
  assert targetNumBlockLines > 0;
  assert deltaNumLines >= 0;
  assert deltaNumLines < targetNumBlockLines;
  this.blockOutput = blockOutput;
  this.targetNumBlockLines = targetNumBlockLines;
  this.deltaNumLines = deltaNumLines;
  this.blockEncoder = blockEncoder;

  this.blockLines = new ArrayList<>(targetNumBlockLines);
  this.blockHeaderWriter = createBlockHeaderSerializer();
  this.blockLineWriter = createBlockLineSerializer();
  this.termStateSerializer = createDeltaBaseTermStateSerializer();

  this.blockLinesWriteBuffer = ByteBuffersDataOutput.newResettableInstance();
  this.termStatesWriteBuffer = ByteBuffersDataOutput.newResettableInstance();
  this.blockWriteBuffer = ByteBuffersDataOutput.newResettableInstance();

  this.reusableBlockHeader = new BlockHeader();
  this.scratchBytesRef = new BytesRef();
}
 
Example 10
Source Project: lucene-solr   Source File: TestCodecUtil.java    License: Apache License 2.0
public void testCheckFooterInvalid() throws Exception {
  ByteBuffersDataOutput out = new ByteBuffersDataOutput();
  IndexOutput output = new ByteBuffersIndexOutput(out, "temp", "temp");
  CodecUtil.writeHeader(output, "FooBar", 5);
  output.writeString("this is the data");
  output.writeInt(CodecUtil.FOOTER_MAGIC);
  output.writeInt(0);
  output.writeLong(1234567); // write a bogus checksum
  output.close();

  ChecksumIndexInput input = new BufferedChecksumIndexInput(new ByteBuffersIndexInput(out.toDataInput(), "temp"));
  CodecUtil.checkHeader(input, "FooBar", 5, 5);
  assertEquals("this is the data", input.readString());
  Exception mine = new RuntimeException("fake exception");
  CorruptIndexException expected = expectThrows(CorruptIndexException.class, () -> {
    CodecUtil.checkFooter(input, mine);
  });
  assertTrue(expected.getMessage().contains("checksum failed"));
  Throwable suppressed[] = expected.getSuppressed();
  assertEquals(1, suppressed.length);
  assertEquals("fake exception", suppressed[0].getMessage());
  input.close();
}
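
For contrast with the bogus checksum above, here is a minimal sketch of the normal pattern, in which CodecUtil.writeFooter computes and stores the real checksum. The in-memory ByteBuffersDirectory is assumed purely for illustration:

try (Directory dir = new ByteBuffersDirectory()) {
  try (IndexOutput output = dir.createOutput("proper.bin", IOContext.DEFAULT)) {
    CodecUtil.writeHeader(output, "FooBar", 5);
    output.writeString("this is the data");
    CodecUtil.writeFooter(output); // footer magic, algorithm id and the real checksum
  }
  try (ChecksumIndexInput input = dir.openChecksumInput("proper.bin", IOContext.DEFAULT)) {
    CodecUtil.checkHeader(input, "FooBar", 5, 5);
    input.readString();
    CodecUtil.checkFooter(input); // passes, because the stored checksum matches the data
  }
}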
 
Example 11
Source Project: lucene-solr   Source File: TestIndexedDISI.java    License: Apache License 2.0
public void testPositionNotZero() throws IOException {
  final int BLOCKS = 10;
  final byte denseRankPower = rarely() ? -1 : (byte) (random().nextInt(7)+7); // sane + chance of disable

  BitSet set = createSetWithRandomBlocks(BLOCKS);
  try (Directory dir = newDirectory()) {
    final int cardinality = set.cardinality();
    int jumpTableEntryCount;
    try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
      jumpTableEntryCount = IndexedDISI.writeBitSet(new BitSetIterator(set, cardinality), out, denseRankPower);
    }
    try (IndexInput fullInput = dir.openInput("foo", IOContext.DEFAULT)) {
      IndexInput blockData =
          IndexedDISI.createBlockSlice(fullInput, "blocks", 0, fullInput.length(), jumpTableEntryCount);
      blockData.seek(random().nextInt((int) blockData.length()));

      RandomAccessInput jumpTable = IndexedDISI.createJumpTable(fullInput, 0, fullInput.length(), jumpTableEntryCount);
      IndexedDISI disi = new IndexedDISI(blockData, jumpTable, jumpTableEntryCount, denseRankPower, cardinality);
      // This failed at some point during LUCENE-8585 development as it did not reset the slice position
      disi.advanceExact(BLOCKS*65536-1);
    }
  }
}
 
Example 12
Source Project: lucene-solr   Source File: TestLucene84PostingsFormat.java    License: Apache License 2.0
private void doTestImpactSerialization(List<Impact> impacts) throws IOException {
  CompetitiveImpactAccumulator acc = new CompetitiveImpactAccumulator();
  for (Impact impact : impacts) {
    acc.add(impact.freq, impact.norm);
  }
  try(Directory dir = newDirectory()) {
    try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
      Lucene84SkipWriter.writeImpacts(acc, out);
    }
    try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
      byte[] b = new byte[Math.toIntExact(in.length())];
      in.readBytes(b, 0, b.length);
      List<Impact> impacts2 = Lucene84ScoreSkipReader.readImpacts(new ByteArrayDataInput(b), new MutableImpactList());
      assertEquals(impacts, impacts2);
    }
  }
}
 
Example 13
Source Project: lucene-solr   Source File: BaseCompoundFormatTestCase.java    License: Apache License 2.0
public void testDeleteFileDisabled() throws IOException {
  final String testfile = "_123.test";

  Directory dir = newDirectory();
  IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
  out.writeInt(3);
  out.close();
 
  SegmentInfo si = newSegmentInfo(dir, "_123");
  si.setFiles(Collections.emptyList());
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
  expectThrows(UnsupportedOperationException.class, () -> {
    cfs.deleteFile(testfile);
  });

  cfs.close();
  dir.close();
}
 
Example 14
Source Project: lucene-solr   Source File: BaseCompoundFormatTestCase.java    License: Apache License 2.0
public void testCorruptFilesAreCaught() throws Exception {
  Directory dir = newDirectory();
  String subFile = "_123.xyz";

  // wrong checksum
  SegmentInfo si = newSegmentInfo(dir, "_123");
  try (IndexOutput os = dir.createOutput(subFile, newIOContext(random()))) {
    CodecUtil.writeIndexHeader(os, "Foo", 0, si.getId(), "suffix");
    for (int i=0; i < 1024; i++) {
      os.writeByte((byte) i);
    }

    // write footer w/ wrong checksum
    os.writeInt(CodecUtil.FOOTER_MAGIC);
    os.writeInt(0);

    long checksum = os.getChecksum();
    os.writeLong(checksum+1);
  }

  si.setFiles(Collections.singletonList(subFile));
  Exception e = expectThrows(CorruptIndexException.class, () -> si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT));
  assertTrue(e.getMessage().contains("checksum failed (hardware problem?)"));
  dir.close();
}
 
Example 15
Source Project: lucene-solr   Source File: BaseCompoundFormatTestCase.java    License: Apache License 2.0
public void testSyncDisabled() throws IOException {
  final String testfile = "_123.test";

  Directory dir = newDirectory();
  IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
  out.writeInt(3);
  out.close();
 
  SegmentInfo si = newSegmentInfo(dir, "_123");
  si.setFiles(Collections.emptyList());
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
  expectThrows(UnsupportedOperationException.class, () -> {
    cfs.sync(Collections.singleton(testfile));
  });

  cfs.close();
  dir.close();
}
 
Example 16
Source Project: lucene-solr   Source File: BaseCompoundFormatTestCase.java    License: Apache License 2.0
public void testMakeLockDisabled() throws IOException {
  final String testfile = "_123.test";

  Directory dir = newDirectory();
  IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
  out.writeInt(3);
  out.close();
 
  SegmentInfo si = newSegmentInfo(dir, "_123");
  si.setFiles(Collections.emptyList());
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
  expectThrows(UnsupportedOperationException.class, () -> {
    cfs.obtainLock("foobar");
  });

  cfs.close();
  dir.close();
}
 
Example 17
@Test
public void testGenerateRandomVectorWriteAndRead() {
  Random random = new Random(0);

  Vector vector = VectorFactory.generateRandomVector(VectorType.BINARY, 64, 2, random);
  // The exact string depends on Java's implementation of Random, so we only check for length.
  String vectorString = vector.writeToString();
  assertEquals(64, vectorString.length());

  RAMDirectory directory = new RAMDirectory();
  try {
    IndexOutput indexOutput = directory.createOutput("binaryvectors.bin", IOContext.DEFAULT);
    vector.writeToLuceneStream(indexOutput);
    indexOutput.close();
    IndexInput indexInput = directory.openInput("binaryvectors.bin", IOContext.DEFAULT);
    Vector vector2 = VectorFactory.createZeroVector(VectorType.BINARY, 64);
    assertEquals("0000000000000000000000000000000000000000000000000000000000000000", vector2.writeToString());
    vector2.readFromLuceneStream(indexInput);
    assertEquals(vectorString, vector2.writeToString());
  } catch (IOException e) {
    e.printStackTrace();
    fail();
  }
  directory.close();
}
 
Example 18
Source Project: lucene-solr   Source File: BlockDirectory.java    License: Apache License 2.0
@Override
public IndexOutput createOutput(String name, IOContext context)
    throws IOException {
  final IndexOutput dest = super.createOutput(name, context);
  if (useWriteCache(name, context)) {
    return new CachedIndexOutput(this, dest, blockSize, name, cache, blockSize);
  }
  return dest;
}
 
Example 19
Source Project: lucene-solr   Source File: HdfsDirectoryTest.java    License: Apache License 2.0
@Test
public void testWritingAndReadingAFile() throws IOException {
  String[] listAll = directory.listAll();
  for (String file : listAll) {
    directory.deleteFile(file);
  }
  
  IndexOutput output = directory.createOutput("testing.test", new IOContext());
  output.writeInt(12345);
  output.close();

  IndexInput input = directory.openInput("testing.test", new IOContext());
  assertEquals(12345, input.readInt());
  input.close();

  listAll = directory.listAll();
  assertEquals(1, listAll.length);
  assertEquals("testing.test", listAll[0]);

  assertEquals(4, directory.fileLength("testing.test"));

  IndexInput input1 = directory.openInput("testing.test", new IOContext());

  IndexInput input2 = input1.clone();
  assertEquals(12345, input2.readInt());
  input2.close();

  assertEquals(12345, input1.readInt());
  input1.close();

  assertFalse(slowFileExists(directory, "testing.test.other"));
  assertTrue(slowFileExists(directory, "testing.test"));
  directory.deleteFile("testing.test");
  assertFalse(slowFileExists(directory, "testing.test"));
}
 
Example 20
Source Project: Elasticsearch   Source File: RecoveryStatus.java    License: Apache License 2.0
/**
 * Creates an {@link org.apache.lucene.store.IndexOutput} for the given file name. Note that the
 * IndexOutput actually point at a temporary file.
 * <p>
 * Note: You can use {@link #getOpenIndexOutput(String)} with the same filename to retrieve the same IndexOutput
 * at a later stage
 */
public IndexOutput openAndPutIndexOutput(String fileName, StoreFileMetaData metaData, Store store) throws IOException {
    ensureRefCount();
    String tempFileName = getTempNameForFile(fileName);
    if (tempFileNames.containsKey(tempFileName)) {
        throw new IllegalStateException("output for file [" + fileName + "] has already been created");
    }
    // add first, before it's created
    tempFileNames.put(tempFileName, fileName);
    IndexOutput indexOutput = store.createVerifyingOutput(tempFileName, metaData, IOContext.DEFAULT);
    openIndexOutputs.put(fileName, indexOutput);
    return indexOutput;
}
 
Example 21
Source Project: hadoop-gpu   Source File: RAMDirectoryUtil.java    License: Apache License 2.0
/**
 * Read a number of files from a data input to a ram directory.
 * @param in  the data input
 * @param dir  the ram directory
 * @throws IOException
 */
public static void readRAMFiles(DataInput in, RAMDirectory dir)
    throws IOException {
  int numFiles = in.readInt();

  for (int i = 0; i < numFiles; i++) {
    String name = Text.readString(in);
    long length = in.readLong();

    if (length > 0) {
      // can we avoid the extra copy?
      IndexOutput output = null;
      try {
        output = dir.createOutput(name);

        int position = 0;
        byte[] buffer = new byte[BUFFER_SIZE];

        while (position < length) {
          int len =
              position + BUFFER_SIZE <= length ? BUFFER_SIZE
                  : (int) (length - position);
          in.readFully(buffer, 0, len);
          output.writeBytes(buffer, 0, len);
          position += len;
        }
      } finally {
        if (output != null) {
          output.close();
        }
      }
    }
  }
}
 
Example 22
Source Project: RDFS   Source File: RAMDirectoryUtil.java    License: Apache License 2.0
/**
 * Read a number of files from a data input to a ram directory.
 * @param in  the data input
 * @param dir  the ram directory
 * @throws IOException
 */
public static void readRAMFiles(DataInput in, RAMDirectory dir)
    throws IOException {
  int numFiles = in.readInt();

  for (int i = 0; i < numFiles; i++) {
    String name = Text.readString(in);
    long length = in.readLong();

    if (length > 0) {
      // can we avoid the extra copy?
      IndexOutput output = null;
      try {
        output = dir.createOutput(name);

        int position = 0;
        byte[] buffer = new byte[BUFFER_SIZE];

        while (position < length) {
          int len =
              position + BUFFER_SIZE <= length ? BUFFER_SIZE
                  : (int) (length - position);
          in.readFully(buffer, 0, len);
          output.writeBytes(buffer, 0, len);
          position += len;
        }
      } finally {
        if (output != null) {
          output.close();
        }
      }
    }
  }
}
 
Example 23
Source Project: lucene-solr   Source File: TestIndexWriterExceptions.java    License: Apache License 2.0
public void testSimulatedCorruptIndex1() throws IOException {
    BaseDirectoryWrapper dir = newDirectory();
    dir.setCheckIndexOnClose(false); // we are corrupting it!

    IndexWriter writer = null;

    writer  = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));

    // add 100 documents
    for (int i = 0; i < 100; i++) {
        addDoc(writer);
    }

    // close
    writer.close();

    long gen = SegmentInfos.getLastCommitGeneration(dir);
    assertTrue("segment generation should be > 0 but got " + gen, gen > 0);

    String fileNameIn = SegmentInfos.getLastCommitSegmentsFileName(dir);
    String fileNameOut = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS,
                                                               "",
                                                               1+gen);
    IndexInput in = dir.openInput(fileNameIn, newIOContext(random()));
    IndexOutput out = dir.createOutput(fileNameOut, newIOContext(random()));
    long length = in.length();
    for(int i=0;i<length-1;i++) {
      out.writeByte(in.readByte());
    }
    in.close();
    out.close();
    dir.deleteFile(fileNameIn);

    expectThrows(Exception.class, () -> {
      DirectoryReader.open(dir);
    });

    dir.close();
}
 
Example 24
Source Project: lucene-solr   Source File: TestIndexedDISI.java    License: Apache License 2.0
private void doTestAllSingleJump(BitSet set, Directory dir) throws IOException {
  final int cardinality = set.cardinality();
  final byte denseRankPower = rarely() ? -1 : (byte) (random().nextInt(7)+7); // sane + chance of disable
  long length;
  int jumpTableentryCount;
  try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) {
    jumpTableentryCount = IndexedDISI.writeBitSet(new BitSetIterator(set, cardinality), out, denseRankPower);
    length = out.getFilePointer();
  }

  try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) {
    for (int i = 0; i < set.length(); i++) {
      IndexedDISI disi = new IndexedDISI(in, 0L, length, jumpTableentryCount, denseRankPower, cardinality);
      assertEquals("The bit at " + i + " should be correct with advanceExact", set.get(i), disi.advanceExact(i));

      IndexedDISI disi2 = new IndexedDISI(in, 0L, length, jumpTableentryCount, denseRankPower, cardinality);
      disi2.advance(i);
      // Proper sanity check with jump tables as an error could make them seek backwards
      assertTrue("The docID should at least be " + i + " after advance(" + i + ") but was " + disi2.docID(),
          i <= disi2.docID());
      if (set.get(i)) {
        assertEquals("The docID should be present with advance", i, disi2.docID());
      } else {
        assertNotSame("The docID should not be present with advance", i, disi2.docID());
      }
    }
  }
}
 
Example 25
Source Project: incubator-retired-blur   Source File: CacheDirectoryTest.java    License: Apache License 2.0
@Test
public void test2() throws IOException {
  IndexOutput output = _cacheDirectory.createOutput("test.file", IOContext.DEFAULT);
  byte[] buf = new byte[9000];
  for (int i = 0; i < buf.length; i++) {
    buf[i] = (byte) i;
  }
  output.writeBytes(buf, buf.length);
  output.close();

  IndexInput input = _cacheDirectory.openInput("test.file", IOContext.DEFAULT);
  assertEquals(9000, input.length());
  input.close();
}
 
Example 26
Source Project: lucene-solr   Source File: TestPagedBytes.java    License: Apache License 2.0
@Ignore // memory hole
public void testOverflow() throws IOException {
  BaseDirectoryWrapper dir = newFSDirectory(createTempDir("testOverflow"));
  if (dir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper)dir).setThrottling(MockDirectoryWrapper.Throttling.NEVER);
  }
  final int blockBits = TestUtil.nextInt(random(), 14, 28);
  final int blockSize = 1 << blockBits;
  byte[] arr = new byte[TestUtil.nextInt(random(), blockSize / 2, blockSize * 2)];
  for (int i = 0; i < arr.length; ++i) {
    arr[i] = (byte) i;
  }
  final long numBytes = (1L << 31) + TestUtil.nextInt(random(), 1, blockSize * 3);
  final PagedBytes p = new PagedBytes(blockBits);
  final IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT);
  for (long i = 0; i < numBytes; ) {
    assertEquals(i, out.getFilePointer());
    final int len = (int) Math.min(arr.length, numBytes - i);
    out.writeBytes(arr, len);
    i += len;
  }
  assertEquals(numBytes, out.getFilePointer());
  out.close();
  final IndexInput in = dir.openInput("foo", IOContext.DEFAULT);
  p.copy(in, numBytes);
  final PagedBytes.Reader reader = p.freeze(random().nextBoolean());

  for (long offset : new long[] {0L, Integer.MAX_VALUE, numBytes - 1,
      TestUtil.nextLong(random(), 1, numBytes - 2)}) {
    BytesRef b = new BytesRef();
    reader.fillSlice(b, offset, 1);
    assertEquals(arr[(int) (offset % arr.length)], b.bytes[b.offset]);
  }
  in.close();
  dir.close();
}
 
Example 27
Source Project: lucene-solr   Source File: SolrSnapshotMetaDataManager.java    License: Apache License 2.0
private synchronized void persist() throws IOException {
  String fileName = SNAPSHOTS_PREFIX + nextWriteGen;
  IndexOutput out = dir.createOutput(fileName, IOContext.DEFAULT);
  boolean success = false;
  try {
    CodecUtil.writeHeader(out, CODEC_NAME, VERSION_CURRENT);
    out.writeVInt(nameToDetailsMapping.size());
    for(Entry<String,SnapshotMetaData> ent : nameToDetailsMapping.entrySet()) {
      out.writeString(ent.getKey());
      out.writeString(ent.getValue().getIndexDirPath());
      out.writeVLong(ent.getValue().getGenerationNumber());
    }
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
      IOUtils.deleteFilesIgnoringExceptions(dir, fileName);
    } else {
      IOUtils.close(out);
    }
  }

  dir.sync(Collections.singletonList(fileName));

  if (nextWriteGen > 0) {
    String lastSaveFile = SNAPSHOTS_PREFIX + (nextWriteGen-1);
    // exception OK: likely it didn't exist
    IOUtils.deleteFilesIgnoringExceptions(dir, lastSaveFile);
  }

  nextWriteGen++;
}
 
Example 28
@Test
public void testSymlink() throws IOException {
  HdfsDirectory dir1 = new HdfsDirectory(_configuration, new Path(_base, "dir1"));
  IndexOutput output = dir1.createOutput("file1", IOContext.DEFAULT);
  output.writeLong(12345);
  output.close();

  assertTrue(dir1.fileExists("file1"));

  HdfsDirectory dir2 = new HdfsDirectory(_configuration, new Path(_base, "dir2"));
  dir1.copy(dir2, "file1", "file2", IOContext.DEFAULT);

  assertTrue(dir2.fileExists("file2"));
  assertEquals(8, dir2.fileLength("file2"));

  String[] listAll = dir2.listAll();
  assertEquals(1, listAll.length);
  assertEquals("file2", listAll[0]);

  IndexInput input = dir2.openInput("file2", IOContext.DEFAULT);
  assertEquals(12345, input.readLong());
  input.close();

  dir2.deleteFile("file2");

  assertFalse(dir2.fileExists("file2"));
  assertTrue(dir1.fileExists("file1"));

  dir2.close();
  dir1.close();
}
 
Example 29
Source Project: lucene-solr   Source File: TestOfflineSorter.java    License: Apache License 2.0
/** Make sure corruption on a temp file (partition) is caught, even if the corruption didn't confuse OfflineSorter! */
public void testBitFlippedOnPartition1() throws Exception {

  try (Directory dir0 = newMockDirectory()) {

    Directory dir = new FilterDirectory(dir0) {

      boolean corrupted;

      @Override
      public IndexOutput createTempOutput(String prefix, String suffix, IOContext context) throws IOException {
        IndexOutput out = in.createTempOutput(prefix, suffix, context);
        if (corrupted == false && suffix.equals("sort")) {
          corrupted = true;
          return new CorruptingIndexOutput(dir0, 544677, out);
        } else {
          return out;
        }
      }
    };

    IndexOutput unsorted = dir.createTempOutput("unsorted", "tmp", IOContext.DEFAULT);
    writeAll(unsorted, generateFixed((int) (OfflineSorter.MB * 3)));

    CorruptIndexException e = expectThrows(CorruptIndexException.class, () -> {
        new OfflineSorter(dir, "foo", OfflineSorter.DEFAULT_COMPARATOR, BufferSize.megabytes(1), 10, -1, null, 0).sort(unsorted.getName());
      });
    assertTrue(e.getMessage().contains("checksum failed (hardware problem?)"));
  }
}
 
Example 30
Source Project: lucene-solr   Source File: SegmentInfos.java    License: Apache License 2.0
private void write(Directory directory) throws IOException {

    long nextGeneration = getNextPendingGeneration();
    String segmentFileName = IndexFileNames.fileNameFromGeneration(IndexFileNames.PENDING_SEGMENTS,
                                                                   "",
                                                                   nextGeneration);

    // Always advance the generation on write:
    generation = nextGeneration;
    
    IndexOutput segnOutput = null;
    boolean success = false;

    try {
      segnOutput = directory.createOutput(segmentFileName, IOContext.DEFAULT);
      write(segnOutput);
      segnOutput.close();
      directory.sync(Collections.singleton(segmentFileName));
      success = true;
    } finally {
      if (success) {
        pendingCommit = true;
      } else {
        // We hit an exception above; try to close the file
        // but suppress any exception:
        IOUtils.closeWhileHandlingException(segnOutput);
        // Try not to leave a truncated segments_N file in
        // the index:
        IOUtils.deleteFilesIgnoringExceptions(directory, segmentFileName);
      }
    }
  }