Java Code Examples for org.apache.lucene.store.Directory#openInput()
The following examples show how to use org.apache.lucene.store.Directory#openInput().
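Before the real-world examples, here is a minimal sketch of the basic pattern the snippets below share: write a file through Directory#createOutput(), then reopen it with Directory#openInput() and read it back. The directory path and the file name "demo.bin" are illustrative placeholders, not taken from any of the projects below.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;

public class OpenInputDemo {
  public static void main(String[] args) throws IOException {
    // Illustrative path; any Directory implementation works the same way.
    try (Directory dir = FSDirectory.open(Paths.get("/tmp/lucene-demo"))) {
      // First create a file so there is something to open.
      try (IndexOutput out = dir.createOutput("demo.bin", IOContext.DEFAULT)) {
        out.writeLong(42L);
      }
      // openInput() returns an IndexInput positioned at the start of the file.
      try (IndexInput in = dir.openInput("demo.bin", IOContext.DEFAULT)) {
        System.out.println("length=" + in.length() + " value=" + in.readLong());
      }
    }
  }
}

Note that an IndexInput must always be closed; the examples below either close it explicitly or manage it with try-with-resources.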
Example 1
Source File: TestIndexedDISI.java, from lucene-solr (Apache License 2.0)
private void assertAdvanceBeyondEnd(BitSet set, Directory dir) throws IOException {
  final int cardinality = set.cardinality();
  final byte denseRankPower = 9; // Not tested here so fixed to isolate factors
  long length;
  int jumpTableentryCount;
  try (IndexOutput out = dir.createOutput("bar", IOContext.DEFAULT)) {
    jumpTableentryCount = IndexedDISI.writeBitSet(new BitSetIterator(set, cardinality), out, denseRankPower);
  }

  try (IndexInput in = dir.openInput("bar", IOContext.DEFAULT)) {
    BitSetIterator disi2 = new BitSetIterator(set, cardinality);
    int doc = disi2.docID();
    int index = 0;
    while (doc < cardinality) {
      doc = disi2.nextDoc();
      index++;
    }

    IndexedDISI disi = new IndexedDISI(in, 0L, in.length(), jumpTableentryCount, denseRankPower, cardinality);
    // Advance 1 docID beyond end
    assertFalse("There should be no set bit beyond the valid docID range", disi.advanceExact(set.length()));
    disi.advance(doc); // Should be the special docID signifying NO_MORE_DOCS from the BitSetIterator
    assertEquals("The index when advancing beyond the last defined docID should be correct",
        index, disi.index() + 1); // disi.index()+1 as the while-loop also counts the NO_MORE_DOCS
  }
}
Example 2
Source File: BaseCompoundFormatTestCase.java, from lucene-solr (Apache License 2.0)
/**
 * This test creates a compound file based on a single file.
 * Files of different sizes are tested: 0, 1, 10, 100 bytes.
 */
public void testSingleFile() throws IOException {
  int data[] = new int[] { 0, 1, 10, 100 };
  for (int i = 0; i < data.length; i++) {
    String testfile = "_" + i + ".test";
    Directory dir = newDirectory();
    SegmentInfo si = newSegmentInfo(dir, "_" + i);
    createSequenceFile(dir, testfile, (byte) 0, data[i], si.getId(), "suffix");

    si.setFiles(Collections.singleton(testfile));
    si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
    Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);

    IndexInput expected = dir.openInput(testfile, newIOContext(random()));
    IndexInput actual = cfs.openInput(testfile, newIOContext(random()));
    assertSameStreams(testfile, expected, actual);
    assertSameSeekBehavior(testfile, expected, actual);
    expected.close();
    actual.close();
    cfs.close();
    dir.close();
  }
}
Example 3
Source File: TestDirectPacked.java, from lucene-solr (Apache License 2.0)
/** simple encode/decode */
public void testSimple() throws Exception {
  Directory dir = newDirectory();
  int bitsPerValue = DirectWriter.bitsRequired(2);
  IndexOutput output = dir.createOutput("foo", IOContext.DEFAULT);
  DirectWriter writer = DirectWriter.getInstance(output, 5, bitsPerValue);
  writer.add(1);
  writer.add(0);
  writer.add(2);
  writer.add(1);
  writer.add(2);
  writer.finish();
  output.close();
  IndexInput input = dir.openInput("foo", IOContext.DEFAULT);
  LongValues reader = DirectReader.getInstance(input.randomAccessSlice(0, input.length()), bitsPerValue, 0);
  assertEquals(1, reader.get(0));
  assertEquals(0, reader.get(1));
  assertEquals(2, reader.get(2));
  assertEquals(1, reader.get(3));
  assertEquals(2, reader.get(4));
  input.close();
  dir.close();
}
Example 4
Source File: BaseDirectoryTestSuite.java, from incubator-retired-blur (Apache License 2.0)
@Test
public void testLongReadAndClone() throws IOException {
  FSDirectory control = FSDirectory.open(fileControl);
  Directory dir = getControlDir(control, directory);
  String name = writeFile(dir, 10 * 1000 * 1000);
  IndexInput input = dir.openInput(name, IOContext.DEFAULT);
  readFile(input, 1000 * 1000);
  IndexInput clone = input.clone();
  clone.readByte();
  input.close();
}
Example 5
Source File: Store.java, from crate (Apache License 2.0)
public static void checkIntegrity(final StoreFileMetaData md, final Directory directory) throws IOException {
  try (IndexInput input = directory.openInput(md.name(), IOContext.READONCE)) {
    // first check the length no matter how old this file is
    if (input.length() != md.length()) {
      throw new CorruptIndexException("expected length=" + md.length() + " != actual length: " + input.length() +
          " : file truncated?", input);
    }
    // throw exception if the file is corrupt
    String checksum = Store.digestToString(CodecUtil.checksumEntireFile(input));
    // throw exception if metadata is inconsistent
    if (!checksum.equals(md.checksum())) {
      throw new CorruptIndexException("inconsistent metadata: lucene checksum=" + checksum +
          ", metadata checksum=" + md.checksum(), input);
    }
  }
}
Example 6
Source File: OfflinePointReader.java, from lucene-solr (Apache License 2.0)
public OfflinePointReader(Directory tempDir, String tempFileName, int packedBytesLength,
                          long start, long length, byte[] reusableBuffer) throws IOException {
  this.bytesPerDoc = packedBytesLength + Integer.BYTES;
  this.packedValueLength = packedBytesLength;

  if ((start + length) * bytesPerDoc + CodecUtil.footerLength() > tempDir.fileLength(tempFileName)) {
    throw new IllegalArgumentException("requested slice is beyond the length of this file: start=" + start
        + " length=" + length + " bytesPerDoc=" + bytesPerDoc
        + " fileLength=" + tempDir.fileLength(tempFileName) + " tempFileName=" + tempFileName);
  }
  if (reusableBuffer == null) {
    throw new IllegalArgumentException("[reusableBuffer] cannot be null");
  }
  if (reusableBuffer.length < bytesPerDoc) {
    throw new IllegalArgumentException("Length of [reusableBuffer] must be bigger than " + bytesPerDoc);
  }
  this.maxPointOnHeap = reusableBuffer.length / bytesPerDoc;

  // Best-effort checksumming:
  if (start == 0 && length * bytesPerDoc == tempDir.fileLength(tempFileName) - CodecUtil.footerLength()) {
    // If we are going to read the entire file, e.g. because BKDWriter is now
    // partitioning it, we open with checksums:
    in = tempDir.openChecksumInput(tempFileName, IOContext.READONCE);
  } else {
    // Since we are going to seek somewhere in the middle of a possibly huge
    // file, and not read all bytes from there, don't use ChecksumIndexInput here.
    // This is typically fine, because this same file will later be read fully,
    // at another level of the BKDWriter recursion:
    in = tempDir.openInput(tempFileName, IOContext.READONCE);
  }

  name = tempFileName;

  long seekFP = start * bytesPerDoc;
  in.seek(seekFP);
  countLeft = length;
  this.onHeapBuffer = reusableBuffer;
  this.pointValue = new OfflinePointValue(onHeapBuffer, packedValueLength);
}
Example 7
Source File: FieldsIndexReader.java, from lucene-solr (Apache License 2.0)
FieldsIndexReader(Directory dir, String name, String suffix, String extensionPrefix,
                  String codecName, byte[] id) throws IOException {
  try (ChecksumIndexInput metaIn = dir.openChecksumInput(IndexFileNames.segmentFileName(
      name, suffix, extensionPrefix + FIELDS_META_EXTENSION_SUFFIX), IOContext.READONCE)) {
    Throwable priorE = null;
    try {
      CodecUtil.checkIndexHeader(metaIn, codecName + "Meta", VERSION_START, VERSION_CURRENT, id, suffix);
      maxDoc = metaIn.readInt();
      blockShift = metaIn.readInt();
      numChunks = metaIn.readInt();
      docsStartPointer = metaIn.readLong();
      docsMeta = DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
      docsEndPointer = startPointersStartPointer = metaIn.readLong();
      startPointersMeta = DirectMonotonicReader.loadMeta(metaIn, numChunks, blockShift);
      startPointersEndPointer = metaIn.readLong();
      maxPointer = metaIn.readLong();
    } finally {
      CodecUtil.checkFooter(metaIn, priorE);
    }
  }

  indexInput = dir.openInput(IndexFileNames.segmentFileName(
      name, suffix, extensionPrefix + FIELDS_INDEX_EXTENSION_SUFFIX), IOContext.READ);
  boolean success = false;
  try {
    CodecUtil.checkIndexHeader(indexInput, codecName + "Idx", VERSION_START, VERSION_CURRENT, id, suffix);
    CodecUtil.retrieveChecksum(indexInput);
    success = true;
  } finally {
    if (success == false) {
      indexInput.close();
    }
  }
  final RandomAccessInput docsSlice = indexInput.randomAccessSlice(docsStartPointer,
      docsEndPointer - docsStartPointer);
  final RandomAccessInput startPointersSlice = indexInput.randomAccessSlice(startPointersStartPointer,
      startPointersEndPointer - startPointersStartPointer);
  docs = DirectMonotonicReader.getInstance(docsMeta, docsSlice);
  startPointers = DirectMonotonicReader.getInstance(startPointersMeta, startPointersSlice);
}
Example 8
Source File: Blur022SegmentInfoReader.java, from incubator-retired-blur (Apache License 2.0)
@Override
public SegmentInfo read(Directory dir, String segment, IOContext context) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(segment, "", Blur022SegmentInfoFormat.SI_EXTENSION);
  final IndexInput input = dir.openInput(fileName, context);
  boolean success = false;
  try {
    CodecUtil.checkHeader(input, Blur022SegmentInfoFormat.CODEC_NAME,
        Blur022SegmentInfoFormat.VERSION_START, Blur022SegmentInfoFormat.VERSION_CURRENT);
    final String version = input.readString();
    final int docCount = input.readInt();
    if (docCount < 0) {
      throw new CorruptIndexException("invalid docCount: " + docCount + " (resource=" + input + ")");
    }
    final boolean isCompoundFile = input.readByte() == SegmentInfo.YES;
    final Map<String, String> diagnostics = input.readStringStringMap();
    final Map<String, String> attributes = input.readStringStringMap();
    final Set<String> files = input.readStringSet();

    if (input.getFilePointer() != input.length()) {
      throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read "
          + input.getFilePointer() + " vs size " + input.length() + " (resource: " + input + ")");
    }

    final SegmentInfo si = new SegmentInfo(dir, version, segment, docCount, isCompoundFile, null,
        diagnostics, Collections.unmodifiableMap(attributes));
    si.setFiles(files);
    success = true;
    return si;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(input);
    } else {
      input.close();
    }
  }
}
Example 9
Source File: BaseCompoundFormatTestCase.java, from lucene-solr (Apache License 2.0)
public void testResourceNameInsideCompoundFile() throws Exception {
  Directory dir = newDirectory();
  String subFile = "_123.xyz";
  SegmentInfo si = newSegmentInfo(dir, "_123");
  createSequenceFile(dir, subFile, (byte) 0, 10, si.getId(), "suffix");

  si.setFiles(Collections.singletonList(subFile));
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);

  IndexInput in = cfs.openInput(subFile, IOContext.DEFAULT);
  String desc = in.toString();
  assertTrue("resource description hides that it's inside a compound file: " + desc,
      desc.contains("[slice=" + subFile + "]"));
  cfs.close();
  dir.close();
}
Example 10
Source File: TableCopyCommand.java, from incubator-retired-blur (Apache License 2.0)
private long copy(String file, Directory srcDirectory, HdfsDirectory destDirectory) throws IOException {
  long fileLength = srcDirectory.fileLength(file);
  IOContext context = IOContext.DEFAULT;
  IndexOutput os = null;
  IndexInput is = null;
  IOException priorException = null;
  try {
    os = destDirectory.createOutput(file, context);
    is = srcDirectory.openInput(file, context);
    os.copyBytes(is, is.length());
  } catch (IOException ioe) {
    priorException = ioe;
  } finally {
    boolean success = false;
    try {
      IOUtils.closeWhileHandlingException(priorException, os, is);
      success = true;
    } finally {
      if (!success) {
        try {
          destDirectory.deleteFile(file);
        } catch (Throwable t) {
          // ignore; best-effort cleanup of the partial copy
        }
      }
    }
  }
  return fileLength;
}
Example 11
Source File: TestIndexFileDeleter.java, from lucene-solr (Apache License 2.0)
public void copyFile(Directory dir, String src, String dest) throws IOException {
  IndexInput in = dir.openInput(src, newIOContext(random()));
  IndexOutput out = dir.createOutput(dest, newIOContext(random()));
  byte[] b = new byte[1024];
  long remainder = in.length();
  while (remainder > 0) {
    int len = (int) Math.min(b.length, remainder);
    in.readBytes(b, 0, len);
    out.writeBytes(b, len);
    remainder -= len;
  }
  in.close();
  out.close();
}
Example 12
Source File: Store.java, from crate (Apache License 2.0)
private static void checksumFromLuceneFile(Directory directory, String file, Map<String, StoreFileMetaData> builder,
                                           Logger logger, Version version, boolean readFileAsHash) throws IOException {
  final String checksum;
  final BytesRefBuilder fileHash = new BytesRefBuilder();
  try (IndexInput in = directory.openInput(file, IOContext.READONCE)) {
    final long length;
    try {
      length = in.length();
      if (length < CodecUtil.footerLength()) {
        // truncated files trigger IAE if we seek negative... these files are really corrupted though
        throw new CorruptIndexException("Can't retrieve checksum from file: " + file
            + " file length must be >= " + CodecUtil.footerLength() + " but was: " + in.length(), in);
      }
      if (readFileAsHash) {
        // additional safety: we checksum the entire file we read the hash for...
        final VerifyingIndexInput verifyingIndexInput = new VerifyingIndexInput(in);
        hashFile(fileHash, new InputStreamIndexInput(verifyingIndexInput, length), length);
        checksum = digestToString(verifyingIndexInput.verify());
      } else {
        checksum = digestToString(CodecUtil.retrieveChecksum(in));
      }
    } catch (Exception ex) {
      logger.debug(() -> new ParameterizedMessage("Can not retrieve checksum from file [{}]", file), ex);
      throw ex;
    }
    builder.put(file, new StoreFileMetaData(file, length, checksum, version, fileHash.get()));
  }
}
Example 13
Source File: TestDirectMonotonic.java, from lucene-solr (Apache License 2.0)
public void testConstantSlope() throws IOException {
  Directory dir = newDirectory();
  final int blockShift = TestUtil.nextInt(random(), DirectMonotonicWriter.MIN_BLOCK_SHIFT,
      DirectMonotonicWriter.MAX_BLOCK_SHIFT);
  final int numValues = TestUtil.nextInt(random(), 1, 1 << 20);
  final long min = random().nextLong();
  final long inc = random().nextInt(1 << random().nextInt(20));

  List<Long> actualValues = new ArrayList<>();
  for (int i = 0; i < numValues; ++i) {
    actualValues.add(min + inc * i);
  }

  final long dataLength;
  try (IndexOutput metaOut = dir.createOutput("meta", IOContext.DEFAULT);
      IndexOutput dataOut = dir.createOutput("data", IOContext.DEFAULT)) {
    DirectMonotonicWriter w = DirectMonotonicWriter.getInstance(metaOut, dataOut, numValues, blockShift);
    for (long v : actualValues) {
      w.add(v);
    }
    w.finish();
    dataLength = dataOut.getFilePointer();
  }

  try (IndexInput metaIn = dir.openInput("meta", IOContext.READONCE);
      IndexInput dataIn = dir.openInput("data", IOContext.DEFAULT)) {
    DirectMonotonicReader.Meta meta = DirectMonotonicReader.loadMeta(metaIn, numValues, blockShift);
    LongValues values = DirectMonotonicReader.getInstance(meta, dataIn.randomAccessSlice(0, dataLength));
    for (int i = 0; i < numValues; ++i) {
      assertEquals(actualValues.get(i).longValue(), values.get(i));
    }
    assertEquals(0, dataIn.getFilePointer());
  }

  dir.close();
}
Example 14
Source File: SolrCore.java, from lucene-solr (Apache License 2.0)
private String getIndexPropertyFromPropFile(Directory dir) throws IOException {
  IndexInput input;
  try {
    input = dir.openInput(IndexFetcher.INDEX_PROPERTIES, IOContext.DEFAULT);
  } catch (FileNotFoundException | NoSuchFileException e) {
    // Swallow this error, dataDir/index is the right thing to return
    // if there is no index.properties file. All other exceptions will
    // propagate to the caller.
    return dataDir + "index/";
  }
  final InputStream is = new PropertiesInputStream(input); // c'tor just assigns a variable here, no exception thrown.
  try {
    Properties p = new Properties();
    p.load(new InputStreamReader(is, StandardCharsets.UTF_8));

    String s = p.getProperty("index");
    if (s != null && s.trim().length() > 0) {
      return dataDir + s.trim();
    }

    // We'll return dataDir/index/ if the properties file has an "index" property with
    // no associated value or does not have an index property at all.
    return dataDir + "index/";
  } finally {
    IOUtils.closeQuietly(is);
  }
}
Example 15
Source File: IndexRevisionTest.java, from lucene-solr (Apache License 2.0)
@Test
public void testOpen() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig conf = new IndexWriterConfig(null);
  conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
  IndexWriter writer = new IndexWriter(dir, conf);
  try {
    writer.addDocument(new Document());
    writer.commit();
    Revision rev = new IndexRevision(writer);
    @SuppressWarnings("unchecked")
    Map<String, List<RevisionFile>> sourceFiles = rev.getSourceFiles();
    String source = sourceFiles.keySet().iterator().next();
    for (RevisionFile file : sourceFiles.values().iterator().next()) {
      IndexInput src = dir.openInput(file.fileName, IOContext.READONCE);
      InputStream in = rev.open(source, file.fileName);
      assertEquals(src.length(), in.available());
      byte[] srcBytes = new byte[(int) src.length()];
      byte[] inBytes = new byte[(int) src.length()];
      int offset = 0;
      if (random().nextBoolean()) {
        int skip = random().nextInt(10);
        if (skip >= src.length()) {
          skip = 0;
        }
        in.skip(skip);
        src.seek(skip);
        offset = skip;
      }
      src.readBytes(srcBytes, offset, srcBytes.length - offset);
      in.read(inBytes, offset, inBytes.length - offset);
      assertArrayEquals(srcBytes, inBytes);
      IOUtils.close(src, in);
    }
    writer.close();
  } finally {
    IOUtils.close(dir);
  }
}
Example 16
Source File: BaseCompoundFormatTestCase.java, from lucene-solr (Apache License 2.0)
/** This test opens two files from a compound stream and verifies that
 *  their file positions are independent of each other. */
public void testRandomAccess() throws IOException {
  Directory dir = newDirectory();
  Directory cr = createLargeCFS(dir);

  // Open two files: expected inputs from the raw directory, actual inputs from the compound reader
  IndexInput e1 = dir.openInput("_123.f11", newIOContext(random()));
  IndexInput e2 = dir.openInput("_123.f3", newIOContext(random()));

  IndexInput a1 = cr.openInput("_123.f11", newIOContext(random()));
  IndexInput a2 = cr.openInput("_123.f3", newIOContext(random()));

  // Seek the first pair
  e1.seek(100);
  a1.seek(100);
  assertEquals(100, e1.getFilePointer());
  assertEquals(100, a1.getFilePointer());
  byte be1 = e1.readByte();
  byte ba1 = a1.readByte();
  assertEquals(be1, ba1);

  // Now seek the second pair
  e2.seek(1027);
  a2.seek(1027);
  assertEquals(1027, e2.getFilePointer());
  assertEquals(1027, a2.getFilePointer());
  byte be2 = e2.readByte();
  byte ba2 = a2.readByte();
  assertEquals(be2, ba2);

  // Now make sure the first one didn't move
  assertEquals(101, e1.getFilePointer());
  assertEquals(101, a1.getFilePointer());
  be1 = e1.readByte();
  ba1 = a1.readByte();
  assertEquals(be1, ba1);

  // Now move the first one again, past the buffer length
  e1.seek(1910);
  a1.seek(1910);
  assertEquals(1910, e1.getFilePointer());
  assertEquals(1910, a1.getFilePointer());
  be1 = e1.readByte();
  ba1 = a1.readByte();
  assertEquals(be1, ba1);

  // Now make sure the second set didn't move
  assertEquals(1028, e2.getFilePointer());
  assertEquals(1028, a2.getFilePointer());
  be2 = e2.readByte();
  ba2 = a2.readByte();
  assertEquals(be2, ba2);

  // Move the second set back, again crossing the buffer size
  e2.seek(17);
  a2.seek(17);
  assertEquals(17, e2.getFilePointer());
  assertEquals(17, a2.getFilePointer());
  be2 = e2.readByte();
  ba2 = a2.readByte();
  assertEquals(be2, ba2);

  // Finally, make sure the first set didn't move
  assertEquals(1911, e1.getFilePointer());
  assertEquals(1911, a1.getFilePointer());
  be1 = e1.readByte();
  ba1 = a1.readByte();
  assertEquals(be1, ba1);

  e1.close();
  e2.close();
  a1.close();
  a2.close();
  cr.close();
  dir.close();
}
Example 17
Source File: IndexAndTaxonomyRevisionTest.java, from lucene-solr (Apache License 2.0)
@Test
public void testOpen() throws Exception {
  Directory indexDir = newDirectory();
  IndexWriterConfig conf = new IndexWriterConfig(null);
  conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
  IndexWriter indexWriter = new IndexWriter(indexDir, conf);

  Directory taxoDir = newDirectory();
  SnapshotDirectoryTaxonomyWriter taxoWriter = new SnapshotDirectoryTaxonomyWriter(taxoDir);
  try {
    indexWriter.addDocument(newDocument(taxoWriter));
    indexWriter.commit();
    taxoWriter.commit();
    Revision rev = new IndexAndTaxonomyRevision(indexWriter, taxoWriter);
    for (Entry<String, List<RevisionFile>> e : rev.getSourceFiles().entrySet()) {
      String source = e.getKey();
      @SuppressWarnings("resource") // silly, both directories are closed in the end
      Directory dir = source.equals(IndexAndTaxonomyRevision.INDEX_SOURCE) ? indexDir : taxoDir;
      for (RevisionFile file : e.getValue()) {
        IndexInput src = dir.openInput(file.fileName, IOContext.READONCE);
        InputStream in = rev.open(source, file.fileName);
        assertEquals(src.length(), in.available());
        byte[] srcBytes = new byte[(int) src.length()];
        byte[] inBytes = new byte[(int) src.length()];
        int offset = 0;
        if (random().nextBoolean()) {
          int skip = random().nextInt(10);
          if (skip >= src.length()) {
            skip = 0;
          }
          in.skip(skip);
          src.seek(skip);
          offset = skip;
        }
        src.readBytes(srcBytes, offset, srcBytes.length - offset);
        in.read(inBytes, offset, inBytes.length - offset);
        assertArrayEquals(srcBytes, inBytes);
        IOUtils.close(src, in);
      }
    }
    indexWriter.close();
  } finally {
    IOUtils.close(indexWriter, taxoWriter, taxoDir, indexDir);
  }
}
Example 18
Source File: TestSTBlockReader.java, from lucene-solr (Apache License 2.0)
MockSTBlockReader(IndexDictionary.BrowserSupplier supplier, List<MockSTBlockLine> lines, Directory directory,
                  FieldInfo fieldInfo, FieldInfos fieldInfos) throws IOException {
  super(supplier,
      directory.openInput(MOCK_BLOCK_OUTPUT_NAME, IOContext.DEFAULT),
      getMockPostingReaderBase(),
      mockFieldMetadata(fieldInfo, getLastTermForField(lines, fieldInfo.name)),
      null, fieldInfos);
  this.lines = lines;
}
Example 19
Source File: TestAllFilesHaveChecksumFooter.java, from lucene-solr (Apache License 2.0)
private void checkFooter(Directory dir, String file) throws IOException {
  try (IndexInput in = dir.openInput(file, newIOContext(random()))) {
    CodecUtil.checksumEntireFile(in);
  }
}
Example 20
Source File: SimpleTextCompoundFormat.java, from lucene-solr (Apache License 2.0)
@Override
public void write(Directory dir, SegmentInfo si, IOContext context) throws IOException {
  String dataFile = IndexFileNames.segmentFileName(si.name, "", DATA_EXTENSION);

  int numFiles = si.files().size();
  String names[] = si.files().toArray(new String[numFiles]);
  Arrays.sort(names);
  long startOffsets[] = new long[numFiles];
  long endOffsets[] = new long[numFiles];

  BytesRefBuilder scratch = new BytesRefBuilder();

  try (IndexOutput out = dir.createOutput(dataFile, context)) {
    for (int i = 0; i < names.length; i++) {
      // write header for file
      SimpleTextUtil.write(out, HEADER);
      SimpleTextUtil.write(out, names[i], scratch);
      SimpleTextUtil.writeNewline(out);

      // write bytes for file
      startOffsets[i] = out.getFilePointer();
      try (IndexInput in = dir.openInput(names[i], IOContext.READONCE)) {
        out.copyBytes(in, in.length());
      }
      endOffsets[i] = out.getFilePointer();
    }

    long tocPos = out.getFilePointer();

    // write CFS table
    SimpleTextUtil.write(out, TABLE);
    SimpleTextUtil.write(out, Integer.toString(numFiles), scratch);
    SimpleTextUtil.writeNewline(out);

    for (int i = 0; i < names.length; i++) {
      SimpleTextUtil.write(out, TABLENAME);
      SimpleTextUtil.write(out, names[i], scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, TABLESTART);
      SimpleTextUtil.write(out, Long.toString(startOffsets[i]), scratch);
      SimpleTextUtil.writeNewline(out);

      SimpleTextUtil.write(out, TABLEEND);
      SimpleTextUtil.write(out, Long.toString(endOffsets[i]), scratch);
      SimpleTextUtil.writeNewline(out);
    }

    DecimalFormat df = new DecimalFormat(OFFSETPATTERN, DecimalFormatSymbols.getInstance(Locale.ROOT));
    SimpleTextUtil.write(out, TABLEPOS);
    SimpleTextUtil.write(out, df.format(tocPos), scratch);
    SimpleTextUtil.writeNewline(out);
  }
}