Java Code Examples for htsjdk.samtools.util.BlockCompressedFilePointerUtil

The following examples show how to use htsjdk.samtools.util.BlockCompressedFilePointerUtil. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: hmftools — Source File: ChunkHttpBuffer.java — License: GNU General Public License v3.0
/**
 * Asynchronously reads the compressed byte range covered by the given chunk.
 *
 * <p>The chunk's start and end are BGZF virtual file pointers; only their block-address
 * components are used, so the read spans whole compressed blocks.
 *
 * @param chunk chunk whose virtual-pointer range identifies the bytes to fetch
 * @return a future completing with the raw bytes, or a failed future if the
 *         chunk's start block lies after its end block
 */
@NotNull
private ListenableFuture<byte[]> getBytesForChunk(@NotNull Chunk chunk) {
    final long blockStart = BlockCompressedFilePointerUtil.getBlockAddress(chunk.getChunkStart());
    final long blockEnd = BlockCompressedFilePointerUtil.getBlockAddress(chunk.getChunkEnd());
    // Guard clause: a start block past the end block means the chunk is malformed.
    if (blockStart > blockEnd) {
        return Futures.immediateFailedFuture(new IllegalArgumentException("start offset is greater than end"));
    }
    return readUrlBytes(blockStart, blockEnd - blockStart);
}
 
Example 2
/**
 * Returns a copy of the given chunks with each chunk's end pushed forward by one maximum
 * compressed BGZF block, so a reader that stops at the (possibly mid-block) original end
 * still has a complete block available.
 *
 * <p>Chunk starts are left untouched; only the end virtual pointer is rewritten, capped at
 * {@code MAX_BLOCK_ADDRESS} so it stays encodable as a virtual file pointer.
 *
 * @param chunks chunks to expand; not modified
 * @return a new list of expanded chunks, in the same order
 */
@NotNull
private static List<Chunk> expandChunks(@NotNull List<Chunk> chunks) {
    // Presize: output has exactly one chunk per input chunk.
    List<Chunk> result = Lists.newArrayListWithCapacity(chunks.size());
    for (Chunk chunk : chunks) {
        long chunkEndBlockAddress = BlockCompressedFilePointerUtil.getBlockAddress(chunk.getChunkEnd());
        long extendedEndBlockAddress = chunkEndBlockAddress + BlockCompressedStreamConstants.MAX_COMPRESSED_BLOCK_SIZE;
        // Cap at the largest block address a virtual file pointer can encode.
        long newChunkEnd = Math.min(extendedEndBlockAddress, MAX_BLOCK_ADDRESS);
        // Use the library's encoder instead of a hand-rolled "<< 16": it performs the
        // identical packing (block offset 0) and range-checks the block address.
        long chunkEndVirtualPointer = BlockCompressedFilePointerUtil.makeFilePointer(newChunkEnd);
        result.add(new Chunk(chunk.getChunkStart(), chunkEndVirtualPointer));
    }
    return result;
}
 
Example 3
/**
 * Builds both a BAI index and an SBI splitting index for a coordinate-sorted BAM in a
 * single pass over its records.
 *
 * @param inputBam                 coordinate-sorted BAM file to index
 * @param index                    output file for the SBI splitting index; the BAI is
 *                                 written alongside it with the BAI extension
 * @param granularity              record granularity passed to the SBI index writer
 * @param readValidationStringency validation stringency used when reading records
 * @throws UserException if writing either index fails with an IOException
 */
private static void createBaiAndSplittingIndex(final File inputBam, final File index, final long granularity, final ValidationStringency readValidationStringency) {
    assertIsBam(inputBam);
    // INCLUDE_SOURCE_IN_RECORDS is required so each record carries its file pointer,
    // which the splitting indexer consumes below.
    try(SamReader reader = SamReaderFactory.makeDefault()
            .validationStringency(readValidationStringency)
            .setOption(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS, true)
            .open(inputBam);
        BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(index))) {
            final SAMFileHeader header = reader.getFileHeader();
            assertBamIsCoordinateSorted(header);
            final SBIIndexWriter indexer = new SBIIndexWriter(out, granularity);

            // BAI output path: same as the SBI path but with the BAI extension.
            final BAMIndexer bamIndexer = new BAMIndexer(IOUtils.replaceExtension(index, FileExtensions.BAI_INDEX), header);
            BAMFileSpan lastFilePointer = null;
            // Single pass: feed each record's file offset to the SBI writer and the
            // record itself to the BAI writer, remembering the last span seen.
            for(final SAMRecord read : reader){
                BAMFileSpan filePointer = (BAMFileSpan) read.getFileSource().getFilePointer();
                indexer.processRecord(filePointer.getFirstOffset());
                bamIndexer.processAlignment(read);
                lastFilePointer = filePointer;
            }
            // nextStart = virtual pointer just past the last record; 0 means no usable
            // pointer was found (e.g. an empty BAM).
            long nextStart = 0;
            if (lastFilePointer != null && !lastFilePointer.getChunks().isEmpty()) {
                nextStart = lastFilePointer.getChunks().get(0).getChunkEnd();
            }
            if (nextStart == 0) {
                nextStart = BlockCompressedFilePointerUtil.makeFilePointer(inputBam.length()); // default to file length (in case of no reads)
            }
            indexer.finish(nextStart, inputBam.length()); // nextStart is start of next record that would be added
            bamIndexer.finish();
    } catch (final IOException e) {
        throw new UserException("Couldn't create splitting index", e);
    }
}
 
Example 4
Source Project: Hadoop-BAM — Source File: SAMFileMerger.java — License: MIT License
/**
 * Shifts a BGZF virtual file pointer by a byte offset in the underlying compressed file.
 *
 * <p>The block-address component is moved by {@code offset}; the within-block offset is
 * preserved unchanged.
 *
 * @param virtualFilePointer virtual file pointer to shift
 * @param offset             number of compressed-file bytes to add to the block address
 * @return the shifted virtual file pointer
 */
private static long shiftVirtualFilePointer(long virtualFilePointer, long offset) {
  long blockAddress = BlockCompressedFilePointerUtil.getBlockAddress(virtualFilePointer);
  int blockOffset = BlockCompressedFilePointerUtil.getBlockOffset(virtualFilePointer);
  // Repack via the library encoder instead of a hand-rolled "<< 16 | blockOffset":
  // makeFilePointer performs the identical packing and additionally validates that
  // the shifted block address and the block offset are within the encodable range.
  return BlockCompressedFilePointerUtil.makeFilePointer(blockAddress + offset, blockOffset);
}