htsjdk.samtools.util.BlockCompressedOutputStream Java Examples

The following examples show how to use htsjdk.samtools.util.BlockCompressedOutputStream, drawn from several open-source projects. The original project and source file are noted above each example.
Example #1
Source File: BgZip.java    From rtg-tools with BSD 2-Clause "Simplified" License
@Override
protected void initFlags() {
  mFlags.setDescription("Compress a file with block gzip.");
  CommonFlagCategories.setCategories(mFlags);

  mFlags.registerRequired(File.class, CommonFlags.FILE, "file to (de)compress, use '-' for standard input").setCategory(INPUT_OUTPUT).setMaxCount(Integer.MAX_VALUE);

  mFlags.registerOptional('c', STDOUT_FLAG, "write on standard output, keep original files unchanged. Implied when using standard input").setCategory(INPUT_OUTPUT);
  mFlags.registerOptional('d', DECOMPRESS_FLAG, "decompress").setCategory(INPUT_OUTPUT);
  mFlags.registerOptional('f', FORCE_FLAG, "force overwrite of existing output file").setCategory(INPUT_OUTPUT);
  mFlags.registerOptional(NO_TERMINATE_FLAG, "if set, do not add the block gzip termination block").setCategory(INPUT_OUTPUT);
  mFlags.registerOptional('l', LEVEL_FLAG, Integer.class, CommonFlags.INT, "the compression level to use, between 1 (least but fast) and 9 (highest but slow)", BlockCompressedOutputStream.getDefaultCompressionLevel()).setCategory(INPUT_OUTPUT);

  mFlags.setValidator(flags -> flags.checkInRange(LEVEL_FLAG, 1, 9)
    && flags.checkNand(STDOUT_FLAG, FORCE_FLAG)
    && flags.checkNand(DECOMPRESS_FLAG, NO_TERMINATE_FLAG)
    && flags.checkNand(DECOMPRESS_FLAG, LEVEL_FLAG));
}
 
Example #2
Source File: BgZip.java    From rtg-tools with BSD 2-Clause "Simplified" License
private OutputStream getOutputStream(OutputStream out, File f, boolean stdout) throws FileNotFoundException {
  final OutputStream os;
  final String outputFilename = getOutputFilename(f, mFlags.isSet(DECOMPRESS_FLAG));
  if (!stdout && !mFlags.isSet(FORCE_FLAG)) { //if we aren't forcibly overwriting files
    if (outputFilename == null) {
      throw new NoTalkbackSlimException("unrecognized gzip extension on file: " + f.getPath() + " -- aborting");
    }
    final File outfile = new File(outputFilename);
    if (outfile.exists()) {
      throw new NoTalkbackSlimException("Output file \"" + outfile.getPath() + "\" already exists.");
    }
  }
  if (mFlags.isSet(DECOMPRESS_FLAG)) {
    os = stdout ? out : new FileOutputStream(outputFilename);
  } else {
    final File file = new File(f.getPath() + FileUtils.GZ_SUFFIX);
    os = stdout
      ? new BlockCompressedOutputStream(out, null, (Integer) mFlags.getValue(LEVEL_FLAG), !mFlags.isSet(NO_TERMINATE_FLAG))
      : new BlockCompressedOutputStream(new FileOutputStream(file), file, (Integer) mFlags.getValue(LEVEL_FLAG), !mFlags.isSet(NO_TERMINATE_FLAG));
  }
  return os;
}
 
Example #3
Source File: TabixIndexMerge.java    From rtg-tools with BSD 2-Clause "Simplified" License
/**
 * Merge indexes for files that will be concatenated.
 * @param output output index file
 * @param files <code>tabix</code> index files
 * @param dataFileSizes file size of corresponding data files
 * @throws IOException if an IO error occurs
 */
public static void mergeTabixFiles(File output, List<File> files, List<Long> dataFileSizes) throws IOException {
  long pointerAdjust = 0;
  final SequenceIndex[][] indexesSquared = new SequenceIndex[files.size()][];
  final String[][] sequenceNames = new String[files.size()][];
  TabixHeader mergedHeader = null;
  for (int i = 0; i < files.size(); ++i) {
    final File tbiFile = files.get(i);
    try (BlockCompressedInputStream bcis = new BlockCompressedInputStream(tbiFile)) {
      final TabixHeader th = TabixHeader.readHeader(bcis);
      sequenceNames[i] = th.getSequenceNamesUnpacked();
      if (mergedHeader != null) {
        mergedHeader = TabixHeader.mergeHeaders(mergedHeader, th);
      } else {
        mergedHeader = th;
      }
      indexesSquared[i] = loadFileIndexes(bcis, th.getNumSequences(), pointerAdjust);
    }
    pointerAdjust += dataFileSizes.get(i);
  }
  final List<SequenceIndex> indexes = collapseIndexes(indexesSquared, sequenceNames);
  TabixIndexer.mergeChunks(indexes);
  try (BlockCompressedOutputStream fos = new BlockCompressedOutputStream(output)) {
    TabixIndexer.writeIndex(indexes, mergedHeader.getOptions(), Arrays.asList(mergedHeader.getSequenceNamesUnpacked()), fos);
  }
}
 
Example #4
Source File: PicardCommandLineProgram.java    From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Create a common SAMFileWriter for use with Picard tools.
 *
 * @param outputFile    - the output file to write; cannot be null. If it has a .cram extension then a reference is required.
 * @param referenceFile - the reference source to use; cannot be null if the output file has a .cram extension.
 * @param header        - header to be used for the output writer
 * @param preSorted     - if true then the records must already be sorted to match the header sort order
 * @return SAMFileWriter
 */
public SAMFileWriter createSAMWriter(
        final File outputFile,
        final File referenceFile,
        final SAMFileHeader header,
        final boolean preSorted)
{
    BlockCompressedOutputStream.setDefaultCompressionLevel(COMPRESSION_LEVEL);

    SAMFileWriterFactory factory = new SAMFileWriterFactory()
            .setCreateIndex(CREATE_INDEX)
            .setCreateMd5File(CREATE_MD5_FILE);

    if (MAX_RECORDS_IN_RAM != null) {
        factory = factory.setMaxRecordsInRam(MAX_RECORDS_IN_RAM);
    }

    return ReadUtils.createCommonSAMWriterFromFactory(factory, outputFile, referenceFile, header, preSorted);
}
 
Example #5
Source File: BgzipFileHelper.java    From rtg-tools with BSD 2-Clause "Simplified" License
/**
 * Writes the contents of the given input <code>stream</code> to the given block compressed
 * gzipped file.
 *
 * @param stream an <code>InputStream</code>
 * @param file a <code>File</code> to write to
 * @return a <code>File</code> containing the contents of the stream
 * @exception IOException if an error occurs.
 * @exception NullPointerException if the stream or file is null
 */
public static File streamToBgzipFile(final InputStream stream, final File file) throws IOException {
  if (stream == null) {
    throw new NullPointerException("null stream given");
  }
  if (file == null) {
    throw new NullPointerException("null file given");
  }
  try (OutputStream out = new BlockCompressedOutputStream(file)) {
    final byte[] b = new byte[FileUtils.BUFFER_SIZE];
    int len = stream.read(b);
    while (len > 0) {
      out.write(b, 0, len);
      len = stream.read(b);
    }
  }
  return file;
}
 
Example #6
Source File: FilterBamTest.java    From Drop-seq with MIT License
@AfterMethod
public void restoreDeflaterFactory() {
	if (TestUtils.isMacOs()) {
		if (deflaterFactory != null) {
			BlockCompressedOutputStream.setDefaultDeflaterFactory(deflaterFactory);
		}
	}
}
 
Example #7
Source File: SAMFileHeader_Utils.java    From cramtools with Apache License 2.0
protected static void writeHeader(final OutputStream outputStream, final SAMFileHeader samFileHeader) {
	final BlockCompressedOutputStream blockCompressedOutputStream = new BlockCompressedOutputStream(outputStream,
			null);
	final BinaryCodec outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));
	writeHeader(outputBinaryCodec, samFileHeader);
	try {
		blockCompressedOutputStream.flush();
	} catch (final IOException ioe) {
		throw new RuntimeIOException(ioe);
	}
}
 
Example #8
Source File: CramToBam_OBA_Function.java    From cramtools with Apache License 2.0
@Override
public OrderedByteArray apply(OrderedByteArray object) {
	if (object == null)
		throw new NullPointerException();

	log.debug("processing container " + object.order);
	Container container;
	try {
		container = ContainerIO.readContainer(header.getVersion(), new ByteArrayInputStream(object.bytes));
		if (container.isEOF())
			return null;

		ArrayList<CramCompressionRecord> records = new ArrayList<CramCompressionRecord>(container.nofRecords);
		parser.getRecords(container, records, ValidationStringency.SILENT);
		n.normalize(records, null, 0, container.header.substitutionMatrix);

		ByteArrayOutputStream bamBAOS = new ByteArrayOutputStream();
		BlockCompressedOutputStream os = new BlockCompressedOutputStream(bamBAOS, null);
		codec.setOutputStream(os);
		for (CramCompressionRecord record : records) {
			SAMRecord samRecord = f.create(record);
			codec.encode(samRecord);
		}
		os.flush();
		OrderedByteArray bb = new OrderedByteArray();
		bb.bytes = bamBAOS.toByteArray();
		bb.order = object.order;
		log.debug(String.format("Converted OBA %d, records %d", object.order, records.size()));
		return bb;
	} catch (IOException | IllegalArgumentException | IllegalAccessException e) {
		throw new RuntimeException(e);
	}
}
 
Example #9
Source File: IntelInflaterDeflaterIntegrationTest.java    From gatk with BSD 3-Clause "New" or "Revised" License
@Test(dataProvider = "JdkFlags")
public void testIntelInflaterDeflaterWithPrintReads(final boolean use_jdk_inflater, final boolean use_jdk_deflater) throws Exception {
    if (!isIntelInflaterDeflaterSupported()) {
        throw new SkipException("IntelInflater/IntelDeflater not available on this platform");
    }

    final File ORIG_BAM = new File(largeFileTestDir, INPUT_FILE);
    final File outFile = GATKBaseTest.createTempFile(INPUT_FILE, ".bam");

    final ArrayList<String> args = new ArrayList<>();
    args.add("--input"); args.add(ORIG_BAM.getAbsolutePath());
    args.add("--output"); args.add(outFile.getAbsolutePath());
    args.add("--use-jdk-inflater"); args.add(String.valueOf(use_jdk_inflater));
    args.add("--use-jdk-deflater"); args.add(String.valueOf(use_jdk_deflater));

    // store current default factories, so they can be restored later
    InflaterFactory currentInflaterFactory = BlockGunzipper.getDefaultInflaterFactory();
    DeflaterFactory currentDeflaterFactory = BlockCompressedOutputStream.getDefaultDeflaterFactory();

    // set default factories to jdk version
    // because PrintReads cannot change the factory to Jdk if it was already set to Intel
    BlockGunzipper.setDefaultInflaterFactory(new InflaterFactory());
    BlockCompressedOutputStream.setDefaultDeflaterFactory(new DeflaterFactory());

    // run PrintReads
    runCommandLine(args);

    // restore default factories
    BlockGunzipper.setDefaultInflaterFactory(currentInflaterFactory);
    BlockCompressedOutputStream.setDefaultDeflaterFactory(currentDeflaterFactory);

    // validate input and output files are the same
    SamAssertionUtils.assertSamsEqual(outFile, ORIG_BAM);
}
 
Example #10
Source File: PicardCommandLineProgram.java    From gatk with BSD 3-Clause "New" or "Revised" License
@Override
public Object instanceMain(final String[] argv) {
    // First, we parse the commandline arguments, then we set important statics like VALIDATION_STRINGENCY, and
    // finally, we call into the normal instance main (post arg-parsing). If we don't start with the argument
    // parsing, we always get default values for VALIDATION_STRINGENCY, COMPRESSION_LEVEL, etc.
    if (!parseArgs(argv)) {
        //an information only argument like help or version was specified, just exit
        return 0;
    }
    // set general SAM/BAM parameters
    SamReaderFactory.setDefaultValidationStringency(VALIDATION_STRINGENCY);

    BlockCompressedOutputStream.setDefaultCompressionLevel(COMPRESSION_LEVEL);

    if (MAX_RECORDS_IN_RAM != null) {
        SAMFileWriterImpl.setDefaultMaxRecordsInRam(MAX_RECORDS_IN_RAM);
    }

    if (CREATE_INDEX){
        SAMFileWriterFactory.setDefaultCreateIndexWhileWriting(true);
    }

    SAMFileWriterFactory.setDefaultCreateMd5File(CREATE_MD5_FILE);

    // defer to parent to finish the initialization and starting the program.
    return instanceMainPostParseArgs();
}
 
Example #11
Source File: CommandLineProgram.java    From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Output a curated set of important settings to the logger.
 *
 * May be overridden by subclasses to specify a different set of settings to output.
 */
protected void printSettings() {
    if ( VERBOSITY != Log.LogLevel.DEBUG ) {
        logger.info("HTSJDK Defaults.COMPRESSION_LEVEL : " + Defaults.COMPRESSION_LEVEL);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : " + Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : " + Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : " + Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE);
    }
    else {
        // At DEBUG verbosity, print all the HTSJDK defaults:
        Defaults.allDefaults()
                .forEach((key, value) -> logger.info("HTSJDK " + Defaults.class.getSimpleName() + "." + key + " : " + value));
    }

    // Log the configuration options:
    ConfigFactory.logConfigFields(ConfigFactory.getInstance().getGATKConfig(), Log.LogLevel.DEBUG);

    final boolean usingIntelDeflater = (BlockCompressedOutputStream.getDefaultDeflaterFactory() instanceof IntelDeflaterFactory && ((IntelDeflaterFactory)BlockCompressedOutputStream.getDefaultDeflaterFactory()).usingIntelDeflater());
    logger.info("Deflater: " + (usingIntelDeflater ? "IntelDeflater": "JdkDeflater"));
    final boolean usingIntelInflater = (BlockGunzipper.getDefaultInflaterFactory() instanceof IntelInflaterFactory && ((IntelInflaterFactory)BlockGunzipper.getDefaultInflaterFactory()).usingIntelInflater());
    logger.info("Inflater: " + (usingIntelInflater ? "IntelInflater": "JdkInflater"));

    logger.info("GCS max retries/reopens: " + BucketUtils.getCloudStorageConfiguration(NIO_MAX_REOPENS, "").maxChannelReopens());
    if (Strings.isNullOrEmpty(NIO_PROJECT_FOR_REQUESTER_PAYS)) {
        logger.info("Requester pays: disabled");
    } else {
        logger.info("Requester pays: enabled. Billed to: " + NIO_PROJECT_FOR_REQUESTER_PAYS);
    }
}
 
Example #12
Source File: SparkUtils.java    From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Private helper method for {@link #convertHeaderlessHadoopBamShardToBam} that takes a SAMFileHeader and writes it
 * to the provided `OutputStream`, correctly encoded for the BAM format and preceded by the BAM magic bytes.
 *
 * @param samFileHeader SAM header to write
 * @param outputStream stream to write the SAM header to
 */
private static void writeBAMHeaderToStream( final SAMFileHeader samFileHeader, final OutputStream outputStream ) {
    final BlockCompressedOutputStream blockCompressedOutputStream = new BlockCompressedOutputStream(outputStream, (File)null);
    final BinaryCodec outputBinaryCodec = new BinaryCodec(new DataOutputStream(blockCompressedOutputStream));

    final String headerString;
    final Writer stringWriter = new StringWriter();
    new SAMTextHeaderCodec().encode(stringWriter, samFileHeader, true);
    headerString = stringWriter.toString();

    outputBinaryCodec.writeBytes(ReadUtils.BAM_MAGIC);

    // calculate and write the length of the SAM file header text and the header text
    outputBinaryCodec.writeString(headerString, true, false);

    // write the sequence dictionary in binary; this is redundant with the text header
    outputBinaryCodec.writeInt(samFileHeader.getSequenceDictionary().size());
    for (final SAMSequenceRecord sequenceRecord: samFileHeader.getSequenceDictionary().getSequences()) {
        outputBinaryCodec.writeString(sequenceRecord.getSequenceName(), true, true);
        outputBinaryCodec.writeInt(sequenceRecord.getSequenceLength());
    }

    try {
        blockCompressedOutputStream.flush();
    } catch (final IOException ioe) {
        throw new RuntimeIOException(ioe);
    }
}
 
Example #13
Source File: GenericPositionReaderTest.java    From rtg-tools with BSD 2-Clause "Simplified" License
private static GenericPositionReader makeGpr(String contents) throws IOException {
  final ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try (final BlockCompressedOutputStream out = new BlockCompressedOutputStream(baos, (File) null)) {
    out.write(contents.getBytes());
  }
  final ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
  return new GenericPositionReader(new BlockCompressedLineReader(new BlockCompressedInputStream(bais)), new TabixIndexer.TabixOptions(TabixIndexer.TabixOptions.FORMAT_GENERIC, 0, 1, 1, '#', 0, false));
}
 
Example #14
Source File: GzipAsynchOutputStream.java    From rtg-tools with BSD 2-Clause "Simplified" License
private static OutputStream makeOutputStream(File file, int gzipSize) throws IOException  {
  if (file == null) {
    throw new IllegalArgumentException("File cannot be null");
  }
  if (!BGZIP) {
    return new BufferedOutputStreamFix(new AdjustableGZIPOutputStream(FileUtils.createOutputStream(file, false), gzipSize, DEFAULT_GZIP_LEVEL));
  } else {
    // Use BlockCompressedOutputStream so that the file is tabix compatible.
    return new BufferedOutputStreamFix(new BlockCompressedOutputStream(FileUtils.createOutputStream(file, false), null, DEFAULT_GZIP_LEVEL, false));
  }
}
 
Example #15
Source File: TabixIndexer.java    From rtg-tools with BSD 2-Clause "Simplified" License
static void saveTabixIndex(BlockCompressedPositionReader in, final OutputStream samIndex, TabixOptions ops) throws IOException, UnindexableDataException {
  final ArrayList<SequenceIndex> indexes = new ArrayList<>();
  populateIndex(indexes, in, false);
  mergeChunks(indexes);
  try (BlockCompressedOutputStream indexOut = new BlockCompressedOutputStream(samIndex, (File) null)) {
    writeIndex(indexes, ops, in.getSequenceNames(), indexOut);
  }
}
 
Example #16
Source File: TestUtils.java    From Drop-seq with MIT License
/**
 * CommandLineProgram only replaces default inflater/deflater with Intel versions, which don't work on Mac.
 * Once they've been set to Intel versions in a JVM, they need to be reverted explicitly by any unit test
 * that is failing on Mac.
 */
public static void setInflaterDeflaterIfMacOs() {
	if (isMacOs()) {
		BlockCompressedOutputStream.setDefaultDeflaterFactory(new DeflaterFactory());
		BlockGunzipper.setDefaultInflaterFactory(new InflaterFactory());
	}
}
 
Example #17
Source File: FilterBamTest.java    From Drop-seq with MIT License
@BeforeMethod
public void setDeflaterFactory() {
	if (TestUtils.isMacOs()) {
		deflaterFactory = BlockCompressedOutputStream.getDefaultDeflaterFactory();
		BlockCompressedOutputStream.setDefaultDeflaterFactory(new DeflaterFactory());
	}
}
 
Example #18
Source File: BGZFCompressionOutputStream.java    From Hadoop-BAM with MIT License
public BGZFCompressionOutputStream(OutputStream out)
    throws IOException {
  super(out);
  this.output = new BlockCompressedOutputStream(out, (File) null);
}
 
Example #19
Source File: BCFRecordWriter.java    From Hadoop-BAM with MIT License
public BCFStoppableOutputStream(boolean startStopped, OutputStream out) {
	super(new BlockCompressedOutputStream(out, (File) null));
	origOut = out;
	stopped = startStopped;
}
 
Example #20
Source File: BGzipLineWriter.java    From imputationserver with GNU Affero General Public License v3.0
public BGzipLineWriter(String filename) throws IOException {
	bw = new BufferedWriter(new OutputStreamWriter(new BlockCompressedOutputStream(new File(filename))));
	first = true;
}
 
Example #21
Source File: TestBAMRecordView.java    From cramtools with Apache License 2.0
@Test
public void test() throws IOException {
	byte[] buf = new byte[1024];
	BAMRecordView view = new BAMRecordView(buf);
	view.setRefID(0);
	view.setAlignmentStart(77);
	view.setMappingScore(44);
	view.setIndexBin(99);
	view.setFlags(555);
	view.setMateRefID(0);
	view.setMateAlStart(78);
	view.setInsertSize(133);

	view.setReadName("name1");
	view.setCigar(TextCigarCodec.decode("10M"));
	view.setBases("AAAAAAAAAA".getBytes());
	view.setQualityScores("BBBBBBBBBB".getBytes());

	int id = 'A' << 16 | 'M' << 8 | 'A';
	view.addTag(id, "Q".getBytes(), 0, 1);

	int len = view.finish();

	System.out.println(Arrays.toString(Arrays.copyOf(buf, len)));

	ByteArrayOutputStream baos = new ByteArrayOutputStream();

	SAMFileHeader header = new SAMFileHeader();
	header.addSequence(new SAMSequenceRecord("14", 14));

	ByteArrayOutputStream baos2 = new ByteArrayOutputStream();
	SAMFileWriter writer = new SAMFileWriterFactory().makeBAMWriter(header, true, baos2);
	SAMRecord record = new SAMRecord(header);
	record.setReferenceIndex(0);
	record.setAlignmentStart(1);
	record.setCigarString("10M");
	record.setFlags(555);
	record.setMappingQuality(44);
	record.setMateReferenceIndex(0);
	record.setMateAlignmentStart(0);
	record.setInferredInsertSize(133);
	record.setReadName("name1");
	record.setReadBases("AAAAAAAAAA".getBytes());
	record.setBaseQualities("BBBBBBBBBB".getBytes());
	record.setAttribute("AM", 'Q');

	System.out.println("BAMFileWriter.addAlignment():");
	writer.addAlignment(record);
	System.out.println(".");
	writer.close();

	System.out.println("------------------------------------------");
	System.out.println();
	System.out.println(new String(baos2.toByteArray()));
	System.out.println();

	SAMFileReader.setDefaultValidationStringency(ValidationStringency.SILENT);
	SAMFileReader reader2 = new SAMFileReader(new ByteArrayInputStream(baos2.toByteArray()));
	SAMRecordIterator iterator = reader2.iterator();
	while (iterator.hasNext()) {
		record = iterator.next();
		System.out.println(record.getSAMString());
	}
	System.out.println("------------------------------------------");

	BlockCompressedOutputStream bcos = new BlockCompressedOutputStream(baos, null);
	bcos.write("BAM\1".getBytes());
	bcos.write(toByteArray(header));
	CramInt.writeInt32(header.getSequenceDictionary().size(), bcos);
	for (final SAMSequenceRecord sequenceRecord : header.getSequenceDictionary().getSequences()) {
		byte[] bytes = sequenceRecord.getSequenceName().getBytes();
		CramInt.writeInt32(bytes.length + 1, bcos);
		bcos.write(sequenceRecord.getSequenceName().getBytes());
		bcos.write(0);
		CramInt.writeInt32(sequenceRecord.getSequenceLength(), bcos);
	}
	bcos.write(buf, 0, len);
	bcos.close();

	System.out.println(new String(baos.toByteArray()));

	SAMFileReader reader = new SAMFileReader(new ByteArrayInputStream(baos.toByteArray()));
	iterator = reader.iterator();
	while (iterator.hasNext()) {
		record = iterator.next();
		System.out.println(record.getSAMString());
	}
	reader.close();

}
 
Example #22
Source File: BgzipFileHelper.java    From rtg-tools with BSD 2-Clause "Simplified" License
/**
 * Create a BGZIP file
 * @param data data to write
 * @param f file to write to
 * @return file written to
 * @throws IOException if an IO error occurs
 */
public static File bytesToBgzipFile(byte[] data, File f) throws IOException {
  try (BlockCompressedOutputStream out = new BlockCompressedOutputStream(f)) {
    out.write(data);
  }
  return f;
}
 
Example #23
Source File: GzipAsynchOutputStream.java    From rtg-tools with BSD 2-Clause "Simplified" License
/**
 * Create an asynchronous GZIP output stream to write to the given stream.
 *
 * @param stream the output stream
 * @param pipeSize the size of the buffer between the threads.  At least 1 Kb.
 * @param gzipSize the buffer size of the compression object.  At least 1 Kb.
 * @param terminated true to terminate the file (if block compressed)
 * @throws IOException on IO error.
 */
public GzipAsynchOutputStream(OutputStream stream, int pipeSize, int gzipSize, boolean terminated) throws IOException {
  super(BGZIP ? new BlockCompressedOutputStream(stream, null, DEFAULT_GZIP_LEVEL, terminated) : new AdjustableGZIPOutputStream(stream), pipeSize);
  // Use BlockCompressedOutputStream so that the file is tabix compatible.
  //    super(new BlockCompressedOutputStream(stream, DEFAULT_GZIP_LEVEL), pipeSize, gzipSize);
}