htsjdk.samtools.util.Log Java Examples
The following examples show how to use
htsjdk.samtools.util.Log.
Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
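The snippets below exercise Log.getInstance, Log.setGlobalLogLevel, and the per-level logging methods. As a quick orientation, here is a minimal, self-contained sketch of that core API (the class name and messages are illustrative only, not taken from any of the projects below):

import htsjdk.samtools.util.Log;

public class LogDemo {
    // Each class typically holds its own Log instance.
    private static final Log log = Log.getInstance(LogDemo.class);

    public static void main(final String[] args) {
        // The global level gates which messages are actually emitted.
        Log.setGlobalLogLevel(Log.LogLevel.INFO);
        log.debug("Suppressed at INFO verbosity");
        log.info("Processing started");
        log.warn("Something looks unusual");
        log.error("Something went wrong");
    }
}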
Example #1
Source File: ReplaceSamHeader.java From picard with MIT License
private void standardReheader(final SAMFileHeader replacementHeader) {
    final SamReader recordReader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE)
            .validationStringency(ValidationStringency.SILENT).open(INPUT);
    if (replacementHeader.getSortOrder() != recordReader.getFileHeader().getSortOrder()) {
        throw new PicardException("Sort orders of INPUT (" + recordReader.getFileHeader().getSortOrder().name() +
                ") and HEADER (" + replacementHeader.getSortOrder().name() + ") do not agree.");
    }
    final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMOrBAMWriter(replacementHeader, true, OUTPUT);
    final ProgressLogger progress = new ProgressLogger(Log.getInstance(ReplaceSamHeader.class));
    for (final SAMRecord rec : recordReader) {
        rec.setHeader(replacementHeader);
        writer.addAlignment(rec);
        progress.record(rec);
    }
    writer.close();
    CloserUtil.close(recordReader);
}
Example #2
Source File: FilterProgramUtils.java From Drop-seq with MIT License
public static void reportAndCheckFilterResults(final String elementType, final long elementsAccepted,
                                               final long elementsRejected, final Double passingThreshold,
                                               final Log log) {
    final long totalElements = elementsAccepted + elementsRejected;
    log.info(String.format("Total %d %s processed. %d %s accepted; %d %s rejected.",
            totalElements, elementType, elementsAccepted, elementType, elementsRejected, elementType));
    if (passingThreshold != null) {
        if (passingThreshold >= 1) {
            if (elementsAccepted < passingThreshold) {
                throw new RuntimeException(String.format("Fewer than %d %s passed filters",
                        passingThreshold.intValue(), elementType));
            }
        } else if (elementsAccepted / ((double) totalElements) < passingThreshold) {
            throw new RuntimeException(String.format("A smaller fraction than %f %s passed filters",
                    passingThreshold, elementType));
        }
    }
}
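Note how passingThreshold is interpreted: a value of 1 or more is an absolute count of elements that must pass, while a value below 1 is a fraction of the total. A hypothetical invocation (counts invented for illustration):

final Log log = Log.getInstance(FilterProgramUtils.class);
// Require that at least 90% of reads survive filtering; throws otherwise.
FilterProgramUtils.reportAndCheckFilterResults("reads", 950L, 50L, 0.9, log);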
Example #3
Source File: OpticalDuplicateFinderTest.java From picard with MIT License
@Test
public void testKeeperOrderDependence() {
    final Log log = Log.getInstance(OpticalDuplicateFinderTest.class);
    final OpticalDuplicateFinder finder = new OpticalDuplicateFinder(OpticalDuplicateFinder.DEFAULT_READ_NAME_REGEX, 100, log);
    final List<PhysicalLocation> locs = Arrays.asList(
            loc(1, 100, 190),
            loc(1, 100, 280),
            loc(1, 100, 370),
            loc(1, 100, 460)
    );
    final List<PhysicalLocation> locsReordered = Arrays.asList(
            loc(1, 100, 190),
            loc(1, 100, 460),
            loc(1, 100, 370),
            loc(1, 100, 280)
    );
    Assert.assertEquals(countTrue(finder.findOpticalDuplicates(locs, locs.get(0))), 3);
    Assert.assertEquals(countTrue(finder.findOpticalDuplicates(locsReordered, locsReordered.get(0))), 3);
}
Example #4
Source File: DbSnpBitSetUtil.java From picard with MIT License
/**
 * Factory method to create both a SNP bitmask and an indel bitmask in a single pass of the VCF.
 * If intervals are given, consider only SNP and indel sites that overlap the intervals. If log is given,
 * progress loading the variants will be written to the log.
 */
public static DbSnpBitSets createSnpAndIndelBitSets(final File dbSnpFile,
                                                    final SAMSequenceDictionary sequenceDictionary,
                                                    final IntervalList intervals,
                                                    final Optional<Log> log) {
    final DbSnpBitSets sets = new DbSnpBitSets();
    sets.snps = new DbSnpBitSetUtil();
    sets.indels = new DbSnpBitSetUtil();
    final Map<DbSnpBitSetUtil, Set<VariantType>> map = new HashMap<>();
    map.put(sets.snps, EnumSet.of(VariantType.SNP));
    map.put(sets.indels, EnumSet.of(VariantType.insertion, VariantType.deletion));
    loadVcf(dbSnpFile, sequenceDictionary, map, intervals, log);
    return sets;
}
Example #5
Source File: SparkCommandLineArgumentCollection.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Returns the Spark log level for the argument set. This is simply sparkVerbosity
 * if it was specified. Otherwise, it returns the log level corresponding to the
 * provided tool (htsjdk) verbosity.
 *
 * @param toolVerbosity Current tool's htsjdk log level
 * @return Spark log level String
 */
public String getSparkVerbosity(final Log.LogLevel toolVerbosity) {
    Utils.nonNull(toolVerbosity, "Tool verbosity cannot be null");
    if (sparkVerbosity != null) {
        return sparkVerbosity;
    }
    if (toolVerbosity.equals(Log.LogLevel.DEBUG)) {
        return Level.DEBUG.name();
    }
    if (toolVerbosity.equals(Log.LogLevel.INFO)) {
        return Level.INFO.name();
    }
    if (toolVerbosity.equals(Log.LogLevel.WARNING)) {
        return Level.WARN.name();
    }
    if (toolVerbosity.equals(Log.LogLevel.ERROR)) {
        return Level.ERROR.name();
    }
    throw new IllegalStateException("Unknown tool verbosity: " + toolVerbosity.name());
}
Example #6
Source File: RnaSeqMetricsCollector.java From picard with MIT License
public static OverlapDetector<Interval> makeOverlapDetector(final File samFile,
                                                            final SAMFileHeader header,
                                                            final File ribosomalIntervalsFile,
                                                            final Log log) {
    final OverlapDetector<Interval> ribosomalSequenceOverlapDetector = new OverlapDetector<Interval>(0, 0);
    if (ribosomalIntervalsFile != null) {
        final IntervalList ribosomalIntervals = IntervalList.fromFile(ribosomalIntervalsFile);
        if (ribosomalIntervals.size() == 0) {
            log.warn("The RIBOSOMAL_INTERVALS file, " + ribosomalIntervalsFile.getAbsolutePath() +
                    " does not contain intervals");
        }
        try {
            SequenceUtil.assertSequenceDictionariesEqual(header.getSequenceDictionary(),
                    ribosomalIntervals.getHeader().getSequenceDictionary());
        } catch (SequenceUtil.SequenceListsDifferException e) {
            throw new PicardException("Sequence dictionaries differ in " + samFile.getAbsolutePath() +
                    " and " + ribosomalIntervalsFile.getAbsolutePath(), e);
        }
        final IntervalList uniquedRibosomalIntervals = ribosomalIntervals.uniqued();
        final List<Interval> intervals = uniquedRibosomalIntervals.getIntervals();
        ribosomalSequenceOverlapDetector.addAll(intervals, intervals);
    }
    return ribosomalSequenceOverlapDetector;
}
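The returned detector can then be queried for ribosomal intervals that overlap a region of interest. A hypothetical call site, reusing the arguments from the signature above (getOverlaps is standard htsjdk OverlapDetector API; the query coordinates are made up):

final OverlapDetector<Interval> detector =
        RnaSeqMetricsCollector.makeOverlapDetector(samFile, header, ribosomalIntervalsFile, log);
final Collection<Interval> ribosomalHits = detector.getOverlaps(new Interval("chr1", 100, 200));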
Example #7
Source File: ConfigFactory.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Logs all the parameters in the given {@link Config} object at the given {@link Log.LogLevel}
 * @param config A {@link Config} object from which to log all parameters and values.
 * @param logLevel The log {@link htsjdk.samtools.util.Log.LogLevel} at which to log the data in {@code config}
 * @param <T> any {@link Config} type to use to log all configuration information.
 */
public static <T extends Config> void logConfigFields(final T config, final Log.LogLevel logLevel) {
    Utils.nonNull(config);
    Utils.nonNull(logLevel);
    final Level level = LoggingUtils.levelToLog4jLevel(logLevel);
    // Only continue in this method here if we would log the given level:
    if ( !logger.isEnabled(level) ) {
        return;
    }
    logger.log(level, "Configuration file values: ");
    for ( final Map.Entry<String, Object> entry : getConfigMap(config, false).entrySet() ) {
        logger.log(level, "\t" + entry.getKey() + " = " + entry.getValue());
    }
}
Example #8
Source File: CommandLineProgramTester.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Inject the verbosity parameter into the list.
 *
 * Default behaviour: look for a --verbosity argument; if not found, supply a default value that
 * minimizes the amount of logging output.
 */
default List<String> injectDefaultVerbosity(final List<String> args) {
    // global toggle for BunnyLog output.
    BunnyLog.setEnabled(false);
    for (String arg : args) {
        if (arg.equalsIgnoreCase("--" + StandardArgumentDefinitions.VERBOSITY_NAME) ||
                arg.equalsIgnoreCase("-" + StandardArgumentDefinitions.VERBOSITY_NAME)) {
            return args;
        }
    }
    List<String> argsWithVerbosity = new ArrayList<>(args);
    argsWithVerbosity.add("--" + StandardArgumentDefinitions.VERBOSITY_NAME);
    argsWithVerbosity.add(Log.LogLevel.ERROR.name());
    return argsWithVerbosity;
}
Example #9
Source File: SamFormatConverter.java From picard with MIT License
/**
 * Convert a file from one of sam/bam/cram format to another based on the extension of output.
 *
 * @param input             input file in one of sam/bam/cram format
 * @param output            output to write converted file to, the conversion is based on the extension of this filename
 * @param referenceSequence the reference sequence to use, necessary when reading/writing cram
 * @param createIndex       whether or not an index should be written alongside the output file
 */
public static void convert(final File input, final File output, final File referenceSequence, final Boolean createIndex) {
    IOUtil.assertFileIsReadable(input);
    IOUtil.assertFileIsWritable(output);
    final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(referenceSequence).open(input);
    final SAMFileWriter writer = new SAMFileWriterFactory().makeWriter(reader.getFileHeader(), true, output, referenceSequence);
    if (createIndex && writer.getFileHeader().getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
        throw new PicardException("Can't CREATE_INDEX unless sort order is coordinate");
    }
    final ProgressLogger progress = new ProgressLogger(Log.getInstance(SamFormatConverter.class));
    for (final SAMRecord rec : reader) {
        writer.addAlignment(rec);
        progress.record(rec);
    }
    CloserUtil.close(reader);
    writer.close();
}
Example #10
Source File: Diagnostic.java From rtg-tools with BSD 2-Clause "Simplified" License
/**
 * Set the stream to which logging messages are sent. Setting this to null is permitted but
 * inadvisable, since it causes all logging information to be discarded.
 *
 * @param logStream stream to use for logging
 */
public static synchronized void setLogStream(final LogStream logStream) {
    sLogRedirect = false;
    if (sLogStream == logStream) {
        return;
    }
    if (sLogStream != null && logStream != null && logStream.stream() == sLogStream.stream()) {
        return;
    }
    if (sLogStream != null) {
        closeLog();
    }
    sLogStream = logStream;
    Log.setGlobalPrintStream(sLogStream == null ? NullStreamUtils.getNullPrintStream() : sLogStream.stream());
    sLogClosed = false;
    sProgressClosed = false;
}
Example #11
Source File: AllelicPanelOfNormalsUnitTest.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
@Test
public void testPoNHyperparameterInitialization() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
    final AllelicPanelOfNormals allelicPoN =
            new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_NORMAL_COUNTS_FILE));

    final SimpleInterval firstSite = new SimpleInterval("1", 1, 1);
    final SimpleInterval siteNotInPoN = new SimpleInterval("2", 1, 1);  // all sites in PoN are from chr1

    // test initialization of hyperparameters for first site in PoN (a = 1218, r = 1317)
    final double alphaAtFirstSite = allelicPoN.getAlpha(firstSite);
    final double betaAtFirstSite = allelicPoN.getBeta(firstSite);

    Assert.assertEquals(alphaAtFirstSite, ALPHA_EXPECTED_AT_FIRST_SITE, DELTA);
    Assert.assertEquals(betaAtFirstSite, BETA_EXPECTED_AT_FIRST_SITE, DELTA);

    // test initialization of MLE hyperparameters (which are default values for sites not in PoN)
    final double alphaNotInPoN = allelicPoN.getAlpha(siteNotInPoN);
    final double betaNotInPoN = allelicPoN.getBeta(siteNotInPoN);
    final double meanBias = allelicPoN.getGlobalMeanBias();
    final double biasVariance = allelicPoN.getGlobalBiasVariance();

    Assert.assertEquals(alphaNotInPoN, MLE_ALPHA_EXPECTED, DELTA);
    Assert.assertEquals(betaNotInPoN, MLE_BETA_EXPECTED, DELTA);
    Assert.assertEquals(meanBias, MLE_MEAN_BIAS_EXPECTED, DELTA);
    Assert.assertEquals(biasVariance, MLE_BIAS_VARIANCE_EXPECTED, DELTA);
}
Example #12
Source File: CRAMFileWriter.java From cramtools with Apache License 2.0
public static void main(String[] args) throws IOException {
    Log.setGlobalLogLevel(LogLevel.INFO);

    File bamFile = new File(args[0]);
    File outCramFile = new File(args[1]);
    ReferenceSource source = new ReferenceSource(new File(args[2]));
    int maxThreads = Integer.valueOf(args[3]);

    BAMFileReader reader = new BAMFileReader(bamFile, null, false, false, ValidationStringency.SILENT,
            new DefaultSAMRecordFactory());
    OutputStream os = new FileOutputStream(outCramFile);
    CRAMFileWriter writer = new CRAMFileWriter(os, source, reader.getFileHeader(), outCramFile.getName(), maxThreads);

    CloseableIterator<SAMRecord> iterator = reader.getIterator();
    while (iterator.hasNext()) {
        SAMRecord record = iterator.next();
        writer.addAlignment(record);
    }
    writer.close();
    reader.close();
}
Example #13
Source File: QualityScoreStats.java From cramtools with Apache License 2.0
public static void main(String[] args) throws Exception {
    Params params = new Params();
    JCommander jc = new JCommander(params);
    try {
        jc.parse(args);
    } catch (Exception e) {
        System.out.println("Failed to parse parameters, detailed message below: ");
        System.out.println(e.getMessage());
        System.out.println();
        System.out.println("See usage: -h");
        System.exit(1);
    }

    if (args.length == 0 || params.help) {
        printUsage(jc);
        System.exit(1);
    }

    Log.setGlobalLogLevel(params.logLevel);

    dist(params.inputFile, (byte) (0xFF & params.defaultQualityScore));
}
Example #14
Source File: AlleleFractionModellerUnitTest.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
@DataProvider(name = "biasCorrection")
public Object[][] dataBiasCorrection() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);

    final AllelicCountCollection sampleNormal = new AllelicCountCollection(SAMPLE_NORMAL_FILE);
    final AllelicCountCollection sampleWithBadSNPs = new AllelicCountCollection(SAMPLE_WITH_BAD_SNPS_FILE);
    final AllelicCountCollection sampleWithEvent = new AllelicCountCollection(SAMPLE_WITH_EVENT_FILE);

    final AllelicPanelOfNormals allelicPoNNormal =
            new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_NORMAL_COUNTS_FILE));
    final AllelicPanelOfNormals allelicPoNWithBadSNPs =
            new AllelicPanelOfNormals(new AllelicCountCollection(ALLELIC_PON_WITH_BAD_SNPS_COUNTS_FILE));

    final double minorFractionExpectedInMiddleSegmentNormal = 0.5;
    final double minorFractionExpectedInMiddleSegmentWithBadSNPsAndNormalPoN = 0.4;
    final double minorFractionExpectedInMiddleSegmentWithEvent = 0.33;

    return new Object[][]{
            {sampleNormal, allelicPoNNormal, minorFractionExpectedInMiddleSegmentNormal},
            {sampleWithBadSNPs, allelicPoNNormal, minorFractionExpectedInMiddleSegmentWithBadSNPsAndNormalPoN},
            {sampleWithEvent, allelicPoNNormal, minorFractionExpectedInMiddleSegmentWithEvent},
            {sampleWithBadSNPs, allelicPoNWithBadSNPs, minorFractionExpectedInMiddleSegmentNormal}
    };
}
Example #15
Source File: PolyATrimmerTest.java From Drop-seq with MIT License
@Test(dataProvider = "testClpDataProvider")
public void testClp(final boolean newTrimmer) throws IOException {
    final File tempDir = Files.createTempDirectory("PolyATrimmerTest.").toFile();
    final Log.LogLevel saveLogLevel = Log.getGlobalLogLevel();
    Log.setGlobalLogLevel(Log.LogLevel.DEBUG);
    try {
        final PolyATrimmer clp = new PolyATrimmer();
        clp.INPUT = INPUT;
        clp.OUTPUT = File.createTempFile("PolyATrimmerTest.", ".sam");
        clp.OUTPUT.deleteOnExit();
        clp.OUTPUT_SUMMARY = File.createTempFile("PolyATrimmerTest.", ".summary");
        clp.OUTPUT_SUMMARY.deleteOnExit();
        clp.TMP_DIR = Arrays.asList(tempDir);
        tempDir.deleteOnExit();
        clp.MISMATCHES = 0;
        clp.NUM_BASES = 6;
        clp.VALIDATION_STRINGENCY = ValidationStringency.STRICT;
        clp.USE_NEW_TRIMMER = newTrimmer;
        Assert.assertEquals(clp.doWork(), 0);
        final File expectedResult = new File(TESTDATA_DIR, String.format("N701.%s_trimmer.sam", newTrimmer ? "new" : "old"));
        TestUtils.assertSamFilesSame(clp.OUTPUT, expectedResult);
    } finally {
        Log.setGlobalLogLevel(saveLogLevel);
        IOUtil.recursiveDelete(tempDir.toPath());
    }
}
Example #16
Source File: LongBloomFilterTest.java From gatk with BSD 3-Clause "New" or "Revised" License
@Test
void addTest() {
    LoggingUtils.setLoggingLevel(Log.LogLevel.DEBUG);
    final LongBloomFilter bloomFilter = new LongBloomFilter(testVals.length, FPP);
    for (final long val : testVals) {
        bloomFilter.add(val);
    }
    Assert.assertTrue(bloomFilter.containsAll(testVals));
}
Example #17
Source File: SparkCommandLineArgumentCollectionTest.java From gatk with BSD 3-Clause "New" or "Revised" License
@Test(dataProvider = "sparkLogLevels")
public void test(String sparkLevel) {
    final SparkCommandLineArgumentCollection sparkArgumentCollection = new SparkCommandLineArgumentCollection();
    sparkArgumentCollection.setSparkVerbosity(sparkLevel);
    final String level = sparkArgumentCollection.getSparkVerbosity(Log.LogLevel.INFO);
    Assert.assertEquals(level, sparkLevel);
}
Example #18
Source File: OpticalDuplicateFinderTest.java From picard with MIT License
@Test
public void testKeeperAtEndWithinCliqueOfAllOpticalDuplicates() {
    final Log log = Log.getInstance(OpticalDuplicateFinderTest.class);
    final OpticalDuplicateFinder finder = new OpticalDuplicateFinder(OpticalDuplicateFinder.DEFAULT_READ_NAME_REGEX, 15, log);
    final List<PhysicalLocation> locs = Arrays.asList(
            loc(1, 10, 0),
            loc(1, 20, 0),
            loc(1, 30, 0)
    );
    assertEquals(finder.findOpticalDuplicates(locs, locs.get(2)), new boolean[] {true, true, false});
}
Example #19
Source File: SparkCommandLineArgumentCollectionTest.java From gatk with BSD 3-Clause "New" or "Revised" License
@DataProvider(name = "toolLogLevels")
public Object[][] toolLogLevels() {
    return new Object[][] {
            {Log.LogLevel.DEBUG, Level.DEBUG.name()},
            {Log.LogLevel.INFO, Level.INFO.name()},
            {Log.LogLevel.WARNING, Level.WARN.name()},
            {Log.LogLevel.ERROR, Level.ERROR.name()},
    };
}
Example #20
Source File: CommandLineProgram.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Output a curated set of important settings to the logger.
 *
 * May be overridden by subclasses to specify a different set of settings to output.
 */
protected void printSettings() {
    if ( VERBOSITY != Log.LogLevel.DEBUG ) {
        logger.info("HTSJDK Defaults.COMPRESSION_LEVEL : " + Defaults.COMPRESSION_LEVEL);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS : " + Defaults.USE_ASYNC_IO_READ_FOR_SAMTOOLS);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS : " + Defaults.USE_ASYNC_IO_WRITE_FOR_SAMTOOLS);
        logger.info("HTSJDK Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE : " + Defaults.USE_ASYNC_IO_WRITE_FOR_TRIBBLE);
    } else {
        // At DEBUG verbosity, print all the HTSJDK defaults:
        Defaults.allDefaults()
                .forEach((key, value) -> logger.info("HTSJDK " + Defaults.class.getSimpleName() + "." + key + " : " + value));
    }

    // Log the configuration options:
    ConfigFactory.logConfigFields(ConfigFactory.getInstance().getGATKConfig(), Log.LogLevel.DEBUG);

    final boolean usingIntelDeflater = (BlockCompressedOutputStream.getDefaultDeflaterFactory() instanceof IntelDeflaterFactory &&
            ((IntelDeflaterFactory) BlockCompressedOutputStream.getDefaultDeflaterFactory()).usingIntelDeflater());
    logger.info("Deflater: " + (usingIntelDeflater ? "IntelDeflater" : "JdkDeflater"));
    final boolean usingIntelInflater = (BlockGunzipper.getDefaultInflaterFactory() instanceof IntelInflaterFactory &&
            ((IntelInflaterFactory) BlockGunzipper.getDefaultInflaterFactory()).usingIntelInflater());
    logger.info("Inflater: " + (usingIntelInflater ? "IntelInflater" : "JdkInflater"));

    logger.info("GCS max retries/reopens: " + BucketUtils.getCloudStorageConfiguration(NIO_MAX_REOPENS, "").maxChannelReopens());
    if (Strings.isNullOrEmpty(NIO_PROJECT_FOR_REQUESTER_PAYS)) {
        logger.info("Requester pays: disabled");
    } else {
        logger.info("Requester pays: enabled. Billed to: " + NIO_PROJECT_FOR_REQUESTER_PAYS);
    }
}
Example #21
Source File: LoggingUtils.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Set the logging level for {@link com.esotericsoftware.minlog.Log}, the logger used by Kryo.
 */
private static void setMinLogLoggingLevel(Log.LogLevel verbosity) {
    switch (verbosity) {
        case DEBUG:
            com.esotericsoftware.minlog.Log.DEBUG();
            break;
        case INFO:
            com.esotericsoftware.minlog.Log.INFO();
            break;
        case WARNING:
            com.esotericsoftware.minlog.Log.WARN();
            break;
        case ERROR:
            com.esotericsoftware.minlog.Log.ERROR();
            break;
        default:
            throw new GATKException("This log level is not implemented properly: " + verbosity);
    }
}
Example #22
Source File: LoggingUtils.java From gatk with BSD 3-Clause "New" or "Revised" License
private static void setLog4JLoggingLevel(Log.LogLevel verbosity) {
    // Now establish the logging level used by log4j by propagating the requested
    // logging level to all loggers associated with our logging configuration.
    final LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
    final Configuration loggerContextConfig = loggerContext.getConfiguration();
    final String contextClassName = LoggingUtils.class.getName();
    final LoggerConfig loggerConfig = loggerContextConfig.getLoggerConfig(contextClassName);
    loggerConfig.setLevel(levelToLog4jLevel(verbosity));
    loggerContext.updateLoggers();
}
Example #23
Source File: LoggingUtils.java From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Propagate the verbosity level to Picard, log4j, the java built-in logger, and Kryo's MinLog.
 */
public static void setLoggingLevel(final Log.LogLevel verbosity) {
    // Call the Picard API to establish the logging level used by Picard
    Log.setGlobalLogLevel(verbosity);

    // set the log4j logging level
    setLog4JLoggingLevel(verbosity);

    // set the java.util.logging Level
    setJavaUtilLoggingLevel(verbosity);

    // set the esotericsoft MinLog level, this is used by Kryo
    setMinLogLoggingLevel(verbosity);
}
Example #24
Source File: DetectFeatureB.java From cramtools with Apache License 2.0
public static void main(String[] args) throws IOException, IllegalArgumentException, IllegalAccessException {
    Log.setGlobalLogLevel(LogLevel.INFO);
    File cramFile = new File(args[0]);
    InputStream is = new BufferedInputStream(new FileInputStream(cramFile));
    CramHeader header = CramIO.readCramHeader(is);
    Container c = null;
    while ((c = ContainerIO.readContainer(header.getVersion(), is)) != null && !c.isEOF()) {
        for (Slice slice : c.slices) {
            final DataReaderFactory dataReaderFactory = new DataReaderFactory();
            final Map<Integer, InputStream> inputMap = new HashMap<Integer, InputStream>();
            for (final Integer exId : slice.external.keySet()) {
                inputMap.put(exId, new ByteArrayInputStream(slice.external.get(exId).getRawContent()));
            }
            final CramRecordReader reader = new CramRecordReader(ValidationStringency.SILENT);
            dataReaderFactory.buildReader(reader,
                    new DefaultBitInputStream(new ByteArrayInputStream(slice.coreBlock.getRawContent())),
                    inputMap, c.header, slice.sequenceId);
            for (int i = 0; i < slice.nofRecords; i++) {
                CramCompressionRecord record = new CramCompressionRecord();
                reader.read(record);
                if (record.isSegmentUnmapped() || record.readFeatures == null || record.readFeatures.isEmpty())
                    continue;
                for (ReadFeature rf : record.readFeatures) {
                    if (rf.getOperator() == ReadBase.operator) {
                        System.out.println("Read feature B detected.");
                        System.exit(1);
                    }
                }
            }
        }
    }
}
Example #25
Source File: DetectMultiref.java From cramtools with Apache License 2.0
public static void main(String[] args) throws IOException, IllegalArgumentException, IllegalAccessException {
    Log.setGlobalLogLevel(LogLevel.INFO);
    File cramFile = new File(args[0]);
    InputStream is = new BufferedInputStream(new FileInputStream(cramFile));
    CramHeader header = CramIO.readCramHeader(is);
    Container c = null;
    while ((c = ContainerIO.readContainer(header.getVersion(), is)) != null && !c.isEOF()) {
        for (Slice slice : c.slices) {
            if (slice.sequenceId == Slice.MULTI_REFERENCE) {
                System.out.println("Multi-reference slice detected.");
                System.exit(1);
            }
        }
    }
}
Example #26
Source File: OpticalDuplicateFinderTest.java From picard with MIT License
@Test
public void testKeeper() {
    final Log log = Log.getInstance(OpticalDuplicateFinderTest.class);
    final OpticalDuplicateFinder finder = new OpticalDuplicateFinder(OpticalDuplicateFinder.DEFAULT_READ_NAME_REGEX, 100, log);
    final List<PhysicalLocation> locs = Arrays.asList(
            loc(7, 1500, 1500),
            loc(7, 1501, 1501),
            loc(5, 1500, 1500),
            loc(7, 1490, 1502),
            loc(7, 2500, 2500),
            loc(7, 10, 10)
    );

    assertEquals(finder.findOpticalDuplicates(locs, null),        new boolean[] {true, true, false, false, false, false});
    assertEquals(finder.findOpticalDuplicates(locs, locs.get(0)), new boolean[] {false, true, false, true, false, false});
    assertEquals(finder.findOpticalDuplicates(locs, locs.get(1)), new boolean[] {true, false, false, true, false, false});
    assertEquals(finder.findOpticalDuplicates(locs, locs.get(3)), new boolean[] {true, true, false, false, false, false});

    for (int i = 0; i < 100; ++i) {
        final Random random = new Random(i);
        final List<PhysicalLocation> shuffled = new ArrayList<>(locs);
        final List<PhysicalLocation> keepers = Arrays.asList(locs.get(0), locs.get(1), locs.get(3));
        final PhysicalLocation keeper = keepers.get(random.nextInt(keepers.size()));
        Collections.shuffle(shuffled);

        int opticalDupeCount = countTrue(finder.findOpticalDuplicates(shuffled, keeper));
        Assert.assertEquals(opticalDupeCount, 2);
    }
}
Example #27
Source File: OpticalDuplicateFinderTest.java From picard with MIT License
/**
 * Tests the case where the "keeper" record is not in the list that is passed to the OpticalDuplicateFinder. This can happen
 * when there are, e.g., FR and RF reads, which can all be molecular duplicates of one another, but cannot be optical
 * duplicates of one another and are thus partitioned into two sets for optical duplicate checking.
 */
@Test
public void testKeeperNotInList() {
    final Log log = Log.getInstance(OpticalDuplicateFinderTest.class);
    final OpticalDuplicateFinder finder = new OpticalDuplicateFinder(OpticalDuplicateFinder.DEFAULT_READ_NAME_REGEX, 100, log);
    final List<PhysicalLocation> locs = Arrays.asList(
            loc(1, 100, 100),
            loc(1, 101, 101),
            loc(1, 99, 99),
            loc(1, 99, 102)
    );
    Assert.assertEquals(countTrue(finder.findOpticalDuplicates(locs, loc(7, 5000, 5000))), 3);
}
Example #28
Source File: AllelicPanelOfNormalsCreatorUnitTest.java From gatk-protected with BSD 3-Clause "New" or "Revised" License
@Test(dataProvider = "dataCreate")
public void testCreate(final double siteFrequencyThreshold, final AllelicPanelOfNormals expected) {
    LoggingUtils.setLoggingLevel(Log.LogLevel.INFO);
    final AllelicPanelOfNormalsCreator allelicPoNCreator = new AllelicPanelOfNormalsCreator(PULLDOWN_FILES);
    final AllelicPanelOfNormals result = allelicPoNCreator.create(siteFrequencyThreshold);
    AllelicPoNTestUtils.assertAllelicPoNsEqual(result, expected);
}
Example #29
Source File: CleanSam.java From picard with MIT License
/**
 * Do the work after the command line has been parsed.
 * RuntimeExceptions may be thrown by this method, and are reported appropriately.
 *
 * @return program exit status.
 */
@Override
protected int doWork() {
    IOUtil.assertFileIsReadable(INPUT);
    IOUtil.assertFileIsWritable(OUTPUT);
    final SamReaderFactory factory = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE);
    if (VALIDATION_STRINGENCY == ValidationStringency.STRICT) {
        factory.validationStringency(ValidationStringency.LENIENT);
    }
    final SamReader reader = factory.open(INPUT);
    final SAMFileWriter writer = new SAMFileWriterFactory().makeSAMOrBAMWriter(reader.getFileHeader(), true, OUTPUT);
    final CloseableIterator<SAMRecord> it = reader.iterator();
    final ProgressLogger progress = new ProgressLogger(Log.getInstance(CleanSam.class));

    while (it.hasNext()) {
        final SAMRecord rec = it.next();

        // If the read (or its mate) maps off the end of the alignment, clip it
        AbstractAlignmentMerger.createNewCigarsIfMapsOffEndOfReference(rec);

        // check the read's mapping quality
        if (rec.getReadUnmappedFlag() && 0 != rec.getMappingQuality()) {
            rec.setMappingQuality(0);
        }

        writer.addAlignment(rec);
        progress.record(rec);
    }

    writer.close();
    it.close();
    CloserUtil.close(reader);
    return 0;
}
Example #30
Source File: MarkDuplicatesWithMateCigarIterator.java From picard with MIT License
public void logMemoryStats(final Log log) {
    System.gc();
    final Runtime runtime = Runtime.getRuntime();
    log.info("freeMemory: " + runtime.freeMemory() +
            "; totalMemory: " + runtime.totalMemory() +
            "; maxMemory: " + runtime.maxMemory() +
            "; output buffer size: " + outputBuffer.size() +
            "; duplicate queue size: " + toMarkQueue.size());
}