Java Code Examples for htsjdk.samtools.util.IOUtil#recursiveDelete()

The following examples show how to use htsjdk.samtools.util.IOUtil#recursiveDelete(). Each example is drawn from the project and source file named above it.
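All of the examples share one pattern: create a scratch directory, do the work, and call IOUtil.recursiveDelete() on the directory's Path in a finally block (or a TestNG teardown method) so the whole tree is removed even when an assertion fails. Here is a minimal sketch of that pattern in isolation; the class name, directory prefix, and scratch file are illustrative only.

import htsjdk.samtools.util.IOUtil;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class RecursiveDeleteSketch {
    public static void main(final String[] args) throws IOException {
        // Create a scratch directory that must be cleaned up afterwards.
        final Path tempDir = Files.createTempDirectory("RecursiveDeleteSketch.");
        try {
            // Put something inside so the delete really has to recurse.
            Files.write(tempDir.resolve("scratch.txt"), "payload".getBytes());
        } finally {
            // Deletes the directory and everything beneath it.
            IOUtil.recursiveDelete(tempDir);
        }
    }
}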
Example 1
Source File: PolyATrimmerTest.java    From Drop-seq with MIT License
@Test(dataProvider = "testClpDataProvider")
public void testClp(final boolean newTrimmer) throws IOException {
    final File tempDir = Files.createTempDirectory("PolyATrimmerTest.").toFile();
    final Log.LogLevel saveLogLevel = Log.getGlobalLogLevel();
    Log.setGlobalLogLevel(Log.LogLevel.DEBUG);
    try {
        final PolyATrimmer clp = new PolyATrimmer();
        clp.INPUT = INPUT;
        clp.OUTPUT = File.createTempFile("PolyATrimmerTest.", ".sam");
        clp.OUTPUT.deleteOnExit();
        clp.OUTPUT_SUMMARY = File.createTempFile("PolyATrimmerTest.", ".summary");
        clp.OUTPUT_SUMMARY.deleteOnExit();
        clp.TMP_DIR = Arrays.asList(tempDir);
        tempDir.deleteOnExit();
        clp.MISMATCHES = 0;
        clp.NUM_BASES = 6;
        clp.VALIDATION_STRINGENCY = ValidationStringency.STRICT;
        clp.USE_NEW_TRIMMER = newTrimmer;
        Assert.assertEquals(clp.doWork(), 0);
        final File expectedResult = new File(TESTDATA_DIR, String.format("N701.%s_trimmer.sam", newTrimmer ? "new" : "old"));
        TestUtils.assertSamFilesSame(clp.OUTPUT, expectedResult);
    } finally {
        Log.setGlobalLogLevel(saveLogLevel);
        IOUtil.recursiveDelete(tempDir.toPath());
    }
}
 
Example 2
Source File: BamToBfqTest.java    From picard with MIT License
@Test(dataProvider = "inputs")
public void testBamToBfq(final File input, final boolean isPairedRun,
                 final String outputFilePrefix) throws IOException {
    final File analysisDir = IOUtil.createTempDir("BamToBfqTest", ".dir");
    try {
        final String[] args = new String[] {
                "INPUT=" + input.getAbsolutePath(),
                "ANALYSIS_DIR=" + analysisDir.getAbsolutePath(),
                "OUTPUT_FILE_PREFIX=" + outputFilePrefix,
                "PAIRED_RUN=" + isPairedRun,
                "READS_TO_ALIGN=8"
        };
        BamToBfq bamToBfq = new BamToBfq();
        Assert.assertEquals(bamToBfq.instanceMain(args), 0, "Can't process " + input.getAbsolutePath() + " correctly");

        final File output = new File(analysisDir, outputFilePrefix + ".0.1.bfq");
        final File expectedBFQ = new File(TEST_DATA_DIR, outputFilePrefix + ".0.1.bfq");

        Assert.assertEquals(Files.readAllBytes(output.toPath()), Files.readAllBytes(expectedBFQ.toPath()));
    } finally {
        IOUtil.recursiveDelete(analysisDir.toPath());
    }
}
 
Example 3
Source File: MarkDuplicatesTest.java    From picard with MIT License
@Test(dataProvider = "testOpticalDuplicateDetectionDataProvider")
public void testOpticalDuplicateDetection(final File sam, final long expectedNumOpticalDuplicates) {
    final File outputDir = IOUtil.createTempDir(TEST_BASE_NAME + ".", ".tmp");
    outputDir.deleteOnExit();
    final File outputSam = new File(outputDir, TEST_BASE_NAME + ".sam");
    outputSam.deleteOnExit();
    final File metricsFile = new File(outputDir, TEST_BASE_NAME + ".duplicate_metrics");
    metricsFile.deleteOnExit();
    // Run MarkDuplicates on a single input file and verify the expected number of optical duplicates.
    final MarkDuplicates markDuplicates = new MarkDuplicates();
    markDuplicates.setupOpticalDuplicateFinder();
    markDuplicates.INPUT = CollectionUtil.makeList(sam.getAbsolutePath());
    markDuplicates.OUTPUT = outputSam;
    markDuplicates.METRICS_FILE = metricsFile;
    markDuplicates.TMP_DIR = CollectionUtil.makeList(outputDir);
    // Needed to suppress calling CommandLineProgram.getVersion(), which doesn't work for code not in a jar
    markDuplicates.PROGRAM_RECORD_ID = null;
    Assert.assertEquals(markDuplicates.doWork(), 0);
    Assert.assertEquals(markDuplicates.numOpticalDuplicates(), expectedNumOpticalDuplicates);
    IOUtil.recursiveDelete(outputDir.toPath());
}
 
Example 4
Source File: IlluminaBasecallsToFastqTest.java    From picard with MIT License
@Test
public void testMultiplexWithIlluminaReadNameHeaders() throws Exception {
    final File outputDir = File.createTempFile("testMultiplexRH.", ".dir");
    try {
        outputDir.delete();
        outputDir.mkdir();
        outputDir.deleteOnExit();

        final String filePrefix = "testMultiplexRH";
        final File outputPrefix = new File(outputDir, filePrefix);

        runPicardCommandLine(new String[]{
                "BASECALLS_DIR=" + BASECALLS_DIR,
                "LANE=" + 1,
                "RUN_BARCODE=HiMom",
                "READ_STRUCTURE=" + "25T8B25T",
                "OUTPUT_PREFIX=" + outputPrefix.getAbsolutePath(),
                "MACHINE_NAME=machine1",
                "FLOWCELL_BARCODE=abcdeACXX",
                "READ_NAME_FORMAT=" + IlluminaBasecallsToFastq.ReadNameFormat.ILLUMINA,
                "MAX_READS_IN_RAM_PER_TILE=100" //force spill to disk to test encode/decode
        });

        final String[] filenames = new String[]{
                filePrefix + ".1.fastq",
                filePrefix + ".barcode_1.fastq"
        };
        for (final String filename : filenames) {
            IOUtil.assertFilesEqual(new File(outputDir, filename), new File(TEST_DATA_DIR, filename));
        }

    } finally {
        IOUtil.recursiveDelete(outputDir.toPath());
    }
}
 
Example 5
Source File: CleanSamTester.java    From picard with MIT License
protected void test() {
    try {
        final SamFileValidator validator = new SamFileValidator(new PrintWriter(System.out), 8000);

        // Validate it has the expected cigar
        validator.setIgnoreWarnings(true);
        validator.setVerbose(true, 1000);
        validator.setErrorsToIgnore(Arrays.asList(SAMValidationError.Type.MISSING_READ_GROUP));
        SamReaderFactory factory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT);
        SamReader samReader = factory.open(getOutput());
        final SAMRecordIterator iterator = samReader.iterator();
        while (iterator.hasNext()) {
            final SAMRecord rec = iterator.next();
            Assert.assertEquals(rec.getCigarString(), expectedCigar);
            if (SAMUtils.hasMateCigar(rec)) {
                Assert.assertEquals(SAMUtils.getMateCigarString(rec), expectedCigar);
            }
        }
        CloserUtil.close(samReader);

        // Run validation on the output file
        samReader = factory.open(getOutput());
        final boolean validated = validator.validateSamFileVerbose(samReader, null);
        CloserUtil.close(samReader);

        Assert.assertTrue(validated, "ValidateSamFile failed");
    } finally {
        IOUtil.recursiveDelete(getOutputDir().toPath());
    }
}
 
Example 6
Source File: IntervalListToolsTest.java    From picard with MIT License
@AfterTest
void deleteTempDirs() {
    for (File file : dirsToDelete) {
        try {
            IOUtil.recursiveDelete(file.toPath());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
Example 7
Source File: MarkDuplicatesSetSizeHistogramTester.java    From picard with MIT License
@AfterClass
public void afterTest() {
    IOUtil.recursiveDelete(getOutputDir().toPath());
}
 
Example 8
Source File: MarkDuplicatesTest.java    From picard with MIT License
/**
 * Test that PG header records are created & chained appropriately (or not created), and that the PG record chains
 * are as expected.  MarkDuplicates is used both to merge and to mark dupes in this case.
 * @param suppressPg If true, do not create PG header record.
 * @param expectedPnVnByReadName For each read, the expected chain of PG records.
 */
@Test(dataProvider = "pgRecordChainingTest")
public void pgRecordChainingTest(final boolean suppressPg,
                                 final Map<String, List<ExpectedPnAndVn>> expectedPnVnByReadName) {
    final File outputDir = IOUtil.createTempDir(TEST_BASE_NAME + ".", ".tmp");
    outputDir.deleteOnExit();
    try {
        // Run MarkDuplicates, merging the 3 input files, and either enabling or suppressing PG header
        // record creation according to suppressPg.
        final MarkDuplicates markDuplicates = new MarkDuplicates();
        final ArrayList<String> args = new ArrayList<>();
        for (int i = 1; i <= 3; ++i) {
            args.add("INPUT=" + new File(TEST_DATA_DIR, "merge" + i + ".sam").getAbsolutePath());
        }
        final File outputSam = new File(outputDir, TEST_BASE_NAME + ".sam");
        args.add("OUTPUT=" + outputSam.getAbsolutePath());
        args.add("METRICS_FILE=" + new File(outputDir, TEST_BASE_NAME + ".duplicate_metrics").getAbsolutePath());
        args.add("ADD_PG_TAG_TO_READS=true");
        if (suppressPg) args.add("PROGRAM_RECORD_ID=null");

        // I generally prefer to call doWork rather than invoking the argument parser, but it is necessary
        // in this case to initialize the command line.
        // Note that for the unit test, version won't come through because it is obtained through jar
        // manifest, and unit test doesn't run code from a jar.
        Assert.assertEquals(markDuplicates.instanceMain(args.toArray(new String[args.size()])), 0);

        // Read the MarkDuplicates output file, and get the PG ID for each read.  In this particular test,
        // the PG ID should be the same for both ends of a pair.
        final SamReader reader = SamReaderFactory.makeDefault().open(outputSam);

        final Map<String, String> pgIdForReadName = new HashMap<>();
        for (final SAMRecord rec : reader) {
            final String existingPgId = pgIdForReadName.get(rec.getReadName());
            final String thisPgId = rec.getStringAttribute(SAMTag.PG.name());
            if (existingPgId != null) {
                Assert.assertEquals(thisPgId, existingPgId);
            } else {
                pgIdForReadName.put(rec.getReadName(), thisPgId);
            }
        }
        final SAMFileHeader header = reader.getFileHeader();
        CloserUtil.close(reader);

        // Confirm that for each read name, the chain of PG records contains exactly the number that is expected,
        // and that values in the PG chain are as expected.
        for (final Map.Entry<String, List<ExpectedPnAndVn>> entry : expectedPnVnByReadName.entrySet()) {
            final String readName = entry.getKey();
            final List<ExpectedPnAndVn> expectedList = entry.getValue();
            String pgId = pgIdForReadName.get(readName);
            for (final ExpectedPnAndVn expected : expectedList) {
                final SAMProgramRecord programRecord = header.getProgramRecord(pgId);
                if (expected.expectedPn != null) Assert.assertEquals(programRecord.getProgramName(), expected.expectedPn);
                if (expected.expectedVn != null) Assert.assertEquals(programRecord.getProgramVersion(), expected.expectedVn);
                pgId = programRecord.getPreviousProgramGroupId();
            }
            Assert.assertNull(pgId);
        }

    } finally {
        IOUtil.recursiveDelete(outputDir.toPath());
    }
}
 
Example 9
Source File: MarkDuplicatesTagRepresentativeReadIndexTester.java    From picard with MIT License
@Override
public void test() {
    try {
        updateExpectedDuplicationMetrics();
        // Read the output and check the duplicate flag
        int outputRecords = 0;
        int indexInFile = 0;
        final SamReader reader = SamReaderFactory.makeDefault().open(getOutput());
        for (final SAMRecord record : reader) {
            outputRecords++;

            final String key = samRecordToDuplicatesFlagsKey(record);
            Assert.assertTrue(this.duplicateFlags.containsKey(key), "DOES NOT CONTAIN KEY: " + key);
            final boolean value = this.duplicateFlags.get(key);
            this.duplicateFlags.remove(key);
            if (value != record.getDuplicateReadFlag()) {
                System.err.println("Mismatching read:");
                System.err.print(record.getSAMString());
            }
            Assert.assertEquals(record.getDuplicateReadFlag(), value);
            if (testRepresentativeReads) {
                if (expectedRepresentativeIndexMap.containsKey(indexInFile) && expectedSetSizeMap.containsKey(record.getReadName())){
                    Assert.assertEquals(record.getAttribute("DI"), expectedRepresentativeIndexMap.get(indexInFile));
                    Assert.assertEquals(record.getAttribute("DS"), expectedSetSizeMap.get(record.getReadName()));
                }
            }
            indexInFile++;
        }
        CloserUtil.close(reader);

        // Ensure the program output the same number of records as were read in
        Assert.assertEquals(outputRecords, this.getNumberOfRecords(), ("saw " + outputRecords + " output records, vs. " + this.getNumberOfRecords() + " input records"));

        // Check the values written to metrics.txt against our input expectations
        final MetricsFile<DuplicationMetrics, Comparable<?>> metricsOutput = new MetricsFile<>();
        try {
            metricsOutput.read(new FileReader(metricsFile));
        } catch (final FileNotFoundException ex) {
            throw new PicardException("Metrics file not found: " + ex);
        }
        // Expect getMetrics to return a collection with a single DuplicationMetrics object.
        Assert.assertEquals(metricsOutput.getMetrics().size(), 1);
        final DuplicationMetrics observedMetrics = metricsOutput.getMetrics().get(0);
        Assert.assertEquals(observedMetrics.UNPAIRED_READS_EXAMINED, expectedMetrics.UNPAIRED_READS_EXAMINED, "UNPAIRED_READS_EXAMINED does not match expected");
        Assert.assertEquals(observedMetrics.READ_PAIRS_EXAMINED, expectedMetrics.READ_PAIRS_EXAMINED, "READ_PAIRS_EXAMINED does not match expected");
        Assert.assertEquals(observedMetrics.UNMAPPED_READS, expectedMetrics.UNMAPPED_READS, "UNMAPPED_READS does not match expected");
        Assert.assertEquals(observedMetrics.UNPAIRED_READ_DUPLICATES, expectedMetrics.UNPAIRED_READ_DUPLICATES, "UNPAIRED_READ_DUPLICATES does not match expected");
        Assert.assertEquals(observedMetrics.READ_PAIR_DUPLICATES, expectedMetrics.READ_PAIR_DUPLICATES, "READ_PAIR_DUPLICATES does not match expected");
        Assert.assertEquals(observedMetrics.READ_PAIR_OPTICAL_DUPLICATES, expectedMetrics.READ_PAIR_OPTICAL_DUPLICATES, "READ_PAIR_OPTICAL_DUPLICATES does not match expected");
        Assert.assertEquals(observedMetrics.PERCENT_DUPLICATION, expectedMetrics.PERCENT_DUPLICATION, "PERCENT_DUPLICATION does not match expected");
        Assert.assertEquals(observedMetrics.ESTIMATED_LIBRARY_SIZE, expectedMetrics.ESTIMATED_LIBRARY_SIZE, "ESTIMATED_LIBRARY_SIZE does not match expected");
        Assert.assertEquals(observedMetrics.SECONDARY_OR_SUPPLEMENTARY_RDS, expectedMetrics.SECONDARY_OR_SUPPLEMENTARY_RDS, "SECONDARY_OR_SUPPLEMENTARY_RDS does not match expected");
    } finally {
        IOUtil.recursiveDelete(getOutputDir().toPath());
    }
}
 
Example 10
Source File: AbstractMarkDuplicatesCommandLineProgramTester.java    From picard with MIT License
/**
 * Runs the test and returns the duplication metrics.
 * @return the duplication metrics read from the output metrics file
 * @throws IOException if the output files cannot be read
 */
public MetricsFile<DuplicationMetrics, Double> testMetrics() throws IOException {
    try {
        updateExpectedDuplicationMetrics();

        // Read the output and check the duplicate flag
        int outputRecords = 0;
        final Set<String> sequencingDTErrorsSeen = new HashSet<>();
        try(final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(fastaFiles.get(samRecordSetBuilder.getHeader())).open(getOutput())) {
            for (final SAMRecord record : reader) {
                outputRecords++;
                final String key = samRecordToDuplicatesFlagsKey(record);
                Assert.assertTrue(this.duplicateFlags.containsKey(key), "DOES NOT CONTAIN KEY: " + key);
                final boolean value = this.duplicateFlags.get(key);
                this.duplicateFlags.remove(key);
                Assert.assertEquals(record.getDuplicateReadFlag(), value, "Mismatching read: " + record.getSAMString());
                if (testOpticalDuplicateDTTag && MarkDuplicates.DUPLICATE_TYPE_SEQUENCING.equals(record.getAttribute("DT"))) {
                    sequencingDTErrorsSeen.add(record.getReadName());
                }
            }
        }

        // Ensure the program output the same number of records as were read in
        Assert.assertEquals(outputRecords, this.getNumberOfRecords(), ("saw " + outputRecords + " output records, vs. " + this.getNumberOfRecords() + " input records"));

        // Check the values written to metrics.txt against our input expectations
        final MetricsFile<DuplicationMetrics, Double> metricsOutput = new MetricsFile<>();
        try {
            metricsOutput.read(new FileReader(metricsFile));
        } catch (final FileNotFoundException ex) {
            Assert.fail("Metrics file not found: " + ex.getMessage());
        }
        Assert.assertEquals(metricsOutput.getMetrics().size(), 1);
        final DuplicationMetrics observedMetrics = metricsOutput.getMetrics().get(0);
        Assert.assertEquals(observedMetrics.UNPAIRED_READS_EXAMINED, expectedMetrics.UNPAIRED_READS_EXAMINED, "UNPAIRED_READS_EXAMINED does not match expected");
        Assert.assertEquals(observedMetrics.READ_PAIRS_EXAMINED, expectedMetrics.READ_PAIRS_EXAMINED, "READ_PAIRS_EXAMINED does not match expected");
        Assert.assertEquals(observedMetrics.UNMAPPED_READS, expectedMetrics.UNMAPPED_READS, "UNMAPPED_READS does not match expected");
        Assert.assertEquals(observedMetrics.UNPAIRED_READ_DUPLICATES, expectedMetrics.UNPAIRED_READ_DUPLICATES, "UNPAIRED_READ_DUPLICATES does not match expected");
        Assert.assertEquals(observedMetrics.READ_PAIR_DUPLICATES, expectedMetrics.READ_PAIR_DUPLICATES, "READ_PAIR_DUPLICATES does not match expected");
        Assert.assertEquals(observedMetrics.READ_PAIR_OPTICAL_DUPLICATES, expectedMetrics.READ_PAIR_OPTICAL_DUPLICATES, "READ_PAIR_OPTICAL_DUPLICATES does not match expected");
        Assert.assertEquals(observedMetrics.PERCENT_DUPLICATION, expectedMetrics.PERCENT_DUPLICATION, "PERCENT_DUPLICATION does not match expected");
        Assert.assertEquals(observedMetrics.ESTIMATED_LIBRARY_SIZE, expectedMetrics.ESTIMATED_LIBRARY_SIZE, "ESTIMATED_LIBRARY_SIZE does not match expected");
        Assert.assertEquals(observedMetrics.SECONDARY_OR_SUPPLEMENTARY_RDS, expectedMetrics.SECONDARY_OR_SUPPLEMENTARY_RDS, "SECONDARY_OR_SUPPLEMENTARY_RDS does not match expected");
        if (testOpticalDuplicateDTTag) {
            Assert.assertEquals(sequencingDTErrorsSeen.size(), expectedMetrics.READ_PAIR_OPTICAL_DUPLICATES, "READ_PAIR_OPTICAL_DUPLICATES does not match duplicate groups observed in the file");
            Assert.assertEquals(sequencingDTErrorsSeen.size(), observedMetrics.READ_PAIR_OPTICAL_DUPLICATES, "READ_PAIR_OPTICAL_DUPLICATES does not match duplicate groups observed in the file");
        }
        return metricsOutput;
    } finally {
        IOUtil.recursiveDelete(getOutputDir().toPath());
    }
}
 
Example 11
Source File: CollectIndependentReplicatesMetricTest.java    From picard with MIT License
@AfterTest
public void tearDown() {
    IOUtil.recursiveDelete(bamOutDir.toPath());
}
 
Example 12
Source File: IOUtils.java    From gatk with BSD 3-Clause "New" or "Revised" License
/**
 * Deletes rootPath recursively.
 * @param rootPath the file or directory to delete
 */
public static void deleteRecursively(final Path rootPath) {
    IOUtil.recursiveDelete(rootPath);
}
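Because the wrapper just forwards to htsjdk, it drops into the same teardown pattern used throughout the examples above. A minimal TestNG-style sketch follows; the class and method names are illustrative, and the import assumes GATK's usual package layout for IOUtils.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

// Assumed package for GATK's IOUtils wrapper shown above.
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;

public class ScratchDirCleanupTest {
    private Path scratchDir;

    @BeforeClass
    public void makeScratchDir() throws IOException {
        scratchDir = Files.createTempDirectory("ScratchDirCleanupTest.");
    }

    @AfterClass
    public void deleteScratchDir() {
        // Equivalent to calling IOUtil.recursiveDelete(scratchDir) directly.
        IOUtils.deleteRecursively(scratchDir);
    }
}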