Use of org.broadinstitute.hellbender.utils.test.IntegrationTestSpec in the project gatk by broadinstitute.
From the class SelectVariantsIntegrationTest, method testSACDiploid.
@Test
public void testSACDiploid() throws IOException {
    // Input gVCF carrying SAC annotations for a multisample diploid callset.
    final String inputFile = getToolTestDataDir() + "diploid-multisample-sac.g.vcf";
    // Expected result of selecting sample NA12891 with alternate-allele trimming.
    final String expectedFile = getToolTestDataDir() + "expected/" + "testSelectVariants_SACDiploid.vcf";
    final IntegrationTestSpec spec = new IntegrationTestSpec(
            baseTestString(" -sn NA12891 -trimAlternates", inputFile),
            Collections.singletonList(expectedFile));
    spec.executeTest("testSACDiploid" + inputFile, this);
}
Use of org.broadinstitute.hellbender.utils.test.IntegrationTestSpec in the project gatk by broadinstitute.
From the class ReadWalkerGCSSupportIntegrationTest, method testReadBAMOnGCS.
@Test(dataProvider = "GCSTestCases", groups = { "bucket" })
public void testReadBAMOnGCS(final String bam, final List<SimpleInterval> intervals, final boolean includeUnmapped, final String expectedOutput) throws IOException {
    // Build a " -L <interval>" argument for each requested interval (the list may be null/empty).
    // Fixed: new StringBuilder("") passed a redundant empty-string initial value.
    final StringBuilder intervalArgBuilder = new StringBuilder();
    if (intervals != null) {
        for (final SimpleInterval interval : intervals) {
            intervalArgBuilder.append(" -L ").append(interval.toString());
        }
    }
    if (includeUnmapped) {
        // Also request reads with no mapped position.
        intervalArgBuilder.append(" -L ").append("unmapped");
    }
    // Fixed: local was non-final, inconsistent with the rest of the method.
    final String intervalArg = intervalArgBuilder.toString();
    // "%s" is the output-file placeholder substituted by IntegrationTestSpec and
    // compared against expectedOutput.
    final IntegrationTestSpec testSpec = new IntegrationTestSpec(
            " -I " + getGCPTestInputPath() + bam + intervalArg + " -O %s",
            Collections.singletonList(expectedOutput));
    testSpec.executeTest("testReadBAMOnGCS", this);
}
Use of org.broadinstitute.hellbender.utils.test.IntegrationTestSpec in the project gatk by broadinstitute.
From the class ApplyBQSRSparkIntegrationTest, method testPR_GCS.
// TODO: Disabled because we can't read a Google bucket as a Hadoop file system
// outside of the dataproc environment yet. Re-enable once we've figured out how
// to set up the Google Hadoop FS connector.
@Test(dataProvider = "ApplyBQSRTestGCS", groups = { "spark", "bucket" }, enabled = false)
public void testPR_GCS(ABQSRTest params) throws IOException {
    // Assemble the command line: input bam, API key, recal table, per-case args, output placeholder.
    final String commandLine = " -I " + params.bam
            + " --apiKey " + getGCPTestApiKey()
            + " --bqsr_recal_file " + resourceDir + "HiSeq.20mb.1RG.table.gz "
            + params.args
            + " -O %s";
    final ArgumentsBuilder argsBuilder = new ArgumentsBuilder().add(commandLine);
    final IntegrationTestSpec spec =
            new IntegrationTestSpec(argsBuilder.getString(), Arrays.asList(params.expectedFile));
    spec.executeTest("testPrintReads-" + params.args, this);
}
Use of org.broadinstitute.hellbender.utils.test.IntegrationTestSpec in the project gatk by broadinstitute.
From the class ApplyBQSRSparkIntegrationTest, method testPRFailWithLowMaxCycle.
@Test(groups = "spark")
public void testPRFailWithLowMaxCycle() throws IOException {
    // Run with the low-maxCycle recalibration table; output is discarded (/dev/null).
    final String commandLine = " -I " + hiSeqBam
            + " --bqsr_recal_file " + resourceDir + "HiSeq.1mb.1RG.lowMaxCycle.table.gz"
            + " -O /dev/null";
    final ArgumentsBuilder argsBuilder = new ArgumentsBuilder().add(commandLine);
    // Expect the tool to fail with a UserException rather than produce output.
    final IntegrationTestSpec spec =
            new IntegrationTestSpec(argsBuilder.getString(), 0, UserException.class);
    spec.executeTest("testPRFailWithLowMaxCycle", this);
}
Use of org.broadinstitute.hellbender.utils.test.IntegrationTestSpec in the project gatk by broadinstitute.
From the class BaseRecalibratorSparkIntegrationTest, method testBQSRFailWithIncompatibleReference.
@Test(groups = { "spark", "cloud" })
public void testBQSRFailWithIncompatibleReference() throws IOException {
    // hg19 reference resolved through the Google Genomics reference API source.
    final String hg19Ref = ReferenceAPISource.URL_PREFIX + ReferenceAPISource.HG19_REF_ID;
    final String bqsrDir = getTestDataDir() + "/" + "BQSR" + "/";
    final String localResources = getResourceDir();
    // chr17 bam paired with known sites from b37 chr20/21 — deliberately mismatched
    // with the hg19 reference so the dictionaries conflict.
    final String chr17Bam = bqsrDir + "NA12878.chr17_69k_70k.dictFix.bam";
    final String knownSitesVcf = bqsrDir + DBSNP_138_B37_CH20_1M_1M1K_VCF;
    final BQSRTest params = new BQSRTest(hg19Ref, chr17Bam, knownSitesVcf, "",
            localResources + BQSRTestData.EXPECTED_WGS_B37_CH20_1M_1M1K_RECAL);
    // Expect the run to fail with an IncompatibleSequenceDictionaries UserException.
    final IntegrationTestSpec spec = new IntegrationTestSpec(
            params.getCommandLine(), 1, UserException.IncompatibleSequenceDictionaries.class);
    spec.executeTest("testBQSRFailWithIncompatibleReference", this);
}
Aggregations