Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Account for overlapping reads in Theoretical Sensitivity model #1802

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
7 changes: 4 additions & 3 deletions src/main/java/picard/analysis/CollectWgsMetrics.java
Original file line number Diff line number Diff line change
Expand Up @@ -139,8 +139,8 @@ public class CollectWgsMetrics extends CommandLineProgram {
@Argument(doc="Allele fraction for which to calculate theoretical sensitivity.", optional = true)
public List<Double> ALLELE_FRACTION = new ArrayList<>(Arrays.asList(0.001, 0.005, 0.01, 0.02, 0.05, 0.1, 0.2, 0.3, 0.5));

@Argument(doc="Phred scaled PCR Error Rate for Theoretical Sensitivity model.", optional = true)
public int PCR_ERROR_RATE = 45;
@Argument(doc="Maximum value allowed for base quality resulting from overlapping reads. Often this is governed by PCR error rate. (Phred scale)", optional = true)
public int MAX_OVERLAP_BASE_QUAL = 45;

@Argument(doc = "If true, fast algorithm is used.")
public boolean USE_FAST_ALGORITHM = false;
Expand Down Expand Up @@ -248,7 +248,8 @@ protected int doWork() {
log.info("Calculating theoretical sensitivity at " + ALLELE_FRACTION.size() + " allele fractions.");

List<TheoreticalSensitivityMetrics> tsm = TheoreticalSensitivity.calculateSensitivities(SAMPLE_SIZE,
collector.getUnfilteredDepthHistogram(), collector.getUnfilteredBaseQHistogram(), ALLELE_FRACTION, collector.basesExcludedByOverlap, PCR_ERROR_RATE);
collector.getUnfilteredDepthHistogram(), collector.getUnfilteredBaseQHistogram(), ALLELE_FRACTION,
collector.getMetrics(dupeFilter, adapterFilter, mapqFilter, pairFilter).PCT_EXC_OVERLAP, MAX_OVERLAP_BASE_QUAL);
theoreticalSensitivityMetrics.addAllMetrics(tsm);
theoreticalSensitivityMetrics.write(THEORETICAL_SENSITIVITY_OUTPUT);
}
Expand Down
56 changes: 16 additions & 40 deletions src/main/java/picard/analysis/TheoreticalSensitivity.java
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@ public class TheoreticalSensitivity {
private static final Log log = Log.getInstance(TheoreticalSensitivity.class);
private static final int SAMPLING_MAX = 600; //prevent 'infinite' loops
private static final int MAX_CONSIDERED_DEPTH_HET_SENS = 1000; // No point in looking any deeper than this, otherwise GC overhead is too high. Only used for HET sensitivity.
private static final int LARGE_NUMBER_OF_DRAWS = 10; // The number of draws at which we believe a Gaussian approximation to sum random variables.
private static final double DEPTH_BIN_WIDTH = 0.01; // Minimal fraction of depth histogram to use when integrating theoretical sensitivity. This ensures we don't calculate theoretical sensitivity at every depth, which would be computationally expensive.
private static final int RANDOM_SEED = 51;

Expand Down Expand Up @@ -241,12 +240,14 @@ public TheoreticalSensitivity() {
* @param sampleSize sampleSize is the total number of simulations to run
* @param alleleFraction the allele fraction to evaluate sensitivity at
* @param randomSeed random number seed to use for random number generator
* @param overlapProbability Probability a position on a read overlaps with its mate
* @param maxOverlapBaseQuality Maximum base quality that can result from summing overlapping base qualities, often capped by PCR error rate
* @return Theoretical sensitivity for the given arguments at a constant depth.
*/
public static double sensitivityAtConstantDepth(final int depth, final Histogram<Integer> qualityHistogram,
final double logOddsThreshold, final int sampleSize,
final double alleleFraction, final long randomSeed,
final double overlapProbability, final int pcrErrorRate) {
final double overlapProbability, final int maxOverlapBaseQuality) {
// If the depth is 0 at a particular locus, the sensitivity is trivially 0.0.
if (depth == 0) {
return 0.0;
Expand All @@ -268,47 +269,27 @@ public static double sensitivityAtConstantDepth(final int depth, final Histogram
final int altDepth = bd.sample();

int sumOfQualities;
if (altDepth < LARGE_NUMBER_OF_DRAWS) {
// If the number of alt reads is "small" we draw from the actual base quality distribution.
//sumOfQualities = IntStream.range(0, altDepth).map(n -> qualityRW.draw()).sum();
sumOfQualities = 0;
for(int i = 0;i < altDepth;i++) {
if(rg.nextDouble() > overlapProbability) {
sumOfQualities += Math.min(qualityRW.draw(), pcrErrorRate);
} else {
// Simulate overlapping reads by sampling from the quality distribution twice.
// Since PCR errors affect overlapping reads, the total contribution to the sum
// of qualities cannot exceed the PCR error rate.
sumOfQualities += Math.min(qualityRW.draw() + qualityRW.draw(), pcrErrorRate);
}
// Draw each alt base quality from the empirical base quality distribution.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Since it looks like you're getting rid of the Gaussian approximation at larger depth it seems that this comment is now moot.

sumOfQualities = 0;
for(int i = 0;i < altDepth;i++) {
if(rg.nextDouble() > overlapProbability) {
sumOfQualities += Math.min(qualityRW.draw(), maxOverlapBaseQuality);
} else {
// Simulate overlapping reads by sampling from the quality distribution twice.
// Because PCR errors affect both reads of an overlapping pair, the combined
// contribution to the sum of qualities is capped at maxOverlapBaseQuality.
sumOfQualities += Math.min(qualityRW.draw() + qualityRW.draw(), maxOverlapBaseQuality);
}
} else {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Have you profiled this change, in particular when we care about extremely deep data?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The tests are a bit long now, I'm trying to resolve the issue.

// If the number of alt reads is "large" we draw from a Gaussian approximation of the base
// quality distribution to speed up the code.
sumOfQualities = drawSumOfQScores(altDepth, averageQuality, standardDeviationQuality, randomNumberGenerator.nextGaussian());
}


if (isCalled(depth, altDepth, (double) sumOfQualities, alleleFraction, logOddsThreshold)) {
calledVariants++;
}
}
return (double) calledVariants / sampleSize;
}

/**
 * Approximates the sum of base qualities drawn from the alt-supporting reads.
 * Rather than sampling the base quality distribution altDepth times, this uses the
 * Central Limit Theorem: the sum of many independent draws is approximately Gaussian
 * with mean altDepth * averageQuality and standard deviation sqrt(altDepth) * standardDeviationQuality.
 * @param altDepth Number of draws to take from the base quality distribution
 * @param averageQuality Average quality of alt bases
 * @param standardDeviationQuality Sample standard deviation of base quality scores
 * @param z Number of standard deviations from the mean at which to take the sum
 * @return Simulated sum of base qualities supporting the alternate allele
 */
static int drawSumOfQScores(final int altDepth, final double averageQuality, final double standardDeviationQuality, final double z) {
    final double gaussianMean = altDepth * averageQuality;
    final double gaussianOffset = z * Math.sqrt(altDepth) * standardDeviationQuality;
    return (int) (gaussianMean + gaussianOffset);
}

/**
* Calculates the theoretical sensitivity with a given Phred-scaled quality score distribution at a constant
* depth.
Expand All @@ -335,15 +316,10 @@ private static double sensitivityAtConstantDepth(final int depth, final Histogra
* @param sampleSize the total number of simulations to run
* @param logOddsThreshold Log odds threshold necessary for variant to be called
* @param alleleFraction the allele fraction to evaluate sensitivity at
* @param overlapProbability the probability two bases from the same fragment are overlapping
* @param pcrErrorRate Upper bound on the effective base quality of two overlapping bases from the same fragment
* @return Theoretical sensitivity for the given arguments over a particular depth distribution.
*/
public static double theoreticalSensitivity(final Histogram<Integer> depthHistogram,
final Histogram<Integer> qualityHistogram, final int sampleSize,
final double logOddsThreshold, final double alleleFraction) {
return theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, logOddsThreshold, alleleFraction,
0.0, 45);
}

public static double theoreticalSensitivity(final Histogram<Integer> depthHistogram,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

do we need to keep this method overload, given that it is only used in tests? I get that it'd be an API change, but not sure how we feel about that. You're changing the API anyway with respect to sensitivityAtConstantDepth, so probably best to be consistent.

final Histogram<Integer> qualityHistogram, final int sampleSize,
final double logOddsThreshold, final double alleleFraction,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
import picard.analysis.MetricAccumulationLevel;
import picard.analysis.TheoreticalSensitivity;
import picard.analysis.TheoreticalSensitivityMetrics;
import picard.analysis.WgsMetrics;
import picard.cmdline.CommandLineProgram;
import picard.cmdline.StandardOptionDefinitions;
import picard.metrics.MultilevelMetrics;
Expand Down
3 changes: 1 addition & 2 deletions src/test/java/picard/IntelInflaterDeflaterLoadTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,7 @@ public void testIntelDeflaterIsAvailable() {
}

private void checkIntelSupported(final String componentName) {
// Check if on Linux Mac or Apple Silicon (e.g. Apple M1)
if ((!SystemUtils.IS_OS_LINUX && !SystemUtils.IS_OS_MAC) || SystemUtils.OS_ARCH.equals("aarch64")) {
if ((!SystemUtils.IS_OS_LINUX && !SystemUtils.IS_OS_MAC)) {
throw new SkipException(componentName + " is not available on this platform");
}

Expand Down
80 changes: 19 additions & 61 deletions src/test/java/picard/analysis/TheoreticalSensitivityTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@
import java.io.File;
import java.io.FileReader;
import java.util.*;
import java.util.stream.IntStream;

/**
* Created by davidben on 5/18/15.
Expand Down Expand Up @@ -249,12 +248,9 @@ public void testSensitivityAtConstantDepth(final double expected, final File met
metrics.read(metricsFileReader);
}

Object hi[] = metrics.getMetrics().toArray();

final List<Histogram<Integer>> histograms = metrics.getAllHistograms();
final Histogram<Integer> qualityHistogram = histograms.get(1);

List<?> l = metrics.getMetrics();
// We ensure that even using different random seeds we converge to roughly the same value.
for (int i = 0; i < 3; i++) {
double result = TheoreticalSensitivity.sensitivityAtConstantDepth(depth, qualityHistogram, 3,
Expand All @@ -266,25 +262,31 @@ public void testSensitivityAtConstantDepth(final double expected, final File met
@DataProvider(name = "TheoreticalSensitivityDataProvider")
public Object[][] arbFracSensDataProvider() {
final File wgsMetricsFile = new File(TEST_DIR, "test_Solexa-332667.wgs_metrics");
final File wgsMetricsHighDepthFile = new File(TEST_DIR, "ultra_high_depth.wgs_metrics");

// This test acts primarily as an integration test. The sample sizes
// are not quite large enough to converge properly, but are kept small for the purpose of
// keeping the compute time of the tests short.
return new Object[][]{
{0.90, wgsMetricsFile, 0.5, 400, false},
{0.78, wgsMetricsFile, 0.3, 400, false},
{0.29, wgsMetricsFile, 0.1, 500, false},
{0.08, wgsMetricsFile, 0.05, 500, false},

{0.90, wgsMetricsFile, 0.5, 400, true},
{0.80, wgsMetricsFile, 0.3, 400, true},
{0.35, wgsMetricsFile, 0.1, 500, true},
{0.12, wgsMetricsFile, 0.05, 500, true}
{0.90, wgsMetricsFile, 0.5, 400, false, 45},
{0.78, wgsMetricsFile, 0.3, 400, false, 45},
{0.29, wgsMetricsFile, 0.1, 500, false, 45},
{0.08, wgsMetricsFile, 0.05, 500, false, 45},

{0.90, wgsMetricsFile, 0.5, 400, true, 45},
{0.80, wgsMetricsFile, 0.3, 400, true, 45},
{0.35, wgsMetricsFile, 0.1, 500, true, 45},
{0.12, wgsMetricsFile, 0.05, 500, true, 45},

{0.90, wgsMetricsHighDepthFile, 0.001, 400, true, 45},
{0.80, wgsMetricsHighDepthFile, 0.001, 400, true, 90}
};
}

@Test(dataProvider = "TheoreticalSensitivityDataProvider")
public void testSensitivity(final double expected, final File metricsFile, final double alleleFraction, final int sampleSize, final boolean useOverlapProbability) throws Exception {
public void testSensitivity(final double expected, final File metricsFile, final double alleleFraction,
final int sampleSize, final boolean useOverlapProbability,
final int pcrErrorRate) throws Exception {
// This tests Theoretical Sensitivity using distributions on both base quality scores
// and the depth histogram.

Expand All @@ -305,10 +307,10 @@ public void testSensitivity(final double expected, final File metricsFile, final
final double overlapProbability = ((WgsMetrics) metrics.getMetrics().toArray()[0]).PCT_EXC_OVERLAP;
final double result;
if (useOverlapProbability) {
result = TheoreticalSensitivity.theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, 3, alleleFraction, overlapProbability, 45);
result = TheoreticalSensitivity.theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, 3, alleleFraction, overlapProbability, pcrErrorRate);
}
else {
result = TheoreticalSensitivity.theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, 3, alleleFraction);
result = TheoreticalSensitivity.theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, 3, alleleFraction, 0, pcrErrorRate);
}

Assert.assertEquals(result, expected, tolerance);
Expand Down Expand Up @@ -345,7 +347,7 @@ public void testHetVsArbitrary(final File metricsFile, final double tolerance, f
final double[] depthDistribution = TheoreticalSensitivity.normalizeHistogram(depthHistogram);

// TS is TheoreticalSensitivity, THS is TheoreticalHetSensitivity
final double resultFromTS = TheoreticalSensitivity.theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, 3, 0.5);
final double resultFromTS = TheoreticalSensitivity.theoreticalSensitivity(depthHistogram, qualityHistogram, sampleSize, 3, 0.5, 0.0, 45);
final double resultFromTHS = TheoreticalSensitivity.hetSNPSensitivity(depthDistribution, qualityDistribution, sampleSize, 3);

Assert.assertEquals(resultFromTS, resultFromTHS, tolerance);
Expand Down Expand Up @@ -374,50 +376,6 @@ public void testCallingThreshold(final int totalDepth, final int altDepth, final
Assert.assertEquals(TheoreticalSensitivity.isCalled(totalDepth, altDepth, sumOfAltQualities, alleleFraction, logOddsThreshold), expectedCall);
}

@DataProvider(name = "sumOfGaussiansDataProvider")
public Object[][] sumOfGaussians() {
final File wgsMetricsFile = new File(TEST_DIR, "test_Solexa-332667.wgs_metrics");
final File targetedMetricsFile = new File(TEST_DIR, "test_25103070136.targeted_pcr_metrics");

// When we sum more base qualities from a particular distribution, it should look increasingly Gaussian.
return new Object[][]{
{wgsMetricsFile, 500, 0.03},
{wgsMetricsFile, 20, 0.05},
{wgsMetricsFile, 10, 0.10},
{targetedMetricsFile, 500, 0.03},
{targetedMetricsFile, 20, 0.05},
{targetedMetricsFile, 10, 0.10}
};
}

@Test(dataProvider = "sumOfGaussiansDataProvider")
public void testDrawSumOfQScores(final File metricsFile, final int altDepth, final double tolerance) throws Exception {
final MetricsFile<TheoreticalSensitivityMetrics, Integer> metrics = new MetricsFile<>();
try (final FileReader metricsFileReader = new FileReader(metricsFile)) {
metrics.read(metricsFileReader);
}

final List<Histogram<Integer>> histograms = metrics.getAllHistograms();

final Histogram<Integer> qualityHistogram = histograms.get(1);
final TheoreticalSensitivity.RouletteWheel qualityRW = new TheoreticalSensitivity.RouletteWheel(
TheoreticalSensitivity.trimDistribution(TheoreticalSensitivity.normalizeHistogram(qualityHistogram)));

final Random randomNumberGenerator = new Random(51);

// Calculate mean and deviation of quality score distribution to enable Gaussian sampling below
final double averageQuality = qualityHistogram.getMean();
final double standardDeviationQuality = qualityHistogram.getStandardDeviation();

for (int k = 0; k < 1; k++) {
int sumOfQualitiesFull = IntStream.range(0, altDepth).map(n -> qualityRW.draw()).sum();
int sumOfQualities = TheoreticalSensitivity.drawSumOfQScores(altDepth, averageQuality,
standardDeviationQuality, randomNumberGenerator.nextGaussian());

Assert.assertEquals(sumOfQualitiesFull, sumOfQualities, sumOfQualitiesFull * tolerance);
}
}

@DataProvider(name = "trimDistributionDataProvider")
public Object[][] trimDistributions() {
return new Object[][]{
Expand Down