
Commit f1eefec

Merge pull request #447 from broadinstitute/ak_diamonds
use diamond types <>
akiezun committed Apr 25, 2015
2 parents a75b706 + 3d5794a · commit f1eefec
Showing 169 changed files with 578 additions and 578 deletions.
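
The change is mechanical throughout the commit: wherever a constructor call repeats the generic type arguments already declared on the left-hand side, the explicit arguments are replaced by the diamond operator <> (introduced in Java 7) and the compiler infers them. A minimal, self-contained sketch of the before/after pattern follows; the class, variable, and sample names in it are illustrative only and are not taken from the files in this diff.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DiamondOperatorSketch {
    public static void main(String[] args) {
        // Pre-diamond style, as on the removed (-) lines below: the type
        // arguments are spelled out on both sides of the assignment.
        List<String> samplesVerbose = new ArrayList<String>();
        Map<String, List<Integer>> depthsVerbose = new HashMap<String, List<Integer>>();

        // Diamond style, as on the added (+) lines below: <> on the right-hand
        // side; the compiler infers <String> and <String, List<Integer>> from
        // the declared types on the left.
        List<String> samples = new ArrayList<>();
        Map<String, List<Integer>> depths = new HashMap<>();

        samples.add("sampleA");
        List<Integer> sampleDepths = new ArrayList<>();
        sampleDepths.add(30);
        depths.put("sampleA", sampleDepths);

        System.out.println(samples + " " + depths);
        System.out.println(samplesVerbose.size() + " " + depthsVerbose.size());
    }
}

Because only the redundant type arguments on the right-hand side are dropped, behaviour is unchanged, which is why the commit adds and deletes exactly the same number of lines.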

@@ -98,7 +98,7 @@ private abstract class Distributor {
protected abstract PerUnitMetricCollector<METRIC_TYPE, HISTOGRAM_KEY, ARGTYPE> makeUnknownCollector();

public Distributor(final List<SAMReadGroupRecord> rgRecs) {
- collectors = new LinkedHashMap<String, PerUnitMetricCollector<METRIC_TYPE, HISTOGRAM_KEY, ARGTYPE>>();
+ collectors = new LinkedHashMap<>();
for(final SAMReadGroupRecord rg : rgRecs) {
final String key = getKey(rg);
if(!collectors.containsKey(key)) {
@@ -150,7 +150,7 @@ public void addToFile(final MetricsFile<METRIC_TYPE, HISTOGRAM_KEY> file) {
private class AllReadsDistributor extends Distributor {

public AllReadsDistributor(final List<SAMReadGroupRecord> rgRecs) {
- super(new ArrayList<SAMReadGroupRecord>());
+ super(new ArrayList<>());
makeCollector(null);
}

@@ -258,7 +258,7 @@ protected PerUnitMetricCollector<METRIC_TYPE, HISTOGRAM_KEY, ARGTYPE> makeUnknow
* readGroups found in the records depending on the accumulationLevels provided
*/
protected void setup(final Set<MetricAccumulationLevel> accumulationLevels, final List<SAMReadGroupRecord> samRgRecords) {
- outputOrderedDistributors = new ArrayList<Distributor>(4);
+ outputOrderedDistributors = new ArrayList<>(4);
if(accumulationLevels.contains(MetricAccumulationLevel.ALL_READS)) {
outputOrderedDistributors.add(new AllReadsDistributor(samRgRecords));
}

@@ -198,7 +198,7 @@ public final class ClipReads extends ReadWalker {
/**
* List of sequence that should be clipped from the reads
*/
- private List<SeqToClip> sequencesToClip = new ArrayList<SeqToClip>();
+ private List<SeqToClip> sequencesToClip = new ArrayList<>();

/**
* List of cycle start / stop pairs (0-based, stop is included in the cycle to remove) to clip from the reads
@@ -259,7 +259,7 @@ public void onTraversalStart() {
// Initialize the cycle ranges to clip
//
if (cyclesToClipArg != null) {
- cyclesToClip = new ArrayList<Pair<Integer, Integer>>();
+ cyclesToClip = new ArrayList<>();
for (String range : cyclesToClipArg.split(",")) {
try {
String[] elts = range.split("-");

@@ -73,7 +73,7 @@ public void acceptRecord(final SAMRecord rec, final ReferenceSequence ref) {

/** Converts the supplied adapter sequences to byte arrays in both fwd and rc. */
private byte [][] prepareAdapterSequences() {
- final Set<String> kmers = new HashSet<String>();
+ final Set<String> kmers = new HashSet<>();

// Make a set of all kmers of adapterMatchLength
for (final String seq : adapterSequence) {
@@ -194,7 +194,7 @@ public void addMetricsToFile(final MetricsFile<AlignmentSummaryMetrics, Comparab
*/
private class IndividualAlignmentSummaryMetricsCollector {
private long numPositiveStrand = 0;
- private final Histogram<Integer> readLengthHistogram = new Histogram<Integer>();
+ private final Histogram<Integer> readLengthHistogram = new Histogram<>();
private AlignmentSummaryMetrics metrics;
private long chimeras;
private long chimerasDenominator;
@@ -203,9 +203,9 @@ private class IndividualAlignmentSummaryMetricsCollector {

private long nonBisulfiteAlignedBases = 0;
private long hqNonBisulfiteAlignedBases = 0;
- private final Histogram<Long> mismatchHistogram = new Histogram<Long>();
- private final Histogram<Long> hqMismatchHistogram = new Histogram<Long>();
- private final Histogram<Integer> badCycleHistogram = new Histogram<Integer>();
+ private final Histogram<Long> mismatchHistogram = new Histogram<>();
+ private final Histogram<Long> hqMismatchHistogram = new Histogram<>();
+ private final Histogram<Integer> badCycleHistogram = new Histogram<>();

public IndividualAlignmentSummaryMetricsCollector(final AlignmentSummaryMetrics.Category pairingCategory,
final String sample,

@@ -103,7 +103,7 @@ public SinglePassSamProgram makeInstance(final String outbase) {

@Override
protected String[] customCommandLineValidation() {
- programsToRun = new ArrayList<ProgramInterface>(PROGRAM);
+ programsToRun = new ArrayList<>(PROGRAM);
return super.customCommandLineValidation();
}

@@ -121,8 +121,8 @@ public Object doWork() {
OUTPUT = OUTPUT.substring(0, OUTPUT.length() - 1);
}

- final List<SinglePassSamProgram> programs = new ArrayList<SinglePassSamProgram>();
- for (ProgramInterface program : new HashSet<ProgramInterface>(programsToRun)) {
+ final List<SinglePassSamProgram> programs = new ArrayList<>();
+ for (ProgramInterface program : new HashSet<>(programsToRun)) {
SinglePassSamProgram instance = program.makeInstance(OUTPUT);

// Generally programs should not be accessing these directly but it might make things smoother

@@ -83,7 +83,7 @@ public class CollectOxoGMetrics extends PicardCommandLineProgram {
public int CONTEXT_SIZE = 1;

@Argument(doc = "The optional set of sequence contexts to restrict analysis to. If not supplied all contexts are analyzed.")
- public Set<String> CONTEXTS = new HashSet<String>();
+ public Set<String> CONTEXTS = new HashSet<>();

@Argument(doc = "For debugging purposes: stop after visiting this many sites with at least 1X coverage.")
public int STOP_AFTER = Integer.MAX_VALUE;
@@ -159,7 +159,7 @@ public static final class CpcgMetrics extends MetricBase {
@Override
protected String[] customCommandLineValidation() {
final int size = 1 + 2 * CONTEXT_SIZE;
- final List<String> messages = new ArrayList<String>();
+ final List<String> messages = new ArrayList<>();

for (final String ctx : CONTEXTS) {
if (ctx.length() != size) {
@@ -188,16 +188,16 @@ protected Object doWork() {
final ReferenceSequenceFileWalker refWalker = new ReferenceSequenceFileWalker(REFERENCE_SEQUENCE);
final SamReader in = SamReaderFactory.makeDefault().open(INPUT);

- final Set<String> samples = new HashSet<String>();
- final Set<String> libraries = new HashSet<String>();
+ final Set<String> samples = new HashSet<>();
+ final Set<String> libraries = new HashSet<>();
for (final SAMReadGroupRecord rec : in.getFileHeader().getReadGroups()) {
samples.add(getOrElse(rec.getSample(), UNKNOWN_SAMPLE));
libraries.add(getOrElse(rec.getLibrary(), UNKNOWN_LIBRARY));
}

// Setup the calculators
final Set<String> contexts = CONTEXTS.isEmpty() ? makeContextStrings(CONTEXT_SIZE) : CONTEXTS;
- final ListMap<String, Calculator> calculators = new ListMap<String, Calculator>();
+ final ListMap<String, Calculator> calculators = new ListMap<>();
for (final String context : contexts) {
for (final String library : libraries) {
calculators.add(context, new Calculator(library, context));
@@ -221,7 +221,7 @@ protected Object doWork() {
iterator.setEmitUncoveredLoci(false);
iterator.setMappingQualityScoreCutoff(MINIMUM_MAPPING_QUALITY);

- final List<SamRecordFilter> filters = new ArrayList<SamRecordFilter>();
+ final List<SamRecordFilter> filters = new ArrayList<>();
filters.add(new NotPrimaryAlignmentFilter());
filters.add(new DuplicateReadFilter());
if (MINIMUM_INSERT_SIZE > 0 || MAXIMUM_INSERT_SIZE > 0) {
@@ -275,7 +275,7 @@ protected Object doWork() {
for (final List<Calculator> calcs : calculators.values()) {
for (final Calculator calc : calcs) {
final CpcgMetrics m = calc.finish();
- m.SAMPLE_ALIAS = StringUtil.join(",", new ArrayList<String>(samples));
+ m.SAMPLE_ALIAS = StringUtil.join(",", new ArrayList<>(samples));
file.addMetric(m);
}
}
@@ -286,7 +286,7 @@ protected Object doWork() {
}

private Set<String> makeContextStrings(final int contextSize) {
- final Set<String> contexts = new HashSet<String>();
+ final Set<String> contexts = new HashSet<>();

for (final byte[] kmer : generateAllKmers(2 * contextSize + 1)) {
if (kmer[contextSize] == 'C') {

@@ -70,7 +70,7 @@ public void acceptRecord(final SAMRecord record, final ReferenceSequence refSeq)

/** A Collector for individual InsertSizeMetrics for a given SAMPLE or SAMPLE/LIBRARY or SAMPLE/LIBRARY/READ_GROUP (depending on aggregation levels) */
public class PerUnitInsertSizeMetricsCollector implements PerUnitMetricCollector<InsertSizeMetrics, Integer, InsertSizeCollectorArgs> {
- final EnumMap<SamPairUtil.PairOrientation, Histogram<Integer>> Histograms = new EnumMap<SamPairUtil.PairOrientation, Histogram<Integer>>(SamPairUtil.PairOrientation.class);
+ final EnumMap<SamPairUtil.PairOrientation, Histogram<Integer>> Histograms = new EnumMap<>(SamPairUtil.PairOrientation.class);
final String sample;
final String library;
final String readGroup;
@@ -93,9 +93,9 @@ else if (this.sample != null) {
else {
prefix = "All_Reads.";
}
- Histograms.put(SamPairUtil.PairOrientation.FR, new Histogram<Integer>("insert_size", prefix + "fr_count"));
- Histograms.put(SamPairUtil.PairOrientation.TANDEM, new Histogram<Integer>("insert_size", prefix + "tandem_count"));
- Histograms.put(SamPairUtil.PairOrientation.RF, new Histogram<Integer>("insert_size", prefix + "rf_count"));
+ Histograms.put(SamPairUtil.PairOrientation.FR, new Histogram<>("insert_size", prefix + "fr_count"));
+ Histograms.put(SamPairUtil.PairOrientation.TANDEM, new Histogram<>("insert_size", prefix + "tandem_count"));
+ Histograms.put(SamPairUtil.PairOrientation.RF, new Histogram<>("insert_size", prefix + "rf_count"));
}

public void acceptRecord(final InsertSizeCollectorArgs args) {

@@ -100,7 +100,7 @@ private void ensureArraysBigEnough(final int length) {

Histogram<Integer> getMeanQualityHistogram() {
final String label = useOriginalQualities ? "MEAN_ORIGINAL_QUALITY" : "MEAN_QUALITY";
- final Histogram<Integer> meanQualities = new Histogram<Integer>("CYCLE", label);
+ final Histogram<Integer> meanQualities = new Histogram<>("CYCLE", label);

int firstReadLength = 0;


@@ -92,8 +92,8 @@ protected void acceptRead(final SAMRecord rec, final ReferenceSequence ref) {
@Override
protected void finish() {
// Built the Histograms out of the long[]s
- final Histogram<Byte> qHisto = new Histogram<Byte>("QUALITY", "COUNT_OF_Q");
- final Histogram<Byte> oqHisto = new Histogram<Byte>("QUALITY", "COUNT_OF_OQ");
+ final Histogram<Byte> qHisto = new Histogram<>("QUALITY", "COUNT_OF_Q");
+ final Histogram<Byte> oqHisto = new Histogram<>("QUALITY", "COUNT_OF_OQ");

for (int i=0; i< qCounts.length; ++i) {
if (qCounts[i] > 0) qHisto.increment( (byte) i, (double) qCounts[i]);

@@ -36,7 +36,7 @@ public ArtifactCounter(final String sampleAlias, final String library, final int
this.library = library;

// define the contexts
- this.fullContexts = new HashSet<String>();
+ this.fullContexts = new HashSet<>();
for (final byte[] kmer : SequenceUtil.generateAllKmers(2 * contextSize + 1)) {
this.fullContexts.add(StringUtil.bytesToString(kmer));
}
@@ -45,9 +45,9 @@ public ArtifactCounter(final String sampleAlias, final String library, final int
// NB: we use N to represent a wildcard base, rather than an ambiguous base. It's assumed that all of the input
// contexts are unambiguous, and that any actual N's in the data have been dealt with elsewhere.
final String padding = StringUtil.repeatCharNTimes('N', contextSize);
- this.leadingContextMap = new HashMap<String, String>();
- this.trailingContextMap = new HashMap<String, String>();
- this.zeroContextMap = new HashMap<String, String>();
+ this.leadingContextMap = new HashMap<>();
+ this.trailingContextMap = new HashMap<>();
+ this.zeroContextMap = new HashMap<>();
for (final String context : this.fullContexts) {
final String leading = context.substring(0, contextSize);
final String trailing = context.substring(contextSize + 1, context.length());
@@ -58,21 +58,21 @@ public ArtifactCounter(final String sampleAlias, final String library, final int
}

// set up the accumulators
- final Set<String> halfContexts = new HashSet<String>();
+ final Set<String> halfContexts = new HashSet<>();
halfContexts.addAll(leadingContextMap.values());
halfContexts.addAll(trailingContextMap.values());
- final Set<String> zeroContexts = new HashSet<String>();
+ final Set<String> zeroContexts = new HashSet<>();
zeroContexts.addAll(zeroContextMap.values());

this.fullContextAccumulator = new ContextAccumulator(fullContexts, expectedTandemReads);
this.halfContextAccumulator = new ContextAccumulator(halfContexts, expectedTandemReads);
this.zeroContextAccumulator = new ContextAccumulator(zeroContexts, expectedTandemReads);

// these will get populated in the final step
- preAdapterSummaryMetricsList = new ArrayList<PreAdapterSummaryMetrics>();
- preAdapterDetailMetricsList = new ArrayList<PreAdapterDetailMetrics>();
- baitBiasSummaryMetricsList = new ArrayList<BaitBiasSummaryMetrics>();
- baitBiasDetailMetricsList = new ArrayList<BaitBiasDetailMetrics>();
+ preAdapterSummaryMetricsList = new ArrayList<>();
+ preAdapterDetailMetricsList = new ArrayList<>();
+ baitBiasSummaryMetricsList = new ArrayList<>();
+ baitBiasDetailMetricsList = new ArrayList<>();
}

/**
@@ -118,7 +118,7 @@ public void finish() {
*
*/
private Map<Transition, SummaryPair> getSummaryMetrics() {
- final Map<Transition, SummaryPair> summaryMetricsMap = new HashMap<Transition, SummaryPair>();
+ final Map<Transition, SummaryPair> summaryMetricsMap = new HashMap<>();

// extract the detail metrics from each accumulator
final ListMap<Transition, DetailPair> fullMetrics = this.fullContextAccumulator.calculateMetrics(sampleAlias, library);
@@ -134,8 +134,8 @@ private Map<Transition, SummaryPair> getSummaryMetrics() {
}

// we want to report on leading / trailing contexts separately
- final List<DetailPair> leadingMetricsForTransition = new ArrayList<DetailPair>();
- final List<DetailPair> trailingMetricsForTransition = new ArrayList<DetailPair>();
+ final List<DetailPair> leadingMetricsForTransition = new ArrayList<>();
+ final List<DetailPair> trailingMetricsForTransition = new ArrayList<>();
for (final DetailPair metrics : halfMetrics.get(transition)) {
// first make sure they're the same context
if (!metrics.preAdapterMetrics.CONTEXT.equals(metrics.baitBiasMetrics.CONTEXT)) {

@@ -89,7 +89,7 @@ public class CollectSequencingArtifactMetrics extends SinglePassSamProgram {

@Argument(doc = "If specified, only print results for these contexts in the detail metrics output. " +
"However, the summary metrics output will still take all contexts into consideration.", optional = true)
- public Set<String> CONTEXTS_TO_PRINT = new HashSet<String>();
+ public Set<String> CONTEXTS_TO_PRINT = new HashSet<>();

private static final String UNKNOWN_LIBRARY = "UnknownLibrary";
private static final String UNKNOWN_SAMPLE = "UnknownSample";
@@ -103,13 +103,13 @@ public class CollectSequencingArtifactMetrics extends SinglePassSamProgram {
private DbSnpBitSetUtil dbSnpMask;
private SamRecordFilter recordFilter;

- private final Set<String> samples = new HashSet<String>();
- private final Set<String> libraries = new HashSet<String>();
- private final Map<String, ArtifactCounter> artifactCounters = new HashMap<String, ArtifactCounter>();
+ private final Set<String> samples = new HashSet<>();
+ private final Set<String> libraries = new HashSet<>();
+ private final Map<String, ArtifactCounter> artifactCounters = new HashMap<>();

@Override
protected String[] customCommandLineValidation() {
- final List<String> messages = new ArrayList<String>();
+ final List<String> messages = new ArrayList<>();

final int contextFullLength = 2 * CONTEXT_SIZE + 1;
if (CONTEXT_SIZE < 0) messages.add("CONTEXT_SIZE cannot be negative");
@@ -156,7 +156,7 @@ protected void setup(final SAMFileHeader header, final File samFile) {
}

// set record-level filters
- final List<SamRecordFilter> filters = new ArrayList<SamRecordFilter>();
+ final List<SamRecordFilter> filters = new ArrayList<>();
filters.add(new FailsVendorReadQualityFilter());
filters.add(new NotPrimaryAlignmentFilter());
filters.add(new DuplicateReadFilter());
@@ -169,7 +169,7 @@ protected void setup(final SAMFileHeader header, final File samFile) {
recordFilter = new AggregateFilter(filters);

// set up the artifact counters
- final String sampleAlias = StringUtil.join(",", new ArrayList<String>(samples));
+ final String sampleAlias = StringUtil.join(",", new ArrayList<>(samples));
for (final String library : libraries) {
artifactCounters.put(library, new ArtifactCounter(sampleAlias, library, CONTEXT_SIZE, TANDEM_READS));
}

@@ -20,9 +20,9 @@ class ContextAccumulator {

public ContextAccumulator(final Set<String> contexts, final boolean expectedTandemReads) {
this.expectedTandemReads = expectedTandemReads;
- this.artifactMap = new HashMap<Transition, Map<String, AlignmentAccumulator>>();
+ this.artifactMap = new HashMap<>();
for (final Transition transition : Transition.values()) {
- this.artifactMap.put(transition, new HashMap<String, AlignmentAccumulator>());
+ this.artifactMap.put(transition, new HashMap<>());
}
for (final String context : contexts) {
final char refBase = getCentralBase(context);
@@ -43,7 +43,7 @@ public void countRecord(final String refContext, final char calledBase, final SA
* Core method to compute detailed (i.e. context-by-context) metrics from this accumulator.
*/
public ListMap<Transition, DetailPair> calculateMetrics(final String sampleAlias, final String library) {
- final ListMap<Transition, DetailPair> detailMetricsMap = new ListMap<Transition, DetailPair>();
+ final ListMap<Transition, DetailPair> detailMetricsMap = new ListMap<>();
for (final Transition altTransition : Transition.altValues()) {
final Transition refTransition = altTransition.matchingRef();
for (final String context : this.artifactMap.get(altTransition).keySet()) {