diff --git a/src/main/java/org/broadinstitute/hellbender/metrics/MultiLevelCollector.java b/src/main/java/org/broadinstitute/hellbender/metrics/MultiLevelCollector.java index 2bfaf44783a..76a94b847f9 100644 --- a/src/main/java/org/broadinstitute/hellbender/metrics/MultiLevelCollector.java +++ b/src/main/java/org/broadinstitute/hellbender/metrics/MultiLevelCollector.java @@ -98,7 +98,7 @@ private abstract class Distributor { protected abstract PerUnitMetricCollector makeUnknownCollector(); public Distributor(final List rgRecs) { - collectors = new LinkedHashMap>(); + collectors = new LinkedHashMap<>(); for(final SAMReadGroupRecord rg : rgRecs) { final String key = getKey(rg); if(!collectors.containsKey(key)) { @@ -150,7 +150,7 @@ public void addToFile(final MetricsFile file) { private class AllReadsDistributor extends Distributor { public AllReadsDistributor(final List rgRecs) { - super(new ArrayList()); + super(new ArrayList<>()); makeCollector(null); } @@ -258,7 +258,7 @@ protected PerUnitMetricCollector makeUnknow * readGroups found in the records depending on the accumulationLevels provided */ protected void setup(final Set accumulationLevels, final List samRgRecords) { - outputOrderedDistributors = new ArrayList(4); + outputOrderedDistributors = new ArrayList<>(4); if(accumulationLevels.contains(MetricAccumulationLevel.ALL_READS)) { outputOrderedDistributors.add(new AllReadsDistributor(samRgRecords)); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/ClipReads.java b/src/main/java/org/broadinstitute/hellbender/tools/ClipReads.java index ca0d3e7f4e1..3ed7346b47f 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/ClipReads.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/ClipReads.java @@ -198,7 +198,7 @@ public final class ClipReads extends ReadWalker { /** * List of sequence that should be clipped from the reads */ - private List sequencesToClip = new ArrayList(); + private List sequencesToClip = new ArrayList<>(); /** * 
List of cycle start / stop pairs (0-based, stop is included in the cycle to remove) to clip from the reads @@ -259,7 +259,7 @@ public void onTraversalStart() { // Initialize the cycle ranges to clip // if (cyclesToClipArg != null) { - cyclesToClip = new ArrayList>(); + cyclesToClip = new ArrayList<>(); for (String range : cyclesToClipArg.split(",")) { try { String[] elts = range.split("-"); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/AlignmentSummaryMetricsCollector.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/AlignmentSummaryMetricsCollector.java index e66bcc3212d..16c53fd5ff7 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/AlignmentSummaryMetricsCollector.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/AlignmentSummaryMetricsCollector.java @@ -73,7 +73,7 @@ public void acceptRecord(final SAMRecord rec, final ReferenceSequence ref) { /** Converts the supplied adapter sequences to byte arrays in both fwd and rc. 
*/ private byte [][] prepareAdapterSequences() { - final Set kmers = new HashSet(); + final Set kmers = new HashSet<>(); // Make a set of all kmers of adapterMatchLength for (final String seq : adapterSequence) { @@ -194,7 +194,7 @@ public void addMetricsToFile(final MetricsFile readLengthHistogram = new Histogram(); + private final Histogram readLengthHistogram = new Histogram<>(); private AlignmentSummaryMetrics metrics; private long chimeras; private long chimerasDenominator; @@ -203,9 +203,9 @@ private class IndividualAlignmentSummaryMetricsCollector { private long nonBisulfiteAlignedBases = 0; private long hqNonBisulfiteAlignedBases = 0; - private final Histogram mismatchHistogram = new Histogram(); - private final Histogram hqMismatchHistogram = new Histogram(); - private final Histogram badCycleHistogram = new Histogram(); + private final Histogram mismatchHistogram = new Histogram<>(); + private final Histogram hqMismatchHistogram = new Histogram<>(); + private final Histogram badCycleHistogram = new Histogram<>(); public IndividualAlignmentSummaryMetricsCollector(final AlignmentSummaryMetrics.Category pairingCategory, final String sample, diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectMultipleMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectMultipleMetrics.java index a6f085d525e..bf70b65f5f7 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectMultipleMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectMultipleMetrics.java @@ -103,7 +103,7 @@ public SinglePassSamProgram makeInstance(final String outbase) { @Override protected String[] customCommandLineValidation() { - programsToRun = new ArrayList(PROGRAM); + programsToRun = new ArrayList<>(PROGRAM); return super.customCommandLineValidation(); } @@ -121,8 +121,8 @@ public Object doWork() { OUTPUT = OUTPUT.substring(0, OUTPUT.length() - 1); } - final List 
programs = new ArrayList(); - for (ProgramInterface program : new HashSet(programsToRun)) { + final List programs = new ArrayList<>(); + for (ProgramInterface program : new HashSet<>(programsToRun)) { SinglePassSamProgram instance = program.makeInstance(OUTPUT); // Generally programs should not be accessing these directly but it might make things smoother diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectOxoGMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectOxoGMetrics.java index ee0f9e8ae44..9891444970a 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectOxoGMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectOxoGMetrics.java @@ -83,7 +83,7 @@ public class CollectOxoGMetrics extends PicardCommandLineProgram { public int CONTEXT_SIZE = 1; @Argument(doc = "The optional set of sequence contexts to restrict analysis to. If not supplied all contexts are analyzed.") - public Set CONTEXTS = new HashSet(); + public Set CONTEXTS = new HashSet<>(); @Argument(doc = "For debugging purposes: stop after visiting this many sites with at least 1X coverage.") public int STOP_AFTER = Integer.MAX_VALUE; @@ -159,7 +159,7 @@ public static final class CpcgMetrics extends MetricBase { @Override protected String[] customCommandLineValidation() { final int size = 1 + 2 * CONTEXT_SIZE; - final List messages = new ArrayList(); + final List messages = new ArrayList<>(); for (final String ctx : CONTEXTS) { if (ctx.length() != size) { @@ -188,8 +188,8 @@ protected Object doWork() { final ReferenceSequenceFileWalker refWalker = new ReferenceSequenceFileWalker(REFERENCE_SEQUENCE); final SamReader in = SamReaderFactory.makeDefault().open(INPUT); - final Set samples = new HashSet(); - final Set libraries = new HashSet(); + final Set samples = new HashSet<>(); + final Set libraries = new HashSet<>(); for (final SAMReadGroupRecord rec : 
in.getFileHeader().getReadGroups()) { samples.add(getOrElse(rec.getSample(), UNKNOWN_SAMPLE)); libraries.add(getOrElse(rec.getLibrary(), UNKNOWN_LIBRARY)); @@ -197,7 +197,7 @@ protected Object doWork() { // Setup the calculators final Set contexts = CONTEXTS.isEmpty() ? makeContextStrings(CONTEXT_SIZE) : CONTEXTS; - final ListMap calculators = new ListMap(); + final ListMap calculators = new ListMap<>(); for (final String context : contexts) { for (final String library : libraries) { calculators.add(context, new Calculator(library, context)); @@ -221,7 +221,7 @@ protected Object doWork() { iterator.setEmitUncoveredLoci(false); iterator.setMappingQualityScoreCutoff(MINIMUM_MAPPING_QUALITY); - final List filters = new ArrayList(); + final List filters = new ArrayList<>(); filters.add(new NotPrimaryAlignmentFilter()); filters.add(new DuplicateReadFilter()); if (MINIMUM_INSERT_SIZE > 0 || MAXIMUM_INSERT_SIZE > 0) { @@ -275,7 +275,7 @@ protected Object doWork() { for (final List calcs : calculators.values()) { for (final Calculator calc : calcs) { final CpcgMetrics m = calc.finish(); - m.SAMPLE_ALIAS = StringUtil.join(",", new ArrayList(samples)); + m.SAMPLE_ALIAS = StringUtil.join(",", new ArrayList<>(samples)); file.addMetric(m); } } @@ -286,7 +286,7 @@ protected Object doWork() { } private Set makeContextStrings(final int contextSize) { - final Set contexts = new HashSet(); + final Set contexts = new HashSet<>(); for (final byte[] kmer : generateAllKmers(2 * contextSize + 1)) { if (kmer[contextSize] == 'C') { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/InsertSizeMetricsCollector.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/InsertSizeMetricsCollector.java index c6cd084d5a3..915c05cb17f 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/InsertSizeMetricsCollector.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/InsertSizeMetricsCollector.java @@ -70,7 
+70,7 @@ public void acceptRecord(final SAMRecord record, final ReferenceSequence refSeq) /** A Collector for individual InsertSizeMetrics for a given SAMPLE or SAMPLE/LIBRARY or SAMPLE/LIBRARY/READ_GROUP (depending on aggregation levels) */ public class PerUnitInsertSizeMetricsCollector implements PerUnitMetricCollector { - final EnumMap> Histograms = new EnumMap>(SamPairUtil.PairOrientation.class); + final EnumMap> Histograms = new EnumMap<>(SamPairUtil.PairOrientation.class); final String sample; final String library; final String readGroup; @@ -93,9 +93,9 @@ else if (this.sample != null) { else { prefix = "All_Reads."; } - Histograms.put(SamPairUtil.PairOrientation.FR, new Histogram("insert_size", prefix + "fr_count")); - Histograms.put(SamPairUtil.PairOrientation.TANDEM, new Histogram("insert_size", prefix + "tandem_count")); - Histograms.put(SamPairUtil.PairOrientation.RF, new Histogram("insert_size", prefix + "rf_count")); + Histograms.put(SamPairUtil.PairOrientation.FR, new Histogram<>("insert_size", prefix + "fr_count")); + Histograms.put(SamPairUtil.PairOrientation.TANDEM, new Histogram<>("insert_size", prefix + "tandem_count")); + Histograms.put(SamPairUtil.PairOrientation.RF, new Histogram<>("insert_size", prefix + "rf_count")); } public void acceptRecord(final InsertSizeCollectorArgs args) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/MeanQualityByCycle.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/MeanQualityByCycle.java index 56a03523a70..0338a8cc631 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/MeanQualityByCycle.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/MeanQualityByCycle.java @@ -100,7 +100,7 @@ private void ensureArraysBigEnough(final int length) { Histogram getMeanQualityHistogram() { final String label = useOriginalQualities ? 
"MEAN_ORIGINAL_QUALITY" : "MEAN_QUALITY"; - final Histogram meanQualities = new Histogram("CYCLE", label); + final Histogram meanQualities = new Histogram<>("CYCLE", label); int firstReadLength = 0; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/QualityScoreDistribution.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/QualityScoreDistribution.java index 2e9c19523cc..326b2592d29 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/QualityScoreDistribution.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/QualityScoreDistribution.java @@ -92,8 +92,8 @@ protected void acceptRead(final SAMRecord rec, final ReferenceSequence ref) { @Override protected void finish() { // Built the Histograms out of the long[]s - final Histogram qHisto = new Histogram("QUALITY", "COUNT_OF_Q"); - final Histogram oqHisto = new Histogram("QUALITY", "COUNT_OF_OQ"); + final Histogram qHisto = new Histogram<>("QUALITY", "COUNT_OF_Q"); + final Histogram oqHisto = new Histogram<>("QUALITY", "COUNT_OF_OQ"); for (int i=0; i< qCounts.length; ++i) { if (qCounts[i] > 0) qHisto.increment( (byte) i, (double) qCounts[i]); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ArtifactCounter.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ArtifactCounter.java index 477d0d1f961..0963f163cb8 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ArtifactCounter.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ArtifactCounter.java @@ -36,7 +36,7 @@ public ArtifactCounter(final String sampleAlias, final String library, final int this.library = library; // define the contexts - this.fullContexts = new HashSet(); + this.fullContexts = new HashSet<>(); for (final byte[] kmer : SequenceUtil.generateAllKmers(2 * contextSize + 1)) { 
this.fullContexts.add(StringUtil.bytesToString(kmer)); } @@ -45,9 +45,9 @@ public ArtifactCounter(final String sampleAlias, final String library, final int // NB: we use N to represent a wildcard base, rather than an ambiguous base. It's assumed that all of the input // contexts are unambiguous, and that any actual N's in the data have been dealt with elsewhere. final String padding = StringUtil.repeatCharNTimes('N', contextSize); - this.leadingContextMap = new HashMap(); - this.trailingContextMap = new HashMap(); - this.zeroContextMap = new HashMap(); + this.leadingContextMap = new HashMap<>(); + this.trailingContextMap = new HashMap<>(); + this.zeroContextMap = new HashMap<>(); for (final String context : this.fullContexts) { final String leading = context.substring(0, contextSize); final String trailing = context.substring(contextSize + 1, context.length()); @@ -58,10 +58,10 @@ public ArtifactCounter(final String sampleAlias, final String library, final int } // set up the accumulators - final Set halfContexts = new HashSet(); + final Set halfContexts = new HashSet<>(); halfContexts.addAll(leadingContextMap.values()); halfContexts.addAll(trailingContextMap.values()); - final Set zeroContexts = new HashSet(); + final Set zeroContexts = new HashSet<>(); zeroContexts.addAll(zeroContextMap.values()); this.fullContextAccumulator = new ContextAccumulator(fullContexts, expectedTandemReads); @@ -69,10 +69,10 @@ public ArtifactCounter(final String sampleAlias, final String library, final int this.zeroContextAccumulator = new ContextAccumulator(zeroContexts, expectedTandemReads); // these will get populated in the final step - preAdapterSummaryMetricsList = new ArrayList(); - preAdapterDetailMetricsList = new ArrayList(); - baitBiasSummaryMetricsList = new ArrayList(); - baitBiasDetailMetricsList = new ArrayList(); + preAdapterSummaryMetricsList = new ArrayList<>(); + preAdapterDetailMetricsList = new ArrayList<>(); + baitBiasSummaryMetricsList = new ArrayList<>(); + 
baitBiasDetailMetricsList = new ArrayList<>(); } /** @@ -118,7 +118,7 @@ public void finish() { * */ private Map getSummaryMetrics() { - final Map summaryMetricsMap = new HashMap(); + final Map summaryMetricsMap = new HashMap<>(); // extract the detail metrics from each accumulator final ListMap fullMetrics = this.fullContextAccumulator.calculateMetrics(sampleAlias, library); @@ -134,8 +134,8 @@ private Map getSummaryMetrics() { } // we want to report on leading / trailing contexts separately - final List leadingMetricsForTransition = new ArrayList(); - final List trailingMetricsForTransition = new ArrayList(); + final List leadingMetricsForTransition = new ArrayList<>(); + final List trailingMetricsForTransition = new ArrayList<>(); for (final DetailPair metrics : halfMetrics.get(transition)) { // first make sure they're the same context if (!metrics.preAdapterMetrics.CONTEXT.equals(metrics.baitBiasMetrics.CONTEXT)) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/CollectSequencingArtifactMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/CollectSequencingArtifactMetrics.java index ead75aae9c3..42b1e9b08d7 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/CollectSequencingArtifactMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/CollectSequencingArtifactMetrics.java @@ -89,7 +89,7 @@ public class CollectSequencingArtifactMetrics extends SinglePassSamProgram { @Argument(doc = "If specified, only print results for these contexts in the detail metrics output. 
" + "However, the summary metrics output will still take all contexts into consideration.", optional = true) - public Set CONTEXTS_TO_PRINT = new HashSet(); + public Set CONTEXTS_TO_PRINT = new HashSet<>(); private static final String UNKNOWN_LIBRARY = "UnknownLibrary"; private static final String UNKNOWN_SAMPLE = "UnknownSample"; @@ -103,13 +103,13 @@ public class CollectSequencingArtifactMetrics extends SinglePassSamProgram { private DbSnpBitSetUtil dbSnpMask; private SamRecordFilter recordFilter; - private final Set samples = new HashSet(); - private final Set libraries = new HashSet(); - private final Map artifactCounters = new HashMap(); + private final Set samples = new HashSet<>(); + private final Set libraries = new HashSet<>(); + private final Map artifactCounters = new HashMap<>(); @Override protected String[] customCommandLineValidation() { - final List messages = new ArrayList(); + final List messages = new ArrayList<>(); final int contextFullLength = 2 * CONTEXT_SIZE + 1; if (CONTEXT_SIZE < 0) messages.add("CONTEXT_SIZE cannot be negative"); @@ -156,7 +156,7 @@ protected void setup(final SAMFileHeader header, final File samFile) { } // set record-level filters - final List filters = new ArrayList(); + final List filters = new ArrayList<>(); filters.add(new FailsVendorReadQualityFilter()); filters.add(new NotPrimaryAlignmentFilter()); filters.add(new DuplicateReadFilter()); @@ -169,7 +169,7 @@ protected void setup(final SAMFileHeader header, final File samFile) { recordFilter = new AggregateFilter(filters); // set up the artifact counters - final String sampleAlias = StringUtil.join(",", new ArrayList(samples)); + final String sampleAlias = StringUtil.join(",", new ArrayList<>(samples)); for (final String library : libraries) { artifactCounters.put(library, new ArtifactCounter(sampleAlias, library, CONTEXT_SIZE, TANDEM_READS)); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ContextAccumulator.java 
b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ContextAccumulator.java index 9420f68186e..dfd63f31f82 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ContextAccumulator.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ContextAccumulator.java @@ -20,9 +20,9 @@ class ContextAccumulator { public ContextAccumulator(final Set contexts, final boolean expectedTandemReads) { this.expectedTandemReads = expectedTandemReads; - this.artifactMap = new HashMap>(); + this.artifactMap = new HashMap<>(); for (final Transition transition : Transition.values()) { - this.artifactMap.put(transition, new HashMap()); + this.artifactMap.put(transition, new HashMap<>()); } for (final String context : contexts) { final char refBase = getCentralBase(context); @@ -43,7 +43,7 @@ public void countRecord(final String refContext, final char calledBase, final SA * Core method to compute detailed (i.e. context-by-context) metrics from this accumulator. 
*/ public ListMap calculateMetrics(final String sampleAlias, final String library) { - final ListMap detailMetricsMap = new ListMap(); + final ListMap detailMetricsMap = new ListMap<>(); for (final Transition altTransition : Transition.altValues()) { final Transition refTransition = altTransition.matchingRef(); for (final String context : this.artifactMap.get(altTransition).keySet()) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ConvertSequencingArtifactToOxoG.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ConvertSequencingArtifactToOxoG.java index 9bea482f8bd..f22b9647831 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ConvertSequencingArtifactToOxoG.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/artifacts/ConvertSequencingArtifactToOxoG.java @@ -53,8 +53,8 @@ protected Object doWork() { * Determine output fields. Just copy these from the input for now. */ final String oxogSampleAlias = preAdapterDetailMetricsList.get(0).SAMPLE_ALIAS; - final Set oxogLibraries = new HashSet(); - final Set oxogContexts = new HashSet(); + final Set oxogLibraries = new HashSet<>(); + final Set oxogContexts = new HashSet<>(); for (final PreAdapterDetailMetrics preAdapter : preAdapterDetailMetricsList) { oxogLibraries.add(preAdapter.LIBRARY); // Remember that OxoG only reports on the 'C' contexts @@ -68,11 +68,11 @@ protected Object doWork() { * Remember, we only care about two transitions - C>A and G>T! Thus, for each context we * will only store one metric. 
*/ - final Map> preAdapterDetailMetricsMap = new HashMap>(); - final Map> baitBiasDetailMetricsMap = new HashMap>(); + final Map> preAdapterDetailMetricsMap = new HashMap<>(); + final Map> baitBiasDetailMetricsMap = new HashMap<>(); for (final String library : oxogLibraries) { - final Map contextsToPreAdapter = new HashMap(); - final Map contextsToBaitBias = new HashMap(); + final Map contextsToPreAdapter = new HashMap<>(); + final Map contextsToBaitBias = new HashMap<>(); preAdapterDetailMetricsMap.put(library, contextsToPreAdapter); baitBiasDetailMetricsMap.put(library, contextsToBaitBias); } @@ -92,7 +92,7 @@ protected Object doWork() { /** * Create the OxoG metrics */ - final List oxogMetrics = new ArrayList(); + final List oxogMetrics = new ArrayList<>(); for (final String library : oxogLibraries) { for (final String context : oxogContexts) { final CpcgMetrics m = new CpcgMetrics(); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CalculateHsMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CalculateHsMetrics.java index 7998e9be5ff..adb308df769 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CalculateHsMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CalculateHsMetrics.java @@ -45,7 +45,7 @@ protected String getProbeSetName() { if (BAIT_SET_NAME != null) { return BAIT_SET_NAME; } else { - final SortedSet baitSetNames = new TreeSet(); + final SortedSet baitSetNames = new TreeSet<>(); for (final File file : BAIT_INTERVALS) { baitSetNames.add(CollectTargetedMetrics.renderProbeNameFromFile(file)); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectJumpingLibraryMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectJumpingLibraryMetrics.java index 064a3ec79ff..a700a1bc2c1 100644 --- 
a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectJumpingLibraryMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectJumpingLibraryMetrics.java @@ -1 +1 @@ -package org.broadinstitute.hellbender.tools.picard.analysis.directed; import htsjdk.samtools.SAMFileHeader; import htsjdk.samtools.SAMRecord; import htsjdk.samtools.SAMTag; import htsjdk.samtools.SamPairUtil; import htsjdk.samtools.SamPairUtil.PairOrientation; import htsjdk.samtools.SamReader; import htsjdk.samtools.SamReaderFactory; import htsjdk.samtools.metrics.MetricsFile; import htsjdk.samtools.util.CloserUtil; import htsjdk.samtools.util.Histogram; import htsjdk.samtools.util.IOUtil; import org.broadinstitute.hellbender.cmdline.Argument; import org.broadinstitute.hellbender.cmdline.CommandLineProgramProperties; import org.broadinstitute.hellbender.cmdline.PicardCommandLineProgram; import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions; import org.broadinstitute.hellbender.cmdline.programgroups.QCProgramGroup; import org.broadinstitute.hellbender.exceptions.UserException; import org.broadinstitute.hellbender.utils.read.markduplicates.DuplicationMetrics; import java.io.File; import java.util.*; /** * Command-line program to compute metrics about outward-facing pairs, inward-facing * pairs, and chimeras in a jumping library. * * @author ktibbett@broadinstitute.org */ @CommandLineProgramProperties( usage = "Computes jumping library metrics. Gets all data for computation from the first" + "read in each pair and assumes that the MQ tag is set with the mate's mapping quality. 
If the " + "MQ tag is not set, then the program assumes that the mate's mapping quality is >= MINIMUM_MAPPING_QUALITY", usageShort = "Produces jumping library metrics for the provided SAM/BAMs", programGroup = QCProgramGroup.class ) public class CollectJumpingLibraryMetrics extends PicardCommandLineProgram { // Usage and parameters @Argument(shortName = StandardArgumentDefinitions.INPUT_SHORT_NAME, doc = "BAM file(s) of reads with duplicates marked") public List INPUT = new ArrayList(); @Argument(shortName = StandardArgumentDefinitions.OUTPUT_SHORT_NAME, doc = "File to which metrics should be written") public File OUTPUT; @Argument(shortName = StandardArgumentDefinitions.MINIMUM_MAPPING_QUALITY_SHORT_NAME, doc = "Mapping quality minimum cutoff") public Integer MINIMUM_MAPPING_QUALITY = 0; @Argument(shortName = "T", doc = "When calculating mean and stdev stop when the bins in the tail of the distribution " + "contain fewer than mode/TAIL_LIMIT items") public int TAIL_LIMIT = 10000; @Argument(doc = "Jumps greater than or equal to the greater of this value or 2 times the mode of the " + "outward-facing pairs are considered chimeras") public int CHIMERA_KB_MIN = 100000; private static final int SAMPLE_FOR_MODE = 50000; // How many outward-facing pairs to sample to determine the mode /** * Calculates the detailed statistics about the jumping library and then generates the results. 
*/ protected Object doWork() { for (File f : INPUT) { IOUtil.assertFileIsReadable(f); } IOUtil.assertFileIsWritable(OUTPUT); Histogram innieHistogram = new Histogram(); Histogram outieHistogram = new Histogram(); int fragments = 0; int innies = 0; int outies = 0; int innieDupes = 0; int outieDupes = 0; int crossChromPairs = 0; int superSized = 0; int tandemPairs = 0; double chimeraSizeMinimum = Math.max(getOutieMode(), (double) CHIMERA_KB_MIN); for (File f : INPUT) { SamReader reader = SamReaderFactory.makeDefault().open(f); if (reader.getFileHeader().getSortOrder() != SAMFileHeader.SortOrder.coordinate) { throw new UserException("SAM file must " + f.getName() + " must be sorted in coordinate order"); } for (SAMRecord sam : reader) { // We're getting all our info from the first of each pair. if (!sam.getFirstOfPairFlag()) { continue; } // Ignore unmapped read pairs if (sam.getReadUnmappedFlag()) { if (!sam.getMateUnmappedFlag()) { fragments++; continue; } // If both ends are unmapped and we've hit unaligned reads we're done if (sam.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) { break; } continue; } if (sam.getMateUnmappedFlag()) { fragments++; continue; } // Ignore low-quality reads. 
If we don't have the mate mapping quality, assume it's OK if ((sam.getAttribute(SAMTag.MQ.name()) != null && sam.getIntegerAttribute(SAMTag.MQ.name()) < MINIMUM_MAPPING_QUALITY) || sam.getMappingQuality() < MINIMUM_MAPPING_QUALITY) { continue; } final int absInsertSize = Math.abs(sam.getInferredInsertSize()); if (absInsertSize > chimeraSizeMinimum) { superSized++; } else if (sam.getMateNegativeStrandFlag() == sam.getReadNegativeStrandFlag()) { tandemPairs++; } else if (!sam.getMateReferenceIndex().equals(sam.getReferenceIndex())) { crossChromPairs++; } else { final PairOrientation pairOrientation = SamPairUtil.getPairOrientation(sam); if (pairOrientation == PairOrientation.RF) { outieHistogram.increment(absInsertSize); outies++; if (sam.getDuplicateReadFlag()) { outieDupes++; } } else if (pairOrientation == PairOrientation.FR) { innieHistogram.increment(absInsertSize); innies++; if (sam.getDuplicateReadFlag()) { innieDupes++; } } else { throw new IllegalStateException("This should never happen"); } } } CloserUtil.close(reader); } MetricsFile metricsFile = getMetricsFile(); JumpingLibraryMetrics metrics = new JumpingLibraryMetrics(); metrics.JUMP_PAIRS = outies; metrics.JUMP_DUPLICATE_PAIRS = outieDupes; metrics.JUMP_DUPLICATE_PCT = outies != 0 ? outieDupes / (double) outies : 0; metrics.JUMP_LIBRARY_SIZE = (outies > 0 && outieDupes > 0) ? DuplicationMetrics.estimateLibrarySize(outies, outies - outieDupes) : 0; outieHistogram.trimByTailLimit(TAIL_LIMIT); metrics.JUMP_MEAN_INSERT_SIZE = outieHistogram.getMean(); metrics.JUMP_STDEV_INSERT_SIZE = outieHistogram.getStandardDeviation(); metrics.NONJUMP_PAIRS = innies; metrics.NONJUMP_DUPLICATE_PAIRS = innieDupes; metrics.NONJUMP_DUPLICATE_PCT = innies != 0 ? innieDupes / (double) innies : 0; metrics.NONJUMP_LIBRARY_SIZE = (innies > 0 && innieDupes > 0) ? 
package org.broadinstitute.hellbender.tools.picard.analysis.directed;

import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMTag;
import htsjdk.samtools.SamPairUtil;
import htsjdk.samtools.SamPairUtil.PairOrientation;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;
import htsjdk.samtools.metrics.MetricsFile;
import htsjdk.samtools.util.CloserUtil;
import htsjdk.samtools.util.Histogram;
import htsjdk.samtools.util.IOUtil;
import org.broadinstitute.hellbender.cmdline.Argument;
import org.broadinstitute.hellbender.cmdline.CommandLineProgramProperties;
import org.broadinstitute.hellbender.cmdline.PicardCommandLineProgram;
import org.broadinstitute.hellbender.cmdline.StandardArgumentDefinitions;
import org.broadinstitute.hellbender.cmdline.programgroups.QCProgramGroup;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.read.markduplicates.DuplicationMetrics;

import java.io.File;
import java.util.*;

/**
 * Command-line program to compute metrics about outward-facing pairs, inward-facing
 * pairs, and chimeras in a jumping library.
 *
 * @author ktibbett@broadinstitute.org
 */
@CommandLineProgramProperties(
        usage = "Computes jumping library metrics. Gets all data for computation from the first " +
                "read in each pair and assumes that the MQ tag is set with the mate's mapping quality. If the " +
                "MQ tag is not set, then the program assumes that the mate's mapping quality is >= MINIMUM_MAPPING_QUALITY",
        usageShort = "Produces jumping library metrics for the provided SAM/BAMs",
        programGroup = QCProgramGroup.class
)
public class CollectJumpingLibraryMetrics extends PicardCommandLineProgram {
    // Usage and parameters

    @Argument(shortName = StandardArgumentDefinitions.INPUT_SHORT_NAME, doc = "BAM file(s) of reads with duplicates marked")
    public List<File> INPUT = new ArrayList<>();

    @Argument(shortName = StandardArgumentDefinitions.OUTPUT_SHORT_NAME, doc = "File to which metrics should be written")
    public File OUTPUT;

    @Argument(shortName = StandardArgumentDefinitions.MINIMUM_MAPPING_QUALITY_SHORT_NAME, doc = "Mapping quality minimum cutoff")
    public Integer MINIMUM_MAPPING_QUALITY = 0;

    @Argument(shortName = "T", doc = "When calculating mean and stdev stop when the bins in the tail of the distribution " +
            "contain fewer than mode/TAIL_LIMIT items")
    public int TAIL_LIMIT = 10000;

    @Argument(doc = "Jumps greater than or equal to the greater of this value or 2 times the mode of the " +
            "outward-facing pairs are considered chimeras")
    public int CHIMERA_KB_MIN = 100000;

    // How many outward-facing pairs to sample to determine the mode
    private static final int SAMPLE_FOR_MODE = 50000;

    /**
     * Calculates the detailed statistics about the jumping library and then generates the results.
     *
     * Iterates every coordinate-sorted INPUT once (after a sampling pre-pass in
     * {@link #getOutieMode()}), classifying each pair — using only the first read of the
     * pair — as jump (RF), non-jump (FR), chimeric (cross-chromosome, tandem, or
     * super-sized insert) or fragment (one end unmapped), then writes a single
     * {@link JumpingLibraryMetrics} row to OUTPUT.
     *
     * @return null (metrics are written to OUTPUT as a side effect)
     * @throws UserException if an input SAM/BAM is not coordinate sorted
     */
    @Override
    protected Object doWork() {
        for (File f : INPUT) {
            IOUtil.assertFileIsReadable(f);
        }
        IOUtil.assertFileIsWritable(OUTPUT);

        Histogram<Integer> innieHistogram = new Histogram<>();
        Histogram<Integer> outieHistogram = new Histogram<>();

        int fragments = 0;
        int innies = 0;
        int outies = 0;
        int innieDupes = 0;
        int outieDupes = 0;
        int crossChromPairs = 0;
        int superSized = 0;
        int tandemPairs = 0;

        // Inserts at or above this size are counted as chimeric "super-sized" pairs.
        // NOTE(review): the CHIMERA_KB_MIN doc says "2 times the mode" but the code takes
        // max(mode, CHIMERA_KB_MIN) and compares with '>' — preserved as-is; confirm intent upstream.
        double chimeraSizeMinimum = Math.max(getOutieMode(), (double) CHIMERA_KB_MIN);

        for (File f : INPUT) {
            SamReader reader = SamReaderFactory.makeDefault().open(f);

            if (reader.getFileHeader().getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
                throw new UserException("SAM file " + f.getName() + " must be sorted in coordinate order");
            }

            for (SAMRecord sam : reader) {
                // We're getting all our info from the first of each pair.
                if (!sam.getFirstOfPairFlag()) {
                    continue;
                }

                // Ignore unmapped read pairs
                if (sam.getReadUnmappedFlag()) {
                    if (!sam.getMateUnmappedFlag()) {
                        fragments++;
                        continue;
                    }

                    // If both ends are unmapped and we've hit unaligned reads we're done
                    if (sam.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
                        break;
                    }
                    continue;
                }

                if (sam.getMateUnmappedFlag()) {
                    fragments++;
                    continue;
                }

                // Ignore low-quality reads.  If we don't have the mate mapping quality, assume it's OK
                if ((sam.getAttribute(SAMTag.MQ.name()) != null &&
                        sam.getIntegerAttribute(SAMTag.MQ.name()) < MINIMUM_MAPPING_QUALITY) ||
                        sam.getMappingQuality() < MINIMUM_MAPPING_QUALITY) {
                    continue;
                }

                final int absInsertSize = Math.abs(sam.getInferredInsertSize());
                if (absInsertSize > chimeraSizeMinimum) {
                    superSized++;
                } else if (sam.getMateNegativeStrandFlag() == sam.getReadNegativeStrandFlag()) {
                    tandemPairs++;
                } else if (!sam.getMateReferenceIndex().equals(sam.getReferenceIndex())) {
                    crossChromPairs++;
                } else {
                    final PairOrientation pairOrientation = SamPairUtil.getPairOrientation(sam);
                    if (pairOrientation == PairOrientation.RF) {
                        outieHistogram.increment(absInsertSize);
                        outies++;
                        if (sam.getDuplicateReadFlag()) {
                            outieDupes++;
                        }
                    } else if (pairOrientation == PairOrientation.FR) {
                        innieHistogram.increment(absInsertSize);
                        innies++;
                        if (sam.getDuplicateReadFlag()) {
                            innieDupes++;
                        }
                    } else {
                        // Every mapped, non-tandem, same-reference pair must be RF or FR.
                        throw new IllegalStateException("This should never happen");
                    }
                }
            }
            CloserUtil.close(reader);
        }

        MetricsFile<JumpingLibraryMetrics, Integer> metricsFile = getMetricsFile();
        JumpingLibraryMetrics metrics = new JumpingLibraryMetrics();
        metrics.JUMP_PAIRS = outies;
        metrics.JUMP_DUPLICATE_PAIRS = outieDupes;
        metrics.JUMP_DUPLICATE_PCT = outies != 0 ? outieDupes / (double) outies : 0;
        metrics.JUMP_LIBRARY_SIZE = (outies > 0 && outieDupes > 0) ?
                DuplicationMetrics.estimateLibrarySize(outies, outies - outieDupes) : 0;
        outieHistogram.trimByTailLimit(TAIL_LIMIT);
        metrics.JUMP_MEAN_INSERT_SIZE = outieHistogram.getMean();
        metrics.JUMP_STDEV_INSERT_SIZE = outieHistogram.getStandardDeviation();
        metrics.NONJUMP_PAIRS = innies;
        metrics.NONJUMP_DUPLICATE_PAIRS = innieDupes;
        metrics.NONJUMP_DUPLICATE_PCT = innies != 0 ? innieDupes / (double) innies : 0;
        metrics.NONJUMP_LIBRARY_SIZE = (innies > 0 && innieDupes > 0) ?
                DuplicationMetrics.estimateLibrarySize(innies, innies - innieDupes) : 0;
        innieHistogram.trimByTailLimit(TAIL_LIMIT);
        metrics.NONJUMP_MEAN_INSERT_SIZE = innieHistogram.getMean();
        metrics.NONJUMP_STDEV_INSERT_SIZE = innieHistogram.getStandardDeviation();
        metrics.CHIMERIC_PAIRS = crossChromPairs + superSized + tandemPairs;
        metrics.FRAGMENTS = fragments;
        double totalPairs = outies + innies + metrics.CHIMERIC_PAIRS;
        metrics.PCT_JUMPS = totalPairs != 0 ? outies / totalPairs : 0;
        metrics.PCT_NONJUMPS = totalPairs != 0 ? innies / totalPairs : 0;
        metrics.PCT_CHIMERAS = totalPairs != 0 ? metrics.CHIMERIC_PAIRS / totalPairs : 0;

        metricsFile.addMetric(metrics);
        metricsFile.write(OUTPUT);

        return null;
    }

    /**
     * Calculates the mode for outward-facing pairs, using the first SAMPLE_FOR_MODE
     * outward-facing pairs found in INPUT.
     *
     * Samples up to SAMPLE_FOR_MODE / INPUT.size() qualifying RF pairs per file and
     * returns the mode of their absolute insert sizes, or 0 if none qualified.
     * Only the first read of each mapped, quality-passing, opposite-strand,
     * same-reference pair is considered.
     *
     * @return the insert-size mode of the sampled outward-facing pairs, or 0
     */
    private double getOutieMode() {
        int samplePerFile = SAMPLE_FOR_MODE / INPUT.size();

        Histogram<Integer> histo = new Histogram<>();

        for (File f : INPUT) {
            SamReader reader = SamReaderFactory.makeDefault().open(f);
            int sampled = 0;
            for (Iterator<SAMRecord> it = reader.iterator(); it.hasNext() && sampled < samplePerFile; ) {
                SAMRecord sam = it.next();
                if (!sam.getFirstOfPairFlag()) {
                    continue;
                }
                // If we get here we've hit the end of the aligned reads
                if (sam.getReadUnmappedFlag() && sam.getReferenceIndex() == SAMRecord.NO_ALIGNMENT_REFERENCE_INDEX) {
                    break;
                } else if (sam.getReadUnmappedFlag() || sam.getMateUnmappedFlag()) {
                    continue;
                } else {
                    // MQ tag absent is treated as passing, mirroring doWork()'s quality filter.
                    if ((sam.getAttribute(SAMTag.MQ.name()) == null ||
                            sam.getIntegerAttribute(SAMTag.MQ.name()) >= MINIMUM_MAPPING_QUALITY) &&
                            sam.getMappingQuality() >= MINIMUM_MAPPING_QUALITY &&
                            sam.getMateNegativeStrandFlag() != sam.getReadNegativeStrandFlag() &&
                            sam.getMateReferenceIndex().equals(sam.getReferenceIndex())) {
                        if (SamPairUtil.getPairOrientation(sam) == PairOrientation.RF) {
                            histo.increment(Math.abs(sam.getInferredInsertSize()));
                            sampled++;
                        }
                    }
                }
            }
            CloserUtil.close(reader);
        }
        return histo.size() > 0 ? histo.getMode() : 0;
    }
}
file is coordinate sorted even if the header says otherwise.") @@ -141,7 +141,7 @@ private void assertIoFiles(final File summaryFile, final File detailsFile, final @Override protected String[] customCommandLineValidation() { - final List errorMsgs = new ArrayList(); + final List errorMsgs = new ArrayList<>(); if (MAX_MISMATCH_RATE < 0 || MAX_MISMATCH_RATE > 1) { errorMsgs.add("MAX_MISMATCH_RATE must be in the range of 0-1"); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectWgsMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectWgsMetrics.java index 6d60f7b702c..09573c88bee 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectWgsMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectWgsMetrics.java @@ -129,7 +129,7 @@ protected Object doWork() { final SamReader in = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT); final SamLocusIterator iterator = new SamLocusIterator(in); - final List filters = new ArrayList(); + final List filters = new ArrayList<>(); final CountingFilter dupeFilter = new CountingDuplicateFilter(); final CountingFilter mapqFilter = new CountingMapQFilter(MINIMUM_MAPPING_QUALITY); final CountingPairedFilter pairFilter = new CountingPairedFilter(); @@ -164,7 +164,7 @@ protected Object doWork() { if (base == 'N') continue; // Figure out the coverage while not counting overlapping reads twice, and excluding various things - final HashSet readNames = new HashSet(info.getRecordAndPositions().size()); + final HashSet readNames = new HashSet<>(info.getRecordAndPositions().size()); int pileupSize = 0; for (final SamLocusIterator.RecordAndOffset recs : info.getRecordAndPositions()) { @@ -186,13 +186,13 @@ protected Object doWork() { } // Construct and write the outputs - final Histogram histo = new Histogram("coverage", "count"); + final Histogram 
histo = new Histogram<>("coverage", "count"); for (int i = 0; i < HistogramArray.length; ++i) { histo.increment(i, HistogramArray[i]); } // Construct and write the outputs - final Histogram baseQHisto = new Histogram("value", "baseq_count"); + final Histogram baseQHisto = new Histogram<>("value", "baseq_count"); for (int i=0; i makeChildCol public static OverlapDetector makeOverlapDetector(final File samFile, final SAMFileHeader header, final File ribosomalIntervalsFile) { - OverlapDetector ribosomalSequenceOverlapDetector = new OverlapDetector(0, 0); + OverlapDetector ribosomalSequenceOverlapDetector = new OverlapDetector<>(0, 0); if (ribosomalIntervalsFile != null) { final IntervalList ribosomalIntervals = IntervalList.fromFile(ribosomalIntervalsFile); @@ -76,7 +76,7 @@ public static OverlapDetector makeOverlapDetector(final File samFile, } public static HashSet makeIgnoredSequenceIndicesSet(final SAMFileHeader header, final Set ignoredSequence) { - final HashSet ignoredSequenceIndices = new HashSet(); + final HashSet ignoredSequenceIndices = new HashSet<>(); for (final String sequenceName: ignoredSequence) { final SAMSequenceRecord sequenceRecord = header.getSequence(sequenceName); if (sequenceRecord == null) { @@ -91,7 +91,7 @@ private class PerUnitRnaSeqMetricsCollector implements PerUnitMetricCollector coverageByTranscript = new HashMap(); + private final Map coverageByTranscript = new HashMap<>(); public PerUnitRnaSeqMetricsCollector(final String sample, final String library, @@ -261,11 +261,11 @@ public void addMetricsToFile(final MetricsFile file) { * expressed transcripts. 
*/ private Histogram computeCoverageMetrics() { - final Histogram cvs = new Histogram(); - final Histogram fivePrimeSkews = new Histogram(); - final Histogram threePrimeSkews = new Histogram(); - final Histogram gapBasesPerKb = new Histogram(); - final Histogram fiveToThreeSkews = new Histogram(); + final Histogram cvs = new Histogram<>(); + final Histogram fivePrimeSkews = new Histogram<>(); + final Histogram threePrimeSkews = new Histogram<>(); + final Histogram gapBasesPerKb = new Histogram<>(); + final Histogram fiveToThreeSkews = new Histogram<>(); String prefix = null; if (this.metrics.READ_GROUP != null) { prefix = this.metrics.READ_GROUP + "."; @@ -280,7 +280,7 @@ else if (this.metrics.SAMPLE != null) { prefix = "All_Reads."; } - final Histogram normalizedCoverageByNormalizedPosition = new Histogram("normalized_position", prefix + "normalized_coverage"); + final Histogram normalizedCoverageByNormalizedPosition = new Histogram<>("normalized_position", prefix + "normalized_coverage"); final Map transcripts = pickTranscripts(coverageByTranscript); final double transcriptCount = transcripts.size(); @@ -356,7 +356,7 @@ private double[] copyAndReverse(final double[] in) { /** Picks the set of transcripts on which the coverage metrics are to be calculated. 
*/ public Map pickTranscripts(final Map transcriptCoverage) { - final Map bestPerGene = new HashMap(); + final Map bestPerGene = new HashMap<>(); // Make a map of the best transcript per gene to it's mean coverage for (final Gene gene : geneOverlapDetector.getAll()) { @@ -388,7 +388,7 @@ public Map pickTranscripts(final Map retval = new HashMap(); + final Map retval = new HashMap<>(); for (final Map.Entry entry : bestPerGene.entrySet()) { final Gene.Transcript tx = entry.getKey(); final double coverage = entry.getValue(); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/RrbsMetricsCollector.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/RrbsMetricsCollector.java index da721927440..b456d6e7817 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/RrbsMetricsCollector.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/RrbsMetricsCollector.java @@ -43,8 +43,8 @@ private class PerUnitRrbsMetricsCollector implements PerUnitMetricCollector cpgTotal = new Histogram(); - final Histogram cpgConverted = new Histogram(); + final Histogram cpgTotal = new Histogram<>(); + final Histogram cpgConverted = new Histogram<>(); // Counters for QC filters used in the final metrics int mappedRecordCount = 0; @@ -180,7 +180,7 @@ private RrbsSummaryMetrics buildSummaryMetrics() { } private List buildDetailMetrics() { - final List detailMetrics = new ArrayList(); + final List detailMetrics = new ArrayList<>(); for (final CpgLocation key : cpgTotal.keySet()) { final RrbsCpgDetailMetrics cpgMetric = new RrbsCpgDetailMetrics(); cpgMetric.SAMPLE = sample; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/TargetMetricsCollector.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/TargetMetricsCollector.java index d6a19ffbcd1..18c3a7f92aa 100644 --- 
a/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/TargetMetricsCollector.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/TargetMetricsCollector.java @@ -126,7 +126,7 @@ protected static void reflectiveCopy(final TargetMetrics } } - final Set outputSet = new HashSet(); + final Set outputSet = new HashSet<>(); for(final Field field : outputMetrics.getClass().getFields()) { outputSet.add(field.getName()); } @@ -162,19 +162,19 @@ public TargetMetricsCollector(final Set accumulationLev this.allTargets = targetIntervals; final List uniqueBaits = this.allProbes.uniqued().getIntervals(); - this.probeDetector = new OverlapDetector(-NEAR_PROBE_DISTANCE, 0); + this.probeDetector = new OverlapDetector<>(-NEAR_PROBE_DISTANCE, 0); this.probeDetector.addAll(uniqueBaits, uniqueBaits); this.probeTerritory = Interval.countBases(uniqueBaits); final List uniqueTargets = this.allTargets.uniqued().getIntervals(); - targetDetector = new OverlapDetector(0,0); + targetDetector = new OverlapDetector<>(0,0); this.targetDetector.addAll(uniqueTargets, uniqueTargets); this.targetTerritory = Interval.countBases(uniqueTargets); // Populate the coverage by target map int i = 0; cov = new Coverage[uniqueTargets.size()]; - this.coverageByTargetForRead = new LinkedHashMap(uniqueTargets.size() * 2, 0.5f); + this.coverageByTargetForRead = new LinkedHashMap<>(uniqueTargets.size() * 2, 0.5f); for (final Interval target : uniqueTargets) { final Coverage coverage = new Coverage(target, 0); this.coverageByTargetForRead.put(target, coverage); @@ -189,7 +189,7 @@ public TargetMetricsCollector(final Set accumulationLev if (refFile != null) { - intervalToGc = new HashMap(); + intervalToGc = new HashMap<>(); for (final Interval target : uniqueTargets) { final ReferenceSequence rs = refFile.getSubsequenceAt(target.getContig(), target.getStart(), target.getEnd()); intervalToGc.put(target, SequenceUtil.calculateGc(rs.getBases())); @@ -251,7 +251,7 @@ 
public PerUnitTargetMetricCollector(final String probeSetName, final Set(coverageTargets.size() * 2, 0.5f); + this.coverageByTarget = new LinkedHashMap<>(coverageTargets.size() * 2, 0.5f); for (Interval target : coverageTargets) { this.coverageByTarget.put(target, new Coverage(target,0)); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/CheckIlluminaDirectory.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/CheckIlluminaDirectory.java index 56c5dbc3bf6..e4af06a3c61 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/CheckIlluminaDirectory.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/CheckIlluminaDirectory.java @@ -45,7 +45,7 @@ public class CheckIlluminaDirectory extends PicardCommandLineProgram { "is left unspecified then both ExtractIlluminaBarcodes and IlluminaBaseCallsToSam should complete successfully UNLESS the " + "individual records of the files themselves are spurious.", shortName = "DT", optional = true) - public final Set DATA_TYPES = new TreeSet(); + public final Set DATA_TYPES = new TreeSet<>(); @Argument(doc = ReadStructure.PARAMETER_DOC + " Note: If you want to check whether or not a future IlluminaBasecallsToSam or ExtractIlluminaBarcodes " + "run will fail then be sure to use the exact same READ_STRUCTURE that you would pass to these programs for this run.", @@ -76,7 +76,7 @@ protected Object doWork() { DATA_TYPES.addAll(Arrays.asList(IlluminaBasecallsConverter.DATA_TYPES_NO_BARCODE)); } - final List failingLanes = new ArrayList(); + final List failingLanes = new ArrayList<>(); int totalFailures = 0; final int[] expectedCycles = new OutputMapping(readStructure).getOutputCycles(); @@ -193,7 +193,7 @@ private static final int verifyLane(final IlluminaFileUtil fileUtil, final List< } } log.info("Could not find a format with available files for the following data types: " + StringUtil - .join(", ", new ArrayList(unmatchedDataTypes))); + 
.join(", ", new ArrayList<>(unmatchedDataTypes))); numFailures += unmatchedDataTypes.size(); } @@ -218,7 +218,7 @@ private static final int verifyLane(final IlluminaFileUtil fileUtil, final List< @Override protected String[] customCommandLineValidation() { IOUtil.assertDirectoryIsReadable(BASECALLS_DIR); - final List errors = new ArrayList(); + final List errors = new ArrayList<>(); for (final Integer lane : LANES) { if (lane < 1) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/ExtractIlluminaBarcodes.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/ExtractIlluminaBarcodes.java index 655c6bc3866..6e0a7533320 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/ExtractIlluminaBarcodes.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/ExtractIlluminaBarcodes.java @@ -103,7 +103,7 @@ public class ExtractIlluminaBarcodes extends PicardCommandLineProgram { @Argument(doc = "Barcode sequence. These must be unique, and all the same length. This cannot be used with reads that " + "have more than one barcode; use BARCODE_FILE in that case. ", mutex = {"BARCODE_FILE"}) - public List BARCODE = new ArrayList(); + public List BARCODE = new ArrayList<>(); @Argument(doc = "Tab-delimited file of barcode sequences, barcode name and, optionally, library name. " + "Barcodes must be unique and all the same length. 
Column headers must be 'barcode_sequence_1', " + @@ -146,7 +146,7 @@ public class ExtractIlluminaBarcodes extends PicardCommandLineProgram { private IlluminaDataProviderFactory factory; - private final Map barcodeToMetrics = new LinkedHashMap(); + private final Map barcodeToMetrics = new LinkedHashMap<>(); private final NumberFormat tileNumberFormatter = getNumberInstance(); private BclQualityEvaluationStrategy bclQualityEvaluationStrategy; @@ -189,7 +189,7 @@ protected Object doWork() { final ExecutorService pool = newFixedThreadPool(numProcessors); // TODO: This is terribly inefficient; we're opening a huge number of files via the extractor constructor and we never close them. - final List extractors = new ArrayList(factory.getAvailableTiles().size()); + final List extractors = new ArrayList<>(factory.getAvailableTiles().size()); for (final int tile : factory.getAvailableTiles()) { final PerTileBarcodeExtractor extractor = new PerTileBarcodeExtractor( tile, @@ -323,7 +323,7 @@ private File getBarcodeFile(final int tile) { */ @Override protected String[] customCommandLineValidation() { - final ArrayList messages = new ArrayList(); + final ArrayList messages = new ArrayList<>(); this.bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(MINIMUM_QUALITY); @@ -341,7 +341,7 @@ protected String[] customCommandLineValidation() { if (BARCODE_FILE != null) { parseBarcodeFile(messages); } else { - final Set barcodes = new HashSet(); + final Set barcodes = new HashSet<>(); for (final String barcode : BARCODE) { if (barcodes.contains(barcode)) { messages.add("Barcode " + barcode + " specified more than once."); @@ -378,7 +378,7 @@ private void parseBarcodeFile(final ArrayList messages) { final boolean hasBarcodeName = barcodesParser.hasColumn(BARCODE_NAME_COLUMN); final boolean hasLibraryName = barcodesParser.hasColumn(LIBRARY_NAME_COLUMN); final int numBarcodes = readStructure.barcodes.length(); - final Set barcodes = new HashSet(); + final Set barcodes = new 
HashSet<>(); for (final TabbedTextFileWithHeaderParser.Row row : barcodesParser) { final String bcStrings[] = new String[numBarcodes]; int barcodeNum = 1; @@ -570,7 +570,7 @@ public PerTileBarcodeExtractor( this.maxMismatches = maxMismatches; this.minMismatchDelta = minMismatchDelta; this.minimumBaseQuality = minimumBaseQuality; - this.metrics = new LinkedHashMap(barcodeToMetrics.size()); + this.metrics = new LinkedHashMap<>(barcodeToMetrics.size()); for (final String key : barcodeToMetrics.keySet()) { this.metrics.put(key, copy(barcodeToMetrics.get(key))); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsConverter.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsConverter.java index 9bfee57a269..cc824c048fd 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsConverter.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsConverter.java @@ -227,7 +227,7 @@ public void run() { } else { this.numThreads = numProcessors; } - this.tiles = new ArrayList(factory.getAvailableTiles()); + this.tiles = new ArrayList<>(factory.getAvailableTiles()); // Since the first non-fixed part of the read name is the tile number, without preceding zeroes, // and the output is sorted by read name, process the tiles in this order. 
sort(tiles, TILE_NUMBER_COMPARATOR); @@ -274,7 +274,7 @@ public IlluminaDataProviderFactory getFactory() { public void doTileProcessing() { try { // Generate the list of tiles that will be processed - final List tiles = new ArrayList(); + final List tiles = new ArrayList<>(); for (final Integer tileNumber : this.tiles) { tiles.add(new Tile(tileNumber)); } @@ -372,8 +372,8 @@ int getPriority() { */ private class TileProcessingRecord { final private Map> barcodeToRecordCollection = - new HashMap>(); - final private Map barcodeToProcessingState = new HashMap(); + new HashMap<>(); + final private Map barcodeToProcessingState = new HashMap<>(); private TileProcessingState state = TileProcessingState.NOT_DONE_READING; private long recordCount = 0; @@ -538,7 +538,7 @@ private class TileReadAggregator { *

* Implemented as a TreeMap to guarantee tiles are iterated over in natural order. */ - private final Map tileRecords = new TreeMap(); + private final Map tileRecords = new TreeMap<>(); /** * The executor responsible for doing work. @@ -551,7 +551,7 @@ private class TileReadAggregator { numThreads, 0L, MILLISECONDS, - new PriorityBlockingQueue(5, new Comparator() { + new PriorityBlockingQueue<>(5, new Comparator() { @Override /** * Compare the two Runnables, and assume they are PriorityRunnable; if not something strange is @@ -694,7 +694,7 @@ private void findAndEnqueueWorkOrSignalCompletion() { if (this.isWorkCompleted()) { this.signalWorkComplete(); } else { - final Queue tasks = new LinkedList(); + final Queue tasks = new LinkedList<>(); for (final String barcode : barcodeRecordWriterMap.keySet()) { NEXT_BARCODE: for (final Map.Entry entry : this.tileRecords.entrySet()) { @@ -762,7 +762,7 @@ public void run() { log.debug(format("Writing records from tile %s with barcode %s ...", tile.getNumber(), barcode)); - final PeekIterator it = new PeekIterator(records.iterator()); + final PeekIterator it = new PeekIterator<>(records.iterator()); while (it.hasNext()) { final CLUSTER_OUTPUT_RECORD rec = it.next(); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToFastq.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToFastq.java index 555e3dae0d6..4b68b1dd28a 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToFastq.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToFastq.java @@ -147,7 +147,7 @@ public enum ReadNameFormat { CASAVA_1_8, ILLUMINA } - private final Map barcodeFastqWriterMap = new HashMap(); + private final Map barcodeFastqWriterMap = new HashMap<>(); private ReadStructure readStructure; IlluminaBasecallsConverter basecallsConverter; private static final Log log = 
getInstance(IlluminaBasecallsToFastq.class); @@ -172,7 +172,7 @@ protected Object doWork() { @Override protected String[] customCommandLineValidation() { - final LinkedList errors = new LinkedList(); + final LinkedList errors = new LinkedList<>(); if (READ_NAME_FORMAT == ReadNameFormat.CASAVA_1_8 && MACHINE_NAME == null) { errors.add("MACHINE_NAME is required when using Casava1.8-style read name headers."); } @@ -216,7 +216,7 @@ private void initialize() { demultiplex = true; } final int readsPerCluster = readStructure.templates.length() + readStructure.barcodes.length(); - basecallsConverter = new IlluminaBasecallsConverter(BASECALLS_DIR, BARCODES_DIR, LANE, readStructure, + basecallsConverter = new IlluminaBasecallsConverter<>(BASECALLS_DIR, BARCODES_DIR, LANE, readStructure, barcodeFastqWriterMap, demultiplex, MAX_READS_IN_RAM_PER_TILE / readsPerCluster, TMP_DIR, NUM_PROCESSORS, FORCE_GC, FIRST_TILE, TILE_LIMIT, queryNameComparator, new FastqRecordsForClusterCodec(readStructure.templates.length(), @@ -238,7 +238,7 @@ private void initialize() { * @param expectedCols The columns that are REQUIRED */ private void assertExpectedColumns(final Set actualCols, final Set expectedCols) { - final Set missingColumns = new HashSet(expectedCols); + final Set missingColumns = new HashSet<>(expectedCols); missingColumns.removeAll(actualCols); if (missingColumns.size() > 0) { @@ -257,7 +257,7 @@ private void populateWritersFromMultiplexParams() { final TabbedTextFileWithHeaderParser libraryParamsParser = new TabbedTextFileWithHeaderParser(MULTIPLEX_PARAMS); final Set expectedColumnLabels = makeSet("OUTPUT_PREFIX"); - final List barcodeColumnLabels = new ArrayList(); + final List barcodeColumnLabels = new ArrayList<>(); for (int i = 1; i <= readStructure.barcodes.length(); i++) { barcodeColumnLabels.add("BARCODE_" + i); } @@ -269,7 +269,7 @@ private void populateWritersFromMultiplexParams() { List barcodeValues = null; if (barcodeColumnLabels.size() > 0) { - barcodeValues = new 
ArrayList(); + barcodeValues = new ArrayList<>(); for (final String barcodeLabel : barcodeColumnLabels) { barcodeValues.add(row.getField(barcodeLabel)); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToSam.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToSam.java index f0278e96994..b6550bd5c33 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToSam.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToSam.java @@ -156,7 +156,7 @@ public class IlluminaBasecallsToSam extends PicardCommandLineProgram { @Argument(doc = "Whether to include non-PF reads", shortName = "NONPF", optional = true) public boolean INCLUDE_NON_PF_READS = true; - private final Map barcodeSamWriterMap = new HashMap(); + private final Map barcodeSamWriterMap = new HashMap<>(); private ReadStructure readStructure; IlluminaBasecallsConverter basecallsConverter; private static final Log log = getInstance(IlluminaBasecallsToSam.class); @@ -182,7 +182,7 @@ private void initialize() { final int numOutputRecords = readStructure.templates.length(); - basecallsConverter = new IlluminaBasecallsConverter(BASECALLS_DIR, BARCODES_DIR, LANE, readStructure, + basecallsConverter = new IlluminaBasecallsConverter<>(BASECALLS_DIR, BARCODES_DIR, LANE, readStructure, barcodeSamWriterMap, true, MAX_READS_IN_RAM_PER_TILE / numOutputRecords, TMP_DIR, NUM_PROCESSORS, FORCE_GC, FIRST_TILE, TILE_LIMIT, new QueryNameComparator(), new Codec(numOutputRecords), SAMRecordsForCluster.class, bclQualityEvaluationStrategy, this.APPLY_EAMSS_FILTER, INCLUDE_NON_PF_READS); @@ -207,7 +207,7 @@ FIRST_TILE, TILE_LIMIT, new QueryNameComparator(), new Codec(numOutputRecords), * @return actualCols - expectedCols */ private Set findAndFilterExpectedColumns(final Set actualCols, final Set expectedCols) { - final Set missingColumns = new HashSet(expectedCols); + 
final Set missingColumns = new HashSet<>(expectedCols); missingColumns.removeAll(actualCols); if (missingColumns.size() > 0) { @@ -217,7 +217,7 @@ private Set findAndFilterExpectedColumns(final Set actualCols, f ))); } - final Set remainingColumns = new HashSet(actualCols); + final Set remainingColumns = new HashSet<>(actualCols); remainingColumns.removeAll(expectedCols); return remainingColumns; } @@ -252,7 +252,7 @@ private void populateWritersFromLibraryParams() { final TabbedTextFileWithHeaderParser libraryParamsParser = new TabbedTextFileWithHeaderParser(LIBRARY_PARAMS); final Set expectedColumnLabels = makeSet("OUTPUT", "SAMPLE_ALIAS", "LIBRARY_NAME"); - final List barcodeColumnLabels = new ArrayList(); + final List barcodeColumnLabels = new ArrayList<>(); if (readStructure.barcodes.length() == 1) { //For the single barcode read case, the barcode label name can either by BARCODE or BARCODE_1 if (libraryParamsParser.hasColumn("BARCODE")) { @@ -276,7 +276,7 @@ private void populateWritersFromLibraryParams() { List barcodeValues = null; if (barcodeColumnLabels.size() > 0) { - barcodeValues = new ArrayList(); + barcodeValues = new ArrayList<>(); for (final String barcodeLabel : barcodeColumnLabels) { barcodeValues.add(row.getField(barcodeLabel)); } @@ -313,7 +313,7 @@ private void populateWritersFromLibraryParams() { * @return A Map of ReadGroupHeaderTags -> Values */ private Map buildSamHeaderParameters(final List barcodes) { - final Map params = new HashMap(); + final Map params = new HashMap<>(); String platformUnit = RUN_BARCODE + "." + LANE; if (barcodes != null) platformUnit += ("." 
+ barcodeSeqsToString(barcodes)); @@ -369,7 +369,7 @@ private SAMFileWriterWrapper buildSamFileWriter(final File output, final String */ @Override protected String[] customCommandLineValidation() { - final ArrayList messages = new ArrayList(); + final ArrayList messages = new ArrayList<>(); readStructure = new ReadStructure(READ_STRUCTURE); if (!readStructure.barcodes.isEmpty()) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/MarkIlluminaAdapters.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/MarkIlluminaAdapters.java index 593ac8c500f..e88cfa288e9 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/MarkIlluminaAdapters.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/MarkIlluminaAdapters.java @@ -145,12 +145,12 @@ protected Object doWork() { out = new SAMFileWriterFactory().makeSAMOrBAMWriter(in.getFileHeader(), true, OUTPUT); } - final Histogram histo = new Histogram("clipped_bases", "read_count"); + final Histogram histo = new Histogram<>("clipped_bases", "read_count"); // Combine any adapters and custom adapter pairs from the command line into an array for use in clipping final AdapterPair[] adapters; { - final List tmp = new ArrayList(); + final List tmp = new ArrayList<>(); tmp.addAll(ADAPTERS); if (FIVE_PRIME_ADAPTER != null && THREE_PRIME_ADAPTER != null) { tmp.add(new CustomAdapterPair(FIVE_PRIME_ADAPTER, THREE_PRIME_ADAPTER)); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectHiSeqXPfFailMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectHiSeqXPfFailMetrics.java index 9acf07d3d85..8ce8a32cbf8 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectHiSeqXPfFailMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectHiSeqXPfFailMetrics.java @@ -79,8 +79,8 @@ public class 
CollectHiSeqXPfFailMetrics extends PicardCommandLineProgram { private static final Log LOG = getInstance(CollectHiSeqXPfFailMetrics.class); - private final Map tileToSummaryMetrics = new LinkedHashMap(); - private final Map> tileToDetailedMetrics = new LinkedHashMap>(); + private final Map tileToSummaryMetrics = new LinkedHashMap<>(); + private final Map> tileToDetailedMetrics = new LinkedHashMap<>(); //Add "T" to the number of cycles to create a "TemplateRead" of the desired length. private final ReadStructure READ_STRUCTURE = new ReadStructure(N_CYCLES + "T"); @@ -90,7 +90,7 @@ public class CollectHiSeqXPfFailMetrics extends PicardCommandLineProgram { @Override protected String[] customCommandLineValidation() { - final List errors = new ArrayList(); + final List errors = new ArrayList<>(); if (N_CYCLES < 0) { errors.add("Number of Cycles to look at must be greater than 0"); @@ -137,10 +137,10 @@ protected Object doWork() { LOG.info("Processing with " + numProcessors + " PerTilePFMetricsExtractor(s)."); final ExecutorService pool = newFixedThreadPool(numProcessors); - final List extractors = new ArrayList(factory.getAvailableTiles().size()); + final List extractors = new ArrayList<>(factory.getAvailableTiles().size()); for (final int tile : factory.getAvailableTiles()) { tileToSummaryMetrics.put(tile, new PFFailSummaryMetric(Integer.toString(tile))); - tileToDetailedMetrics.put(tile, new ArrayList()); + tileToDetailedMetrics.put(tile, new ArrayList<>()); final PerTilePFMetricsExtractor extractor = new PerTilePFMetricsExtractor( tile, diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectIlluminaBasecallingMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectIlluminaBasecallingMetrics.java index fd9ddbe4f2a..a426ae58f02 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectIlluminaBasecallingMetrics.java +++ 
b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectIlluminaBasecallingMetrics.java @@ -83,7 +83,7 @@ public class CollectIlluminaBasecallingMetrics extends PicardCommandLineProgram private static final String BARCODE_SEQUENCE_COLUMN_NAME_STUB = "barcode_sequence_"; public CollectIlluminaBasecallingMetrics() { - this.barcodeToMetricCounts = new TreeMap(); + this.barcodeToMetricCounts = new TreeMap<>(); } @Override @@ -206,8 +206,8 @@ private class IlluminaMetricCounts { final IlluminaBasecallingMetrics metrics; public IlluminaMetricCounts(final String barcode, final String barcodeName, final Integer laneNumber) { - this.tileToClusterHistogram = new Histogram(); - this.tileToPfClusterHistogram = new Histogram(); + this.tileToClusterHistogram = new Histogram<>(); + this.tileToPfClusterHistogram = new Histogram<>(); this.metrics = new IlluminaBasecallingMetrics(); this.metrics.MOLECULAR_BARCODE_SEQUENCE_1 = barcode; this.metrics.MOLECULAR_BARCODE_NAME = barcodeName; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/IlluminaPhasingMetrics.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/IlluminaPhasingMetrics.java index 2360c3623ca..57c18bec413 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/IlluminaPhasingMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/IlluminaPhasingMetrics.java @@ -24,7 +24,7 @@ public class IlluminaPhasingMetrics extends MetricBase { */ public static Collection getPhasingMetricsForTiles(final long lane, final Collection tilesForLane) { final LanePhasingMetricsCollector lanePhasingMetricsCollector = new LanePhasingMetricsCollector(tilesForLane); - final Collection phasingMetrics = new ArrayList(); + final Collection phasingMetrics = new ArrayList<>(); for (final TileTemplateRead tileTemplateRead : lanePhasingMetricsCollector.getMedianPhasingMap().keySet()) { final 
IlluminaPhasingMetrics phasingMetric = new IlluminaPhasingMetrics(); phasingMetric.LANE = lane; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/LanePhasingMetricsCollector.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/LanePhasingMetricsCollector.java index 08926ea5fc4..8e2374e7c8b 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/LanePhasingMetricsCollector.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/LanePhasingMetricsCollector.java @@ -21,11 +21,11 @@ public class LanePhasingMetricsCollector { * first and second (if available) reads */ public LanePhasingMetricsCollector(final Collection laneTiles) { - final Map medianPhasingMap = new TreeMap(); - final Map medianPrePhasingMap = new TreeMap(); + final Map medianPhasingMap = new TreeMap<>(); + final Map medianPrePhasingMap = new TreeMap<>(); - final CollectionUtil.MultiMap phasingValues = new CollectionUtil.MultiMap(); - final CollectionUtil.MultiMap prePhasingValues = new CollectionUtil.MultiMap(); + final CollectionUtil.MultiMap phasingValues = new CollectionUtil.MultiMap<>(); + final CollectionUtil.MultiMap prePhasingValues = new CollectionUtil.MultiMap<>(); // Collect the phasing/prephasing values from all of the tiles, sorted by template read # for (final Tile tile : laneTiles) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaDataProviderFactory.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaDataProviderFactory.java index 80f9b5274db..59f58902428 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaDataProviderFactory.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaDataProviderFactory.java @@ -52,7 +52,7 @@ public class IlluminaDataProviderFactory { * We try to prefer data types that will be 
the fastest to parse/smallest in memory * NOTE: In the code below, if Qseq is chosen to provide for ANY data type then it is used for ALL its data types (since we'll have to parse the entire line for each Qseq anyways) */ - private static final Map> DATA_TYPE_TO_PREFERRED_FORMATS = new HashMap>(); + private static final Map> DATA_TYPE_TO_PREFERRED_FORMATS = new HashMap<>(); static { /** For types found in Qseq, we prefer the NON-Qseq file formats first. However, if we end up using Qseqs then we use Qseqs for EVERY type it provides, @@ -144,7 +144,7 @@ public IlluminaDataProviderFactory(final File basecallDirectory, File barcodesDi * for data residing in QSeqs (since QSeqs span multiple data types). This is no longer the case, you * MUST specify all data types that should be returned. */ - final Set dataTypes = unmodifiableSet(new HashSet(asList(dataTypesArg))); + final Set dataTypes = unmodifiableSet(new HashSet<>(asList(dataTypesArg))); if (dataTypes.isEmpty()) { throw new IlluminaParserException("No data types have been specified for basecall output " + basecallDirectory + ", lane " + lane); @@ -159,12 +159,12 @@ public IlluminaDataProviderFactory(final File basecallDirectory, File barcodesDi final Set unmatchedDataTypes = findUnmatchedTypes(dataTypes, formatToDataTypes); if (unmatchedDataTypes.size() > 0) { throw new IlluminaParserException("Could not find a format with available files for the following data types: " - + join(", ", new ArrayList(unmatchedDataTypes))); + + join(", ", new ArrayList<>(unmatchedDataTypes))); } log.debug("The following file formats will be used by IlluminaDataProvider: " + join("," + formatToDataTypes.keySet())); - availableTiles = fileUtil.getActualTiles(new ArrayList(formatToDataTypes.keySet())); + availableTiles = fileUtil.getActualTiles(new ArrayList<>(formatToDataTypes.keySet())); if (availableTiles.isEmpty()) { throw new IlluminaParserException("No available tiles were found, make sure that " + 
basecallDirectory.getAbsolutePath() + " has a lane " + lane); } @@ -221,7 +221,7 @@ public IlluminaDataProvider makeDataProvider(List requestedTiles) { } } - final Map> parsersToDataType = new HashMap>(); + final Map> parsersToDataType = new HashMap<>(); for (final Map.Entry> fmToDt : formatToDataTypes.entrySet()) { parsersToDataType.put(makeParser(fmToDt.getKey(), requestedTiles), fmToDt.getValue()); } @@ -239,7 +239,7 @@ public IlluminaDataProvider makeDataProvider(List requestedTiles) { * @return The data types that go unsupported by the formats found in formatToMatchedTypes */ public static Set findUnmatchedTypes(final Set requestedDataTypes, final Map> formatToMatchedTypes) { - final Set copiedTypes = new HashSet(requestedDataTypes); + final Set copiedTypes = new HashSet<>(requestedDataTypes); for (final Set matchedTypes : formatToMatchedTypes.values()) { copiedTypes.removeAll(matchedTypes); } @@ -257,9 +257,9 @@ public static Set findUnmatchedTypes(final Set> determineFormats(final Set requestedDataTypes, final IlluminaFileUtil fileUtil) { //For predictable ordering and uniqueness only, put the requestedDataTypes into a treeSet - final SortedSet toSupport = new TreeSet(requestedDataTypes); - final Map> fileTypeToDataTypes = new HashMap>(); - final Map dataTypeToFormat = new HashMap(); + final SortedSet toSupport = new TreeSet<>(requestedDataTypes); + final Map> fileTypeToDataTypes = new HashMap<>(); + final Map dataTypeToFormat = new HashMap<>(); for (final IlluminaDataType ts : toSupport) { final SupportedIlluminaFormat preferredFormat = findPreferredAvailableFormat(ts, fileUtil); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileMap.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileMap.java index 30a1607ccaf..8bd9e70c1d3 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileMap.java +++ 
b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileMap.java @@ -32,6 +32,6 @@ public IlluminaFileMap keep(final List tilesToKeep) { * @return A List of files for all tiles >= startingTile that are contained in this FileMap */ public List getFilesStartingAt(final int startingTile) { - return new ArrayList(this.tailMap(startingTile).values()); + return new ArrayList<>(this.tailMap(startingTile).values()); } } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileUtil.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileUtil.java index cd7082a82b9..a74c3e7c985 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileUtil.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileUtil.java @@ -54,7 +54,7 @@ public enum SupportedIlluminaFormat { private final int lane; private final File tileMetricsOut; - private final Map utils = new HashMap(); + private final Map utils = new HashMap<>(); public IlluminaFileUtil(final File basecallDir, final int lane) { this(basecallDir, null, lane); @@ -145,7 +145,7 @@ public ParameterizedFileUtil getUtil(final SupportedIlluminaFormat format) { public List getExpectedTiles() { assertFileIsReadable(tileMetricsOut); //Used just to ensure predictable ordering - final TreeSet expectedTiles = new TreeSet(); + final TreeSet expectedTiles = new TreeSet<>(); final Iterator tileMetrics = new TileMetricsOutReader(tileMetricsOut); while (tileMetrics.hasNext()) { @@ -159,7 +159,7 @@ public List getExpectedTiles() { } close(tileMetrics); - return new ArrayList(expectedTiles); + return new ArrayList<>(expectedTiles); } /** diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/MultiTileBclFileUtil.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/MultiTileBclFileUtil.java index 
567672a2074..31f3d0294f6 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/MultiTileBclFileUtil.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/MultiTileBclFileUtil.java @@ -44,7 +44,7 @@ public class MultiTileBclFileUtil extends ParameterizedFileUtil { public CycleIlluminaFileMap getFiles(final List tiles, final int[] cycles) { // Filter input list of cycles according to which actually exist - final ArrayList goodCycleList = new ArrayList(cycles.length); + final ArrayList goodCycleList = new ArrayList<>(cycles.length); for (final int cycle : cycles) { if (cycleFileMap.containsKey(cycle)) { goodCycleList.add(cycle); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/MultiTileParser.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/MultiTileParser.java index b5178519e36..0315a830935 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/MultiTileParser.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/MultiTileParser.java @@ -31,7 +31,7 @@ public MultiTileParser(final TileIndex tileIndex, final Set supportedTypes) { this.tileIndex = tileIndex; this.tileIndexIterator = tileIndex.iterator(); - this.requestedTilesIterator = new PeekIterator(requestedTiles.iterator()); + this.requestedTilesIterator = new PeekIterator<>(requestedTiles.iterator()); this.supportedTypes = supportedTypes; } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileCycleParser.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileCycleParser.java index 74a994f35cf..04aa1a9883e 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileCycleParser.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileCycleParser.java @@ -60,7 +60,7 @@ abstract class 
PerTileCycleParser implements } private TreeSet getTileOrder(final CycleIlluminaFileMap cyclesToTileFiles) { - final TreeSet uniqueTiles = new TreeSet(); + final TreeSet uniqueTiles = new TreeSet<>(); for (final IlluminaFileMap fileMap : cyclesToTileFiles.values()) { uniqueTiles.addAll(fileMap.keySet()); @@ -106,7 +106,7 @@ public void seekToTile(final int tile) { } int totalCycles = 0; - final List tileFiles = new ArrayList(); + final List tileFiles = new ArrayList<>(); for (final Map.Entry entry : cyclesToTileFiles.entrySet()) { tileFiles.add(entry.getValue().get(currentTile)); ++totalCycles; @@ -167,7 +167,7 @@ public int getTileOfNextCluster() { @Override public void verifyData(List tiles, final int[] cycles) { if (tiles == null) { - tiles = new ArrayList(this.cyclesToTileFiles.keySet()); + tiles = new ArrayList<>(this.cyclesToTileFiles.keySet()); } this.cyclesToTileFiles.assertValid(tiles, cycles); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileFileUtil.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileFileUtil.java index 32192c38f48..6645f908e90 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileFileUtil.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileFileUtil.java @@ -14,9 +14,9 @@ public PerTileFileUtil(final String extension, final File base, super(true, extension, base, faker, lane); this.fileMap = getTiledFiles(base, matchPattern); if (fileMap.size() > 0) { - this.tiles = Collections.unmodifiableList(new ArrayList(this.fileMap.keySet())); + this.tiles = Collections.unmodifiableList(new ArrayList<>(this.fileMap.keySet())); } else { - this.tiles = new ArrayList(); + this.tiles = new ArrayList<>(); } } @@ -35,13 +35,13 @@ public IlluminaFileMap getFiles(final List tiles) { @Override public List verify(final List expectedTiles, final int[] expectedCycles) { - final List failures = new 
LinkedList(); + final List failures = new LinkedList<>(); if (!base.exists()) { failures.add("Base directory(" + base.getAbsolutePath() + ") does not exist!"); } else { if (!tiles.containsAll(expectedTiles)) { - final List missing = new ArrayList(expectedTiles); + final List missing = new ArrayList<>(expectedTiles); missing.removeAll(tiles); failures.add("Missing tile " + missing + " for file type " + extension + "."); } @@ -52,7 +52,7 @@ public List verify(final List expectedTiles, final int[] expect @Override public List fakeFiles(final List expectedTiles, final int[] cycles, final IlluminaFileUtil.SupportedIlluminaFormat format) { - final List failures = new LinkedList(); + final List failures = new LinkedList<>(); if (!base.exists()) { failures.add("Base directory(" + base.getAbsolutePath() + ") does not exist!"); } else { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileParser.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileParser.java index a2b912cd771..c6b97ac1e15 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileParser.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileParser.java @@ -35,7 +35,7 @@ public PerTileParser(final IlluminaFileMap tilesToFiles, final int nextTile) { this.nextTile = nextTile; if (!tilesToFiles.containsKey(nextTile)) { - throw new IllegalArgumentException("NextTile (" + nextTile + ") is not contained by tilesToFiles (" + join(",", new ArrayList(tilesToFiles.keySet()))); + throw new IllegalArgumentException("NextTile (" + nextTile + ") is not contained by tilesToFiles (" + join(",", new ArrayList<>(tilesToFiles.keySet()))); } } @@ -69,7 +69,7 @@ public void seekToTile(int oneBasedTileNumber) { if (!tileToFiles.containsKey(oneBasedTileNumber)) { throw new IlluminaParserException("PerTileParser does not contain key(" + oneBasedTileNumber + ") keys available (" - + 
join(",", new ArrayList(tileToFiles.keySet())) + ")"); + + join(",", new ArrayList<>(tileToFiles.keySet())) + ")"); } if (currentIterator != null) { @@ -113,7 +113,7 @@ public void close() { } public void verifyData(List tiles, final int[] cycles) { - final List mapTiles = new ArrayList(this.tileToFiles.keySet()); + final List mapTiles = new ArrayList<>(this.tileToFiles.keySet()); if (!mapTiles.containsAll(tiles)) { throw new IlluminaParserException("Missing tiles in PerTileParser expected(" + join(",", tiles) + ") but found (" + join(",", mapTiles) + ")"); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTilePerCycleFileUtil.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTilePerCycleFileUtil.java index 7e38b81903c..41a1740d5bb 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTilePerCycleFileUtil.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTilePerCycleFileUtil.java @@ -13,7 +13,7 @@ public class PerTilePerCycleFileUtil extends ParameterizedFileUtil { private final CycleIlluminaFileMap cycleFileMap; - private final Set detectedCycles = new TreeSet(); + private final Set detectedCycles = new TreeSet<>(); public PerTilePerCycleFileUtil(final String extension, final File base, final FileFaker faker, final int lane) { @@ -44,7 +44,7 @@ protected CycleIlluminaFileMap getPerTilePerCycleFiles() { detectedCycles.add(getCycleFromDir(tempCycleDir)); } - final Set uniqueTiles = new HashSet(); + final Set uniqueTiles = new HashSet<>(); for (final File cycleDir : tempCycleDirs) { final IlluminaFileMap fileMap = getTiledFiles(cycleDir, matchPattern); @@ -52,7 +52,7 @@ protected CycleIlluminaFileMap getPerTilePerCycleFiles() { cycledMap.put(getCycleFromDir(cycleDir), fileMap); } - this.tiles = Collections.unmodifiableList(new ArrayList(uniqueTiles)); + this.tiles = Collections.unmodifiableList(new ArrayList<>(uniqueTiles)); 
return cycledMap; } @@ -92,7 +92,7 @@ public CycleIlluminaFileMap getFiles(final List tiles, final int[] cycl private Set removeNonExistentCycles(final int[] cycles) { - final TreeSet inputCyclesSet = new TreeSet(); + final TreeSet inputCyclesSet = new TreeSet<>(); for (final Integer inputCycle : cycles) { inputCyclesSet.add(inputCycle); } @@ -129,8 +129,8 @@ public boolean filesAvailable() { @Override public List verify(final List expectedTiles, final int[] expectedCycles) { - final List failures = new LinkedList(); - final Map tileToFileLengthMap = new HashMap(); + final List failures = new LinkedList<>(); + final Map tileToFileLengthMap = new HashMap<>(); if (!base.exists()) { failures.add("Base directory(" + base.getAbsolutePath() + ") does not exist!"); @@ -174,13 +174,13 @@ public List verify(final List expectedTiles, final int[] expect @Override public List fakeFiles(final List expectedTiles, final int[] expectedCycles, final IlluminaFileUtil.SupportedIlluminaFormat format) { - final List failures = new LinkedList(); + final List failures = new LinkedList<>(); if (!base.exists()) { base.mkdirs(); } - final Set missingCycleSet = new TreeSet(); + final Set missingCycleSet = new TreeSet<>(); for (final Integer cycle : expectedCycles) { missingCycleSet.add(cycle); } @@ -195,7 +195,7 @@ public List fakeFiles(final List expectedTiles, final int[] exp } final CycleIlluminaFileMap cfm = getPerTilePerCycleFiles(); - final Map tileToSizeMap = new HashMap(); + final Map tileToSizeMap = new HashMap<>(); for (final int currentCycle : expectedCycles) { final IlluminaFileMap fileMap = cfm.get(currentCycle); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/ReadStructure.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/ReadStructure.java index 39f30769fbf..4c76b05987a 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/ReadStructure.java +++ 
b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/ReadStructure.java @@ -94,14 +94,14 @@ public ReadStructure(final List collection) { throw new IllegalArgumentException("ReadStructure does not support 0 length clusters!"); } - final List allRanges = new ArrayList(collection.size()); + final List allRanges = new ArrayList<>(collection.size()); this.descriptors = unmodifiableList(collection); int cycles = 0; - final List nonSkipIndicesList = new ArrayList(); - final List barcodeIndicesList = new ArrayList(); - final List templateIndicesList = new ArrayList(); - final List skipIndicesList = new ArrayList(); + final List nonSkipIndicesList = new ArrayList<>(); + final List barcodeIndicesList = new ArrayList<>(); + final List templateIndicesList = new ArrayList<>(); + final List skipIndicesList = new ArrayList<>(); readLengths = new int[collection.size()]; int currentCycleIndex = 0; // Current cycle in the entire read structure @@ -187,7 +187,7 @@ private final static List readStructureStringToDescriptors(final final Matcher subMatcher = SubPattern.matcher(readStructure); - final List descriptors = new ArrayList(); + final List descriptors = new ArrayList<>(); while (subMatcher.find()) { final ReadDescriptor rd = new ReadDescriptor(parseInt(subMatcher.group(1)), valueOf(subMatcher.group(2))); descriptors.add(rd); @@ -334,7 +334,7 @@ public int[] getCycles() { * (36T8B36T) in ReadStructure form */ public ReadStructure toReadStructure() { - final List descriptors = new ArrayList(numDescriptors); + final List descriptors = new ArrayList<>(numDescriptors); for (final ReadDescriptor rd : this) { descriptors.add(rd); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/Tile.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/Tile.java index 1f9524a1280..b9a7121da71 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/Tile.java +++ 
b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/Tile.java @@ -33,8 +33,8 @@ public Tile(final int lane, final int tile, final float density, final float clu final Collection phasingValues = ensureSoleTilePhasingValuesPerRead(asList(tilePhasingValues)); - final Map phasingMap = new HashMap(); - final Map prePhasingMap = new HashMap(); + final Map phasingMap = new HashMap<>(); + final Map prePhasingMap = new HashMap<>(); /** For each of the TileReads, assign their phasing & prephasing values to the respective maps, which we will * use later to calculate the medians @@ -96,7 +96,7 @@ public TileTemplateRead getPartition(final TilePhasingValue phasingValue) { } }); - final Collection newTilePhasingValues = new LinkedList(); + final Collection newTilePhasingValues = new LinkedList<>(); for (final TileTemplateRead read : partitionedMap.keySet()) { newTilePhasingValues.add(getSoleElement(partitionedMap.get(read))); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/TileIndex.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/TileIndex.java index cf23023c7b0..501eb366ddb 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/TileIndex.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/TileIndex.java @@ -26,7 +26,7 @@ */ public class TileIndex implements Iterable { private final File tileIndexFile; - private final List tiles = new ArrayList(); + private final List tiles = new ArrayList<>(); TileIndex(final File tileIndexFile) { try { @@ -78,15 +78,15 @@ private boolean readTileIndexRecord(final byte[] buf, final int numBytes, final } public List getTiles() { - final List ret = new ArrayList(tiles.size()); + final List ret = new ArrayList<>(tiles.size()); for (final TileIndexRecord rec : tiles) ret.add(rec.tile); return ret; } public List verify(final List expectedTiles) { - final Set tileSet = new HashSet(tiles.size()); 
+ final Set tileSet = new HashSet<>(tiles.size()); for (final TileIndexRecord rec : tiles) tileSet.add(rec.tile); - final List failures = new LinkedList(); + final List failures = new LinkedList<>(); for (final int expectedTile : expectedTiles) { if (!tileSet.contains(expectedTile)) { failures.add("Tile " + expectedTile + " not found in " + tileIndexFile.getAbsolutePath()); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/TileMetricsUtil.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/TileMetricsUtil.java index 93d4b89cac9..79d3d52bd49 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/TileMetricsUtil.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/TileMetricsUtil.java @@ -57,7 +57,7 @@ public static Collection parseTileMetrics(final File tileMetricsOutFile, f // Collect the tiles by lane & tile, and then collect the metrics by lane final Map> locationToMetricsMap = partitionTileMetricsByLocation(tileMetrics); - final Collection tiles = new LinkedList(); + final Collection tiles = new LinkedList<>(); for (final Map.Entry> entry : locationToMetricsMap.entrySet()) { final Collection tileRecords = entry.getValue(); @@ -87,7 +87,7 @@ public static Collection parseTileMetrics(final File tileMetricsOutFile, f */ private static Collection getTilePhasingValues(final Map> codeMetricsMap, final ReadStructure readStructure) { boolean isFirstRead = true; - final Collection tilePhasingValues = new ArrayList(); + final Collection tilePhasingValues = new ArrayList<>(); for (int descriptorIndex = 0; descriptorIndex < readStructure.descriptors.size(); descriptorIndex++) { if (readStructure.descriptors.get(descriptorIndex).type == Template) { final TileTemplateRead tileTemplateRead = isFirstRead ? 
FIRST : SECOND; @@ -114,8 +114,8 @@ private static Collection getTilePhasingValues(final Map determineLastValueForLaneTileMetricsCode(final Iterator tileMetricsIterator) { - final Map filteredTileMetrics = new HashMap(); - for (final IlluminaTileMetrics illuminaTileMetrics : new IterableAdapter(tileMetricsIterator)) { + final Map filteredTileMetrics = new HashMap<>(); + for (final IlluminaTileMetrics illuminaTileMetrics : new IterableAdapter<>(tileMetricsIterator)) { filteredTileMetrics.put(illuminaTileMetrics.getLaneTileCode(), illuminaTileMetrics); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/readers/BclQualityEvaluationStrategy.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/readers/BclQualityEvaluationStrategy.java index c3c35fbf10f..8fe43ed8e65 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/readers/BclQualityEvaluationStrategy.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/readers/BclQualityEvaluationStrategy.java @@ -26,7 +26,7 @@ public class BclQualityEvaluationStrategy { public static final int ILLUMINA_ALLEGED_MINIMUM_QUALITY = 2; private final int minimumRevisedQuality; /** A thread-safe defaulting map that injects an AtomicInteger starting at 0 when a uninitialized key is get-ted. */ - private Map qualityCountMap = Collections.synchronizedMap(new CollectionUtil.DefaultingMap( + private Map qualityCountMap = Collections.synchronizedMap(new CollectionUtil.DefaultingMap<>( new CollectionUtil.DefaultingMap.Factory() { @Override public AtomicInteger make(final Byte _) { @@ -64,7 +64,7 @@ public byte reviseAndConditionallyLogQuality(final byte quality) { * Reviews the qualities observed thus far and throws an exception if any are below the minimum quality threshold. 
*/ public void assertMinimumQualities() { - final Collection errorTokens = new LinkedList(); + final Collection errorTokens = new LinkedList<>(); for (final Map.Entry entry : this.qualityCountMap.entrySet()) { /** * We're comparing revised qualities here, not observed, but the qualities that are logged in qualityCountMap are observed @@ -87,7 +87,7 @@ public void assertMinimumQualities() { * Returns a view of number of qualities that failed, where the key is the quality score and the value is the number of observations. */ public Map getPoorQualityFrequencies() { - final Map qualityCountMapCopy = new HashMap(); + final Map qualityCountMapCopy = new HashMap<>(); for (final Map.Entry entry : qualityCountMap.entrySet()) { qualityCountMapCopy.put(entry.getKey(), entry.getValue().intValue()); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListScatterer.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListScatterer.java index bfa6557416a..f151c177b6b 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListScatterer.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListScatterer.java @@ -66,10 +66,10 @@ public List scatter(final IntervalList sourceIntervalList, final i final IntervalList uniquedList = isUniqued ? 
sourceIntervalList : sourceIntervalList.uniqued(); final long idealSplitLength = deduceIdealSplitLength(uniquedList, scatterCount); - final List accumulatedIntervalLists = new ArrayList(); + final List accumulatedIntervalLists = new ArrayList<>(); IntervalList runningIntervalList = new IntervalList(uniquedList.getHeader()); - final ArrayDeque intervalQueue = new ArrayDeque(uniquedList.getIntervals()); + final ArrayDeque intervalQueue = new ArrayDeque<>(uniquedList.getIntervals()); while (!intervalQueue.isEmpty() && accumulatedIntervalLists.size() < scatterCount - 1) { final Interval interval = intervalQueue.pollFirst(); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListTools.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListTools.java index 5006be6ef9a..42f3c0db2c4 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListTools.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListTools.java @@ -186,7 +186,7 @@ protected Object doWork() { // Decide on a PG ID and make a program group final SAMFileHeader header = result.getHeader(); - final Set pgs = new HashSet(); + final Set pgs = new HashSet<>(); for (final SAMProgramRecord pg : header.getProgramRecords()) pgs.add(pg.getId()); for (int i = 1; i < Integer.MAX_VALUE; ++i) { if (!pgs.contains(String.valueOf(i))) { @@ -246,7 +246,7 @@ protected Object doWork() { private List openIntervalLists(final List files){ - final List lists = new ArrayList(); + final List lists = new ArrayList<>(); for (final File f : files) { final IntervalList list = TYPE.getIntervalList(f, INCLUDE_FILTERED); @@ -270,7 +270,7 @@ private List openIntervalLists(final List files){ @Override protected String[] customCommandLineValidation() { - final List errorMsgs = new ArrayList(); + final List errorMsgs = new ArrayList<>(); if (SCATTER_COUNT < 1) { errorMsgs.add("SCATTER_COUNT must be greater than 0."); 
} diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/ScatterIntervalsByNs.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/ScatterIntervalsByNs.java index f51f7169890..57c2cb3a209 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/ScatterIntervalsByNs.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/interval/ScatterIntervalsByNs.java @@ -110,7 +110,7 @@ protected Object doWork() { * **************************************************************** */ public static IntervalList segregateReference(final ReferenceSequenceFile refFile, final int maxNmerToMerge) { - final List preliminaryIntervals = new LinkedList(); + final List preliminaryIntervals = new LinkedList<>(); final SAMFileHeader header = new SAMFileHeader(); header.setSequenceDictionary(refFile.getSequenceDictionary()); header.setSortOrder(SAMFileHeader.SortOrder.coordinate); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/BamToBfq.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/BamToBfq.java index bea1110cd43..39fcae42180 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/BamToBfq.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/BamToBfq.java @@ -188,7 +188,7 @@ public void writeBfqFiles() { if (!pairedReads) { - List filters = new ArrayList(); + List filters = new ArrayList<>(); filters.add(tagFilter); filters.add(clippedFilter); if (!this.includeNonPfReads) { @@ -436,10 +436,10 @@ private int countWritableRecords() { //but it doesn't check this early, nor produce an understandable error message." 
throw new UserException("Input file (" + this.bamFile.getAbsolutePath() +") needs to be sorted by queryname."); } - final PeekableIterator it = new PeekableIterator(reader.iterator()); + final PeekableIterator it = new PeekableIterator<>(reader.iterator()); if (!this.pairedReads) { // Filter out noise reads and reads that fail the quality filter - final List filters = new ArrayList(); + final List filters = new ArrayList<>(); filters.add(new TagFilter(ReservedTagConstants.XN, 1)); if (!this.includeNonPfReads) { filters.add(new FailsVendorReadQualityFilter()); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/CreateSequenceDictionary.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/CreateSequenceDictionary.java index a3f44a526dc..9a284e9a416 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/CreateSequenceDictionary.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/CreateSequenceDictionary.java @@ -81,8 +81,8 @@ SAMSequenceDictionary makeSequenceDictionary(final File referenceFile) { final ReferenceSequenceFile refSeqFile = ReferenceSequenceFileFactory.getReferenceSequenceFile(referenceFile, true); ReferenceSequence refSeq; - final List ret = new ArrayList(); - final Set sequenceNames = new HashSet(); + final List ret = new ArrayList<>(); + final Set sequenceNames = new HashSet<>(); for (int numSequences = 0; numSequences < NUM_SEQUENCES && (refSeq = refSeqFile.nextSequence()) != null; ++numSequences) { if (sequenceNames.contains(refSeq.getName())) { throw new UserException.MalformedFile(referenceFile, "Sequence name appears more than once in reference: " + refSeq.getName()); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/DownsampleSam.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/DownsampleSam.java index 79639281c0b..c3765ae51a1 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/DownsampleSam.java +++ 
b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/DownsampleSam.java @@ -50,7 +50,7 @@ protected Object doWork() { final Random r = RANDOM_SEED == null ? new Random() : new Random(RANDOM_SEED); final SamReader in = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT); final SAMFileWriter out = new SAMFileWriterFactory().makeSAMOrBAMWriter(in.getFileHeader(), true, OUTPUT); - final Map decisions = new HashMap(); + final Map decisions = new HashMap<>(); long total = 0; long kept = 0; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/FastqToSam.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/FastqToSam.java index 6171a080bde..eb899689400 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/FastqToSam.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/FastqToSam.java @@ -71,7 +71,7 @@ public class FastqToSam extends PicardCommandLineProgram { public String PLATFORM_MODEL; @Argument(doc="Comment(s) to include in the merged output file's header.", optional=true, shortName="CO") - public List COMMENT = new ArrayList(); + public List COMMENT = new ArrayList<>(); @Argument(shortName = "DS", doc = "Inserted into the read group header", optional = true) public String DESCRIPTION; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/FixMateInformation.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/FixMateInformation.java index fe1a6dffc94..f1a889472e1 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/FixMateInformation.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/FixMateInformation.java @@ -53,7 +53,7 @@ public class FixMateInformation extends PicardCommandLineProgram { protected Object doWork() { // Open up the input boolean allQueryNameSorted = true; - final List readers = new ArrayList(); + final List readers = new ArrayList<>(); for (final File f : INPUT) { 
IOUtil.assertFileIsReadable(f); final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(f); @@ -90,7 +90,7 @@ protected Object doWork() { // Deal with merging if necessary final Iterator tmp; if (INPUT.size() > 1) { - final List headers = new ArrayList(readers.size()); + final List headers = new ArrayList<>(readers.size()); for (final SamReader reader : readers) { headers.add(reader.getFileHeader()); } @@ -105,7 +105,7 @@ protected Object doWork() { // And now deal with re-sorting if necessary if (ASSUME_SORTED || allQueryNameSorted) { - iterator = new SamPairUtil.SetMateInfoIterator(new PeekableIterator(tmp), ADD_MATE_CIGAR); + iterator = new SamPairUtil.SetMateInfoIterator(new PeekableIterator<>(tmp), ADD_MATE_CIGAR); } else { log.info("Sorting input into queryname order."); final SortingCollection sorter = SortingCollection.newInstance(SAMRecord.class, diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/MergeSamFiles.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/MergeSamFiles.java index 7bced3fbea4..7b9bfce51f0 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/MergeSamFiles.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/MergeSamFiles.java @@ -26,7 +26,7 @@ public class MergeSamFiles extends PicardCommandLineProgram { private static final Log log = Log.getInstance(MergeSamFiles.class); @Argument(shortName = "I", doc = "SAM or BAM input file", optional=false) - public List INPUT = new ArrayList(); + public List INPUT = new ArrayList<>(); @Argument(shortName = "O", doc = "SAM or BAM file to write merged result to") public File OUTPUT; @@ -47,7 +47,7 @@ public class MergeSamFiles extends PicardCommandLineProgram { public boolean USE_THREADING = false; @Argument(doc = "Comment(s) to include in the merged output file's header.", optional = true, shortName = "CO") - public List COMMENT = new ArrayList(); + public List COMMENT = new 
ArrayList<>(); private static final int PROGRESS_INTERVAL = 1000000; @@ -57,8 +57,8 @@ protected Object doWork() { boolean matchedSortOrders = true; // Open the files for reading and writing - final List readers = new ArrayList(); - final List headers = new ArrayList(); + final List readers = new ArrayList<>(); + final List headers = new ArrayList<>(); { SAMSequenceDictionary dict = null; // Used to try and reduce redundant SDs in memory diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/ReorderSam.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/ReorderSam.java index c49088d4d3a..0bf64c199ab 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/ReorderSam.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/ReorderSam.java @@ -153,7 +153,7 @@ private void writeReads(final SAMFileWriter out, */ private Map buildSequenceDictionaryMap(final SAMSequenceDictionary refDict, final SAMSequenceDictionary readsDict) { - Map newOrder = new HashMap(); + Map newOrder = new HashMap<>(); log.info("Reordering SAM/BAM file:"); for (final SAMSequenceRecord refRec : refDict.getSequences()) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/RevertSam.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/RevertSam.java index 5a9774f8640..d97b4cdf528 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/RevertSam.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/RevertSam.java @@ -175,8 +175,8 @@ protected Object doWork() { } else { long total = 0, discarded = 0; - final PeekableIterator iterator = new PeekableIterator(sorter.iterator()); - final Map readGroupToFormat = new HashMap(); + final PeekableIterator iterator = new PeekableIterator<>(sorter.iterator()); + final Map readGroupToFormat = new HashMap<>(); // Figure out the quality score encoding scheme for each read group. 
for (final SAMReadGroupRecord rg : inHeader.getReadGroups()) { @@ -278,7 +278,7 @@ public boolean filterOut(final SAMRecord first, final SAMRecord second) { * remaining returns an empty list. */ private List fetchByReadName(final PeekableIterator iterator) { - final List out = new LinkedList(); + final List out = new LinkedList<>(); if (iterator.hasNext()) { final SAMRecord first = iterator.next(); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/SamToFastq.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/SamToFastq.java index 10fdb6f11db..e48640acdce 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/SamToFastq.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/SamToFastq.java @@ -106,7 +106,7 @@ public class SamToFastq extends PicardCommandLineProgram { protected Object doWork() { IOUtil.assertFileIsReadable(INPUT); final SamReader reader = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(INPUT); - final Map firstSeenMates = new HashMap(); + final Map firstSeenMates = new HashMap<>(); final FastqWriterFactory factory = new FastqWriterFactory(); factory.setCreateMd5(CREATE_MD5_FILE); final Map writers = generateWriters(reader.getFileHeader().getReadGroups(), factory); @@ -150,7 +150,7 @@ protected Object doWork() { CloserUtil.close(reader); // Close all the fastq writers being careful to close each one only once! 
- for (final FastqWriters writerMapping : new HashSet(writers.values())) { + for (final FastqWriters writerMapping : new HashSet<>(writers.values())) { writerMapping.closeAll(); } @@ -168,7 +168,7 @@ protected Object doWork() { private Map generateWriters(final List samReadGroupRecords, final FastqWriterFactory factory) { - final Map writerMap = new HashMap(); + final Map writerMap = new HashMap<>(); final FastqWriters fastqWriters; if (!OUTPUT_PER_RG) { @@ -202,7 +202,7 @@ private Map generateWriters(final List lazySecondOfPairWriter = new Lazy(new Lazy.LazyInitializer() { + final Lazy lazySecondOfPairWriter = new Lazy<>(new Lazy.LazyInitializer() { @Override public FastqWriter make() { return INTERLEAVE ? firstOfPairWriter : factory.newWriter(makeReadGroupFile(rg, "_2")); @@ -389,7 +389,7 @@ private FastqWriters(final FastqWriter firstOfPair, final Lazy seco /** Simple constructor; all writers are pre-initialized.. */ private FastqWriters(final FastqWriter firstOfPair, final FastqWriter secondOfPair, final FastqWriter unpaired) { - this(firstOfPair, new Lazy(new Lazy.LazyInitializer() { + this(firstOfPair, new Lazy<>(new Lazy.LazyInitializer() { @Override public FastqWriter make() { return secondOfPair; @@ -410,7 +410,7 @@ public FastqWriter getUnpaired() { } public void closeAll() { - final Set fastqWriters = new HashSet(); + final Set fastqWriters = new HashSet<>(); fastqWriters.add(firstOfPair); fastqWriters.add(unpaired); // Make sure this is a no-op if the second writer was never fetched. 
diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/EstimateLibraryComplexity.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/EstimateLibraryComplexity.java index fb1a931fb11..32a7c727d7b 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/EstimateLibraryComplexity.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/EstimateLibraryComplexity.java @@ -228,7 +228,7 @@ protected Object doWork() { log.info("Will store " + MAX_RECORDS_IN_RAM + " read pairs in memory before sorting."); - final List readGroups = new ArrayList(); + final List readGroups = new ArrayList<>(); final int recordsRead = 0; final SortingCollection sorter = SortingCollection.newInstance(PairedReadSequence.class, new PairedReadCodec(), @@ -239,7 +239,7 @@ protected Object doWork() { // Loop through the input files and pick out the read sequences etc. final ProgressLogger progress = new ProgressLogger(log, (int) 1e6, "Read"); for (final File f : INPUT) { - final Map pendingByName = new HashMap(); + final Map pendingByName = new HashMap<>(); final SamReader in = SamReaderFactory.makeDefault().referenceSequence(REFERENCE_SEQUENCE).open(f); readGroups.addAll(in.getFileHeader().getReadGroups()); @@ -290,10 +290,10 @@ protected Object doWork() { log.info("Finished reading - moving on to scanning for duplicates."); // Now go through the sorted reads and attempt to find duplicates - final PeekableIterator iterator = new PeekableIterator(sorter.iterator()); + final PeekableIterator iterator = new PeekableIterator<>(sorter.iterator()); - final Map> duplicationHistosByLibrary = new HashMap>(); - final Map> opticalHistosByLibrary = new HashMap>(); + final Map> duplicationHistosByLibrary = new HashMap<>(); + final Map> opticalHistosByLibrary = new HashMap<>(); int groupsProcessed = 0; long lastLogTime = System.currentTimeMillis(); @@ -321,8 +321,8 @@ protected Object 
doWork() { Histogram duplicationHisto = duplicationHistosByLibrary.get(library); Histogram opticalHisto = opticalHistosByLibrary.get(library); if (duplicationHisto == null) { - duplicationHisto = new Histogram("duplication_group_count", library); - opticalHisto = new Histogram("duplication_group_count", "optical_duplicates"); + duplicationHisto = new Histogram<>("duplication_group_count", library); + opticalHisto = new Histogram<>("duplication_group_count", "optical_duplicates"); duplicationHistosByLibrary.put(library, duplicationHisto); opticalHistosByLibrary.put(library, opticalHisto); } @@ -331,7 +331,7 @@ protected Object doWork() { for (int i = 0; i < seqs.size(); ++i) { final PairedReadSequence lhs = seqs.get(i); if (lhs == null) continue; - final List dupes = new ArrayList(); + final List dupes = new ArrayList<>(); for (int j = i + 1; j < seqs.size(); ++j) { final PairedReadSequence rhs = seqs.get(j); @@ -431,7 +431,7 @@ private boolean matches(final PairedReadSequence lhs, final PairedReadSequence r * identify duplicates. 
*/ List getNextGroup(final PeekableIterator iterator) { - final List group = new ArrayList(); + final List group = new ArrayList<>(); final PairedReadSequence first = iterator.next(); group.add(first); @@ -455,7 +455,7 @@ List getNextGroup(final PeekableIterator Map> splitByLibrary(final List input, final List rgs) { - final Map> out = new HashMap>(); + final Map> out = new HashMap<>(); for (final PairedReadSequence seq : input) { String library = null; if (seq.getReadGroup() != -1) { @@ -467,7 +467,7 @@ Map> splitByLibrary(final List librarySeqs = out.get(library); if (librarySeqs == null) { - librarySeqs = new ArrayList(); + librarySeqs = new ArrayList<>(); out.put(library, librarySeqs); } librarySeqs.add(seq); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicates.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicates.java index 8173554a266..3bde6845862 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicates.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicates.java @@ -356,7 +356,7 @@ private void generateDuplicateIndexes() { this.duplicateIndexes = new SortingLongCollection(maxInMemory, TMP_DIR.toArray(new File[TMP_DIR.size()])); ReadEndsForMarkDuplicates firstOfNextChunk = null; - final List nextChunk = new ArrayList(200); + final List nextChunk = new ArrayList<>(200); // First just do the pairs log.info("Traversing read pair information and detecting duplicates."); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicatesWithMateCigar.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicatesWithMateCigar.java index 728baa651be..6d4646aca57 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicatesWithMateCigar.java +++ 
b/src/main/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicatesWithMateCigar.java @@ -99,7 +99,7 @@ protected Object doWork() { final ProgressLogger progress = new ProgressLogger(log, (int) 1e6, "Read"); // Go through the records - for (final SAMRecord record : new IterableAdapter(iterator)) { + for (final SAMRecord record : new IterableAdapter<>(iterator)) { if (progress.record(record)) { iterator.logMemoryStats(log); } @@ -158,7 +158,7 @@ private void updateProgramRecord(final SAMRecord record, final Map pgIdsSeenAsPrevious = new HashSet(); + final Set pgIdsSeenAsPrevious = new HashSet<>(); // get all program record ids that are mentioned as previously seen for (final SAMProgramRecord samProgramRecord : header.getProgramRecords()) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/GatherVcfs.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/GatherVcfs.java index 7d6ac4d52ff..2b928c48396 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/GatherVcfs.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/GatherVcfs.java @@ -114,8 +114,8 @@ private static void assertSameSamplesAndValidOrdering(final List inputFile final List theseSamples = in.getFileHeader().getGenotypeSamples(); if (!samples.equals(theseSamples)) { - final SortedSet s1 = new TreeSet(samples); - final SortedSet s2 = new TreeSet(theseSamples); + final SortedSet s1 = new TreeSet<>(samples); + final SortedSet s2 = new TreeSet<>(theseSamples); s1.removeAll(theseSamples); s2.removeAll(samples); @@ -160,7 +160,7 @@ private static void gatherConventionally(final SAMSequenceDictionary sequenceDic for (final File f : inputFiles) { log.debug("Gathering from file: ", f.getAbsolutePath()); final VCFFileReader variantReader = new VCFFileReader(f, false); - final PeekableIterator variantIterator = new PeekableIterator(variantReader.iterator()); + final PeekableIterator variantIterator = new 
PeekableIterator<>(variantReader.iterator()); final VCFHeader header = variantReader.getFileHeader(); if (firstHeader == null) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/LiftoverVcf.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/LiftoverVcf.java index ee461a990d3..5ead3df9c10 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/LiftoverVcf.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/LiftoverVcf.java @@ -91,7 +91,7 @@ protected Object doWork() { log.info("Loading up the target reference genome."); final ReferenceSequenceFileWalker walker = new ReferenceSequenceFileWalker(REFERENCE_SEQUENCE); - final Map refSeqs = new HashMap(); + final Map refSeqs = new HashMap<>(); for (final SAMSequenceRecord rec: walker.getSequenceDictionary().getSequences()) { refSeqs.put(rec.getSequenceName(), walker.get(rec.getSequenceIndex()).getBases()); } @@ -140,7 +140,7 @@ protected Object doWork() { } else { // Fix the alleles if we went from positive to negative strand - final List alleles = new ArrayList(); + final List alleles = new ArrayList<>(); for (final Allele oldAllele : ctx.getAlleles()) { if (target.isPositiveStrand() || oldAllele.isSymbolic()) { alleles.add(oldAllele); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/MakeSitesOnlyVcf.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/MakeSitesOnlyVcf.java index 9360837e1f7..57883735f20 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/MakeSitesOnlyVcf.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/MakeSitesOnlyVcf.java @@ -45,7 +45,7 @@ public class MakeSitesOnlyVcf extends PicardCommandLineProgram { public File OUTPUT; @Argument(shortName = "S", doc = "Optionally one or more samples to retain when building the 'sites-only' VCF.", optional=true) - public Set SAMPLE = new TreeSet(); + public Set SAMPLE = new TreeSet<>(); public 
MakeSitesOnlyVcf() { CREATE_INDEX = true; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/MergeVcfs.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/MergeVcfs.java index 192414db7cb..f52fd8c176f 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/MergeVcfs.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/MergeVcfs.java @@ -63,9 +63,9 @@ public MergeVcfs() { @Override protected Object doWork() { final ProgressLogger progress = new ProgressLogger(log, 10000); - final List sampleList = new ArrayList(); - final Collection> iteratorCollection = new ArrayList>(INPUT.size()); - final Collection headers = new HashSet(INPUT.size()); + final List sampleList = new ArrayList<>(); + final Collection> iteratorCollection = new ArrayList<>(INPUT.size()); + final Collection headers = new HashSet<>(INPUT.size()); VariantContextComparator variantContextComparator = null; SAMSequenceDictionary sequenceDictionary = null; @@ -117,7 +117,7 @@ protected Object doWork() { writer.writeHeader(new VCFHeader(VCFUtils.smartMergeHeaders(headers, false), sampleList)); - final MergingIterator mergingIterator = new MergingIterator(variantContextComparator, iteratorCollection); + final MergingIterator mergingIterator = new MergingIterator<>(variantContextComparator, iteratorCollection); while (mergingIterator.hasNext()) { final VariantContext context = mergingIterator.next(); writer.add(context); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/SortVcf.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/SortVcf.java index 113041c2780..01fad1242bf 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/SortVcf.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/SortVcf.java @@ -49,8 +49,8 @@ public class SortVcf extends PicardCommandLineProgram { private final Log log = Log.getInstance(SortVcf.class); - private final List inputReaders 
= new ArrayList(); - private final List inputHeaders = new ArrayList(); + private final List inputReaders = new ArrayList<>(); + private final List inputHeaders = new ArrayList<>(); // Overrides the option default, including in the help message. Option remains settable on commandline. public SortVcf() { @@ -59,7 +59,7 @@ public SortVcf() { @Override protected Object doWork() { - final List sampleList = new ArrayList(); + final List sampleList = new ArrayList<>(); for (final File input : INPUT) IOUtil.assertFileIsReadable(input); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordance.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordance.java index 4f19cf55e2d..8e00ffee983 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordance.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordance.java @@ -168,7 +168,7 @@ protected Object doWork() { indelCounter = new GenotypeConcordanceCounts(); // A map to keep track of the count of Truth/Call States which we could not successfully classify - final Map unClassifiedStatesMap = new HashMap(); + final Map unClassifiedStatesMap = new HashMap<>(); log.info("Starting iteration over variants."); while (pairedIterator.hasNext()) { @@ -390,7 +390,7 @@ else if (truthRef.length() > callRef.length()) { } } - final OrderedSet allAlleles = new OrderedSet(); + final OrderedSet allAlleles = new OrderedSet<>(); if (truthContext != null || callContext != null) { // Store the refAllele as the first (0th index) allele in allAlleles (only can do if at least one context is non-null) @@ -510,9 +510,9 @@ class PairedVariantSubContextIterator implements Iterator { PairedVariantSubContextIterator(final Iterator truthIterator, final String truthSample, final Iterator callIterator, final String callSample, final SAMSequenceDictionary dict) { - this.truthIterator = 
new PeekableIterator(truthIterator); + this.truthIterator = new PeekableIterator<>(truthIterator); this.truthSample = truthSample; - this.callIterator = new PeekableIterator(callIterator); + this.callIterator = new PeekableIterator<>(callIterator); this.callSample = callSample; this.comparator = new VariantContextComparator(dict); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceCounts.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceCounts.java index 5cb071f0c21..55da1220954 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceCounts.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceCounts.java @@ -33,7 +33,7 @@ public class GenotypeConcordanceCounts { CallState.HOM_VAR1, CallState.HOM_VAR2, CallState.HOM_VAR3}; /** The underlying counts table */ - private final Histogram counter = new Histogram(); + private final Histogram counter = new Histogram<>(); /** * Increments a count for the truth/call state tuple. @@ -62,7 +62,7 @@ public void validateCountsAgainstScheme(final GenotypeConcordanceScheme scheme) } private Set getContingencyStateSet(final ContingencyState[] contingencyStateArray) { - final Set contingencyStateSet = new HashSet(); + final Set contingencyStateSet = new HashSet<>(); Collections.addAll(contingencyStateSet, contingencyStateArray); return contingencyStateSet; } @@ -220,7 +220,7 @@ public int getSum(final Set truthStateSet, final Set call * Returns the sum of all pairs of tuples defined by the truth state set and call state set. 
*/ public int getSum() { - return getSum(new HashSet(Arrays.asList(TruthState.values())), new HashSet(Arrays.asList(CallState.values()))); + return getSum(new HashSet<>(Arrays.asList(TruthState.values())), new HashSet<>(Arrays.asList(CallState.values()))); } /** @@ -229,7 +229,7 @@ public int getSum() { public Map getContingencyStateCounts(final GenotypeConcordanceScheme scheme) { scheme.validateScheme(); - final Map counts = new HashMap(); + final Map counts = new HashMap<>(); for (final ContingencyState contingencyState : ContingencyState.values()) { counts.put(contingencyState, 0); } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceScheme.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceScheme.java index 4be985a5424..1066f74ec00 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceScheme.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceScheme.java @@ -13,7 +13,7 @@ public class GenotypeConcordanceScheme { /** The underlying scheme */ - protected final Map scheme = new HashMap(); + protected final Map scheme = new HashMap<>(); /** These are convenience variables for defining a scheme. NA means that such a tuple should never be observed. 
*/ public static final ContingencyState[] NA = {ContingencyState.NA}; @@ -119,7 +119,7 @@ public String getContingencyStateString(final TruthState truthState, final CallS * @return */ public Set getContingencyStateSet(final ContingencyState[] contingencyStateArray) { - final Set contingencyStateSet = new HashSet(); + final Set contingencyStateSet = new HashSet<>(); Collections.addAll(contingencyStateSet, contingencyStateArray); return contingencyStateSet; } diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/AlleleBalanceFilter.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/AlleleBalanceFilter.java index b3178409a40..5af73c7aefc 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/AlleleBalanceFilter.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/AlleleBalanceFilter.java @@ -34,7 +34,7 @@ public List headerLines() { @Override public String filter(final VariantContext ctx) { if (ctx.getHetCount() == 0) return null; - final Map, Counts> countsMap = new HashMap, Counts>(); + final Map, Counts> countsMap = new HashMap<>(); for (final Genotype gt : ctx.getGenotypesOrderedByName()) { if (gt.isNoCall() || !gt.isHet()) continue; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/FilterApplyingVariantIterator.java b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/FilterApplyingVariantIterator.java index e5f0aa21ef9..db2c5dc75d8 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/FilterApplyingVariantIterator.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/FilterApplyingVariantIterator.java @@ -45,7 +45,7 @@ public FilterApplyingVariantIterator(final Iterator iterator, @Override public VariantContext next() { final VariantContext ctx = this.iterator.next(); - final Set filterStrings = new HashSet(); + final Set filterStrings = new HashSet<>(); // 
Collect variant level filters for (final VariantFilter filter : this.filters) { @@ -54,8 +54,8 @@ public VariantContext next() { } // Collect genotype level filters in a Map of Sample -> List - final ListMap gtFilterStrings = new ListMap(); - final Set variantSamples = new HashSet(); + final ListMap gtFilterStrings = new ListMap<>(); + final Set variantSamples = new HashSet<>(); for (final Genotype gt : ctx.getGenotypes()) { if (gt.isCalled() && !gt.isHomRef()) variantSamples.add(gt.getSampleName()); @@ -81,7 +81,7 @@ public VariantContext next() { // Apply filters to the necessary genotypes builder.noGenotypes(); - final List newGenotypes = new ArrayList(ctx.getNSamples()); + final List newGenotypes = new ArrayList<>(ctx.getNSamples()); for (final Genotype gt : ctx.getGenotypes()) { final GenotypeBuilder gtBuilder = new GenotypeBuilder(gt); final List filters = gtFilterStrings.get(gt.getSampleName()); diff --git a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/BQSRGatherer.java b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/BQSRGatherer.java index a8952feaab0..c81435c215c 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/BQSRGatherer.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/BQSRGatherer.java @@ -34,8 +34,8 @@ public void gather(final List inputs, final File output) { * @return gathered recalibration GATK report */ public static GATKReport gatherReport(final List inputs) { - final SortedSet allReadGroups = new TreeSet(); - final LinkedHashMap> inputReadGroups = new LinkedHashMap>(); + final SortedSet allReadGroups = new TreeSet<>(); + final LinkedHashMap> inputReadGroups = new LinkedHashMap<>(); // Get the read groups from each input report for (final File input : inputs) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/QualQuantizer.java b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/QualQuantizer.java index 
0cab00def58..53727311963 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/QualQuantizer.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/QualQuantizer.java @@ -203,7 +203,7 @@ public QualInterval merge(final QualInterval toMerge) { final long nCombinedErr = left.nErrors + right.nErrors; final int level = Math.max(left.level, right.level) + 1; - final Set subIntervals = new HashSet(Arrays.asList(left, right)); + final Set subIntervals = new HashSet<>(Arrays.asList(left, right)); QualInterval merged = new QualInterval(left.qStart, right.qEnd, nCombinedObs, nCombinedErr, level, subIntervals); return merged; @@ -262,7 +262,7 @@ private double calcPenalty(final double globalErrorRate) { */ private TreeSet quantize() { // create intervals for each qual individually - final TreeSet intervals = new TreeSet(); + final TreeSet intervals = new TreeSet<>(); for ( int qStart = 0; qStart < getNQualsInHistogram(); qStart++ ) { final long nObs = nObservationsPerQual.get(qStart); final double errorRate = QualityUtils.qualToErrorProb((byte)qStart); @@ -326,7 +326,7 @@ private void mergeLowestPenaltyIntervals(final TreeSet intervals) * @return */ private List intervalsToMap(final TreeSet intervals) { - final List map = new ArrayList(getNQualsInHistogram()); + final List map = new ArrayList<>(getNQualsInHistogram()); map.addAll(Collections.nCopies(getNQualsInHistogram(), Byte.MIN_VALUE)); for ( final QualInterval interval : intervals ) { for ( int q = interval.qStart; q <= interval.qEnd; q++ ) { diff --git a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/RecalUtils.java b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/RecalUtils.java index c8fac758c05..22c1e643453 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/RecalUtils.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/RecalUtils.java @@ -287,7 +287,7 @@ public static void generateCsv(final 
File out, final Map 0) { final Covariate[] nextCovariates = rit.next().getRequestedCovariates(); - final Set nextCovariateNames = new LinkedHashSet(nextCovariates.length); + final Set nextCovariateNames = new LinkedHashSet<>(nextCovariates.length); for (final Covariate nc : nextCovariates) { nextCovariateNames.add(nc.getClass().getSimpleName()); } @@ -381,19 +381,19 @@ public static SOLID_NOCALL_STRATEGY nocallStrategyFromString(String nocallStrate } private static List generateReportTables(final RecalibrationTables recalibrationTables, final Covariate[] requestedCovariates, boolean sortByCols) { - List result = new LinkedList(); + List result = new LinkedList<>(); int reportTableIndex = 0; int rowIndex = 0; - final Map covariateNameMap = new HashMap(requestedCovariates.length); + final Map covariateNameMap = new HashMap<>(requestedCovariates.length); for (final Covariate covariate : requestedCovariates) covariateNameMap.put(covariate, parseCovariateName(covariate)); for (int tableIndex = 0; tableIndex < recalibrationTables.numTables(); tableIndex++) { - final ArrayList> columnNames = new ArrayList>(); // initialize the array to hold the column names - columnNames.add(new MutablePair(covariateNameMap.get(requestedCovariates[0]), "%s")); // save the required covariate name so we can reference it in the future + final ArrayList> columnNames = new ArrayList<>(); // initialize the array to hold the column names + columnNames.add(new MutablePair<>(covariateNameMap.get(requestedCovariates[0]), "%s")); // save the required covariate name so we can reference it in the future if (tableIndex != RecalibrationTables.TableType.READ_GROUP_TABLE.ordinal()) { - columnNames.add(new MutablePair(covariateNameMap.get(requestedCovariates[1]), "%s")); // save the required covariate name so we can reference it in the future + columnNames.add(new MutablePair<>(covariateNameMap.get(requestedCovariates[1]), "%s")); // save the required covariate name so we can reference it in the future if 
(tableIndex >= RecalibrationTables.TableType.OPTIONAL_COVARIATE_TABLES_START.ordinal()) { columnNames.add(covariateValue); columnNames.add(covariateName); @@ -467,7 +467,7 @@ private static String parseCovariateName(final Covariate covariate) { * @return a non-null comma-separated string */ public static String covariateNames(final Covariate[] requestedCovariates) { - final List names = new ArrayList(requestedCovariates.length); + final List names = new ArrayList<>(requestedCovariates.length); for ( final Covariate cov : requestedCovariates ) names.add(cov.getClass().getSimpleName()); return Utils.join(",", names); @@ -568,7 +568,7 @@ private static void writeCSV(final PrintStream deltaTableFile, final Recalibrati printHeader(deltaTableFile); } - final Map covariateNameMap = new HashMap(requestedCovariates.length); + final Map covariateNameMap = new HashMap<>(requestedCovariates.length); for (final Covariate covariate : requestedCovariates) covariateNameMap.put(covariate, parseCovariateName(covariate)); @@ -583,7 +583,7 @@ private static void writeCSV(final PrintStream deltaTableFile, final Recalibrati } private static void printHeader(PrintStream out) { - final List header = new LinkedList(); + final List header = new LinkedList<>(); header.add("ReadGroup"); header.add("CovariateValue"); header.add("CovariateName"); @@ -624,11 +624,11 @@ private static NestedIntegerArray createDeltaTable(final Recalibrati dimensionsForDeltaTable[3] = Math.max(dimensionsForDeltaTable[3], dimensionsOfCovTable[3]); } - return new NestedIntegerArray(dimensionsForDeltaTable); + return new NestedIntegerArray<>(dimensionsForDeltaTable); } protected static List generateValuesFromKeys(final int[] keys, final Covariate[] covariates, final Map covariateNameMap) { - final List values = new ArrayList(4); + final List values = new ArrayList<>(4); values.add(covariates[RecalibrationTables.TableType.READ_GROUP_TABLE.ordinal()].formatKey(keys[0])); final int covariateIndex = keys[1]; diff --git 
a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/ContextCovariate.java b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/ContextCovariate.java index a75564b7f95..bd057734325 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/ContextCovariate.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/ContextCovariate.java @@ -129,7 +129,7 @@ private static int createMask(final int contextSize) { private static ArrayList contextWith(final byte[] bases, final int contextSize, final int mask) { final int readLength = bases.length; - final ArrayList keys = new ArrayList(readLength); + final ArrayList keys = new ArrayList<>(readLength); // the first contextSize-1 bases will not have enough previous context for (int i = 1; i < contextSize && i <= readLength; i++) diff --git a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/ReadGroupCovariate.java b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/ReadGroupCovariate.java index 27b67e1e47a..d1261e78ae9 100644 --- a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/ReadGroupCovariate.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/ReadGroupCovariate.java @@ -15,8 +15,8 @@ public class ReadGroupCovariate implements Covariate { - private final HashMap readGroupLookupTable = new HashMap(); - private final HashMap readGroupReverseLookupTable = new HashMap(); + private final HashMap readGroupLookupTable = new HashMap<>(); + private final HashMap readGroupReverseLookupTable = new HashMap<>(); private int nextId = 0; private String forceReadGroup; diff --git a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/RepeatCovariate.java b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/RepeatCovariate.java index aef4e94bb48..f21f7e8b81f 100644 --- 
a/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/RepeatCovariate.java +++ b/src/main/java/org/broadinstitute/hellbender/tools/recalibration/covariates/RepeatCovariate.java @@ -16,8 +16,8 @@ public abstract class RepeatCovariate implements Covariate { protected int MAX_REPEAT_LENGTH; protected int MAX_STR_UNIT_LENGTH; - private final HashMap repeatLookupTable = new HashMap(); - private final HashMap repeatReverseLookupTable = new HashMap(); + private final HashMap repeatLookupTable = new HashMap<>(); + private final HashMap repeatReverseLookupTable = new HashMap<>(); private int nextId = 0; // Initialize any member variables using the command-line arguments passed to the walkers @@ -121,7 +121,7 @@ public Pair findTandemRepeatUnits(byte[] readBases, int offset) if(maxRL > MAX_REPEAT_LENGTH) { maxRL = MAX_REPEAT_LENGTH; } - return new MutablePair(bestRepeatUnit, maxRL); + return new MutablePair<>(bestRepeatUnit, maxRL); } @Override @@ -177,7 +177,7 @@ public static Pair getRUandNRfromCovariate(final String value) { if (k == value.length() || nr <= 0) throw new IllegalStateException("Covariate is not of form (Repeat Unit) + Integer"); - return new MutablePair(value.substring(0,k), nr); + return new MutablePair<>(value.substring(0,k), nr); } /** diff --git a/src/main/java/org/broadinstitute/hellbender/utils/GenomeLoc.java b/src/main/java/org/broadinstitute/hellbender/utils/GenomeLoc.java index b4b68a1e0a8..9827d01edc8 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/GenomeLoc.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/GenomeLoc.java @@ -196,7 +196,7 @@ public final List subtract( final GenomeLoc that ) { if (equals(that)) { return Collections.emptyList(); } else if (containsP(that)) { - List l = new ArrayList(2); + List l = new ArrayList<>(2); /** * we have to create two new region, one for the before part, one for the after diff --git a/src/main/java/org/broadinstitute/hellbender/utils/GenomeLocSortedSet.java 
b/src/main/java/org/broadinstitute/hellbender/utils/GenomeLocSortedSet.java index 5caf60e3027..15736a55c69 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/GenomeLocSortedSet.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/GenomeLocSortedSet.java @@ -26,7 +26,7 @@ public class GenomeLocSortedSet extends AbstractSet { private GenomeLocParser genomeLocParser; // our private storage for the GenomeLoc's - private final List mArray = new ArrayList(); + private final List mArray = new ArrayList<>(); // cache this to make overlap checking much more efficient private int previousOverlapSearchIndex = -1; @@ -64,7 +64,7 @@ public GenomeLocSortedSet(final GenomeLocParser parser, final GenomeLoc e) { public GenomeLocSortedSet(final GenomeLocParser parser, final Collection l) { this(parser); - final ArrayList sorted = new ArrayList(l); + final ArrayList sorted = new ArrayList<>(l); Collections.sort(sorted); mArray.addAll(IntervalUtils.mergeIntervalLocations(sorted, IntervalMergingRule.OVERLAPPING_ONLY)); } @@ -201,7 +201,7 @@ public List getOverlapping(final GenomeLoc loc) { final int start = Math.max(-(index + 1) - 1, 0); final int size = mArray.size(); - final List overlapping = new LinkedList(); + final List overlapping = new LinkedList<>(); for ( int i = start; i < size; i++ ) { final GenomeLoc myLoc = mArray.get(i); if ( loc.overlapsP(myLoc) ) @@ -227,7 +227,7 @@ else if ( myLoc.isPast(loc) ) * @return a non-null list of locations that overlap loc */ protected List getOverlappingFullSearch(final GenomeLoc loc) { - final List overlapping = new LinkedList(); + final List overlapping = new LinkedList<>(); // super slow, but definitely works for ( final GenomeLoc myLoc : mArray ) { @@ -330,9 +330,9 @@ private boolean mergeOverlappingIntervalsFromAdd(final GenomeLoc loc, final int } public GenomeLocSortedSet subtractRegions(GenomeLocSortedSet toRemoveSet) { - LinkedList good = new LinkedList(); - Stack toProcess = new Stack(); - Stack toExclude = new 
Stack(); + LinkedList good = new LinkedList<>(); + Stack toProcess = new Stack<>(); + Stack toExclude = new Stack<>(); // initialize the stacks toProcess.addAll(mArray); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/Median.java b/src/main/java/org/broadinstitute/hellbender/utils/Median.java index 588211b5cc9..1fa68a71e47 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/Median.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/Median.java @@ -20,7 +20,7 @@ public Median() { public Median(final int maxValuesToKeep) { this.maxValuesToKeep = maxValuesToKeep; - this.values = new ArrayList(); + this.values = new ArrayList<>(); } public boolean isFull() { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/Utils.java b/src/main/java/org/broadinstitute/hellbender/utils/Utils.java index 73d34090fd1..935841b1842 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/Utils.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/Utils.java @@ -30,7 +30,7 @@ private Utils(){} private static Logger logger = LogManager.getLogger(Utils.class); public static List cons(final T elt, final List l) { - List l2 = new ArrayList(); + List l2 = new ArrayList<>(); l2.add(elt); if (l != null) l2.addAll(l); return l2; @@ -255,7 +255,7 @@ public int size() { */ @SafeVarargs public static List append(final List left, T ... 
elts) { - final List l = new LinkedList(left); + final List l = new LinkedList<>(left); for (T t : elts){ l.add(t); } @@ -325,7 +325,7 @@ public static > List sorted(Collection c) { } public static > List sorted(Collection c, boolean reverse) { - List l = new ArrayList(c); + List l = new ArrayList<>(c); Collections.sort(l); if ( reverse ) Collections.reverse(l); return l; @@ -336,11 +336,11 @@ public static , V> List sorted(Map c) { } public static , V> List sorted(Map c, boolean reverse) { - List t = new ArrayList(c.keySet()); + List t = new ArrayList<>(c.keySet()); Collections.sort(t); if ( reverse ) Collections.reverse(t); - List l = new ArrayList(); + List l = new ArrayList<>(); for ( T k : t ) { l.add(c.get(k)); } @@ -364,7 +364,7 @@ static public byte[] reverse(byte[] bases) { } static public List reverse(final List l) { - final List newL = new ArrayList(l); + final List newL = new ArrayList<>(l); Collections.reverse(newL); return newL; } @@ -424,7 +424,7 @@ public static void fillArrayWithByte(byte[] array, byte value) { * @return a list with all combinations with size n of objects. 
*/ public static List> makePermutations(final List objects, final int n, final boolean withReplacement) { - final List> combinations = new ArrayList>(); + final List> combinations = new ArrayList<>(); if ( n == 1 ) { for ( final T o : objects ) diff --git a/src/main/java/org/broadinstitute/hellbender/utils/clipping/ClippingOp.java b/src/main/java/org/broadinstitute/hellbender/utils/clipping/ClippingOp.java index 9ca3428aa74..e73a6eb1867 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/clipping/ClippingOp.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/clipping/ClippingOp.java @@ -286,7 +286,7 @@ private Cigar softClip(final Cigar __cigar, final int __startClipEnd, final int curLength += curElem.getLength(); } - Vector finalNewElements = new Vector(); + Vector finalNewElements = new Vector<>(); CigarElement lastElement = null; for (CigarElement elem : newElements) { if (lastElement == null || lastElement.getOperator() != elem.getOperator()) { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/collections/NestedIntegerArray.java b/src/main/java/org/broadinstitute/hellbender/utils/collections/NestedIntegerArray.java index 69ca817bbc0..b2a138041a9 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/collections/NestedIntegerArray.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/collections/NestedIntegerArray.java @@ -154,7 +154,7 @@ private void fillAllLeaves(final Object[] array, final int[] path, final List(newPath, (T)value)); + result.add(new Leaf<>(newPath, (T)value)); } } } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/gene/Gene.java b/src/main/java/org/broadinstitute/hellbender/utils/gene/Gene.java index b44be92c059..253c23003e3 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/gene/Gene.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/gene/Gene.java @@ -10,7 +10,7 @@ * The transcripts must all be relative to the same strand. 
*/ public class Gene extends Interval implements Iterable { - private final Map transcripts = new HashMap(); + private final Map transcripts = new HashMap<>(); public Gene(final String sequence, final int start, final int end, final boolean negative, final String name) { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/gene/RefFlatReader.java b/src/main/java/org/broadinstitute/hellbender/utils/gene/RefFlatReader.java index 362bc950c1d..959a3cf8460 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/gene/RefFlatReader.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/gene/RefFlatReader.java @@ -39,12 +39,12 @@ static OverlapDetector load(final File refFlatFile, final SAMSequenceDicti } OverlapDetector load() { - final OverlapDetector overlapDetector = new OverlapDetector(0, 0); + final OverlapDetector overlapDetector = new OverlapDetector<>(0, 0); final int expectedColumns = RefFlatColumns.values().length; final TabbedTextFileWithHeaderParser parser = new TabbedTextFileWithHeaderParser(refFlatFile, RefFlatColumnLabels); final Map> refFlatLinesByGene = - new HashMap>(); + new HashMap<>(); for (final TabbedTextFileWithHeaderParser.Row row : parser) { final int lineNumber = parser.getCurrentLineNumber(); // getCurrentLineNumber returns the number of the next line @@ -60,7 +60,7 @@ OverlapDetector load() { } else { List transcriptLines = refFlatLinesByGene.get(geneName); if (transcriptLines == null) { - transcriptLines = new ArrayList(); + transcriptLines = new ArrayList<>(); refFlatLinesByGene.put(geneName, transcriptLines); } transcriptLines.add(row); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/haplotype/EventMap.java b/src/main/java/org/broadinstitute/hellbender/utils/haplotype/EventMap.java index 6b6f8075e5c..c92b14e2798 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/haplotype/EventMap.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/haplotype/EventMap.java @@ -75,7 +75,7 @@ protected 
void processCigarForInitialEvents() { case I: { if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig - final List insertionAlleles = new ArrayList(); + final List insertionAlleles = new ArrayList<>(); final int insertionStart = refLoc.getStart() + refPos - 1; final byte refByte = ref[refPos-1]; if( BaseUtils.isRegularBase(refByte) ) { @@ -108,7 +108,7 @@ protected void processCigarForInitialEvents() { { if( refPos > 0 ) { // protect against trying to create insertions/deletions at the beginning of a contig final byte[] deletionBases = Arrays.copyOfRange( ref, refPos - 1, refPos + elementLength ); // add padding base - final List deletionAlleles = new ArrayList(); + final List deletionAlleles = new ArrayList<>(); final int deletionStart = refLoc.getStart() + refPos - 1; final byte refByte = ref[refPos-1]; if( BaseUtils.isRegularBase(refByte) && BaseUtils.isAllRegularBases(deletionBases) ) { @@ -129,7 +129,7 @@ protected void processCigarForInitialEvents() { final byte altByte = alignment[alignmentPos]; if( refByte != altByte ) { // SNP! 
if( BaseUtils.isRegularBase(refByte) && BaseUtils.isRegularBase(altByte) ) { - final List snpAlleles = new ArrayList(); + final List snpAlleles = new ArrayList<>(); snpAlleles.add( Allele.create( refByte, true ) ); snpAlleles.add( Allele.create( altByte, false ) ); proposedEvents.add(new VariantContextBuilder(sourceNameToAdd, refLoc.getContig(), refLoc.getStart() + refPos, refLoc.getStart() + refPos, snpAlleles).make()); @@ -283,7 +283,7 @@ protected boolean updateToBlockSubstitutionIfBetter(final List n * @return a list that contains at least one element (leftMost) */ protected List getNeighborhood(final VariantContext leftMost, final int maxBPBetweenEvents) { - final List neighbors = new LinkedList(); + final List neighbors = new LinkedList<>(); VariantContext left = leftMost; for ( final VariantContext vc : getVariantContexts() ) { @@ -348,7 +348,7 @@ public static TreeSet buildEventMapsForHaplotypes( final List startPosKeySet = new TreeSet(); + final TreeSet startPosKeySet = new TreeSet<>(); int hapNumber = 0; if( debug ) logger.info("=== Best Haplotypes ==="); @@ -381,7 +381,7 @@ public int compare(VariantContext vc1, VariantContext vc2) { */ public static SortedSet getAllVariantContexts( final List haplotypes ) { // Using the cigar from each called haplotype figure out what events need to be written out in a VCF file - final TreeSet vcs = new TreeSet(new VariantContextComparator()); + final TreeSet vcs = new TreeSet<>(new VariantContextComparator()); for( final Haplotype h : haplotypes ) { vcs.addAll(h.getEventMap().getVariantContexts()); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/illumina/AdapterMarker.java b/src/main/java/org/broadinstitute/hellbender/utils/illumina/AdapterMarker.java index a1abb016e0e..1068db273fc 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/illumina/AdapterMarker.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/illumina/AdapterMarker.java @@ -33,12 +33,12 @@ public class AdapterMarker { // 
This is AtomicReference because one thread could be matching adapters while the threshold has been crossed in another // thread and the array is being replaced. - private final AtomicReference adapters = new AtomicReference(); + private final AtomicReference adapters = new AtomicReference<>(); // All the members below are only accessed within a synchronized block. private boolean thresholdReached = false; private int numAdaptersSeen = 0; - private final CollectionUtil.DefaultingMap seenCounts = new CollectionUtil.DefaultingMap(0); + private final CollectionUtil.DefaultingMap seenCounts = new CollectionUtil.DefaultingMap<>(0); /** * Truncates adapters to DEFAULT_ADAPTER_LENGTH @@ -54,7 +54,7 @@ public AdapterMarker(final AdapterPair... originalAdapters) { */ public AdapterMarker(final int adapterLength, final AdapterPair... originalAdapters) { // Truncate each AdapterPair to the given length, and then combine any that end up the same after truncation. - final ArrayList truncatedAdapters = new ArrayList(); + final ArrayList truncatedAdapters = new ArrayList<>(); for (final AdapterPair adapter : originalAdapters) { final TruncatedAdapterPair truncatedAdapter = makeTruncatedAdapterPair(adapter, adapterLength); final int matchingIndex = truncatedAdapters.indexOf(truncatedAdapter); @@ -218,7 +218,7 @@ private void tallyFoundAdapter(final AdapterPair foundAdapter) { if (numAdaptersSeen >= thresholdForSelectingAdaptersToKeep) { // Sort adapters by number of times each has been seen. - final TreeMap sortedAdapters = new TreeMap(new Comparator() { + final TreeMap sortedAdapters = new TreeMap<>(new Comparator() { @Override public int compare(final Integer integer, final Integer integer2) { // Reverse of natural ordering @@ -230,7 +230,7 @@ public int compare(final Integer integer, final Integer integer2) { } // Keep the #numAdaptersToKeep adapters that have been seen the most, plus any ties. 
- final ArrayList bestAdapters = new ArrayList(numAdaptersToKeep); + final ArrayList bestAdapters = new ArrayList<>(numAdaptersToKeep); int countOfLastAdapter = Integer.MAX_VALUE; for (final Map.Entry entry : sortedAdapters.entrySet()) { if (bestAdapters.size() >= numAdaptersToKeep) { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/io/IOUtils.java b/src/main/java/org/broadinstitute/hellbender/utils/io/IOUtils.java index d75b493373a..b2d1fa1c7ea 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/io/IOUtils.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/io/IOUtils.java @@ -126,7 +126,7 @@ public static File absolute(File file) { private static String absolutePath(File file) { File fileAbs = file.getAbsoluteFile(); - LinkedList names = new LinkedList(); + LinkedList names = new LinkedList<>(); while (fileAbs != null) { String name = fileAbs.getName(); fileAbs = fileAbs.getParentFile(); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/iterators/MarkDuplicatesWithMateCigarIterator.java b/src/main/java/org/broadinstitute/hellbender/utils/iterators/MarkDuplicatesWithMateCigarIterator.java index 8522d0a0dbe..ffc378b8da1 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/iterators/MarkDuplicatesWithMateCigarIterator.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/iterators/MarkDuplicatesWithMateCigarIterator.java @@ -101,8 +101,8 @@ public MarkDuplicatesWithMateCigarIterator(final SAMFileHeader header, } this.header = header; - backingIterator = new PeekableIterator(iterator); - outputBuffer = new SamRecordTrackingBuffer(maxRecordsInRam, blockSize, tmpDirs, header, SamRecordWithOrdinalAndSetDuplicateReadFlag.class); + backingIterator = new PeekableIterator<>(iterator); + outputBuffer = new SamRecordTrackingBuffer<>(maxRecordsInRam, blockSize, tmpDirs, header, SamRecordWithOrdinalAndSetDuplicateReadFlag.class); this.removeDuplicates = removeDuplicates; this.skipPairsWithNoMateCigar = 
skipPairsWithNoMateCigar; @@ -563,7 +563,7 @@ private boolean tryPollingTheToMarkQueue(final boolean flush, final ReadEndsForM final Set locations = toMarkQueue.getLocations(next); if (!locations.isEmpty()) { - AbstractMarkDuplicatesCommandLineProgram.trackOpticalDuplicates(new ArrayList(locations), + AbstractMarkDuplicatesCommandLineProgram.trackOpticalDuplicates(new ArrayList<>(locations), opticalDuplicateFinder, libraryIdGenerator); } } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/pileup/PileupElement.java b/src/main/java/org/broadinstitute/hellbender/utils/pileup/PileupElement.java index 437f7943493..d2ba59bbc28 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/pileup/PileupElement.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/pileup/PileupElement.java @@ -328,7 +328,7 @@ private LinkedList getBetween(final Direction direction) { else { // optimization: don't allocate list if not necessary if ( elements == null ) - elements = new LinkedList(); + elements = new LinkedList<>(); if ( increment > 0 ) // to keep the list in the right order, if we are incrementing positively add to the end diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/AlignmentUtils.java b/src/main/java/org/broadinstitute/hellbender/utils/read/AlignmentUtils.java index e33ff4d8403..73a8998025c 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/AlignmentUtils.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/AlignmentUtils.java @@ -744,7 +744,7 @@ protected static boolean cigarHasZeroSizeElement(final Cigar c) { * @return a newly allocated, cleaned up Cigar */ public static Cigar cleanUpCigar(final Cigar c) { - final List elements = new ArrayList(c.numCigarElements() - 1); + final List elements = new ArrayList<>(c.numCigarElements() - 1); for (final CigarElement ce : c.getCigarElements()) { if (ce.getLength() != 0 && (! 
elements.isEmpty() || ce.getOperator() != CigarOperator.D)) { @@ -779,7 +779,7 @@ public static Cigar removeTrailingDeletions(final Cigar c) { */ private static Cigar moveCigarLeft(Cigar cigar, int indexOfIndel) { // get the first few elements - ArrayList elements = new ArrayList(cigar.numCigarElements()); + ArrayList elements = new ArrayList<>(cigar.numCigarElements()); for (int i = 0; i < indexOfIndel - 1; i++) elements.add(cigar.getCigarElement(i)); @@ -923,7 +923,7 @@ public static Cigar trimCigarByBases(final Cigar cigar, final int start, final i */ @SuppressWarnings("fallthrough") private static Cigar trimCigar(final Cigar cigar, final int start, final int end, final boolean byReference) { - final List newElements = new LinkedList(); + final List newElements = new LinkedList<>(); int pos = 0; for ( final CigarElement elt : cigar.getCigarElements() ) { @@ -1057,7 +1057,7 @@ public static int calcFirstBaseMatchingReferenceInCigar(final Cigar cigar, int r public static Cigar applyCigarToCigar(final Cigar firstToSecond, final Cigar secondToThird) { final boolean DEBUG = false; - final List newElements = new LinkedList(); + final List newElements = new LinkedList<>(); final int nElements12 = firstToSecond.getCigarElements().size(); final int nElements23 = secondToThird.getCigarElements().size(); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/ArtificialSAMUtils.java b/src/main/java/org/broadinstitute/hellbender/utils/read/ArtificialSAMUtils.java index 5a680de1826..503b72525c9 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/ArtificialSAMUtils.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/ArtificialSAMUtils.java @@ -92,7 +92,7 @@ public static SAMRecord createArtificialRead(SAMFileHeader header, String name, record.setReadName(name); record.setReferenceIndex(refIndex); record.setAlignmentStart(alignmentStart); - List elements = new ArrayList(); + List elements = new ArrayList<>(); elements.add(new 
CigarElement(length, CigarOperator.characterToEnum('M'))); record.setCigar(new Cigar(elements)); record.setProperPairFlag(false); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/CigarUtils.java b/src/main/java/org/broadinstitute/hellbender/utils/read/CigarUtils.java index aa8807781e4..fabcc5a4a9e 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/CigarUtils.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/CigarUtils.java @@ -76,7 +76,7 @@ public static Cigar invertCigar (Cigar cigar) { public static boolean isCigarValid(Cigar cigar) { if (cigar.isValid(null, -1) == null) { // This should take care of most invalid Cigar Strings (picard's "exhaustive" implementation) - Stack cigarElementStack = new Stack(); // Stack to invert cigar string to find ending operator + Stack cigarElementStack = new Stack<>(); // Stack to invert cigar string to find ending operator CigarOperator startingOp = null; CigarOperator endingOp = null; diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/ReadUtils.java b/src/main/java/org/broadinstitute/hellbender/utils/read/ReadUtils.java index d02ef17fcff..86be66ceb30 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/ReadUtils.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/ReadUtils.java @@ -361,7 +361,7 @@ public static Pair getReadCoordinateForReferenceCoordinate(fin readBases += nextCigarElement.getLength(); if (!cigarElementIterator.hasNext()) { if (allowGoalNotReached) { - return new MutablePair(CLIPPING_GOAL_NOT_REACHED, false); + return new MutablePair<>(CLIPPING_GOAL_NOT_REACHED, false); } else { throw new GATKException(String.format("Reference coordinate corresponds to a non-existent base in the read. 
This should never happen -- check read with alignment start: %s and cigar: %s", alignmentStart, cigar)); } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/SamComparison.java b/src/main/java/org/broadinstitute/hellbender/utils/read/SamComparison.java index 24f0fbcc6d5..62eff3d7833 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/SamComparison.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/SamComparison.java @@ -83,8 +83,8 @@ private boolean compareCoordinateSortedAlignments() { new SecondaryOrSupplementarySkippingIterator(reader2.iterator()); // Save any reads which haven't been matched during in-order scan. - final Map leftUnmatched = new HashMap(); - final Map rightUnmatched = new HashMap(); + final Map leftUnmatched = new HashMap<>(); + final Map rightUnmatched = new HashMap<>(); while (itLeft.hasCurrent()) { if (!itRight.hasCurrent()) { @@ -105,7 +105,7 @@ private boolean compareCoordinateSortedAlignments() { // Don't assume stability of order beyond the coordinate. Therefore grab all the // reads from the left that has the same coordinate. 
final SAMRecord left = itLeft.getCurrent(); - final Map leftCurrentCoordinate = new HashMap(); + final Map leftCurrentCoordinate = new HashMap<>(); final PrimaryAlignmentKey leftKey = new PrimaryAlignmentKey(left); leftCurrentCoordinate.put(leftKey, left); while (itLeft.advance()) { @@ -232,7 +232,7 @@ private boolean compareUnsortedAlignments() { final SecondaryOrSupplementarySkippingIterator it1 = new SecondaryOrSupplementarySkippingIterator(reader1.iterator()); final SecondaryOrSupplementarySkippingIterator it2 = new SecondaryOrSupplementarySkippingIterator(reader2.iterator()); - final HashMap leftUnmatched = new HashMap(); + final HashMap leftUnmatched = new HashMap<>(); for (; it1.hasCurrent(); it1.advance()) { final SAMRecord left = it1.getCurrent(); final PrimaryAlignmentKey leftKey = new PrimaryAlignmentKey(left); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/AbstractMarkDuplicatesCommandLineProgram.java b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/AbstractMarkDuplicatesCommandLineProgram.java index 94085dff178..dd84b8041cb 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/AbstractMarkDuplicatesCommandLineProgram.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/AbstractMarkDuplicatesCommandLineProgram.java @@ -56,7 +56,7 @@ public abstract class AbstractMarkDuplicatesCommandLineProgram extends AbstractO @Argument(shortName = "CO", doc = "Comment(s) to include in the output file's header.", optional = true) - public List COMMENT = new ArrayList(); + public List COMMENT = new ArrayList<>(); @Argument(doc = "If true do not write duplicates to the output file instead of writing them with appropriate flags set.") public boolean REMOVE_DUPLICATES = false; @@ -69,7 +69,7 @@ public abstract class AbstractMarkDuplicatesCommandLineProgram extends AbstractO public DuplicateScoringStrategy.ScoringStrategy DUPLICATE_SCORING_STRATEGY = 
ScoringStrategy.TOTAL_MAPPED_REFERENCE_LENGTH; /** The program groups that have been seen during the course of examining the input records. */ - protected final Set pgIdsSeen = new HashSet(); + protected final Set pgIdsSeen = new HashSet<>(); /** * We have to re-chain the program groups based on this algorithm. This returns the map from existing program group ID @@ -86,7 +86,7 @@ protected Map getChainedPgIds(final SAMFileHeader outputHeader) if (PROGRAM_GROUP_COMMAND_LINE == null) { PROGRAM_GROUP_COMMAND_LINE = this.getCommandLine(); } - chainedPgIds = new HashMap(); + chainedPgIds = new HashMap<>(); for (final String existingId : this.pgIdsSeen) { final String newPgId = pgIdGenerator.getNonCollidingId(PROGRAM_RECORD_ID); chainedPgIds.put(existingId, newPgId); @@ -146,7 +146,7 @@ protected void finalizeAndWriteMetrics(final LibraryIdGenerator libraryIdGenerat static class PgIdGenerator { private int recordCounter; - private final Set idsThatAreAlreadyTaken = new HashSet(); + private final Set idsThatAreAlreadyTaken = new HashSet<>(); PgIdGenerator(final SAMFileHeader header) { for (final SAMProgramRecord pgRecord : header.getProgramRecords()) { @@ -194,8 +194,8 @@ public SamHeaderAndIterator(final SAMFileHeader header, final CloseableIterator< * and checking of the inputs. 
*/ protected SamHeaderAndIterator openInputs() { - final List headers = new ArrayList(INPUT.size()); - final List readers = new ArrayList(INPUT.size()); + final List headers = new ArrayList<>(INPUT.size()); + final List readers = new ArrayList<>(INPUT.size()); for (final File f : INPUT) { final SamReader reader = SamReaderFactory.makeDefault().enable(SamReaderFactory.Option.EAGERLY_DECODE).open(f); // eager decode @@ -239,8 +239,8 @@ public static void trackOpticalDuplicates(List ends, // Check if we need to partition since the orientations could have changed if (hasFR && hasRF) { // need to track them independently // Variables used for optical duplicate detection and tracking - final List trackOpticalDuplicatesF = new ArrayList(); - final List trackOpticalDuplicatesR = new ArrayList(); + final List trackOpticalDuplicatesF = new ArrayList<>(); + final List trackOpticalDuplicatesR = new ArrayList<>(); // Split into two lists: first of pairs and second of pairs, since they must have orientation and same starting end for (final ReadEnds end : ends) { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/DiskBasedReadEndsForMarkDuplicatesMap.java b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/DiskBasedReadEndsForMarkDuplicatesMap.java index 15a6bd1d4ce..4080bf8c231 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/DiskBasedReadEndsForMarkDuplicatesMap.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/DiskBasedReadEndsForMarkDuplicatesMap.java @@ -35,7 +35,7 @@ public class DiskBasedReadEndsForMarkDuplicatesMap implements ReadEndsForMarkDup private final CoordinateSortedPairInfoMap pairInfoMap; public DiskBasedReadEndsForMarkDuplicatesMap(int maxOpenFiles) { - pairInfoMap = new CoordinateSortedPairInfoMap(maxOpenFiles, new Codec()); + pairInfoMap = new CoordinateSortedPairInfoMap<>(maxOpenFiles, new Codec()); } public ReadEndsForMarkDuplicates 
remove(int mateSequenceIndex, String key) { @@ -69,7 +69,7 @@ public Map.Entry decode() { try { final String key = readEndsForMarkDuplicatesCodec.getInputStream().readUTF(); final ReadEndsForMarkDuplicates record = readEndsForMarkDuplicatesCodec.decode(); - return new AbstractMap.SimpleEntry(key, record); + return new AbstractMap.SimpleEntry<>(key, record); } catch (IOException e) { throw new GATKException("Error loading ReadEndsForMarkDuplicatesMap from disk", e); } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/DuplicationMetrics.java b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/DuplicationMetrics.java index 98e896f0350..ce2fd4012d4 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/DuplicationMetrics.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/DuplicationMetrics.java @@ -129,7 +129,7 @@ public Histogram calculateRoiHistogram() { } long uniquePairs = READ_PAIRS_EXAMINED - READ_PAIR_DUPLICATES; - Histogram histo = new Histogram(); + Histogram histo = new Histogram<>(); for (double x=1; x<=100; x+=1) { histo.increment(x, estimateRoi(ESTIMATED_LIBRARY_SIZE, x, READ_PAIRS_EXAMINED, uniquePairs)); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/LibraryIdGenerator.java b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/LibraryIdGenerator.java index 1b2017935f0..8c3fba165fb 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/LibraryIdGenerator.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/LibraryIdGenerator.java @@ -16,10 +16,10 @@ public class LibraryIdGenerator { private final SAMFileHeader header; - private final Map libraryIds = new HashMap(); // from library string to library id + private final Map libraryIds = new HashMap<>(); // from library string to library id private short nextLibraryId = 1; - private final Map 
metricsByLibrary = new HashMap(); - private final Histogram opticalDuplicatesByLibraryId = new Histogram(); + private final Map metricsByLibrary = new HashMap<>(); + private final Histogram opticalDuplicatesByLibraryId = new Histogram<>(); public LibraryIdGenerator(final SAMFileHeader header) { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/MarkQueue.java b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/MarkQueue.java index 66ac8e5b069..e05ba36ab27 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/MarkQueue.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/MarkQueue.java @@ -82,14 +82,14 @@ public int compare(final ReadEndsForMateCigar lhs, final ReadEndsForMateCigar rh private int numDuplicates = 0; /** The nonDuplicateReadEndsSet of all read ends sorted by 5' start unclipped position. Some read ends in this nonDuplicateReadEndsSet may eventually be duplicates. */ - private final TreeSet nonDuplicateReadEndsSet = new TreeSet(new MarkQueueComparator()); + private final TreeSet nonDuplicateReadEndsSet = new TreeSet<>(new MarkQueueComparator()); /** * Reads in the main nonDuplicateReadEndsSet may occasionally have mates with the same chromosome, coordinate, and orientation, causing collisions * We store the 'best' end of the mate pair in the main nonDuplicateReadEndsSet, and the other end in this nonDuplicateReadEndsSet. We only remove from this.otherEndOfNonDuplicateReadEndsSet when * we remove something from this.nonDuplicateReadEndsSet. */ - private final TreeSet otherEndOfNonDuplicateReadEndsSet = new TreeSet(new MarkQueueComparator()); + private final TreeSet otherEndOfNonDuplicateReadEndsSet = new TreeSet<>(new MarkQueueComparator()); /** * If we have two items that are the same with respect to being in the "nonDuplicateReadEndsSet", then we must choose one. 
The "one" will diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/MemoryBasedReadEndsForMarkDuplicatesMap.java b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/MemoryBasedReadEndsForMarkDuplicatesMap.java index 384b144e45e..9e01887b841 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/MemoryBasedReadEndsForMarkDuplicatesMap.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/MemoryBasedReadEndsForMarkDuplicatesMap.java @@ -16,7 +16,7 @@ class MemoryBasedReadEndsForMarkDuplicatesMap implements ReadEndsForMarkDuplicat * Index of this list is sequence index. Value is map from String {read group id:read name} to ReadEnds. * When a ReadEnds is put into this container, it is stored according to the sequenceIndex of the mate */ - private List> mapPerSequence = new ArrayList>(); + private List> mapPerSequence = new ArrayList<>(); public ReadEndsForMarkDuplicates remove(int mateSequenceIndex, String key) { if (mateSequenceIndex >= mapPerSequence.size()) { @@ -27,7 +27,7 @@ public ReadEndsForMarkDuplicates remove(int mateSequenceIndex, String key) { public void put(int mateSequenceIndex, String key, ReadEndsForMarkDuplicates readEnds) { while (mateSequenceIndex >= mapPerSequence.size()) { - mapPerSequence.add(new HashMap()); + mapPerSequence.add(new HashMap<>()); } mapPerSequence.get(mateSequenceIndex).put(key, readEnds); } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/PhysicalLocationForMateCigarSet.java b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/PhysicalLocationForMateCigarSet.java index 263c22fdfaa..f24def74703 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/PhysicalLocationForMateCigarSet.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/markduplicates/PhysicalLocationForMateCigarSet.java @@ -12,8 +12,8 @@ public class 
PhysicalLocationForMateCigarSet { /** * We want to return a set of ReadEnds but want to compare based on physical location, hence we store two sets. */ - private final Set readEnds = new HashSet(); - private final Set physicalLocations = new HashSet(); + private final Set readEnds = new HashSet<>(); + private final Set physicalLocations = new HashSet<>(); public PhysicalLocationForMateCigarSet() {} diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/AbstractAlignmentMerger.java b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/AbstractAlignmentMerger.java index 12a588227a0..1fb1b887016 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/AbstractAlignmentMerger.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/AbstractAlignmentMerger.java @@ -52,8 +52,8 @@ public abstract class AbstractAlignmentMerger { private SAMProgramRecord programRecord; private final boolean alignedReadsOnly; private final SAMFileHeader header; - private final List attributesToRetain = new ArrayList(); - private final List attributesToRemove = new ArrayList(); + private final List attributesToRetain = new ArrayList<>(); + private final List attributesToRemove = new ArrayList<>(); protected final File referenceFasta; private final Integer read1BasesTrimmed; private final Integer read2BasesTrimmed; diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/BestMapqPrimaryAlignmentSelectionStrategy.java b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/BestMapqPrimaryAlignmentSelectionStrategy.java index f222df5b007..6d915fb27e1 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/BestMapqPrimaryAlignmentSelectionStrategy.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/BestMapqPrimaryAlignmentSelectionStrategy.java @@ -37,7 +37,7 @@ public void pickPrimaryAlignment(final 
HitsForInsert hits) { // Need to use selected strategy for picking primary. // Find all the hits with the best MAPQ. - final List primaryAlignmentIndices = new ArrayList(hits.numHits()); + final List primaryAlignmentIndices = new ArrayList<>(hits.numHits()); int bestMapQ = -1; for (int i = 0; i < hits.numHits(); ++i) { final int firstEndMapq; diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/EarliestFragmentPrimaryAlignmentSelectionStrategy.java b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/EarliestFragmentPrimaryAlignmentSelectionStrategy.java index bb28ae48101..8440c7d44a3 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/EarliestFragmentPrimaryAlignmentSelectionStrategy.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/EarliestFragmentPrimaryAlignmentSelectionStrategy.java @@ -24,7 +24,7 @@ public void pickPrimaryAlignment(final HitsForInsert hitsForInsert) { if (hitsForInsert.numHits() == 0) throw new IllegalArgumentException("No alignments to pick from"); // Gather the earliest alignment(s) with best MAPQ - final List earliestAlignments = new ArrayList(); + final List earliestAlignments = new ArrayList<>(); int earliestMappedBase = Integer.MAX_VALUE; int bestMapQ = -1; for (int i = 0; i < hitsForInsert.numHits(); ++i) { diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/HitsForInsert.java b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/HitsForInsert.java index 569ea367887..70463a78ef9 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/HitsForInsert.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/HitsForInsert.java @@ -33,11 +33,11 @@ public enum NumPrimaryAlignmentState { // These are package-visible to make life easier for the PrimaryAlignmentSelectionStrategies. 
- final List firstOfPairOrFragment = new ArrayList(); - final List secondOfPair = new ArrayList(); + final List firstOfPairOrFragment = new ArrayList<>(); + final List secondOfPair = new ArrayList<>(); - private final List supplementalFirstOfPairOrFragment = new ArrayList(); - private final List supplementalSecondOfPair = new ArrayList(); + private final List supplementalFirstOfPairOrFragment = new ArrayList<>(); + private final List supplementalSecondOfPair = new ArrayList<>(); /** * @throws if numHits() == 0 diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/MostDistantPrimaryAlignmentSelectionStrategy.java b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/MostDistantPrimaryAlignmentSelectionStrategy.java index ae25f0d85fc..c6f5377d776 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/MostDistantPrimaryAlignmentSelectionStrategy.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/MostDistantPrimaryAlignmentSelectionStrategy.java @@ -25,7 +25,7 @@ public void pickPrimaryAlignment(final HitsForInsert hitsForInsert) { final BestEndAlignmentsAccumulator firstEndBest = new BestEndAlignmentsAccumulator(); final BestEndAlignmentsAccumulator secondEndBest = new BestEndAlignmentsAccumulator(); final CollectionUtil.MultiMap firstEndBySequence = - new CollectionUtil.MultiMap(); + new CollectionUtil.MultiMap<>(); final BestPairAlignmentsAccumulator pairBest = new BestPairAlignmentsAccumulator(); for (final SAMRecord rec : hitsForInsert.firstOfPairOrFragment) { @@ -110,7 +110,7 @@ private void moveToHead(final List list, final SAMRecord rec) { private static class BestEndAlignmentsAccumulator { public int bestMapq = -1; - public List bestAlignments = new ArrayList(); + public List bestAlignments = new ArrayList<>(); public void considerBest(final SAMRecord rec) { if (bestMapq == -1) { @@ -137,7 +137,7 @@ private static class BestPairAlignmentsAccumulator { 
public int bestDistance = -1; public int bestPairMapq = -1; public List> bestAlignmentPairs = - new ArrayList>(); + new ArrayList<>(); public void considerBest(final SAMRecord firstEnd, final SAMRecord secondEnd) { final int thisPairMapq = SAMUtils.combineMapqs(firstEnd.getMappingQuality(), secondEnd.getMappingQuality()); @@ -147,9 +147,9 @@ public void considerBest(final SAMRecord firstEnd, final SAMRecord secondEnd) { bestDistance = thisDistance; bestPairMapq = thisPairMapq; bestAlignmentPairs.clear(); - bestAlignmentPairs.add(new AbstractMap.SimpleEntry(firstEnd, secondEnd)); + bestAlignmentPairs.add(new AbstractMap.SimpleEntry<>(firstEnd, secondEnd)); } else if (thisDistance == bestDistance && thisPairMapq == bestPairMapq) { - bestAlignmentPairs.add(new AbstractMap.SimpleEntry(firstEnd, secondEnd)); + bestAlignmentPairs.add(new AbstractMap.SimpleEntry<>(firstEnd, secondEnd)); } } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/MultiHitAlignedReadIterator.java b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/MultiHitAlignedReadIterator.java index 1a041bbc256..c7b13ef6a56 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/MultiHitAlignedReadIterator.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/MultiHitAlignedReadIterator.java @@ -38,7 +38,7 @@ public class MultiHitAlignedReadIterator implements CloseableIterator querynameOrderIterator, final PrimaryAlignmentSelectionStrategy primaryAlignmentSelectionStrategy) { this.primaryAlignmentSelectionStrategy = primaryAlignmentSelectionStrategy; - peekIterator = new PeekableIterator(new FilteringIterator(querynameOrderIterator, + peekIterator = new PeekableIterator<>(new FilteringIterator(querynameOrderIterator, new SamRecordFilter() { // Filter unmapped reads. 
public boolean filterOut(final SAMRecord record) { @@ -165,7 +165,7 @@ private void replaceHardWithSoftClips(final SAMRecord rec) { System.arraycopy(rec.getBaseQualities(), 0, quals, startHardClip, rec.getBaseQualities().length); // Fix the cigar! - elements = new ArrayList(elements); // make it modifiable + elements = new ArrayList<>(elements); // make it modifiable if (startHardClip > 0) elements.set(0, new CigarElement(first.getLength(), CigarOperator.S)); if (endHardClip > 0) elements.set(elements.size()-1, new CigarElement(last.getLength(), CigarOperator.S)); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/SamAlignmentMerger.java b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/SamAlignmentMerger.java index 0f06f1a957d..86dec43b466 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/SamAlignmentMerger.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/read/mergealignment/SamAlignmentMerger.java @@ -135,8 +135,8 @@ protected CloseableIterator getQuerynameSortedAlignedRecords() { // When the alignment records, including both ends of a pair, are in SAM files if (alignedSamFile != null && alignedSamFile.size() > 0) { - final List headers = new ArrayList(alignedSamFile.size()); - final List readers = new ArrayList(alignedSamFile.size()); + final List headers = new ArrayList<>(alignedSamFile.size()); + final List readers = new ArrayList<>(alignedSamFile.size()); for (final File f : this.alignedSamFile) { final SamReader r = SamReaderFactory.makeDefault().referenceSequence(referenceFasta).open(f); headers.add(r.getFileHeader()); @@ -239,9 +239,9 @@ private class SeparateEndAlignmentIterator implements CloseableIterator read1Alignments, final List read2Alignments, File referenceFasta) { - final List headers = new ArrayList(); - final List read1 = new ArrayList(read1Alignments.size()); - final List read2 = new ArrayList(read2Alignments.size()); + final List headers = new 
ArrayList<>(); + final List read1 = new ArrayList<>(read1Alignments.size()); + final List read2 = new ArrayList<>(read2Alignments.size()); for (final File f : read1Alignments) { final SamReader r = SamReaderFactory.makeDefault().referenceSequence(referenceFasta).open(f); headers.add(r.getFileHeader()); @@ -254,9 +254,9 @@ public SeparateEndAlignmentIterator(final List read1Alignments, final List } final SamFileHeaderMerger headerMerger = new SamFileHeaderMerger(SAMFileHeader.SortOrder.coordinate, headers, false); - read1Iterator = new PeekableIterator( + read1Iterator = new PeekableIterator<>( new SuffixTrimingSamRecordIterator(new MergingSamRecordIterator(headerMerger, read1, true), "/1")); - read2Iterator = new PeekableIterator( + read2Iterator = new PeekableIterator<>( new SuffixTrimingSamRecordIterator(new MergingSamRecordIterator(headerMerger, read2, true), "/2")); header = headerMerger.getMergedHeader(); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/report/GATKReportDataType.java b/src/main/java/org/broadinstitute/hellbender/utils/report/GATKReportDataType.java index 06bcb07e519..6b996074bf3 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/report/GATKReportDataType.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/report/GATKReportDataType.java @@ -49,7 +49,7 @@ private GATKReportDataType(java.lang.String dataTypeString) { this.dataTypeString = dataTypeString; } - private static final Map lookup = new HashMap(); + private static final Map lookup = new HashMap<>(); static { for (GATKReportDataType s : EnumSet.allOf(GATKReportDataType.class)) diff --git a/src/main/java/org/broadinstitute/hellbender/utils/report/GATKReportTable.java b/src/main/java/org/broadinstitute/hellbender/utils/report/GATKReportTable.java index f0a7780ab24..7992996a0ff 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/report/GATKReportTable.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/report/GATKReportTable.java @@ 
-93,12 +93,12 @@ public GATKReportTable(BufferedReader reader, GATKReportVersion version) { // initialize the data final int nColumns = Integer.parseInt(tableData[TableDataHeaderFields.COLS.index()]); final int nRows = Integer.parseInt(tableData[TableDataHeaderFields.ROWS.index()]); - underlyingData = new ArrayList(nRows); - columnInfo = new ArrayList(nColumns); - columnNameToIndex = new HashMap(nColumns); + underlyingData = new ArrayList<>(nRows); + columnInfo = new ArrayList<>(nColumns); + columnNameToIndex = new HashMap<>(nColumns); // when reading from a file, the row ID mapping is just the index - rowIdToIndex = new HashMap(); + rowIdToIndex = new HashMap<>(); for ( int i = 0; i < nRows; i++ ) rowIdToIndex.put(i, i); @@ -183,10 +183,10 @@ public GATKReportTable(final String tableName, final String tableDescription, fi this.tableDescription = tableDescription; this.sortingWay = sortingWay; - underlyingData = new ArrayList(INITITAL_ARRAY_SIZE); - columnInfo = new ArrayList(numColumns); - columnNameToIndex = new HashMap(numColumns); - rowIdToIndex = new HashMap(); + underlyingData = new ArrayList<>(INITITAL_ARRAY_SIZE); + columnInfo = new ArrayList<>(numColumns); + columnNameToIndex = new HashMap<>(numColumns); + rowIdToIndex = new HashMap<>(); } /** @@ -737,12 +737,12 @@ public int compare(Object[] objectArr1, Object[] objectArr2) { case SORT_BY_ROW: final TreeMap sortedMap; try { - sortedMap = new TreeMap(rowIdToIndex); + sortedMap = new TreeMap<>(rowIdToIndex); } catch (ClassCastException e) { return underlyingData; } - final List orderedData = new ArrayList(underlyingData.size()); + final List orderedData = new ArrayList<>(underlyingData.size()); for ( final int rowKey : sortedMap.values() ) orderedData.add(underlyingData.get(rowKey)); diff --git a/src/main/java/org/broadinstitute/hellbender/utils/runtime/ProcessController.java b/src/main/java/org/broadinstitute/hellbender/utils/runtime/ProcessController.java index cae5aab1547..1190880fcd6 100644 --- 
a/src/main/java/org/broadinstitute/hellbender/utils/runtime/ProcessController.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/runtime/ProcessController.java @@ -264,7 +264,7 @@ public ProcessOutput execAndCheck(ProcessSettings settings) throws IOException { */ public static Set getRunning() { synchronized (running) { - return new HashSet(running); + return new HashSet<>(running); } } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/text/XReadLines.java b/src/main/java/org/broadinstitute/hellbender/utils/text/XReadLines.java index d6667e24db0..4e255493b1f 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/text/XReadLines.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/text/XReadLines.java @@ -71,7 +71,7 @@ public XReadLines(final Reader reader, final boolean trimWhitespace, final Strin * @return all of the lines in the file. */ public List readLines() { - List lines = new LinkedList(); + List lines = new LinkedList<>(); for ( String line : this ) { lines.add(line); } diff --git a/src/main/java/org/broadinstitute/hellbender/utils/text/parsers/BasicInputParser.java b/src/main/java/org/broadinstitute/hellbender/utils/text/parsers/BasicInputParser.java index 1a7dbd2e001..b7b7189106b 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/text/parsers/BasicInputParser.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/text/parsers/BasicInputParser.java @@ -15,8 +15,8 @@ */ public class BasicInputParser extends AbstractInputParser { private BufferedLineReader reader; - private final ArrayList inputs = new ArrayList(); - private final ArrayList fileNames = new ArrayList(); + private final ArrayList inputs = new ArrayList<>(); + private final ArrayList fileNames = new ArrayList<>(); String currentFileName = null; private String currentLine = null; private String nextLine = null; diff --git a/src/main/java/org/broadinstitute/hellbender/utils/text/parsers/TabbedTextFileWithHeaderParser.java 
b/src/main/java/org/broadinstitute/hellbender/utils/text/parsers/TabbedTextFileWithHeaderParser.java index e3821846935..6c5bb9b8e62 100644 --- a/src/main/java/org/broadinstitute/hellbender/utils/text/parsers/TabbedTextFileWithHeaderParser.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/text/parsers/TabbedTextFileWithHeaderParser.java @@ -72,7 +72,7 @@ public void close() { /** * Map from column label to positional index. */ - private final Map columnLabelIndices = new HashMap(); + private final Map columnLabelIndices = new HashMap<>(); private final TabbedInputParser parser; private TheIterator extantIterator; diff --git a/src/main/java/org/broadinstitute/hellbender/utils/variant/DbSnpBitSetUtil.java b/src/main/java/org/broadinstitute/hellbender/utils/variant/DbSnpBitSetUtil.java index ab69d93cf73..58c27e18e35 100755 --- a/src/main/java/org/broadinstitute/hellbender/utils/variant/DbSnpBitSetUtil.java +++ b/src/main/java/org/broadinstitute/hellbender/utils/variant/DbSnpBitSetUtil.java @@ -15,7 +15,7 @@ */ public class DbSnpBitSetUtil { - private final Map sequenceToBitSet = new HashMap(); + private final Map sequenceToBitSet = new HashMap<>(); /** Little tuple class to contain one bitset for SNPs and another for Indels. 
*/ public static class DbSnpBitSets { @@ -49,7 +49,7 @@ public DbSnpBitSetUtil(final File dbSnpFile, final Collection variantsToMatch) { if (dbSnpFile == null) throw new IllegalArgumentException("null dbSnpFile"); - final Map> tmp = new HashMap>(); + final Map> tmp = new HashMap<>(); tmp.put(this, EnumSet.copyOf(variantsToMatch)); loadVcf(dbSnpFile, sequenceDictionary, tmp); } @@ -62,7 +62,7 @@ public static DbSnpBitSets createSnpAndIndelBitSets(final File dbSnpFile, sets.snps = new DbSnpBitSetUtil(); sets.indels = new DbSnpBitSetUtil(); - final Map> map = new HashMap>(); + final Map> map = new HashMap<>(); map.put(sets.snps, EnumSet.of(DbSnpVariantType.SNP)); map.put(sets.indels, EnumSet.of(DbSnpVariantType.insertion, DbSnpVariantType.deletion)); loadVcf(dbSnpFile, sequenceDictionary, map); diff --git a/src/test/java/org/broadinstitute/hellbender/engine/FeatureManagerUnitTest.java b/src/test/java/org/broadinstitute/hellbender/engine/FeatureManagerUnitTest.java index 13b4ce9c9b3..e1a7474e0a5 100644 --- a/src/test/java/org/broadinstitute/hellbender/engine/FeatureManagerUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/engine/FeatureManagerUnitTest.java @@ -154,6 +154,6 @@ public void testHandleRequestForNonExistentFeatureInput() { // Requests for FeatureInputs not declared in the tool's class hierarchy (or associated ArgumentCollections) // should throw an exception - manager.getFeatures(new FeatureInput("featureInputNotDeclaredInTool"), new SimpleInterval("1", 1, 1)); + manager.getFeatures(new FeatureInput<>("featureInputNotDeclaredInTool"), new SimpleInterval("1", 1, 1)); } } diff --git a/src/test/java/org/broadinstitute/hellbender/engine/ReadsDataSourceUnitTest.java b/src/test/java/org/broadinstitute/hellbender/engine/ReadsDataSourceUnitTest.java index a104cc089ee..f528c9c8eae 100644 --- a/src/test/java/org/broadinstitute/hellbender/engine/ReadsDataSourceUnitTest.java +++ 
b/src/test/java/org/broadinstitute/hellbender/engine/ReadsDataSourceUnitTest.java @@ -35,7 +35,7 @@ public void testHandleNullFileList() { @Test(expectedExceptions = IllegalArgumentException.class) public void testHandleEmptyFileList() { - ReadsDataSource readsSource = new ReadsDataSource(new ArrayList()); + ReadsDataSource readsSource = new ReadsDataSource(new ArrayList<>()); } @Test(expectedExceptions = UserException.CouldNotReadInputFile.class) @@ -71,7 +71,7 @@ public Object[][] getSingleFileCompleteTraversalData() { public void testSingleFileCompleteTraversal( final File samFile, final List expectedReadNames ) { ReadsDataSource readsSource = new ReadsDataSource(samFile); - List reads = new ArrayList(); + List reads = new ArrayList<>(); for ( SAMRecord read : readsSource ) { reads.add(read); } @@ -123,7 +123,7 @@ public void testSingleFileTraversalWithIntervals( final File samFile, final List ReadsDataSource readsSource = new ReadsDataSource(samFile); readsSource.setIntervalsForTraversal(intervals); - List reads = new ArrayList(); + List reads = new ArrayList<>(); for ( SAMRecord read : readsSource ) { reads.add(read); } @@ -174,7 +174,7 @@ public Object[][] getSingleFileQueryByIntervalData() { public void testSingleFileQueryByInterval( final File samFile, final SimpleInterval interval, final List expectedReadNames ) { ReadsDataSource readsSource = new ReadsDataSource(samFile); - List reads = new ArrayList(); + List reads = new ArrayList<>(); Iterator queryIterator = readsSource.query(interval); while ( queryIterator.hasNext() ) { reads.add(queryIterator.next()); @@ -205,7 +205,7 @@ public Object[][] getMultipleFilesCompleteTraversalData() { @Test(dataProvider = "MultipleFilesCompleteTraversalData") public void testMultipleFilesCompleteTraversal(final List samFiles, final List expectedReadNames) { ReadsDataSource readsSource = new ReadsDataSource(samFiles); - List reads = new ArrayList(); + List reads = new ArrayList<>(); for ( SAMRecord read : readsSource ) { 
reads.add(read); @@ -258,7 +258,7 @@ public void testMultipleFilesTraversalWithIntervals( final List samFiles, ReadsDataSource readsSource = new ReadsDataSource(samFiles); readsSource.setIntervalsForTraversal(intervals); - List reads = new ArrayList(); + List reads = new ArrayList<>(); for ( SAMRecord read : readsSource ) { reads.add(read); } @@ -305,7 +305,7 @@ public Object[][] getMultipleFilesQueryByIntervalData() { public void testMultipleFilesQueryByInterval( final List samFiles, final SimpleInterval interval, final List expectedReadNames ) { ReadsDataSource readsSource = new ReadsDataSource(samFiles); - List reads = new ArrayList(); + List reads = new ArrayList<>(); Iterator queryIterator = readsSource.query(interval); while ( queryIterator.hasNext() ) { reads.add(queryIterator.next()); diff --git a/src/test/java/org/broadinstitute/hellbender/engine/ReferenceContextUnitTest.java b/src/test/java/org/broadinstitute/hellbender/engine/ReferenceContextUnitTest.java index fca15307604..3add41af146 100644 --- a/src/test/java/org/broadinstitute/hellbender/engine/ReferenceContextUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/engine/ReferenceContextUnitTest.java @@ -139,7 +139,7 @@ public void testDynamicallyChangingWindow() { private void checkReferenceContextBases( final ReferenceContext refContext, final String expectedBases ) { byte[] contextBases = refContext.getBases(); - List contextBasesFromIterator = new ArrayList(); + List contextBasesFromIterator = new ArrayList<>(); Iterator baseIterator = refContext.iterator(); while ( baseIterator.hasNext() ) { contextBasesFromIterator.add(baseIterator.next()); diff --git a/src/test/java/org/broadinstitute/hellbender/metrics/MultiLevelCollectorTest.java b/src/test/java/org/broadinstitute/hellbender/metrics/MultiLevelCollectorTest.java index d4101dd1561..8da294af34c 100644 --- a/src/test/java/org/broadinstitute/hellbender/metrics/MultiLevelCollectorTest.java +++ 
b/src/test/java/org/broadinstitute/hellbender/metrics/MultiLevelCollectorTest.java @@ -64,7 +64,7 @@ public int getNumProcessed() { return numProcessed; } - private final Map unitsToMetrics = new HashMap(); + private final Map unitsToMetrics = new HashMap<>(); public Map getUnitsToMetrics() { return unitsToMetrics; @@ -119,25 +119,25 @@ public void addMetricsToFile(final MetricsFile total } } - public static final Map> accumulationLevelToPerUnitReads = new HashMap>(); + public static final Map> accumulationLevelToPerUnitReads = new HashMap<>(); static { - HashMap curMap = new HashMap(); + HashMap curMap = new HashMap<>(); curMap.put("__", 19); accumulationLevelToPerUnitReads.put(MetricAccumulationLevel.ALL_READS, curMap); - curMap = new HashMap(); + curMap = new HashMap<>(); curMap.put("Ma__", 10); curMap.put("Pa__", 9); accumulationLevelToPerUnitReads.put(MetricAccumulationLevel.SAMPLE, curMap); - curMap = new HashMap(); + curMap = new HashMap<>(); curMap.put("Ma_whatever_", 10); curMap.put("Pa_lib1_", 4); curMap.put("Pa_lib2_", 5); accumulationLevelToPerUnitReads.put(MetricAccumulationLevel.LIBRARY, curMap); - curMap = new HashMap(); + curMap = new HashMap<>(); curMap.put("Ma_whatever_me", 10); curMap.put("Pa_lib1_myself", 4); curMap.put("Pa_lib2_i", 3); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectAlignmentSummaryMetricsTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectAlignmentSummaryMetricsTest.java index 29157f9bd58..ccc260c9be2 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectAlignmentSummaryMetricsTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectAlignmentSummaryMetricsTest.java @@ -35,7 +35,7 @@ public void test() throws IOException { }; runCommandLine(args); - final MetricsFile> output = new MetricsFile>(); + final MetricsFile> output = new MetricsFile<>(); output.read(new FileReader(outfile)); for (final 
AlignmentSummaryMetrics metrics : output.getMetrics()) { @@ -98,7 +98,7 @@ public void testBisulfite() throws IOException { final NumberFormat format = NumberFormat.getInstance(); format.setMaximumFractionDigits(4); - final MetricsFile> output = new MetricsFile>(); + final MetricsFile> output = new MetricsFile<>(); output.read(new FileReader(outfile)); for (final AlignmentSummaryMetrics metrics : output.getMetrics()) { @@ -155,7 +155,7 @@ public void testNoReference() throws IOException { }; runCommandLine(args); - final MetricsFile> output = new MetricsFile>(); + final MetricsFile> output = new MetricsFile<>(); output.read(new FileReader(outfile)); for (final AlignmentSummaryMetrics metrics : output.getMetrics()) { @@ -212,7 +212,7 @@ public void testZeroLengthReads() throws IOException { }; runCommandLine(args); - final MetricsFile> output = new MetricsFile>(); + final MetricsFile> output = new MetricsFile<>(); output.read(new FileReader(outfile)); for (final AlignmentSummaryMetrics metrics : output.getMetrics()) { // test that it doesn't blow up @@ -234,7 +234,7 @@ public void testMultipleLevelsOfMetrics() throws IOException { }; runCommandLine(args); - final MetricsFile> output = new MetricsFile>(); + final MetricsFile> output = new MetricsFile<>(); output.read(new FileReader(outfile)); for (final AlignmentSummaryMetrics metrics : output.getMetrics()) { diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectInsertSizeMetricsTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectInsertSizeMetricsTest.java index 54223865c61..9f45469d3a6 100755 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectInsertSizeMetricsTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/CollectInsertSizeMetricsTest.java @@ -33,7 +33,7 @@ public void test() throws IOException { }; runCommandLine(args); - final MetricsFile> output = new MetricsFile>(); + final MetricsFile> 
output = new MetricsFile<>(); output.read(new FileReader(outfile)); for (final InsertSizeMetrics metrics : output.getMetrics()) { diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectRnaSeqMetricsTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectRnaSeqMetricsTest.java index 192d70645a7..d37d12e2ba8 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectRnaSeqMetricsTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectRnaSeqMetricsTest.java @@ -71,7 +71,7 @@ public void basic() throws Exception { }; runCommandLine(args); - final MetricsFile> output = new MetricsFile>(); + final MetricsFile> output = new MetricsFile<>(); output.read(new FileReader(metricsFile)); final RnaSeqMetrics metrics = output.getMetrics().get(0); @@ -145,7 +145,7 @@ public void testMultiLevel() throws Exception { }; runCommandLine(args); - final MetricsFile> output = new MetricsFile>(); + final MetricsFile> output = new MetricsFile<>(); output.read(new FileReader(metricsFile)); for (final RnaSeqMetrics metrics : output.getMetrics()) { diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectRrbsMetricsTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectRrbsMetricsTest.java index 41150cd4939..c5273f34ea0 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectRrbsMetricsTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/analysis/directed/CollectRrbsMetricsTest.java @@ -37,7 +37,7 @@ private void tearDown() { @Test public void chrMReads() throws Exception { final MetricsFile metricsFile = getSummaryFile( - CHR_M_SAM.getAbsolutePath(), CHR_M_REFERENCE.getAbsolutePath(), rootTestDir + "/READ_TEST", new ArrayList()); + CHR_M_SAM.getAbsolutePath(), CHR_M_REFERENCE.getAbsolutePath(), rootTestDir 
+ "/READ_TEST", new ArrayList<>()); final RrbsSummaryMetrics metrics = metricsFile.getMetrics().get(0); Assert.assertEquals(metrics.READS_ALIGNED.intValue(), 5); Assert.assertEquals(metrics.NON_CPG_BASES.intValue(), 15); @@ -55,7 +55,7 @@ public void chrMReads() throws Exception { private MetricsFile getSummaryFile(final String input, final String reference, final String prefix, final List sequences) throws Exception { - final List argList = new ArrayList(); + final List argList = new ArrayList<>(); argList.add("--INPUT"); argList.add(input); argList.add("--METRICS_FILE_PREFIX"); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/CheckIlluminaDirectoryTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/CheckIlluminaDirectoryTest.java index 9579e467675..d7ab70b82de 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/CheckIlluminaDirectoryTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/CheckIlluminaDirectoryTest.java @@ -157,7 +157,7 @@ public File writeTileMetricsOutFile(final File interopDir, final byte versionNum } public static Map> makeMap(final List lanes, final List> tiles) { - final Map> map = new HashMap>(); + final Map> map = new HashMap<>(); if (lanes.size() != tiles.size()) { throw new IllegalArgumentException("Number of lanes (" + lanes + ") does not equal number of tiles!"); @@ -328,14 +328,14 @@ public void differentSizedBclTest() { final File cycleDir = new File(basecallDir, "L005/C9.1"); writeFileOfSize(new File(cycleDir, "s_5_3.bcl"), 222); - final List args = makeCheckerArgs(basecallDir, lane, "50T", dataTypes, new ArrayList(), false, false); + final List args = makeCheckerArgs(basecallDir, lane, "50T", dataTypes, new ArrayList<>(), false, false); Assert.assertEquals(runCommandLine(args), 1); } @Test(expectedExceptions = SAMException.class) public void basedirDoesntExistTest() { final List args = makeCheckerArgs(new 
File("a_made_up_file/in_some_weird_location"), 1, "76T76T", - new IlluminaDataType[]{IlluminaDataType.Position}, new ArrayList(), false, false); + new IlluminaDataType[]{IlluminaDataType.Position}, new ArrayList<>(), false, false); runCommandLine(args); } @@ -344,7 +344,7 @@ public void symlinkLocsTest() { final List tileList = makeList(1101, 1102, 1103, 2101, 2102, 2103); final int lane = 5; makeFiles(new SupportedIlluminaFormat[]{Bcl}, lane, tileList, IlluminaFileUtilTest.cycleRange(1, 50)); - List args = makeCheckerArgs(basecallDir, lane, "50T", new IlluminaDataType[]{Position}, new ArrayList(), false, true); + List args = makeCheckerArgs(basecallDir, lane, "50T", new IlluminaDataType[]{Position}, new ArrayList<>(), false, true); writeTileMetricsOutFile(makeMap(makeList(lane), makeList(tileList))); createSingleLocsFile(); @@ -353,7 +353,7 @@ public void symlinkLocsTest() { Assert.assertEquals(runCommandLine(args), 0); //now that we have created the loc files lets test to make sure they are there - args = makeCheckerArgs(basecallDir, lane, "50T", new IlluminaDataType[]{IlluminaDataType.Position}, new ArrayList(), false, true); + args = makeCheckerArgs(basecallDir, lane, "50T", new IlluminaDataType[]{IlluminaDataType.Position}, new ArrayList<>(), false, true); Assert.assertEquals(runCommandLine(args), 0); } diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/ExtractIlluminaBarcodesTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/ExtractIlluminaBarcodesTest.java index 5c5aeac0ce7..b7936b0890b 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/ExtractIlluminaBarcodesTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/ExtractIlluminaBarcodesTest.java @@ -120,7 +120,7 @@ public void testNonWritableOutputFile() throws Exception { final File metricsFile = File.createTempFile("eib.", ".metrics"); metricsFile.deleteOnExit(); - final List args = new 
ArrayList(asList( + final List args = new ArrayList<>(asList( "--BASECALLS_DIR", basecallsDir.getPath(), "--LANE", Integer.toString(lane), "--READ_STRUCTURE", readStructure, @@ -221,7 +221,7 @@ public void testDualBarcodes() throws Exception { }; assertEquals(runCommandLine(args), 0); - final MetricsFile result = new MetricsFile(); + final MetricsFile result = new MetricsFile<>(); result.read(new FileReader(metricsFile)); assertEquals(result.getMetrics().get(0).PERFECT_MATCHES, 1, "Got wrong number of perfect matches"); assertEquals(result.getMetrics().get(0).ONE_MISMATCH_MATCHES, 0, "Got wrong number of one-mismatch matches"); @@ -248,7 +248,7 @@ public void testQualityBarcodes(final int quality, }; assertEquals(runCommandLine(args), 0); - final MetricsFile result = new MetricsFile(); + final MetricsFile result = new MetricsFile<>(); result.read(new FileReader(metricsFile)); assertEquals(result.getMetrics().get(0).PERFECT_MATCHES, perfectMatches, "Got wrong number of perfect matches for test: '" + testName + "'"); assertEquals(result.getMetrics().get(0).ONE_MISMATCH_MATCHES, oneMismatch, "Got wrong number of one-mismatch matches for test: '" + testName + "'"); @@ -286,7 +286,7 @@ private MetricsFile runIt(final final File metricsFile = File.createTempFile("eib.", ".metrics"); metricsFile.deleteOnExit(); - final List args = new ArrayList(asList( + final List args = new ArrayList<>(asList( "--BASECALLS_DIR", basecallsDir.getPath(), "--LANE", Integer.toString(lane), "--READ_STRUCTURE", readStructure, @@ -303,7 +303,7 @@ private MetricsFile runIt(final // Generate _barcode.txt files and metrics file. 
assertEquals(runCommandLine(args), 0); - final MetricsFile retval = new MetricsFile(); + final MetricsFile retval = new MetricsFile<>(); retval.read(new FileReader(metricsFile)); return retval; } diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToFastqTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToFastqTest.java index 079629ee28b..e8d5398f8ec 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToFastqTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToFastqTest.java @@ -117,7 +117,7 @@ private void runStandardTest(final int lane, final String jobName, final String // Create barcode.params with output files in the temp directory final File libraryParams = new File(outputDir, libraryParamsFile); libraryParams.deleteOnExit(); - final List outputPrefixes = new ArrayList(); + final List outputPrefixes = new ArrayList<>(); final LineReader reader = new BufferedLineReader(new FileInputStream(new File(testDataDir, libraryParamsFile))); final PrintWriter writer = new PrintWriter(libraryParams); final String header = reader.readLine(); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToSamTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToSamTest.java index 81ebe932cb5..c309375fe76 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToSamTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/IlluminaBasecallsToSamTest.java @@ -99,7 +99,7 @@ private void runStandardTest(final int lane, final String jobName, final String // Create barcode.params with output files in the temp directory final File libraryParams = new File(outputDir, libraryParamsFile); libraryParams.deleteOnExit(); - final List samFiles = new ArrayList(); + final 
List samFiles = new ArrayList<>(); final LineReader reader = new BufferedLineReader(new FileInputStream(new File(testDataDir, libraryParamsFile))); final PrintWriter writer = new PrintWriter(libraryParams); final String header = reader.readLine(); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectIlluminaBasecallingMetricsTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectIlluminaBasecallingMetricsTest.java index b080a1ac2c5..79df40ec040 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectIlluminaBasecallingMetricsTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/metrics/CollectIlluminaBasecallingMetricsTest.java @@ -151,7 +151,7 @@ private MetricsFile runIt(final int lane, f File basecallsDir = new File(rootTestDir.getPath(),basecallsDirName); - List argsList = new ArrayList(); + List argsList = new ArrayList<>(); argsList.add("--BASECALLS_DIR"); argsList.add(basecallsDir.getPath()); argsList.add("--LANE"); argsList.add(Integer.toString(lane)); argsList.add("--OUTPUT"); argsList.add(metricsFile.getPath()); @@ -165,7 +165,7 @@ private MetricsFile runIt(final int lane, f runCommandLine(argsList); - final MetricsFile retval = new MetricsFile(); + final MetricsFile retval = new MetricsFile<>(); retval.read(new FileReader(metricsFile)); return retval; } diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/BclParserTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/BclParserTest.java index 5d8fc09ef18..d02c1a5c703 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/BclParserTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/BclParserTest.java @@ -120,7 +120,7 @@ public void fullBclParserTestImpl(final File dir, final String readStructure, fi final MultiTileBclFileUtil 
multiTileBclFileUtil = (MultiTileBclFileUtil) util.getUtil(IlluminaFileUtil.SupportedIlluminaFormat.MultiTileBcl); - final List tileIntegers = new ArrayList(); + final List tileIntegers = new ArrayList<>(); for (final int tile : tiles) { tileIntegers.add(tile); } diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/BinTdUtil.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/BinTdUtil.java index 190b789f7b8..abf24f099ae 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/BinTdUtil.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/BinTdUtil.java @@ -16,9 +16,9 @@ public static final String ltStr(final int lane, final int tile) { public static final byte G = (byte) 71; public static final byte T = (byte) 84; public static final byte P = (byte) 46; //dot - public static final Map> goldData = new HashMap>(); - public static final Map> goldIndices = new HashMap>(); - public static final Map goldSizes = new HashMap(); + public static final Map> goldData = new HashMap<>(); + public static final Map> goldIndices = new HashMap<>(); + public static final Map goldSizes = new HashMap<>(); static { int lane = 1; @@ -176,10 +176,10 @@ public static final String ltStr(final int lane, final int tile) { } public static Map clusterData(final int lane, final List tiles, final String readStructure, final IlluminaDataType... 
dataTypes) { - final List sortedTiles = new ArrayList(tiles); + final List sortedTiles = new ArrayList<>(tiles); Collections.sort(sortedTiles); - final Map data = new HashMap(); + final Map data = new HashMap<>(); int offset = 0; for (final int tile : sortedTiles) { final String key = ltStr(lane, tile); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/FilterParserTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/FilterParserTest.java index 528f48d090d..3b520703d10 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/FilterParserTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/FilterParserTest.java @@ -46,7 +46,7 @@ public static final Boolean[] allTilesToValues(Integer[] tiles) { } public static final List arrayToList(final Integer[] array) { - final List list = new ArrayList(); + final List list = new ArrayList<>(); for (int item : array) { list.add(item); } diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileUtilTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileUtilTest.java index 95f3f5ddabd..28da9f84202 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileUtilTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/IlluminaFileUtilTest.java @@ -144,13 +144,13 @@ public void passNewUtilTest() { makeFiles(format, intensityDir, DEFAULT_LANE + 2, DEFAULT_TILES, DEFAULT_CYCLES, ".bz2"); } - final Set formatsToTest = new HashSet(); + final Set formatsToTest = new HashSet<>(); // TODO: I can't be bothered to build files for these. 
AW Collections.addAll(formatsToTest, SupportedIlluminaFormat.values()); formatsToTest.remove(SupportedIlluminaFormat.MultiTileBcl); formatsToTest.remove(SupportedIlluminaFormat.MultiTileFilter); formatsToTest.remove(SupportedIlluminaFormat.MultiTileLocs); - final ArrayList formatsList = new ArrayList(formatsToTest); + final ArrayList formatsList = new ArrayList<>(formatsToTest); for (int i = 0; i < 3; i++) { final IlluminaFileUtil fileUtil = new IlluminaFileUtil(new File(intensityDir, "BaseCalls"), DEFAULT_LANE + i); @@ -321,7 +321,7 @@ public void testDefaultPerTileUtil(final PerTileFileUtil ptfu, final String comp Assert.assertTrue(tFile.length() > 0); } - final List tiles = new ArrayList(DEFAULT_TILE_TEST_SUBSET); + final List tiles = new ArrayList<>(DEFAULT_TILE_TEST_SUBSET); final IlluminaFileMap subsetMap = ptfu.getFiles(DEFAULT_TILE_TEST_SUBSET); for (final Integer tile : subsetMap.keySet()) { tiles.remove(tile); @@ -397,7 +397,7 @@ public void testDefaultPerTilePerCycleUtil(final PerTilePerCycleFileUtil pcfu, f public void testSubsetDefaultPerTilePerCycleUtil(final PerTilePerCycleFileUtil pcfu, final File parentDir, final int[] cycles) { - final List tiles = new ArrayList(DEFAULT_TILE_TEST_SUBSET); + final List tiles = new ArrayList<>(DEFAULT_TILE_TEST_SUBSET); final CycleIlluminaFileMap subsetMap = pcfu.getFiles(DEFAULT_TILE_TEST_SUBSET, cycles); final CycleIlluminaFileMap cfmNoCycles; if (Arrays.equals(cycles, DEFAULT_CYCLES)) { @@ -597,7 +597,7 @@ private static List makeCycleFileList(final File dir, final String ext, } private static List makeCycleFileList(final File dir, final String ext, final int lane, final int[] cycles, final boolean longFmt, final int... 
tiles) { - final List files = new ArrayList(); + final List files = new ArrayList<>(); final File laneDir = new File(dir, laneDir(lane)); for (final int cycle : cycles) { diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileParserTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileParserTest.java index 95266a27a90..938cb7cb082 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileParserTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTileParserTest.java @@ -12,7 +12,7 @@ public class PerTileParserTest { - private static final Map> FILE_TO_VALUE = new HashMap>(); + private static final Map> FILE_TO_VALUE = new HashMap<>(); private static final IlluminaFileMap FILE_MAP = new IlluminaFileMap(); static { FILE_MAP.put(1, new File("s_1_1")); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTilePerCycleParserTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTilePerCycleParserTest.java index c4a7fb87551..2b96cf747a9 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTilePerCycleParserTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PerTilePerCycleParserTest.java @@ -20,7 +20,7 @@ private class MockCycledIlluminaData implements IlluminaData { private final List values; public MockCycledIlluminaData() { - this.values = new ArrayList(); + this.values = new ArrayList<>(); } public void addValue(final String value) { @@ -85,7 +85,7 @@ public void close() { } public List getFileNames(final List tiles) { - final List fileNames = new ArrayList(); + final List fileNames = new ArrayList<>(); for (final Integer tile : tiles) { for (int i = 1; i <= MAX_CYCLE; i++) { fileNames.add(str_del(tile, i)); diff --git 
a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PosParserTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PosParserTest.java index 26354df3c1a..1b987934afc 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PosParserTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/PosParserTest.java @@ -37,14 +37,14 @@ public TestResult(final int lane, final int tile, final float xPos, final float } public static List makeTestResults(final int lane, final int tile, final float [] xyPos, final int [] xyQseq) { - final ArrayList results = new ArrayList(); + final ArrayList results = new ArrayList<>(); for(int i = 0; i < xyPos.length; i+=2) { results.add(new TestResult(lane, tile, xyPos[i], xyPos[i+1], xyQseq[i], xyQseq[i+1])); } return results; } - public static Map> TEST_DATA = new HashMap>(); + public static Map> TEST_DATA = new HashMap<>(); static { float[] pos = { 101.35f, 207.8f, @@ -181,7 +181,7 @@ public void multiTileDataTest(final List tiles, final Integer startingT final PosParser parser = (startingTileIndex == null) ? new PosParser(fm, IlluminaFileUtil.SupportedIlluminaFormat.Pos) : new PosParser(fm, tiles.get(startingTileIndex), IlluminaFileUtil.SupportedIlluminaFormat.Pos); - final List expectedResultsList = new ArrayList(); + final List expectedResultsList = new ArrayList<>(); final int t1 = (startingTileIndex != null) ? 
startingTileIndex : 0; for(int i = t1; i < tiles.size(); i++) { expectedResultsList.addAll(TEST_DATA.get(files.get(i).getName())); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/readers/BclReaderTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/readers/BclReaderTest.java index d6a0f28972b..7803f1e612c 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/readers/BclReaderTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/illumina/parser/readers/BclReaderTest.java @@ -103,7 +103,7 @@ public void lowQualityButPassingTest() throws ExecutionException, InterruptedExc final BclQualityEvaluationStrategy bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(1); // Build a list of callables, then submit them and check for errors. - final Collection> callables = new LinkedList>(); + final Collection> callables = new LinkedList<>(); for (int i = 0; i < 10; i++) { final boolean even_i = i % 2 == 0; callables.add(new Callable() { @@ -121,7 +121,7 @@ public Void call() throws Exception { }); } final ExecutorService executorService = Executors.newFixedThreadPool(callables.size()); - final Collection> futures = new LinkedList>(); + final Collection> futures = new LinkedList<>(); for (final Callable callable : callables) { futures.add(executorService.submit(callable)); } @@ -138,7 +138,7 @@ public void lowQualityAndFailingTest() throws ExecutionException, InterruptedExc final BclQualityEvaluationStrategy bclQualityEvaluationStrategy = new BclQualityEvaluationStrategy(BclQualityEvaluationStrategy.ILLUMINA_ALLEGED_MINIMUM_QUALITY); // Build a list of callables, then submit them and check for errors. 
- final Collection> callables = new LinkedList>(); + final Collection> callables = new LinkedList<>(); for (int i = 0; i < 10; i++) { final boolean even_i = i % 2 == 0; callables.add(new Callable() { @@ -156,7 +156,7 @@ public Void call() throws Exception { }); } final ExecutorService executorService = Executors.newFixedThreadPool(callables.size()); - final Collection> futures = new LinkedList>(); + final Collection> futures = new LinkedList<>(); for (final Callable callable : callables) { futures.add(executorService.submit(callable)); } diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListScattererTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListScattererTest.java index e96bf48bcff..f41ae12a75a 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListScattererTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/interval/IntervalListScattererTest.java @@ -50,7 +50,7 @@ private Testcase(final IntervalList source, final int scatterWidth, final Mode m } } - private static final List testcases = new ArrayList(); + private static final List testcases = new ArrayList<>(); static { testcases.add(new Testcase( diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/AddCommentsToBamIntegrationTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/AddCommentsToBamIntegrationTest.java index 8419c34c0a2..5ccc2ee660b 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/AddCommentsToBamIntegrationTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/AddCommentsToBamIntegrationTest.java @@ -35,7 +35,7 @@ public void testAddCommentsToBam() throws Exception { // The original comments are massaged when they're added to the header. 
Perform the same massaging here, // and then compare the lists - final List massagedComments = new LinkedList(); + final List massagedComments = new LinkedList<>(); for (final String comment : commentList) { massagedComments.add(SAMTextHeaderCodec.COMMENT_PREFIX + comment); } @@ -58,7 +58,7 @@ public void testUsingNewlines() throws Exception { } private void runIt(final File inputFile, final File outputFile, final String[] commentList) { - final List args = new ArrayList(Arrays.asList( + final List args = new ArrayList<>(Arrays.asList( "--INPUT", inputFile.getAbsolutePath(), "--OUTPUT", outputFile.getAbsolutePath())); for (final String comment : commentList) { diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/MergeBamAlignmentIntegrationTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/MergeBamAlignmentIntegrationTest.java index 1876e1a591c..2a9064f4589 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/MergeBamAlignmentIntegrationTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/MergeBamAlignmentIntegrationTest.java @@ -408,7 +408,7 @@ public void testMultiHit() throws IOException { ); // Iterate over the merged output and gather some statistics - final Map accumulatorMap = new HashMap(); + final Map accumulatorMap = new HashMap<>(); final SamReader reader = SamReaderFactory.makeDefault().open(merged); for (final SAMRecord rec : reader) { diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/SamToFastqIntegrationTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/SamToFastqIntegrationTest.java index 3f8025a46bf..0dc82c72b15 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/SamToFastqIntegrationTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/SamToFastqIntegrationTest.java @@ -193,7 +193,7 @@ public Object[][] badGroupedFiles() { public void testOkGroupedFiles(final String 
samFilename, final String fastq, final String secondEndFastq, final String [] groupFiles) throws IOException { final File samFile = new File(TEST_DATA_DIR,samFilename); - final Map> outputSets = new HashMap>(groupFiles.length); + final Map> outputSets = new HashMap<>(groupFiles.length); final String tmpDir = IOUtil.getDefaultTmpDir().getAbsolutePath() + "/"; final String [] args = new String[]{ @@ -321,7 +321,7 @@ public Object[][] trimmedData() { } private Set createFastqReadHeaderSet(final File file) { - final Set set = new HashSet(); + final Set set = new HashSet<>(); final FastqReader freader = new FastqReader(file); while (freader.hasNext()) { final FastqRecord frec = freader.next(); @@ -334,7 +334,7 @@ private Map createSamMatePairsMap(final File samFile) throws IO IOUtil.assertFileIsReadable(samFile); final SamReader reader = SamReaderFactory.makeDefault().open(samFile); - final Map map = new LinkedHashMap(); + final Map map = new LinkedHashMap<>(); for (final SAMRecord record : reader ) { MatePair mpair = map.get(record.getReadName()); if (mpair == null) { @@ -351,7 +351,7 @@ private Map createSamMatePairsMap(final File samFile) throws IO private Map> createPUPairsMap(final File samFile) throws IOException { IOUtil.assertFileIsReadable(samFile); final SamReader reader = SamReaderFactory.makeDefault().open(samFile); - final Map> map = new LinkedHashMap>(); + final Map> map = new LinkedHashMap<>(); Map curFileMap; for (final SAMRecord record : reader ) { @@ -359,7 +359,7 @@ private Map> createPUPairsMap(final File samFile) curFileMap = map.get(platformUnit); if(curFileMap == null) { - curFileMap = new LinkedHashMap(); + curFileMap = new LinkedHashMap<>(); map.put(platformUnit, curFileMap); } diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicatesIntegrationTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicatesIntegrationTest.java index 8f6c46aa0ca..c90c8d832d2 
100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicatesIntegrationTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/sam/markduplicates/MarkDuplicatesIntegrationTest.java @@ -86,7 +86,7 @@ public void pgRecordChainingTest(final boolean suppressPg, // the PG ID should be the same for both ends of a pair. final SamReader reader = SamReaderFactory.makeDefault().open(outputSam); - final Map pgIdForReadName = new HashMap(); + final Map pgIdForReadName = new HashMap<>(); for (final SAMRecord rec : reader) { final String existingPgId = pgIdForReadName.get(rec.getReadName()); final String thisPgId = rec.getStringAttribute(SAMTag.PG.name()); @@ -136,15 +136,15 @@ private ExpectedPnAndVn(final String expectedPn, final String expectedVn) { @DataProvider(name = "pgRecordChainingTest") public Object[][] pgRecordChainingTestDataProvider() { // Two test cases: One in which PG record generation is enabled, the other in which it is turned off. 
- final Map> withPgMap = new HashMap>(); + final Map> withPgMap = new HashMap<>(); withPgMap.put("1AAXX.1.1", Arrays.asList(new ExpectedPnAndVn(TEST_BASE_NAME, null), new ExpectedPnAndVn(TEST_BASE_NAME, "1"), new ExpectedPnAndVn("bwa", "1"))); withPgMap.put("1AAXX.2.1", Arrays.asList(new ExpectedPnAndVn(TEST_BASE_NAME, null), new ExpectedPnAndVn("bwa", "2"))); withPgMap.put("1AAXX.3.1", Arrays.asList(new ExpectedPnAndVn(TEST_BASE_NAME, null))); - final Map> suppressPgMap = new HashMap>(); + final Map> suppressPgMap = new HashMap<>(); suppressPgMap .put("1AAXX.1.1", Arrays.asList(new ExpectedPnAndVn(TEST_BASE_NAME, "1"), new ExpectedPnAndVn("bwa", "1"))); suppressPgMap .put("1AAXX.2.1", Arrays.asList(new ExpectedPnAndVn("bwa", "2"))); - suppressPgMap .put("1AAXX.3.1", new ArrayList(0)); + suppressPgMap .put("1AAXX.3.1", new ArrayList<>(0)); return new Object[][] { { false, withPgMap}, { true, suppressPgMap} diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/AbstractVcfMergingClpTester.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/AbstractVcfMergingClpTester.java index 6ffd133d2b9..1a7d355a50c 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/AbstractVcfMergingClpTester.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/AbstractVcfMergingClpTester.java @@ -112,7 +112,7 @@ public void testMergeRandomScatter() throws IOException { final File five = new File(TEST_DATA_PATH, "CEUTrio-random-scatter-5.vcf"); final List inputs = Arrays.asList(zero, one, two, three, four, five); - final List> positionQueues = new ArrayList>(6); + final List> positionQueues = new ArrayList<>(6); positionQueues.add(0, loadContigPositions(zero)); positionQueues.add(1, loadContigPositions(one)); positionQueues.add(2, loadContigPositions(two)); @@ -154,7 +154,7 @@ private void validateResultsForMultipleInputs(final File output, final List loadContigPositions(final File inputFile) { final VCFFileReader 
reader = new VCFFileReader(inputFile, false); - final Queue contigPositions = new LinkedList(); + final Queue contigPositions = new LinkedList<>(); final CloseableIterator iterator = reader.iterator(); while (iterator.hasNext()) contigPositions.add(getContigPosition(iterator.next())); iterator.close(); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceTest.java index 4c4c977e79a..8b9ad6f8985 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/concordance/GenotypeConcordanceTest.java @@ -120,11 +120,11 @@ public void testGenotypeConcordance(final File vcf1, final String sample1, final private void assertMetricsFileEqual(final File actualMetricsFile, final File expectedMetricsFile) throws FileNotFoundException { // Actual metrics file - final MetricsFile> actual = new MetricsFile>(); + final MetricsFile> actual = new MetricsFile<>(); actual.read(new FileReader(actualMetricsFile)); // Expected metrics file - final MetricsFile> expected = new MetricsFile>(); + final MetricsFile> expected = new MetricsFile<>(); expected.read(new FileReader(expectedMetricsFile)); // Note - cannot use .equals as it calls .areHeadersEqual and they are not since the timestamp (at a minimum is different) @@ -149,7 +149,7 @@ public void testGenotypeConcordanceDetails() throws Exception { genotypeConcordance.instanceMain(new String[0]); - final Map nonZeroCounts = new HashMap(); + final Map nonZeroCounts = new HashMap<>(); nonZeroCounts.put(new TruthAndCallStates(TruthState.HET_REF_VAR1, CallState.HET_REF_VAR1), 104); nonZeroCounts.put(new TruthAndCallStates(TruthState.HOM_VAR1, CallState.HOM_VAR1), 59); nonZeroCounts.put(new TruthAndCallStates(TruthState.VC_FILTERED, CallState.VC_FILTERED), 40); @@ -230,7 
+230,7 @@ public void testGenotypeConcordanceDetailsWithIntervals() throws Exception { genotypeConcordance.instanceMain(new String[0]); - final Map nonZeroCounts = new HashMap(); + final Map nonZeroCounts = new HashMap<>(); nonZeroCounts.put(new TruthAndCallStates(TruthState.HET_REF_VAR1, CallState.HET_REF_VAR1), 1); nonZeroCounts.put(new TruthAndCallStates(TruthState.VC_FILTERED, CallState.VC_FILTERED), 2); @@ -421,7 +421,7 @@ public Object[][] genotypeConcordanceDetermineStateDataProvider() { {Aref, Aref, TruthState.HOM_REF, Allele.NO_CALL, Allele.NO_CALL, CallState.NO_CALL} }; // Rebuild a new set of unit test data with all permutations of alleles. - final List allPermutationUnitTestDataList = new ArrayList(); + final List allPermutationUnitTestDataList = new ArrayList<>(); for (final Object[] unitTestData : originalUnitTestData) { allPermutationUnitTestDataList.add(unitTestData); final Allele truthAllele1 = (Allele) unitTestData[0]; @@ -474,7 +474,7 @@ public void testGenotypeConcordanceDetermineStateNull() throws Exception { @Test public void testGenotypeConcordanceDetermineStateFilter() throws Exception { - final Set filters = new HashSet(Arrays.asList("BAD!")); + final Set filters = new HashSet<>(Arrays.asList("BAD!")); // Filtering on the variant context final List alleles1 = makeUniqueListOfAlleles(Aref, C); @@ -490,7 +490,7 @@ public void testGenotypeConcordanceDetermineStateFilter() throws Exception { testGenotypeConcordanceDetermineState(vcFiltered, TruthState.VC_FILTERED, vcFiltered, CallState.VC_FILTERED, 0, 0); // Filtering on the genotype - final List gtFilters = new ArrayList(Arrays.asList("WICKED")); + final List gtFilters = new ArrayList<>(Arrays.asList("WICKED")); final List alleles3 = makeUniqueListOfAlleles(Aref, C); final Genotype gt3 = new GenotypeBuilder(TRUTH_SAMPLE_NAME, Arrays.asList(Aref, C)).filters(gtFilters).make(); final VariantContext vcGtFiltered = new VariantContextBuilder("test", snpLoc, snpLocStart, snpLocStop, 
alleles3).genotypes(gt3).make(); @@ -566,13 +566,13 @@ private void testGenotypeConcordanceDetermineState(final VariantContext truthVar * Simple method to return a list of unique alleles. */ private List makeUniqueListOfAlleles(final Allele... alleles) { - final Set uniqueAlleles = new HashSet(); + final Set uniqueAlleles = new HashSet<>(); for (final Allele allele : alleles) { if (!allele.equals(Allele.NO_CALL)) { uniqueAlleles.add(allele); } } if (!uniqueAlleles.contains(Aref)) uniqueAlleles.add(Aref); - return new ArrayList(uniqueAlleles); + return new ArrayList<>(uniqueAlleles); } } diff --git a/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/FilterVcfTest.java b/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/FilterVcfTest.java index 06a39bb52b9..061e63aee96 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/FilterVcfTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/picard/vcf/filter/FilterVcfTest.java @@ -80,10 +80,10 @@ public void testFsFiltering() throws Exception { @Test public void testCombinedFiltering() throws Exception { - final TreeSet fails = new TreeSet(CollectionUtil.makeSet("rs13302979", "rs13303033", "rs2710876", "rs2799066", "rs28548431", "rs28566954", "rs71509448", "rs71628926", "tf2")); + final TreeSet fails = new TreeSet<>(CollectionUtil.makeSet("rs13302979", "rs13303033", "rs2710876", "rs2799066", "rs28548431", "rs28566954", "rs71509448", "rs71628926", "tf2")); final File out = testFiltering(INPUT, 0.4, 18, 22, 5.0d); final ListMap filters = slurpFilters(out); - Assert.assertEquals(new TreeSet(filters.keySet()), fails, "Failed sites did not match expected set of failed sites."); + Assert.assertEquals(new TreeSet<>(filters.keySet()), fails, "Failed sites did not match expected set of failed sites."); } /** Utility method that takes a a VCF and a set of parameters and filters the VCF. 
*/ @@ -107,7 +107,7 @@ File testFiltering(final File vcf, final double minAb, final int minDp, final in /** Consumes a VCF and returns a ListMap where each they keys are the IDs of filtered out sites and the values are the set of filters. */ ListMap slurpFilters(final File vcf) { - final ListMap map = new ListMap(); + final ListMap map = new ListMap<>(); final VCFFileReader in = new VCFFileReader(vcf, false); for (final VariantContext ctx : in) { if (ctx.isNotFiltered()) continue; diff --git a/src/test/java/org/broadinstitute/hellbender/tools/recalibration/QualQuantizerUnitTest.java b/src/test/java/org/broadinstitute/hellbender/tools/recalibration/QualQuantizerUnitTest.java index c3286a1432d..ffbdf5e53f0 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/recalibration/QualQuantizerUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/recalibration/QualQuantizerUnitTest.java @@ -93,7 +93,7 @@ public void testMinInterestingQual() { // -------------------------------------------------------------------------------- private class QuantizerTestProvider extends TestDataProvider { - final List nObservationsPerQual = new ArrayList(); + final List nObservationsPerQual = new ArrayList<>(); final int nLevels; final List expectedMap; diff --git a/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RecalUtilsUnitTest.java b/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RecalUtilsUnitTest.java index 49753b37142..3a967c45ad6 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RecalUtilsUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RecalUtilsUnitTest.java @@ -40,9 +40,9 @@ public String toString() { @DataProvider(name = "CombineTablesProvider") public Object[][] createCombineTablesProvider() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); - final List rows = new ArrayList(); + final List rows = new ArrayList<>(); for ( final int rg : 
Arrays.asList(0, 1) ) { for ( final int qual : Arrays.asList(0, 1) ) { rows.add(new Row(rg, qual, 1, 10)); @@ -51,7 +51,7 @@ public Object[][] createCombineTablesProvider() { logger.warn("Number of rows " + rows.size()); - List> permutations = new LinkedList>(); + List> permutations = new LinkedList<>(); permutations.addAll(Utils.makePermutations(rows, 1, false)); permutations.addAll(Utils.makePermutations(rows, 2, false)); permutations.addAll(Utils.makePermutations(rows, 3, false)); @@ -98,7 +98,7 @@ public void testCombineTables(final List table1, final List table2) { } public List makeExpected(final List table1, final List table2) { - final List combined = new LinkedList(); + final List combined = new LinkedList<>(); for ( final Row t1 : table1 ) combined.add(new Row(t1)); for ( final Row t2 : table2 ) { combine(combined, t2); @@ -119,7 +119,7 @@ private void combine(final List combined, final Row row) { } public NestedIntegerArray makeTable(final List rows) { - final NestedIntegerArray x = new NestedIntegerArray(3, 3); + final NestedIntegerArray x = new NestedIntegerArray<>(3, 3); for ( final Row r : rows ) x.put(new RecalDatum((long)r.no, (double)r.ne, (byte)10), r.rg, r.qual); return x; diff --git a/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RecalibrationReportUnitTest.java b/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RecalibrationReportUnitTest.java index 19205faa134..e3adf4b3925 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RecalibrationReportUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RecalibrationReportUnitTest.java @@ -35,8 +35,8 @@ private static RecalDatum createRandomRecalDatum(int maxObservations, int maxErr public void testOutput() { final int length = 100; - List quals = new ArrayList(QualityUtils.MAX_SAM_QUAL_SCORE + 1); - List counts = new ArrayList(QualityUtils.MAX_SAM_QUAL_SCORE + 1); + List quals = new 
ArrayList<>(QualityUtils.MAX_SAM_QUAL_SCORE + 1); + List counts = new ArrayList<>(QualityUtils.MAX_SAM_QUAL_SCORE + 1); for (int i = 0; i<= QualityUtils.MAX_SAM_QUAL_SCORE; i++) { quals.add((byte) i); @@ -47,8 +47,8 @@ public void testOutput() { final RecalibrationArgumentCollection RAC = new RecalibrationArgumentCollection(); quantizationInfo.noQuantization(); - final List requiredCovariates = new LinkedList(); - final List optionalCovariates = new LinkedList(); + final List requiredCovariates = new LinkedList<>(); + final List optionalCovariates = new LinkedList<>(); final ReadGroupCovariate rgCovariate = new ReadGroupCovariate(); rgCovariate.initialize(RAC); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RepeatCovariatesUnitTest.java b/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RepeatCovariatesUnitTest.java index 55cfe4ec832..61856a2bc33 100644 --- a/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RepeatCovariatesUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/recalibration/RepeatCovariatesUnitTest.java @@ -109,8 +109,8 @@ public void testManyObservations() { for (int r = 0; r < NUM_TEST_CASES; r++) { final StringBuilder sb = new StringBuilder(); // for each unit, generate a repeat unit at random with given random length - final ArrayList repeatUnits = new ArrayList(); - final ArrayList numsRepetitions = new ArrayList(); + final ArrayList repeatUnits = new ArrayList<>(); + final ArrayList numsRepetitions = new ArrayList<>(); for (int n=0; n < NUM_UNITS; n++) { final int repLength = 1+random.nextInt(MAX_REPEAT_UNIT_LENGTH); final String repeatUnit = getRandomBases(repLength); diff --git a/src/test/java/org/broadinstitute/hellbender/tools/walkers/bqsr/BaseRecalibratorUnitTest.java b/src/test/java/org/broadinstitute/hellbender/tools/walkers/bqsr/BaseRecalibratorUnitTest.java index 91cff56ca54..2325880b8fa 100644 --- 
a/src/test/java/org/broadinstitute/hellbender/tools/walkers/bqsr/BaseRecalibratorUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/tools/walkers/bqsr/BaseRecalibratorUnitTest.java @@ -91,7 +91,7 @@ public void basicDBQSRFractionalErrorTestMiddle() { @DataProvider(name = "CalculateIsIndelData") public Object[][] makeCalculateIsIndelData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); // this functionality can be adapted to provide input data for whatever you might want in your data for ( final EventType model : Arrays.asList(EventType.BASE_DELETION, EventType.BASE_INSERTION) ) { diff --git a/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocParserUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocParserUnitTest.java index 950bdde510c..6068e543744 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocParserUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocParserUnitTest.java @@ -331,7 +331,7 @@ public void testCreateGenomeLocAtStop(FlankingGenomeLocTestData data) { @DataProvider(name = "parseGenomeLoc") public Object[][] makeParsingTest() { - final List tests = new LinkedList(); + final List tests = new LinkedList<>(); tests.add(new Object[]{ "1:10", "1", 10 }); tests.add(new Object[]{ "1:100", "1", 100 }); @@ -438,7 +438,7 @@ public void testcreateGenomeLocOnContig() throws FileNotFoundException { @DataProvider(name = "GenomeLocOnContig") public Object[][] makeGenomeLocOnContig() { - final List tests = new LinkedList(); + final List tests = new LinkedList<>(); final int contigLength = header.getSequence(0).getSequenceLength(); for ( int start = -10; start < contigLength + 10; start++ ) { @@ -467,7 +467,7 @@ public void testGenomeLocOnContig(final String contig, final int start, final in @DataProvider(name = "GenomeLocPadding") public Object[][] makeGenomeLocPadding() { - final List tests = new LinkedList(); + final List tests = new LinkedList<>(); 
final int contigLength = header.getSequence(0).getSequenceLength(); for ( int pad = 0; pad < contigLength + 1; pad++) { diff --git a/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocSortedSetUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocSortedSetUnitTest.java index 46edc13e780..9615237bbba 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocSortedSetUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocSortedSetUnitTest.java @@ -317,7 +317,7 @@ public Object[][] makeGetOverlappingTest() throws Exception { final GenomeLoc middle = genomeLocParser.createGenomeLocOnContig("2", regionStart + 1, regionStart + 2); final GenomeLoc middle_past = genomeLocParser.createGenomeLocOnContig("2", region.getStop()-1, region.getStop()+10); - final List potentials = new LinkedList(); + final List potentials = new LinkedList<>(); potentials.add(region); if ( spanning != null ) potentials.add(spanning); if ( before_into != null ) potentials.add(before_into); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocUnitTest.java index 4c3d27c477b..e8ebe5f6490 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/GenomeLocUnitTest.java @@ -194,7 +194,7 @@ public void testReciprocalOverlapProvider(ReciprocalOverlapProvider cfg) { @DataProvider(name = "GenomeLocComparisons") public Object[][] createGenomeLocComparisons() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final int start = 10; for ( int stop = start; stop < start + 3; stop++ ) { @@ -261,7 +261,7 @@ private MergeTest(final List locs) { @DataProvider(name = "SGLtest") public Object[][] createFindVariantRegionsData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); tests.add(new Object[]{new MergeTest(Arrays.asList(loc1))}); 
tests.add(new Object[]{new MergeTest(Arrays.asList(loc1, loc2))}); @@ -277,7 +277,7 @@ public void testSimpleGenomeLoc(MergeTest test) { @Test(expectedExceptions = GATKException.class) public void testNotContiguousLocs() { - final List locs = new ArrayList(1); + final List locs = new ArrayList<>(1); locs.add(loc1); locs.add(loc3); testMerge(locs); @@ -288,7 +288,7 @@ private void testMerge(final List locs) { for ( int i = 1; i < locs.size(); i++ ) result1 = GenomeLoc.merge(result1, locs.get(i)); - GenomeLoc result2 = GenomeLoc.merge(new TreeSet(locs)); + GenomeLoc result2 = GenomeLoc.merge(new TreeSet<>(locs)); Assert.assertEquals(result1, result2); Assert.assertEquals(result1.getStart(), locs.get(0).getStart()); Assert.assertEquals(result1.getStop(), locs.get(locs.size() - 1).getStop()); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/MedianUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/MedianUnitTest.java index 7307ab868f8..0c4d5702354 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/MedianUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/MedianUnitTest.java @@ -23,7 +23,7 @@ public class MedianUnitTest extends BaseTest { // -------------------------------------------------------------------------------- private class MedianTestProvider extends TestDataProvider { - final List values = new ArrayList(); + final List values = new ArrayList<>(); final int cap; final Integer expected; @@ -64,7 +64,7 @@ public Object[][] makeMedianTestProvider() { @Test(dataProvider = "MedianTestProvider") public void testBasicLikelihoods(MedianTestProvider cfg) { - final Median median = new Median(cfg.cap); + final Median median = new Median<>(cfg.cap); int nAdded = 0; for ( final int value : cfg.values ) @@ -80,7 +80,7 @@ public void testBasicLikelihoods(MedianTestProvider cfg) { @Test(expectedExceptions = IllegalStateException.class) public void testEmptyMedian() { - final Median median = new Median(); + final 
Median median = new Median<>(); Assert.assertTrue(median.isEmpty()); final Integer d = 100; Assert.assertEquals(median.getMedian(d), d); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/NGSPlatformUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/NGSPlatformUnitTest.java index 4fc9467396e..12b9eb567e0 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/NGSPlatformUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/NGSPlatformUnitTest.java @@ -39,7 +39,7 @@ public void setup() throws FileNotFoundException { @DataProvider(name = "TestPrimary") public Object[][] makeTestPrimary() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); for ( final NGSPlatform pl : NGSPlatform.values() ) { tests.add(new Object[]{pl, pl.BAM_PL_NAMES[0]}); @@ -56,9 +56,9 @@ public void testPrimary(final NGSPlatform pl, final String expectedPrimaryName) // make sure common names in BAMs are found @DataProvider(name = "TestMappings") public Object[][] makeTestMappings() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); - final Map expected = new HashMap(); + final Map expected = new HashMap<>(); // VALID VALUES ACCORDING TO SAM SPEC: https://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd=1&ved=0CC8QFjAA&url=http%3A%2F%2Fsamtools.sourceforge.net%2FSAM1.pdf&ei=Dm8WUbXAEsi10QHYqoDwDQ&usg=AFQjCNFkMtvEi6LeiKgpxQGtHTlqWKw2yw&bvm=bv.42080656,d.dmQ expected.put("CAPILLARY", NGSPlatform.CAPILLARY); expected.put("LS454", NGSPlatform.LS454); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/UtilsUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/UtilsUnitTest.java index 9ad0e1f34e3..70855a6a256 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/UtilsUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/UtilsUnitTest.java @@ -20,9 +20,9 @@ public class UtilsUnitTest extends BaseTest { public void testAppend() { for ( int leftSize : Arrays.asList(0, 1, 
2, 3) ) { for ( final int rightSize : Arrays.asList(0, 1, 2) ) { - final List left = new LinkedList(); + final List left = new LinkedList<>(); for ( int i = 0; i < leftSize; i++ ) left.add(i); - final List total = new LinkedList(); + final List total = new LinkedList<>(); for ( int i = 0; i < leftSize + rightSize; i++ ) total.add(i); if ( rightSize == 0 ) diff --git a/src/test/java/org/broadinstitute/hellbender/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java index a930d7c7ead..29d2e5ed66d 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/fasta/CachingIndexedFastaSequenceFileUnitTest.java @@ -122,7 +122,7 @@ public void testCachingIndexedFastaReaderTwoStage(File fasta, int cacheSize, int @DataProvider(name = "ParallelFastaTest") public Object[][] createParallelFastaTest() { - List params = new ArrayList(); + List params = new ArrayList<>(); for ( File fasta : Arrays.asList(simpleFasta) ) { for ( int cacheSize : CACHE_SIZES ) { diff --git a/src/test/java/org/broadinstitute/hellbender/utils/haplotype/EventMapUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/haplotype/EventMapUnitTest.java index 320cc20d709..2c596cad91e 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/haplotype/EventMapUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/haplotype/EventMapUnitTest.java @@ -21,7 +21,7 @@ public class EventMapUnitTest extends BaseTest { @DataProvider(name = "MyDataProvider") public Object[][] makeMyDataProvider() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final List SNP_ALLELES = Arrays.asList("A", "C"); final List INS_ALLELES = Arrays.asList("A", "ACGTGA"); @@ -32,21 +32,21 @@ public Object[][] makeMyDataProvider() { for ( final int rightNotClump : Arrays.asList(-1, 1000) 
) { for ( final int nClumped : Arrays.asList(3, 4) ) { for ( final List> alleles : Utils.makePermutations(allAlleles, nClumped, true)) { - final List allVCS = new LinkedList(); + final List allVCS = new LinkedList<>(); if ( leftNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, leftNotClump, SNP_ALLELES)); if ( middleNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, middleNotClump, SNP_ALLELES)); if ( rightNotClump != -1 ) allVCS.add(GATKVariantContextUtils.makeFromAlleles(NAME, CHR, rightNotClump, SNP_ALLELES)); int clumpStart = 50; - final List vcs = new LinkedList(); + final List vcs = new LinkedList<>(); for ( final List myAlleles : alleles ) { final VariantContext vc = GATKVariantContextUtils.makeFromAlleles(NAME, CHR, clumpStart, myAlleles); clumpStart = vc.getEnd() + 3; vcs.add(vc); } - tests.add(new Object[]{new EventMap(new LinkedList(allVCS)), Collections.emptyList()}); + tests.add(new Object[]{new EventMap(new LinkedList<>(allVCS)), Collections.emptyList()}); allVCS.addAll(vcs); tests.add(new Object[]{new EventMap(allVCS), vcs}); } @@ -76,7 +76,7 @@ else if ( ! 
expectedNeighbors.contains(vc) ) @DataProvider(name = "BlockSubstitutionsData") public Object[][] makeBlockSubstitutionsData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); for ( int size = EventMap.MIN_NUMBER_OF_EVENTS_TO_COMBINE_INTO_BLOCK_SUBSTITUTION; size < 10; size++ ) { final String ref = StringUtils.repeat("A", size); @@ -115,7 +115,7 @@ public void testBlockSubstitutionsData(final String refBases, final String haplo @DataProvider(name = "AdjacentSNPIndelTest") public Object[][] makeAdjacentSNPIndelTest() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); tests.add(new Object[]{"TT", "GCT", "1M1I1M", Arrays.asList(Arrays.asList("T", "GC"))}); tests.add(new Object[]{"GCT", "TT", "1M1D1M", Arrays.asList(Arrays.asList("GC", "T"))}); @@ -138,7 +138,7 @@ public void testAdjacentSNPIndelTest(final String refBases, final String haploty final EventMap ee = new EventMap(hap, refBases.getBytes(), loc, NAME); ee.replaceClumpedEventsWithBlockSubstitutions(); Assert.assertEquals(ee.getNumberOfEvents(), expectedAlleles.size()); - final List actuals = new ArrayList(ee.getVariantContexts()); + final List actuals = new ArrayList<>(ee.getVariantContexts()); for ( int i = 0; i < ee.getNumberOfEvents(); i++ ) { final VariantContext actual = actuals.get(i); Assert.assertEquals(actual.getReference().getDisplayString(), expectedAlleles.get(i).get(0)); @@ -148,7 +148,7 @@ public void testAdjacentSNPIndelTest(final String refBases, final String haploty @DataProvider(name = "MakeBlockData") public Object[][] makeMakeBlockData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); tests.add(new Object[]{Arrays.asList("A", "G"), Arrays.asList("AGT", "A"), Arrays.asList("AGT", "G")}); tests.add(new Object[]{Arrays.asList("A", "G"), Arrays.asList("A", "AGT"), Arrays.asList("A", "GGT")}); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/haplotype/HaplotypeUnitTest.java 
b/src/test/java/org/broadinstitute/hellbender/utils/haplotype/HaplotypeUnitTest.java index 0cc765796af..420485c5dce 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/haplotype/HaplotypeUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/haplotype/HaplotypeUnitTest.java @@ -26,7 +26,7 @@ public class HaplotypeUnitTest extends BaseTest { public void testSimpleInsertionAllele() { final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA"; - final ArrayList h1CigarList = new ArrayList(); + final ArrayList h1CigarList = new ArrayList<>(); h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M)); final Cigar h1Cigar = new Cigar(h1CigarList); String h1bases = "AACTTCTGGTCAACTGGTCAACTGGTCAACTGGTCA"; @@ -41,7 +41,7 @@ public void testSimpleInsertionAllele() { public void testSimpleDeletionAllele() { final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA"; - final ArrayList h1CigarList = new ArrayList(); + final ArrayList h1CigarList = new ArrayList<>(); h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M)); final Cigar h1Cigar = new Cigar(h1CigarList); String h1bases = "ATCAACTGGTCAACTGGTCAACTGGTCA"; @@ -56,7 +56,7 @@ public void testSimpleDeletionAllele() { public void testSimpleSNPAllele() { final String bases = "ACTGGTCAACTGGTCAACTGGTCAACTGGTCA"; - final ArrayList h1CigarList = new ArrayList(); + final ArrayList h1CigarList = new ArrayList<>(); h1CigarList.add(new CigarElement(bases.length(), CigarOperator.M)); final Cigar h1Cigar = new Cigar(h1CigarList); String h1bases = "AGTGGTCAACTGGTCAACTGGTCAACTGGTCA"; @@ -71,7 +71,7 @@ public void testSimpleSNPAllele() { public void testComplexInsertionAllele() { final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC"; - final ArrayList h1CigarList = new ArrayList(); + final ArrayList h1CigarList = new ArrayList<>(); h1CigarList.add(new CigarElement(4, CigarOperator.M)); h1CigarList.add(new CigarElement(10, CigarOperator.I)); h1CigarList.add(new 
CigarElement(8, CigarOperator.M)); @@ -90,7 +90,7 @@ public void testComplexInsertionAllele() { public void testComplexDeletionAllele() { final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC"; - final ArrayList h1CigarList = new ArrayList(); + final ArrayList h1CigarList = new ArrayList<>(); h1CigarList.add(new CigarElement(4, CigarOperator.M)); h1CigarList.add(new CigarElement(10, CigarOperator.I)); h1CigarList.add(new CigarElement(8, CigarOperator.M)); @@ -109,7 +109,7 @@ public void testComplexDeletionAllele() { public void testComplexSNPAllele() { final String bases = "ATCG" + "CCGGCCGGCC" + "ATCGATCG" + "AGGGGGA" + "AGGC"; - final ArrayList h1CigarList = new ArrayList(); + final ArrayList h1CigarList = new ArrayList<>(); h1CigarList.add(new CigarElement(4, CigarOperator.M)); h1CigarList.add(new CigarElement(10, CigarOperator.I)); h1CigarList.add(new CigarElement(8, CigarOperator.M)); @@ -128,7 +128,7 @@ private void basicInsertTest(String ref, String alt, int loc, Cigar cigar, Strin final Haplotype h = new Haplotype(hap.getBytes()); final Allele h1refAllele = Allele.create(ref, true); final Allele h1altAllele = Allele.create(alt, false); - final ArrayList alleles = new ArrayList(); + final ArrayList alleles = new ArrayList<>(); alleles.add(h1refAllele); alleles.add(h1altAllele); final VariantContext vc = new VariantContextBuilder().alleles(alleles).loc("1", loc, loc + h1refAllele.getBases().length - 1).make(); @@ -159,7 +159,7 @@ public void testConsolidateCigar() throws Exception { @DataProvider(name = "TrimmingData") public Object[][] makeTrimmingData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); // this functionality can be adapted to provide input data for whatever you might want in your data final GenomeLoc loc = new UnvalidatingGenomeLoc("20", 0, 10, 20); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/illumina/ClippingUtilTest.java 
b/src/test/java/org/broadinstitute/hellbender/utils/illumina/ClippingUtilTest.java index 2e6c0f78830..4b88bc85887 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/illumina/ClippingUtilTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/illumina/ClippingUtilTest.java @@ -285,7 +285,7 @@ public String getName() { */ private static String makeBogusReadString(final int len) { final StringBuilder builder = new StringBuilder(len); - final Map nextChar = new HashMap(); + final Map nextChar = new HashMap<>(); nextChar.put('A', 'C'); nextChar.put('C', 'G'); nextChar.put('G', 'T'); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/interval/IntervalUtilsUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/interval/IntervalUtilsUnitTest.java index 8b7a97c4034..64d391adfa6 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/interval/IntervalUtilsUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/interval/IntervalUtilsUnitTest.java @@ -243,8 +243,8 @@ public void testMergeListsBySetOperatorNoOverlap() { @Test public void testMergeListsBySetOperatorAllOverlap() { // a couple of lists we'll use for the testing - List allSites = new ArrayList(); - List listEveryTwoFromTwo = new ArrayList(); + List allSites = new ArrayList<>(); + List listEveryTwoFromTwo = new ArrayList<>(); // create the two lists we'll use for (int x = 1; x < 101; x++) { @@ -265,8 +265,8 @@ public void testMergeListsBySetOperatorAllOverlap() { @Test public void testMergeListsBySetOperator() { // a couple of lists we'll use for the testing - List allSites = new ArrayList(); - List listEveryTwoFromTwo = new ArrayList(); + List allSites = new ArrayList<>(); + List listEveryTwoFromTwo = new ArrayList<>(); // create the two lists we'll use for (int x = 1; x < 101; x++) { @@ -288,8 +288,8 @@ public void testMergeListsBySetOperator() { @Test public void testOverlappingIntervalsFromSameSourceWithIntersection() { // a couple of lists we'll 
use for the testing - List source1 = new ArrayList(); - List source2 = new ArrayList(); + List source1 = new ArrayList<>(); + List source2 = new ArrayList<>(); source1.add(hg19GenomeLocParser.createGenomeLoc("1", 10, 20)); source1.add(hg19GenomeLocParser.createGenomeLoc("1", 15, 25)); @@ -698,7 +698,7 @@ public void testScatterContigIntervalsMax() { } private List testFiles(String prefix, int count, String suffix) { - ArrayList files = new ArrayList(); + ArrayList files = new ArrayList<>(); for (int i = 1; i <= count; i++) { files.add(createTempFile(prefix + i, suffix)); } @@ -781,7 +781,7 @@ protected FlankingIntervalsTestData(Class clazz, String name, File referenceF } private static List parse(GenomeLocParser parser, List locs) { - List parsed = new ArrayList(); + List parsed = new ArrayList<>(); for (String loc: locs) parsed.add("unmapped".equals(loc) ? GenomeLoc.UNMAPPED : parser.parseGenomeLoc(loc)); return parsed; @@ -935,7 +935,7 @@ public void testWriteFlankingIntervals(FlankingIntervalsTestData data) throws Ex File originalFile = createTempFile("original.", ".intervals"); File flankingFile = createTempFile("flanking.", ".intervals"); try { - List lines = new ArrayList(); + List lines = new ArrayList<>(); for (GenomeLoc loc: data.original) lines.add(loc.toString()); FileUtils.writeLines(originalFile, lines); @@ -958,7 +958,7 @@ public void testWritingBadFlankingIntervals(FlankingIntervalsTestData data) thro File originalFile = createTempFile("original.", ".intervals"); File flankingFile = createTempFile("flanking.", ".intervals"); try { - List lines = new ArrayList(); + List lines = new ArrayList<>(); for (GenomeLoc loc: data.original) lines.add(loc.toString()); FileUtils.writeLines(originalFile, lines); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/pileup/PileupElementUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/pileup/PileupElementUnitTest.java index 1b051deb00c..4cfd759484b 100644 --- 
a/src/test/java/org/broadinstitute/hellbender/utils/pileup/PileupElementUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/pileup/PileupElementUnitTest.java @@ -103,7 +103,7 @@ public void testPileupElementTest(LIBSTest params) { @DataProvider(name = "PrevAndNextTest") public Object[][] makePrevAndNextTest() { - final List tests = new LinkedList(); + final List tests = new LinkedList<>(); final List operators = Arrays.asList(CigarOperator.I, CigarOperator.P, CigarOperator.S); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/read/AlignmentUtilsUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/read/AlignmentUtilsUnitTest.java index bc3c196952c..c05fba12773 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/read/AlignmentUtilsUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/read/AlignmentUtilsUnitTest.java @@ -104,14 +104,14 @@ private SAMRecord createMappedRead(String name, int start) { private final List> makeCigarElementCombinations() { // this functionality can be adapted to provide input data for whatever you might want in your data - final List cigarElements = new LinkedList(); + final List cigarElements = new LinkedList<>(); for ( final int size : Arrays.asList(0, 10) ) { for ( final CigarOperator op : CigarOperator.values() ) { cigarElements.add(new CigarElement(size, op)); } } - final List> combinations = new LinkedList>(); + final List> combinations = new LinkedList<>(); for ( final int nElements : Arrays.asList(1, 2, 3) ) { combinations.addAll(Utils.makePermutations(cigarElements, nElements, true)); } @@ -122,7 +122,7 @@ private final List> makeCigarElementCombinations() { @DataProvider(name = "CalcNumDifferentBasesData") public Object[][] makeCalcNumDifferentBasesData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); tests.add(new Object[]{"5M", "ACGTA", "ACGTA", 0}); tests.add(new Object[]{"5M", "ACGTA", "ACGTT", 1}); @@ -149,7 +149,7 @@ public void 
testCalcNumDifferentBases(final String cigarString, final String ref @DataProvider(name = "NumAlignedBasesCountingSoftClips") public Object[][] makeNumAlignedBasesCountingSoftClips() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final EnumSet alignedToGenome = EnumSet.of(CigarOperator.M, CigarOperator.EQ, CigarOperator.X, CigarOperator.S); for ( final List elements : makeCigarElementCombinations() ) { @@ -172,7 +172,7 @@ public void testNumAlignedBasesCountingSoftClips(final Cigar cigar, final int ex @DataProvider(name = "CigarHasZeroElement") public Object[][] makeCigarHasZeroElement() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); for ( final List elements : makeCigarElementCombinations() ) { boolean hasZero = false; @@ -190,7 +190,7 @@ public void testCigarHasZeroSize(final Cigar cigar, final boolean hasZero) { @DataProvider(name = "NumHardClipped") public Object[][] makeNumHardClipped() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); for ( final List elements : makeCigarElementCombinations() ) { int n = 0; @@ -212,7 +212,7 @@ public void testNumHardClipped(final Cigar cigar, final int expected) { @DataProvider(name = "NumAlignedBlocks") public Object[][] makeNumAlignedBlocks() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); for ( final List elements : makeCigarElementCombinations() ) { int n = 0; @@ -239,7 +239,7 @@ public void testNumAlignedBlocks(final Cigar cigar, final int expected) { @DataProvider(name = "ConsolidateCigarData") public Object[][] makeConsolidateCigarData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); // this functionality can be adapted to provide input data for whatever you might want in your data tests.add(new Object[]{"1M1M", "2M"}); @@ -251,14 +251,14 @@ public Object[][] makeConsolidateCigarData() { tests.add(new Object[]{"1M1M1M1D2M1M", "3M1D3M"}); tests.add(new Object[]{"6M6M6M", "18M"}); - final List elements = new LinkedList(); 
+ final List elements = new LinkedList<>(); int i = 1; for ( final CigarOperator op : CigarOperator.values() ) { elements.add(new CigarElement(i++, op)); } for ( final List ops : Utils.makePermutations(elements, 3, false) ) { final String expected = new Cigar(ops).toString(); - final List cutElements = new LinkedList(); + final List cutElements = new LinkedList<>(); for ( final CigarElement elt : ops ) { for ( int j = 0; j < elt.getLength(); j++ ) { cutElements.add(new CigarElement(1, elt.getOperator())); @@ -282,7 +282,7 @@ public void testConsolidateCigarWithData(final String testCigarString, final Str @DataProvider(name = "SoftClipsDataProvider") public Object[][] makeSoftClipsDataProvider() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); // this functionality can be adapted to provide input data for whatever you might want in your data for ( final int lengthOfLeftClip : Arrays.asList(0, 1, 10) ) { @@ -341,7 +341,7 @@ public void testSoftClipsData(final byte[] qualsOfSoftClipsOnLeft, final int mid @DataProvider(name = "MismatchCountDataProvider") public Object[][] makeMismatchCountDataProvider() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final int readLength = 20; final int lengthOfIndel = 2; @@ -442,7 +442,7 @@ private static String buildTestCigarString(final char middleOp, final int length @DataProvider(name = "AlignmentByteArrayOffsetDataProvider") public Object[][] makeAlignmentByteArrayOffsetDataProvider() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final int readLength = 20; final int lengthOfIndel = 2; @@ -496,7 +496,7 @@ public void testAlignmentByteArrayOffsetData(final Cigar cigar, final int offset @DataProvider(name = "ReadToAlignmentByteArrayDataProvider") public Object[][] makeReadToAlignmentByteArrayDataProvider() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final int readLength = 20; final int lengthOfIndel = 2; @@ -549,7 +549,7 @@ else if ( middleOp == 'I' 
&& i == startOfIndelBases - 1 ) @DataProvider(name = "LeftAlignIndelDataProvider") public Object[][] makeLeftAlignIndelDataProvider() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final byte[] repeat1Reference = "ABCDEFGHIJKLMNOPXXXXXXXXXXABCDEFGHIJKLMNOP".getBytes(); final byte[] repeat2Reference = "ABCDEFGHIJKLMNOPXYXYXYXYXYABCDEFGHIJKLMNOP".getBytes(); @@ -667,7 +667,7 @@ public void testLeftAlignIndelData(final Cigar originalCigar, final Cigar expect @DataProvider(name = "TrimCigarData") public Object[][] makeTrimCigarData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); for ( final CigarOperator op : Arrays.asList(CigarOperator.D, CigarOperator.EQ, CigarOperator.X, CigarOperator.M) ) { for ( int myLength = 1; myLength < 6; myLength++ ) { @@ -749,7 +749,7 @@ public void testTrimCigar(final String cigarString, final int start, final int l @DataProvider(name = "TrimCigarByBasesData") public Object[][] makeTrimCigarByBasesData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); tests.add(new Object[]{"2M3I4M", 0, 8, "2M3I4M"}); tests.add(new Object[]{"2M3I4M", 1, 8, "1M3I4M"}); @@ -788,7 +788,7 @@ public void testTrimCigarByBase(final String cigarString, final int start, final @DataProvider(name = "ApplyCigarToCigarData") public Object[][] makeApplyCigarToCigarData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); for ( int i = 1; i < 5; i++ ) tests.add(new Object[]{i + "M", i + "M", i + "M"}); @@ -841,7 +841,7 @@ public void testApplyCigarToCigar(final String firstToSecondString, final String @DataProvider(name = "ReadOffsetFromCigarData") public Object[][] makeReadOffsetFromCigarData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final int SIZE = 10; for ( int i = 0; i < SIZE; i++ ) { @@ -895,7 +895,7 @@ public void testReadOffsetFromCigar(final String cigarString, final int startOnC @DataProvider(name = "AddCigarElementsData") public Object[][] 
makeAddCigarElementsData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); final int SIZE = 10; for ( final CigarOperator op : Arrays.asList(CigarOperator.I, CigarOperator.M, CigarOperator.S, CigarOperator.EQ, CigarOperator.X)) { @@ -919,7 +919,7 @@ public void testAddCigarElements(final String cigarString, final int pos, final final CigarElement elt = cigar.getCigarElement(0); final Cigar expectedCigar = TextCigarCodec.decode(expectedCigarString); - final List elts = new LinkedList(); + final List elts = new LinkedList<>(); final int actualEndPos = AlignmentUtils.addCigarElements(elts, pos, start, end, elt); Assert.assertEquals(actualEndPos, pos + elt.getLength()); @@ -928,7 +928,7 @@ public void testAddCigarElements(final String cigarString, final int pos, final @DataProvider(name = "GetBasesCoveringRefIntervalData") public Object[][] makeGetBasesCoveringRefIntervalData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); // matches // 0123 @@ -984,7 +984,7 @@ public void testGetBasesCoveringRefInterval(final String basesString, final int @DataProvider(name = "StartsOrEndsWithInsertionOrDeletionData") public Object[][] makeStartsOrEndsWithInsertionOrDeletionData() { - List tests = new ArrayList(); + List tests = new ArrayList<>(); tests.add(new Object[]{"2M", false}); tests.add(new Object[]{"1D2M", true}); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/read/ReadUtilsUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/read/ReadUtilsUnitTest.java index f0d7698f9fd..3804859bd92 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/read/ReadUtilsUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/read/ReadUtilsUnitTest.java @@ -183,7 +183,7 @@ public void testGetBasesReverseComplement() { public void testGetMaxReadLength() { for( final int minLength : Arrays.asList( 5, 30, 50 ) ) { for( final int maxLength : Arrays.asList( 50, 75, 100 ) ) { - final List reads = new 
ArrayList(); + final List reads = new ArrayList<>(); for( int readLength = minLength; readLength <= maxLength; readLength++ ) { reads.add( ArtificialSAMUtils.createRandomRead( readLength ) ); } @@ -191,7 +191,7 @@ public void testGetMaxReadLength() { } } - final List reads = new LinkedList(); + final List reads = new LinkedList<>(); Assert.assertEquals(ReadUtils.getMaxReadLength(reads), 0, "Empty list should have max length of zero"); } @@ -229,7 +229,7 @@ public void testReadWithNsRefAfterDeletion() throws FileNotFoundException { @DataProvider(name = "HasWellDefinedFragmentSizeData") public Object[][] makeHasWellDefinedFragmentSizeData() throws Exception { - final List tests = new LinkedList(); + final List tests = new LinkedList<>(); // setup a basic read that will work final SAMFileHeader header = ArtificialSAMUtils.createArtificialSamHeader(); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/read/markduplicates/AbstractMarkDuplicatesTester.java b/src/test/java/org/broadinstitute/hellbender/utils/read/markduplicates/AbstractMarkDuplicatesTester.java index 9b7942c6d1d..cd837ea41c9 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/read/markduplicates/AbstractMarkDuplicatesTester.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/read/markduplicates/AbstractMarkDuplicatesTester.java @@ -118,7 +118,7 @@ public void test() { Assert.assertEquals(outputRecords, this.getNumberOfRecords(), ("saw " + outputRecords + " output records, vs. 
" + this.getNumberOfRecords() + " input records")); // Check the values written to metrics.txt against our input expectations - final MetricsFile> metricsOutput = new MetricsFile>(); + final MetricsFile> metricsOutput = new MetricsFile<>(); try{ metricsOutput.read(new FileReader(metricsFile)); } diff --git a/src/test/java/org/broadinstitute/hellbender/utils/read/testers/SamFileTester.java b/src/test/java/org/broadinstitute/hellbender/utils/read/testers/SamFileTester.java index 611c5e59b54..cfc9e692351 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/read/testers/SamFileTester.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/read/testers/SamFileTester.java @@ -14,7 +14,7 @@ public abstract class SamFileTester extends CommandLineProgramTest { private final SAMRecordSetBuilder samRecordSetBuilder; - protected final Map duplicateFlags = new HashMap(); + protected final Map duplicateFlags = new HashMap<>(); private File outputDir; private File output; private int readNameCounter = 0; diff --git a/src/test/java/org/broadinstitute/hellbender/utils/recalibration/EventTypeUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/recalibration/EventTypeUnitTest.java index d09454163bd..9d3acf545dc 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/recalibration/EventTypeUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/recalibration/EventTypeUnitTest.java @@ -22,7 +22,7 @@ public void testEventTypes() { @Test public void testEventTypesEnumItself() { - final Set shortReps = new HashSet(); + final Set shortReps = new HashSet<>(); for ( final EventType et : EventType.values() ) { Assert.assertFalse(shortReps.contains(et.toString()), "Short representative for EventType has duplicates for " + et); shortReps.add(et.toString()); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/report/GATKReportUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/report/GATKReportUnitTest.java index 
f0c0f5d59e1..667580603c8 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/report/GATKReportUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/report/GATKReportUnitTest.java @@ -87,7 +87,7 @@ private boolean isSorted(GATKReportTable table) { System.err.println ("Error: " + e.getMessage()); } - ArrayList rows = new ArrayList(); + ArrayList rows = new ArrayList<>(); try { // Open the file FileInputStream fStream = new FileInputStream(testingSortingTableFile); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/runtime/ProcessControllerUnitTest.java b/src/test/java/org/broadinstitute/hellbender/utils/runtime/ProcessControllerUnitTest.java index e7de7cb6d8e..1359963273d 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/runtime/ProcessControllerUnitTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/runtime/ProcessControllerUnitTest.java @@ -55,7 +55,7 @@ public void testEnvironment() { job.getStdoutSettings().setBufferSize(-1); job.setRedirectErrorStream(true); - Map env = new HashMap(System.getenv()); + Map env = new HashMap<>(System.getenv()); env.put(key, value); job.setEnvironment(env); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/test/BaseTest.java b/src/test/java/org/broadinstitute/hellbender/utils/test/BaseTest.java index 324f1652257..c7ada8f215b 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/test/BaseTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/test/BaseTest.java @@ -223,8 +223,8 @@ public static void assertEqualsDoubleSmart(final double actual, final double exp } public static void assertEqualsSet(final Set actual, final Set expected, final String info) { - final Set actualSet = new HashSet(actual); - final Set expectedSet = new HashSet(expected); + final Set actualSet = new HashSet<>(actual); + final Set expectedSet = new HashSet<>(expected); Assert.assertTrue(actualSet.equals(expectedSet), info); // note this is necessary due to testng 
bug for set comps } @@ -327,7 +327,7 @@ public static void assertGenotypesAreEqual(final Genotype actual, final Genotype } private static void assertAttributesEquals(final Map actual, Map expected) { - final Set expectedKeys = new HashSet(expected.keySet()); + final Set expectedKeys = new HashSet<>(expected.keySet()); for ( final Map.Entry act : actual.entrySet() ) { final Object actualValue = act.getValue(); diff --git a/src/test/java/org/broadinstitute/hellbender/utils/text/parsers/TextFileParsersTest.java b/src/test/java/org/broadinstitute/hellbender/utils/text/parsers/TextFileParsersTest.java index 83a0ab8423b..45e308184ab 100644 --- a/src/test/java/org/broadinstitute/hellbender/utils/text/parsers/TextFileParsersTest.java +++ b/src/test/java/org/broadinstitute/hellbender/utils/text/parsers/TextFileParsersTest.java @@ -33,7 +33,7 @@ public class TextFileParsersTest { public void testTextFileParser(Object fileOrStream) throws IOException { FormatUtil format = new FormatUtil(); - List expected = new ArrayList(); + List expected = new ArrayList<>(); if (fileOrStream instanceof File) { BufferedReader reader = new BufferedReader(new FileReader((File)fileOrStream)); String line = null; @@ -86,7 +86,7 @@ private Object[][] getBasicInputParserData() public void testMultiFileParsing(Object fileOrStream1, Object fileOrStream2) throws IOException { FormatUtil format = new FormatUtil(); - List expected = new ArrayList(); + List expected = new ArrayList<>(); if (fileOrStream1 instanceof File) { BufferedReader reader = new BufferedReader(new FileReader((File)fileOrStream1)); String line = null;