
0.23.1
ryan-williams committed Jun 20, 2017
1 parent b61fdd5 commit f93b20b
Showing 7 changed files with 25 additions and 20 deletions.
4 changes: 2 additions & 2 deletions build.sbt
@@ -2,7 +2,7 @@ organization := "org.hammerlab.adam"

name := sparkName("core")

version := "0.23.1-SNAPSHOT"
version := "0.23.1"

addSparkDeps
publishTestJar
@@ -20,7 +20,7 @@ deps ++= Seq(
libs.value('bdg_utils_metrics),
libs.value('bdg_utils_misc),
libs.value('commons_io),
-libs.value('hadoop_bam),
+"org.seqdoop" % "hadoop-bam" % "7.8.0" exclude("org.apache.hadoop", "hadoop-client"),
libs.value('htsjdk),
libs.value('loci),
libs.value('log4j),
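A note on the hadoop-bam line above: replacing the shared libs.value('hadoop_bam) alias with explicit coordinates pins hadoop-bam at 7.8.0, and the exclude drops its transitive hadoop-client, presumably so the Hadoop client version that addSparkDeps brings in wins. A minimal standalone sketch of the same sbt pattern:

    // sbt sketch: pin a library and exclude one transitive dependency.
    // The parentheses let the infix `exclude` span two lines.
    libraryDependencies += (
      "org.seqdoop" % "hadoop-bam" % "7.8.0"
        exclude("org.apache.hadoop", "hadoop-client")
    )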
2 changes: 1 addition & 1 deletion project/plugins.sbt
@@ -1 +1 @@
addSbtPlugin("org.hammerlab" % "sbt-parent" % "1.7.7-SNAPSHOT")
addSbtPlugin("org.hammerlab" % "sbt-parent" % "2.0.1")
@@ -91,7 +91,7 @@ class ADAMContextSuite
test("can read a small .CRAM file") {
val path = testFile("artificial.cram")
val referencePath = resourceUrl("artificial.fa").toString
-sc.hadoopConfiguration.set(REFERENCE_SOURCE_PATH_PROPERTY,
+hadoopConf.set(REFERENCE_SOURCE_PATH_PROPERTY,
referencePath)
val reads: RDD[AlignmentRecord] = sc.loadAlignments(path).rdd
reads.count() should === (10)
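The recurring sc.hadoopConfiguration → hadoopConf substitution across the suites below suggests the refreshed test harness (pulled in via sbt-parent 2.0.1) exposes the Hadoop configuration directly on the test base class. A minimal sketch of such a helper, assuming only a live SparkContext; the trait name is hypothetical:

    import org.apache.hadoop.conf.Configuration
    import org.apache.spark.SparkContext

    // Hypothetical helper: lets test bodies write hadoopConf.set(...)
    // instead of sc.hadoopConfiguration.set(...).
    trait HadoopConfHelper {
      def sc: SparkContext
      def hadoopConf: Configuration = sc.hadoopConfiguration
    }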
@@ -21,7 +21,7 @@ import org.apache.hadoop.fs.{ FileSystem, Path }
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.adam.rdd.ParallelFileMerger._
import org.bdgenomics.adam.util.ADAMFunSuite
-import org.seqdoop.hadoop_bam.CRAMInputFormat
+import org.seqdoop.hadoop_bam.CRAMInputFormat.REFERENCE_SOURCE_PATH_PROPERTY

class ParallelFileMergerSuite
extends ADAMFunSuite {
@@ -63,7 +63,7 @@ class ParallelFileMergerSuite

val fileSizes = Seq(29408, 3093)

-val fs = FileSystem.get(sc.hadoopConfiguration)
+val fs = FileSystem.get(hadoopConf)
val (size, sizes) = getFullSize(fs, files)

assert(size === fileSizes.sum.toLong)
@@ -93,7 +93,7 @@ class ParallelFileMergerSuite
)
.map(new Path(_))

-val fs = FileSystem.get(sc.hadoopConfiguration)
+val fs = FileSystem.get(hadoopConf)
val fileSizesMap =
files
.map(f => (f, fs.getFileStatus(f).getLen().toInt))
@@ -127,7 +127,7 @@ class ParallelFileMergerSuite
)
.map(new Path(_))

-val fs = FileSystem.get(sc.hadoopConfiguration)
+val fs = FileSystem.get(hadoopConf)
val fileSizesMap =
files
.map(f => (f, fs.getFileStatus(f).getLen().toInt))
@@ -173,7 +173,7 @@ class ParallelFileMergerSuite
reads.transform(_.repartition(4))
.saveAsSam(outPath, asSingleFile = true, deferMerging = true)

-val fs = FileSystem.get(sc.hadoopConfiguration)
+val fs = FileSystem.get(hadoopConf)
val filesToMerge = (Seq(outPath + "_head") ++ (0 until 4).map(i => {
(outPath + "_tail") / "part-r-0000%d".format(i)
})).map(new Path(_))
@@ -182,7 +182,7 @@
mergePaths(
outPath,
filesToMerge,
-sc.broadcast(sc.hadoopConfiguration),
+sc.broadcast(hadoopConf),
false,
false
)
@@ -199,7 +199,7 @@
reads.transform(_.repartition(4))
.saveAsSam(outPath, asSingleFile = true, deferMerging = true)

-val fs = FileSystem.get(sc.hadoopConfiguration)
+val fs = FileSystem.get(hadoopConf)
val filesToMerge = (Seq(outPath + "_head") ++ (0 until 4).map(i => {
(outPath + "_tail") / "part-r-0000%d".format(i)
})).map(new Path(_))
@@ -208,7 +208,7 @@
mergePaths(
outPath,
filesToMerge,
-sc.broadcast(sc.hadoopConfiguration),
+sc.broadcast(hadoopConf),
true,
false
)
@@ -220,15 +220,17 @@

test("merge a sharded cram file") {
val referencePath = resourceUrl("artificial.fa").toString
-sc.hadoopConfiguration.set(CRAMInputFormat.REFERENCE_SOURCE_PATH_PROPERTY,
-  referencePath)
+hadoopConf.set(
+  REFERENCE_SOURCE_PATH_PROPERTY,
+  referencePath
+)
val reads = sc.loadAlignments(testFile("artificial.cram"))
val outPath = tmpFile("out.cram")

reads.transform(_.repartition(4))
.saveAsSam(outPath, isSorted = true, asSingleFile = true, deferMerging = true)

-val fs = FileSystem.get(sc.hadoopConfiguration)
+val fs = FileSystem.get(hadoopConf)
val filesToMerge = (Seq(outPath + "_head") ++ (0 until 4).map(i => {
(outPath + "_tail") / "part-r-0000%d".format(i)
})).map(new Path(_))
@@ -237,7 +239,7 @@
mergePaths(
outPath,
filesToMerge,
-sc.broadcast(sc.hadoopConfiguration),
+sc.broadcast(hadoopConf),
false,
true
)
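CRAM is reference-compressed, so the CRAM tests in these suites must point hadoop-bam at a FASTA before reading or merging a .cram. With the constant now imported directly, the setup reduces to the pattern below; this is a sketch, the paths are illustrative, and hadoopConf/sc come from the suite:

    import org.seqdoop.hadoop_bam.CRAMInputFormat.REFERENCE_SOURCE_PATH_PROPERTY

    // hadoop-bam decodes CRAM records against this reference source.
    hadoopConf.set(REFERENCE_SOURCE_PATH_PROPERTY, "file:///data/artificial.fa")
    val reads = sc.loadAlignments("/data/artificial.cram")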
@@ -25,7 +25,7 @@ class FragmentRDDSuite extends ADAMFunSuite {

test("don't lose any reads when piping interleaved fastq to sam") {
// write suffixes at end of reads
-sc.hadoopConfiguration.setBoolean(InterleavedFASTQInFormatter.WRITE_SUFFIXES, true)
+hadoopConf.setBoolean(InterleavedFASTQInFormatter.WRITE_SUFFIXES, true)

val fragmentsPath = testFile("interleaved_fastq_sample1.ifq")
val ardd = sc.loadFragments(fragmentsPath)
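In the interleaved-FASTQ piping test above, WRITE_SUFFIXES asks the formatter to keep the /1 and /2 read-pair suffixes on read names when piping to SAM. A sketch of the toggle in isolation, assuming ADAM's 0.23-era package layout; the input path is illustrative:

    import org.bdgenomics.adam.rdd.fragment.InterleavedFASTQInFormatter

    // Keep /1 and /2 read-pair suffixes when formatting interleaved FASTQ.
    hadoopConf.setBoolean(InterleavedFASTQInFormatter.WRITE_SUFFIXES, true)
    val fragments = sc.loadFragments("/data/interleaved_fastq_sample1.ifq")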
@@ -217,7 +217,7 @@ class AlignmentRecordRDDSuite
test("round trip with single CRAM file produces equivalent Read values") {
val readsPath = testFile("artificial.cram")
val referencePath = resourceUrl("artificial.fa").toString
-sc.hadoopConfiguration.set(REFERENCE_SOURCE_PATH_PROPERTY,
+hadoopConf.set(REFERENCE_SOURCE_PATH_PROPERTY,
referencePath)

val ardd = sc.loadBam(readsPath)
@@ -250,7 +250,7 @@ class AlignmentRecordRDDSuite
test("round trip with sharded CRAM file produces equivalent Read values") {
val readsPath = testFile("artificial.cram")
val referencePath = resourceUrl("artificial.fa").toString
-sc.hadoopConfiguration.set(REFERENCE_SOURCE_PATH_PROPERTY,
+hadoopConf.set(REFERENCE_SOURCE_PATH_PROPERTY,
referencePath)

val ardd = sc.loadBam(readsPath)
5 changes: 4 additions & 1 deletion src/test/scala/org/bdgenomics/adam/util/ADAMFunSuite.scala
@@ -29,7 +29,10 @@ import org.hammerlab.test.resources.{ File, Url }
import org.scalactic.TypeCheckedTripleEquals

abstract class ADAMFunSuite
-  extends KryoSparkSuite(classOf[ADAMKryoRegistrator], referenceTracking = true)
+  extends KryoSparkSuite(
+    classOf[ADAMKryoRegistrator],
+    referenceTracking = true
+  )
with ContigNameCanEqualString
with LocusCanEqualInt
with ClearContigNames
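The reflowed KryoSparkSuite call is behavior-neutral: referenceTracking = true keeps Kryo tracking previously written objects, which shared or cyclic object graphs require, at a small serialization cost. A concrete suite then just extends the base class; the test body below is illustrative:

    class ExampleSuite extends ADAMFunSuite {
      test("hadoopConf is provided by the base suite") {
        hadoopConf.set("example.key", "example.value")
        assert(hadoopConf.get("example.key") == "example.value")
      }
    }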
