Skip to content

Commit

Permalink
Merge pull request #21 from ryan-williams/h
Browse files Browse the repository at this point in the history
upgrade plugin, spark
  • Loading branch information
ryan-williams committed Aug 6, 2017
2 parents c51cd7a + 1f0523a commit 5735f94
Show file tree
Hide file tree
Showing 6 changed files with 47 additions and 62 deletions.
24 changes: 12 additions & 12 deletions build.sbt
@@ -1,28 +1,28 @@

organization := "org.hammerlab.genomics"
name := "loci"
version := "1.5.8"
version := "2.0.0"

addSparkDeps

deps ++= Seq(
libs.value('args4j),
libs.value('args4s),
libs.value('htsjdk),
libs.value('iterators),
libs.value('paths),
libs.value('scalautils),
libs.value('string_utils)
args4j,
"org.hammerlab" ^^ "args4s" ^ "1.3.0",
htsjdk,
iterators % "1.3.0",
paths % "1.2.0",
scalautils,
string_utils % "1.2.0"
)

compileAndTestDeps += libs.value('reference)
compileAndTestDeps += reference % "1.4.0"

// Shade Guava due to use of RangeSet classes from 16.0.1 that don't exist in Spark/Hadoop's Guava 11.0.2.
shadedDeps += "com.google.guava" % "guava" % "19.0"
shadedDeps += guava

// Rename shaded Guava classes.
shadeRenames += "com.google.common.**" -> "org.hammerlab.guava.@1"
shadeRenames += "com.google.thirdparty.**" -> "org.hammerlab.guava.@1"
shadeRenames += "com.google.common.**" → "org.hammerlab.guava.@1"
shadeRenames += "com.google.thirdparty.**" → "org.hammerlab.guava.@1"

// Publish JAR that includes shaded Guava.
publishThinShadedJar
Expand Down
2 changes: 1 addition & 1 deletion project/plugins.sbt
@@ -1 +1 @@
addSbtPlugin("org.hammerlab" % "sbt-parent" % "2.0.1")
addSbtPlugin("org.hammerlab" % "sbt-parent" % "3.0.0")

This file was deleted.

@@ -1,7 +1,6 @@
package org.hammerlab.genomics.loci.parsing

import htsjdk.variant.vcf.VCFFileReader
import org.apache.hadoop.conf.Configuration
import org.hammerlab.genomics.loci.VariantContext
import org.hammerlab.genomics.loci.args.LociArgs
import org.hammerlab.genomics.reference.ContigName.Factory
Expand Down Expand Up @@ -56,16 +55,17 @@ object ParsedLoci {
* (lociFileOpt), and return a [[ParsedLoci]] encapsulating the result. The latter can then be converted into a
* [[org.hammerlab.genomics.loci.set.LociSet]] when contig-lengths are available / have been parsed from read-sets.
*/
def apply(args: LociArgs,
hadoopConfiguration: Configuration): Option[ParsedLoci] =
apply(args.lociStrOpt, args.lociFileOpt, hadoopConfiguration)
def apply(args: LociArgs): Option[ParsedLoci] =
apply(
args.lociStrOpt,
args.lociFileOpt
)

def apply(lociStrOpt: Option[String],
lociFileOpt: Option[Path],
hadoopConfiguration: Configuration)(implicit factory: Factory): Option[ParsedLoci] =
lociFileOpt: Option[Path])(implicit factory: Factory): Option[ParsedLoci] =
(lociStrOpt, lociFileOpt) match {
case (Some(lociStr), _) => Some(ParsedLoci(lociStr))
case (_, Some(lociPath)) => Some(loadFromPath(lociPath, hadoopConfiguration))
case (_, Some(lociPath)) => Some(loadFromPath(lociPath))
case _ =>
None
}
Expand All @@ -78,8 +78,7 @@ object ParsedLoci {
* "chrX:5-10,chr12-10-20", etc. Whitespace is ignored.
* @return parsed loci
*/
private def loadFromPath(path: Path,
hadoopConfiguration: Configuration)(implicit factory: Factory): ParsedLoci =
private def loadFromPath(path: Path)(implicit factory: Factory): ParsedLoci =
path.extension match {
      case "vcf" ⇒ LociRanges.fromVCF(path)
      case "loci" | "txt" ⇒ ParsedLoci(path.lines)
Expand Down
@@ -1,6 +1,5 @@
package org.hammerlab.genomics.loci.parsing

import org.apache.hadoop.conf.Configuration
import org.hammerlab.genomics.loci.set.test.LociSetUtil
import org.hammerlab.genomics.reference.test.ClearContigNames
import org.hammerlab.genomics.reference.test.LociConversions.intToLocus
Expand All @@ -12,16 +11,13 @@ class ParsedLociSuite
with ClearContigNames
with LociSetUtil {

val conf = new Configuration

// Loci-from-VCF sanity check.
test("vcf loading") {
val loci =
lociSet(
ParsedLoci(
lociStrOpt = None,
lociFileOpt = Some(File("truth.chr20.vcf").path),
conf
lociFileOpt = Some(File("truth.chr20.vcf").path)
).get
)

Expand Down
25 changes: 25 additions & 0 deletions src/test/scala/org/hammerlab/genomics/loci/set/LociSetSuite.scala
Expand Up @@ -178,4 +178,29 @@ class LociSetSuite
iter3.next() should ===(100000000000L - 1)
iter3.hasNext should ===(false)
}

// Verifies LociSet.take(n): splits a set into (first n loci, remaining loci).
// NOTE(review): semantics inferred from the expected values below — take
// appears to count individual loci in contig/position order and split there;
// confirm against LociSet.take's implementation.
test("take") {
val set = lociSet("chr1:100-200,chr2:30-40,chr3:50-51,chr4:1000-1100")

// First 10 loci fall entirely inside chr1 (100-110); everything else remains.
set.take(10) should be(
(
lociSet("chr1:100-110"),
lociSet("chr1:110-200,chr2:30-40,chr3:50-51,chr4:1000-1100")
)
)

// Taking zero loci yields an empty set and the original set unchanged.
set.take(0) should be(
(
lociSet(""),
set
)
)

// 200 loci = all of chr1 (100) + chr2 (10) + chr3 (1) + the first 89 of
// chr4 (1000-1089); the remaining 11 chr4 loci (1089-1100) are left over.
set.take(200) should be(
(
lociSet("chr1:100-200,chr2:30-40,chr3:50-51,chr4:1000-1089"),
lociSet("chr4:1089-1100")
)
)
}
}

0 comments on commit 5735f94

Please sign in to comment.