
Commit 57ea2c0: Merge 4a27435 into 29f3b2e

2 parents 29f3b2e + 4a27435
akmorrow13 committed May 9, 2019
Showing 35 changed files with 17,985 additions and 16,766 deletions.
8 changes: 7 additions & 1 deletion .gitignore
@@ -26,6 +26,7 @@ workfiles/
# metadata
example-files/notebooks/metastore_db/*
mango-viz/examples/.ipynb_checkpoints/*
+*/*/.ipynb_checkpoints/*

# builds
mango-viz/build/*
@@ -34,11 +35,16 @@ mango-viz/build/*
venv/*
metastore_db/*
mango-python/adam/*
+mango-python/dist/*
+mango-python/build/*
+mango-python/bdgenomics.mango.egg-info/*
mango-viz/metastore_db/*
mango-viz/venv/*
*.bak

-mango-viz/mangoviz/static/*
+mango-viz/bdgenomics/mango/pileup/static/*
+mango-viz/bdgenomics/mango/js/dist/*
+mango-viz/bdgenomics.mango.pileup.egg-info/*
*.pyc

example-files/metastore_db/*
3 changes: 3 additions & 0 deletions example-files/browser-scripts/run-http-example.sh
@@ -0,0 +1,3 @@
+bin/mango-submit ./example-files/hg19.genome \
+-variants http://s3.amazonaws.com/1000genomes/phase1/analysis_results/integrated_call_sets/ALL.chr1.integrated_phase1_v3.20101123.snps_indels_svs.genotypes.vcf.gz \
+-reads http://s3.amazonaws.com/1000genomes/phase1/data/NA19661/exome_alignment/NA19661.mapped.illumina.mosaik.MXL.exome.20110411.bam
62 changes: 31 additions & 31 deletions mango-cli/pom.xml
@@ -123,21 +123,21 @@
</build>

<dependencies>
-<dependency>
-<groupId>com.esotericsoftware.kryo</groupId>
-<artifactId>kryo</artifactId>
-<scope>compile</scope>
-</dependency>
-<dependency>
-<groupId>com.github.samtools</groupId>
-<artifactId>htsjdk</artifactId>
-<scope>compile</scope>
-</dependency>
-<dependency>
-<groupId>it.unimi.dsi</groupId>
-<artifactId>fastutil</artifactId>
-<scope>compile</scope>
-</dependency>
+<!--<dependency>-->
+<!--<groupId>com.esotericsoftware.kryo</groupId>-->
+<!--<artifactId>kryo</artifactId>-->
+<!--<scope>compile</scope>-->
+<!--</dependency>-->
+<!--<dependency>-->
+<!--<groupId>com.github.samtools</groupId>-->
+<!--<artifactId>htsjdk</artifactId>-->
+<!--<scope>compile</scope>-->
+<!--</dependency>-->
+<!--<dependency>-->
+<!--<groupId>it.unimi.dsi</groupId>-->
+<!--<artifactId>fastutil</artifactId>-->
+<!--<scope>compile</scope>-->
+<!--</dependency>-->
<dependency>
<groupId>net.liftweb</groupId>
<artifactId>lift-json_${scala.version.prefix}</artifactId>
@@ -181,12 +181,12 @@
<artifactId>adam-core${spark.version.prefix}${scala.version.prefix}</artifactId>
<scope>compile</scope>
</dependency>
-<dependency>
-<groupId>org.bdgenomics.adam</groupId>
-<artifactId>adam-core${spark.version.prefix}${scala.version.prefix}</artifactId>
-<type>test-jar</type>
-<scope>test</scope>
-</dependency>
+<!--<dependency>-->
+<!--<groupId>org.bdgenomics.adam</groupId>-->
+<!--<artifactId>adam-core${spark.version.prefix}${scala.version.prefix}</artifactId>-->
+<!--<type>test-jar</type>-->
+<!--<scope>test</scope>-->
+<!--</dependency>-->
<dependency>
<groupId>org.bdgenomics.bdg-formats</groupId>
<artifactId>bdg-formats</artifactId>
@@ -202,11 +202,11 @@
<artifactId>utils-intervalrdd${spark.version.prefix}${scala.version.prefix}</artifactId>
<scope>compile</scope>
</dependency>
-<dependency>
-<groupId>org.bdgenomics.utils</groupId>
-<artifactId>utils-io${spark.version.prefix}${scala.version.prefix}</artifactId>
-<scope>compile</scope>
-</dependency>
+<!--<dependency>-->
+<!--<groupId>org.bdgenomics.utils</groupId>-->
+<!--<artifactId>utils-io${spark.version.prefix}${scala.version.prefix}</artifactId>-->
+<!--<scope>compile</scope>-->
+<!--</dependency>-->
<dependency>
<groupId>org.bdgenomics.utils</groupId>
<artifactId>utils-metrics${spark.version.prefix}${scala.version.prefix}</artifactId>
@@ -258,10 +258,10 @@
<artifactId>mango-core</artifactId>
<version>${project.version}</version>
</dependency>
-<dependency>
-<groupId>org.seqdoop</groupId>
-<artifactId>hadoop-bam</artifactId>
-<scope>compile</scope>
-</dependency>
+<!--<dependency>-->
+<!--<groupId>org.seqdoop</groupId>-->
+<!--<artifactId>hadoop-bam</artifactId>-->
+<!--<scope>compile</scope>-->
+<!--</dependency>-->
</dependencies>
</project>
113 changes: 88 additions & 25 deletions mango-cli/src/main/scala/org/bdgenomics/mango/cli/VizReads.scala
@@ -18,14 +18,16 @@
package org.bdgenomics.mango.cli

import java.net.URI

import ga4gh.Common.Strand
import ga4gh.Reads.ReadAlignment
import net.liftweb.json.Serialization.write
import net.liftweb.json._
import org.apache.spark.SparkContext
-import org.bdgenomics.adam.models.{ SequenceRecord, ReferencePosition, ReferenceRegion, SequenceDictionary }
+import org.bdgenomics.adam.models.{ ReferencePosition, ReferenceRegion, SequenceDictionary, SequenceRecord }
import org.bdgenomics.mango.cli.util.Materializer
-import org.bdgenomics.mango.converters.{ SearchFeaturesRequestGA4GH, SearchVariantsRequestGA4GH, SearchReadsRequestGA4GH }
-import org.bdgenomics.mango.core.util.{ Genome, GenomeConfig, VizUtils, VizCacheIndicator }
+import org.bdgenomics.mango.converters.{ SearchFeaturesRequestGA4GH, SearchReadsRequestGA4GH, SearchVariantsRequestGA4GH }
+import org.bdgenomics.mango.core.util.{ Genome, GenomeConfig, VizCacheIndicator, VizUtils }
import org.bdgenomics.mango.filters._
import org.bdgenomics.mango.models._
import org.bdgenomics.utils.cli._
@@ -38,7 +40,10 @@ import org.eclipse.jetty.util.resource.Resource
import org.fusesource.scalate.TemplateEngine
import org.kohsuke.args4j.{ Argument, Option => Args4jOption }
import org.scalatra._

import scala.io.Source
+import net.liftweb.json.JsonAST._
+import net.liftweb.json.Extraction._

object VizTimers extends Metrics {
//HTTP requests
@@ -229,6 +234,40 @@ class VizReadsArgs extends Args4jBase with ParquetArgs {

}

+/**
+* Holds arguments for browser template.
+*
+* @param dictionary String of comma delimited Reference Regions.
+* @param twoBitUrl String of url to twobit reference.
+* @param genes Optional gene endpoint
+* @param reads Optional List of read IDs for endpoint
+* @param variants Optional Map of variant IDs and corresponding comma delimited genotype names
+* @param features Optional Map of feature IDs and boolean that determines whether track is coverage
+* @param referenceName contig referenceName for starting location in browser
+* @param start start position for location in browser
+* @param end end position for location in browser
+*/
+case class BrowserArgs(dictionary: String,
+twoBitUrl: String,
+genes: Option[String],
+reads: Option[List[String]],
+variants: Option[Map[String, String]],
+features: Option[Map[String, Boolean]],
+referenceName: String,
+start: Long,
+end: Long) {
+
+def toMap(): Map[String, Any] = {
+Map("dictionary" -> dictionary,
+"twoBitUrl" -> twoBitUrl,
+"genes" -> genes,
+"reads" -> reads,
+"variants" -> variants,
+"features" -> features,
+"region" -> ReferenceRegion(referenceName, start, end))
+}
+}
+
class VizServlet extends ScalatraServlet {
implicit val formats = net.liftweb.json.DefaultFormats
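
The BrowserArgs case class introduced above replaces the inline Map literal that was previously passed to the template engine, so the same arguments can be rendered by a template or serialized for tests. A minimal sketch of how it might be constructed, with illustrative values that are not taken from this commit (it assumes mango-cli and its ADAM dependency on the classpath):

import org.bdgenomics.mango.cli.BrowserArgs

object BrowserArgsSketch {
  def main(args: Array[String]): Unit = {
    val browserArgs = BrowserArgs(
      dictionary = "chr1:248956422,chr2:242193529", // comma delimited regions
      twoBitUrl = "http://example.com/hg19.2bit",   // hypothetical reference URL
      genes = None,                                  // no gene endpoint configured
      reads = Some(List("NA19661")),                 // one hypothetical read track
      variants = Some(Map("ALL.chr1" -> "NA19661")), // variant id -> genotype names
      features = None,
      referenceName = "chr1",
      start = 1L,
      end = 100L)

    // toMap() produces the argument map the servlet hands to the .ssp template
    browserArgs.toMap().foreach { case (k, v) => println(s"$k -> $v") }
  }
}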

@@ -246,9 +285,20 @@ class VizServlet extends ScalatraServlet {
// set initial referenceRegion so it is defined. pick first chromosome to view
val firstChr = VizReads.genome.chromSizes.records.head.name
session("referenceRegion") = ReferenceRegion(firstChr, 1, 100)
templateEngine.layout("/WEB-INF/layouts/overall.ssp",
Map("dictionary" -> VizReads.formatDictionaryOpts(VizReads.genome.chromSizes),
"regions" -> VizReads.formatClickableRegions(VizReads.prefetchedRegions)))

val args = Map("dictionary" -> VizReads.formatDictionaryOpts(VizReads.genome.chromSizes),
"regions" -> VizReads.formatClickableRegions(VizReads.prefetchedRegions))

try {
templateEngine.layout("/WEB-INF/layouts/overall.ssp", args)
} catch {
case e: Exception => {
println(e.getMessage)
// for testing purposes in VizReadsSuite
NotFound(net.liftweb.json.compactRender(decompose(args)))
}
}

}
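
The route above falls back to serializing its template arguments as JSON when template rendering throws, using lift-json's decompose and compactRender (the browser route further down does the same). A self-contained sketch of that serialization step, assuming only lift-json on the classpath; the payload values here are hypothetical:

import net.liftweb.json._
import net.liftweb.json.Extraction.decompose

object JsonFallbackSketch {
  // decompose needs an implicit Formats in scope, as in VizServlet
  implicit val formats: Formats = DefaultFormats

  def main(args: Array[String]): Unit = {
    val templateArgs = Map(
      "dictionary" -> "chr1:248956422",
      "twoBitUrl" -> "http://example.com/hg19.2bit")

    // decompose: Scala value -> JValue; compactRender: JValue -> compact JSON string
    println(compactRender(decompose(templateArgs)))
    // prints roughly: {"dictionary":"chr1:248956422","twoBitUrl":"http://example.com/hg19.2bit"}
  }
}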

// Used to set the viewRegion in the backend when a user clicks on the home page
@@ -278,29 +328,42 @@
}

val variantSamples: Option[Map[String, String]] = try {
-Some(VizReads.materializer.getVariantContext().get.samples.map(r => (LazyMaterialization.filterKeyFromFile(r._1), r._2.map(_.getId).mkString(","))))
+Some(VizReads.materializer.getVariantContext().get.samples.map(r => {
+(LazyMaterialization.filterKeyFromFile(r._1), r._2.map(_.getId).mkString(","))
+}))
} catch {
case e: Exception => None
}

-val featureSamples = try {
+val featureSamples: Option[Map[String, Boolean]] = try {
Some(VizReads.materializer.getFeatures().get.getFiles.map(r => {
(LazyMaterialization.filterKeyFromFile(r), VizReads.coveragePaths.contains(r))
-}))
+}).toMap)
} catch {
case e: Exception => None
}

templateEngine.layout("/WEB-INF/layouts/browser.ssp",
Map("dictionary" -> VizReads.formatDictionaryOpts(VizReads.genome.chromSizes),
"twoBitUrl" -> VizReads.genome.twoBitPath,
"genes" -> (if (VizReads.genome.genes.isDefined) Some(VizReads.GENES_REQUEST) else None),
"reads" -> readsSamples,
"variants" -> variantSamples,
"features" -> featureSamples,
"contig" -> session("referenceRegion").asInstanceOf[ReferenceRegion].referenceName,
"start" -> session("referenceRegion").asInstanceOf[ReferenceRegion].start.toString,
"end" -> session("referenceRegion").asInstanceOf[ReferenceRegion].end.toString))
val region = session("referenceRegion").asInstanceOf[ReferenceRegion]
val args = BrowserArgs(VizReads.formatDictionaryOpts(VizReads.genome.chromSizes),
VizReads.genome.twoBitPath,
(if (VizReads.genome.genes.isDefined) Some(VizReads.GENES_REQUEST) else None),
readsSamples,
variantSamples,
featureSamples,
region.referenceName,
region.start,
region.end)

try {
templateEngine.layout("/WEB-INF/layouts/browser.ssp",
args.toMap())
} catch {
case e: Exception => {
println(e.getMessage)
// for testing purposes in VizReadsSuite
NotFound(net.liftweb.json.compactRender(decompose(args)))
}
}
}
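
A quiet fix above is in featureSamples: mapping over a collection of files yields a sequence of pairs, not a Map, so the new code adds an explicit type annotation and a final .toMap. The distinction in miniature, with hypothetical file names:

object ToMapSketch {
  def main(args: Array[String]): Unit = {
    val files = List("/data/coverage.bed", "/data/genes.bed")

    // mapping over a List of pairs gives a List[(String, Boolean)] ...
    val pairs: List[(String, Boolean)] =
      files.map(f => (f.split('/').last, f.contains("coverage")))

    // ... which only becomes a Map[String, Boolean] with an explicit .toMap
    val samples: Map[String, Boolean] = pairs.toMap
    println(samples) // Map(coverage.bed -> true, genes.bed -> false)
  }
}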

// used in browser.ssp to set contig list for wheel
@@ -534,7 +597,7 @@ class VizReads(protected val args: VizReadsArgs) extends BDGSparkCommand[VizRead
if (readsPaths.nonEmpty) {
object readsWait
VizReads.syncObject += (AlignmentRecordMaterialization.name -> readsWait)
-Some(new AlignmentRecordMaterialization(sc, readsPaths, VizReads.genome.chromSizes, args.repartition, Some(prefetch)))
+Some(new AlignmentRecordMaterialization(sc, readsPaths, VizReads.genome.chromSizes, Some(prefetch)))
} else None
} else None
}
@@ -550,7 +613,7 @@ class VizReads(protected val args: VizReadsArgs) extends BDGSparkCommand[VizRead
if (variantsPaths.nonEmpty) {
object variantsWait
VizReads.syncObject += (VariantContextMaterialization.name -> variantsWait)
-Some(new VariantContextMaterialization(sc, variantsPaths, VizReads.genome.chromSizes, args.repartition, Some(prefetch)))
+Some(new VariantContextMaterialization(sc, variantsPaths, VizReads.genome.chromSizes, Some(prefetch)))
} else None
} else None
}
@@ -578,7 +641,7 @@ class VizReads(protected val args: VizReadsArgs) extends BDGSparkCommand[VizRead
}

/**
-* Runs total data scan over all feature and variant files, calculating the normalied frequency at all
+* Runs total data scan over all feature and variant files, calculating the normalized frequency at all
* windows in the genome.
*
* @return Returns list of windowed regions in the genome and their corresponding normalized frequencies
@@ -625,11 +688,11 @@ class VizReads(protected val args: VizReadsArgs) extends BDGSparkCommand[VizRead

for (region <- regions) {
if (VizReads.materializer.featuresExist)
-VizReads.materializer.getFeatures().get.get(Some(region)).count()
+VizReads.materializer.getFeatures().get.get(Some(region))
if (VizReads.materializer.readsExist)
-VizReads.materializer.getReads().get.get(Some(region)).count()
+VizReads.materializer.getReads().get.get(Some(region))
if (VizReads.materializer.variantContextExist)
-VizReads.materializer.getVariantContext().get.get(Some(region)).count()
+VizReads.materializer.getVariantContext().get.get(Some(region))
}
}
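
The prefetch loop above no longer calls count() on each query result. In Spark, transformations are lazy and count() is an action that forces a full evaluation, so dropping it defers or avoids that computation. A minimal standalone illustration of the transformation-versus-action boundary; none of this is mango code:

import org.apache.spark.{ SparkConf, SparkContext }

object LazyEvalSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("lazy-eval-sketch").setMaster("local[1]"))

    // A transformation: builds the lineage, computes nothing yet
    val doubled = sc.parallelize(1 to 1000).map(_ * 2)

    // count() is an action: only now does Spark actually run the job
    println(s"forced evaluation of ${doubled.count()} records")

    sc.stop()
  }
}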

