Skip to content

Commit

Permalink
Merge 81ed51f into 988897e
Browse files — browse the repository at this point in the history
  • Loading branch information
heuermh committed Jun 12, 2019
2 parents 988897e + 81ed51f commit c2caab7
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,16 @@ object TransformFeatures extends BDGCommandCompanion {
class TransformFeaturesArgs extends Args4jBase with ParquetSaveArgs {
@Argument(required = true, metaVar = "INPUT",
usage = "The feature file to convert (e.g., .bed, .gff/.gtf, .gff3, .interval_list, .narrowPeak). If extension is not detected, Parquet is assumed.", index = 0)
var featuresFile: String = _
var featuresPath: String = _

@Argument(required = true, metaVar = "OUTPUT",
usage = "Location to write ADAM feature data. If extension is not detected, Parquet is assumed.", index = 1)
var outputPath: String = null

@Args4jOption(required = false, name = "-reference",
usage = "Load reference for features; .dict as HTSJDK sequence dictionary format, .genome as Bedtools genome file format, .txt as UCSC Genome Browser chromInfo files.")
var referencePath: String = null

@Args4jOption(required = false, name = "-num_partitions",
usage = "Number of partitions to load a text file using.")
var numPartitions: Int = _
Expand All @@ -62,10 +66,12 @@ class TransformFeatures(val args: TransformFeaturesArgs)
def run(sc: SparkContext) {
  // Fail fast if the output location is not writeable before doing any work.
  checkWriteablePath(args.outputPath, sc.hadoopConfiguration)

  // Optional reference: Option(...) maps a null args4j field to None, so the
  // dictionary is only loaded when -reference was provided on the command line.
  val optSequenceDictionary = Option(args.referencePath).map(sc.loadSequenceDictionary(_))

  // Load features from the input path (format inferred from extension,
  // Parquet assumed otherwise) and save them to the requested output.
  sc.loadFeatures(
    args.featuresPath,
    optSequenceDictionary = optSequenceDictionary,
    optMinPartitions = Option(args.numPartitions)
  ).save(args.outputPath, args.single, args.disableFastConcat)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,10 @@ class TransformSequencesArgs extends Args4jBase with ParquetSaveArgs {
usage = "Location to write ADAM sequence data. If extension is not detected, Parquet is assumed.", index = 1)
var outputPath: String = null

@Args4jOption(required = false, name = "-create_reference",
usage = "Create reference from sequence names and lengths. Defaults to false.")
var createReference: Boolean = false

@Args4jOption(required = false, name = "-single",
usage = "Save as a single file, for the text formats.")
var single: Boolean = false
Expand All @@ -66,6 +70,7 @@ class TransformSequences(val args: TransformSequencesArgs)
case Alphabet.PROTEIN => sc.loadProteinSequences(args.sequencesFile, optPredicate = None, optProjection = None)
case Alphabet.RNA => sc.loadRnaSequences(args.sequencesFile, optPredicate = None, optProjection = None)
}
sequences.save(args.outputPath, args.single, args.disableFastConcat)
val maybeCreateReference = if (args.createReference) sequences.createSequenceDictionary() else sequences
maybeCreateReference.save(args.outputPath, args.single, args.disableFastConcat)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,10 @@ class TransformSlicesArgs extends Args4jBase with ParquetSaveArgs {
usage = "Maximum slice length. Defaults to 10000L.")
var maximumLength: Long = 10000L

@Args4jOption(required = false, name = "-create_reference",
usage = "Create reference from sequence names and lengths. Defaults to false.")
var createReference: Boolean = false

@Args4jOption(required = false, name = "-single",
usage = "Save as a single file, for the text formats.")
var single: Boolean = false
Expand All @@ -59,11 +63,13 @@ class TransformSlices(val args: TransformSlicesArgs)
val companion = TransformSlices

def run(sc: SparkContext) {
  // Load slices from the input path, capping each slice at the configured
  // maximum length; no predicate/projection pushdown is applied.
  val slices = sc.loadSlices(
    args.slicesFile,
    maximumLength = args.maximumLength,
    optPredicate = None,
    optProjection = None
  )
  // When -create_reference is set, derive a sequence dictionary from the
  // loaded slice names and lengths before saving; otherwise save as-is.
  val maybeCreateReference = if (args.createReference) slices.createSequenceDictionary() else slices
  maybeCreateReference.save(args.outputPath, args.single, args.disableFastConcat)
}
}

0 comments on commit c2caab7

Please sign in to comment.