random nits #437

Closed · wants to merge 1 commit
@@ -219,7 +219,8 @@ class ADAMContext(val sc: SparkContext) extends Serializable with Logging {
} else {
reads
}
-} else if ((filePath.endsWith(".fastq") || filePath.endsWith(".fq")) && classOf[AlignmentRecord].isAssignableFrom(manifest[T].runtimeClass)) {
+} else if ((filePath.endsWith(".fastq") || filePath.endsWith(".fq")) &&
+  classOf[AlignmentRecord].isAssignableFrom(manifest[T].runtimeClass)) {

if (projection.isDefined) {
log.warn("Projection is ignored when loading a FASTQ file")
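For context, the condition being re-wrapped above is the dispatch that decides whether a load takes the FASTQ path: the file extension must be .fastq or .fq and the requested record type must be AlignmentRecord. A minimal standalone sketch of that guard follows; the import path for AlignmentRecord is an assumption and may differ between ADAM versions.

import org.bdgenomics.formats.avro.AlignmentRecord // assumed import path for the Avro record class

// Sketch of the guard above: take the FASTQ branch only when the path looks
// like FASTQ and the requested type T is AlignmentRecord (or a subclass of it).
def isFastqAlignmentLoad[T: Manifest](filePath: String): Boolean = {
  (filePath.endsWith(".fastq") || filePath.endsWith(".fq")) &&
    classOf[AlignmentRecord].isAssignableFrom(manifest[T].runtimeClass)
}

// isFastqAlignmentLoad[AlignmentRecord]("reads.fq")   // true
// isFastqAlignmentLoad[AlignmentRecord]("reads.adam") // false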
@@ -92,6 +92,13 @@ object AlignmentRecordContext extends Serializable with Logging {

class AlignmentRecordContext(val sc: SparkContext) extends Serializable with Logging {

+/**
+ * Load AlignmentRecords from two paired-end FASTQ files.
+ *
+ * @param firstPairPath Path to read first-mates from
+ * @param secondPairPath Path to read second-mates from
+ * @param fixPairs If true, joins first-mates to second-mates on their read name (minus the /1 or /2 suffix)
+ */
def adamFastqLoad(firstPairPath: String,
secondPairPath: String,
fixPairs: Boolean = false): RDD[AlignmentRecord] = {
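A hypothetical call site for the method documented above, based only on the signature and class shown in this diff (the SparkContext setup, file paths, and AlignmentRecord import path are placeholders or assumptions):

import org.apache.spark.{ SparkConf, SparkContext }
import org.apache.spark.rdd.RDD
import org.bdgenomics.formats.avro.AlignmentRecord // assumed import path

val sc = new SparkContext(new SparkConf().setAppName("fastq-load").setMaster("local[*]"))
val arc = new AlignmentRecordContext(sc)

// Load mates from the two FASTQ files and stitch them back into pairs by read name.
val pairedReads: RDD[AlignmentRecord] =
  arc.adamFastqLoad("sample_1.fq", "sample_2.fq", fixPairs = true)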
@@ -119,7 +126,7 @@ class AlignmentRecordContext(val sc: SparkContext) extends Serializable with Logging {
.build())
} else {
// all paired end reads should have the same name, except for the last two
-// characters, which will be _1/_2
+// characters, which will be {/1, /2}
firstPairRdd.keyBy(_.getReadName.toString.dropRight(2)).join(secondPairRdd.keyBy(_.getReadName.toString.dropRight(2)))
.flatMap(kv => Seq(AlignmentRecord.newBuilder(kv._2._1)
.setReadPaired(true)
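The fixPairs branch keys both RDDs by the read name minus its last two characters, so mates named e.g. read123/1 and read123/2 meet under the key read123. A small self-contained sketch of that join, using a toy Read type rather than ADAM's AlignmentRecord:

import org.apache.spark.SparkContext._ // pair-RDD implicits for join/values on older Spark
import org.apache.spark.rdd.RDD

// Toy stand-in for a read: a name like "read123/1" plus its bases.
case class Read(name: String, sequence: String)

// Join mates on the read name with the trailing "/1" or "/2" stripped,
// mirroring the keyBy(...dropRight(2)).join(...) step above.
def pairByName(first: RDD[Read], second: RDD[Read]): RDD[(Read, Read)] = {
  val firstKeyed = first.keyBy(_.name.dropRight(2))   // "read123/1" -> "read123"
  val secondKeyed = second.keyBy(_.name.dropRight(2)) // "read123/2" -> "read123"
  firstKeyed.join(secondKeyed).values
}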
@@ -87,8 +87,22 @@ class AlignmentRecordRDDFunctions(rdd: RDD[AlignmentRecord])
// write file to disk
val conf = rdd.context.hadoopConfiguration
asSam match {
-case true => withKey.saveAsNewAPIHadoopFile(filePath, classOf[LongWritable], classOf[SAMRecordWritable], classOf[ADAMSAMOutputFormat[LongWritable]], conf)
-case false => withKey.saveAsNewAPIHadoopFile(filePath, classOf[LongWritable], classOf[SAMRecordWritable], classOf[ADAMBAMOutputFormat[LongWritable]], conf)
+case true =>
+  withKey.saveAsNewAPIHadoopFile(
+    filePath,
+    classOf[LongWritable],
+    classOf[SAMRecordWritable],
+    classOf[ADAMSAMOutputFormat[LongWritable]],
+    conf
+  )
+case false =>
+  withKey.saveAsNewAPIHadoopFile(
+    filePath,
+    classOf[LongWritable],
+    classOf[SAMRecordWritable],
+    classOf[ADAMBAMOutputFormat[LongWritable]],
+    conf
+  )
}
}

pom.xml (1 change: 1 addition & 0 deletions)
@@ -283,6 +283,7 @@
<alignParameters>true</alignParameters>
<alignSingleLineCaseStatements>true</alignSingleLineCaseStatements>
<doubleIndentClassDeclaration>true</doubleIndentClassDeclaration>
+<preserveDanglingCloseParenthesis>true</preserveDanglingCloseParenthesis>
</configuration>
</execution>
</executions>
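The new scalariform setting ties in with the Scala reformatting above: as the option is generally understood, preserveDanglingCloseParenthesis keeps a closing parenthesis that already sits on its own line instead of folding it onto the last argument. Using the call from the diff above as the example:

// With preserveDanglingCloseParenthesis=true, scalariform leaves the final ")"
// on its own line; without it, the formatter would pull it up after "conf".
withKey.saveAsNewAPIHadoopFile(
  filePath,
  classOf[LongWritable],
  classOf[SAMRecordWritable],
  classOf[ADAMSAMOutputFormat[LongWritable]],
  conf
)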