/**
 * Represents a one-to-one dependency between partitions of the parent and child RDDs.
 *
 * Spark source-code analysis video series (Bilibili, full playlist):
 *   https://www.bilibili.com/video/av37442139/
 * GitHub: https://github.com/opensourceteams/spark-scala-maven
 * CSDN (video index, watch online): https://blog.csdn.net/thinktothings/article/details/84726769
 */
package com.opensource.bigdata.spark.local.rdd.operation.dependency.narrow.n_02_RangeDependency
import com.opensource.bigdata.spark.local.rdd.operation.base.BaseScalaSparkContext
object Run1 extends BaseScalaSparkContext{

  /**
   * Entry point for the RangeDependency demo: reads a text file as an RDD
   * with a minimum of 2 partitions, prints every line on the driver, and
   * shuts the context down.
   */
  def main(args: Array[String]): Unit = {
    // Spark context prepared by the shared base class.
    val context = pre()

    // Load the input file; the second argument requests at least 2 partitions.
    val lines = context.textFile("/opt/data/2/c.txt",2)

    // Materialize the RDD on the driver and print one element per line.
    val output = lines.collect().mkString("\n")
    println(output)

    // NOTE: partition internals could be inspected like this:
    //lines.partitions(0).asInstanceOf[org.apache.spark.rdd.HadoopPartition]

    context.stop()
  }
}