A Wikipedia Dump Loader for Spark in Scala
Add the following to your build.sbt
.
// Add dependency of `spark-wikipedia-dump-loader` in GitHub
dependsOn(RootProject(uri("https://github.com/nwtgck/spark-wikipedia-dump-loader.git#e6e358dd8cdd5b6200b89f5d2aa76c74b5c1d0d7")))
(from: sbt/sbt#3489)
Here is a complete example showing how to use spark-wikipedia-dump-loader.
.
package io.github.nwtgck.spark_wikipedia_dump_loader_example
import org.apache.spark.sql.{Dataset, SparkSession}
import io.github.nwtgck.spark_wikipedia_dump_loader.{Page, Redirect, Revision, WikipediaDumpLoader}
object Main {

  /**
   * Loads a Wikipedia XML dump into a Spark `Dataset[Page]` and prints every page.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // Create a local Spark session
    val sparkSession: SparkSession = SparkSession
      .builder()
      .appName("Wikipedia Dump Loader Test [Spark session]")
      .master("local[*]")
      .config("spark.executor.memory", "1g")
      .getOrCreate()

    try {
      // Load the dump file into a Dataset of pages
      val pageDs: Dataset[Page] = WikipediaDumpLoader.readXmlFilePath(
        sparkSession,
        filePath = "./wikidump.xml"
      )

      // Print all pages on the driver.
      // NOTE: iterating the Dataset directly (`for (page <- pageDs)`) desugars to
      // `pageDs.foreach(println)`, which runs the println on the executors — the
      // output would not be visible on the driver in cluster mode. Collect to the
      // driver first. (For very large dumps, prefer pageDs.show() or
      // pageDs.take(n) to avoid pulling the whole Dataset into driver memory.)
      pageDs.collect().foreach(println)
    } finally {
      // Always release Spark resources, even if loading the dump fails
      sparkSession.stop()
    }
  }
}
The wikidump.xml file used in the example above can be found HERE.