Skip to content

Commit

Permalink
Use Hadoop credential provider API to store passwords
Browse files Browse the repository at this point in the history
Change-Id: Ie774eeb9376f8b5d7379f1976826e12e9c529be3
  • Loading branch information
jerryshao committed Jun 13, 2018
1 parent 3e5b4ae commit 575152b
Show file tree
Hide file tree
Showing 3 changed files with 28 additions and 10 deletions.
11 changes: 10 additions & 1 deletion core/src/main/scala/org/apache/spark/SSLOptions.scala
Expand Up @@ -21,6 +21,7 @@ import java.io.File
import java.security.NoSuchAlgorithmException
import javax.net.ssl.SSLContext

import org.apache.hadoop.conf.Configuration
import org.eclipse.jetty.util.ssl.SslContextFactory

import org.apache.spark.internal.Logging
Expand Down Expand Up @@ -163,11 +164,16 @@ private[spark] object SSLOptions extends Logging {
* missing in SparkConf, the corresponding setting is used from the default configuration.
*
* @param conf Spark configuration object where the settings are collected from
* @param hadoopConf Hadoop configuration to get settings
* @param ns the namespace name
* @param defaults the default configuration
* @return [[org.apache.spark.SSLOptions]] object
*/
def parse(conf: SparkConf, ns: String, defaults: Option[SSLOptions] = None): SSLOptions = {
def parse(
conf: SparkConf,
hadoopConf: Configuration,
ns: String,
defaults: Option[SSLOptions] = None): SSLOptions = {
val enabled = conf.getBoolean(s"$ns.enabled", defaultValue = defaults.exists(_.enabled))

val port = conf.getWithSubstitution(s"$ns.port").map(_.toInt)
Expand All @@ -179,9 +185,11 @@ private[spark] object SSLOptions extends Logging {
.orElse(defaults.flatMap(_.keyStore))

val keyStorePassword = conf.getWithSubstitution(s"$ns.keyStorePassword")
.orElse(Option(hadoopConf.getPassword(s"$ns.keyStorePassword")).map(new String(_)))
.orElse(defaults.flatMap(_.keyStorePassword))

val keyPassword = conf.getWithSubstitution(s"$ns.keyPassword")
.orElse(Option(hadoopConf.getPassword(s"$ns.keyPassword")).map(new String(_)))
.orElse(defaults.flatMap(_.keyPassword))

val keyStoreType = conf.getWithSubstitution(s"$ns.keyStoreType")
Expand All @@ -194,6 +202,7 @@ private[spark] object SSLOptions extends Logging {
.orElse(defaults.flatMap(_.trustStore))

val trustStorePassword = conf.getWithSubstitution(s"$ns.trustStorePassword")
.orElse(Option(hadoopConf.getPassword(s"$ns.trustStorePassword")).map(new String(_)))
.orElse(defaults.flatMap(_.trustStorePassword))

val trustStoreType = conf.getWithSubstitution(s"$ns.trustStoreType")
Expand Down
10 changes: 7 additions & 3 deletions core/src/main/scala/org/apache/spark/SecurityManager.scala
Expand Up @@ -19,11 +19,11 @@ package org.apache.spark

import java.net.{Authenticator, PasswordAuthentication}
import java.nio.charset.StandardCharsets.UTF_8
import javax.net.ssl._

import org.apache.hadoop.io.Text
import org.apache.hadoop.security.{Credentials, UserGroupInformation}

import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.launcher.SparkLauncher
Expand Down Expand Up @@ -111,11 +111,14 @@ private[spark] class SecurityManager(
)
}

private val hadoopConf = SparkHadoopUtil.get.newConfiguration(sparkConf)
// the default SSL configuration - it will be used by all communication layers unless overwritten
private val defaultSSLOptions = SSLOptions.parse(sparkConf, "spark.ssl", defaults = None)
private val defaultSSLOptions =
SSLOptions.parse(sparkConf, hadoopConf, "spark.ssl", defaults = None)

def getSSLOptions(module: String): SSLOptions = {
val opts = SSLOptions.parse(sparkConf, s"spark.ssl.$module", Some(defaultSSLOptions))
val opts =
SSLOptions.parse(sparkConf, hadoopConf, s"spark.ssl.$module", Some(defaultSSLOptions))
logDebug(s"Created SSL options for $module: $opts")
opts
}
Expand Down Expand Up @@ -325,6 +328,7 @@ private[spark] class SecurityManager(
.orElse(Option(secretKey))
.orElse(Option(sparkConf.getenv(ENV_AUTH_SECRET)))
.orElse(sparkConf.getOption(SPARK_AUTH_SECRET_CONF))
.orElse(Option(hadoopConf.getPassword(SPARK_AUTH_SECRET_CONF)).map(new String(_)))
.getOrElse {
throw new IllegalArgumentException(
s"A secret key must be specified via the $SPARK_AUTH_SECRET_CONF config")
Expand Down
17 changes: 11 additions & 6 deletions core/src/test/scala/org/apache/spark/SSLOptionsSuite.scala
Expand Up @@ -20,6 +20,7 @@ package org.apache.spark
import java.io.File
import javax.net.ssl.SSLContext

import org.apache.hadoop.conf.Configuration
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.util.SparkConfWithEnv
Expand All @@ -40,6 +41,7 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
.toSet

val conf = new SparkConf
val hadoopConf = new Configuration()
conf.set("spark.ssl.enabled", "true")
conf.set("spark.ssl.keyStore", keyStorePath)
conf.set("spark.ssl.keyStorePassword", "password")
Expand All @@ -49,7 +51,7 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
conf.set("spark.ssl.enabledAlgorithms", algorithms.mkString(","))
conf.set("spark.ssl.protocol", "TLSv1.2")

val opts = SSLOptions.parse(conf, "spark.ssl")
val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl")

assert(opts.enabled === true)
assert(opts.trustStore.isDefined === true)
Expand All @@ -70,6 +72,7 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
val trustStorePath = new File(this.getClass.getResource("/truststore").toURI).getAbsolutePath

val conf = new SparkConf
val hadoopConf = new Configuration()
conf.set("spark.ssl.enabled", "true")
conf.set("spark.ssl.keyStore", keyStorePath)
conf.set("spark.ssl.keyStorePassword", "password")
Expand All @@ -80,8 +83,8 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
"TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA")
conf.set("spark.ssl.protocol", "SSLv3")

val defaultOpts = SSLOptions.parse(conf, "spark.ssl", defaults = None)
val opts = SSLOptions.parse(conf, "spark.ssl.ui", defaults = Some(defaultOpts))
val defaultOpts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl.ui", defaults = Some(defaultOpts))

assert(opts.enabled === true)
assert(opts.trustStore.isDefined === true)
Expand All @@ -103,6 +106,7 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
val trustStorePath = new File(this.getClass.getResource("/truststore").toURI).getAbsolutePath

val conf = new SparkConf
val hadoopConf = new Configuration()
conf.set("spark.ssl.enabled", "true")
conf.set("spark.ssl.ui.enabled", "false")
conf.set("spark.ssl.ui.port", "4242")
Expand All @@ -117,8 +121,8 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
conf.set("spark.ssl.ui.enabledAlgorithms", "ABC, DEF")
conf.set("spark.ssl.protocol", "SSLv3")

val defaultOpts = SSLOptions.parse(conf, "spark.ssl", defaults = None)
val opts = SSLOptions.parse(conf, "spark.ssl.ui", defaults = Some(defaultOpts))
val defaultOpts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl.ui", defaults = Some(defaultOpts))

assert(opts.enabled === false)
assert(opts.port === Some(4242))
Expand All @@ -139,12 +143,13 @@ class SSLOptionsSuite extends SparkFunSuite with BeforeAndAfterAll {
val conf = new SparkConfWithEnv(Map(
"ENV1" -> "val1",
"ENV2" -> "val2"))
val hadoopConf = new Configuration()

conf.set("spark.ssl.enabled", "true")
conf.set("spark.ssl.keyStore", "${env:ENV1}")
conf.set("spark.ssl.trustStore", "${env:ENV2}")

val opts = SSLOptions.parse(conf, "spark.ssl", defaults = None)
val opts = SSLOptions.parse(conf, hadoopConf, "spark.ssl", defaults = None)
assert(opts.keyStore === Some(new File("val1")))
assert(opts.trustStore === Some(new File("val2")))
}
Expand Down

0 comments on commit 575152b

Please sign in to comment.