Fix SecurityException when HDFS Repository is used against HA Namenodes (#27196)

* Sense HA HDFS settings and remove permission restrictions during regular execution.

This PR adds integration tests for HA-enabled HDFS deployments, both regular and secured.
The MiniHDFS fixture has been updated to optionally run in HA mode, and a new test suite
reproduces the effects of a Namenode failing over during regular repository usage. Going
forward, the HDFS Repository will still be subject to its self-imposed permission restrictions
during normal use, but will no longer apply them when running against an HA-enabled HDFS
cluster. Instead, the plugin relies on the provided security policy and does not restrict
permissions further, so that the client's transparent failover to a different Namenode does
not raise security exceptions. Additionally, we now test secure mode with SASL-based wire
encryption of data between Elasticsearch and HDFS; supporting this required adding a
previously missing library (commons-codec).
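As context for the first point, the following is a minimal Java sketch of how a client can sense an HA-enabled HDFS configuration. The HaDetection class and isHaEnabled method are illustrative names, not the plugin's actual code; the two configuration keys, however, are the standard HDFS client settings that describe an HA Namenode deployment.

import org.apache.hadoop.conf.Configuration;

// Illustrative sketch only: detects an HA-style client configuration by the
// presence of a nameservice plus a configured failover proxy provider.
public final class HaDetection {

    static boolean isHaEnabled(Configuration conf) {
        // "dfs.nameservices" lists logical nameservices; HA clients also set
        // "dfs.client.failover.proxy.provider.<nameservice>".
        String nameServices = conf.get("dfs.nameservices");
        if (nameServices == null || nameServices.isEmpty()) {
            return false;
        }
        for (String ns : nameServices.split(",")) {
            if (conf.get("dfs.client.failover.proxy.provider." + ns.trim()) != null) {
                return true; // client will transparently fail over between Namenodes
            }
        }
        return false;
    }
}

When such a configuration is detected, the plugin keeps the broader permissions granted by the security policy so the client's failover machinery can run without tripping the security manager.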
jbaiera committed Dec 4, 2017
1 parent 6a37571 commit 02a0d87
Showing 10 changed files with 747 additions and 129 deletions.
192 changes: 134 additions & 58 deletions plugins/repository-hdfs/build.gradle
@@ -18,6 +18,7 @@
*/

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.test.ClusterConfiguration
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
@@ -51,6 +52,7 @@ dependencies {
  compile 'com.google.protobuf:protobuf-java:2.5.0'
  compile 'commons-logging:commons-logging:1.1.3'
  compile 'commons-cli:commons-cli:1.2'
  compile 'commons-codec:commons-codec:1.10'
  compile 'commons-collections:commons-collections:3.2.2'
  compile 'commons-configuration:commons-configuration:1.6'
  compile 'commons-io:commons-io:2.4'
@@ -66,14 +68,6 @@ dependencyLicenses {
  mapping from: /hadoop-.*/, to: 'hadoop'
}

// MIT Kerberos Vagrant Testing Fixture
String box = "krb5kdc"
Map<String,String> vagrantEnvVars = [
@@ -116,38 +110,116 @@ for (String principal : principals) {
  krb5AddPrincipals.dependsOn(create)
}

// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
  project.tasks.create(fixtureName, org.elasticsearch.gradle.test.AntFixture) {
    dependsOn project.configurations.hdfsFixture
    executable = new File(project.javaHome, 'bin/java')
    env 'CLASSPATH', "${ -> project.configurations.hdfsFixture.asPath }"

    final List<String> miniHDFSArgs = []

    // If it's a secure fixture, then depend on the Kerberos fixture and principals + add the krb5conf to the JVM options
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      dependsOn krb5kdcFixture, krb5AddPrincipals
      Path krb5Config = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf")
      miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5Config}")
      if (project.rootProject.ext.javaVersion == JavaVersion.VERSION_1_9) {
        miniHDFSArgs.add('--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED')
      }
    }

    // If it's an HA fixture, set a nameservice to use in the JVM options
    if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
    }

    // Common options
    miniHDFSArgs.add('hdfs.MiniHDFS')
    miniHDFSArgs.add(baseDir)

    // If it's a secure fixture, then set the principal name and keytab locations to use for auth.
    if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
      Path keytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab")
      miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
      miniHDFSArgs.add("${keytabPath}")
    }

    args miniHDFSArgs.toArray()
  }
}
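For reference, here is a hypothetical Java sketch of how an hdfs.MiniHDFS-style entry point could consume the arguments assembled above. The class and variable names are invented; the argument order mirrors the build logic: an optional -Dha-nameservice system property, then the base directory, then a principal and keytab for the secure fixtures.

import java.nio.file.Path;
import java.nio.file.Paths;

public class MiniHdfsArgsSketch {
    public static void main(String[] args) {
        // Set only for haHdfsFixture / secureHaHdfsFixture via -Dha-nameservice=ha-hdfs
        String haNameService = System.getProperty("ha-nameservice");
        Path baseDir = Paths.get(args[0]);
        // Present only for secureHdfsFixture / secureHaHdfsFixture
        String principal = args.length > 2 ? args[1] : null;
        String keytabFile = args.length > 2 ? args[2] : null;

        boolean haEnabled = haNameService != null;
        boolean secure = principal != null && keytabFile != null;
        System.out.println("baseDir=" + baseDir + " ha=" + haEnabled + " secure=" + secure);
    }
}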

// The following closure must execute before the afterEvaluate block in the constructor of the integration test tasks below:
project.afterEvaluate {
  for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
    ClusterConfiguration cluster = project.extensions.getByName("${integTestTaskName}Cluster") as ClusterConfiguration
    cluster.dependsOn(project.bundlePlugin)

    Task restIntegTestTask = project.tasks.getByName(integTestTaskName)
    restIntegTestTask.clusterConfig.plugin(project.path)

    // Default jvm arguments for all test clusters
    String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
        " " + "-Xmx" + System.getProperty('tests.heap.size', '512m') +
        " " + System.getProperty('tests.jvm.argline', '')

    // If it's a secure cluster, add the keytab as an extra config, and set the krb5 conf in the JVM options.
    if (integTestTaskName.equals('integTestSecure') || integTestTaskName.equals('integTestSecureHa')) {
      Path elasticsearchKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("elasticsearch.keytab").toAbsolutePath()
      Path krb5conf = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("conf").resolve("krb5.conf").toAbsolutePath()

      restIntegTestTask.clusterConfig.extraConfigFile("repository-hdfs/krb5.keytab", "${elasticsearchKT}")
      jvmArgs = jvmArgs + " " + "-Djava.security.krb5.conf=${krb5conf}"
      if (project.rootProject.ext.javaVersion == JavaVersion.VERSION_1_9) {
        jvmArgs = jvmArgs + " " + '--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED'
      }

      // If it's the HA + Secure tests then also set the Kerberos settings for the integration test JVM since we'll
      // need to auth to HDFS to trigger namenode failovers.
      if (integTestTaskName.equals('integTestSecureHa')) {
        Task restIntegTestTaskRunner = project.tasks.getByName("${integTestTaskName}Runner")
        restIntegTestTaskRunner.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
        restIntegTestTaskRunner.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
        restIntegTestTaskRunner.jvmArg "-Djava.security.krb5.conf=${krb5conf}"
        if (project.rootProject.ext.javaVersion == JavaVersion.VERSION_1_9) {
          restIntegTestTaskRunner.jvmArg '--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED'
        }

        Path hdfsKT = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs").resolve("hdfs_hdfs.build.elastic.co.keytab").toAbsolutePath()
        restIntegTestTaskRunner.systemProperty "test.krb5.keytab.hdfs", "${hdfsKT}"
      }
    }

    restIntegTestTask.clusterConfig.jvmArgs = jvmArgs
  }
}
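A hypothetical sketch of how the secure HA test code might consume the test.krb5.* system properties wired into the runner JVM above, authenticating to HDFS before triggering a Namenode failover. UserGroupInformation is the standard Hadoop login API; the class and method names here are illustrative.

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;

public final class TestKerberosLoginSketch {

    // Reads the principal and keytab passed via system properties above and
    // performs a Kerberos login, returning a UGI for privileged HDFS calls.
    static UserGroupInformation login() throws IOException {
        String principal = System.getProperty("test.krb5.principal.hdfs");
        String keytab = System.getProperty("test.krb5.keytab.hdfs");
        return UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
    }
}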

// Create an Integration Test suite just for HA based tests
RestIntegTestTask integTestHa = project.tasks.create('integTestHa', RestIntegTestTask.class) {
  description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode."
}

// Create an Integration Test suite just for security based tests
RestIntegTestTask integTestSecure = project.tasks.create('integTestSecure', RestIntegTestTask.class) {
  description = "Runs rest tests against an elasticsearch cluster with HDFS secured by MIT Kerberos."
}

// Create an Integration Test suite just for HA related security based tests
RestIntegTestTask integTestSecureHa = project.tasks.create('integTestSecureHa', RestIntegTestTask.class) {
  description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode and secured by MIT Kerberos."
}

// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  // hdfs fixture will not start without hadoop native libraries on windows
  String nativePath = System.getenv("HADOOP_HOME")
  if (nativePath != null) {
    Path path = Paths.get(nativePath);
    if (Files.isDirectory(path) &&
        Files.exists(path.resolve("bin").resolve("winutils.exe")) &&
        Files.exists(path.resolve("bin").resolve("hadoop.dll")) &&
        Files.exists(path.resolve("bin").resolve("hdfs.dll"))) {
      fixtureSupported = true
    } else {
      throw new IllegalStateException("HADOOP_HOME: ${path} is invalid, does not contain hadoop native libraries in \$HADOOP_HOME/bin");

@@ -157,55 +229,63 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
  fixtureSupported = true
}

// Always ignore HA integration tests in the normal integration test runner, they are included below as
// part of their own HA-specific integration test tasks.
integTestRunner.exclude('**/Ha*TestSuiteIT.class')

if (fixtureSupported) {
  // Check depends on the HA test. Already depends on the standard test.
  project.check.dependsOn(integTestHa)

  // Both standard and HA tests depend on their respective HDFS fixtures
  integTestCluster.dependsOn hdfsFixture
  integTestHaCluster.dependsOn haHdfsFixture

  // The normal test runner only runs the standard hdfs rest tests
  integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository'

  // Only include the HA integration tests for the HA test task
  integTestHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class'])
} else {
  logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
  // The normal integration test runner will just test that the plugin loads
  integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
  // The HA fixture is unsupported. Don't run the HA tests.
  integTestHa.setEnabled(false)
}

// Secure HDFS testing relies on the Vagrant based Kerberos fixture.
boolean secureFixtureSupported = false
if (fixtureSupported) {
  secureFixtureSupported = project.rootProject.vagrantSupported
}

if (secureFixtureSupported) {
  project.check.dependsOn(integTestSecure)
  project.check.dependsOn(integTestSecureHa)

  // Fixture dependencies
  integTestSecureCluster.dependsOn secureHdfsFixture, krb5kdcFixture
  integTestSecureHaCluster.dependsOn secureHaHdfsFixture, krb5kdcFixture

  // Set the keytab files in the classpath so that we can access them from test code without the security manager
  // freaking out.
  Path hdfsKeytabPath = project(':test:fixtures:krb5kdc-fixture').buildDir.toPath().resolve("keytabs")
  project.dependencies {
    testRuntime fileTree(dir: hdfsKeytabPath.toString(), include: ['*.keytab'])
  }

  // Run just the secure hdfs rest test suite.
  integTestSecureRunner.systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
  // Ignore HA integration tests here; they run as part of the integTestSecureHa test runner below.
  integTestSecureRunner.exclude('**/Ha*TestSuiteIT.class')

  // Only include the HA integration tests for the HA test task
  integTestSecureHaRunner.patternSet.setIncludes(['**/Ha*TestSuiteIT.class'])
} else {
  // Security tests unsupported. Don't run these tests.
  integTestSecure.enabled = false
  integTestSecureHa.enabled = false
}
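The commit message also mentions SASL-based wire encryption of data between Elasticsearch and HDFS. As a rough sketch of what those secure tests exercise (these are the standard Hadoop client keys, not code taken from this change's fixtures):

import org.apache.hadoop.conf.Configuration;

public final class SaslWireEncryptionSketch {

    // "privacy" requests authentication, integrity, and encryption; SASL then
    // protects both RPC and datanode block transfers on the wire.
    static Configuration saslConf() {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        conf.set("hadoop.rpc.protection", "privacy");          // client <-> Namenode RPC
        conf.set("dfs.data.transfer.protection", "privacy");   // client <-> Datanode streams
        return conf;
    }
}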

@@ -309,11 +389,7 @@ thirdPartyAudit.excludes = [
  'org.apache.commons.beanutils.DynaProperty',
  'org.apache.commons.beanutils.PropertyUtils',
  'org.apache.commons.compress.archivers.tar.TarArchiveEntry',
  'org.apache.commons.compress.archivers.tar.TarArchiveInputStream',
  'org.apache.commons.daemon.Daemon',
  'org.apache.commons.daemon.DaemonContext',
  'org.apache.commons.digester.AbstractObjectCreationFactory',
@@ -0,0 +1 @@
4b95f4897fa13f2cd904aee711aeafc0c5295cd8
