FALCON-1919 Provide user the option to store sensitive information with Hadoop credential provider

Tested alias properties being resolved correctly with Hadoop credential provider.
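
For illustration only, here is a minimal sketch of how a deployment might use the two new keys introduced in this change, credential.provider.path and the credential.provider.alias.for. prefix, in startup.properties. The store location, alias name, and the broker.password property are placeholders, not part of this commit; the usual *. domain prefix of Falcon's startup.properties is assumed:

    *.credential.provider.path=jceks://file/tmp/falcon-credentials.jceks
    # the suffix after "credential.provider.alias.for." becomes the resolved property name,
    # so this entry yields a property named "broker.password" at load time
    *.credential.provider.alias.for.broker.password=falcon.broker.password.alias

The alias itself would be created up front with the standard Hadoop CLI, which prompts for the secret value:

    hadoop credential create falcon.broker.password.alias -provider jceks://file/tmp/falcon-credentials.jceks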

Author: yzheng-hortonworks <yzheng@hortonworks.com>

Reviewers: Balu Vellanki <balu@apache.org>, Venkat Ranganathan <venkat@hortonworks.com>

Closes #138 from yzheng-hortonworks/FALCON-1919
yzheng-hortonworks authored and bvellanki committed May 13, 2016
1 parent ed410e8 commit 941c4fa325521aa1816e1ccb178d20762f726ba8
Showing 4 changed files with 98 additions and 5 deletions.
@@ -18,13 +18,14 @@

package org.apache.falcon.security;

import org.apache.falcon.FalconException;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;

/**
* Helper class for Hadoop credential provider functionality. Reflection is used to avoid
@@ -72,18 +73,39 @@ public static boolean isProviderAvailable() {
|| methFlush == null);
}

-public static String resolveAlias(Configuration conf, String alias) throws IOException {
+public static String resolveAlias(Configuration conf, String alias) throws FalconException {
try {
char[] cred = (char[]) methGetPassword.invoke(conf, alias);
if (cred == null) {
-throw new IOException("The provided alias cannot be resolved");
+throw new FalconException("The provided alias cannot be resolved");
}
return new String(cred);
} catch (InvocationTargetException ite) {
throw new RuntimeException("Error resolving password "
throw new FalconException("Error resolving password "
+ " from the credential providers ", ite.getTargetException());
} catch (IllegalAccessException iae) {
throw new RuntimeException("Error invoking the credential provider method", iae);
throw new FalconException("Error invoking the credential provider method", iae);
}
}

public static void createCredentialEntry(Configuration conf, String alias, String credential)
throws FalconException {
if (!isProviderAvailable()) {
throw new FalconException("CredentialProvider facility not available in the hadoop environment");
}

try {
List<?> result = (List<?>) methGetProviders.invoke(null, new Object[] { conf });
Object provider = result.get(0);
LOG.debug("Using credential provider " + provider);

methCreateCredEntry.invoke(provider, new Object[] { alias, credential.toCharArray() });
methFlush.invoke(provider, new Object[] {});
} catch (InvocationTargetException ite) {
throw new FalconException(
"Error creating credential entry using the credential provider", ite.getTargetException());
} catch (IllegalAccessException iae) {
throw new FalconException("Error accessing the credential create method", iae);
}
}
}
@@ -22,6 +22,8 @@
import org.apache.commons.lang3.StringUtils;
import org.apache.falcon.FalconException;
import org.apache.falcon.expression.ExpressionHelper;
import org.apache.falcon.security.CredentialProviderHelper;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -42,6 +44,9 @@ public abstract class ApplicationProperties extends Properties {

private static final Logger LOG = LoggerFactory.getLogger(ApplicationProperties.class);

public static final String CREDENTIAL_PROVIDER_PROPERTY = "credential.provider.path";
public static final String ALIAS_PROPERTY_PREFIX = "credential.provider.alias.for.";

protected abstract String getPropertyFile();

protected String domain;
@@ -169,6 +174,31 @@ protected Set<String> getKeys(Set<Object> keySet) {
return keys;
}

public void resolveAlias() throws FalconException {
try {
final Configuration conf = new Configuration();
String providerPath = getProperty(CREDENTIAL_PROVIDER_PROPERTY);
if (providerPath != null) {
conf.set(CredentialProviderHelper.CREDENTIAL_PROVIDER_PATH, providerPath);
}

Properties aliasProperties = new Properties();
for (Object keyObj : keySet()) {
String key = (String) keyObj;
if (key.startsWith(ALIAS_PROPERTY_PREFIX)) {
String propertyKey = key.substring(ALIAS_PROPERTY_PREFIX.length());
String propertyValue = CredentialProviderHelper.resolveAlias(conf, getProperty(key));
aliasProperties.setProperty(propertyKey, propertyValue);
}
}
LOG.info("Resolved alias properties: {}", aliasProperties.stringPropertyNames());
putAll(aliasProperties);
} catch (Exception e) {
LOG.error("Exception while resolving credential alias", e);
throw new FalconException("Exception while resolving credential alias", e);
}
}

@Override
public String getProperty(String key) {
return StringUtils.trim(super.getProperty(key));
@@ -37,6 +37,7 @@ public final class StartupProperties extends ApplicationProperties {
public static final String SAFEMODE_PROPERTY = "falcon.safeMode";
private static final String SAFEMODE_FILE = ".safemode";
private static final String CONFIGSTORE_PROPERTY = "config.store.uri";

private static FileSystem fileSystem;
private static Path storePath;

@@ -48,6 +49,7 @@ public final class StartupProperties extends ApplicationProperties {

private StartupProperties() throws FalconException {
super();
resolveAlias();
}

@Override
@@ -19,6 +19,8 @@
package org.apache.falcon.util;

import org.apache.falcon.FalconException;
import org.apache.falcon.security.CredentialProviderHelper;
import org.apache.hadoop.conf.Configuration;
import org.testng.Assert;
import org.testng.annotations.Test;

@@ -29,6 +31,43 @@
* Tests for ApplicationProperties.
*/
public class ApplicationPropertiesTest {
private static final String ALIAS_1 = "alias-key-1";
private static final String ALIAS_2 = "alias-key-2";
private static final String PASSWORD_1 = "password1";
private static final String PASSWORD_2 = "password2";
private static final String PROPERTY_1 = "property-key-1";
private static final String PROPERTY_2 = "property-key-2";
private static final String JKS_FILE_NAME = "credentials.jks";

@Test
public void testResolveAlias() throws Exception {
// hadoop credential provider needs to be available
Assert.assertTrue(CredentialProviderHelper.isProviderAvailable());

// clean credential provider store
File credDir = new File(".");
File file = new File(credDir, JKS_FILE_NAME);
file.delete();

// add alias to hadoop credential provider
Configuration conf = new Configuration();
String providerPath = "jceks://file/" + credDir.getAbsolutePath() + "/" + JKS_FILE_NAME;
conf.set(CredentialProviderHelper.CREDENTIAL_PROVIDER_PATH, providerPath);
CredentialProviderHelper.createCredentialEntry(conf, ALIAS_1, PASSWORD_1);
CredentialProviderHelper.createCredentialEntry(conf, ALIAS_2, PASSWORD_2);

// test case: no credential properties to resolve
ApplicationProperties properties = new ConfigLocation();
properties.resolveAlias();

// test case: multiple credential properties to resolve
properties.put(ApplicationProperties.CREDENTIAL_PROVIDER_PROPERTY, providerPath);
properties.put(ApplicationProperties.ALIAS_PROPERTY_PREFIX + PROPERTY_1, ALIAS_1);
properties.put(ApplicationProperties.ALIAS_PROPERTY_PREFIX + PROPERTY_2, ALIAS_2);
properties.resolveAlias();
Assert.assertEquals(properties.getProperty(PROPERTY_1), PASSWORD_1);
Assert.assertEquals(properties.getProperty(PROPERTY_2), PASSWORD_2);
}

@Test
public void testConfigLocation() throws Exception {
