HBASE-23312 HBase Thrift SPNEGO configs (HBASE-19852) should be backwards compatible

HBase Thrift SPNEGO configs should not be required.
The `hbase.thrift.spnego.keytab.file` and
`hbase.thrift.spnego.principal` configs should fall
back to the `hbase.thrift.keytab.file` and
`hbase.thrift.kerberos.principal` configs. This will
avoid any issues during upgrades.

Signed-off-by: Josh Elser <elserj@apache.org>
Amending-author: Josh Elser <elserj@apache.org>

Closes apache#850
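
As an illustration of the fallback described above (not part of the commit itself; the realm, host placeholder, and keytab path below are hypothetical), an hbase-site.xml that carries only the pre-existing Thrift Kerberos properties keeps working after this change, because the SPNEGO HTTP login falls back to them:

  <property>
    <name>hbase.thrift.kerberos.principal</name>
    <value>hbase/_HOST@EXAMPLE.COM</value>
  </property>
  <property>
    <name>hbase.thrift.keytab.file</name>
    <value>/etc/security/keytabs/hbase.service.keytab</value>
  </property>
  <!-- hbase.thrift.spnego.principal and hbase.thrift.spnego.keytab.file are intentionally
       unset in this hypothetical deployment; the Thrift server now reuses the two values
       above for its SPNEGO login. -->

Setting the two hbase.thrift.spnego.* properties explicitly still takes precedence, which keeps the backend Kerberos identity and the SPNEGO HTTP identity independently configurable.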
risdenk authored and joshelser committed Nov 25, 2019
1 parent 7700739 commit 483617d
Showing 4 changed files with 324 additions and 73 deletions.
ThriftHttpServlet.java
@@ -18,9 +18,6 @@

package org.apache.hadoop.hbase.thrift;

import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_PRINCIPAL_KEY;

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.Base64;
@@ -29,7 +26,6 @@
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -66,25 +62,14 @@ public class ThriftHttpServlet extends TServlet {
public static final String NEGOTIATE = "Negotiate";

public ThriftHttpServlet(TProcessor processor, TProtocolFactory protocolFactory,
UserGroupInformation serviceUGI, Configuration conf,
HBaseServiceHandler handler, boolean securityEnabled, boolean doAsEnabled)
throws IOException {
UserGroupInformation serviceUGI, UserGroupInformation httpUGI,
HBaseServiceHandler handler, boolean securityEnabled, boolean doAsEnabled) {
super(processor, protocolFactory);
this.serviceUGI = serviceUGI;
this.httpUGI = httpUGI;
this.handler = handler;
this.securityEnabled = securityEnabled;
this.doAsEnabled = doAsEnabled;

if (securityEnabled) {
// login the spnego principal
UserGroupInformation.setConfiguration(conf);
this.httpUGI = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
conf.get(THRIFT_SPNEGO_PRINCIPAL_KEY),
conf.get(THRIFT_SPNEGO_KEYTAB_FILE_KEY)
);
} else {
this.httpUGI = null;
}
}

@Override
ThriftServer.java
@@ -61,6 +61,8 @@
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SELECTOR_NUM;
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_DEFAULT;
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SERVER_SOCKET_READ_TIMEOUT_KEY;
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_KEYTAB_FILE_KEY;
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SPNEGO_PRINCIPAL_KEY;
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_ENABLED_KEY;
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_EXCLUDE_CIPHER_SUITES_KEY;
import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SSL_EXCLUDE_PROTOCOLS_KEY;
@@ -172,6 +174,7 @@ public class ThriftServer extends Configured implements Tool {
protected ThriftMetrics metrics;
protected HBaseServiceHandler hbaseServiceHandler;
protected UserGroupInformation serviceUGI;
protected UserGroupInformation httpUGI;
protected boolean httpEnabled;

protected SaslUtil.QualityOfProtection qop;
@@ -210,8 +213,19 @@ protected void setupParamters() throws IOException {
conf.get(THRIFT_DNS_INTERFACE_KEY, "default"),
conf.get(THRIFT_DNS_NAMESERVER_KEY, "default")));
userProvider.login(THRIFT_KEYTAB_FILE_KEY, THRIFT_KERBEROS_PRINCIPAL_KEY, host);

// Setup the SPNEGO user for HTTP if configured
String spnegoPrincipal = getSpengoPrincipal(conf, host);
String spnegoKeytab = getSpnegoKeytab(conf);
UserGroupInformation.setConfiguration(conf);
// login the SPNEGO principal using UGI to avoid polluting the login user
this.httpUGI = UserGroupInformation.loginUserFromKeytabAndReturnUGI(spnegoPrincipal,
spnegoKeytab);
}
this.serviceUGI = userProvider.getCurrent().getUGI();
if (httpUGI == null) {
this.httpUGI = serviceUGI;
}

this.listenPort = conf.getInt(PORT_CONF_KEY, DEFAULT_LISTEN_PORT);
this.metrics = createThriftMetrics(conf);
@@ -249,6 +263,37 @@ protected void setupParamters() throws IOException {
pauseMonitor.start();
}

private String getSpengoPrincipal(Configuration conf, String host) throws IOException {
String principal = conf.get(THRIFT_SPNEGO_PRINCIPAL_KEY);
if (principal == null) {
// We cannot use the Hadoop configuration deprecation handling here since
// the THRIFT_KERBEROS_PRINCIPAL_KEY config is still valid for regular Kerberos
// communication. The preference should be to use the THRIFT_SPNEGO_PRINCIPAL_KEY
// config so that THRIFT_KERBEROS_PRINCIPAL_KEY doesn't control both backend
// Kerberos principal and SPNEGO principal.
LOG.info("Using deprecated {} config for SPNEGO principal. Use {} instead.",
THRIFT_KERBEROS_PRINCIPAL_KEY, THRIFT_SPNEGO_PRINCIPAL_KEY);
principal = conf.get(THRIFT_KERBEROS_PRINCIPAL_KEY);
}
// Handle _HOST in principal value
return org.apache.hadoop.security.SecurityUtil.getServerPrincipal(principal, host);
}

private String getSpnegoKeytab(Configuration conf) {
String keytab = conf.get(THRIFT_SPNEGO_KEYTAB_FILE_KEY);
if (keytab == null) {
// We cannot use the Hadoop configuration deprecation handling here since
// the THRIFT_KEYTAB_FILE_KEY config is still valid for regular Kerberos
// communication. The preference should be to use the THRIFT_SPNEGO_KEYTAB_FILE_KEY
// config so that THRIFT_KEYTAB_FILE_KEY doesn't control both backend
// Kerberos keytab and SPNEGO keytab.
LOG.info("Using deprecated {} config for SPNEGO keytab. Use {} instead.",
THRIFT_KEYTAB_FILE_KEY, THRIFT_SPNEGO_KEYTAB_FILE_KEY);
keytab = conf.get(THRIFT_KEYTAB_FILE_KEY);
}
return keytab;
}

protected void startInfoServer() throws IOException {
// Put up info server.
int port = conf.getInt(THRIFT_INFO_SERVER_PORT , THRIFT_INFO_SERVER_PORT_DEFAULT);
@@ -316,11 +361,10 @@ protected void printUsageAndExit(Options options, int exitCode)
* Create a Servlet for the http server
* @param protocolFactory protocolFactory
* @return the servlet
* @throws IOException IOException
*/
protected TServlet createTServlet(TProtocolFactory protocolFactory) throws IOException {
return new ThriftHttpServlet(processor, protocolFactory, serviceUGI,
conf, hbaseServiceHandler, securityEnabled, doAsEnabled);
protected TServlet createTServlet(TProtocolFactory protocolFactory) {
return new ThriftHttpServlet(processor, protocolFactory, serviceUGI, httpUGI,
hbaseServiceHandler, securityEnabled, doAsEnabled);
}

/**
TestThriftSpnegoHttpFallbackServer.java (new file)
@@ -0,0 +1,240 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.thrift;

import static org.apache.hadoop.hbase.thrift.Constants.THRIFT_SUPPORT_PROXYUSER_KEY;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;

import java.io.File;
import java.nio.file.Paths;
import java.security.Principal;
import java.security.PrivilegedExceptionAction;
import java.util.Set;

import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosTicket;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.hadoop.hbase.util.TableDescriptorChecker;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.http.HttpHeaders;
import org.apache.http.auth.AuthSchemeProvider;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.KerberosCredentials;
import org.apache.http.client.config.AuthSchemes;
import org.apache.http.config.Lookup;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.impl.auth.SPNegoSchemeFactory;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.kerby.kerberos.kerb.client.JaasKrbUtil;
import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.THttpClient;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSManager;
import org.ietf.jgss.GSSName;
import org.ietf.jgss.Oid;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Start the HBase Thrift HTTP server on a random port through the command-line
* interface and talk to it from client side with SPNEGO security enabled.
*
* Supplemental test to TestThriftSpnegoHttpServer which falls back to the original
* Kerberos principal and keytab configuration properties, not the separate
* SPNEGO-specific properties.
*/
@Category({ClientTests.class, LargeTests.class})
public class TestThriftSpnegoHttpFallbackServer extends TestThriftHttpServer {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestThriftSpnegoHttpFallbackServer.class);

private static final Logger LOG =
LoggerFactory.getLogger(TestThriftSpnegoHttpFallbackServer.class);

private static SimpleKdcServer kdc;
private static File serverKeytab;
private static File clientKeytab;

private static String clientPrincipal;
private static String serverPrincipal;
private static String spnegoServerPrincipal;

private static SimpleKdcServer buildMiniKdc() throws Exception {
SimpleKdcServer kdc = new SimpleKdcServer();

File kdcDir = Paths.get(TEST_UTIL.getRandomDir().toString()).toAbsolutePath().toFile();
kdcDir.mkdirs();
kdc.setWorkDir(kdcDir);

kdc.setKdcHost(HConstants.LOCALHOST);
int kdcPort = HBaseTestingUtility.randomFreePort();
kdc.setAllowTcp(true);
kdc.setAllowUdp(false);
kdc.setKdcTcpPort(kdcPort);

LOG.info("Starting KDC server at " + HConstants.LOCALHOST + ":" + kdcPort);

kdc.init();

return kdc;
}

private static void addSecurityConfigurations(Configuration conf) {
KerberosName.setRules("DEFAULT");

HBaseKerberosUtils.setKeytabFileForTesting(serverKeytab.getAbsolutePath());

conf.setBoolean(THRIFT_SUPPORT_PROXYUSER_KEY, true);
conf.setBoolean(Constants.USE_HTTP_CONF_KEY, true);

conf.set(Constants.THRIFT_KERBEROS_PRINCIPAL_KEY, serverPrincipal);
conf.set(Constants.THRIFT_KEYTAB_FILE_KEY, serverKeytab.getAbsolutePath());

HBaseKerberosUtils.setSecuredConfiguration(conf, spnegoServerPrincipal,
spnegoServerPrincipal);
conf.set("hadoop.proxyuser.HTTP.hosts", "*");
conf.set("hadoop.proxyuser.HTTP.groups", "*");
conf.set(Constants.THRIFT_KERBEROS_PRINCIPAL_KEY, spnegoServerPrincipal);
}

@BeforeClass
public static void setUpBeforeClass() throws Exception {
kdc = buildMiniKdc();
kdc.start();

File keytabDir = Paths.get(TEST_UTIL.getRandomDir().toString()).toAbsolutePath().toFile();
keytabDir.mkdirs();

clientPrincipal = "client@" + kdc.getKdcConfig().getKdcRealm();
clientKeytab = new File(keytabDir, clientPrincipal + ".keytab");
kdc.createAndExportPrincipals(clientKeytab, clientPrincipal);

serverPrincipal = "hbase/" + HConstants.LOCALHOST + "@" + kdc.getKdcConfig().getKdcRealm();
serverKeytab = new File(keytabDir, serverPrincipal.replace('/', '_') + ".keytab");

spnegoServerPrincipal = "HTTP/" + HConstants.LOCALHOST + "@" + kdc.getKdcConfig().getKdcRealm();
// Add SPNEGO principal to server keytab
kdc.createAndExportPrincipals(serverKeytab, serverPrincipal, spnegoServerPrincipal);

TEST_UTIL.getConfiguration().setBoolean(Constants.USE_HTTP_CONF_KEY, true);
addSecurityConfigurations(TEST_UTIL.getConfiguration());

TestThriftHttpServer.setUpBeforeClass();
}

@AfterClass
public static void tearDownAfterClass() throws Exception {
TestThriftHttpServer.tearDownAfterClass();

try {
if (null != kdc) {
kdc.stop();
kdc = null;
}
} catch (Exception e) {
LOG.info("Failed to stop mini KDC", e);
}
}

@Override
protected void talkToThriftServer(String url, int customHeaderSize) throws Exception {
// Close httpClient and THttpClient automatically on any failures
try (
CloseableHttpClient httpClient = createHttpClient();
THttpClient tHttpClient = new THttpClient(url, httpClient)
) {
tHttpClient.open();
if (customHeaderSize > 0) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < customHeaderSize; i++) {
sb.append("a");
}
tHttpClient.setCustomHeader(HttpHeaders.USER_AGENT, sb.toString());
}

TProtocol prot = new TBinaryProtocol(tHttpClient);
Hbase.Client client = new Hbase.Client(prot);
TestThriftServer.createTestTables(client);
TestThriftServer.checkTableList(client);
TestThriftServer.dropTestTables(client);
}
}

private CloseableHttpClient createHttpClient() throws Exception {
final Subject clientSubject = JaasKrbUtil.loginUsingKeytab(clientPrincipal, clientKeytab);
final Set<Principal> clientPrincipals = clientSubject.getPrincipals();
// Make sure the subject has a principal
assertFalse("Found no client principals in the clientSubject.",
clientPrincipals.isEmpty());

// Get a TGT for the subject (might have many, different encryption types). The first should
// be the default encryption type.
Set<KerberosTicket> privateCredentials =
clientSubject.getPrivateCredentials(KerberosTicket.class);
assertFalse("Found no private credentials in the clientSubject.",
privateCredentials.isEmpty());
KerberosTicket tgt = privateCredentials.iterator().next();
assertNotNull("No kerberos ticket found.", tgt);

// The name of the principal
final String clientPrincipalName = clientPrincipals.iterator().next().getName();

return Subject.doAs(clientSubject, new PrivilegedExceptionAction<CloseableHttpClient>() {
@Override
public CloseableHttpClient run() throws Exception {
// Logs in with Kerberos via GSS
GSSManager gssManager = GSSManager.getInstance();
// jGSS Kerberos login constant
Oid oid = new Oid("1.2.840.113554.1.2.2");
GSSName gssClient = gssManager.createName(clientPrincipalName, GSSName.NT_USER_NAME);
GSSCredential credential = gssManager.createCredential(gssClient,
GSSCredential.DEFAULT_LIFETIME, oid, GSSCredential.INITIATE_ONLY);

Lookup<AuthSchemeProvider> authRegistry = RegistryBuilder.<AuthSchemeProvider>create()
.register(AuthSchemes.SPNEGO, new SPNegoSchemeFactory(true, true))
.build();

BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new KerberosCredentials(credential));

return HttpClients.custom()
.setDefaultAuthSchemeRegistry(authRegistry)
.setDefaultCredentialsProvider(credentialsProvider)
.build();
}
});
}
}