From ab7664bb9116f1a591a25bd61dbb53f09039a248 Mon Sep 17 00:00:00 2001 From: zdeng Date: Tue, 16 May 2023 09:39:35 +0800 Subject: [PATCH 1/8] HIVE-27352: Support both LDAP and Kerberos Auth in HS2 --- .../minikdc/TestHS2AuthMechsWithMiniKdc.java | 166 ++++++++++++++++++ .../hive/jdbc/HttpBasicAuthInterceptor.java | 6 + .../jdbc/HttpKerberosRequestInterceptor.java | 8 +- .../hive/jdbc/HttpRequestInterceptorBase.java | 9 + .../hive/jdbc/HttpTokenAuthInterceptor.java | 6 + .../jwt/HttpJwtAuthRequestInterceptor.java | 5 + .../saml/HttpSamlAuthRequestInterceptor.java | 5 + .../apache/hive/service/auth/AuthType.java | 69 ++++++-- .../hive/service/auth/HiveAuthFactory.java | 30 ++-- .../service/cli/thrift/ThriftHttpServlet.java | 45 ++++- .../hive/service/auth/TestAuthType.java | 151 ++++++++-------- 11 files changed, 381 insertions(+), 119 deletions(-) create mode 100644 itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java new file mode 100644 index 000000000000..feee1e61cb89 --- /dev/null +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java @@ -0,0 +1,166 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.hive.minikdc;
+
+import com.google.common.collect.ImmutableMap;
+
+import javax.security.sasl.AuthenticationException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.apache.hive.service.auth.AuthenticationProviderFactory;
+import org.apache.hive.service.auth.HiveAuthConstants;
+import org.apache.hive.service.auth.PasswdAuthenticationProvider;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import static org.junit.Assert.fail;
+
+public class TestHS2AuthMechsWithMiniKdc {
+  private static MiniHS2 miniHS2 = null;
+  private static MiniHiveKdc miniHiveKdc = null;
+
+  public static class CustomAuthForTest implements PasswdAuthenticationProvider {
+    private static List authentications = new ArrayList<>();
+    private static Map validUsers =
+        ImmutableMap.of("user1", "password1", "user2", "password2", "user3", "password3");
+    static String error_message = "Error validating the user: %s";
+    @Override
+    public void Authenticate(String user, String password) throws AuthenticationException {
+      authentications.add(user);
+      if (validUsers.containsKey(user) && validUsers.get(user).equals(password)) {
+        // noop
+      } else {
+        throw new AuthenticationException(String.format(error_message, user));
+      }
+    }
+    public static String getLastAuthenticateUser() {
+      return authentications.get(authentications.size() - 1);
+    }
+    public static int getAuthenticationSize() {
+      return authentications.size();
+    }
+    public static void clear() {
+      authentications.clear();
+    }
+  }
+
+  @BeforeClass
+  public static void setUpBeforeClass() throws Exception {
+    Class.forName(MiniHS2.getJdbcDriverName());
+    miniHiveKdc = new MiniHiveKdc();
+    HiveConf hiveConf = new HiveConf();
+    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
+    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS, CustomAuthForTest.class.getName());
+
+    AuthenticationProviderFactory.AuthMethods.CUSTOM.getConf().set(HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname,
+        CustomAuthForTest.class.getName());
+    miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf,
+        HiveAuthConstants.AuthTypes.KERBEROS.getAuthName() + "," + HiveAuthConstants.AuthTypes.CUSTOM.getAuthName());
+    miniHS2.getHiveConf().setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, MiniHS2.HS2_ALL_MODE);
+    miniHS2.start(new HashMap<>());
+  }
+
+  @AfterClass
+  public static void tearDownAfterClass() throws Exception {
+    miniHS2.stop();
+  }
+
+  private void testKrbPasswordAuth(boolean httpMode) throws Exception {
+    String baseJdbc, jdbc;
+    if (!httpMode) {
+      baseJdbc = miniHS2.getBaseJdbcURL() + "default;";
+      jdbc = miniHS2.getJdbcURL();
+    } else {
+      baseJdbc = miniHS2.getBaseHttpJdbcURL() + "default;transportMode=http;httpPath=cliservice;";
+      jdbc = miniHS2.getHttpJdbcURL();
+    }
+
+    // First, try logging in through Kerberos
+    try {
+      String principal = miniHiveKdc.getFullyQualifiedServicePrincipal("dummy_user");
+      DriverManager.getConnection(baseJdbc + "principal=" + principal);
+      fail("Should fail to establish the connection as the server principal is wrong");
server principle is wrong"); + } catch (Exception e) { + if (!httpMode) { + Assert.assertTrue(e.getMessage().contains("GSS initiate failed")); + } else { + Assert.assertTrue(ExceptionUtils.getStackTrace(e).contains("Failed to find any Kerberos ticket")); + } + } + + try (Connection hs2Conn = DriverManager.getConnection(jdbc)) { + try (Statement statement = hs2Conn.createStatement()) { + statement.execute("create table if not exists test_hs2_with_multiple_auths(a string)"); + statement.execute("set hive.support.concurrency"); + validateResult(statement.getResultSet(), 1); + } + } + + // Next, test logging through user/password + try { + DriverManager.getConnection(baseJdbc + "user=user1;password=password2"); + fail("Should fail to establish the connection as password is wrong"); + } catch (Exception e) { + if (!httpMode) { + Assert.assertTrue(e.getMessage().contains("Error validating the login")); + } else { + Assert.assertTrue(e.getMessage().contains("HTTP Response code: 401")); + } + Assert.assertTrue(CustomAuthForTest.getAuthenticationSize() == 1); + Assert.assertEquals("user1", CustomAuthForTest.getLastAuthenticateUser()); + } + + try (Connection hs2Conn = DriverManager.getConnection(baseJdbc + "user=user2;password=password2")) { + try (Statement statement = hs2Conn.createStatement()) { + statement.execute("set hive.support.concurrency"); + validateResult(statement.getResultSet(), 1); + } + } + + Assert.assertEquals("user2", CustomAuthForTest.getLastAuthenticateUser()); + CustomAuthForTest.clear(); + } + + @Test + public void testKrbPasswordAuth() throws Exception { + testKrbPasswordAuth(false); // Test the binary mode + testKrbPasswordAuth(true); // Test the http mode + } + + private void validateResult(ResultSet rs, int expectedSize) throws Exception { + int actualSize = 0; + while (rs.next()) { + actualSize ++; + } + Assert.assertEquals(expectedSize, actualSize); + } +} diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java index 1887e07b19d2..1602ba7aa443 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HttpBasicAuthInterceptor.java @@ -53,4 +53,10 @@ protected void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContex Header basicAuthHeader = authScheme.authenticate(credentials, httpRequest, httpContext); httpRequest.addHeader(basicAuthHeader); } + + @Override + protected String getAuthType() { + // Let the server determine which particular password based method is using. 
+ return "UIDPWD"; + } } \ No newline at end of file diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java index 516825fa4553..c211260589b9 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HttpKerberosRequestInterceptor.java @@ -64,9 +64,15 @@ protected void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContex // Set the session key token (Base64 encoded) in the headers httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " + HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader); } catch (Exception e) { - throw new HttpException(e.getMessage(), e); + // e.getMessage() is null at UndeclaredThrowableException + throw new HttpException("Failed to find any Kerberos ticket", e); } finally { kerberosLock.unlock(); } } + + @Override + protected String getAuthType() { + return "KERBEROS"; + } } \ No newline at end of file diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpRequestInterceptorBase.java b/jdbc/src/java/org/apache/hive/jdbc/HttpRequestInterceptorBase.java index 862d299643ee..10b7246af47f 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HttpRequestInterceptorBase.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HttpRequestInterceptorBase.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.Objects; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Supplier; import org.apache.hadoop.hive.conf.Constants; @@ -51,6 +52,13 @@ public abstract class HttpRequestInterceptorBase implements HttpRequestIntercept protected abstract void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContext) throws Exception; + /** + * The auth method to tell the server to choose right auth mechanism to + * validate the request, especially when the server supports multiple auth methods in parallel. 
+   * @return the auth method the client is using, should not be null or empty
+   */
+  protected abstract String getAuthType();
+
   public HttpRequestInterceptorBase(CookieStore cs, String cn, boolean isSSL,
       Map<String, String> additionalHeaders, Map<String, String> customCookies) {
     this.cookieStore = cs;
@@ -103,6 +111,7 @@ public void process(HttpRequest httpRequest, HttpContext httpContext)
         httpRequest.addHeader(entry.getKey(), entry.getValue());
       }
     }
+    httpRequest.addHeader(Utils.JdbcConnectionParams.AUTH_TYPE, Objects.requireNonNull(getAuthType()));
     // Add custom cookies if passed to the jdbc driver
     if (customCookies != null) {
       String cookieHeaderKeyValues = "";
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HttpTokenAuthInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/HttpTokenAuthInterceptor.java
index fbfa7f6d5a6e..4c9038808413 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HttpTokenAuthInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HttpTokenAuthInterceptor.java
@@ -44,4 +44,10 @@ protected void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContex
       throws Exception {
     httpRequest.addHeader(HIVE_DELEGATION_TOKEN_HEADER, tokenStr);
   }
+
+  @Override
+  protected String getAuthType() {
+    // The delegation token is Kerberos-based
+    return "KERBEROS";
+  }
 }
\ No newline at end of file
diff --git a/jdbc/src/java/org/apache/hive/jdbc/jwt/HttpJwtAuthRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/jwt/HttpJwtAuthRequestInterceptor.java
index 51390c686b84..917bc983526d 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/jwt/HttpJwtAuthRequestInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/jwt/HttpJwtAuthRequestInterceptor.java
@@ -46,4 +46,9 @@ public HttpJwtAuthRequestInterceptor(String signedJwt, CookieStore cookieStore,
   protected void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContext) {
     httpRequest.addHeader(HttpHeaders.AUTHORIZATION, HttpAuthUtils.BEARER + " " + signedJwt);
   }
+
+  @Override
+  protected String getAuthType() {
+    return "JWT";
+  }
 }
diff --git a/jdbc/src/java/org/apache/hive/jdbc/saml/HttpSamlAuthRequestInterceptor.java b/jdbc/src/java/org/apache/hive/jdbc/saml/HttpSamlAuthRequestInterceptor.java
index 430dc8d0ef03..1e2bc429bbec 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/saml/HttpSamlAuthRequestInterceptor.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/saml/HttpSamlAuthRequestInterceptor.java
@@ -64,4 +64,9 @@ protected void addHttpAuthHeader(HttpRequest httpRequest, HttpContext httpContex
       httpRequest.addHeader(HiveSamlUtils.SSO_TOKEN_RESPONSE_PORT, port);
     }
   }
+
+  @Override
+  protected String getAuthType() {
+    return "SAML";
+  }
 }
diff --git a/service/src/java/org/apache/hive/service/auth/AuthType.java b/service/src/java/org/apache/hive/service/auth/AuthType.java
index 281c9a67d62b..83c86b3a2b3a 100644
--- a/service/src/java/org/apache/hive/service/auth/AuthType.java
+++ b/service/src/java/org/apache/hive/service/auth/AuthType.java
@@ -18,14 +18,18 @@
 package org.apache.hive.service.auth;
 
+import com.google.common.collect.ImmutableSet;
+
 import org.apache.commons.lang3.EnumUtils;
+import org.apache.hadoop.hive.conf.HiveServer2TransportMode;
 
-import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.Collection;
-import java.util.HashSet;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 /**
  * AuthType is used to parse and verify the authentication type config value.
  * Throws an exception if the config value is not allowed.
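  * For example, "KERBEROS,LDAP" enables Kerberos alongside a single password-based
  * method, while "LDAP,PAM" is rejected because only one password-based method may
  * be enabled at a time.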
 */
public class AuthType {
-  static final Set PASSWORD_BASED_TYPES = new HashSet<>(Arrays.asList(
-      HiveAuthConstants.AuthTypes.LDAP, HiveAuthConstants.AuthTypes.CUSTOM, HiveAuthConstants.AuthTypes.PAM));
+  static final Set PASSWORD_BASED_TYPES = ImmutableSet.of(
+      HiveAuthConstants.AuthTypes.LDAP, HiveAuthConstants.AuthTypes.CUSTOM,
+      HiveAuthConstants.AuthTypes.PAM, HiveAuthConstants.AuthTypes.NONE
+  );
   private final BitSet typeBits;
+  private final List authTypes;
+  private final HiveServer2TransportMode mode;
 
-  public AuthType(String authTypes) throws Exception {
+  public AuthType(String authTypes, HiveServer2TransportMode mode) {
+    this.authTypes = new ArrayList<>();
+    this.mode = mode;
     typeBits = new BitSet();
     parseTypes(authTypes);
     verifyTypes(authTypes);
   }
 
-  private void parseTypes(String authTypes) throws Exception {
+  private void parseTypes(String authTypes) {
     String[] types = authTypes.split(",");
     for (String type : types) {
       if (!EnumUtils.isValidEnumIgnoreCase(HiveAuthConstants.AuthTypes.class, type)) {
-        throw new Exception(type + " is not a valid authentication type.");
+        throw new IllegalArgumentException(type + " is not a valid authentication type.");
       }
-      typeBits.set(EnumUtils.getEnumIgnoreCase(HiveAuthConstants.AuthTypes.class, type).ordinal());
+      HiveAuthConstants.AuthTypes authType = EnumUtils.getEnumIgnoreCase(HiveAuthConstants.AuthTypes.class, type);
+      this.authTypes.add(authType);
+      typeBits.set(authType.ordinal());
     }
   }
 
-  private void verifyTypes(String authTypes) throws Exception {
+  private void verifyTypes(String authTypes) {
     if (typeBits.cardinality() == 1) {
       // single authentication type has no conflicts
       return;
     }
 
+    if (typeBits.get(HiveAuthConstants.AuthTypes.NOSASL.ordinal())) {
+      throw new UnsupportedOperationException("NOSASL can't be combined with other auth methods: " + authTypes);
+    }
+
+    if (areAnyEnabled(PASSWORD_BASED_TYPES) && !isExactlyOneEnabled(PASSWORD_BASED_TYPES)) {
+      throw new RuntimeException("Multiple password-based auth methods found: " + authTypes);
+    }
+
     if ((typeBits.get(HiveAuthConstants.AuthTypes.SAML.ordinal()) || typeBits.get(HiveAuthConstants.AuthTypes.JWT.ordinal())) &&
-        !typeBits.get(HiveAuthConstants.AuthTypes.NOSASL.ordinal()) &&
-        !typeBits.get(HiveAuthConstants.AuthTypes.KERBEROS.ordinal()) &&
-        !typeBits.get(HiveAuthConstants.AuthTypes.NONE.ordinal()) &&
-        (!areAnyEnabled(PASSWORD_BASED_TYPES) || isExactlyOneEnabled(PASSWORD_BASED_TYPES))) {
-      // SAML can be enabled with another password based authentication types
-      return;
+        (mode == HiveServer2TransportMode.all || mode == HiveServer2TransportMode.binary)) {
+      throw new UnsupportedOperationException("HiveServer2 binary mode doesn't support JWT and SAML,"
+          + " please consider using http mode only");
     }
-    throw new Exception("The authentication types have conflicts: " + authTypes);
   }
 
   private boolean isExactlyOneEnabled(Collection types) {
@@ -94,14 +110,18 @@ public boolean isEnabled(HiveAuthConstants.AuthTypes type) {
     return typeBits.get(type.ordinal());
   }
 
+  public boolean isPasswordBasedAuthEnabled() {
+    return areAnyEnabled(PASSWORD_BASED_TYPES);
+  }
+
+  public String getAuthTypes() {
+    return authTypes.stream().map(au -> au.getAuthName()).collect(Collectors.joining(","));
+  }
 
   public String getPasswordBasedAuthStr() {
     if (isEnabled(HiveAuthConstants.AuthTypes.NOSASL)) {
       return HiveAuthConstants.AuthTypes.NOSASL.getAuthName();
     }
-    if (isEnabled(HiveAuthConstants.AuthTypes.NONE)) {
-      return HiveAuthConstants.AuthTypes.NONE.getAuthName();
-    }
    for (HiveAuthConstants.AuthTypes
type : PASSWORD_BASED_TYPES) { if (isEnabled(type)) { return type.getAuthName(); @@ -109,4 +129,15 @@ public String getPasswordBasedAuthStr() { } return ""; } + + public boolean isLoadedFirst(HiveAuthConstants.AuthTypes type) { + if (!isEnabled(type) || authTypes.isEmpty()) { + return false; + } + return authTypes.get(0) == type; + } + + public boolean isPasswordBasedAuth(HiveAuthConstants.AuthTypes type) { + return PASSWORD_BASED_TYPES.contains(type); + } } diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java index e55222bd357d..9c281010e03f 100644 --- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -29,6 +29,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.conf.HiveServer2TransportMode; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim; @@ -40,7 +41,6 @@ import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.authorize.ProxyUsers; -import org.apache.hive.service.auth.HiveAuthConstants.AuthTypes; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.rpc.thrift.TCLIService; import org.apache.thrift.TProcessorFactory; @@ -58,16 +58,16 @@ public class HiveAuthFactory { private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class); private HadoopThriftAuthBridge.Server saslServer; - private String authTypeStr; private final String transportMode; private final HiveConf conf; + private final AuthType authType; private String hadoopAuth; private MetastoreDelegationTokenManager delegationTokenManager = null; public HiveAuthFactory(HiveConf conf) throws TTransportException { this.conf = conf; transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE); - authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION); + String authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION); // ShimLoader.getHadoopShims().isSecurityEnabled() will only check that // hadoopAuth is not simple, it does not guarantee it is kerberos hadoopAuth = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple"); @@ -79,6 +79,7 @@ public HiveAuthFactory(HiveConf conf) throws TTransportException { authTypeStr = HiveAuthConstants.AuthTypes.NONE.getAuthName(); } } + authType = new AuthType(authTypeStr, HiveServer2TransportMode.valueOf(transportMode.toLowerCase())); if (isSASLWithKerberizedHadoop()) { saslServer = HadoopThriftAuthBridge.getBridge().createServer( @@ -133,12 +134,8 @@ public TTransportFactory getAuthTransFactory() throws LoginException { } catch (TTransportException e) { throw new LoginException(e.getMessage()); } - if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.KERBEROS.getAuthName())) { - // no-op - } else if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NONE.getAuthName()) || - authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.LDAP.getAuthName()) || - authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.PAM.getAuthName()) || - authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.CUSTOM.getAuthName())) { + if (authType.isPasswordBasedAuthEnabled()) { + String authTypeStr = 
authType.getPasswordBasedAuthStr(); try { serverTransportFactory.addServerDefinition("PLAIN", authTypeStr, null, new HashMap(), @@ -147,19 +144,16 @@ public TTransportFactory getAuthTransFactory() throws LoginException { throw new LoginException ("Error setting callback handler" + e); } } else { - throw new LoginException("Unsupported authentication type " + authTypeStr); + throw new LoginException("Unsupported authentication type " + authType.getAuthTypes()); } transportFactory = saslServer.wrapTransportFactory(serverTransportFactory); - } else if (authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NONE.getAuthName()) || - authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.LDAP.getAuthName()) || - authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.PAM.getAuthName()) || - authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.CUSTOM.getAuthName())) { + } else if (authType.isPasswordBasedAuthEnabled()) { + String authTypeStr = authType.getPasswordBasedAuthStr(); transportFactory = PlainSaslHelper.getPlainTransportFactory(authTypeStr); - } else if (authTypeStr - .equalsIgnoreCase(HiveAuthConstants.AuthTypes.NOSASL.getAuthName())) { + } else if (authType.isEnabled(HiveAuthConstants.AuthTypes.NOSASL)) { transportFactory = new TTransportFactory(); } else { - throw new LoginException("Unsupported authentication type " + authTypeStr); + throw new LoginException("Unsupported authentication type " + authType.getAuthTypes()); } String trustedDomain = HiveConf.getVar(conf, ConfVars.HIVE_SERVER2_TRUSTED_DOMAIN).trim(); @@ -201,7 +195,7 @@ public String getUserAuthMechanism() { public boolean isSASLWithKerberizedHadoop() { return "kerberos".equalsIgnoreCase(hadoopAuth) - && !authTypeStr.equalsIgnoreCase(HiveAuthConstants.AuthTypes.NOSASL.getAuthName()); + && !authType.isEnabled(HiveAuthConstants.AuthTypes.NOSASL); } public boolean isSASLKerberosUser() { diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java index 0c192f45ca49..60055979e73e 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java @@ -47,6 +47,7 @@ import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.conf.HiveServer2TransportMode; import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.hive.shims.Utils; @@ -112,6 +113,7 @@ public class ThriftHttpServlet extends TServlet { private final HiveAuthFactory hiveAuthFactory; private static final String HIVE_DELEGATION_TOKEN_HEADER = "X-Hive-Delegation-Token"; private static final String X_FORWARDED_FOR = "X-Forwarded-For"; + private static final String AUTH_TYPE = "auth"; private JWTValidator jwtValidator; @@ -120,7 +122,7 @@ public ThriftHttpServlet(TProcessor processor, TProtocolFactory protocolFactory, HiveAuthFactory hiveAuthFactory, HiveConf hiveConf) throws Exception { super(processor, protocolFactory); this.hiveConf = hiveConf; - this.authType = new AuthType(authType); + this.authType = new AuthType(authType, HiveServer2TransportMode.http); this.serviceUGI = serviceUGI; this.httpUGI = httpUGI; this.hiveAuthFactory = hiveAuthFactory; @@ -215,7 +217,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response) clientUserName = 
doPasswdAuth(request, HiveAuthConstants.AuthTypes.NOSASL.getAuthName());
       } else {
         // For a kerberos setup
-        if (isKerberosAuthMode(authType)) {
+        if (isAuthTypeEnabled(request, HiveAuthConstants.AuthTypes.KERBEROS)) {
           String delegationToken = request.getHeader(HIVE_DELEGATION_TOKEN_HEADER);
           // Each http request must have an Authorization header
           if ((delegationToken != null) && (!delegationToken.isEmpty())) {
@@ -223,9 +225,9 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
           } else {
             clientUserName = doKerberosAuth(request);
           }
-        } else if (authType.isEnabled(HiveAuthConstants.AuthTypes.JWT) && hasJWT(request)) {
+        } else if (isAuthTypeEnabled(request, HiveAuthConstants.AuthTypes.JWT)) {
          clientUserName = validateJWT(request, response);
-        } else if (authType.isEnabled(HiveAuthConstants.AuthTypes.SAML)) {
+        } else if (isAuthTypeEnabled(request, HiveAuthConstants.AuthTypes.SAML)) {
           // check if this request needs a SAML redirect
           String authHeader = request.getHeader(HttpAuthUtils.AUTHORIZATION);
           if ((authHeader == null || authHeader.isEmpty()) && needsRedirect(request, response)) {
@@ -292,7 +294,7 @@ protected void doPost(HttpServletRequest request, HttpServletResponse response)
       }
       // Send a 401 to the client
       response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
-      if(isKerberosAuthMode(authType)) {
+      if(isAuthTypeEnabled(request, HiveAuthConstants.AuthTypes.KERBEROS)) {
         response.addHeader(HttpAuthUtils.WWW_AUTHENTICATE, HttpAuthUtils.NEGOTIATE);
       } else {
         try {
@@ -763,8 +765,37 @@ private String getAuthHeader(HttpServletRequest request)
     return authHeaderBase64String;
   }
 
-  private boolean isKerberosAuthMode(AuthType authType) {
-    return authType.isEnabled(HiveAuthConstants.AuthTypes.KERBEROS);
+  private boolean isAuthTypeEnabled(HttpServletRequest request,
+      HiveAuthConstants.AuthTypes authType) {
+    String authMechs = request.getHeader(AUTH_TYPE);
+
+    if (authType.getAuthName().equalsIgnoreCase(authMechs) && this.authType.isEnabled(authType) ||
+        ("UIDPWD".equalsIgnoreCase(authMechs) && this.authType.isPasswordBasedAuth(authType))) {
+      return true;
+    } else if (authMechs == null) {
+      // Kerberos -> JWT -> SAML -> Password(fall through if there is no match)
+      // If the auth header is missing, the request must come from an old client.
+      // An old client may talk to a server that mixes JWT with SAML or LDAP; the way
+      // to tell such requests apart is whether the request header carries a JWT token.
+      if (this.authType.isEnabled(HiveAuthConstants.AuthTypes.JWT)) {
+        if (authType == HiveAuthConstants.AuthTypes.JWT) {
+          if (hasJWT(request)) {
+            return true;
+          }
+        } else if (authType != HiveAuthConstants.AuthTypes.KERBEROS) {
+          // If you wish to try JWT,KERBEROS with an old client,
+          // append the property http.header.auth=kerberos to the URL.
+          return this.authType.isEnabled(authType);
+        }
+      }
+
+      if (this.authType.isLoadedFirst(authType)) {
+        // This is important for compatibility with old clients: the first configured
+        // auth method takes over when the "auth" header is missing.
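+        // For example, with hive.server2.authentication=KERBEROS,LDAP an old client
+        // that sends no "auth" header is routed to the Kerberos path first.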
+        return true;
+      }
+    }
+    return false;
   }
 
   private boolean hasJWT(HttpServletRequest request) {
diff --git a/service/src/test/org/apache/hive/service/auth/TestAuthType.java b/service/src/test/org/apache/hive/service/auth/TestAuthType.java
index c3eebdff12fa..af95d97f1c75 100644
--- a/service/src/test/org/apache/hive/service/auth/TestAuthType.java
+++ b/service/src/test/org/apache/hive/service/auth/TestAuthType.java
@@ -18,6 +18,10 @@
 package org.apache.hive.service.auth;
 
+import com.google.common.collect.ImmutableSet;
+
+import org.apache.commons.lang3.EnumUtils;
+import org.apache.hadoop.hive.conf.HiveServer2TransportMode;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -37,7 +41,7 @@ public void testSingleAuth() throws Exception {
   }
 
   private void testSingleAuth(HiveAuthConstants.AuthTypes type) throws Exception {
-    AuthType authType = new AuthType(type.getAuthName());
+    AuthType authType = new AuthType(type.getAuthName(), HiveServer2TransportMode.http);
     Assert.assertTrue(authType.isEnabled(type));
     if (type == HiveAuthConstants.AuthTypes.NOSASL || type == HiveAuthConstants.AuthTypes.NONE ||
         AuthType.PASSWORD_BASED_TYPES.contains(type)) {
@@ -63,7 +67,7 @@ public void testOnePasswordAuthWithJWT() throws Exception {
   }
 
   private void testOnePasswordAuthWithSAML(HiveAuthConstants.AuthTypes type) throws Exception {
-    AuthType authType = new AuthType("SAML," + type.getAuthName());
+    AuthType authType = new AuthType("SAML," + type.getAuthName(), HiveServer2TransportMode.http);
     Assert.assertTrue(authType.isEnabled(HiveAuthConstants.AuthTypes.SAML));
     Assert.assertTrue(authType.isEnabled(type));
 
@@ -75,10 +79,40 @@ private void testOnePasswordAuthWithSAML(HiveAuthConstants.AuthTypes type) throw
       Assert.assertFalse(authType.isEnabled(disabledType));
     }
     Assert.assertEquals(type.getAuthName(), authType.getPasswordBasedAuthStr());
+
+    verify("SAML," + type.getAuthName(), HiveServer2TransportMode.binary, true);
+    verify("SAML," + type.getAuthName(), HiveServer2TransportMode.all, true);
+  }
+
+  private void verify(String authTypes, HiveServer2TransportMode mode, boolean shouldThrowException) {
+    try {
+      AuthType authType = new AuthType(authTypes, mode);
+      if (shouldThrowException) {
+        Assert.fail("HiveServer2 " + mode.name() + " mode can't support " + authTypes + " by design");
+      } else {
+        String[] authMethods = authTypes.split(",");
+        for (int i = 0; i < authMethods.length; i++) {
+          HiveAuthConstants.AuthTypes authMech = EnumUtils.getEnumIgnoreCase(HiveAuthConstants.AuthTypes.class,
+              authMethods[i]);
+          Assert.assertTrue(authType.isEnabled(authMech));
+          if (i == 0) {
+            Assert.assertTrue(authType.isLoadedFirst(authMech));
+          } else {
+            Assert.assertFalse(authType.isLoadedFirst(authMech));
+          }
+        }
+      }
+    } catch (Exception e) {
+      if (!shouldThrowException) {
+        Assert.fail("HiveServer2 " + mode.name() + " mode should be able to support " + authTypes);
+      } else {
+        Assert.assertTrue(e instanceof RuntimeException);
+      }
+    }
+  }
 
   private void testOnePasswordAuthWithJWT(HiveAuthConstants.AuthTypes type) throws Exception {
-    AuthType authType = new AuthType("JWT," + type.getAuthName());
+    AuthType authType = new AuthType("JWT," + type.getAuthName(), HiveServer2TransportMode.http);
     Assert.assertTrue(authType.isEnabled(HiveAuthConstants.AuthTypes.JWT));
     Assert.assertTrue(authType.isEnabled(type));
 
@@ -90,81 +124,50 @@ private void testOnePasswordAuthWithJWT(HiveAuthConstants.AuthTypes type) throws
       Assert.assertFalse(authType.isEnabled(disabledType));
     }
    Assert.assertEquals(type.getAuthName(),
authType.getPasswordBasedAuthStr()); - } - - @Test(expected = Exception.class) - public void testKerberosWithSAML() throws Exception { - AuthType authType = new AuthType("KERBEROS,SAML"); - } - - @Test(expected = Exception.class) - public void testKerberosWithSAMLAndLdap() throws Exception { - AuthType authType = new AuthType("KERBEROS,SAML,LDAP"); - } - - @Test(expected = Exception.class) - public void testKerberosWithLdap() throws Exception { - AuthType authType = new AuthType("KERBEROS,LDAP"); - } - - @Test(expected = Exception.class) - public void testNoneWithSAML() throws Exception { - AuthType authType = new AuthType("NONE,SAML"); - } - - @Test(expected = Exception.class) - public void testNoSaslWithSAML() throws Exception { - AuthType authType = new AuthType("NOSASL,SAML"); - } - - @Test(expected = Exception.class) - public void testMultiPasswordAuthWithSAML() throws Exception { - AuthType authType = new AuthType("SAML,LDAP,PAM,CUSTOM"); - } - - @Test(expected = Exception.class) - public void testMultiPasswordAuth() throws Exception { - AuthType authType = new AuthType("LDAP,PAM,CUSTOM"); - } - - @Test(expected = Exception.class) - public void testNotExistAuth() throws Exception { - AuthType authType = new AuthType("SAML,OTHER"); - authType = new AuthType("JWT,OTHER"); - } - - @Test(expected = Exception.class) - public void testKerberosWithJWT() throws Exception { - AuthType authType = new AuthType("KERBEROS,JWT"); - } - - @Test(expected = Exception.class) - public void testKerberosWithJWTAndLdap() throws Exception { - AuthType authType = new AuthType("KERBEROS,JWT,LDAP"); - } - - @Test(expected = Exception.class) - public void testNoneWithJWT() throws Exception { - AuthType authType = new AuthType("NONE,JWT"); - } - - @Test(expected = Exception.class) - public void testNoSaslWithJWT() throws Exception { - AuthType authType = new AuthType("NOSASL,JWT"); - } - - @Test(expected = Exception.class) - public void testMultiPasswordAuthWithJWT() throws Exception { - AuthType authType = new AuthType("JWT,LDAP,PAM,CUSTOM"); + verify("JWT," + type.getAuthName(), HiveServer2TransportMode.binary, true); + verify("JWT," + type.getAuthName(), HiveServer2TransportMode.all, true); } @Test - public void testLDAPWithSAMLAndJWT() throws Exception { - AuthType authType = new AuthType("JWT,SAML,LDAP"); + public void testMultipleAuthMethods() { + Set entries = ImmutableSet.of( + new EntryForTest("KERBEROS,SAML", HiveServer2TransportMode.binary, true), + new EntryForTest("KERBEROS,SAML", HiveServer2TransportMode.http, false), + new EntryForTest("KERBEROS,SAML,LDAP", HiveServer2TransportMode.all, true), + new EntryForTest("KERBEROS,SAML,LDAP", HiveServer2TransportMode.http, false), + new EntryForTest("KERBEROS,LDAP", HiveServer2TransportMode.all, false), + new EntryForTest("NONE,SAML", HiveServer2TransportMode.all, true), + new EntryForTest("NONE,SAML", HiveServer2TransportMode.http, false), + new EntryForTest("NOSASL,SAML", HiveServer2TransportMode.all, true), + new EntryForTest("SAML,LDAP,PAM,CUSTOM", HiveServer2TransportMode.http, true), + new EntryForTest("SAML,OTHER", HiveServer2TransportMode.all, true), + new EntryForTest("LDAP,PAM,CUSTOM", HiveServer2TransportMode.binary, true), + new EntryForTest("KERBEROS,JWT", HiveServer2TransportMode.binary, true), + new EntryForTest("KERBEROS,JWT", HiveServer2TransportMode.http, false), + new EntryForTest("KERBEROS,JWT,LDAP", HiveServer2TransportMode.http, false), + new EntryForTest("KERBEROS,JWT,LDAP", HiveServer2TransportMode.all, true), + new 
EntryForTest("NONE,JWT", HiveServer2TransportMode.all, true), + new EntryForTest("NOSASL,JWT", HiveServer2TransportMode.http, true), + new EntryForTest("JWT,LDAP,PAM,CUSTOM", HiveServer2TransportMode.http, true), + new EntryForTest("JWT,SAML,LDAP", HiveServer2TransportMode.http, false), + new EntryForTest("JWT,SAML,LDAP", HiveServer2TransportMode.all, true), + new EntryForTest("JWT,SAML", HiveServer2TransportMode.http, false), + new EntryForTest("JWT,SAML", HiveServer2TransportMode.binary, true) + ); + + for (EntryForTest entry : entries) { + verify(entry.authTypes, entry.mode, entry.shouldThrowException); + } } - @Test - public void testSAMLWithJWT() throws Exception { - AuthType authType = new AuthType("JWT,SAML"); + private class EntryForTest { + String authTypes; + HiveServer2TransportMode mode; + boolean shouldThrowException; + EntryForTest(String authTypes, HiveServer2TransportMode mode, boolean shouldThrowException) { + this.authTypes = authTypes; + this.mode = mode; + this.shouldThrowException = shouldThrowException; + } } } From 65ff676abc8a0b68e6cdaa30397dbe00a4ac7d03 Mon Sep 17 00:00:00 2001 From: zdeng Date: Wed, 17 May 2023 18:03:27 +0800 Subject: [PATCH 2/8] minor fix --- .../org/apache/hive/service/cli/thrift/ThriftHttpServlet.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java index 60055979e73e..5c5d3c719aff 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java @@ -770,7 +770,7 @@ private boolean isAuthTypeEnabled(HttpServletRequest request, String authMechs = request.getHeader(AUTH_TYPE); if (authType.getAuthName().equalsIgnoreCase(authMechs) && this.authType.isEnabled(authType) || - ("UIDPWD".equalsIgnoreCase(authMechs) && this.authType.isPasswordBasedAuth(authType))) { + "UIDPWD".equalsIgnoreCase(authMechs) && this.authType.isPasswordBasedAuth(authType)) { return true; } else if (authMechs == null) { // Kerberos -> JWT -> SAML -> Password(fall through if there is no match) From 5c607fd90cfe21efa8e044c7485a7b5f80f27dd1 Mon Sep 17 00:00:00 2001 From: zdeng Date: Thu, 18 May 2023 11:05:52 +0800 Subject: [PATCH 3/8] fix ut --- .../apache/hive/service/auth/HiveAuthFactory.java | 2 -- .../hive/service/cli/thrift/ThriftHttpServlet.java | 13 ++++++------- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java index 9c281010e03f..bd73f14560a0 100644 --- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -143,8 +143,6 @@ public TTransportFactory getAuthTransFactory() throws LoginException { } catch (AuthenticationException e) { throw new LoginException ("Error setting callback handler" + e); } - } else { - throw new LoginException("Unsupported authentication type " + authType.getAuthTypes()); } transportFactory = saslServer.wrapTransportFactory(serverTransportFactory); } else if (authType.isPasswordBasedAuthEnabled()) { diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java index 5c5d3c719aff..c3b23527c741 100644 --- 
a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -767,21 +767,20 @@ private String getAuthHeader(HttpServletRequest request)
 
   private boolean isAuthTypeEnabled(HttpServletRequest request,
       HiveAuthConstants.AuthTypes authType) {
-    String authMechs = request.getHeader(AUTH_TYPE);
+    String authMethod = request.getHeader(AUTH_TYPE);
 
-    if (authType.getAuthName().equalsIgnoreCase(authMechs) && this.authType.isEnabled(authType) ||
-        "UIDPWD".equalsIgnoreCase(authMechs) && this.authType.isPasswordBasedAuth(authType)) {
+    if (authType.getAuthName().equalsIgnoreCase(authMethod) && this.authType.isEnabled(authType) ||
+        "UIDPWD".equalsIgnoreCase(authMethod) && this.authType.isPasswordBasedAuth(authType)) {
+      // The request has already set the "auth" header
       return true;
-    } else if (authMechs == null) {
+    } else if (authMethod == null) {
       // Kerberos -> JWT -> SAML -> Password(fall through if there is no match)
       // If the auth header is missing, the request must come from an old client.
       // An old client may talk to a server that mixes JWT with SAML or LDAP; the way
       // to tell such requests apart is whether the request header carries a JWT token.
       if (this.authType.isEnabled(HiveAuthConstants.AuthTypes.JWT)) {
         if (authType == HiveAuthConstants.AuthTypes.JWT) {
-          if (hasJWT(request)) {
-            return true;
-          }
+          return hasJWT(request);
         } else if (authType != HiveAuthConstants.AuthTypes.KERBEROS) {
           // If you wish to try JWT,KERBEROS with an old client,
           // append the property http.header.auth=kerberos to the URL.

From 96821d180ffe86dded6a3cb0b14f4b4a5ca975dd Mon Sep 17 00:00:00 2001
From: zdeng
Date: Thu, 18 May 2023 15:02:40 +0800
Subject: [PATCH 4/8] Add test for Kerberos and LDAP

---
 itests/hive-minikdc/pom.xml                   |  18 +++
 .../minikdc/TestHS2AuthMechsWithMiniKdc.java  | 136 ++++++++++--------
 2 files changed, 96 insertions(+), 58 deletions(-)

diff --git a/itests/hive-minikdc/pom.xml b/itests/hive-minikdc/pom.xml
index 92a89cc5ffaa..fc9cbf2cf523 100644
--- a/itests/hive-minikdc/pom.xml
+++ b/itests/hive-minikdc/pom.xml
@@ -29,6 +29,24 @@
   <dependencies>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-server-integ</artifactId>
+      <version>${apache-directory-server.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>dom4j</groupId>
+          <artifactId>dom4j</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.directory.server</groupId>
+      <artifactId>apacheds-test-framework</artifactId>
+      <version>${apache-directory-server.version}</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-common</artifactId>
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
index feee1e61cb89..8bd787572b88 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
@@ -18,79 +18,101 @@
 
 package org.apache.hive.minikdc;
 
-import com.google.common.collect.ImmutableMap;
-
-import javax.security.sasl.AuthenticationException;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.Statement;
-import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.directory.server.annotations.CreateLdapServer;
+import org.apache.directory.server.annotations.CreateTransport;
+import org.apache.directory.server.core.annotations.ApplyLdifFiles;
+import org.apache.directory.server.core.annotations.ContextEntry;
+import
org.apache.directory.server.core.annotations.CreateDS; +import org.apache.directory.server.core.annotations.CreateIndex; +import org.apache.directory.server.core.annotations.CreatePartition; +import org.apache.directory.server.core.integ.AbstractLdapTestUnit; +import org.apache.directory.server.core.integ.FrameworkRunner; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hive.jdbc.miniHS2.MiniHS2; import org.apache.hive.service.auth.AuthenticationProviderFactory; import org.apache.hive.service.auth.HiveAuthConstants; -import org.apache.hive.service.auth.PasswdAuthenticationProvider; import org.junit.AfterClass; import org.junit.Assert; -import org.junit.BeforeClass; +import org.junit.Before; import org.junit.Test; +import org.junit.runner.RunWith; import static org.junit.Assert.fail; -public class TestHS2AuthMechsWithMiniKdc { +/** + * TestSuite to test Hive's LDAP Authentication provider with an + * in-process LDAP Server (Apache Directory Server instance). + */ +@RunWith(FrameworkRunner.class) +@CreateLdapServer(transports = { + @CreateTransport(protocol = "LDAP", port = 10390 ), + @CreateTransport(protocol = "LDAPS", port = 10640 ) +}) + +@CreateDS(partitions = { + @CreatePartition( + name = "example", + suffix = "dc=example,dc=com", + contextEntry = @ContextEntry(entryLdif = + "dn: dc=example,dc=com\n" + + "dc: example\n" + + "objectClass: top\n" + + "objectClass: domain\n\n" + ), + indexes = { + @CreateIndex(attribute = "objectClass"), + @CreateIndex(attribute = "cn"), + @CreateIndex(attribute = "uid") + } + ) +}) + +@ApplyLdifFiles({ + "ldap/example.com.ldif", + "ldap/microsoft.schema.ldif", + "ldap/ad.example.com.ldif" +}) +// Test HS2 with Kerberos + LDAP auth methods +public class TestHS2AuthMechsWithMiniKdc extends AbstractLdapTestUnit { private static MiniHS2 miniHS2 = null; private static MiniHiveKdc miniHiveKdc = null; - public static class CustomAuthForTest implements PasswdAuthenticationProvider { - private static List authentications = new ArrayList<>(); - private static Map validUsers = - ImmutableMap.of("user1", "password1", "user2", "password2", "user3", "password3"); - static String error_message = "Error validating the user: %s"; - @Override - public void Authenticate(String user, String password) throws AuthenticationException { - authentications.add(user); - if (validUsers.containsKey(user) && validUsers.get(user).equals(password)) { - // noop - } else { - throw new AuthenticationException(String.format(error_message, user)); - } - } - public static String getLastAuthenticateUser() { - return authentications.get(authentications.size() - 1); - } - public static int getAuthenticationSize() { - return authentications.size(); + @Before + public void setUpBefore() throws Exception { + if (miniHS2 == null) { + Class.forName(MiniHS2.getJdbcDriverName()); + miniHiveKdc = new MiniHiveKdc(); + HiveConf hiveConf = new HiveConf(); + hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); + hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL, + "ldap://localhost:" + ldapServer.getPort()); + hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERDNPATTERN, + "uid=%s,ou=People,dc=example,dc=com"); + + AuthenticationProviderFactory.AuthMethods.LDAP.getConf().setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL, + "ldap://localhost:" + ldapServer.getPort()); + 
AuthenticationProviderFactory.AuthMethods.LDAP.getConf().setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERDNPATTERN, + "uid=%s,ou=People,dc=example,dc=com"); + miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf, + HiveAuthConstants.AuthTypes.KERBEROS.getAuthName() + "," + HiveAuthConstants.AuthTypes.LDAP.getAuthName()); + miniHS2.getHiveConf().setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, MiniHS2.HS2_ALL_MODE); + miniHS2.start(new HashMap<>()); } - public static void clear() { - authentications.clear(); - } - } - - @BeforeClass - public static void setUpBeforeClass() throws Exception { - Class.forName(MiniHS2.getJdbcDriverName()); - miniHiveKdc = new MiniHiveKdc(); - HiveConf hiveConf = new HiveConf(); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); - hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); - hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS, CustomAuthForTest.class.getName()); - - AuthenticationProviderFactory.AuthMethods.CUSTOM.getConf().set(HiveConf.ConfVars.HIVE_SERVER2_CUSTOM_AUTHENTICATION_CLASS.varname, - CustomAuthForTest.class.getName()); - miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf, - HiveAuthConstants.AuthTypes.KERBEROS.getAuthName() + "," + HiveAuthConstants.AuthTypes.CUSTOM.getAuthName()); - miniHS2.getHiveConf().setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, MiniHS2.HS2_ALL_MODE); - miniHS2.start(new HashMap<>()); } @AfterClass public static void tearDownAfterClass() throws Exception { + if (ldapServer.isStarted()) { + ldapServer.stop(); + } miniHS2.stop(); } @@ -125,9 +147,9 @@ private void testKrbPasswordAuth(boolean httpMode) throws Exception { } } - // Next, test logging through user/password + // Next, test logging through LDAP try { - DriverManager.getConnection(baseJdbc + "user=user1;password=password2"); + DriverManager.getConnection(baseJdbc + "user=user1;password=password"); fail("Should fail to establish the connection as password is wrong"); } catch (Exception e) { if (!httpMode) { @@ -135,25 +157,22 @@ private void testKrbPasswordAuth(boolean httpMode) throws Exception { } else { Assert.assertTrue(e.getMessage().contains("HTTP Response code: 401")); } - Assert.assertTrue(CustomAuthForTest.getAuthenticationSize() == 1); - Assert.assertEquals("user1", CustomAuthForTest.getLastAuthenticateUser()); } - try (Connection hs2Conn = DriverManager.getConnection(baseJdbc + "user=user2;password=password2")) { + try (Connection hs2Conn = DriverManager.getConnection(baseJdbc + "user=user2;password=user2")) { try (Statement statement = hs2Conn.createStatement()) { statement.execute("set hive.support.concurrency"); validateResult(statement.getResultSet(), 1); } } - - Assert.assertEquals("user2", CustomAuthForTest.getLastAuthenticateUser()); - CustomAuthForTest.clear(); } @Test public void testKrbPasswordAuth() throws Exception { - testKrbPasswordAuth(false); // Test the binary mode - testKrbPasswordAuth(true); // Test the http mode + // Test the binary mode + testKrbPasswordAuth(false); + // Test the http mode + testKrbPasswordAuth(true); } private void validateResult(ResultSet rs, int expectedSize) throws Exception { @@ -163,4 +182,5 @@ private void validateResult(ResultSet rs, int expectedSize) throws Exception { } Assert.assertEquals(expectedSize, actualSize); } + } From 0d4f2714b50213a81dec4e3360dfb1884dfc86e0 Mon Sep 17 00:00:00 2001 From: zdeng Date: Mon, 22 May 2023 14:42:59 +0800 Subject: [PATCH 5/8] UT for Kerberos + JWT in http mode --- 
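Note: with KERBEROS,JWT enabled in http mode, an upgraded driver advertises its
mechanism through the "auth" request header, so a single HS2 instance can serve
both URL forms below (host, port and principal values are illustrative only):

  # Kerberos (SPNEGO) client
  jdbc:hive2://hs2.example.com:10001/default;transportMode=http;httpPath=cliservice;principal=hive/_HOST@EXAMPLE.COM
  # JWT client; the token is forwarded as a Bearer Authorization header
  jdbc:hive2://hs2.example.com:10001/default;transportMode=http;httpPath=cliservice;auth=jwt;jwt=<signed-jwt>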
 itests/hive-minikdc/pom.xml                   |   6 +
 .../hive/minikdc/TestHS2JWTWithMiniKdc.java   | 181 ++++++++++++++++++
 .../auth.jwt/jwt-authorized-key.json          |  12 ++
 .../auth.jwt/jwt-unauthorized-key.json        |  12 ++
 .../auth.jwt/jwt-verification-jwks.json       |  20 ++
 5 files changed, 231 insertions(+)
 create mode 100644 itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
 create mode 100644 itests/hive-minikdc/src/test/resources/auth.jwt/jwt-authorized-key.json
 create mode 100644 itests/hive-minikdc/src/test/resources/auth.jwt/jwt-unauthorized-key.json
 create mode 100644 itests/hive-minikdc/src/test/resources/auth.jwt/jwt-verification-jwks.json

diff --git a/itests/hive-minikdc/pom.xml b/itests/hive-minikdc/pom.xml
index fc9cbf2cf523..62b4d68ecf45 100644
--- a/itests/hive-minikdc/pom.xml
+++ b/itests/hive-minikdc/pom.xml
@@ -29,6 +29,12 @@
   <dependencies>
+    <dependency>
+      <groupId>com.github.tomakehurst</groupId>
+      <artifactId>wiremock-jre8-standalone</artifactId>
+      <version>2.32.0</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
       <groupId>org.apache.directory.server</groupId>
       <artifactId>apacheds-server-integ</artifactId>
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
new file mode 100644
index 000000000000..e6c31413a53c
--- /dev/null
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.hive.minikdc; + +import com.github.tomakehurst.wiremock.junit.WireMockRule; +import com.nimbusds.jose.JWSAlgorithm; +import com.nimbusds.jose.JWSHeader; +import com.nimbusds.jose.JWSSigner; +import com.nimbusds.jose.crypto.RSASSASigner; +import com.nimbusds.jose.jwk.RSAKey; +import com.nimbusds.jwt.JWTClaimsSet; +import com.nimbusds.jwt.SignedJWT; + +import java.io.File; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.Statement; +import java.util.Date; +import java.util.HashMap; +import java.util.UUID; +import java.util.concurrent.TimeUnit; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveServer2TransportMode; +import org.apache.hive.jdbc.miniHS2.MiniHS2; +import org.apache.hive.service.auth.HiveAuthConstants; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; + +import static com.github.tomakehurst.wiremock.client.WireMock.get; +import static com.github.tomakehurst.wiremock.client.WireMock.ok; + +/** + * Test for supporting Kerberos + JWT in HS2 http mode + * + */ +public class TestHS2JWTWithMiniKdc { + private static final int MOCK_JWKS_SERVER_PORT = 8089; + private static final File jwtAuthorizedKeyFile = + new File("src/test/resources/auth.jwt/jwt-authorized-key.json"); + private static final File jwtUnauthorizedKeyFile = + new File("src/test/resources/auth.jwt/jwt-unauthorized-key.json"); + private static final File jwtVerificationJWKSFile = + new File("src/test/resources/auth.jwt/jwt-verification-jwks.json"); + + private static MiniHS2 miniHS2 = null; + private static MiniHiveKdc miniHiveKdc = null; + + @ClassRule + public static final WireMockRule MOCK_JWKS_SERVER = new WireMockRule(MOCK_JWKS_SERVER_PORT); + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + Class.forName(MiniHS2.getJdbcDriverName()); + MOCK_JWKS_SERVER.stubFor(get("/jwks") + .willReturn(ok() + .withBody(Files.readAllBytes(jwtVerificationJWKSFile.toPath())))); + miniHiveKdc = new MiniHiveKdc(); + HiveConf hiveConf = new HiveConf(); + hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); + hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HiveServer2TransportMode.http.name()); + hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION_JWT_JWKS_URL, "http://localhost:" + + MOCK_JWKS_SERVER_PORT + "/jwks"); + + miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf, + HiveAuthConstants.AuthTypes.KERBEROS.getAuthName() + "," + HiveAuthConstants.AuthTypes.JWT.getAuthName()); + miniHS2.start(new HashMap<>()); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + miniHS2.stop(); + } + + @Test + public void testKrbJwtAuth() throws Exception { + String krb5Url = new StringBuilder(miniHS2.getBaseHttpJdbcURL()) + .append("default;") + .append("transportMode=http;httpPath=cliservice;") + .append("principal=" + miniHiveKdc.getFullHiveServicePrincipal()) + .toString(); + + try (Connection hs2Conn = DriverManager.getConnection(krb5Url)) { + try (Statement statement = hs2Conn.createStatement()) { + statement.execute("create table if not exists test_hs2_with_jwt_kerberos(a string)"); + statement.execute("set hive.support.concurrency"); + validateResult(statement.getResultSet(), 1); + 
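+        // Reaching here proves the SPNEGO path works; the JWT path below hits the
+        // same http endpoint and is distinguished by the "auth" header the driver sends.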
} + } + + StringBuilder jwtUrl = new StringBuilder(miniHS2.getBaseHttpJdbcURL()) + .append("default;") + .append("transportMode=http;httpPath=cliservice;") + .append("auth=jwt;") + .append("jwt="); + + try (Connection hs2Conn = DriverManager.getConnection(jwtUrl + .append(generateJWT("user1", true, TimeUnit.MINUTES.toMillis(5))) + .toString())) { + try (Statement statement = hs2Conn.createStatement()) { + statement.execute("create table if not exists test_hs2_with_jwt_kerberos(a string)"); + statement.execute("set hive.support.concurrency"); + validateResult(statement.getResultSet(), 1); + } + } + + try { + DriverManager.getConnection(jwtUrl + .append(generateJWT("user1", false, TimeUnit.MINUTES.toMillis(5))) + .toString()); + Assert.fail("Exception is expected as JWT token is invalid"); + } catch (Exception e) { + Assert.assertTrue(e.getMessage().contains("HTTP Response code: 401")); + } + } + + private String generateJWT(String user, boolean authorized, long lifeTimeMillis) throws Exception { + final byte[] content = authorized ? + Files.readAllBytes(jwtAuthorizedKeyFile.toPath()) : + Files.readAllBytes(jwtUnauthorizedKeyFile.toPath()); + RSAKey rsaKeyPair = RSAKey.parse(new String(content, StandardCharsets.UTF_8)); + + // Create RSA-signer with the private key + JWSSigner signer = new RSASSASigner(rsaKeyPair); + + JWSHeader header = new JWSHeader + .Builder(JWSAlgorithm.RS256) + .keyID(rsaKeyPair.getKeyID()) + .build(); + + Date now = new Date(); + Date expirationTime = new Date(now.getTime() + lifeTimeMillis); + JWTClaimsSet claimsSet = new JWTClaimsSet.Builder() + .jwtID(UUID.randomUUID().toString()) + .issueTime(now) + .issuer("auth-server") + .subject(user) + .expirationTime(expirationTime) + .claim("custom-claim-or-payload", "custom-claim-or-payload") + .build(); + + SignedJWT signedJWT = new SignedJWT(header, claimsSet); + + // Compute the RSA signature + signedJWT.sign(signer); + + return signedJWT.serialize(); + } + + private void validateResult(ResultSet rs, int expectedSize) throws Exception { + int actualSize = 0; + while (rs.next()) { + actualSize ++; + } + Assert.assertEquals(expectedSize, actualSize); + } + +} diff --git a/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-authorized-key.json b/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-authorized-key.json new file mode 100644 index 000000000000..b5b4fb40e7c9 --- /dev/null +++ b/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-authorized-key.json @@ -0,0 +1,12 @@ +{ + "p": "-8lxjB9JZA44XBLLVGnY20x28uT8NQ1BlbqI0Tlr96An4B_PzgPL5_bFFB7SWs8ehSWn9z2SJfClhQpBLfy-2mXvJek_xgibESIlPXqY9Qrg7-PhRmPs3whyiIsnn8tpPMm2XJ_4n0Y-Yfx4nwErGdy84LiKFMDXPEk2a7ndYWs", + "kty": "RSA", + "q": "0YAcTLBnTrSUiciE0lliIkAidW0TnHP48v-vJitLEz0d8mlTZ_aeOQJm6CUOqF7BqQv3Z8OK_HYKXfOr7xzUlfROONybUXRFE0LvT5Fjvrq-56QGB6GeFq5i6HKlRcC_8TD6WwUJWIzeYuPqhp_FYIpT4ds131d5VYPKDCdY_dM", + "d": "VsxW72idEAtoZQDphvxJ0t54EyRfcIJVB9BZuqnyNTfH-VsaUO3st86w_PMU_i0lmyIc8dkCmwOb8R2pRXDo6UxEYUe5YfBnvn9iYF3Ll2QfPOKfZhDBOfqSjEb1po20is7mXTQORBv3bhSo664pasHItTwDz-KKI-FiIu_PYq0lYihuaedUUMp3MQTvDFulpFWEKzqseBDat07BholvxjzlnBK-Ez3KI9qGH8VIIk5TGW5pVu3cQe1WC8NJOe3xR9vu7XX6xvhVLPP7fvKiXJWJ_I_SagAhR1JW0uDJl_b0CrYYeVUnt_pzvW1BeJGz7ysCXcHlLBUh72XrpW-O7Q", + "e": "AQAB", + "kid": "123", + "qi": "9yk0mg4LY48YS8cvG51wMVfKfEjSbt2ygKxqabdsP-qSVpz-KVJtCmbKa57jm2BaMV_mRBQFodxu4XN58VGsj5MzXC5Jb_CkLeQfkp6ZKvehZhiJn3HF0Kb19u9xPvKDclHpKl-UMM1Pcu8Ww52DOyOYcHa1_SLZ05CcOWvMkS8", + "dp": 
"HYtToYeCSxVIE7W42hzZb1IXmwS3e1ok2fbbWwGL47CNPUU-UwQrBvrzwRqkwDcRc7opbV9yKLWGFohPgZ_onSPc3evyqcAUwfvptr8N96LhJgTtSB8tijYpilAZxCxQGuvoVBIJUFcjtsezN6Uhc5VtLEk7GphOKSrGEfnrOiU", + "dq": "tF2uf5v0JT-1DnazW4IWydQblqtlEfKKp3LX8W2egh7BNJ3XcA9UI1LdFAord2u1IXwq8YvZkgdyX3bVVNSmdb_SxIOxuMv4WF_tNry-eku-5iFCC7nqKC7U-rkRb19GIToAoPJSHImTQOJmXKcbQEV3eGDJHdLqpGQFRLdvl38", + "n": "zg12QaFTsez1EijOYRFzNZdowOt79ePqxCMQ-EEHynUhEZ6TIDnXfjWfuWocS1qRRglUUbHerEtmACUKPQShaG8uL0ZXiLqDr2QSuqrTtr2VUGesxZc6GiqkZlnWFNu5kSUvtemcKxWl8OLFf-5kNnGW4_4xM6BIwosYZnddfFqQT5IP6iTMZIUIKXxY4s1dadYRIiMteNutro67fhOLKabHkyC6ILE6f6VZsYbb_NXC5yC--7DiC2GYKzy7TKmaczuDfQZVgVY-nL9kTPIdhf334EYHQfYmLdvLc56g8-cxY3xh2GnwAj1JcT2u3hsS4KS05bUFHFnveO5uxIYKMQ" +} \ No newline at end of file diff --git a/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-unauthorized-key.json b/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-unauthorized-key.json new file mode 100644 index 000000000000..f4845de7459d --- /dev/null +++ b/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-unauthorized-key.json @@ -0,0 +1,12 @@ +{ + "p": "wvzuDSY6dIsIJB0UM5BIncN6ui5ee-KHpCmBhh_ia2iX3DluQODEgITw7gDATTDdQsBD-nJLjrqUs5g5Gmt0UgZucXQ5PCt1CK6dLEZCaLivw2fsHYvOKeTkdA49wqLkTc8pkfQs09N-b6NspDDqVJPFffBvFpR_IBFay-xKa5k", + "kty": "RSA", + "q": "sQzza69VkEmgUm50pEGjgu-OxugOrjcHrjQ42A23YVwAAJ90qPNQa62O7dv5oWmSX2PJ7TgjkzbvtTycLfT_vUeapwfCcJe4WoDg54xF3E35yBvBIwReRiavxf5nWsHEtd5kBg6wRIndGwGUBE91xaLg21spjH7nQKtG9vKeNM8", + "d": "UbiPIpr7agQqpM3ERfaXsKNMETyBrIYr3yoggHQ7XQkSPepCgRhE86puRmjf76FtZ3RwpJwjLfO6Ap0fIE9LXXE8otTF9sMnC9fe7odHkEu61Wr3aQM-53dgZoJL7XU53LOo0cNO44SBbw11d2cYlAR3KuCEK7bCLMBOkK1gdxVpgDC7DgxVgnP39bUlf4fA5gQeT5nNGnCWTV4jMVWCyEb0Ck5CvGJp1cCKaMSEvV4j6AM72EkAn8PogTSOJpurRJaTky0De7-ncT2Sv5DCuOIkMhsHqayLbm7a84ORHqsnWpZV85WVW-xxiivkVpqtSDRKCI94pMa9DWszjNJW8Q", + "e": "AQAB", + "kid": "sig-1642039368", + "qi": "CXP_tewCHyXk6PNDcbI0wtXsaWJryOJfMsc7roBCoOwDbTekUFXhOfRmFX5ZTNetRNDpw9nNiQDXt8pyw7UZ-0EhD1cLst1slS__hBi5QEAGo9cUxl3RGeMAFtY9O8B1gjFyKkG5BzdddGBKGQT3Tg23Eyzn6EA_NCw4XAKnkwQ", + "dp": "aAdzphZQN595n3LYNU50P59sWeqlRCkuvvnZ_coDDdUGuFr3pKuGix7iP8is0EISuitD2VmjUCnhbhP3202bCKwfvm4Inz58OT6X4mg1xBNMys8mHPla6-UPsY9rie1IKu8suY7xX65FlaA2NT9XtfoE8tUVH5HoZR59N7EAX3k", + "dq": "mTkZDO-fgBCH4-7dmS2JIY7KpI897T2IsxVUwH4WXvastd1Jq9FuntGEKYu_HRbtawpEPbzg5M2dY97BVvB5xshKKhWIC8Lx87knapw19XOyIKEMY46rO9DNO-9waNXatH5zV96sY5RgOrgB7j0KMnFEYfIiIgnNfmT8NElB63c", + "n": "htq92ltGQrZv19TlhluoqmXjjRXw_NWEd0nPZsWrbLnr8lZ-gOxsjIsDMjb5HNDNmuAS7pg2d_o5ZZAY1sSjKf_EuUPZN-MOej8ZBOtrMxEH7e_t37kYIbbJSuzt55poZdRli6BE8CVDesS4W-wsFZ0MvUazAUADh3onARN7Arf3jwknm5CLafE_JzKrNKZadBElEFEAEu5y9n_SuTlemw3P81lOVmZmjGjfqtPx01O5aV_truMjrQa3NUivu1ihrjvJl0xc3rwJe7qDrfEqgvpBQ-vrAsvg3Jiz5Idj6cU3J0hNtV4ixYxcDQecNlgR7gBeIp3E8BXL1kGOOHYUtw" +} \ No newline at end of file diff --git a/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-verification-jwks.json b/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-verification-jwks.json new file mode 100644 index 000000000000..a6fd935a0a3b --- /dev/null +++ b/itests/hive-minikdc/src/test/resources/auth.jwt/jwt-verification-jwks.json @@ -0,0 +1,20 @@ +{ + "keys": [ + { + "kty": "RSA", + "e": "AQAB", + "alg": "RS256", + "kid": "819d1e61429dd3d3caef129c0ac2bae8c6d46fbc", + "use": "sig", + "n": 
"qfR12Bcs_hSL0Y1fN5TYZeUQIFmuVRYa210na81BFj91xxwtICJY6ckZCI3Jf0v2tPLOT_iKVk4WBCZ7AVJVvZqHuttkyrdFROMVTe6DwmcjbbkgACMVildTnHy9xy2KuX-OZsEYzgHuRgfe_Y-JN6LoxBYZx6VoBLpgK-F0Q-0O_bRgZhHifVG4ZzARjhgz0PvBb700GtOTHS6mQIfToPErbgqcowKN9k-mJqJr8xpXSHils-Yw97LHjICZmvA5B8EPNW28DwFOE5JrsPcyrFKOAYl4NcSYQgjl-17TWE5_tFdZ8Lz-srjiPMoHlBjZD1C7aO03LI-_9u8lVsktMw" + }, + { + "kty": "RSA", + "e": "AQAB", + "alg": "RS256", + "kid": "123", + "use": "sig", + "n": "zg12QaFTsez1EijOYRFzNZdowOt79ePqxCMQ-EEHynUhEZ6TIDnXfjWfuWocS1qRRglUUbHerEtmACUKPQShaG8uL0ZXiLqDr2QSuqrTtr2VUGesxZc6GiqkZlnWFNu5kSUvtemcKxWl8OLFf-5kNnGW4_4xM6BIwosYZnddfFqQT5IP6iTMZIUIKXxY4s1dadYRIiMteNutro67fhOLKabHkyC6ILE6f6VZsYbb_NXC5yC--7DiC2GYKzy7TKmaczuDfQZVgVY-nL9kTPIdhf334EYHQfYmLdvLc56g8-cxY3xh2GnwAj1JcT2u3hsS4KS05bUFHFnveO5uxIYKMQ" + } + ] +} \ No newline at end of file From 3dc8bc72a8a45a3cb80d07e3858454f1372230ad Mon Sep 17 00:00:00 2001 From: zdeng Date: Thu, 25 May 2023 16:45:51 +0800 Subject: [PATCH 6/8] publish Kerberos principal --- .../hive/minikdc/TestHiveAuthFactory.java | 4 +- .../apache/hive/service/auth/AuthType.java | 60 ++++++++++++++++--- .../hive/service/auth/HiveAuthFactory.java | 16 +---- .../hive/service/auth/saml/HiveSamlUtils.java | 4 -- .../cli/thrift/ThriftBinaryCLIService.java | 2 +- .../cli/thrift/ThriftHttpCLIService.java | 8 +-- .../service/cli/thrift/ThriftHttpServlet.java | 4 +- .../server/HS2ActivePassiveHARegistry.java | 3 +- .../hive/service/server/HiveServer2.java | 14 +---- .../hive/service/auth/TestAuthType.java | 3 +- .../cli/thrift/ThriftHttpServletTest.java | 5 +- 11 files changed, 73 insertions(+), 50 deletions(-) diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java index 0940c79c7215..36b25e11a39d 100644 --- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java +++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHiveAuthFactory.java @@ -57,7 +57,7 @@ public void testStartTokenManagerForMemoryTokenStore() throws Exception { Assert.assertNotNull(keyTabFile); hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, keyTabFile); - HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf); + HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf, false); Assert.assertNotNull(authFactory); Assert.assertEquals("org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory", authFactory.getAuthTransFactory().getClass().getName()); @@ -81,7 +81,7 @@ public void testStartTokenManagerForDBTokenStore() throws Exception { hiveConf.setVar(ConfVars.METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS, "org.apache.hadoop.hive.metastore.security.DBTokenStore"); - HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf); + HiveAuthFactory authFactory = new HiveAuthFactory(hiveConf, false); Assert.assertNotNull(authFactory); Assert.assertEquals("org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge$Server$TUGIAssumingTransportFactory", authFactory.getAuthTransFactory().getClass().getName()); diff --git a/service/src/java/org/apache/hive/service/auth/AuthType.java b/service/src/java/org/apache/hive/service/auth/AuthType.java index 83c86b3a2b3a..128a72a0a82d 100644 --- a/service/src/java/org/apache/hive/service/auth/AuthType.java +++ b/service/src/java/org/apache/hive/service/auth/AuthType.java @@ -18,9 +18,13 @@ package org.apache.hive.service.auth; +import com.google.common.annotations.VisibleForTesting; 
import com.google.common.collect.ImmutableSet;
 import org.apache.commons.lang3.EnumUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveServer2TransportMode;
 
 import java.util.ArrayList;
@@ -37,14 +41,13 @@
  * Throws an exception if the config value is not allowed.
  */
 public class AuthType {
-  static final Set<HiveAuthConstants.AuthTypes> PASSWORD_BASED_TYPES = ImmutableSet.of(
-      HiveAuthConstants.AuthTypes.LDAP, HiveAuthConstants.AuthTypes.CUSTOM,
-      HiveAuthConstants.AuthTypes.PAM, HiveAuthConstants.AuthTypes.NONE
-  );
+  static final Set<HiveAuthConstants.AuthTypes> PASSWORD_BASED_TYPES = ImmutableSet.of(HiveAuthConstants.AuthTypes.LDAP,
+      HiveAuthConstants.AuthTypes.CUSTOM, HiveAuthConstants.AuthTypes.PAM, HiveAuthConstants.AuthTypes.NONE);
   private final BitSet typeBits;
   private final List<HiveAuthConstants.AuthTypes> authTypes;
   private final HiveServer2TransportMode mode;
 
+  @VisibleForTesting
   public AuthType(String authTypes, HiveServer2TransportMode mode) {
     this.authTypes = new ArrayList<>();
     this.mode = mode;
@@ -74,14 +77,18 @@ private void verifyTypes(String authTypes) {
       throw new UnsupportedOperationException("NOSASL can't be along with other auth methods: " + authTypes);
     }
 
+    if (typeBits.get(HiveAuthConstants.AuthTypes.NONE.ordinal())) {
+      throw new UnsupportedOperationException("NONE can't be combined with other auth methods: " + authTypes);
+    }
+
     if (areAnyEnabled(PASSWORD_BASED_TYPES) && !isExactlyOneEnabled(PASSWORD_BASED_TYPES)) {
       throw new RuntimeException("Multiple password based auth methods found: " + authTypes);
     }
 
-    if ((typeBits.get(HiveAuthConstants.AuthTypes.SAML.ordinal()) || typeBits.get(HiveAuthConstants.AuthTypes.JWT.ordinal())) &&
-        (mode == HiveServer2TransportMode.all || mode == HiveServer2TransportMode.binary)) {
-      throw new UnsupportedOperationException("HiveServer2 binary mode doesn't support JWT and SAML," +
-          " please consider using http mode only");
+    if ((typeBits.get(HiveAuthConstants.AuthTypes.SAML.ordinal()) || typeBits.get(
+        HiveAuthConstants.AuthTypes.JWT.ordinal())) && (mode == HiveServer2TransportMode.all || mode == HiveServer2TransportMode.binary)) {
+      throw new UnsupportedOperationException(
+          "HiveServer2 binary mode doesn't support JWT and SAML," + " please consider using http mode only");
     }
   }
 
@@ -140,4 +147,41 @@ public boolean isLoadedFirst(HiveAuthConstants.AuthTypes type) {
   public boolean isPasswordBasedAuth(HiveAuthConstants.AuthTypes type) {
     return PASSWORD_BASED_TYPES.contains(type);
   }
+
+  /**
+   * Checks the configuration to see whether the Kerberos auth method is enabled.
+   * @return true if Kerberos is enabled, otherwise false.
+   */
+  public static boolean isKerberosAuthMode(Configuration conf) {
+    AuthType authType = authTypeFromConf(conf, true);
+    return authType.isEnabled(HiveAuthConstants.AuthTypes.KERBEROS);
+  }
+
+  /**
+   * Checks the configuration to see whether the SAML auth method is enabled.
+   * @return true if SAML is enabled, otherwise false.
+ */ + public static boolean isSamlAuthMode(Configuration conf) { + AuthType authType = authTypeFromConf(conf, true); + return authType.isEnabled(HiveAuthConstants.AuthTypes.SAML); + } + + public static AuthType authTypeFromConf(Configuration conf, boolean isHttpMode) { + String authTypeStr = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION); + boolean isAuthTypeEmpty = StringUtils.isEmpty(authTypeStr); + final HiveServer2TransportMode transportMode; + if (isHttpMode) { + transportMode = HiveServer2TransportMode.http; + if (isAuthTypeEmpty) { + authTypeStr = HiveAuthConstants.AuthTypes.NOSASL.getAuthName(); + } + } else { + transportMode = HiveServer2TransportMode.binary; + if (isAuthTypeEmpty) { + authTypeStr = HiveAuthConstants.AuthTypes.NONE.getAuthName(); + } + } + return new AuthType(authTypeStr, transportMode); + } + } diff --git a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java index bd73f14560a0..8ff99cfcb1fb 100644 --- a/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java +++ b/service/src/java/org/apache/hive/service/auth/HiveAuthFactory.java @@ -29,7 +29,6 @@ import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; -import org.apache.hadoop.hive.conf.HiveServer2TransportMode; import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim; @@ -58,28 +57,17 @@ public class HiveAuthFactory { private static final Logger LOG = LoggerFactory.getLogger(HiveAuthFactory.class); private HadoopThriftAuthBridge.Server saslServer; - private final String transportMode; private final HiveConf conf; private final AuthType authType; private String hadoopAuth; private MetastoreDelegationTokenManager delegationTokenManager = null; - public HiveAuthFactory(HiveConf conf) throws TTransportException { + public HiveAuthFactory(HiveConf conf, boolean isHttpMode) throws TTransportException { this.conf = conf; - transportMode = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE); - String authTypeStr = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION); // ShimLoader.getHadoopShims().isSecurityEnabled() will only check that // hadoopAuth is not simple, it does not guarantee it is kerberos hadoopAuth = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple"); - // In http mode we use NOSASL as the default auth type - if (authTypeStr == null) { - if ("http".equalsIgnoreCase(transportMode)) { - authTypeStr = HiveAuthConstants.AuthTypes.NOSASL.getAuthName(); - } else { - authTypeStr = HiveAuthConstants.AuthTypes.NONE.getAuthName(); - } - } - authType = new AuthType(authTypeStr, HiveServer2TransportMode.valueOf(transportMode.toLowerCase())); + authType = AuthType.authTypeFromConf(conf, isHttpMode); if (isSASLWithKerberizedHadoop()) { saslServer = HadoopThriftAuthBridge.getBridge().createServer( diff --git a/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java b/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java index ae01251b65f8..5a5700b6e4be 100644 --- a/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java +++ b/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java @@ -35,10 +35,6 @@ public class HiveSamlUtils { public static final String STATUS_KEY = "status"; public static final String MESSAGE_KEY = "message"; - public static boolean 
isSamlAuthMode(String authType) { - return authType.toLowerCase().contains(HiveAuthConstants.AuthTypes.SAML.toString().toLowerCase()); - } - /** * Gets the configured callback url path for the SAML service provider. Also, makes sure * that the port number is same as the HTTP thrift port. diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java index 8fc728573dbc..edb7631fabf1 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftBinaryCLIService.java @@ -73,7 +73,7 @@ protected void initServer() { new ThreadFactoryWithGarbageCleanup(threadPoolName)); // Thrift configs - hiveAuthFactory = new HiveAuthFactory(hiveConf); + hiveAuthFactory = new HiveAuthFactory(hiveConf, false); TTransportFactory transportFactory = hiveAuthFactory.getAuthTransFactory(); TProcessorFactory processorFactory = hiveAuthFactory.getAuthProcFactory(this); TServerSocket serverSocket = null; diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java index e6273f454319..160a65735b82 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpCLIService.java @@ -39,6 +39,7 @@ import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hive.service.ServiceUtils; +import org.apache.hive.service.auth.AuthType; import org.apache.hive.service.auth.HiveAuthFactory; import org.apache.hive.service.auth.saml.HiveSamlHttpServlet; import org.apache.hive.service.auth.saml.HiveSamlUtils; @@ -183,7 +184,7 @@ public void onClosed(Connection connection) { server.addConnector(connector); // Thrift configs - hiveAuthFactory = new HiveAuthFactory(hiveConf); + hiveAuthFactory = new HiveAuthFactory(hiveConf, true); TProcessor processor = new TCLIService.Processor(this); TProtocolFactory protocolFactory = new TBinaryProtocol.Factory(); // Set during the init phase of HiveServer2 if auth mode is kerberos @@ -191,8 +192,7 @@ public void onClosed(Connection connection) { UserGroupInformation serviceUGI = cliService.getServiceUGI(); // UGI for the http/_HOST (SPNego) principal UserGroupInformation httpUGI = cliService.getHttpUGI(); - String authType = hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION); - TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, authType, serviceUGI, httpUGI, + TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, serviceUGI, httpUGI, hiveAuthFactory, hiveConf); // Context handler @@ -219,7 +219,7 @@ public void onClosed(Connection connection) { server.setHandler(context); } context.addServlet(new ServletHolder(thriftHttpServlet), httpPath); - if (HiveSamlUtils.isSamlAuthMode(authType)) { + if (AuthType.isSamlAuthMode(hiveConf)) { String ssoPath = HiveSamlUtils.getCallBackPath(hiveConf); context.addServlet(new ServletHolder(new HiveSamlHttpServlet(hiveConf)), ssoPath); } diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java index c3b23527c741..9d5445659f16 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java +++ 
b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java @@ -118,11 +118,11 @@ public class ThriftHttpServlet extends TServlet { private JWTValidator jwtValidator; public ThriftHttpServlet(TProcessor processor, TProtocolFactory protocolFactory, - String authType, UserGroupInformation serviceUGI, UserGroupInformation httpUGI, + UserGroupInformation serviceUGI, UserGroupInformation httpUGI, HiveAuthFactory hiveAuthFactory, HiveConf hiveConf) throws Exception { super(processor, protocolFactory); this.hiveConf = hiveConf; - this.authType = new AuthType(authType, HiveServer2TransportMode.http); + this.authType = AuthType.authTypeFromConf(hiveConf, true); this.serviceUGI = serviceUGI; this.httpUGI = httpUGI; this.hiveAuthFactory = hiveAuthFactory; diff --git a/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java b/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java index 18cfa8d3111b..f7dbe3c48316 100644 --- a/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java +++ b/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java @@ -45,6 +45,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hive.service.ServiceException; +import org.apache.hive.service.auth.AuthType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -360,7 +361,7 @@ private Map getConfsToPublish() { // Auth specific confs confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, conf.get(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname)); - if (HiveServer2.isKerberosAuthMode(conf)) { + if (AuthType.isKerberosAuthMode(conf)) { confsToPublish.put(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname, conf.get(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname)); } diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java index 5dde31e40206..e8eaab550aef 100644 --- a/service/src/java/org/apache/hive/service/server/HiveServer2.java +++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java @@ -104,6 +104,7 @@ import org.apache.hive.http.security.PamAuthenticator; import org.apache.hive.service.CompositeService; import org.apache.hive.service.ServiceException; +import org.apache.hive.service.auth.AuthType; import org.apache.hive.service.auth.saml.HiveSaml2Client; import org.apache.hive.service.auth.saml.HiveSamlUtils; import org.apache.hive.service.cli.CLIService; @@ -501,14 +502,6 @@ public static boolean isAllTransportMode(HiveConf hiveConf) { return false; } - public static boolean isKerberosAuthMode(Configuration hiveConf) { - String authMode = hiveConf.get(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname); - if (authMode != null && (authMode.equalsIgnoreCase("KERBEROS"))) { - return true; - } - return false; - } - /** * ACLProvider for providing appropriate ACLs to CuratorFrameworkFactory */ @@ -573,7 +566,7 @@ private void addConfsToPublish(HiveConf hiveConf, Map confsToPub // Auth specific confs confsToPublish.put(ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION)); - if (isKerberosAuthMode(hiveConf)) { + if (AuthType.isKerberosAuthMode(hiveConf)) { confsToPublish.put(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname, hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL)); } @@ -1018,8 +1011,7 @@ public synchronized void stop() { if 
(zKClientForPrivSync != null) {
       zKClientForPrivSync.close();
     }
-    if (hiveConf != null && HiveSamlUtils
-        .isSamlAuthMode(hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION))) {
+    if (hiveConf != null && AuthType.isSamlAuthMode(hiveConf)) {
       // this is mostly for testing purposes to make sure that SAML client is
       // reinitialized after a HS2 is restarted.
       HiveSaml2Client.shutdown();
diff --git a/service/src/test/org/apache/hive/service/auth/TestAuthType.java b/service/src/test/org/apache/hive/service/auth/TestAuthType.java
index af95d97f1c75..09b0207972d3 100644
--- a/service/src/test/org/apache/hive/service/auth/TestAuthType.java
+++ b/service/src/test/org/apache/hive/service/auth/TestAuthType.java
@@ -137,7 +137,7 @@ public void testMultipleAuthMethods() {
         new EntryForTest("KERBEROS,SAML,LDAP", HiveServer2TransportMode.http, false),
         new EntryForTest("KERBEROS,LDAP", HiveServer2TransportMode.all, false),
         new EntryForTest("NONE,SAML", HiveServer2TransportMode.all, true),
-        new EntryForTest("NONE,SAML", HiveServer2TransportMode.http, false),
+        new EntryForTest("NONE,SAML", HiveServer2TransportMode.http, true),
         new EntryForTest("NOSASL,SAML", HiveServer2TransportMode.all, true),
         new EntryForTest("SAML,LDAP,PAM,CUSTOM", HiveServer2TransportMode.http, true),
         new EntryForTest("SAML,OTHER", HiveServer2TransportMode.all, true),
@@ -170,4 +170,5 @@ private class EntryForTest {
       this.shouldThrowException = shouldThrowException;
     }
   }
+
 }
diff --git a/service/src/test/org/apache/hive/service/cli/thrift/ThriftHttpServletTest.java b/service/src/test/org/apache/hive/service/cli/thrift/ThriftHttpServletTest.java
index c490cc2ebbc1..28ff60ecb1f9 100644
--- a/service/src/test/org/apache/hive/service/cli/thrift/ThriftHttpServletTest.java
+++ b/service/src/test/org/apache/hive/service/cli/thrift/ThriftHttpServletTest.java
@@ -44,9 +44,10 @@ public class ThriftHttpServletTest {
 
   @Before
   public void setUp() throws Exception {
+    HiveConf hiveConf = new HiveConf();
     String authType = HiveAuthConstants.AuthTypes.KERBEROS.toString();
-    thriftHttpServlet = new ThriftHttpServlet(null, null, authType, null, null, null,
-        new HiveConf());
+    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION, authType);
+    thriftHttpServlet = new ThriftHttpServlet(null, null, null, null, null, hiveConf);
   }
 
   @Test
From 03920affdd43bf2560c92d68ac18355c0167a0f5 Mon Sep 17 00:00:00 2001
From: zdeng
Date: Fri, 26 May 2023 22:30:24 +0800
Subject: [PATCH 7/8] review1
---
 itests/hive-minikdc/pom.xml                                  | 2 +-
 .../org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java | 1 -
 .../java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java  | 1 -
 itests/hive-unit/pom.xml                                     | 2 +-
 itests/pom.xml                                               | 1 +
 service/src/java/org/apache/hive/service/auth/AuthType.java  | 5 +++--
 .../apache/hive/service/cli/thrift/ThriftHttpServlet.java    | 1 -
 7 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/itests/hive-minikdc/pom.xml b/itests/hive-minikdc/pom.xml
index 62b4d68ecf45..c71c5ce4c004 100644
--- a/itests/hive-minikdc/pom.xml
+++ b/itests/hive-minikdc/pom.xml
@@ -32,7 +32,7 @@
     <dependency>
      <groupId>com.github.tomakehurst</groupId>
      <artifactId>wiremock-jre8-standalone</artifactId>
-      <version>2.32.0</version>
+      <version>${wiremock-jre8.version}</version>
      <scope>test</scope>
    </dependency>
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
index 8bd787572b88..52f30c5787c7 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
@@ -90,7 +90,6 @@ public void setUpBefore() throws Exception {
     Class.forName(MiniHS2.getJdbcDriverName());
     miniHiveKdc = new MiniHiveKdc();
     HiveConf hiveConf = new HiveConf();
-    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL,
         "ldap://localhost:" + ldapServer.getPort());
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
index e6c31413a53c..9400dc50f0dd 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
@@ -79,7 +79,6 @@ public static void setUpBeforeClass() throws Exception {
         .withBody(Files.readAllBytes(jwtVerificationJWKSFile.toPath()))));
     miniHiveKdc = new MiniHiveKdc();
     HiveConf hiveConf = new HiveConf();
-    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HiveServer2TransportMode.http.name());
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION_JWT_JWKS_URL, "http://localhost:" +
diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml
index 7bed6bff4cdb..1ce91327028c 100644
--- a/itests/hive-unit/pom.xml
+++ b/itests/hive-unit/pom.xml
@@ -194,7 +194,7 @@
     <dependency>
      <groupId>com.github.tomakehurst</groupId>
      <artifactId>wiremock-jre8-standalone</artifactId>
-      <version>2.32.0</version>
+      <version>${wiremock-jre8.version}</version>
      <scope>test</scope>
    </dependency>
diff --git a/itests/pom.xml b/itests/pom.xml
index b42c55534d42..1a97ef6cb53a 100644
--- a/itests/pom.xml
+++ b/itests/pom.xml
@@ -25,6 +25,7 @@
   <name>Hive Integration - Parent</name>
   <properties>
     <hive.path.to.root>..</hive.path.to.root>
+    <wiremock-jre8.version>2.32.0</wiremock-jre8.version>
   </properties>
   <modules>
     <module>custom-serde</module>
diff --git a/service/src/java/org/apache/hive/service/auth/AuthType.java b/service/src/java/org/apache/hive/service/auth/AuthType.java
index 128a72a0a82d..104483049f22 100644
--- a/service/src/java/org/apache/hive/service/auth/AuthType.java
+++ b/service/src/java/org/apache/hive/service/auth/AuthType.java
@@ -85,8 +85,9 @@ private void verifyTypes(String authTypes) {
       throw new RuntimeException("Multiple password based auth methods found: " + authTypes);
     }
 
-    if ((typeBits.get(HiveAuthConstants.AuthTypes.SAML.ordinal()) || typeBits.get(
-        HiveAuthConstants.AuthTypes.JWT.ordinal())) && (mode == HiveServer2TransportMode.all || mode == HiveServer2TransportMode.binary)) {
+    if ((typeBits.get(HiveAuthConstants.AuthTypes.SAML.ordinal()) ||
+        typeBits.get(HiveAuthConstants.AuthTypes.JWT.ordinal())) &&
+        (mode == HiveServer2TransportMode.all || mode == HiveServer2TransportMode.binary)) {
       throw new UnsupportedOperationException(
           "HiveServer2 binary mode doesn't support JWT and SAML," + " please consider using http mode only");
     }
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
index 9d5445659f16..100f5f00ce44 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftHttpServlet.java
@@ -47,7 +47,6 @@
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.conf.HiveServer2TransportMode;
 import org.apache.hadoop.hive.shims.HadoopShims.KerberosNameShim;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
From a3d1dc529c978da7c3c3f0c031ad8aa88316ef9a Mon Sep 17 00:00:00 2001
From: zdeng
Date: Sat, 27 May 2023 21:18:39 +0800
Subject: [PATCH 8/8] fix ut
---
 .../org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java     | 1 +
 .../test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java | 1 +
 2 files changed, 2 insertions(+)

diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
index 52f30c5787c7..50b00541aa65 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2AuthMechsWithMiniKdc.java
@@ -93,6 +93,7 @@ public void setUpBefore() throws Exception {
     hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false);
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_URL,
         "ldap://localhost:" + ldapServer.getPort());
+    hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
     hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_USERDNPATTERN,
         "uid=%s,ou=People,dc=example,dc=com");
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
index 9400dc50f0dd..da7bd7a4e55c 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHS2JWTWithMiniKdc.java
@@ -80,6 +80,7 @@ public static void setUpBeforeClass() throws Exception {
     miniHiveKdc = new MiniHiveKdc();
     HiveConf hiveConf = new HiveConf();
hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); + hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_TRANSPORT_MODE, HiveServer2TransportMode.http.name()); hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION_JWT_JWKS_URL, "http://localhost:" + MOCK_JWKS_SERVER_PORT + "/jwks");
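
Usage note (editor's sketch, not part of the patches above): with KERBEROS plus a
second mechanism such as JWT enabled on the HTTP endpoint, the client selects the
mechanism entirely through the JDBC URL. A minimal sketch, assuming hypothetical
baseHttpUrl, serverPrincipal and signedJwt values shaped like the MiniHS2 ones used
in these tests:

    // Kerberos (SPNEGO): selected because the URL carries a "principal" part
    Connection krb = DriverManager.getConnection(
        baseHttpUrl + "default;transportMode=http;httpPath=cliservice;principal=" + serverPrincipal);

    // JWT bearer auth: selected by "auth=jwt" plus the signed token
    Connection jwt = DriverManager.getConnection(
        baseHttpUrl + "default;transportMode=http;httpPath=cliservice;auth=jwt;jwt=" + signedJwt);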