Skip to content

Commit

Permalink
[#3489] improvement(hive-catalog): Add user authentication e2e test f…
Browse files Browse the repository at this point in the history
…or Hive catalog (#3525)

### What changes were proposed in this pull request?

Add e2e tests to test end-to-end user authentication. 

### Why are the changes needed?

Verify that user authentication works

Fix: #3489

### Does this PR introduce _any_ user-facing change?

N/A

### How was this patch tested?

Test locally.
  • Loading branch information
yuqi1129 committed May 27, 2024
1 parent 844a8e6 commit e745864
Show file tree
Hide file tree
Showing 17 changed files with 430 additions and 27 deletions.
1 change: 1 addition & 0 deletions .github/workflows/backend-integration-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -113,3 +113,4 @@ jobs:
distribution/package/logs/gravitino-server.log
catalogs/**/*.log
catalogs/**/*.tar
distribution/**/*.log
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,9 @@
/** Helper methods to create SortOrders to pass into Gravitino. */
public class SortOrders {

/** NONE is used to indicate that there is no sort order. */
public static final SortOrder[] NONE = new SortOrder[0];

/**
* Create a sort order by the given expression with the ascending sort direction and nulls first
* ordering.
Expand Down
2 changes: 2 additions & 0 deletions bin/gravitino.sh
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,8 @@ if [ "$JVM_VERSION" -eq 17 ]; then
JAVA_OPTS+=" --add-opens java.security.jgss/sun.security.krb5=ALL-UNNAMED"
fi

#JAVA_OPTS+=" -Djava.security.krb5.conf=/etc/krb5.conf"

addJarInDir "${GRAVITINO_HOME}/libs"

case "${1}" in
Expand Down
1 change: 1 addition & 0 deletions catalogs/catalog-hive/build.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,7 @@ tasks.test {

doFirst {
environment("GRAVITINO_CI_HIVE_DOCKER_IMAGE", "datastrato/gravitino-ci-hive:0.1.12")
environment("GRAVITINO_CI_KERBEROS_HIVE_DOCKER_IMAGE", "datastrato/gravitino-ci-kerberos-hive:0.1.1")
}

val init = project.extra.get("initIntegrationTest") as (Test) -> Unit
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.lang.reflect.Method;
import java.time.Instant;
import java.util.Arrays;
import java.util.List;
Expand All @@ -74,6 +75,7 @@
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.util.KerberosName;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -144,7 +146,6 @@ public void initialize(Map<String, String> conf, CatalogInfo info) throws Runtim
// and gravitinoConfig will be passed to Hive config, and gravitinoConfig has higher priority
mergeConfig.forEach(hadoopConf::set);
hiveConf = new HiveConf(hadoopConf, HiveCatalogOperations.class);
UserGroupInformation.setConfiguration(hadoopConf);

initKerberosIfNecessary(conf, hadoopConf);

Expand Down Expand Up @@ -173,7 +174,7 @@ private void initKerberosIfNecessary(Map<String, String> conf, Configuration had

String keytabUri =
(String)
catalogPropertiesMetadata.getOrDefault(conf, HiveCatalogPropertiesMeta.KET_TAB_URI);
catalogPropertiesMetadata.getOrDefault(conf, HiveCatalogPropertiesMeta.KEY_TAB_URI);
Preconditions.checkArgument(StringUtils.isNotBlank(keytabUri), "Keytab uri can't be blank");
// TODO: Support to download the file from Kerberos HDFS
Preconditions.checkArgument(
Expand Down Expand Up @@ -201,6 +202,10 @@ private void initKerberosIfNecessary(Map<String, String> conf, Configuration had
new ScheduledThreadPoolExecutor(
1, getThreadFactory(String.format("Kerberos-check-%s", info.id())));

LOG.info("krb5 path: {}", System.getProperty("java.security.krb5.conf"));
refreshKerberosConfig();
KerberosName.resetDefaultRealm();
UserGroupInformation.setConfiguration(hadoopConf);
UserGroupInformation.loginUserFromKeytab(catalogPrincipal, keytabFile.getAbsolutePath());

UserGroupInformation kerberosLoginUgi = UserGroupInformation.getCurrentUser();
Expand All @@ -224,10 +229,28 @@ private void initKerberosIfNecessary(Map<String, String> conf, Configuration had

} catch (IOException ioe) {
throw new UncheckedIOException(ioe);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}

private void refreshKerberosConfig() {
Class<?> classRef;
try {
if (System.getProperty("java.vendor").contains("IBM")) {
classRef = Class.forName("com.ibm.security.krb5.internal.Config");
} else {
classRef = Class.forName("sun.security.krb5.Config");
}

Method refershMethod = classRef.getMethod("refresh");
refershMethod.invoke(null);
} catch (Exception e) {
throw new RuntimeException(e);
}
}

@VisibleForTesting
int getClientPoolSize(Map<String, String> conf) {
return (int) catalogPropertiesMetadata.getOrDefault(conf, CLIENT_POOL_SIZE);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ public class HiveCatalogPropertiesMeta extends BaseCatalogPropertiesMetadata {

public static final boolean DEFAULT_IMPERSONATION_ENABLE = false;

public static final String KET_TAB_URI = "kerberos.keytab-uri";
public static final String KEY_TAB_URI = "kerberos.keytab-uri";

public static final String PRINCIPAL = "kerberos.principal";

Expand Down Expand Up @@ -69,9 +69,9 @@ public class HiveCatalogPropertiesMeta extends BaseCatalogPropertiesMetadata {
false,
false))
.put(
KET_TAB_URI,
KEY_TAB_URI,
PropertyEntry.stringImmutablePropertyEntry(
KET_TAB_URI, "The uri of key tab for the catalog", false, null, false, false))
KEY_TAB_URI, "The uri of key tab for the catalog", false, null, false, false))
.put(
PRINCIPAL,
PropertyEntry.stringImmutablePropertyEntry(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ public Object doAs(
ops.getClientPool()
.run(
client -> {
return client.getDelegationToken(realUser.getUserName(), principal.getName());
return client.getDelegationToken(principal.getName(), realUser.getUserName());
});

Token<DelegationTokenIdentifier> delegationToken = new Token<DelegationTokenIdentifier>();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
import static com.datastrato.gravitino.catalog.hive.HiveCatalogPropertiesMeta.CLIENT_POOL_SIZE;
import static com.datastrato.gravitino.catalog.hive.HiveCatalogPropertiesMeta.FETCH_TIMEOUT_SEC;
import static com.datastrato.gravitino.catalog.hive.HiveCatalogPropertiesMeta.IMPERSONATION_ENABLE;
import static com.datastrato.gravitino.catalog.hive.HiveCatalogPropertiesMeta.KET_TAB_URI;
import static com.datastrato.gravitino.catalog.hive.HiveCatalogPropertiesMeta.KEY_TAB_URI;
import static com.datastrato.gravitino.catalog.hive.HiveCatalogPropertiesMeta.METASTORE_URIS;
import static com.datastrato.gravitino.catalog.hive.HiveCatalogPropertiesMeta.PRINCIPAL;
import static com.datastrato.gravitino.connector.BaseCatalog.CATALOG_BYPASS_PREFIX;
Expand Down Expand Up @@ -81,7 +81,7 @@ void testPropertyMeta() {
Assertions.assertFalse(
propertyEntryMap.get(CLIENT_POOL_CACHE_EVICTION_INTERVAL_MS).isRequired());
Assertions.assertFalse(propertyEntryMap.get(IMPERSONATION_ENABLE).isRequired());
Assertions.assertFalse(propertyEntryMap.get(KET_TAB_URI).isRequired());
Assertions.assertFalse(propertyEntryMap.get(KEY_TAB_URI).isRequired());
Assertions.assertFalse(propertyEntryMap.get(PRINCIPAL).isRequired());
Assertions.assertFalse(propertyEntryMap.get(CHECK_INTERVAL_SEC).isRequired());
Assertions.assertFalse(propertyEntryMap.get(FETCH_TIMEOUT_SEC).isRequired());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -199,7 +199,9 @@ public static void startup() throws Exception {

@AfterAll
public static void stop() throws IOException {
client.dropMetalake(metalakeName);
if (client != null) {
client.dropMetalake(metalakeName);
}
if (hiveClientPool != null) {
hiveClientPool.close();
}
Expand All @@ -216,6 +218,9 @@ public static void stop() throws IOException {
} catch (Exception e) {
LOG.error("Failed to close CloseableGroup", e);
}

AbstractIT.customConfigs.clear();
AbstractIT.client = null;
}

@AfterEach
Expand Down
Loading

0 comments on commit e745864

Please sign in to comment.