Commit 9b9e422
BaseTestImpersonation refactor
kingswanwho committed Jan 30, 2023
1 parent 0bf1a94 commit 9b9e422
Showing 9 changed files with 270 additions and 191 deletions.
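This commit migrates Drill's impersonation tests from the legacy PlanTestBase/BaseTestQuery statics to the ClusterTest/ClusterFixture framework: test(...) becomes run(...), the inherited static testBuilder() becomes client.testBuilder(), getDrillbitContext().getStorage() becomes cluster.storageRegistry(), and cluster startup goes through ClusterFixtureBuilder. The sketch below shows the target idiom for reference only; it is not part of the commit, the class name and queries are hypothetical, and the baseline value assumes the standard employee.json fixture on the test classpath.

import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.ClusterFixtureBuilder;
import org.apache.drill.test.ClusterTest;
import org.junit.BeforeClass;
import org.junit.Test;

public class ExampleClusterTest extends ClusterTest {

  @BeforeClass
  public static void setup() throws Exception {
    // Single-drillbit cluster; dirTestWatcher is inherited from ClusterTest.
    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher).clusterSize(1);
    startCluster(builder);
  }

  @Test
  public void exampleQuery() throws Exception {
    // run(...) replaces BaseTestQuery.test(...)
    run("SELECT * FROM cp.`employee.json` LIMIT 1");

    // client.testBuilder() replaces the inherited static testBuilder()
    client.testBuilder()
        .sqlQuery("SELECT employee_id FROM cp.`employee.json` ORDER BY employee_id LIMIT 1")
        .ordered()
        .baselineColumns("employee_id")
        .baselineValues(1L)
        .go();
  }
}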
@@ -26,6 +26,7 @@
import org.apache.drill.exec.hive.HiveTestUtilities;
import org.apache.drill.exec.impersonation.BaseTestImpersonation;
import org.apache.drill.exec.store.hive.HiveStoragePluginConfig;
+import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.TestBuilder;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -94,8 +95,8 @@ protected static void prepHiveConfAndData() throws Exception {
whDir = hiveConf.get(ConfVars.METASTOREWAREHOUSE.varname);
FileSystem.mkdirs(fs, new Path(whDir), new FsPermission((short) 0777));

-studentData = getPhysicalFileFromResource("student.txt");
-voterData = getPhysicalFileFromResource("voter.txt");
+studentData = ClusterFixture.getResource("student.txt");
+voterData = ClusterFixture.getResource("voter.txt");
}

protected static void startHiveMetaStore() throws Exception {
@@ -143,7 +144,7 @@ protected static Path getWhPathForHiveObject(final String dbName, final String t
}

protected static void addHiveStoragePlugin(final Map<String, String> hiveConfig) throws Exception {
-getDrillbitContext().getStorage().put(hivePluginName, createHiveStoragePlugin(hiveConfig));
+cluster.storageRegistry().put(hivePluginName, createHiveStoragePlugin(hiveConfig));
}

protected void showTablesHelper(final String db, List<String> expectedTables) throws Exception {
@@ -192,7 +193,7 @@ public static void stopHiveMetaStore() throws Exception {

static void queryView(String viewName) throws Exception {
String query = String.format("SELECT rownum FROM %s.tmp.%s ORDER BY rownum LIMIT 1", MINI_DFS_STORAGE_PLUGIN_NAME, viewName);
-testBuilder()
+client.testBuilder()
.sqlQuery(query)
.unOrdered()
.baselineColumns("rownum")
@@ -206,8 +206,8 @@ public void user0_showTables() throws Exception {
public void user0_allowed_g_student_user0() throws Exception {
// SELECT on "student_user0" table is granted to user "user0"
updateClient(org1Users[0]);
test("USE " + hivePluginName + "." + db_general);
test(String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_student_user0));
run("USE " + hivePluginName + "." + db_general);
run(String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_student_user0));
}

@Test
@@ -220,7 +220,7 @@ public void user0_forbidden_g_voter_role0() throws Exception {
// SELECT on table "student_user0" is NOT granted to user "user0" directly or indirectly through role "role0" as
// user "user0" is not part of role "role0"
updateClient(org1Users[0]);
test("USE " + hivePluginName + "." + db_general);
run("USE " + hivePluginName + "." + db_general);
final String query = String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_voter_role0);
errorMsgTestHelper(query, "Principal [name=user0_1, type=USER] does not have following privileges for " +
"operation QUERY [[SELECT] on Object [type=TABLE_OR_VIEW, name=db_general.voter_role0]]\n");
@@ -262,7 +262,7 @@ public void user1_showTables() throws Exception {
public void user1_forbidden_g_student_user0() throws Exception {
// SELECT on table "student_user0" is NOT granted to user "user1"
updateClient(org1Users[1]);
test("USE " + hivePluginName + "." + db_general);
run("USE " + hivePluginName + "." + db_general);
final String query = String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_student_user0);
errorMsgTestHelper(query, "Principal [name=user1_1, type=USER] does not have following privileges for " +
"operation QUERY [[SELECT] on Object [type=TABLE_OR_VIEW, name=db_general.student_user0]]\n");
@@ -277,8 +277,8 @@ public void user1_forbidden_vw_student_user0() throws Exception {
public void user1_allowed_g_voter_role0() throws Exception {
// SELECT on "voter_role0" table is granted to role "role0" and user "user1" is part the role "role0"
updateClient(org1Users[1]);
test("USE " + hivePluginName + "." + db_general);
test(String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_voter_role0));
run("USE " + hivePluginName + "." + db_general);
run(String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_voter_role0));
}

@Test
@@ -291,7 +291,7 @@ public void user1_allowed_g_voter_role0_but_forbidden_g_student_user2() throws E
// SELECT on "voter_role0" table is granted to role "role0" and user "user1" is part the role "role0"
// SELECT on "student_user2" table is NOT granted to either role "role0" or user "user1"
updateClient(org1Users[1]);
test("USE " + hivePluginName + "." + db_general);
run("USE " + hivePluginName + "." + db_general);
final String query =
String.format("SELECT * FROM %s v JOIN %s s on v.name = s.name limit 2;", g_voter_role0, g_student_user2);
errorMsgTestHelper(query, "Principal [name=user1_1, type=USER] does not have following privileges for " +
@@ -303,7 +303,7 @@ public void user1_allowed_vw_voter_role0_but_forbidden_vw_student_user2() throws
// SELECT on "vw_voter_role0" table is granted to role "role0" and user "user1" is part the role "role0"
// SELECT on "vw_student_user2" table is NOT granted to either role "role0" or user "user1"
updateClient(org1Users[1]);
test("USE " + hivePluginName + "." + db_general);
run("USE " + hivePluginName + "." + db_general);
final String query =
String.format("SELECT * FROM %s v JOIN %s s on v.name = s.name limit 2;", vw_voter_role0, vw_student_user2);
errorMsgTestHelper(query, "Principal [name=user1_1, type=USER] does not have following privileges for " +
@@ -326,8 +326,8 @@ public void user1_allowed_v_student_u1g1_750() throws Exception {
public void user2_allowed_g_voter_role0() throws Exception {
// SELECT on "voter_role0" table is granted to role "role0" and user "user2" is part the role "role0"
updateClient(org1Users[2]);
test("USE " + hivePluginName + "." + db_general);
test(String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_voter_role0));
run("USE " + hivePluginName + "." + db_general);
run(String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_voter_role0));
}

@Test
@@ -339,8 +339,8 @@ public void user2_allowed_vw_voter_role0() throws Exception {
public void user2_allowed_g_student_user2() throws Exception {
// SELECT on "student_user2" table is granted to user "user2"
updateClient(org1Users[2]);
test("USE " + hivePluginName + "." + db_general);
test(String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_student_user2));
run("USE " + hivePluginName + "." + db_general);
run(String.format("SELECT * FROM %s ORDER BY name LIMIT 2", g_student_user2));
}

@Test
@@ -353,15 +353,15 @@ public void user2_allowed_g_voter_role0_and_g_student_user2() throws Exception {
// SELECT on "voter_role0" table is granted to role "role0" and user "user2" is part the role "role0"
// SELECT on "student_user2" table is granted to user "user2"
updateClient(org1Users[2]);
test("USE " + hivePluginName + "." + db_general);
test(String.format("SELECT * FROM %s v JOIN %s s on v.name = s.name limit 2;", g_voter_role0, g_student_user2));
run("USE " + hivePluginName + "." + db_general);
run(String.format("SELECT * FROM %s v JOIN %s s on v.name = s.name limit 2;", g_voter_role0, g_student_user2));
}

@Test
public void user2_allowed_vw_voter_role0_and_vw_student_user2() throws Exception {
updateClient(org1Users[2]);
test("USE " + hivePluginName + "." + db_general);
test(String.format("SELECT * FROM %s v JOIN %s s on v.name = s.name limit 2;", vw_voter_role0, vw_student_user2));
run("USE " + hivePluginName + "." + db_general);
run(String.format("SELECT * FROM %s v JOIN %s s on v.name = s.name limit 2;", vw_voter_role0, vw_student_user2));
}

@Test
@@ -386,7 +386,7 @@ private static void queryHiveView(String usr, String viewName) throws Exception
String query = String.format("SELECT COUNT(*) AS rownum FROM %s.%s.%s",
hivePluginName, db_general, viewName);
updateClient(usr);
-testBuilder()
+client.testBuilder()
.sqlQuery(query)
.unOrdered()
.baselineColumns("rownum")
@@ -872,7 +872,7 @@ public static void shutdown() throws Exception {
}

private static void queryHiveTableOrView(String db, String table) throws Exception {
test(String.format("SELECT * FROM hive.%s.%s LIMIT 2", db, table));
run(String.format("SELECT * FROM hive.%s.%s LIMIT 2", db, table));
}

private static void queryTableNotFound(String db, String table) throws Exception {
@@ -17,16 +17,18 @@
*/
package org.apache.drill.exec.impersonation;

-import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
-import org.apache.drill.shaded.guava.com.google.common.base.Strings;
import org.apache.commons.io.FileUtils;
-import org.apache.drill.PlanTestBase;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.dotdrill.DotDrillType;
import org.apache.drill.exec.store.StoragePluginRegistry;
import org.apache.drill.exec.store.dfs.FileSystemConfig;
import org.apache.drill.exec.store.dfs.WorkspaceConfig;
+import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
+import org.apache.drill.shaded.guava.com.google.common.base.Strings;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterFixtureBuilder;
+import org.apache.drill.test.ClusterTest;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
@@ -43,7 +45,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

-public class BaseTestImpersonation extends PlanTestBase {
+public class BaseTestImpersonation extends ClusterTest {
protected static final String MINI_DFS_STORAGE_PLUGIN_NAME = "mini_dfs_plugin";
protected static final String processUser = System.getProperty("user.name");

@@ -112,19 +114,28 @@ protected static void startMiniDfsCluster(String testClass, boolean isImpersonat
}

protected static void startDrillCluster(final boolean isImpersonationEnabled) throws Exception {
-final Properties props = cloneDefaultTestConfigProperties();
+final Properties props = new Properties();
props.setProperty(ExecConstants.IMPERSONATION_ENABLED, Boolean.toString(isImpersonationEnabled));
-startDrillCluster(props);
+startDrillCluster(1, props);
}

-protected static void startDrillCluster(final Properties props) throws Exception {
-updateTestCluster(1, DrillConfig.create(props));
+protected static void startDrillCluster(int newDrillbitCount, final Properties props) throws Exception {
+startDrillCluster(newDrillbitCount, null, props);
}

+protected static void startDrillCluster(int newDrillbitCount, final DrillConfig configs, final Properties props) throws Exception {
+ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher).clusterSize(newDrillbitCount);
+builder.configBuilder().configProps(props);
+if (configs != null) {
+builder.configBuilder().configProps(configs);
+}
+startCluster(builder);
+}

protected static void addMiniDfsBasedStorage(final Map<String, WorkspaceConfig> workspaces) throws Exception {
// Create a HDFS based storage plugin based on local storage plugin and add it to plugin registry (connection string
// for mini dfs is varies for each run).
-StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
+StoragePluginRegistry pluginRegistry = cluster.storageRegistry();
FileSystemConfig lfsPluginConfig = (FileSystemConfig) pluginRegistry.getPlugin("dfs").getConfig();

String connection = dfsConf.get(FileSystem.FS_DEFAULT_NAME_KEY);
@@ -177,9 +188,14 @@ protected static String getUserHome(String user) {

protected static void createView(final String viewOwner, final String viewGroup, final short viewPerms,
final String newViewName, final String fromSourceSchema, final String fromSourceTableName) throws Exception {
+// updateClient(viewOwner);
+// test(String.format("ALTER SESSION SET `%s`='%o';", ExecConstants.NEW_VIEW_DEFAULT_PERMS_KEY, viewPerms));
+// test(String.format("CREATE VIEW %s.%s AS SELECT * FROM %s.%s;",
+// getWSSchema(viewOwner), newViewName, fromSourceSchema, fromSourceTableName));

updateClient(viewOwner);
test(String.format("ALTER SESSION SET `%s`='%o';", ExecConstants.NEW_VIEW_DEFAULT_PERMS_KEY, viewPerms));
test(String.format("CREATE VIEW %s.%s AS SELECT * FROM %s.%s;",
run(String.format("ALTER SESSION SET `%s`='%o';", ExecConstants.NEW_VIEW_DEFAULT_PERMS_KEY, viewPerms));
run(String.format("CREATE VIEW %s.%s AS SELECT * FROM %s.%s;",
getWSSchema(viewOwner), newViewName, fromSourceSchema, fromSourceTableName));

// Verify the view file created has the expected permissions and ownership
@@ -193,8 +209,8 @@ protected static void createView(final String viewOwner, final String viewGroup,
protected static void createView(final String viewOwner, final String viewGroup, final String viewName,
final String viewDef) throws Exception {
updateClient(viewOwner);
test(String.format("ALTER SESSION SET `%s`='%o';", ExecConstants.NEW_VIEW_DEFAULT_PERMS_KEY, (short) 0750));
test("CREATE VIEW %s.%s.%s AS %s", MINI_DFS_STORAGE_PLUGIN_NAME, "tmp", viewName, viewDef);
run(String.format("ALTER SESSION SET `%s`='%o';", ExecConstants.NEW_VIEW_DEFAULT_PERMS_KEY, (short) 0750));
run("CREATE VIEW %s.%s.%s AS %s", MINI_DFS_STORAGE_PLUGIN_NAME, "tmp", viewName, viewDef);
final Path viewFilePath = new Path("/tmp/", viewName + DotDrillType.VIEW.getEnding());
fs.setOwner(viewFilePath, viewOwner, viewGroup);
}
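With these helpers in place, a subclass can bring up an impersonation-enabled cluster in either of two ways; a short usage sketch (the drillbit count and extra properties are illustrative, not from the commit):

// e.g. in a @BeforeClass method of a BaseTestImpersonation subclass:
startDrillCluster(true); // one drillbit, impersonation enabled

// or with an explicit drillbit count and custom properties:
Properties props = new Properties();
props.setProperty(ExecConstants.IMPERSONATION_ENABLED, "true");
startDrillCluster(2, props);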
@@ -45,11 +45,11 @@ public static void setup() throws Exception {

private static void createTestData() throws Exception {
// Create test table in minidfs.tmp schema for use in test queries
test(String.format("CREATE TABLE %s.tmp.dfsRegion AS SELECT * FROM cp.`region.json`", MINI_DFS_STORAGE_PLUGIN_NAME));
run(String.format("CREATE TABLE %s.tmp.dfsRegion AS SELECT * FROM cp.`region.json`", MINI_DFS_STORAGE_PLUGIN_NAME));

// generate a large enough file that the DFS will not fulfill requests to read a
// page of data all at once, see notes above testReadLargeParquetFileFromDFS()
-test(String.format(
+run(String.format(
"CREATE TABLE %s.tmp.large_employee AS " +
"(SELECT employee_id, full_name FROM cp.`employee.json`) " +
"UNION ALL (SELECT employee_id, full_name FROM cp.`employee.json`)" +
@@ -75,8 +75,8 @@ private static void createTestData() throws Exception {
*/
@Test
public void testReadLargeParquetFileFromDFS() throws Exception {
test(String.format("USE %s", MINI_DFS_STORAGE_PLUGIN_NAME));
test("SELECT * FROM tmp.`large_employee`");
run(String.format("USE %s", MINI_DFS_STORAGE_PLUGIN_NAME));
run("SELECT * FROM tmp.`large_employee`");
}

@Test // DRILL-3037
Expand All @@ -96,7 +96,7 @@ public void testSimpleQuery() throws Exception {

@AfterClass
public static void removeMiniDfsBasedStorage() throws Exception {
-getDrillbitContext().getStorage().remove(MINI_DFS_STORAGE_PLUGIN_NAME);
+cluster.storageRegistry().remove(MINI_DFS_STORAGE_PLUGIN_NAME);
stopMiniDfsCluster();
}
}
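The same substitution applies wherever tests register or drop plugins: registry access moves from the drillbit context onto the cluster fixture, as in addHiveStoragePlugin and removeMiniDfsBasedStorage above. A sketch with a hypothetical plugin name and config object:

// Register a plugin for the test run, then drop it during teardown.
// "test_dfs" and testDfsConfig are hypothetical, not from the commit.
cluster.storageRegistry().put("test_dfs", testDfsConfig);
// ... run queries against test_dfs ...
cluster.storageRegistry().remove("test_dfs");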
