Skip to content

Commit

Permalink
HIVE-26246: Filter out results 'show connectors' on HMS client-side (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
zhangbutao committed Aug 15, 2022
1 parent 4cafe1f commit 5a50af4
Show file tree
Hide file tree
Showing 8 changed files with 72 additions and 5 deletions.
Expand Up @@ -49,7 +49,7 @@ public AuthorizationMetaStoreFilterHook(Configuration conf) {
/**
 * Filters a list of table names down to those the current user is
 * authorized to see.
 * @param catName catalog name (not used in the privilege-object lookup here)
 * @param dbName database the tables belong to
 * @param tableList candidate table names
 * @return the subset of {@code tableList} that passes authorization filtering
 * @throws MetaException if the authorization check fails
 */
public List<String> filterTableNames(String catName, String dbName, List<String> tableList)
throws MetaException {
// Build one privilege object per table, filter by authorization, then
// project the surviving objects back to plain names.
List<HivePrivilegeObject> listObjs = getHivePrivObjects(dbName, tableList);
return getFilteredObjectNames(getFilteredObjects(listObjs));
}
@Override
public List<Table> filterTables(List<Table> tableList) throws MetaException {
Expand Down Expand Up @@ -98,7 +98,7 @@ private List<String> getDbNames(List<HivePrivilegeObject> filteredObjects) {
return tnames;
}

private List<String> getTableNames(List<HivePrivilegeObject> filteredObjects) {
private List<String> getFilteredObjectNames(List<HivePrivilegeObject> filteredObjects) {
List<String> tnames = new ArrayList<String>();
for(HivePrivilegeObject obj : filteredObjects) {
tnames.add(obj.getObjectName());
Expand Down Expand Up @@ -153,6 +153,10 @@ public List<TableMeta> filterTableMetas(String catName,String dbName,List<TableM
.filter(e -> filteredTableNames.contains(e.getTableName())).collect(Collectors.toList());
}


/**
 * Filters the given data connector names through the authorization layer.
 * @param dcList candidate data connector names
 * @return the connector names that survive authorization filtering
 * @throws MetaException if the authorization check fails
 */
@Override
public List<String> filterDataConnectors(List<String> dcList) throws MetaException {
// Wrap each connector name in a privilege object, run the shared
// authorization filter, and project the result back to names.
return getFilteredObjectNames(
getFilteredObjects(HivePrivilegeObjectUtils.getHivePrivDcObjects(dcList)));
}
}

Expand Up @@ -48,4 +48,18 @@ public static List<HivePrivilegeObject> getHivePrivDbObjects(List<String> dbList

}

/**
 * Converts a list of data connector names into {@code HivePrivilegeObject}s
 * of type {@code DATACONNECTOR}.
 * @param dcList data connector names to convert
 * @return one privilege object per input name, in the same order
 */
public static List<HivePrivilegeObject> getHivePrivDcObjects(List<String> dcList) {
List<HivePrivilegeObject> privObjects = new ArrayList<>(dcList.size());
for (String connectorName : dcList) {
privObjects.add(new HivePrivilegeObject(HivePrivilegeObjectType.DATACONNECTOR, null, connectorName));
}
return privObjects;
}

}
Expand Up @@ -233,6 +233,11 @@ public final List<String> filterPartitionNames(String s, String s1, String s2, L
return list;
}

// Pass-through implementation: this hook performs no connector-level
// filtering and returns the input list unchanged.
@Override
public List<String> filterDataConnectors(List<String> dcList) throws MetaException {
return dcList;
}

private List<String> filterDatabaseObjects(HiveMetaStoreAuthzInfo hiveMetaStoreAuthzInfo) throws MetaException {
List<String> ret = null;

Expand Down
Expand Up @@ -90,4 +90,9 @@ public List<String> filterPartitionNames(String catName, String dbName, String t
return partitionNames;
}

// Pass-through implementation: connectors are not filtered by this hook,
// so the caller's list is returned as-is.
@Override
public List<String> filterDataConnectors(List<String> dcList) throws MetaException {
return dcList;
}

}
Expand Up @@ -1342,7 +1342,8 @@ public DataConnector getDataConnector(String name)
*/
/**
 * Returns the names of all data connectors, applying client-side filtering
 * when a filter hook is enabled.
 * @return the (possibly filtered) list of data connector names
 * @throws MetaException on metastore-side failure
 * @throws TException on thrift transport failure
 */
@Override
public List<String> getAllDataConnectorNames() throws MetaException, TException {
// Fetch every connector name, then let FilterUtils apply the configured
// filter hook (no-op when client-side filtering is disabled).
List<String> connectorNames = client.get_dataconnectors();
return FilterUtils.filterDataConnectorsIfEnabled(isClientFilterEnabled, filterHook, connectorNames);
}

/**
Expand Down
Expand Up @@ -143,5 +143,12 @@ List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> partitionSpecList)
*/
List<String> filterPartitionNames(String catName, String dbName, String tblName,
List<String> partitionNames) throws MetaException;

/**
 * Filter the given list of data connector names.
 * @param dcList data connector names to filter
 * @return the subset of {@code dcList} that passes this hook's filtering
 * @throws MetaException if the filtering check fails
 */
List<String> filterDataConnectors(List<String> dcList) throws MetaException;
}

Expand Up @@ -110,7 +110,7 @@ public static List<String> filterDataConnectorsIfEnabled(
List<String> connectorNames) throws MetaException {

if (isFilterEnabled) {
return filterHook.filterDatabases(connectorNames); // TODO add a new ATZ call
return filterHook.filterDataConnectors(connectorNames);
}
return connectorNames;
}
Expand Down
Expand Up @@ -23,6 +23,7 @@
import java.util.List;
import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
import org.apache.hadoop.hive.metastore.api.CompactionType;
import org.apache.hadoop.hive.metastore.api.DataConnector;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
Expand Down Expand Up @@ -141,6 +142,14 @@ public List<String> filterPartitionNames(String catName, String dbName, String t
}
return partitionNames;
}

/**
 * Test hook behavior for data connectors: hides everything when the hook
 * is configured to block results, otherwise passes the list through.
 */
@Override
public List<String> filterDataConnectors(List<String> dcList) throws MetaException {
return blockResults ? new ArrayList<>() : dcList;
}
}

protected static HiveMetaStoreClient client;
Expand All @@ -153,6 +162,12 @@ public List<String> filterPartitionNames(String catName, String dbName, String t
private static String DBNAME2 = "testdb2";
private static final String TAB1 = "tab1";
private static final String TAB2 = "tab2";
private static String DCNAME1 = "test_connector1";
private static String DCNAME2 = "test_connector2";
private static String mysql_type = "mysql";
private static String mysql_url = "jdbc:mysql://localhost:3306/hive";
private static String postgres_type = "postgres";
private static String postgres_url = "jdbc:postgresql://localhost:5432";


protected HiveMetaStoreClient createClient(Configuration metaStoreConf) throws Exception {
Expand Down Expand Up @@ -210,6 +225,8 @@ protected void creatEnv(Configuration conf) throws Exception {

client.dropDatabase(DBNAME1, true, true, true);
client.dropDatabase(DBNAME2, true, true, true);
client.dropDataConnector(DCNAME1, true, true);
client.dropDataConnector(DCNAME2, true, true);
Database db1 = new DatabaseBuilder()
.setName(DBNAME1)
.setCatalogName(Warehouse.DEFAULT_CATALOG_NAME)
Expand Down Expand Up @@ -238,6 +255,10 @@ protected void creatEnv(Configuration conf) throws Exception {
.inTable(tab2)
.addValue("value2")
.addToTable(client, conf);
DataConnector dc1 = new DataConnector(DCNAME1, mysql_type, mysql_url);
DataConnector dc2 = new DataConnector(DCNAME2, postgres_type, postgres_url);
client.createDataConnector(dc1);
client.createDataConnector(dc2);

TestTxnDbUtil.cleanDb(conf);
TestTxnDbUtil.prepDb(conf);
Expand Down Expand Up @@ -271,6 +292,8 @@ public void testHMSServerWithoutFilter() throws Exception {
assertEquals(1, client.getPartitionsByNames(DBNAME1, TAB2, Lists.newArrayList("name=value1")).size());

assertEquals(2, client.showCompactions().getCompacts().size());

assertEquals(2, client.getAllDataConnectorNames().size());
}

/**
Expand Down Expand Up @@ -318,6 +341,8 @@ public void testHMSClientWithoutFilter() throws Exception {
assertEquals(1, client.getPartitionsByNames(DBNAME1, TAB2, Lists.newArrayList("name=value1")).size());

assertEquals(2, client.showCompactions().getCompacts().size());

assertEquals(2, client.getAllDataConnectorNames().size());
}

/**
Expand All @@ -336,6 +361,7 @@ public void testHMSClientWithFilter() throws Exception {
testFilterForTables(false);
testFilterForPartition(false);
testFilterForCompaction();
testFilterForDataConnector();
}

protected void testFilterForDb(boolean filterAtServer) throws Exception {
Expand Down Expand Up @@ -404,4 +430,9 @@ protected void testFilterForPartition(boolean filterAtServer) throws Exception {
// Verifies that the blocking filter hook hides all compactions from
// showCompactions().
protected void testFilterForCompaction() throws Exception {
assertEquals(0, client.showCompactions().getCompacts().size());
}

// Verifies connector filtering: direct lookup by name still succeeds,
// while listing is expected to be emptied by the blocking filter hook.
protected void testFilterForDataConnector() throws Exception {
// Fetching a single connector by name is not routed through list filtering.
assertNotNull(client.getDataConnector(DCNAME1));
// The blocking hook returns an empty list, hiding both test connectors.
assertEquals(0, client.getAllDataConnectorNames().size());
}
}

0 comments on commit 5a50af4

Please sign in to comment.