Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ public DummyFilterHook(Configuration conf) {
}

@Override
public List<TableMeta> filterTableMetas(String catName, String dbName, List<TableMeta> tableMetas)
public List<TableMeta> filterTableMetas(List<TableMeta> tableMetas)
throws MetaException {
try {
Assert.assertNotNull(SessionState.get());
Expand All @@ -87,7 +87,7 @@ public List<TableMeta> filterTableMetas(String catName, String dbName, List<Tabl
} catch (Exception e) {
throw new RuntimeException(e);
}
return super.filterTableMetas(catName, dbName, tableMetas);
return super.filterTableMetas(tableMetas);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,9 @@
package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;

import java.util.ArrayList;
import java.util.List;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import javax.annotation.Nullable;
Expand All @@ -35,6 +36,8 @@
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;

import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
Expand All @@ -53,16 +56,66 @@
public class SQLStdHiveAuthorizationValidatorForTest extends SQLStdHiveAuthorizationValidator {

final String BYPASS_OBJTYPES_KEY = "test.hive.authz.sstd.validator.bypassObjTypes";
final String OUTPUT_PRIV_OBJS= "test.hive.authz.sstd.validator.outputPrivObjs";
Set<HivePrivilegeObject.HivePrivilegeObjectType> bypassObjectTypes;
boolean shouldOutputObjs;

/**
 * Test validator that can echo the HivePrivilegeObjects it sees to the session
 * console (driven by {@code test.hive.authz.sstd.validator.outputPrivObjs}).
 *
 * @throws HiveAuthzPluginException propagated from the parent constructor
 */
public SQLStdHiveAuthorizationValidatorForTest(HiveMetastoreClientFactory metastoreClientFactory,
    HiveConf conf, HiveAuthenticationProvider authenticator,
    SQLStdHiveAccessControllerWrapper privController, HiveAuthzSessionContext ctx)
    throws HiveAuthzPluginException {
  super(metastoreClientFactory, conf, authenticator, privController, ctx);
  // These two reads are independent of each other; both are test-only knobs.
  setupBypass(conf.get(BYPASS_OBJTYPES_KEY, ""));
  shouldOutputObjs = conf.getBoolean(OUTPUT_PRIV_OBJS, false);
}

/**
 * Appends {@code " <fieldName> <field>"} to {@code sb}; a {@code null} field is
 * skipped entirely (no field name is emitted either).
 */
private static <T> void addIfNotNull(StringBuilder sb, String fieldName, T field) {
  // Chained appends avoid building an intermediate concatenated String;
  // output is byte-identical to the previous " " + fieldName + " " + field form.
  if (field != null) {
    sb.append(' ').append(fieldName).append(' ').append(field);
  }
}

/**
 * Renders a privilege object as a single line for qtest output comparison.
 * Field label strings (including the inconsistent presence/absence of the
 * trailing colon on partKeys/commandKeys) are kept exactly as-is because
 * golden qtest outputs depend on them.
 */
private String privilegeObjectToStringForTest(HivePrivilegeObject privObj) {
  StringBuilder sb = new StringBuilder("HIVE PRIVILEGE OBJECT {");
  addIfNotNull(sb, "objectName:", privObj.getObjectName());
  addIfNotNull(sb, "type:", privObj.getType());
  addIfNotNull(sb, "actionType:", privObj.getActionType());
  addIfNotNull(sb, "dbName:", privObj.getDbname());
  // these two are all caps to prevent these being masked, since the word [Oo]wner is masked by qtests
  addIfNotNull(sb, "OWNER:", privObj.getOwnerName());
  addIfNotNull(sb, "OWNERTYPE:", privObj.getOwnerType());
  addIfNotNull(sb, "columns:", privObj.getColumns());
  addIfNotNull(sb, "partKeys", privObj.getPartKeys());
  // NOTE: label says "commandKeys" but the getter is getCommandParams(); left
  // unchanged to avoid breaking recorded test output. (Stray ';;' removed.)
  addIfNotNull(sb, "commandKeys", privObj.getCommandParams());
  sb.append("}");
  return sb.toString();
}

/**
 * Prints {@code prefix} followed by one line per privilege object to the
 * session console, but only when output is enabled and the list is non-empty.
 * Lines are sorted so qtest output is deterministic.
 */
private void outputHivePrivilegeObjects(String prefix, List<HivePrivilegeObject> privilegeObjects) {
  if (!shouldOutputObjs) {
    return;
  }

  List<String> rendered = new ArrayList<>(privilegeObjects.size());
  for (HivePrivilegeObject privilegeObject : privilegeObjects) {
    rendered.add(privilegeObjectToStringForTest(privilegeObject));
  }
  if (rendered.isEmpty()) {
    return;
  }

  // Sort to make test output stable
  Collections.sort(rendered);

  LogHelper console = SessionState.getConsole();
  console.printInfo(prefix, false);
  for (String line : rendered) {
    console.printInfo(line, false);
  }
}

/**
 * Logs the candidate objects for test inspection, then delegates the actual
 * filtering to the parent validator.
 */
@Override
public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, HiveAuthzContext context) {
  outputHivePrivilegeObjects("filterListCmdObjects", listObjs);
  List<HivePrivilegeObject> filtered = super.filterListCmdObjects(listObjs, context);
  return filtered;
}

private void setupBypass(String bypassObjectTypesConf){
bypassObjectTypes = new HashSet<HivePrivilegeObject.HivePrivilegeObjectType>();
if (!bypassObjectTypesConf.isEmpty()){
Expand Down Expand Up @@ -95,6 +148,8 @@ public boolean apply(@Nullable HivePrivilegeObject hivePrivilegeObject) {
public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException,
HiveAccessControlException {
outputHivePrivilegeObjects("inputHObjs:", inputHObjs);
outputHivePrivilegeObjects("outputHObjs:", outputHObjs);
switch (hiveOpType) {
case DFS:
case SET:
Expand All @@ -106,6 +161,7 @@ public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObje

}

@Override
public boolean needTransform() {
// In the future, we can add checking for username, groupname, etc based on
// HiveAuthenticationProvider. For example,
Expand All @@ -115,8 +171,10 @@ public boolean needTransform() {

// Please take a look at the instructions in HiveAuthorizer.java before
// implementing applyRowFilterAndColumnMasking
@Override
public List<HivePrivilegeObject> applyRowFilterAndColumnMasking(HiveAuthzContext context,
List<HivePrivilegeObject> privObjs) throws SemanticException {
outputHivePrivilegeObjects("applyRowFilterAndColumnMasking:", privObjs);
List<HivePrivilegeObject> needRewritePrivObjs = new ArrayList<>();
for (HivePrivilegeObject privObj : privObjs) {
if (privObj.getObjectName().equals("masking_test") || privObj.getObjectName().startsWith("masking_test_n")) {
Expand Down Expand Up @@ -158,5 +216,4 @@ public List<HivePrivilegeObject> applyRowFilterAndColumnMasking(HiveAuthzContext
}
return needRewritePrivObjs;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -19,19 +19,23 @@

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private;
import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.TableMeta;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.session.SessionState;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Metastore filter hook for filtering out the list of objects that the current authorization
* implementation does not allow user to see
Expand All @@ -51,9 +55,10 @@ public List<String> filterTableNames(String catName, String dbName, List<String>
List<HivePrivilegeObject> listObjs = getHivePrivObjects(dbName, tableList);
return getFilteredObjectNames(getFilteredObjects(listObjs));
}

/**
 * Filters full Table objects: converts each table to a privilege object, asks
 * the authorizer which survive, then maps the survivors back onto the input list.
 */
@Override
public List<Table> filterTables(List<Table> tableList) throws MetaException {
  List<HivePrivilegeObject> privObjs = tablesToPrivilegeObjs(tableList);
  List<HivePrivilegeObject> allowed = getFilteredObjects(privObjs);
  return getFilteredTableList(allowed, tableList);
}

Expand Down Expand Up @@ -133,25 +138,45 @@ private List<HivePrivilegeObject> getHivePrivObjects(String dbName, List<String>
return objs;
}

private List<HivePrivilegeObject> getHivePrivObjects(List<Table> tableList) {
/**
 * Builds a TABLE_OR_VIEW privilege object for authorization checks.
 * Column/partition fields are left null; action type is OTHER (read-style
 * listing, not a write).
 */
private HivePrivilegeObject createPrivilegeObjectForTable(String dbName, String tableName, String owner,
PrincipalType ownerType) {
return new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName, null, null,
HivePrivilegeObject.HivePrivObjectActionType.OTHER, null, null, owner, ownerType);
}

private List<HivePrivilegeObject> tablesToPrivilegeObjs(List<Table> tableList) {
List<HivePrivilegeObject> objs = new ArrayList<HivePrivilegeObject>();
for(Table tableObject : tableList) {
objs.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, tableObject.getDbName(), tableObject.getTableName(), null, null,
HivePrivilegeObject.HivePrivObjectActionType.OTHER, null, null, tableObject.getOwner(), tableObject.getOwnerType()));
for (Table tableObject : tableList) {
objs.add(createPrivilegeObjectForTable(tableObject.getDbName(), tableObject.getTableName(), tableObject.getOwner(),
tableObject.getOwnerType()));
}
return objs;
}

@Override
public List<TableMeta> filterTableMetas(String catName,String dbName,List<TableMeta> tableMetas) throws MetaException {
List<String> tableNames = new ArrayList<>();
for(TableMeta tableMeta: tableMetas){
tableNames.add(tableMeta.getTableName());
}
List<String> filteredTableNames = filterTableNames(catName,dbName,tableNames);
return tableMetas.stream()
.filter(e -> filteredTableNames.contains(e.getTableName())).collect(Collectors.toList());
}
/**
 * Maps each TableMeta to a TABLE_OR_VIEW privilege object, mirroring
 * tablesToPrivilegeObjs but sourcing fields from the lightweight TableMeta.
 */
private List<HivePrivilegeObject> tableMetasToPrivilegeObjs(List<TableMeta> tableMetas) {
  List<HivePrivilegeObject> result = new ArrayList<>();
  for (TableMeta meta : tableMetas) {
    result.add(createPrivilegeObjectForTable(meta.getDbName(), meta.getTableName(),
        meta.getOwnerName(), meta.getOwnerType()));
  }
  return result;
}

/**
 * Key used to match a TableMeta against a filtered privilege object:
 * the (dbName, tableName) pair.
 */
private ImmutablePair<String, String> tableMetaKey(String dbName, String tableName) {
  // Diamond operator fixes the raw-type `new ImmutablePair(...)` unchecked warning.
  return new ImmutablePair<>(dbName, tableName);
}

/**
 * Filters TableMeta entries by running the authorizer over privilege-object
 * representations, then keeping only the entries whose (db, table) key survived.
 */
@Override
public List<TableMeta> filterTableMetas(List<TableMeta> tableMetas) throws MetaException {
  List<HivePrivilegeObject> candidates = tableMetasToPrivilegeObjs(tableMetas);
  // Set membership keeps the final pass O(n) instead of list.contains per entry.
  Set<ImmutablePair<String, String>> allowedKeys = getFilteredObjects(candidates).stream()
      .map(obj -> tableMetaKey(obj.getDbname(), obj.getObjectName()))
      .collect(Collectors.toSet());
  return tableMetas.stream()
      .filter(meta -> allowedKeys.contains(tableMetaKey(meta.getDbName(), meta.getTableName())))
      .collect(Collectors.toList());
}

@Override
public List<String> filterDataConnectors(List<String> dcList) throws MetaException {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,14 @@ public final List<String> filterCatalogs(List<String> catalogs) throws MetaExcep
}

@Override
public final List<TableMeta> filterTableMetas(String catName, String dbName, List<TableMeta> tableMetas)
/**
 * @deprecated the catalog and database arguments are ignored; use
 *             {@link #filterTableMetas(List)} instead.
 */
@Deprecated
public List<TableMeta> filterTableMetas(String catName, String dbName, List<TableMeta> tableMetas)
    throws MetaException {
  // Compatibility shim: delegates to the list-only overload.
  return filterTableMetas(tableMetas);
}

/**
 * Default (pass-through) implementation: performs no filtering and returns the
 * input list unchanged. Subclasses that enforce authorization override the
 * hook this default backs; this base keeps everything visible.
 */
@Override
public final List<TableMeta> filterTableMetas(List<TableMeta> tableMetas)
throws MetaException {
return tableMetas;
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
--! qt:authorizer
set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
set test.hive.authz.sstd.validator.outputPrivObjs=true;
set test.hive.authz.sstd.validator.bypassObjTypes=DATABASE;
set hive.test.authz.sstd.hs2.mode=true;
set user.name=testuser;

CREATE DATABASE test_auth_obj_db;
CREATE TABLE test_auth_obj_db.test_privs(i int);
set user.name=testuser2;
CREATE TABLE test_auth_obj_db.test_privs2(s string, i int);
set user.name=testuser;
SHOW DATABASES LIKE 'test_auth_obj_db';
SHOW TABLES IN test_auth_obj_db;
EXPLAIN SELECT * FROM test_auth_obj_db.test_privs;
EXPLAIN INSERT INTO test_auth_obj_db.test_privs VALUES (1),(2),(3);
set user.name=testuser2;
DROP TABLE test_auth_obj_db.test_privs2;
set user.name=testuser;
DROP TABLE test_auth_obj_db.test_privs;
DROP DATABASE test_auth_obj_db;
Loading