HIVE-28848: Remove DFS_URI auth from ALTER_PARTITION if there is no ch… #5766

Merged · 4 commits · Jul 10, 2025

Changes from all commits:
@@ -20,12 +20,15 @@
package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
import org.apache.hadoop.hive.metastore.events.PreEventContext;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
import org.slf4j.Logger;
@@ -76,10 +79,17 @@ private List<HivePrivilegeObject> getOutputHObjs() {
ret.add(getHivePrivilegeObject(event.getTable()));

Partition newPartition = event.getNewPartition();
String oldUri = "";

if (event.getOldPartVals() != null && event.getOldPartVals().size() > 0) {
Contributor Author commented:

We don't have any changes in HMSHandler as part of this change. IIUC null is sent for oldPartition values and tmpPart is new partition here https://github.com/apache/hive/blob/master/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HMSHandler.java#L6184

Member replied:
The oldPartition values are the same as the tmpPart values in this method

oldUri = this.getOldPartSdLocation(event);
}

String newUri = (newPartition != null) ? getSdLocation(newPartition.getSd()) : "";

// Skip DFS_URI auth if new partition loc is empty or old and new partition loc are same
if (StringUtils.isNotEmpty(newUri) && !StringUtils.equalsIgnoreCase(oldUri, newUri)) {
ret.add(getHivePrivilegeObjectDfsUri(newUri));
}

COMMAND_STR = buildCommandString(COMMAND_STR, event.getTableName(), newPartition);
@@ -89,6 +99,19 @@ private List<HivePrivilegeObject> getOutputHObjs() {
return ret;
}

private String getOldPartSdLocation(PreAlterPartitionEvent event) {
Table table = new Table(event.getTable());
try {
org.apache.hadoop.hive.ql.metadata.Partition partition = Hive.get().getPartition(table,
Warehouse.makeSpecFromValues(event.getTable().getPartitionKeys(), event.getOldPartVals()));

return partition.getLocation();

} catch (HiveException e) {
throw new RuntimeException(e);
}
}

private String buildCommandString(String cmdStr, String tbl, Partition partition ) {
String ret = cmdStr;

@@ -100,4 +123,4 @@ private String buildCommandString(String cmdStr, String tbl, Partition partition

return ret;
}
}
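
Taken together, the hunks above change getOutputHObjs() so that a DFS_URI privilege object is added only when the ALTER PARTITION actually relocates the partition. Below is a minimal standalone sketch of that decision logic, assuming the same commons-lang3 StringUtils used above; the class name and the oldLocation/newLocation parameters are illustrative stand-ins for the locations the event code resolves:

import org.apache.commons.lang3.StringUtils;

public class DfsUriAuthCheck {

    // Mirrors the new condition in getOutputHObjs(): request DFS_URI
    // authorization only for a non-empty new location that differs from
    // the old one (case-insensitively, matching equalsIgnoreCase above).
    static boolean needsDfsUriAuth(String oldLocation, String newLocation) {
        return StringUtils.isNotEmpty(newLocation)
                && !StringUtils.equalsIgnoreCase(oldLocation, newLocation);
    }

    public static void main(String[] args) {
        // Unchanged location: DFS_URI auth is skipped.
        System.out.println(needsDfsUriAuth("/warehouse/t/p=1", "/warehouse/t/p=1")); // false
        // Relocated partition: DFS_URI auth is required on the new location.
        System.out.println(needsDfsUriAuth("/warehouse/t/p=1", "/warehouse/t/p=2")); // true
        // Empty new location: nothing to authorize.
        System.out.println(needsDfsUriAuth("/warehouse/t/p=1", "")); // false
    }
}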
@@ -18,6 +18,7 @@

package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore;

import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
@@ -45,12 +46,13 @@
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.filtercontext.TableFilterContext;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.thrift.TException;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;

import java.util.ArrayList;
import java.util.List;
@@ -66,6 +68,7 @@
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.verify;

/*
@@ -87,6 +90,7 @@ public class TestHiveMetaStoreAuthorizer {
private static final String metaConfVal = "";

private static final String TEST_DATA_DIR = new File("file:///testdata").getPath();
private static final List<String> PARTCOL_SCHEMA = Lists.newArrayList("yyyy", "mm", "dd");
private RawStore rawStore;
private Configuration conf;
private HMSHandler hmsHandler;
@@ -696,6 +700,106 @@ public void testU_DropDataConnector_authorizedUser() {
}
}


/**
* Captures and returns the privilege objects for Alter Partition
*/
private Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> getHivePrivilegeObjectsForAlterPartition()
throws HiveAuthzPluginException, HiveAccessControlException {
@SuppressWarnings("unchecked")
Class<List<HivePrivilegeObject>> class_listPrivObjects = (Class) List.class;
ArgumentCaptor<List<HivePrivilegeObject>> inputsCapturer = ArgumentCaptor
.forClass(class_listPrivObjects);
ArgumentCaptor<List<HivePrivilegeObject>> outputsCapturer = ArgumentCaptor
.forClass(class_listPrivObjects);

verify(mockHiveAuthorizer).checkPrivileges(eq(HiveOperationType.ALTERPARTITION_FILEFORMAT),
inputsCapturer.capture(), outputsCapturer.capture(),
any(HiveAuthzContext.class));

return new ImmutablePair<>(inputsCapturer.getValue(), outputsCapturer.getValue());
}

@Test
public void testV_AlterPartition_DFSUriPrivObject() {
UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
try {
List<List<String>> testValues = createTable4PartColsParts();
List<Partition> oldParts = hmsHandler.get_partitions(dbName, tblName, (short) -1);
Partition oldPart = oldParts.get(3);
Partition newPart = makeTestChangesOnPartition(oldPart);

hmsHandler.rename_partition(dbName, tblName, oldPart.getValues(), newPart);

Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> io = getHivePrivilegeObjectsForAlterPartition();
List<HivePrivilegeObject> outputs = io.getRight();

List<HivePrivilegeObject> dfsUriOutputs = outputs.stream()
.filter(o -> o.getType() == HivePrivilegeObject.HivePrivilegeObjectType.DFS_URI)
.collect(Collectors.toList());

assertEquals("Should have one DFS_URI privilege object", 1, dfsUriOutputs.size());
HivePrivilegeObject dfsUriObj = dfsUriOutputs.get(0);

assertEquals("DFS_URI should be same as new partition location",
oldPart.getSd().getLocation() + "/hh=01", dfsUriObj.getObjectName());
} catch (Exception e) {
fail("testV_AlterPartition_DFSUriPrivObject() failed with " + e);
}
}

protected Table createPartitionedTestTable(String dbName, String tableName,
List<String> partCols, boolean setPartitionLevelPrivileges)
throws Exception {

Database db = new DatabaseBuilder()
.setName(dbName)
.build(conf);
hmsHandler.create_database(db);

TableBuilder builder = new TableBuilder()
.setDbName(dbName)
.setTableName(tableName)
.addCol("id", "int")
.addCol("name", "string");

partCols.forEach(col -> builder.addPartCol(col, "string"));
Table table = builder.build(conf);

hmsHandler.create_table(table);
return table;
}

protected List<List<String>> createTable4PartColsParts() throws Exception {
Table table = createPartitionedTestTable(dbName, tblName, PARTCOL_SCHEMA, false);
List<List<String>> testValues = Lists.newArrayList(
Lists.newArrayList("1999", "01", "02"),
Lists.newArrayList("2009", "02", "10"),
Lists.newArrayList("2017", "10", "26"),
Lists.newArrayList("2017", "11", "27"));

for (List<String> vals : testValues) {
addPartition(table, vals);
}

return testValues;
}

protected void addPartition(Table table, List<String> values) throws TException {
PartitionBuilder partitionBuilder = new PartitionBuilder().inTable(table);
values.forEach(val -> partitionBuilder.addValue(val));
hmsHandler.add_partition(partitionBuilder.build(conf));
}

protected static Partition makeTestChangesOnPartition(Partition partition) {
Partition newPart = new Partition(partition);
newPart.getParameters().put("hmsTestParam001", "testValue001");
newPart.getSd().setLocation(partition.getSd().getLocation() + "/hh=01");
newPart.setValues(Lists.newArrayList("2018", "11", "27"));
return newPart;
}

@Test
public void testUnAuthorizedCause() {
UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser));
@@ -6103,7 +6103,8 @@ private void alter_partitions_with_environment_context(String catName, String db
if (tmpPart.getSd() != null && tmpPart.getSd().getCols() != null && tmpPart.getSd().getCols().isEmpty()) {
tmpPart.getSd().setCols(table.getSd().getCols());
}
// old part values are same as new partition values here
firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, table, tmpPart.getValues(), tmpPart, this));
}
oldParts = alterHandler.alterPartitions(getMS(), wh, catName, db_name, tbl_name, new_parts,
environmentContext, writeIdList, writeId, this);
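
The new test above asserts the positive case: a relocated partition yields exactly one DFS_URI privilege object. A complementary negative check follows naturally from the change but is not part of this diff; here is a sketch against the same test fixtures (hmsHandler, dbName, tblName, authorizedUser, and the helpers added above), where the test name and the hmsTestParam002 parameter are hypothetical:

@Test
public void testW_AlterPartition_NoLocationChange_skipsDfsUri() {
    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
    try {
        createTable4PartColsParts();
        List<Partition> oldParts = hmsHandler.get_partitions(dbName, tblName, (short) -1);
        Partition oldPart = oldParts.get(3);

        // Change only a partition parameter; keep the storage descriptor location as-is.
        Partition newPart = new Partition(oldPart);
        newPart.getParameters().put("hmsTestParam002", "testValue002");

        hmsHandler.rename_partition(dbName, tblName, oldPart.getValues(), newPart);

        List<HivePrivilegeObject> outputs = getHivePrivilegeObjectsForAlterPartition().getRight();
        long dfsUriCount = outputs.stream()
                .filter(o -> o.getType() == HivePrivilegeObject.HivePrivilegeObjectType.DFS_URI)
                .count();

        // Old and new locations match, so no DFS_URI privilege object should be requested.
        assertEquals("DFS_URI auth should be skipped when the location is unchanged", 0, dfsUriCount);
    } catch (Exception e) {
        fail("testW_AlterPartition_NoLocationChange_skipsDfsUri() failed with " + e);
    }
}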