Merge branch 'branch-1.1'
Konstantin Boudnik committed Jan 28, 2016
2 parents (4bdb3c9, 749c394), commit 0c0091e535d5c799dd6db404cffe8859fd234852
Showing 1 changed file with 60 additions and 0 deletions.
@@ -0,0 +1,60 @@
--- apache-hive-1.2.1-src/ql/src/java/org/apache/hadoop/hive/ql/Driver.java.orig 2015-06-18 22:51:23.000000000 +0200
+++ apache-hive-1.2.1-src/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 2016-01-27 14:34:20.179641745 +0100
@@ -33,6 +33,7 @@
import java.util.Queue;
import java.util.Set;

+import com.google.common.collect.Sets;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -557,12 +558,27 @@
*/
public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
throws HiveException, AuthorizationException {
- HashSet<ReadEntity> inputs = sem.getInputs();
- HashSet<WriteEntity> outputs = sem.getOutputs();
SessionState ss = SessionState.get();
HiveOperation op = ss.getHiveOperation();
Hive db = sem.getDb();

+ Set<ReadEntity> additionalInputs = new HashSet<ReadEntity>();
+ for (Entity e : sem.getInputs()) {
+ if (e.getType() == Entity.Type.PARTITION) {
+ additionalInputs.add(new ReadEntity(e.getTable()));
+ }
+ }
+
+ Set<WriteEntity> additionalOutputs = new HashSet<WriteEntity>();
+ for (Entity e : sem.getOutputs()) {
+ if (e.getType() == Entity.Type.PARTITION) {
+ additionalOutputs.add(new WriteEntity(e.getTable(), WriteEntity.WriteType.DDL_NO_LOCK));
+ }
+ }
+
+ Set<ReadEntity> inputs = Sets.union(sem.getInputs(), additionalInputs);
+ Set<WriteEntity> outputs = Sets.union(sem.getOutputs(), additionalOutputs);
+
if (ss.isAuthorizationModeV2()) {
// get mapping of tables to columns used
ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo();
@@ -759,8 +775,8 @@

}

- private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
- HashSet<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols,
+ private static void doAuthorizationV2(SessionState ss, HiveOperation op, Set<ReadEntity> inputs,
+ Set<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols,
Map<String, List<String>> updateTab2Cols) throws HiveException {

/* comment for reviewers -> updateTab2Cols needed to be separate from tab2cols because if I
@@ -780,7 +796,7 @@
}

private static List<HivePrivilegeObject> getHivePrivObjects(
- HashSet<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
+ Set<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
if(privObjects == null){
return hivePrivobjs;
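
The substance of the patch: for every partition that appears in the query's inputs or outputs, the enclosing table is added as an additional read/write entity, and the original and additional sets are combined with Guava's Sets.union. Because Sets.union returns an unmodifiable Set view rather than a HashSet, the signatures of doAuthorizationV2 and getHivePrivObjects are widened from HashSet to Set. Below is a minimal standalone sketch of that pattern, assuming Guava on the classpath and using plain strings in place of Hive's ReadEntity/WriteEntity (the class and names are illustrative only, not Hive API):

import java.util.HashSet;
import java.util.Set;

import com.google.common.collect.Sets;

public class UnionSketch {
  public static void main(String[] args) {
    // Stand-ins for partition entities, e.g. table "sales", partition "2016-01".
    Set<String> inputs = new HashSet<String>();
    inputs.add("sales@2016-01");
    inputs.add("sales@2016-02");

    // Mirror the patch's additionalInputs loop: for each partition-like
    // entry, also record its parent table.
    Set<String> additionalInputs = new HashSet<String>();
    for (String e : inputs) {
      additionalInputs.add(e.substring(0, e.indexOf('@')));
    }

    // Sets.union returns a lazy, unmodifiable view over both sets; callers
    // that accept Set (rather than HashSet) can consume it directly.
    Set<String> all = Sets.union(inputs, additionalInputs);
    System.out.println(all); // e.g. [sales, sales@2016-01, sales@2016-02] (order not guaranteed)
  }
}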
