Skip to content

Commit

Permalink
HIVE-2484: Enable ALTER TABLE SET SERDE to work on partition level (Xiao Li via He Yongqiang)
Browse files Browse the repository at this point in the history

git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1181197 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information
Yongqiang He committed Oct 10, 2011
1 parent af94df9 commit 2e796e1
Show file tree
Hide file tree
Showing 7 changed files with 255 additions and 49 deletions.
29 changes: 22 additions & 7 deletions ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
Expand Up @@ -2801,16 +2801,31 @@ private int alterTable(Hive db, AlterTableDesc alterTbl) throws HiveException {
} else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) {
tbl.getTTable().getParameters().putAll(alterTbl.getProps());
} else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDEPROPS) {
tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
alterTbl.getProps());
} else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDE) {
tbl.setSerializationLib(alterTbl.getSerdeName());
if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
if (part != null) {
part.getTPartition().getSd().getSerdeInfo().getParameters().putAll(
alterTbl.getProps());
} else {
tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
alterTbl.getProps());
}
tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), tbl
.getDeserializer()));
} else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSERDE) {
String serdeName = alterTbl.getSerdeName();
if (part != null) {
part.getTPartition().getSd().getSerdeInfo().setSerializationLib(serdeName);
if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
part.getTPartition().getSd().getSerdeInfo().getParameters().putAll(
alterTbl.getProps());
}
part.getTPartition().getSd().setCols(part.getTPartition().getSd().getCols());
} else {
tbl.setSerializationLib(alterTbl.getSerdeName());
if ((alterTbl.getProps() != null) && (alterTbl.getProps().size() > 0)) {
tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
alterTbl.getProps());
}
tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), tbl.
getDeserializer()));
}
} else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDFILEFORMAT) {
if(part != null) {
part.getTPartition().getSd().setInputFormat(alterTbl.getInputFormat());
Expand Down
Expand Up @@ -195,6 +195,10 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
analyzeAlterTableLocation(ast, tableName, partSpec);
} else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES) {
analyzeAlterTablePartMergeFiles(tablePart, ast, tableName, partSpec);
} else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERIALIZER) {
analyzeAlterTableSerde(ast, tableName, partSpec);
} else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES) {
analyzeAlterTableSerdeProps(ast, tableName, partSpec);
}
break;
}
Expand Down Expand Up @@ -294,12 +298,6 @@ public void analyzeInternal(ASTNode ast) throws SemanticException {
case HiveParser.TOK_ALTERTABLE_PROPERTIES:
analyzeAlterTableProps(ast, false);
break;
case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
analyzeAlterTableSerdeProps(ast);
break;
case HiveParser.TOK_ALTERTABLE_SERIALIZER:
analyzeAlterTableSerde(ast);
break;
case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
analyzeAlterTableClusterSort(ast);
break;
Expand Down Expand Up @@ -945,52 +943,38 @@ private void analyzeAlterTableProps(ASTNode ast, boolean expectView)
alterTblDesc), conf));
}

private void analyzeAlterTableSerdeProps(ASTNode ast)
private void analyzeAlterTableSerdeProps(ASTNode ast, String tableName,
HashMap<String, String> partSpec)
throws SemanticException {
String tableName = getUnescapedName((ASTNode)ast.getChild(0));
HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0))
.getChild(0));
AlterTableDesc alterTblDesc = new AlterTableDesc(
AlterTableTypes.ADDSERDEPROPS);
alterTblDesc.setProps(mapProp);
alterTblDesc.setOldName(tableName);
alterTblDesc.setPartSpec(partSpec);

try {
Table tab = db.getTable(db.getCurrentDatabase(), tableName, false);
if (tab != null) {
inputs.add(new ReadEntity(tab));
outputs.add(new WriteEntity(tab));
}
} catch (HiveException e) {
throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
}

addInputsOutputsAlterTable(tableName, partSpec);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
alterTblDesc), conf));
}

private void analyzeAlterTableSerde(ASTNode ast) throws SemanticException {
String tableName = getUnescapedName((ASTNode)ast.getChild(0));
String serdeName = unescapeSQLString(ast.getChild(1).getText());
private void analyzeAlterTableSerde(ASTNode ast, String tableName,
HashMap<String, String> partSpec)
throws SemanticException {

String serdeName = unescapeSQLString(ast.getChild(0).getText());
AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDSERDE);
if (ast.getChildCount() > 2) {
HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(2))
if (ast.getChildCount() > 1) {
HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1))
.getChild(0));
alterTblDesc.setProps(mapProp);
}
alterTblDesc.setOldName(tableName);
alterTblDesc.setSerdeName(serdeName);
alterTblDesc.setPartSpec(partSpec);

try {
Table tab = db.getTable(db.getCurrentDatabase(), tableName, false);
if (tab != null) {
inputs.add(new ReadEntity(tab));
outputs.add(new WriteEntity(tab));
}
} catch (HiveException e) {
throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
}

addInputsOutputsAlterTable(tableName, partSpec);
rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
alterTblDesc), conf));
}
Expand Down
10 changes: 5 additions & 5 deletions ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
Expand Up @@ -572,7 +572,6 @@ alterTableStatementSuffix
| alterStatementSuffixArchive
| alterStatementSuffixUnArchive
| alterStatementSuffixProperties
| alterStatementSuffixSerdeProperties
| alterTblPartitionStatement
| alterStatementSuffixClusterbySortby
;
Expand Down Expand Up @@ -705,10 +704,10 @@ alterViewSuffixProperties
alterStatementSuffixSerdeProperties
@init { msgs.push("alter serdes statement"); }
@after { msgs.pop(); }
: name=Identifier KW_SET KW_SERDE serdeName=StringLiteral (KW_WITH KW_SERDEPROPERTIES tableProperties)?
-> ^(TOK_ALTERTABLE_SERIALIZER $name $serdeName tableProperties?)
| name=Identifier KW_SET KW_SERDEPROPERTIES tableProperties
-> ^(TOK_ALTERTABLE_SERDEPROPERTIES $name tableProperties)
: KW_SET KW_SERDE serdeName=StringLiteral (KW_WITH KW_SERDEPROPERTIES tableProperties)?
-> ^(TOK_ALTERTABLE_SERIALIZER $serdeName tableProperties?)
| KW_SET KW_SERDEPROPERTIES tableProperties
-> ^(TOK_ALTERTABLE_SERDEPROPERTIES tableProperties)
;

tablePartitionPrefix
Expand All @@ -732,6 +731,7 @@ alterTblPartitionStatementSuffix
| alterStatementSuffixLocation
| alterStatementSuffixProtectMode
| alterStatementSuffixMergeFiles
| alterStatementSuffixSerdeProperties
;

alterStatementSuffixFileFormat
Expand Down
Expand Up @@ -56,8 +56,6 @@ public final class SemanticAnalyzerFactory {
commandType.put(HiveParser.TOK_ALTERTABLE_ARCHIVE, HiveOperation.ALTERTABLE_ARCHIVE);
commandType.put(HiveParser.TOK_ALTERTABLE_UNARCHIVE, HiveOperation.ALTERTABLE_UNARCHIVE);
commandType.put(HiveParser.TOK_ALTERTABLE_PROPERTIES, HiveOperation.ALTERTABLE_PROPERTIES);
commandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER, HiveOperation.ALTERTABLE_SERIALIZER);
commandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES, HiveOperation.ALTERTABLE_SERDEPROPERTIES);
commandType.put(HiveParser.TOK_ALTERTABLE_CLUSTER_SORT, HiveOperation.ALTERTABLE_CLUSTER_SORT);
commandType.put(HiveParser.TOK_SHOWDATABASES, HiveOperation.SHOWDATABASES);
commandType.put(HiveParser.TOK_SHOWTABLES, HiveOperation.SHOWTABLES);
Expand Down Expand Up @@ -104,8 +102,14 @@ public final class SemanticAnalyzerFactory {
new HiveOperation[] { HiveOperation.ALTERTABLE_LOCATION,
HiveOperation.ALTERPARTITION_LOCATION });
tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES,
new HiveOperation[] {HiveOperation.ALTERTABLE_MERGEFILES,
new HiveOperation[] {HiveOperation.ALTERTABLE_MERGEFILES,
HiveOperation.ALTERPARTITION_MERGEFILES });
tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SERIALIZER,
new HiveOperation[] {HiveOperation.ALTERTABLE_SERIALIZER,
HiveOperation.ALTERPARTITION_SERIALIZER });
tablePartitionCommandType.put(HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES,
new HiveOperation[] {HiveOperation.ALTERTABLE_SERDEPROPERTIES,
HiveOperation.ALTERPARTITION_SERDEPROPERTIES });
}

public static BaseSemanticAnalyzer get(HiveConf conf, ASTNode tree)
Expand Down
2 changes: 2 additions & 0 deletions ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
Expand Up @@ -44,7 +44,9 @@ public enum HiveOperation {
ALTERTABLE_UNARCHIVE("ALTERTABLE_UNARCHIVE", new Privilege[]{Privilege.ALTER_DATA}, null),
ALTERTABLE_PROPERTIES("ALTERTABLE_PROPERTIES", new Privilege[]{Privilege.ALTER_METADATA}, null),
ALTERTABLE_SERIALIZER("ALTERTABLE_SERIALIZER", new Privilege[]{Privilege.ALTER_METADATA}, null),
ALTERPARTITION_SERIALIZER("ALTERPARTITION_SERIALIZER", new Privilege[]{Privilege.ALTER_METADATA}, null),
ALTERTABLE_SERDEPROPERTIES("ALTERTABLE_SERDEPROPERTIES", new Privilege[]{Privilege.ALTER_METADATA}, null),
ALTERPARTITION_SERDEPROPERTIES("ALTERPARTITION_SERDEPROPERTIES", new Privilege[]{Privilege.ALTER_METADATA}, null),
ALTERTABLE_CLUSTER_SORT("ALTERTABLE_CLUSTER_SORT", new Privilege[]{Privilege.ALTER_METADATA}, null),
SHOWDATABASES("SHOWDATABASES", new Privilege[]{Privilege.SHOW_DATABASE}, null),
SHOWTABLES("SHOWTABLES", null, null),
Expand Down
33 changes: 33 additions & 0 deletions ql/src/test/queries/clientpositive/alter_table_serde.q
@@ -0,0 +1,33 @@
-- Regression test for HIVE-2484: ALTER TABLE ... SET SERDE / SET SERDEPROPERTIES
-- must work at both table level and partition level.

-- Case 1: unpartitioned table — table-level serde changes.
create table test_table (id int, query string, name string);
describe extended test_table;

alter table test_table set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe';
describe extended test_table;

alter table test_table set serdeproperties ('field.delim' = ',');
describe extended test_table;

drop table test_table;

-- Case 2: partitioned table — table-level serde changes should not touch
-- the serde recorded on an already-existing partition.
create table test_table (id int, query string, name string) partitioned by (dt string);

alter table test_table add partition (dt = '2011');
describe extended test_table partition (dt='2011');

alter table test_table set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe';
describe extended test_table partition (dt='2011');

alter table test_table set serdeproperties ('field.delim' = ',');
describe extended test_table partition (dt='2011');

-- Case 3: partition-level serde changes (the new behavior added by HIVE-2484).

alter table test_table partition(dt='2011') set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe';
describe extended test_table partition (dt='2011');

alter table test_table partition(dt='2011') set serdeproperties ('field.delim' = ',');
describe extended test_table partition (dt='2011');

-- Cleanup. NOTE: the original file was missing this semicolon, so the Hive
-- test driver (which splits statements on ';') silently skipped the drop,
-- leaking test_table into later tests.
drop table test_table;

0 comments on commit 2e796e1

Please sign in to comment.