HIVE-1871 Bug in merging dynamic partitions introduced by HIVE-1806

(He Yongqiang via namit)



git-svn-id: https://svn.apache.org/repos/asf/hive/trunk@1054856 13f79535-47bb-0310-9956-ffa450edef68
commit a804a3d7f0726253c94af66cef260086307b4360 1 parent ceafb22
Namit Jain authored
3  CHANGES.txt
@@ -659,6 +659,9 @@ Trunk - Unreleased
HIVE-1874 fix HBase filter pushdown broken by HIVE-1638
(John Sichi via namit)
+ HIVE-1871 Bug in merging dynamic partitions introduced by HIVE-1806
+ (He Yongqiang via namit)
+
TESTS
HIVE-1464. improve test query performance
19 ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java
@@ -126,16 +126,17 @@ public int execute(DriverContext driverContext) {
// need to merge and they can simply be moved to the target directory.
LoadMultiFilesDesc lmfd = work.getLoadMultiFilesWork();
if (lmfd != null) {
- Path destPath = new Path(lmfd.getTargetDir());
- FileSystem fs = destPath.getFileSystem(conf);
- if (!fs.exists(destPath)) {
- fs.mkdirs(destPath);
- }
boolean isDfsDir = lmfd.getIsDfsDir();
- for (String s: lmfd.getSourceDirs()) {
- Path srcPath = new Path(s);
- Path dstPath = new Path(destPath, srcPath.getName());
- moveFile(srcPath, dstPath, isDfsDir);
+ int i = 0;
+ while (i < lmfd.getSourceDirs().size()) {
+ Path srcPath = new Path(lmfd.getSourceDirs().get(i));
+ Path destPath = new Path(lmfd.getTargetDirs().get(i));
+ FileSystem fs = destPath.getFileSystem(conf);
+ if (!fs.exists(destPath.getParent())) {
+ fs.mkdirs(destPath.getParent());
+ }
+ moveFile(srcPath, destPath, isDfsDir);
+ i++;
}
}
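
Read on its own, the new loop pairs each source directory with its own destination instead of moving every source into one shared target dir. Below is a minimal standalone sketch of that pairwise logic, using java.nio.file in place of Hadoop's FileSystem API; the directory names are hypothetical, not taken from the patch.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;
import java.util.List;

public class PairwiseMoveSketch {
    public static void main(String[] args) throws IOException {
        // After the fix, sources.get(i) is moved to targets.get(i);
        // before it, all sources landed under a single target directory.
        List<Path> sources = Arrays.asList(
            Paths.get("/tmp/ext-10000/ds=2008-04-09/hr=11"));
        List<Path> targets = Arrays.asList(
            Paths.get("/tmp/warehouse/merge_dynamic_part/ds=2008-04-09/hr=11"));

        for (int i = 0; i < sources.size(); i++) {
            Path src = sources.get(i);
            Path dst = targets.get(i);
            Files.createDirectories(src);             // dummy source so the sketch runs as-is
            Files.createDirectories(dst.getParent()); // mirrors fs.mkdirs(destPath.getParent())
            Files.move(src, dst, StandardCopyOption.REPLACE_EXISTING);
            System.out.println(src + " -> " + dst);
        }
    }
}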
29 ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java
@@ -25,8 +25,6 @@
import java.util.List;
import java.util.Map;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -42,7 +40,6 @@
public class ConditionalResolverMergeFiles implements ConditionalResolver,
Serializable {
private static final long serialVersionUID = 1L;
- static final private Log LOG = LogFactory.getLog(ConditionalResolverMergeFiles.class.getName());
public ConditionalResolverMergeFiles() {
}
@@ -180,14 +177,35 @@ public void setDPCtx(DynamicPartitionCtx dp) {
// add the merge MR job
setupMapRedWork(conf, work, trgtSize, totalSz);
resTsks.add(mrTask);
-
+
// add the move task for those partitions that do not need merging
if (toMove.size() > 0) {
// modify the existing move task as it is already in the candidate running tasks
MoveWork mvWork = (MoveWork) mvTask.getWork();
LoadFileDesc lfd = mvWork.getLoadFileWork();
+
+ String targetDir = lfd.getTargetDir();
+ List<String> targetDirs = new ArrayList<String>(toMove.size());
+ int numDPCols = dpCtx.getNumDPCols();
+
+ for (int i = 0; i < toMove.size(); i++) {
+ String toMoveStr = toMove.get(i);
+ if (toMoveStr.endsWith(Path.SEPARATOR)) {
+ toMoveStr = toMoveStr.substring(0, toMoveStr.length() - 1);
+ }
+ String[] moveStrSplits = toMoveStr.split(Path.SEPARATOR);
+ int dpIndex = moveStrSplits.length - numDPCols;
+ String target = targetDir;
+ while (dpIndex < moveStrSplits.length) {
+ target = target + Path.SEPARATOR + moveStrSplits[dpIndex];
+ dpIndex++;
+ }
+
+ targetDirs.add(target);
+ }
+
LoadMultiFilesDesc lmfd = new LoadMultiFilesDesc(toMove,
- lfd.getTargetDir(), lfd.getIsDfsDir(), lfd.getColumns(), lfd.getColumnTypes());
+ targetDirs, lfd.getIsDfsDir(), lfd.getColumns(), lfd.getColumnTypes());
mvWork.setLoadFileWork(null);
mvWork.setLoadTableWork(null);
mvWork.setMultiFilesDesc(lmfd);
@@ -245,6 +263,7 @@ private long getMergeSize(FileSystem inpFs, Path dirPath, long avgSize) {
for (FileStatus fStat : fStats) {
totalSz += fStat.getLen();
}
+
if (totalSz < avgSize * fStats.length) {
return totalSz;
} else {
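
The target-directory derivation added above can be read in isolation: the last numDPCols path components of each to-be-moved source directory (the dynamic-partition specs, e.g. ds=2008-04-09/hr=11) are appended to the query's target directory, so every partition gets its own move destination. A self-contained sketch, assuming Path.SEPARATOR is "/"; paths and names are illustrative.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class TargetDirSketch {
    private static final String SEP = "/"; // stands in for Path.SEPARATOR

    // Appends the trailing numDPCols components of each source dir to targetDir.
    static List<String> deriveTargets(List<String> toMove, String targetDir, int numDPCols) {
        List<String> targetDirs = new ArrayList<String>(toMove.size());
        for (String dir : toMove) {
            if (dir.endsWith(SEP)) {
                dir = dir.substring(0, dir.length() - 1); // drop a trailing separator
            }
            String[] parts = dir.split(SEP);
            StringBuilder target = new StringBuilder(targetDir);
            // keep only the trailing dynamic-partition components
            for (int i = parts.length - numDPCols; i < parts.length; i++) {
                target.append(SEP).append(parts[i]);
            }
            targetDirs.add(target.toString());
        }
        return targetDirs;
    }

    public static void main(String[] args) {
        List<String> targets = deriveTargets(
            Arrays.asList("/scratch/-ext-10000/ds=2008-04-09/hr=11/"),
            "/warehouse/merge_dynamic_part", 2);
        // prints [/warehouse/merge_dynamic_part/ds=2008-04-09/hr=11]
        System.out.println(targets);
    }
}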
16 ql/src/java/org/apache/hadoop/hive/ql/plan/LoadMultiFilesDesc.java
@@ -27,7 +27,7 @@
*/
public class LoadMultiFilesDesc implements Serializable {
private static final long serialVersionUID = 1L;
- private String targetDir;
+ private List<String> targetDirs;
private boolean isDfsDir;
// list of columns, comma separated
private String columns;
@@ -37,19 +37,19 @@
public LoadMultiFilesDesc() {
}
- public LoadMultiFilesDesc(final List<String> sourceDirs, final String targetDir,
+ public LoadMultiFilesDesc(final List<String> sourceDirs, final List<String> targetDir,
final boolean isDfsDir, final String columns, final String columnTypes) {
this.srcDirs = sourceDirs;
- this.targetDir = targetDir;
+ this.targetDirs = targetDir;
this.isDfsDir = isDfsDir;
this.columns = columns;
this.columnTypes = columnTypes;
}
- @Explain(displayName = "destination")
- public String getTargetDir() {
- return targetDir;
+ @Explain(displayName = "destinations")
+ public List<String> getTargetDirs() {
+ return targetDirs;
}
@Explain(displayName = "sources")
@@ -61,8 +61,8 @@ public void setSourceDirs(List<String> srcs) {
this.srcDirs = srcs;
}
- public void setTargetDir(final String targetDir) {
- this.targetDir = targetDir;
+ public void setTargetDirs(final List<String> targetDir) {
+ this.targetDirs = targetDir;
}
@Explain(displayName = "hdfs directory")
38 ql/src/test/queries/clientpositive/merge_dynamic_partition3.q
@@ -0,0 +1,38 @@
+set hive.exec.dynamic.partition=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+
+create table srcpart_merge_dp like srcpart;
+
+create table merge_dynamic_part like srcpart;
+
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11);
+
+load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12);
+
+load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11);
+load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11);
+load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12);
+load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12);
+
+show partitions srcpart_merge_dp;
+
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.merge.mapfiles=true;
+set hive.merge.mapredfiles=true;
+set hive.merge.smallfiles.avgsize=3000;
+set hive.exec.compress.output=false;
+
+explain
+insert overwrite table merge_dynamic_part partition (ds, hr) select key, value, ds, hr from srcpart_merge_dp where ds>='2008-04-08';
+
+insert overwrite table merge_dynamic_part partition (ds, hr) select key, value, ds, hr from srcpart_merge_dp where ds>='2008-04-08';
+
+select ds, hr, count(1) from merge_dynamic_part where ds>='2008-04-08' group by ds, hr order by ds, hr;
+
+show table extended like `merge_dynamic_part`;
243 ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out
@@ -0,0 +1,243 @@
+PREHOOK: query: create table srcpart_merge_dp like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table srcpart_merge_dp like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@srcpart_merge_dp
+PREHOOK: query: create table merge_dynamic_part like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table merge_dynamic_part like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@merge_dynamic_part
+PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
+PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
+PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
+PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=11)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11
+PREHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
+PREHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
+PREHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
+PREHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-08', hr=12)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=11
+PREHOOK: query: load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=11)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=11
+PREHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv1.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=12
+PREHOOK: query: load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
+PREHOOK: type: LOAD
+POSTHOOK: query: load data local inpath '../data/files/kv2.txt' INTO TABLE srcpart_merge_dp partition(ds='2008-04-09', hr=12)
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-09/hr=12
+PREHOOK: query: show partitions srcpart_merge_dp
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions srcpart_merge_dp
+POSTHOOK: type: SHOWPARTITIONS
+ds=2008-04-08/hr=11
+ds=2008-04-08/hr=12
+ds=2008-04-09/hr=11
+ds=2008-04-09/hr=12
+PREHOOK: query: explain
+insert overwrite table merge_dynamic_part partition (ds, hr) select key, value, ds, hr from srcpart_merge_dp where ds>='2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+insert overwrite table merge_dynamic_part partition (ds, hr) select key, value, ds, hr from srcpart_merge_dp where ds>='2008-04-08'
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+ (TOK_QUERY (TOK_FROM (TOK_TABREF srcpart_merge_dp)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB merge_dynamic_part (TOK_PARTSPEC (TOK_PARTVAL ds) (TOK_PARTVAL hr)))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_TABLE_OR_COL ds)) (TOK_SELEXPR (TOK_TABLE_OR_COL hr))) (TOK_WHERE (>= (TOK_TABLE_OR_COL ds) '2008-04-08'))))
+
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3
+ Stage-4
+ Stage-0 depends on stages: Stage-4, Stage-3
+ Stage-2 depends on stages: Stage-0
+ Stage-3
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Alias -> Map Operator Tree:
+ srcpart_merge_dp
+ TableScan
+ alias: srcpart_merge_dp
+ Filter Operator
+ predicate:
+ expr: (ds >= '2008-04-08')
+ type: boolean
+ Select Operator
+ expressions:
+ expr: key
+ type: string
+ expr: value
+ type: string
+ expr: ds
+ type: string
+ expr: hr
+ type: string
+ outputColumnNames: _col0, _col1, _col2, _col3
+ File Output Operator
+ compressed: false
+ GlobalTableId: 1
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: merge_dynamic_part
+
+ Stage: Stage-5
+ Conditional Operator
+
+ Stage: Stage-4
+ Move Operator
+ files:
+ hdfs directory: true
+ destination: pfile:/Users/heyongqiang/Documents/workspace/Hive-3/build/ql/scratchdir/hive_2010-12-30_12-23-17_495_2252811203861954406/-ext-10000
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ partition:
+ ds
+ hr
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: merge_dynamic_part
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+ Stage: Stage-3
+ Map Reduce
+ Alias -> Map Operator Tree:
+ pfile:/Users/heyongqiang/Documents/workspace/Hive-3/build/ql/scratchdir/hive_2010-12-30_12-23-17_495_2252811203861954406/-ext-10002
+ File Output Operator
+ compressed: false
+ GlobalTableId: 0
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: merge_dynamic_part
+
+
+PREHOOK: query: insert overwrite table merge_dynamic_part partition (ds, hr) select key, value, ds, hr from srcpart_merge_dp where ds>='2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart_merge_dp@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart_merge_dp@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart_merge_dp@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart_merge_dp@ds=2008-04-09/hr=12
+PREHOOK: Output: default@merge_dynamic_part
+POSTHOOK: query: insert overwrite table merge_dynamic_part partition (ds, hr) select key, value, ds, hr from srcpart_merge_dp where ds>='2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart_merge_dp@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart_merge_dp@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@srcpart_merge_dp@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@srcpart_merge_dp@ds=2008-04-09/hr=12
+POSTHOOK: Output: default@merge_dynamic_part@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@merge_dynamic_part@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@merge_dynamic_part@ds=2008-04-09/hr=11
+POSTHOOK: Output: default@merge_dynamic_part@ds=2008-04-09/hr=12
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: select ds, hr, count(1) from merge_dynamic_part where ds>='2008-04-08' group by ds, hr order by ds, hr
+PREHOOK: type: QUERY
+PREHOOK: Input: default@merge_dynamic_part@ds=2008-04-08/hr=11
+PREHOOK: Input: default@merge_dynamic_part@ds=2008-04-08/hr=12
+PREHOOK: Input: default@merge_dynamic_part@ds=2008-04-09/hr=11
+PREHOOK: Input: default@merge_dynamic_part@ds=2008-04-09/hr=12
+PREHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-12-30_12-23-36_755_3232943744307821509/-mr-10000
+POSTHOOK: query: select ds, hr, count(1) from merge_dynamic_part where ds>='2008-04-08' group by ds, hr order by ds, hr
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@merge_dynamic_part@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@merge_dynamic_part@ds=2008-04-08/hr=12
+POSTHOOK: Input: default@merge_dynamic_part@ds=2008-04-09/hr=11
+POSTHOOK: Input: default@merge_dynamic_part@ds=2008-04-09/hr=12
+POSTHOOK: Output: file:/var/folders/6g/6grtCwPMEf4sqHUPpy6xQG9ByHg/-Tmp-/heyongqiang/hive_2010-12-30_12-23-36_755_3232943744307821509/-mr-10000
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+2008-04-08 11 500
+2008-04-08 12 500
+2008-04-09 11 1000
+2008-04-09 12 1000
+PREHOOK: query: show table extended like `merge_dynamic_part`
+PREHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: query: show table extended like `merge_dynamic_part`
+POSTHOOK: type: SHOW_TABLESTATUS
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=11).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=11).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: merge_dynamic_part PARTITION(ds=2008-04-09,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ]
+tableName:merge_dynamic_part
+owner:null
+location:pfile:/Users/heyongqiang/Documents/workspace/Hive-3/build/ql/test/data/warehouse/merge_dynamic_part
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+columns:struct columns { string key, string value}
+partitioned:true
+partitionColumns:struct partition_columns { string ds, string hr}
+totalNumberFiles:6
+totalFileSize:34830
+maxFileSize:5812
+minFileSize:5791
+lastAccessTime:0
+lastUpdateTime:1293740615000
+