From 3aa206aae97edd366fc6673b1be43fafd5632ad2 Mon Sep 17 00:00:00 2001 From: Anoop Sharma Date: Wed, 18 Apr 2018 17:07:11 +0000 Subject: [PATCH 1/3] JIRA TRAFODION-3027 Enhance jenkins checktests to include more tests. Details in jira. --- core/sqf/sql/scripts/ilh_regrinit | 5 + core/sqf/sql/scripts/ilh_trafinit | 6 +- core/sqf/sql/scripts/regrinit.sql | 18 + core/sql/regress/compGeneral/EXPECTEDTOK | 7 +- core/sql/regress/compGeneral/FILTERTOK | 31 + core/sql/regress/compGeneral/TESTTOK | 21 + core/sql/regress/compGeneral/TESTTOK.sh | 51 ++ core/sql/regress/core/EXPECTED000.SB | 8 +- core/sql/regress/core/TEST000 | 22 +- core/sql/regress/executor/EXPECTED013.SB | 2 +- core/sql/regress/executor/TEST013 | 2 +- core/sql/regress/executor/TEST063 | 1 + core/sql/regress/hive/EXPECTED009 | 56 +- core/sql/regress/hive/EXPECTED030 | 205 ++++++- core/sql/regress/hive/EXPECTED040 | 228 ++++++++ core/sql/regress/privs1/EXPECTED137 | 8 + core/sql/regress/privs1/TEST137 | 4 + core/sql/regress/privs1/TEST141 | 3 + core/sql/regress/tools/runallsb | 27 + core/sql/regress/tools/runregr | 29 + core/sql/regress/tools/runregr_charsets.ksh | 5 + core/sql/regress/tools/runregr_checktest1.ksh | 4 + core/sql/regress/tools/runregr_checktest2.ksh | 4 + core/sql/regress/tools/runregr_checktests.ksh | 528 ++++++++++++++++++ .../sql/regress/tools/runregr_compGeneral.ksh | 18 +- core/sql/regress/tools/runregr_core.ksh | 5 + core/sql/regress/tools/runregr_executor.ksh | 5 + core/sql/regress/tools/runregr_fullstack2.ksh | 5 + core/sql/regress/tools/runregr_hive.ksh | 9 +- core/sql/regress/tools/runregr_privs1.ksh | 5 + core/sql/regress/tools/runregr_privs2.ksh | 5 + core/sql/regress/tools/runregr_seabase.ksh | 5 + core/sql/regress/tools/runregr_udr.ksh | 5 + 33 files changed, 1274 insertions(+), 63 deletions(-) create mode 100755 core/sqf/sql/scripts/ilh_regrinit create mode 100755 core/sqf/sql/scripts/regrinit.sql create mode 100755 core/sql/regress/compGeneral/FILTERTOK create mode 100755 core/sql/regress/compGeneral/TESTTOK create mode 100755 core/sql/regress/compGeneral/TESTTOK.sh create mode 100755 core/sql/regress/tools/runregr_checktest1.ksh create mode 100755 core/sql/regress/tools/runregr_checktest2.ksh create mode 100755 core/sql/regress/tools/runregr_checktests.ksh diff --git a/core/sqf/sql/scripts/ilh_regrinit b/core/sqf/sql/scripts/ilh_regrinit new file mode 100755 index 0000000000..22ecdd3a1c --- /dev/null +++ b/core/sqf/sql/scripts/ilh_regrinit @@ -0,0 +1,5 @@ +echo "executing: sqlci -> regrinit.sql" +sqlci < initialize trafodion' +echo ' - initializes auth, registers users, creates regr schema and namespace' if [[ -z $1 ]]; then read -p 'Do you want to proceed? 
y/n : ' lv_ans @@ -96,10 +97,7 @@ sqlci < get schemas" -sqlci <&1 | sed -r 's/.+sqlparser\.y/sqlparser.y/' >LOGTOK; + +# extract a list of conflicts from the sqlparser.output file +awk '/State [0-9]+ conflicts:/ { printf "%06d ", $2; print } ' sqlparser.output | grep State | sed -r 's/ State [0-9]+//' >LOGTOK_conflicts +# extract a list of parser states (state number and first descriptive line) from the parser output file +awk '/^State 0$/,/untilthelastline/ { print }' sqlparser.output | awk '/^State [0-9]+$/ { printf "%06d ", $2; getline; getline; print }' >LOGTOK_gramm +# join the two extracted files on the state number (first 6 digits) +join LOGTOK_conflicts LOGTOK_gramm >LOGTOK_join +# replace state numbers with nnnn, so unrelated parser changes don't cause this test to fail +echo " " >>LOGTOK +cat LOGTOK_join | sed -r 's/^[0-9]+ conflicts/nnnn conflicts/' | sed -r 's/reduce [0-9]+/reduce nnnn/' >>LOGTOK + +# delete some of the larger output files produced (uncomment for debugging) +rm sqlparser.output sqlparser.tab.c; diff --git a/core/sql/regress/core/EXPECTED000.SB b/core/sql/regress/core/EXPECTED000.SB index e1a28dd72e..4d0b9598c7 100644 --- a/core/sql/regress/core/EXPECTED000.SB +++ b/core/sql/regress/core/EXPECTED000.SB @@ -1,8 +1,2 @@ ->> ->>upsert into TRAFODION."_MD_".DEFAULTS -+> values -+> ('SCHEMA ', 'TRAFODION.SCH ', 'inserted during seabase regressions run', 0); - ---- 1 row(s) inserted. ->> +>>-- not logging since setup operations may already have been done >>log; diff --git a/core/sql/regress/core/TEST000 b/core/sql/regress/core/TEST000 index eb24d0bca5..9e846ddc6b 100755 --- a/core/sql/regress/core/TEST000 +++ b/core/sql/regress/core/TEST000 @@ -22,25 +22,7 @@ -- log LOG000 clear; - -upsert into TRAFODION."_MD_".DEFAULTS - values - ('SCHEMA ', 'TRAFODION.SCH ', 'inserted during seabase regressions run', 0); - +-- not logging since setup operations may already have been done log; --- not logging it since it may or may not exist -create shared schema trafodion.sch; - -initialize authorization; -register user sql_user1 as sql_user1; -register user sql_user2 as sql_user2; -register user sql_user3 as sql_user3; -register user sql_user4 as sql_user4; -register user sql_user5 as sql_user5; -register user sql_user6 as sql_user6; -register user sql_user7 as sql_user7; -register user sql_user8 as sql_user8; -register user sql_user9 as sql_user9; -register user sql_user10 as sql_user10; - +obey $$TRAF_HOME$$/sql/scripts/regrinit.sql; diff --git a/core/sql/regress/executor/EXPECTED013.SB b/core/sql/regress/executor/EXPECTED013.SB index bb94d7572c..17bc30a8f9 100644 --- a/core/sql/regress/executor/EXPECTED013.SB +++ b/core/sql/regress/executor/EXPECTED013.SB @@ -493,7 +493,7 @@ CONTROL QUERY DEFAULT --- SQL operation failed with errors. >> ->>create schema trafodion.t013sch1; +>>create schema if not exists trafodion.t013sch1; --- SQL operation complete. 
>>set schema trafodion.t013sch1; diff --git a/core/sql/regress/executor/TEST013 b/core/sql/regress/executor/TEST013 index 34b9a8896d..e29a668710 100755 --- a/core/sql/regress/executor/TEST013 +++ b/core/sql/regress/executor/TEST013 @@ -213,7 +213,7 @@ drop volatile table t013t1; create volatile index tempi on t013t1(a); drop volatile index tempi; -create schema trafodion.t013sch1; +create schema if not exists trafodion.t013sch1; set schema trafodion.t013sch1; create volatile table t013t1 (a int); diff --git a/core/sql/regress/executor/TEST063 b/core/sql/regress/executor/TEST063 index 9e3dc4bd39..a8199bb173 100755 --- a/core/sql/regress/executor/TEST063 +++ b/core/sql/regress/executor/TEST063 @@ -48,6 +48,7 @@ -- returns unordered results because of a parallel plan. Until we can resolve -- this issue, let's run test063 using robust_query_optimization minimum control query default robust_query_optimization 'minimum'; +cqd traf_object_encryption ''; ?section cleanup #ifMX diff --git a/core/sql/regress/hive/EXPECTED009 b/core/sql/regress/hive/EXPECTED009 index a59e5dd100..f89d8efa20 100644 --- a/core/sql/regress/hive/EXPECTED009 +++ b/core/sql/regress/hive/EXPECTED009 @@ -214,7 +214,7 @@ A B C >>invoke t009hivecust1; -- Definition of Trafodion table TRAFODION.HIVE_T009.T009HIVECUST1 --- Definition current Tue Sep 5 08:23:44 2017 +-- Definition current Tue Apr 10 12:50:07 2018 ( SYSKEY LARGEINT NO DEFAULT NOT NULL NOT DROPPABLE @@ -263,7 +263,7 @@ A B C >>invoke t009hivecust2; -- Definition of Trafodion table TRAFODION.HIVE_T009.T009HIVECUST2 --- Definition current Tue Sep 5 08:23:55 2017 +-- Definition current Tue Apr 10 12:50:20 2018 ( SYSKEY LARGEINT NO DEFAULT NOT NULL NOT DROPPABLE @@ -434,7 +434,7 @@ T009T2 >>invoke hive.sch_t009.t009t1; -- Definition of hive table T009T1 --- Definition current Tue Sep 5 08:25:19 2017 +-- Definition current Tue Apr 10 12:51:54 2018 ( A INT @@ -516,7 +516,7 @@ ROW_ID COLS >>invoke bblike1; -- Definition of Trafodion table TRAFODION.HIVE_T009.BBLIKE1 --- Definition current Tue Sep 5 08:25:34 2017 +-- Definition current Tue Apr 10 12:52:10 2018 ( ROW_ID VARCHAR(100) CHARACTER SET ISO88591 @@ -533,7 +533,7 @@ ROW_ID COLS >>invoke bblike2; -- Definition of Trafodion table TRAFODION.HIVE_T009.BBLIKE2 --- Definition current Tue Sep 5 08:25:40 2017 +-- Definition current Tue Apr 10 12:52:16 2018 ( ROW_ID VARCHAR(100) CHARACTER SET ISO88591 @@ -572,7 +572,7 @@ ROW_ID COLS >>invoke hive.hive.store_sales; -- Definition of hive table STORE_SALES --- Definition current Tue Sep 5 08:25:49 2017 +-- Definition current Tue Apr 10 12:52:22 2018 ( SS_SOLD_DATE_SK INT @@ -616,7 +616,7 @@ ROW_ID COLS ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME ............ ### STATEMENT_NAME ........... S -PLAN_ID ................ 212356315820604834 +PLAN_ID ................ ### ROWS_OUT ............... ### EST_TOTAL_COST ......... ### STATEMENT ................ select * from store_sales where ss_item_sk = 1; @@ -629,7 +629,7 @@ ROWS_OUT ............... ### EST_OPER_COST .......... ### EST_TOTAL_COST ......... ### DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB + est_memory_per_node ### max_card_est ......... ### fragment_id ............ 0 parent_frag ............ (none) @@ -643,6 +643,14 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... 
ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON HIVE_MAX_STRING_LENGTH 20 SHOWCONTROL_SHOW_ALL ... OFF SCHEMA ................. HIVE.HIVE @@ -761,6 +769,14 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON HIVE_MAX_STRING_LENGTH 20 SHOWCONTROL_SHOW_ALL ... OFF SCHEMA ................. HIVE.HIVE @@ -916,7 +932,7 @@ DESCRIPTION >>invoke hive.hive.date_dim; -- Definition of hive table DATE_DIM --- Definition current Tue Sep 5 08:25:55 2017 +-- Definition current Tue Apr 10 12:52:29 2018 ( D_DATE_SK INT @@ -1060,7 +1076,7 @@ CREATE EXTERNAL TABLE DATE_DIM ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME ............ ### STATEMENT_NAME ........... S -PLAN_ID ................ 212356315833683208 +PLAN_ID ................ ### ROWS_OUT ............... ### EST_TOTAL_COST ......... ### STATEMENT ................ select * @@ -1075,7 +1091,7 @@ ROWS_OUT ............... ### EST_OPER_COST .......... ### EST_TOTAL_COST ......... ### DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB + est_memory_per_node ### max_card_est ......... ### fragment_id ............ 0 parent_frag ............ (none) @@ -1089,6 +1105,14 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON HIVE_MAX_STRING_LENGTH 20 SHOWCONTROL_SHOW_ALL ... OFF SCHEMA ................. TRAFODION.SCH @@ -1150,7 +1174,7 @@ DESCRIPTION >>invoke hive.hive.date_dim; -- Definition of hive table DATE_DIM --- Definition current Tue Sep 5 08:26:10 2017 +-- Definition current Tue Apr 10 12:52:50 2018 ( D_DATE_SK INT @@ -1307,6 +1331,14 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON HIVE_MAX_STRING_LENGTH 20 SHOWCONTROL_SHOW_ALL ... OFF SCHEMA ................. TRAFODION.SCH diff --git a/core/sql/regress/hive/EXPECTED030 b/core/sql/regress/hive/EXPECTED030 index b7a4148dff..ad53d988d9 100644 --- a/core/sql/regress/hive/EXPECTED030 +++ b/core/sql/regress/hive/EXPECTED030 @@ -50,7 +50,11 @@ >>invoke hive.hive.store_orc; -- Definition of hive table STORE_ORC +<<<<<<< HEAD -- Definition current Wed Nov 29 02:19:58 2017 +======= +-- Definition current Sun Jun 11 11:09:19 2017 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ( S_STORE_SK INT @@ -247,7 +251,11 @@ Suite 100 College MASTER Executor fragment ======================== +<<<<<<< HEAD Fragment ID: 0, Length: 58688 +======= +Fragment ID: 0, Length: 17144 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
Contents of EX_ROOT [2]: ------------------------ @@ -271,7 +279,11 @@ rtFlags5_ = 0 queryType_ = 2 inputVarsSize_ = 0 querySimilarityInfo()->siList()->numEntries() = 1 +<<<<<<< HEAD explain_plan_size = 3088 +======= +explain_plan_size = 3184 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 4 @@ -346,8 +358,13 @@ ColNumber: 26, ColName: S_ZIP ColNumber: 27, ColName: S_COUNTRY ColNumber: 28, ColName: S_GMT_OFFSET ColNumber: 29, ColName: S_TAX_PRECENTAGE +<<<<<<< HEAD hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_orc modTSforDir_ = 1511634123, numOfPartCols_ = 0 +======= +hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_orc +modTSforDir_ = 1496437232, numOfPartCols_ = 0 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 6 @@ -409,7 +426,11 @@ rtFlags5_ = 0 queryType_ = 2 inputVarsSize_ = 0 querySimilarityInfo()->siList()->numEntries() = 1 +<<<<<<< HEAD explain_plan_size = 3080 +======= +explain_plan_size = 3176 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 4 @@ -456,8 +477,13 @@ ESP 0 reads 2853 bytes ( 100 percent of avg) Number of columns to retrieve: 1 ColNumber: 1, ColName: S_STORE_SK +<<<<<<< HEAD hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_orc modTSforDir_ = 1511634123, numOfPartCols_ = 0 +======= +hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_orc +modTSforDir_ = 1496437232, numOfPartCols_ = 0 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 6 @@ -565,7 +591,11 @@ rtFlags5_ = 0 queryType_ = 2 inputVarsSize_ = 0 querySimilarityInfo()->siList()->numEntries() = 1 +<<<<<<< HEAD explain_plan_size = 3176 +======= +explain_plan_size = 3272 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 4 @@ -612,8 +642,13 @@ ESP 0 reads 2853 bytes ( 100 percent of avg) Number of columns to retrieve: 1 ColNumber: 1, ColName: S_STORE_SK +<<<<<<< HEAD hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_orc modTSforDir_ = 1511634123, numOfPartCols_ = 0 +======= +hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_orc +modTSforDir_ = 1496437232, numOfPartCols_ = 0 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 6 @@ -677,7 +712,11 @@ S_STORE_SK MASTER Executor fragment ======================== +<<<<<<< HEAD Fragment ID: 0, Length: 19144 +======= +Fragment ID: 0, Length: 19984 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. Contents of EX_ROOT [2]: ------------------------ @@ -701,7 +740,11 @@ rtFlags5_ = 0 queryType_ = 2 inputVarsSize_ = 0 querySimilarityInfo()->siList()->numEntries() = 1 +<<<<<<< HEAD explain_plan_size = 3264 +======= +explain_plan_size = 3368 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 4 @@ -748,8 +791,13 @@ ESP 0 reads 2853 bytes ( 100 percent of avg) Number of columns to retrieve: 1 ColNumber: 1, ColName: S_STORE_SK +<<<<<<< HEAD hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_orc modTSforDir_ = 1511634123, numOfPartCols_ = 0 +======= +hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_orc +modTSforDir_ = 1496437232, numOfPartCols_ = 0 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
Number of PPI entries: 3 PPI: #1 @@ -779,7 +827,11 @@ Expression: extOperExpr_ is not NULL ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... NOT NAMED +<<<<<<< HEAD PLAN_ID .................. 212378682018129421 +======= +PLAN_ID .................. 212363939384659273 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ROWS_OUT ......... 1,440,202 EST_TOTAL_COST ......... 191.55 STATEMENT ................ select * @@ -811,9 +863,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. SCHEMA ................. HIVE.HIVE HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON @@ -900,7 +963,11 @@ DESCRIPTION ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... NOT NAMED +<<<<<<< HEAD PLAN_ID .................. 212378682018429456 +======= +PLAN_ID .................. 212363939386438156 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ROWS_OUT ................. 1 EST_TOTAL_COST ........... 0 STATEMENT ................ select * from store_sales_orc where 1=2; @@ -929,9 +996,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. SCHEMA ................. HIVE.HIVE HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON @@ -1221,7 +1299,11 @@ NUM_HITS NUM_PARAMS >>invoke hive.hive.store_sales_orc; -- Definition of hive table STORE_SALES_ORC +<<<<<<< HEAD -- Definition current Wed Nov 29 02:20:31 2017 +======= +-- Definition current Sun Jun 11 11:10:08 2017 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ( SS_SOLD_DATE_SK INT @@ -1263,7 +1345,11 @@ NUM_HITS NUM_PARAMS ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... S +<<<<<<< HEAD PLAN_ID .................. 212378682032138196 +======= +PLAN_ID .................. 212363939411863426 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ROWS_OUT ............. 1,698 EST_TOTAL_COST ......... 191.52 STATEMENT ................ select * from store_sales_orc where ss_item_sk = 1; @@ -1293,9 +1379,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... 
ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON TRAF_ENABLE_ORC_FORMAT ON @@ -1405,7 +1502,11 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... S +<<<<<<< HEAD PLAN_ID .................. 212378682032886873 +======= +PLAN_ID .................. 212363939416859647 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ROWS_OUT ......... 2,880,404 EST_TOTAL_COST 23,225,420 STATEMENT ................ select * @@ -1438,9 +1539,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON TRAF_ENABLE_ORC_FORMAT ON @@ -1660,7 +1772,11 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD >>invoke hive.hive.date_dim_orc; -- Definition of hive table DATE_DIM_ORC +<<<<<<< HEAD -- Definition current Wed Nov 29 02:20:37 2017 +======= +-- Definition current Sun Jun 11 11:10:29 2017 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ( D_DATE_SK INT @@ -1743,6 +1859,12 @@ CREATE TABLE DEFAULT.DATE_DIM_ORC stored as orc ; +<<<<<<< HEAD +======= +REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_ORC; +/* ObjectUID = 3064268403396931736 */ + +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. /* Trafodion DDL */ REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_ORC; @@ -1802,7 +1924,11 @@ CREATE EXTERNAL TABLE DATE_DIM_ORC ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... S +<<<<<<< HEAD PLAN_ID .................. 212378682040442348 +======= +PLAN_ID .................. 212363939435356144 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ROWS_OUT ............... 271 EST_TOTAL_COST ........... 0.66 STATEMENT ................ select * @@ -1833,9 +1959,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... 
ON TRAF_ENABLE_ORC_FORMAT ON @@ -1913,7 +2050,11 @@ DESCRIPTION >>invoke hive.hive.date_dim_orc; -- Definition of hive table DATE_DIM_ORC +<<<<<<< HEAD -- Definition current Wed Nov 29 02:20:44 2017 +======= +-- Definition current Sun Jun 11 11:10:50 2017 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ( D_DATE_SK INT @@ -1996,6 +2137,12 @@ CREATE TABLE DEFAULT.DATE_DIM_ORC stored as orc ; +<<<<<<< HEAD +======= +REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_ORC; +/* ObjectUID = 3064268403396931736 */ + +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. /* Trafodion DDL */ REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_ORC; @@ -2055,7 +2202,11 @@ CREATE EXTERNAL TABLE DATE_DIM_ORC ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... S +<<<<<<< HEAD PLAN_ID .................. 212378682040442348 +======= +PLAN_ID .................. 212363939456056239 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ROWS_OUT ............... 271 EST_TOTAL_COST ........... 0.66 STATEMENT ................ select * @@ -2086,9 +2237,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON TRAF_ENABLE_ORC_FORMAT ON @@ -2346,9 +2508,15 @@ and( and( and( and( and( not( HIVE.STORE2_SALES_ORC.SS_SOLD_DATE_SK is null ) no ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... XX +<<<<<<< HEAD PLAN_ID .................. 212378682095271698 ROWS_OUT ................. 1 EST_TOTAL_COST .......... 17.46 +======= +PLAN_ID .................. 212363939559855112 +ROWS_OUT ................. 1 +EST_TOTAL_COST .......... 18.16 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. STATEMENT ................ select count(*) from hive.hive.customer_orc, hive.hive.store_sales_sorted_orc @@ -2361,7 +2529,11 @@ ROOT ====================================== SEQ_NO 8 ONLY CHILD 7 REQUESTS_IN .............. 1 ROWS_OUT ................. 1 EST_OPER_COST ............ 0 +<<<<<<< HEAD EST_TOTAL_COST .......... 17.46 +======= +EST_TOTAL_COST .......... 18.16 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. DESCRIPTION est_memory_per_node .... 10240.00(Limit), 0.13(BMOs), 0.00(nBMOs) MB max_card_est ........... 1 @@ -2381,9 +2553,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON TRAF_ENABLE_ORC_FORMAT ON @@ -2408,7 +2591,11 @@ SORT_PARTIAL_AGGR_ROOT ==================== SEQ_NO 7 ONLY CHILD 6 REQUESTS_IN .............. 1 ROWS_OUT ................. 1 EST_OPER_COST ............ 0.01 +<<<<<<< HEAD EST_TOTAL_COST .......... 17.46 +======= +EST_TOTAL_COST .......... 18.16 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. DESCRIPTION max_card_est ........... 1 fragment_id ............ 0 @@ -2422,7 +2609,11 @@ ESP_EXCHANGE ============================== SEQ_NO 6 ONLY CHILD 5 REQUESTS_IN .............. 1 ROWS_OUT ................. 1 EST_OPER_COST ............ 0.01 +<<<<<<< HEAD EST_TOTAL_COST .......... 17.46 +======= +EST_TOTAL_COST .......... 18.16 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. DESCRIPTION max_card_est ........... 1 fragment_id ............ 2 @@ -2440,7 +2631,11 @@ SORT_PARTIAL_AGGR_LEAF ==================== SEQ_NO 5 ONLY CHILD 4 REQUESTS_IN .............. 1 ROWS_OUT ................. 1 EST_OPER_COST ............ 0.01 +<<<<<<< HEAD EST_TOTAL_COST .......... 17.46 +======= +EST_TOTAL_COST .......... 18.16 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. DESCRIPTION max_card_est ........... 1 fragment_id ............ 2 @@ -2454,7 +2649,11 @@ HYBRID_HASH_JOIN ========================== SEQ_NO 4 CHILDREN 3, 2 REQUESTS_IN .............. 1 ROWS_OUT ......... 2,880,404 EST_OPER_COST ............ 0.02 +<<<<<<< HEAD EST_TOTAL_COST .......... 17.46 +======= +EST_TOTAL_COST .......... 18.16 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. DESCRIPTION memory_quota_per_instan 800 MB max_card_est ........... 2.8804e+06 @@ -2510,7 +2709,7 @@ ESP_EXCHANGE ============================== SEQ_NO 2 ONLY CHILD 1 REQUESTS_IN .............. 1 ROWS_OUT ............... 317 EST_OPER_COST ............ 0.01 -EST_TOTAL_COST ........... 0.7 +EST_TOTAL_COST ........... 1.4 DESCRIPTION max_card_est ...... 95,001 fragment_id ............ 3 @@ -2529,8 +2728,8 @@ ORC_SCAN ================================== SEQ_NO 1 NO CHILDREN TABLE_NAME ............... HIVE.HIVE.CUSTOMER_ORC REQUESTS_IN .............. 1 ROWS_OUT ............... 317 -EST_OPER_COST ............ 0.7 -EST_TOTAL_COST ........... 0.7 +EST_OPER_COST ............ 1.4 +EST_TOTAL_COST ........... 1.4 DESCRIPTION max_card_est ...... 95,001 fragment_id ............ 
3 diff --git a/core/sql/regress/hive/EXPECTED040 b/core/sql/regress/hive/EXPECTED040 index bb75200e29..39d0277a85 100644 --- a/core/sql/regress/hive/EXPECTED040 +++ b/core/sql/regress/hive/EXPECTED040 @@ -47,8 +47,69 @@ >>obey TEST040(tests); >>-------------------------------------------------------------------------- >>-- PARQUET file metadata info +<<<<<<< HEAD >>--invoke hive.hive.store_parquet; >>-- +======= +>>invoke hive.hive.store_parquet; + +-- Definition of hive table STORE_PARQUET +-- Definition current Sun Jun 11 11:28:44 2017 + + ( + S_STORE_SK INT + , S_STORE_ID VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_REC_START_DATE TIMESTAMP(6) + , S_REC_END_DATE TIMESTAMP(6) + , S_CLOSED_DATE_SK INT + , S_STORE_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_NUMBER_EMPLOYEES INT + , S_FLOOR_SPACE INT + , S_HOURS VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_MANAGER VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_MARKET_ID INT + , S_GEOGRAPHY_CLASS VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_MARKET_DESC VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_MARKET_MANAGER VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_DIVISION_ID INT + , S_DIVISION_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_COMPANY_ID INT + , S_COMPANY_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_STREET_NUMBER VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_STREET_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_STREET_TYPE VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_SUITE_NUMBER VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_CITY VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_COUNTY VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_STATE VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_ZIP VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_COUNTRY VARCHAR(20 BYTES) CHARACTER SET UTF8 + COLLATE DEFAULT + , S_GMT_OFFSET REAL + , S_TAX_PRECENTAGE REAL + ) + /* stored as parquet */ + +--- SQL operation complete. +>> +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. >>-- select one row from PARQUET table >>select [first 1] * from hive.hive.store_parquet; @@ -197,7 +258,11 @@ Suite 100 College MASTER Executor fragment ======================== +<<<<<<< HEAD Fragment ID: 0, Length: 58712 +======= +Fragment ID: 0, Length: 17152 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. Contents of EX_ROOT [2]: ------------------------ @@ -221,7 +286,11 @@ rtFlags5_ = 0 queryType_ = 2 inputVarsSize_ = 0 querySimilarityInfo()->siList()->numEntries() = 1 +<<<<<<< HEAD explain_plan_size = 3112 +======= +explain_plan_size = 3208 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 4 @@ -296,8 +365,13 @@ ColNumber: 26, ColName: S_ZIP ColNumber: 27, ColName: S_COUNTRY ColNumber: 28, ColName: S_GMT_OFFSET ColNumber: 29, ColName: S_TAX_PRECENTAGE +<<<<<<< HEAD hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_parquet modTSforDir_ = 1511634536, numOfPartCols_ = 0 +======= +hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_parquet +modTSforDir_ = 1496437675, numOfPartCols_ = 0 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
# of Expressions = 6 @@ -359,7 +433,11 @@ rtFlags5_ = 0 queryType_ = 2 inputVarsSize_ = 0 querySimilarityInfo()->siList()->numEntries() = 1 +<<<<<<< HEAD explain_plan_size = 3104 +======= +explain_plan_size = 3208 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 4 @@ -406,8 +484,13 @@ ESP 0 reads 3681 bytes ( 100 percent of avg) Number of columns to retrieve: 1 ColNumber: 1, ColName: S_STORE_SK +<<<<<<< HEAD hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_parquet modTSforDir_ = 1511634536, numOfPartCols_ = 0 +======= +hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_parquet +modTSforDir_ = 1496437675, numOfPartCols_ = 0 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 6 @@ -515,7 +598,11 @@ rtFlags5_ = 0 queryType_ = 2 inputVarsSize_ = 0 querySimilarityInfo()->siList()->numEntries() = 1 +<<<<<<< HEAD explain_plan_size = 3208 +======= +explain_plan_size = 3320 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 4 @@ -562,8 +649,13 @@ ESP 0 reads 3681 bytes ( 100 percent of avg) Number of columns to retrieve: 1 ColNumber: 1, ColName: S_STORE_SK +<<<<<<< HEAD hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_parquet modTSforDir_ = 1511634536, numOfPartCols_ = 0 +======= +hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_parquet +modTSforDir_ = 1496437675, numOfPartCols_ = 0 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 6 @@ -627,7 +719,11 @@ S_STORE_SK MASTER Executor fragment ======================== +<<<<<<< HEAD Fragment ID: 0, Length: 17104 +======= +Fragment ID: 0, Length: 15584 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. Contents of EX_ROOT [2]: ------------------------ @@ -651,7 +747,11 @@ rtFlags5_ = 0 queryType_ = 2 inputVarsSize_ = 0 querySimilarityInfo()->siList()->numEntries() = 1 +<<<<<<< HEAD explain_plan_size = 3240 +======= +explain_plan_size = 3352 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. # of Expressions = 4 @@ -696,10 +796,16 @@ Summary of bytes read per ESP (3681 = 100 percent): ESP 0 reads 3681 bytes ( 100 percent of avg) +<<<<<<< HEAD Number of columns to retrieve: 1 ColNumber: 1, ColName: S_STORE_SK hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_parquet modTSforDir_ = 1511634536, numOfPartCols_ = 0 +======= +Number of columns to retrieve: 0 +hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_parquet +modTSforDir_ = 1496437675, numOfPartCols_ = 0 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. Number of PPI entries: 3 PPI: #1 @@ -729,9 +835,15 @@ Expression: extOperExpr_ is not NULL ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... NOT NAMED +<<<<<<< HEAD PLAN_ID .................. 212378573691398125 ROWS_OUT ......... 1,440,202 EST_TOTAL_COST ......... 234.75 +======= +PLAN_ID .................. 212363940580483637 +ROWS_OUT ........ 81,136,280 +EST_TOTAL_COST ...... 13,852.32 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. STATEMENT ................ select * from store_sales_parquet where ss_sold_date_sk = ss_item_sk; @@ -762,9 +874,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... 
ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. SCHEMA ................. HIVE.HIVE HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON @@ -851,7 +974,11 @@ DESCRIPTION ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... NOT NAMED +<<<<<<< HEAD PLAN_ID .................. 212378573691753629 +======= +PLAN_ID .................. 212363940580884702 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ROWS_OUT ................. 1 EST_TOTAL_COST ........... 0 STATEMENT ................ select * from store_sales_parquet where 1=2; @@ -881,9 +1008,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. SCHEMA ................. HIVE.HIVE HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON @@ -1174,7 +1312,11 @@ NUM_HITS NUM_PARAMS >>invoke hive.hive.store_sales_parquet; -- Definition of hive table STORE_SALES_PARQUET +<<<<<<< HEAD -- Definition current Mon Nov 27 20:15:06 2017 +======= +-- Definition current Sun Jun 11 11:30:31 2017 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ( SS_SOLD_DATE_SK INT @@ -1216,9 +1358,15 @@ NUM_HITS NUM_PARAMS ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... S +<<<<<<< HEAD PLAN_ID .................. 212378573706907158 ROWS_OUT ............. 1,698 EST_TOTAL_COST ......... 234.71 +======= +PLAN_ID .................. 212363940631673658 +ROWS_OUT ............ 12,739 +EST_TOTAL_COST ...... 13,850.42 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. STATEMENT ................ select * from store_sales_parquet where ss_item_sk = 1; @@ -1249,9 +1397,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... 
ON TRAF_ENABLE_PARQUET_FOR ON @@ -1432,7 +1591,11 @@ LC RC OP OPERATOR OPT DESCRIPTION CARD >>invoke hive.hive.date_dim_parquet; -- Definition of hive table DATE_DIM_PARQUET +<<<<<<< HEAD -- Definition current Mon Nov 27 20:15:11 2017 +======= +-- Definition current Sun Jun 11 11:30:49 2017 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ( D_DATE_SK INT @@ -1515,6 +1678,12 @@ CREATE TABLE DEFAULT.DATE_DIM_PARQUET stored as parquet ; +<<<<<<< HEAD +======= +REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_PARQUET; +/* ObjectUID = 563644711473585265 */ + +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. /* Trafodion DDL */ REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_PARQUET; @@ -1574,9 +1743,15 @@ CREATE EXTERNAL TABLE DATE_DIM_PARQUET ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... S +<<<<<<< HEAD PLAN_ID .................. 212378573717690486 ROWS_OUT ............... 271 EST_TOTAL_COST ......... 244.13 +======= +PLAN_ID .................. 212363940664424812 +ROWS_OUT ............. 1,978 +EST_TOTAL_COST ......... 477.06 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. STATEMENT ................ select * from hive.hive.date_dim_parquet where d_date = date '2016-01-27'; @@ -1606,9 +1781,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON TRAF_ENABLE_PARQUET_FOR ON @@ -1687,7 +1873,11 @@ DESCRIPTION >>invoke hive.hive.date_dim_parquet; -- Definition of hive table DATE_DIM_PARQUET +<<<<<<< HEAD -- Definition current Mon Nov 27 20:15:28 2017 +======= +-- Definition current Sun Jun 11 11:31:24 2017 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ( D_DATE_SK INT @@ -1770,6 +1960,12 @@ CREATE TABLE DEFAULT.DATE_DIM_PARQUET stored as parquet ; +<<<<<<< HEAD +======= +REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_PARQUET; +/* ObjectUID = 563644711473585265 */ + +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. /* Trafodion DDL */ REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_PARQUET; @@ -1829,9 +2025,15 @@ CREATE EXTERNAL TABLE DATE_DIM_PARQUET ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... S +<<<<<<< HEAD PLAN_ID .................. 212378573717690486 ROWS_OUT ............... 271 EST_TOTAL_COST ......... 244.13 +======= +PLAN_ID .................. 212363940698868891 +ROWS_OUT ............. 1,978 +EST_TOTAL_COST ......... 477.06 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. STATEMENT ................ select * from hive.hive.date_dim_parquet where d_date = date '2016-01-27'; @@ -1861,9 +2063,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 
2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON TRAF_ENABLE_PARQUET_FOR ON @@ -2122,7 +2335,11 @@ and( and( and( and( and( not( HIVE.STORE2_SALES_PARQUET.SS_SOLD_DATE_SK is null ------------------------------------------------------------------ PLAN SUMMARY MODULE_NAME .............. DYNAMICALLY COMPILED STATEMENT_NAME ........... XX +<<<<<<< HEAD PLAN_ID .................. 212378573801436592 +======= +PLAN_ID .................. 212363940801861384 +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. ROWS_OUT ................. 1 EST_TOTAL_COST .......... 23.94 STATEMENT ................ select count(*) @@ -2158,9 +2375,20 @@ DESCRIPTION embedded_arkcmp ........ used IS_SQLCI ............... ON LDAP_USERNAME .......... NOT AVAILABLE +<<<<<<< HEAD HBASE_FILTER_PREDS ..... 2 TRAF_INDEX_CREATE_OPT ON TRAF_USE_REGION_XN ..... ON +======= + MODE_SEABASE ........... ON + SEABASE_VOLATILE_TABLES ON + HBASE_ASYNC_DROP_TABLE OFF + HBASE_SERIALIZATION .... ON + HBASE_SMALL_SCANNER .... SYSTEM + HBASE_FILTER_PREDS ..... 2 + TRAF_ALIGNED_ROW_FORMAT ON + TRAF_INDEX_CREATE_OPT ON +>>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. HIVE_MAX_STRING_LENGTH 20 MODE_SEAHIVE ........... ON TRAF_ENABLE_PARQUET_FOR ON diff --git a/core/sql/regress/privs1/EXPECTED137 b/core/sql/regress/privs1/EXPECTED137 index c2f3d92941..aaf8eebeac 100755 --- a/core/sql/regress/privs1/EXPECTED137 +++ b/core/sql/regress/privs1/EXPECTED137 @@ -1,4 +1,8 @@ >>obey TEST137(set_up); +>>grant component privilege "SHOW" on sql_operations to "PUBLIC"; + +--- SQL operation complete. +>> >>prepare get_roles from +>select substring (auth_db_name,1,20) as role_name +>from "_MD_".AUTHS @@ -2000,6 +2004,10 @@ drop component privilege lib_view_benefactors cascade; *** ERROR[1008] Authorization identifier LIB_ROLE_TEST does not exist. --- SQL operation failed with errors. +>>revoke component privilege "SHOW" on sql_operations from "PUBLIC"; + +--- SQL operation complete. +>> >>revoke role lib_role_test from sql_user5; *** ERROR[1338] Role LIB_ROLE_TEST is not defined in the database. 
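The EXPECTED137 hunk above, and the TEST137 hunk that follows, bracket the test with a grant of the "SHOW" component privilege to "PUBLIC" in the set_up section and a matching revoke in the cleanup section, presumably so that the non-root users the test switches to can run SHOW-class operations and so that a rerun starts from a clean privilege state. A minimal sqlci-style sketch of that bracketing pattern is shown here; the run_as_user section and its comment are illustrative placeholders, while the grant and revoke statements are the ones the patch adds to TEST137:

?section set_up
-- allow every authorization ID to perform SHOW operations for the duration of the test
grant component privilege "SHOW" on sql_operations to "PUBLIC";

?section run_as_user
-- hypothetical step: statements issued here by a non-privileged test user
-- rely on the PUBLIC grant made in set_up

?section cleanup
-- undo the set_up grant so repeated runs remain idempotent
revoke component privilege "SHOW" on sql_operations from "PUBLIC";
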
diff --git a/core/sql/regress/privs1/TEST137 b/core/sql/regress/privs1/TEST137 index 9654fe430a..c287d50530 100755 --- a/core/sql/regress/privs1/TEST137 +++ b/core/sql/regress/privs1/TEST137 @@ -108,6 +108,8 @@ revoke role library_ckout_clerks from sql_user3, sql_user4, sql_user5; drop role library_ckout_clerks; revoke component privilege lib_view_checkouts on library_books from lib_role_test; +revoke component privilege "SHOW" on sql_operations from "PUBLIC"; + revoke role lib_role_test from sql_user5; revoke role db__rootrole from sql_user5; drop role lib_role_test; @@ -120,6 +122,8 @@ revoke select on "_PRIVMGR_MD_".component_privileges from md_access; drop role md_access; ?section set_up +grant component privilege "SHOW" on sql_operations to "PUBLIC"; + prepare get_roles from select substring (auth_db_name,1,20) as role_name from "_MD_".AUTHS diff --git a/core/sql/regress/privs1/TEST141 b/core/sql/regress/privs1/TEST141 index 397c9d145d..8abf62712a 100755 --- a/core/sql/regress/privs1/TEST141 +++ b/core/sql/regress/privs1/TEST141 @@ -63,6 +63,9 @@ drop role t141_role1; drop role t141_role2; drop role t141_role3; +--revoke component privilege "CREATE" on sql_operations from user2_role; +--revoke component privilege "SHOW" on sql_operations from "PUBLIC"; + revoke select on "_PRIVMGR_MD_".object_Privileges from t141_role_md; revoke select on "_PRIVMGR_MD_".column_Privileges from t141_role_md; revoke select on "_MD_".objects from t141_role_md; diff --git a/core/sql/regress/tools/runallsb b/core/sql/regress/tools/runallsb index b5a8e2d588..646e04e40a 100755 --- a/core/sql/regress/tools/runallsb +++ b/core/sql/regress/tools/runallsb @@ -41,6 +41,7 @@ export SQLMX_REGRESS=1 export SEABASE_REGRESS=2 +#TEST_SUBDIRS="checktest1 checktest2 core compGeneral executor seabase hive fullstack2 charsets qat privs1 privs2 udr" TEST_SUBDIRS="core compGeneral executor seabase hive fullstack2 charsets qat privs1 privs2 udr" BASE_SUBDIRS="core compGeneral executor seabase hive" OTHER_SUBDIRS="fullstack2 charsets qat privs1 privs2 udr" @@ -145,10 +146,26 @@ if [[ $? -ne 0 ]]; then exit 1 fi +#checktests runs tests from multiple test subdirs. +#if runallsb is being used to run checktests along with other tests, +#then set a flag so those tests will be skipped by individual test suites. +export CHECK_TEST1=0 +export CHECK_TEST2=0 +for dir in $TEST_SUBDIRS; do + if [ "$dir" == "checktest1" ]; then + export CHECK_TEST1=1 + elif [ "$dir" == "checktest2" ]; then + export CHECK_TEST2=1 + fi +done + # # Make sure we have the rundir setup correctly for the tests and run 'em! # +if [ $diffOnly -eq 0 ]; then + $TRAF_HOME/sql/scripts/ilh_regrinit +fi for dir in $TEST_SUBDIRS; do if [[ ! 
-d $rundir/$dir ]]; then echo "Making $rundir/$dir - continuing with tests" @@ -162,6 +179,16 @@ for dir in $TEST_SUBDIRS; do fi case $dir in + checktest1) + pushd $rundir/checktest1 + $scriptsdir/tools/runregr -sb $diffStr + popd + ;; + checktest2) + pushd $rundir/checktest2 + $scriptsdir/tools/runregr -sb $diffStr + popd + ;; core) pushd core ./runregr -sb $diffStr diff --git a/core/sql/regress/tools/runregr b/core/sql/regress/tools/runregr index f6a47e8be0..820531b12d 100755 --- a/core/sql/regress/tools/runregr +++ b/core/sql/regress/tools/runregr @@ -153,6 +153,7 @@ export REGRBASDIR=$(basename $PWD) export REGRRUNDIR=$rundir/$REGRBASDIR export REGRTSTDIR=$scriptsdir/$REGRBASDIR export rgrlog=$REGRRUNDIR/runregr-${table_format}${log_qualifier}.log +export rgrStats=$REGRRUNDIR/runregr-${table_format}${log_qualifier}.stats export REGRTOOLSDIR=$scriptsdir/tools export REGRCONCURRENT="$concurrent_execution" @@ -168,6 +169,34 @@ export REGRTSTDIR_Q="'$REGRTSTDIR'" export REGRRUNDIR_QCleanup="'$REGRRUNDIR/cleanup'" export REGRRUNDIR_QAB="'$REGRRUNDIR/A/B'" +###################################################### +#these tests are run as part of checktests. Used by runregr_checktest*.ksh +#They are skipped by individual tests if 'runallsb' script is used to +#run multiple suites and checktests is part of it. +#That way we avoid running the same tests multiple times: once as part of +#checktests and then as part of individual test. + +# "orig" tests are the original tests that need to be run. +# non-orig are the ones currently being run due to existing issues with +# some tests in the orig list. +export coreCT="TEST000 TEST001 TEST002 TEST005 TEST032 TEST056 TEST116 TESTRTS" +export compGeneralCT="TEST006 TEST015 TEST042 TESTTOK" +export charsetsCT="TEST001 TEST311" +export executorCT="TEST013 TEST015 TEST022 TEST107" +export fullstack2CT="TEST062" + +export hiveCTorig="TEST001 TEST005 TEST009 TEST015 TEST021 TEST031 TEST041 TEST046 TEST055" +export hiveCT="TEST001 TEST005 TEST009 TEST015 TEST017 TEST021" + +export seabaseCT="TEST002 TEST003 TEST010 TEST011 TEST022 TEST025 TEST030 TEST031" +export udrCT="TEST002" +export privs1CT="TEST120 TEST137" + +export privs2CTorig="TEST145" +#export privs2CT="TEST145" +export privs2CT="" +#################################### + tmplocal=$TMP/`basename $0 .ksh`.tmp # Make sure sed commands in $FILTER work -- diff --git a/core/sql/regress/tools/runregr_charsets.ksh b/core/sql/regress/tools/runregr_charsets.ksh index fddd2aa1ee..d94be75f8d 100755 --- a/core/sql/regress/tools/runregr_charsets.ksh +++ b/core/sql/regress/tools/runregr_charsets.ksh @@ -271,6 +271,11 @@ if [ `uname` = "Windows_NT" ]; then skipTheseTests="$skipTheseTests TEST111 TEST132 TEST137 TEST500 TEST503" fi +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST1" == "1" ]; then + skipTheseTests="$skipTheseTests $charsetsCT" +fi + for i in $testfiles; do skipthis=0 for j in $skipTheseTests; do diff --git a/core/sql/regress/tools/runregr_checktest1.ksh b/core/sql/regress/tools/runregr_checktest1.ksh new file mode 100755 index 0000000000..59f11aa018 --- /dev/null +++ b/core/sql/regress/tools/runregr_checktest1.ksh @@ -0,0 +1,4 @@ +#checktest1 will run tests from core, compGeneral, charsets, executor +$scriptsdir/tools/runregr_checktests.ksh -ct1 $* + + diff --git a/core/sql/regress/tools/runregr_checktest2.ksh b/core/sql/regress/tools/runregr_checktest2.ksh new file mode 100755 index 0000000000..1aa1766242 --- /dev/null +++ b/core/sql/regress/tools/runregr_checktest2.ksh @@ 
-0,0 +1,4 @@ +#checktest2 will run tests from hive, seabase, privs1, udr +$scriptsdir/tools/runregr_checktests.ksh -ct2 $* + + diff --git a/core/sql/regress/tools/runregr_checktests.ksh b/core/sql/regress/tools/runregr_checktests.ksh new file mode 100755 index 0000000000..7df0333a09 --- /dev/null +++ b/core/sql/regress/tools/runregr_checktests.ksh @@ -0,0 +1,528 @@ +#! /bin/sh +####################################################################### +# @@@ START COPYRIGHT @@@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# @@@ END COPYRIGHT @@@ +####################################################################### + +testsToRun= +allTests= + +#allTests have space separated fully qualified tests with format: +# TEST001.core TEST023.executor +function setupAllTests() { + if [ "$2" == "" ]; then + return + fi + + for i in $2; do + tnum=`echo $i | cut -c 5-` + +# if [ "$1" == "1" ]; then +# allTests="$allTests TEST$tnum.$3"; +# continue +# fi + + testdir=$scriptsdir/$3 + testfile=$testdir/TEST$tnum + expectedfile=$testdir/EXPECTED$tnum + filterfile= + if [ -r "$expectedfile.SB" ]; then + expectedfile="$expectedfile.SB" + elif [ -r "$expectedfile.LINUX" ]; then + expectedfile="$expectedfile.LINUX" + fi + + if [ "$BUILD_FLAVOR" = "RELEASE" ]; then + if [ -r "$expectedfile.RELEASE" ]; then + expectedfile="$expectedfile.RELEASE" + fi + fi + + knowndiff= + if [ -r "$testdir/DIFF$tnum.KNOWN.LINUX" ]; then + knowndiff=$testdir/DIFF$tnum.KNOWN.LINUX + elif [ -r "$testdir/DIFF$tnum.KNOWN.SB" ]; then + knowndiff=$testdir/DIFF$tnum.KNOWN.SB + elif [ -r "$testdir/DIFF$tnum.KNOWN" ]; then + knowndiff=$testdir/DIFF$tnum.KNOWN + fi + + cp -f $testfile TEST$tnum.$3 + cp -f $expectedfile EXPECTED$tnum.$3 + if [ "$knowndiff" != "" ]; then + cp -f $knowndiff DIFF$tnum.KNOWN.$3 + fi + + filterfile= + if [ -r "$testdir/FILTER$tnum.SB" ]; then + filterfile="$testdir/FILTER$tnum.SB" + elif [ -r "$testdir/FILTER$tnum" ]; then + filterfile="$testdir/FILTER$tnum" + fi + if [ "$filterfile" != "" ]; then + cp -f $filterfile FILTER$tnum.$3 + chmod ugo+rwx FILTER$tnum.$3 + fi + +# if [ "$3" == "compGeneral" ]; then +# cp -f $testdir/hqc_*.* . +# cp -f $testdir/cache_*.* . +# fi + + if [ "$3" == "udr" ]; then + cp -f $testdir/Utils.java . + + export JDBC_T4_URL="jdbc:t4jdbc://localhost:23400/:" + if [ -r $TRAF_HOME/sql/scripts/sw_env.sh ]; then + # use a custom port for the JDBC Type 4 driver + . 
$TRAF_HOME/sql/scripts/sw_env.sh + export JDBC_T4_URL="jdbc:t4jdbc://localhost:${MY_DCS_MASTER_PORT}/:" + fi + fi + + allTests="$allTests TEST$tnum.$3"; + done +} + +# $1 contains space separated entries with format: +# core/TEST001 executor/test023 seabase/010 hive +function setupTestsToRun() { + if [ "$1" == "" ]; then + # run all tests + testsToRun="$allTests" + return + fi + + for i in $1; do + + iDir= + iTst= + if [ `echo $i | grep -c "/" ` -eq 0 ]; then + iDir=$i + else + iDir=$(echo $i | cut -d'/' -f 1) + iTst=$(echo $i | cut -d'/' -f 2) + iTst=`echo $iTst | tr a-z A-Z` + if [ `expr substr $iTst 1 4` != "TEST" ]; then + nlen=3 + test `expr match "$iTst" ".*[Uu]$"` -gt 0 && nlen=4 + if [ `expr length $iTst` -lt $nlen ]; then + iTst=0$iTst + if [ `expr length $iTst` -lt $nlen ]; then + iTst=0$iTst + fi + fi + iTst=TEST$iTst + fi + fi + + for j in $allTests; do + jTst=$(echo $j | cut -d'.' -f 1) + jDir=$(echo $j | cut -d'.' -f 2) + if [ "$iDir" == "$jDir" ]; then + if [ "$iTst" == "" ]; then + rTst=$jTst + rDir=$jDir + testsToRun="$testsToRun $rTst.$rDir" + elif [ "$iTst" == "$jTst" ]; then + rTst=$jTst + rDir=$jDir + testsToRun="$testsToRun $rTst.$rDir" + else + continue + fi + fi + done #for j + done #for i +} + +if [ "$1" = "-h" -o "$1" = "-help" -o "$1" = "-?" ]; then + cat << END_HELP_TEXT + + Usage: + $0 [-d] [-r] + [-diff] + [files...] + + -f or -failuresOnly + deletes empty (i.e. successful) DIFF files, leaving only failures. + + -diff + do diffs only, do not run tests + +-info + show which tests will be run + + If no files are specified, all test scripts matching the pattern TEST???* + are executed. If specified, the test files should all start with the + string "TEST" or be the three-digit test numbers. + +END_HELP_TEXT + exit 0 +fi + +failuresOnly=0 +diffOnly=0 +infoOnly=0 +ct1=0 +ct2=0 + +OK=-1 +while [ $OK -ne 0 ]; do # loop to allow options to appear in any order + + if [ $OK -gt 0 ]; then + shift $OK + fi + OK=0 + + if [ "$1" = "-f" -o "$1" = "-fail" -o "$1" = "-failuresOnly" ]; then + failuresOnly=1 + OK=1 + fi + + if [ "$1" = "-diff" ]; then + diffOnly=1 + OK=1 + fi + + if [ "$1" = "-info" ]; then + infoOnly=1 + OK=1 + fi + + if [ "$1" = "-ct1" ]; then + ct1=1 + OK=1 + fi + + if [ "$1" = "-ct2" ]; then + ct2=1 + OK=1 + fi + +done +# ---------------- end of parsing command line options ---------------- + +if [ $ct1 -eq 0 ] && [ $ct2 -eq 0 ]; then + ct1=1 + ct2=1 +fi + +if [ "$*" == "" ] && [ "$diffOnly" == "0" ]; then + #if not diff and all tests are to be run, then remove existing rgrStats + rm -f $rgrStats +fi + +export BUILD_FLAVOR=`echo $BUILD_FLAVOR | tr a-z A-Z` +bldFlvr=`grep "BuildFlavor:" $rgrStats` +if [ $diffOnly -eq 0 ]; then + if [ "$bldFlvr" == "" ]; then + echo "BuildFlavor:$BUILD_FLAVOR" >> $rgrStats + fi +elif [ "$bldFlvr" != "" ]; then + export BUILD_FLAVOR=`echo $bldFlvr | cut -d':' -f2` + echo $BUILD_FLAVOR +fi + +#CT test vars are defined and exported from 'tools/runregr' script. 
+#Any modification need to be done in 'runregr' +if [ $ct1 -eq 1 ]; then + setupAllTests "$diffOnly" "$coreCT" "core" + setupAllTests "$diffOnly" "$compGeneralCT" "compGeneral" + setupAllTests "$diffOnly" "$charsetsCT" "charsets" + setupAllTests "$diffOnly" "$executorCT" "executor" + setupAllTests "$diffOnly" "$fullstack2CT" "fullstack2" +fi + +if [ $ct2 -eq 1 ]; then + setupAllTests "$diffOnly" "$hiveCT" "hive" + setupAllTests "$diffOnly" "$seabaseCT" "seabase" + setupAllTests "$diffOnly" "$privs1CT" "privs1" + setupAllTests "$diffOnly" "$privs2CT" "privs2" + setupAllTests "$diffOnly" "$udrCT" "udr" +fi + +setupTestsToRun "$*" + +export TEST_CATALOG='TRAFODION' +export TEST_SCHEMA_NAME='SCH' +seabase="$SEABASE_REGRESS" + +sbdefsfile= +if [ -r $REGRTOOLSDIR/sbdefs ]; then + sbdefsfile="$REGRTOOLSDIR/sbdefs" +fi + +export TEST_SCHEMA="$TEST_CATALOG.$TEST_SCHEMA_NAME" + +if [ $diffOnly -eq 0 ]; then + echo "--" + echo "-- Executables:" + ls -l $sqlci $mxcmp # YES, do this in two steps,for + ls $sqlci $mxcmp >$NULL 2>&1 || exit 1 # those of us who have written + env | grep -i _DEBUG # our own ls.ksh command... +fi + +cd $REGRRUNDIR 2>$NULL + +wDir=../.. + +echo "--" +echo "-- Current work directory:" +pwd + +echo "------------------------------------------------------------" +echo + +cat $rgrlog >> $TMP/`basename $rgrlog` 2>$NULL # append elsewhere, for us who rm *.bak +mv -f $rgrlog $rgrlog.bak 2>$NULL +echo "`date +'%F %T'` ($BUILD_FLAVOR build)" > $rgrlog +echo " " >> $rgrlog +printf "%-21s%-21s%-10s%-20s\n" "dir/TEST" "StartTime" "Duration" "Status" >> $rgrlog +printf "%-21s%-21s%-10s%-20s\n" "=========" "==========" "=========" "=======" >> $rgrlog +echo " " >> $rgrlog + +loopStartTime="`date +'%D %T'`" + +testStartTime= +testEndTime= +testElapsedTime= +totalElapsedTime=0 +for i in $testsToRun; do + tnum=`expr substr $i 5 3` + dir=`echo $i | cut -c 9-` + test=TEST$tnum.$dir + exp=EXPECTED$tnum.$dir + diffknownfile=DIFF$tnum.KNOWN.$dir + log=LOG$tnum.$dir + diff=DIFF$tnum.$dir + logtxt= + filter=FILTER$tnum.$dir + + if [ $infoOnly -ne 0 ]; then + echo $dir/TEST$tnum + continue + fi + + mv -f $diff $diff.BAK 2>$NULL + if [ $diffOnly -eq 0 ]; then + rm -f $log.BAK $diff.BAK + mv -f $log $log.BAK 2>$NULL + + echo "------------------------------------------------------------" + echo "-- Starting test $dir/TEST$tnum: " + echo "------------------------------------------------------------" + else + echo "------------------------------------------------------------" + echo "-- Doing diff for test $dir/TEST$tnum: " + echo "------------------------------------------------------------" + fi + echo + + #run the test + defsfile= + if [ $diffOnly -eq 0 ]; then + if [ -r $scriptsdir/$dir/userdefs ]; then + defsfile="$scriptsdir/$dir/userdefs" + fi + + cat $sbdefsfile $defsfile $test > $test.tmp + + rm -f TEST$tnum 2>$NULL + cp $test TEST$tnum 2>$NULL + + export REGRTSTDIR=$scriptsdir/$dir + + testStartTime="`date +'%F %T'`" + testStartTimeInSecs="`date +'%s'`" + + $sqlci -i$test.tmp + + testEndTimeInSecs="`date +'%s'`" + testElapsedTime=$(($testEndTimeInSecs - $testStartTimeInSecs)) + #formattedET="`date -u -d @${testElapsedTime} +"%T"`" + exists=`grep "$dir/TEST$tnum" $rgrStats` + if [ "$exists" != "" ]; then + sed -i "s/$dir\/TEST$tnum.*/$dir\/TEST$tnum|$testStartTime|$testElapsedTime/" $rgrStats + else + echo "$dir/TEST$tnum|$testStartTime|$testElapsedTime" >> $rgrStats + fi + + rm -f $log 2>$NULL + cp LOG$tnum $log 2>$NULL + fi + rm -f $test.tmp 2>$NULL + rm -f TEST$tnum 2>$NULL + rm -f 
TEST$tnum.$dir 2>$NULL + + #--------------------------------------- + # Sort log and expected result file. -- + #--------------------------------------- + # sort log and expected file + if [ -x $LOGSORT ]; then + expd=$exp.srt + logd=$log.srt + rm -f $expd $logd + echo "SORTING EXPECTED AND LOG FILES" + echo "$LOGSORT $exp $expd" + echo "$LOGSORT $log $logd" + echo + $LOGSORT $exp $expd >> $NULL # not to $diff, because + $LOGSORT $log $logd >> $NULL # logsort writes too much junk + else + expd=$exp + logd=$log + echo "Could not find $LOGSORT, comparing unsorted files" + fi + + #------------------------------ + # filter result files -- + #------------------------------ + + if [ -r "$filter" ]; then + # Mask out test-specific patterns (like timestamps, + # generated identifiers, explain statistics) before doing the diff. + expr=$exp.tflt + logr=$log.tflt + + echo "RUNNING SPECIAL FILTER FOR TEST $test" + $REGRRUNDIR/$filter $expd > $expr 2>&1 + $REGRRUNDIR/$filter $logd > $logr 2>&1 + else + expr=$expd + logr=$logd + fi + + # Run general filter common for all tests + if [ "$FILTER" != "" ]; then + if [ -x "$FILTER" ]; then + # Mask out commonly changing patterns (like timestamps, + # generated identifiers, explain statistics) before doing the diff. + expt=$exp.flt + logt=$log.flt + echo "RUNNING STANDARD FILTER FOR TEST $test" + echo "$FILTER $logr > $logt" + echo "$FILTER $expr > $expt" + echo + $FILTER $expr > $expt 2>&1 + $FILTER $logr > $logt 2>&1 + echo "RUNNING DIFFs ON SORTED AND FILTERED LOGS/EXPECTED FILES" + echo "diff $expt $logt >> $diff" + diff $expt $logt >> $diff 2>&1 + else + echo "Could not find ${FILTER:-FILTER program}" + echo "diff $expr $logr >> $diff" + echo "RUNNING DIFFs ON SORTED AND FILTERED LOGS/EXPECTED FILES" + diff $expr $logr >> $diff 2>&1 + fi + else + echo "diff $expr $logr >> $diff" + diff $expr $logr >> $diff 2>&1 + fi + + # cleanup + chmod ug+rw $exp.*srt $exp.*flt $log* $diff* 2>$NULL + + # see if there were differences (not safe to use "$?" status, if the + # diff above pipes into some other command or some other cmd intervenes) + diffsize=`wc -l $diff`; diffsize=`echo $diffsize | cut -f1 -d' '` + + #diff the diff and the diff.KNOWN files. + diffsAreKnown=0 + knownsize= + nskKnown=0 + + if [ $diffsize -ne 0 ]; then + if [ -r "$diffknownfile" ]; then + echo + echo "COMPARING KNOWN DIFFS FILE TO CURRENT DIFFS" + echo "# ($diffsize lines different)" + # Filter known diff file to avoid schema differences + knownfiltered="$diffknownfile.flt" + dfilefiltered="$diff.flt" + echo "$FILTER $diffknownfile > $knownfiltered 2>&1" + $FILTER $diffknownfile > $knownfiltered 2>&1 + echo "$FILTER $diff > $dfilefiltered 2>&1" + $FILTER $diff > $dfilefiltered 2>&1 + echo "diff $dfilefiltered $knownfiltered" + diff $dfilefiltered $knownfiltered > $NULL + if [ $? -eq 0 ]; then + diffsAreKnown=1 + else + ktmp=`wc -l $knownfiltered`; ktmp=`echo $ktmp | cut -f1 -d' '` + test "$ktmp" != "" && + test $ktmp -ne 0 && knownsize=" (vs. 
$ktmp known)" + fi + fi + fi + + # print result + if [ $diffsize -eq 0 -o $diffsAreKnown -ne 0 ]; then + if [ $failuresOnly -ne 0 ]; then + rm -f $diff $diff.BAK $exp.srt $log.srt $log.BAK + fi + if [ $diffsize -eq 0 ]; then + logtxt="### PASS ###" + else +# logtxt="### PASS with known diffs ###$logtxt" + logtxt="### PASS (known diffs) ###" + fi + else + ls -l $diff* + logtxt="### FAIL ($diffsize lines$knownsize) ###" + fi + echo $logtxt + + exists=`grep "$dir/TEST$tnum" $rgrStats` + if [ "$exists" != "" ]; then + start=`grep "$dir/TEST$tnum" $rgrStats | cut -d'|' -f2` + elapsed=`grep "$dir/TEST$tnum" $rgrStats | cut -d'|' -f3` + formattedET="`date -u -d @${elapsed} +"%T"`" + printf "%-21s%-21s%-10s%s\n" "$dir/TEST$tnum" "$start" "$formattedET" "$logtxt" >> $rgrlog + echo + + totalElapsedTime=$(($totalElapsedTime + $elapsed)) + fi +done + +formattedTotalET="`date -u -d @${totalElapsedTime} +"%T"`" + +echo " " >> $rgrlog +echo "TotalDuration: " $formattedTotalET >> $rgrlog + +echo "-------------------------------------------------------------------------" +echo +loopEndTime="`date +'%F %T'`" + +if [ $infoOnly -ne 0 ]; then + echo +else + echo 'Regression Tests Summary' + echo '========================' + echo + ls -l $sqlci $mxcmp | sed 's/\.exe$//' # Summarize what we were testing with + echo " " | tee -a $rgrlog + echo +fi + +cat $rgrlog + +echo diff --git a/core/sql/regress/tools/runregr_compGeneral.ksh b/core/sql/regress/tools/runregr_compGeneral.ksh index 5199396560..a795cdce33 100755 --- a/core/sql/regress/tools/runregr_compGeneral.ksh +++ b/core/sql/regress/tools/runregr_compGeneral.ksh @@ -289,12 +289,12 @@ if [ "$BUILD_FLAVOR" = "RELEASE" ]; then fi if [ `uname` = "Linux" ]; then - # previously executed tests: - #skipTheseTests="$skipTheseTests" - # embedded tests - #skipTheseTests="$skipTheseTests" - # tests that hang - skipTheseTests="$skipTheseTests TEST070 TESTTOK TEST066" + skipTheseTests="$skipTheseTests TEST070 TEST066" +fi + +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST1" == "1" ]; then + skipTheseTests="$skipTheseTests $compGeneralCT" fi for i in $testfiles; do @@ -546,9 +546,9 @@ for i in $prettyfiles; do cp -f $REGRTSTDIR/$test $REGRRUNDIR/$test 2>$NULL fi - if [ $tnum = "TOK" ]; then - $wDir/toolbin/parserToks.ksh $wDir/parser/SqlParser.y NO-POPUP 2>&1 | tee LOGTOK; - elif [ $tnum = "TOK2" -a `uname` = "Windows_NT" ]; then +# if [ $tnum = "TOK" ]; then +# $wDir/toolbin/parserToks.ksh $wDir/parser/SqlParser.y NO-POPUP 2>&1 | tee LOGTOK; + if [ $tnum = "TOK2" -a `uname` = "Windows_NT" ]; then $REGRTSTDIR/tok2.ksh 2>&1 | tee LOGTOK2; else # special case for TEST002, where we want mxci to be licensed diff --git a/core/sql/regress/tools/runregr_core.ksh b/core/sql/regress/tools/runregr_core.ksh index 3996126b1c..1e9d0f55ff 100755 --- a/core/sql/regress/tools/runregr_core.ksh +++ b/core/sql/regress/tools/runregr_core.ksh @@ -331,6 +331,11 @@ if [ "$REGRCONCURRENT" -ne 0 ]; then skipTheseTests="$skipTheseTests $exclusiveTests" fi +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST1" == "1" ]; then + skipTheseTests="$skipTheseTests $coreCT" +fi + for i in $testfiles; do skipthis=0 for j in $skipTheseTests; do diff --git a/core/sql/regress/tools/runregr_executor.ksh b/core/sql/regress/tools/runregr_executor.ksh index 33a4948a8f..c32097a53b 100755 --- a/core/sql/regress/tools/runregr_executor.ksh +++ b/core/sql/regress/tools/runregr_executor.ksh @@ -356,6 +356,11 @@ if [ "$SQ_SEAMONSTER" != "1" ]; then skipTheseTests="$skipTheseTests 
TEST121 TEST123" fi +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST1" == "1" ]; then + skipTheseTests="$skipTheseTests $executorCT" +fi + for i in $testfiles; do skipthis=0 for j in $skipTheseTests; do diff --git a/core/sql/regress/tools/runregr_fullstack2.ksh b/core/sql/regress/tools/runregr_fullstack2.ksh index d96aee71a5..69b3fb0776 100755 --- a/core/sql/regress/tools/runregr_fullstack2.ksh +++ b/core/sql/regress/tools/runregr_fullstack2.ksh @@ -291,6 +291,11 @@ if [ "$seabase" -ne 0 ]; then skipTheseTexts= fi +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST1" == "1" ]; then + skipTheseTests="$skipTheseTests $fullstack2CT" +fi + for i in $testfiles; do skipthis=0 for j in $skipTheseTests; do diff --git a/core/sql/regress/tools/runregr_hive.ksh b/core/sql/regress/tools/runregr_hive.ksh index 4757b81619..86b38c41be 100755 --- a/core/sql/regress/tools/runregr_hive.ksh +++ b/core/sql/regress/tools/runregr_hive.ksh @@ -233,6 +233,11 @@ if [ "$REGRCONCURRENT" -ne 0 ]; then skipTheseTests="$skipTheseTests $exclusiveTests" fi +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST2" == "1" ]; then + skipTheseTests="$skipTheseTests $hiveCT" +fi + for i in $testfiles; do skipthis=0 for j in $skipTheseTests; do @@ -434,9 +439,9 @@ for i in $prettyfiles; do if [ "$REGRCONCURRENT" -eq 1 ]; then echo "create schema ${TEST_SCHEMA}; set schema ${TEST_SCHEMA};" \ - | cat $defsfile - $testrun > $test.tmp + | cat $sbdefsfile $defsfile - $testrun > $test.tmp else - cat $defsfile $testrun > $test.tmp + cat $sbdefsfile $defsfile $testrun > $test.tmp fi diff --git a/core/sql/regress/tools/runregr_privs1.ksh b/core/sql/regress/tools/runregr_privs1.ksh index 5836754c87..c0ff546a07 100755 --- a/core/sql/regress/tools/runregr_privs1.ksh +++ b/core/sql/regress/tools/runregr_privs1.ksh @@ -187,6 +187,11 @@ fi #------------------------------------------------------- skipTheseTests="TEST133" +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST2" == "1" ]; then + skipTheseTests="$skipTheseTests $privs1CT" +fi + testfiles="$prettyfiles" prettyfiles= skippedfiles= diff --git a/core/sql/regress/tools/runregr_privs2.ksh b/core/sql/regress/tools/runregr_privs2.ksh index d13503a020..6695654d40 100755 --- a/core/sql/regress/tools/runregr_privs2.ksh +++ b/core/sql/regress/tools/runregr_privs2.ksh @@ -187,6 +187,11 @@ fi #------------------------------------------------------- skipTheseTests="" +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST2" == "1" ]; then + skipTheseTests="$skipTheseTests $privs2CT" +fi + testfiles="$prettyfiles" prettyfiles= skippedfiles= diff --git a/core/sql/regress/tools/runregr_seabase.ksh b/core/sql/regress/tools/runregr_seabase.ksh index 1147813a55..b275ac3912 100755 --- a/core/sql/regress/tools/runregr_seabase.ksh +++ b/core/sql/regress/tools/runregr_seabase.ksh @@ -241,6 +241,11 @@ if [ "$REGRCONCURRENT" -ne 0 ]; then skipTheseTests="$skipTheseTests $exclusiveTests" fi +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST2" == "1" ]; then + skipTheseTests="$skipTheseTests $seabaseCT" +fi + for i in $testfiles; do skipthis=0 for j in $skipTheseTests; do diff --git a/core/sql/regress/tools/runregr_udr.ksh b/core/sql/regress/tools/runregr_udr.ksh index 26a5ed47d9..d9c050cff8 100755 --- a/core/sql/regress/tools/runregr_udr.ksh +++ b/core/sql/regress/tools/runregr_udr.ksh @@ -404,6 +404,11 @@ if [ $LINUX -eq 1 ]; then SKIPFILES="$SKIPFILES TEST150 TEST400 TEST402 TEST505 TEST508 
TEST750 TEST982" fi +#skip checkTest tests if they have already been run +if [ "$CHECK_TEST2" == "1" ]; then + SKIPFILES="$SKIPFILES $udrCT" +fi + # 3/26/12 Security scrum: Skip all tests on NT for now. Changes for Secure JARs not compatible # with the existing testware on NT. if [ $LINUX -eq 0 ]; then From ff339843714290e4fa51c40bf07dba5c1c135dd8 Mon Sep 17 00:00:00 2001 From: Anoop Sharma Date: Wed, 18 Apr 2018 17:27:12 +0000 Subject: [PATCH 2/3] removed regress/hive/EXPECTED030,040 --- core/sql/regress/hive/EXPECTED030 | 3101 ----------------------------- core/sql/regress/hive/EXPECTED040 | 2755 ------------------------- 2 files changed, 5856 deletions(-) delete mode 100644 core/sql/regress/hive/EXPECTED030 delete mode 100644 core/sql/regress/hive/EXPECTED040 diff --git a/core/sql/regress/hive/EXPECTED030 b/core/sql/regress/hive/EXPECTED030 deleted file mode 100644 index ad53d988d9..0000000000 --- a/core/sql/regress/hive/EXPECTED030 +++ /dev/null @@ -1,3101 +0,0 @@ ->>obey TEST030(setup); ->>-------------------------------------------------------------------------- ->> ->>set schema hive.hive; - ---- SQL operation complete. ->>cqd HIVE_MAX_STRING_LENGTH_IN_BYTES '20' ; - ---- SQL operation complete. ->>cqd mode_seahive 'ON'; - ---- SQL operation complete. ->>cqd traf_enable_orc_format 'ON'; - ---- SQL operation complete. ->>cqd HIST_ROWCOUNT_REQUIRING_STATS '50000'; - ---- SQL operation complete. ->>cqd hive_use_ext_table_attrs 'ON'; - ---- SQL operation complete. ->>cqd hist_missing_stats_warning_level '0'; - ---- SQL operation complete. ->>cqd ORC_NJS_PROBES_THRESHOLD '1000000'; - ---- SQL operation complete. ->>cqd HIVE_MIN_NUM_ESPS_PER_DATANODE '0'; - ---- SQL operation complete. ->> ->>prepare explainIt from -+> select substring(cast(SEQ_NUM+100 as char(3)),2,2) s, -+> substring(operator,1,16) operator, -+> cast(LEFT_CHILD_SEQ_NUM as char(2)) lc, -+> cast(RIGHT_CHILD_SEQ_NUM as char(2)) rc, -+> substring -+> (substring(substring(tname from (1+locate('.',tname))),1,case locate(')',tname) when 0 then 0 else locate(')',substring(tname from (1+locate('.',tname))))-1 end), -+> (locate('.',substring(tname from (1+locate('.',tname)))))+1, -+> 10 -+> ) tab_name -+> from table (explain(NULL,'XX')) -+> order by 1 desc; - ---- SQL command prepared. ->> ->>obey TEST030(tests); ->>-------------------------------------------------------------------------- ->>-- ORC file metadata info ->>invoke hive.hive.store_orc; - --- Definition of hive table STORE_ORC -<<<<<<< HEAD --- Definition current Wed Nov 29 02:19:58 2017 -======= --- Definition current Sun Jun 11 11:09:19 2017 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- - ( - S_STORE_SK INT - , S_STORE_ID VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_REC_START_DATE DATE - , S_REC_END_DATE DATE - , S_CLOSED_DATE_SK INT - , S_STORE_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_NUMBER_EMPLOYEES INT - , S_FLOOR_SPACE INT - , S_HOURS VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_MANAGER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_MARKET_ID INT - , S_GEOGRAPHY_CLASS VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_MARKET_DESC VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_MARKET_MANAGER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_DIVISION_ID INT - , S_DIVISION_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_COMPANY_ID INT - , S_COMPANY_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_STREET_NUMBER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_STREET_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_STREET_TYPE VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_SUITE_NUMBER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_CITY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_COUNTY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_STATE VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_ZIP VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_COUNTRY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_GMT_OFFSET REAL - , S_TAX_PRECENTAGE REAL - ) - /* stored as orc */ - ---- SQL operation complete. ->> ->>-- select one row from ORC table ->>select [first 1] * from hive.hive.store_orc; - -S_STORE_SK S_STORE_ID S_REC_START_DATE S_REC_END_DATE S_CLOSED_DATE_SK S_STORE_NAME S_NUMBER_EMPLOYEES S_FLOOR_SPACE S_HOURS S_MANAGER S_MARKET_ID S_GEOGRAPHY_CLASS S_MARKET_DESC S_MARKET_MANAGER S_DIVISION_ID S_DIVISION_NAME S_COMPANY_ID S_COMPANY_NAME S_STREET_NUMBER S_STREET_NAME S_STREET_TYPE S_SUITE_NUMBER S_CITY S_COUNTY S_STATE S_ZIP S_COUNTRY S_GMT_OFFSET S_TAX_PRECENTAGE ------------ -------------------- ---------------- -------------- ---------------- -------------------- ------------------ ------------- -------------------- -------------------- ----------- -------------------- -------------------- -------------------- ------------- -------------------- ------------ -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- --------------- ---------------- - - 1 AAAAAAAABAAAAAAA 1997-03-13 ? 2451189 ought 245 5250760 8AM-4PM William Ward 2 Unknown Enough high areas st Charles Bartley 1 Unknown 1 Unknown 767 Spring Wy Suite 250 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - ---- 1 row(s) selected. 
->> ->>-- select all rows from ORC table ->>select * from hive.hive.store_orc order by s_store_sk; - -S_STORE_SK S_STORE_ID S_REC_START_DATE S_REC_END_DATE S_CLOSED_DATE_SK S_STORE_NAME S_NUMBER_EMPLOYEES S_FLOOR_SPACE S_HOURS S_MANAGER S_MARKET_ID S_GEOGRAPHY_CLASS S_MARKET_DESC S_MARKET_MANAGER S_DIVISION_ID S_DIVISION_NAME S_COMPANY_ID S_COMPANY_NAME S_STREET_NUMBER S_STREET_NAME S_STREET_TYPE S_SUITE_NUMBER S_CITY S_COUNTY S_STATE S_ZIP S_COUNTRY S_GMT_OFFSET S_TAX_PRECENTAGE ------------ -------------------- ---------------- -------------- ---------------- -------------------- ------------------ ------------- -------------------- -------------------- ----------- -------------------- -------------------- -------------------- ------------- -------------------- ------------ -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- --------------- ---------------- - - 1 AAAAAAAABAAAAAAA 1997-03-13 ? 2451189 ought 245 5250760 8AM-4PM William Ward 2 Unknown Enough high areas st Charles Bartley 1 Unknown 1 Unknown 767 Spring Wy Suite 250 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 2 AAAAAAAACAAAAAAA 1997-03-13 2000-03-12 ? able 236 5285950 8AM-4PM Scott Smith 8 Unknown Parliamentary candid David Lamontagne 1 Unknown 1 Unknown 255 Sycamore Dr. Suite 410 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 3 AAAAAAAACAAAAAAA 2000-03-13 ? ? able 236 7557959 8AM-4PM Scott Smith 7 Unknown Impossible, true arm David Lamontagne 1 Unknown 1 Unknown 877 Park Laurel Road Suite T Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 4 AAAAAAAAEAAAAAAA 1997-03-13 1999-03-13 2451044 ese 218 9341467 8AM-4PM Edwin Adams 4 Unknown Events would achieve Thomas Pollack 1 Unknown 1 Unknown 27 Lake Ln Suite 260 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 5 AAAAAAAAEAAAAAAA 1999-03-14 2001-03-12 2450910 anti 288 9078805 8AM-4PM Edwin Adams 8 Unknown Events would achieve Thomas Pollack 1 Unknown 1 Unknown 27 Lee 6th Court Suite 80 Fairview Williamson County TN 35709 United States -5.0000000E+000 2.9999999E-002 - 6 AAAAAAAAEAAAAAAA 2001-03-13 ? ? cally 229 9026222 8AM-4PM Edwin Adams 10 Unknown Events would achieve Thomas Pollack 1 Unknown 1 Unknown 220 6th Lane Suite 140 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 7 AAAAAAAAHAAAAAAA 1997-03-13 ? ? ation 297 8954883 8AM-4PM David Thomas 9 Unknown Architects coul Thomas Benton 1 Unknown 1 Unknown 811 Lee Circle Suite T Midway Williamson County TN 31904 United States -5.0000000E+000 9.9999997E-003 - 8 AAAAAAAAIAAAAAAA 1997-03-13 2000-03-12 ? eing 278 6995995 8AM-4PM Brett Yates 2 Unknown Various bars make mo Dean Morrison 1 Unknown 1 Unknown 226 12th Lane Suite D Fairview Williamson County TN 35709 United States -5.0000000E+000 7.9999998E-002 - 9 AAAAAAAAIAAAAAAA 2000-03-13 ? ? eing 271 6995995 8AM-4PM Brett Yates 2 Unknown Formal, psychologica Dean Morrison 1 Unknown 1 Unknown 226 Hill Boulevard Suite 190 Midway Williamson County TN 31904 United States -5.0000000E+000 7.9999998E-002 - 10 AAAAAAAAKAAAAAAA 1997-03-13 1999-03-13 ? 
bar 294 9294113 8AM-4PM Raymond Jacobs 8 Unknown Little expectations Michael Wilson 1 Unknown 1 Unknown 175 4th Court Suite C Midway Williamson County TN 31904 United States -5.0000000E+000 5.9999998E-002 - 11 AAAAAAAAKAAAAAAA 1999-03-14 2001-03-12 ? ought 294 9294113 8AM-4PM Raymond Jacobs 6 Unknown Mysterious employe Michael Wilson 1 Unknown 1 Unknown 175 Park Green Court Suite 160 Midway Williamson County TN 31904 United States -5.0000000E+000 1.0999999E-001 - 12 AAAAAAAAKAAAAAAA 2001-03-13 ? ? ought 294 5219562 8AM-12AM Robert Thompson 6 Unknown Events develop i Dustin Kelly 1 Unknown 1 Unknown 337 College Boulevard Suite 100 Fairview Williamson County TN 31904 United States -5.0000000E+000 9.9999997E-003 - ---- 12 row(s) selected. ->> ->>-- select of few columns with WHERE predicate ->>select s_store_sk, left(s_store_id, 20) from hive.hive.store_orc where s_store_sk < 7; - -S_STORE_SK (EXPR) ------------ -------------------- - - 1 AAAAAAAABAAAAAAA - 2 AAAAAAAACAAAAAAA - 3 AAAAAAAACAAAAAAA - 4 AAAAAAAAEAAAAAAA - 5 AAAAAAAAEAAAAAAA - 6 AAAAAAAAEAAAAAAA - ---- 6 row(s) selected. ->> ->>-- select count of rows ->>select count(*) from hive.hive.store_orc; - -(EXPR) --------------------- - - 12 - ---- 1 row(s) selected. ->> ->>-- explain of join between 2 ORC tables ->>prepare XX from select x.s_suite_number, y.s_street_name -+> from hive.hive.store_orc x, hive.hive.store_orc y -+> where x.s_store_sk = y.s_store_sk; - ---- SQL command prepared. ->>execute explainIt; - -S OPERATOR LC RC TAB_NAME --- ---------------- -- -- ---------- - -04 ROOT 3 ? -03 HYBRID_HASH_JOIN 2 1 -02 ORC_SCAN ? ? STORE_ORC -01 ORC_SCAN ? ? STORE_ORC - ---- 4 row(s) selected. ->> ->>-- execute of join between 2 ORC tables ->>execute XX; - -S_SUITE_NUMBER S_STREET_NAME --------------------- -------------------- - -Suite 250 Spring -Suite 410 Sycamore -Suite T Park Laurel -Suite 260 Lake -Suite 80 Lee 6th -Suite 140 6th -Suite T Lee -Suite D 12th -Suite 190 Hill -Suite C 4th -Suite 160 Park Green -Suite 100 College - ---- 12 row(s) selected. ->> ->>-- explain of join between hive(hdfs) and ORC tables ->>prepare XX from select x.s_suite_number, y.s_street_name -+> from hive.hive.store x, hive.hive.store_orc y -+> where x.s_store_sk = y.s_store_sk; - ---- SQL command prepared. ->>execute explainIt; - -S OPERATOR LC RC TAB_NAME --- ---------------- -- -- ---------- - -04 ROOT 3 ? -03 HYBRID_HASH_JOIN 2 1 -02 HIVE_SCAN ? ? STORE -01 ORC_SCAN ? ? STORE_ORC - ---- 4 row(s) selected. ->> ->>-- execute of join between hive(hdfs) and ORC tables ->>execute XX; - -S_SUITE_NUMBER S_STREET_NAME --------------------- -------------------- - -Suite 250 Spring -Suite 410 Sycamore -Suite T Park Laurel -Suite 260 Lake -Suite 80 Lee 6th -Suite 140 6th -Suite T Lee -Suite D 12th -Suite 190 Hill -Suite C 4th -Suite 160 Park Green -Suite 100 College - ---- 12 row(s) selected. ->> ->>-- column list pushdown test ->>cqd orc_columns_pushdown 'OFF'; - ---- SQL operation complete. ->>showplan option 'tr' select s_store_sk from store_orc; -MASTER Executor fragment -======================== - -<<<<<<< HEAD -Fragment ID: 0, Length: 58688 -======= -Fragment ID: 0, Length: 17144 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- -Contents of EX_ROOT [2]: ------------------------- - -For ComTdb : -Class Version = 1, Class Size = 576 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 0, queueSizeUp = 0, numBuffers = 0, bufferSize = 0 -estimatedRowUsed = 0.000000, estimatedRowsAccessed = 0.000000, expressionMode = 0 -Flag = 0x129 - -For ComTdbRoot : -FirstNRows = -1, baseTablenamePosition = -1 -queryType_ = 2, planVersion_ = 2600 -rtFlags1_ = 0x20300a00 -rtFlags2_ = 0 -rtFlags3_ = 0 -rtFlags4_ = 0x2000 -rtFlags5_ = 0 -queryType_ = 2 -inputVarsSize_ = 0 -querySimilarityInfo()->siList()->numEntries() = 1 -<<<<<<< HEAD -explain_plan_size = 3088 -======= -explain_plan_size = 3184 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 4 - -Expression: inputExpr_ is NULL -Expression: outputExpr_ is not NULL -Expression: pkeyExpr_ is NULL -Expression: predExpr_ is NULL -Contents of EX_EXT_STORAGE_SCAN [1]: ------------------------------------- - -For ComTdb : -Class Version = 1, Class Size = 464 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 2048, queueSizeUp = 2048, numBuffers = 4, bufferSize = 41032 -estimatedRowUsed = 100.000000, estimatedRowsAccessed = 100.000000, expressionMode = 0 -Flag = 0x9 -criDescDown_->noTuples() = 2, criDescUp_->noTuples() = 3 - -For ComTdbExtStorageScan : -tableName_ = HIVE.HIVE.STORE_ORC -type_ = ORC -hostName_ = localhost, port_ = 36000 -recordDelimiter_ = 10, columnDelimiter_ = 1 -hdfsBufSize_ = 703, rangeTailIOSize_ = 20970, hdfsSqlMaxRecLen_ = 8 -tuppIndex_ = 2, workAtpIndex_ = 3 -asciiTuppIndex_ = 4, asciiRowLen_ = 516 -moveExprColsTuppIndex_ = 2, moveExprColsRowLength_ = 560 -convertSkipListSize_ = 33, convertSkipList_ = 3 -outputRowLength_ = 16 -Flag = 0xc - -Number of ranges to scan: 1 -Number of esps to scan: 1 - - Esp# Range# StripeOffset Length FileName -====== ====== ============ ============ ============================== - - 0 0 3 2853 store_orc/000000_0 - -Summary of bytes read per ESP (2853 = 100 percent): - -ESP 0 reads 2853 bytes ( 100 percent of avg) - -Number of columns to retrieve: 29 -ColNumber: 1, ColName: S_STORE_SK -ColNumber: 2, ColName: S_STORE_ID -ColNumber: 3, ColName: S_REC_START_DATE -ColNumber: 4, ColName: S_REC_END_DATE -ColNumber: 5, ColName: S_CLOSED_DATE_SK -ColNumber: 6, ColName: S_STORE_NAME -ColNumber: 7, ColName: S_NUMBER_EMPLOYEES -ColNumber: 8, ColName: S_FLOOR_SPACE -ColNumber: 9, ColName: S_HOURS -ColNumber: 10, ColName: S_MANAGER -ColNumber: 11, ColName: S_MARKET_ID -ColNumber: 12, ColName: S_GEOGRAPHY_CLASS -ColNumber: 13, ColName: S_MARKET_DESC -ColNumber: 14, ColName: S_MARKET_MANAGER -ColNumber: 15, ColName: S_DIVISION_ID -ColNumber: 16, ColName: S_DIVISION_NAME -ColNumber: 17, ColName: S_COMPANY_ID -ColNumber: 18, ColName: S_COMPANY_NAME -ColNumber: 19, ColName: S_STREET_NUMBER -ColNumber: 20, ColName: S_STREET_NAME -ColNumber: 21, ColName: S_STREET_TYPE -ColNumber: 22, ColName: S_SUITE_NUMBER -ColNumber: 23, ColName: S_CITY -ColNumber: 24, ColName: S_COUNTY -ColNumber: 25, ColName: S_STATE -ColNumber: 26, ColName: S_ZIP -ColNumber: 27, ColName: S_COUNTRY -ColNumber: 28, ColName: S_GMT_OFFSET -ColNumber: 29, ColName: S_TAX_PRECENTAGE -<<<<<<< HEAD -hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_orc -modTSforDir_ = 1511634123, numOfPartCols_ = 0 -======= -hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_orc -modTSforDir_ = 1496437232, 
numOfPartCols_ = 0 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 6 - -Expression: selectExpr_ is NULL -Expression: moveExpr_ is not NULL -Expression: convertExpr_ is NULL -Expression: moveColsConvertExpr_ is not NULL -Expression: partElimExpr_ is NULL -Expression: extOperExpr_ is NULL - ---- SQL operation complete. ->>select s_store_sk from store_orc; - -S_STORE_SK ------------ - - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - ---- 12 row(s) selected. ->>cqd orc_columns_pushdown 'ON'; - ---- SQL operation complete. ->>showplan option 'tr' select s_store_sk from store_orc; -MASTER Executor fragment -======================== - -Fragment ID: 0, Length: 14664 - -Contents of EX_ROOT [2]: ------------------------- - -For ComTdb : -Class Version = 1, Class Size = 576 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 0, queueSizeUp = 0, numBuffers = 0, bufferSize = 0 -estimatedRowUsed = 0.000000, estimatedRowsAccessed = 0.000000, expressionMode = 0 -Flag = 0x129 - -For ComTdbRoot : -FirstNRows = -1, baseTablenamePosition = -1 -queryType_ = 2, planVersion_ = 2600 -rtFlags1_ = 0x20300a00 -rtFlags2_ = 0 -rtFlags3_ = 0 -rtFlags4_ = 0x2000 -rtFlags5_ = 0 -queryType_ = 2 -inputVarsSize_ = 0 -querySimilarityInfo()->siList()->numEntries() = 1 -<<<<<<< HEAD -explain_plan_size = 3080 -======= -explain_plan_size = 3176 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 4 - -Expression: inputExpr_ is NULL -Expression: outputExpr_ is not NULL -Expression: pkeyExpr_ is NULL -Expression: predExpr_ is NULL -Contents of EX_EXT_STORAGE_SCAN [1]: ------------------------------------- - -For ComTdb : -Class Version = 1, Class Size = 464 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 2048, queueSizeUp = 2048, numBuffers = 4, bufferSize = 41032 -estimatedRowUsed = 100.000000, estimatedRowsAccessed = 100.000000, expressionMode = 0 -Flag = 0x9 -criDescDown_->noTuples() = 2, criDescUp_->noTuples() = 3 - -For ComTdbExtStorageScan : -tableName_ = HIVE.HIVE.STORE_ORC -type_ = ORC -hostName_ = localhost, port_ = 36000 -recordDelimiter_ = 10, columnDelimiter_ = 1 -hdfsBufSize_ = 115, rangeTailIOSize_ = 20970, hdfsSqlMaxRecLen_ = 8 -tuppIndex_ = 2, workAtpIndex_ = 3 -asciiTuppIndex_ = 4, asciiRowLen_ = 8 -moveExprColsTuppIndex_ = 2, moveExprColsRowLength_ = 16 -convertSkipListSize_ = 33, convertSkipList_ = 3 -outputRowLength_ = 16 -Flag = 0xc - -Number of ranges to scan: 1 -Number of esps to scan: 1 - - Esp# Range# StripeOffset Length FileName -====== ====== ============ ============ ============================== - - 0 0 3 2853 store_orc/000000_0 - -Summary of bytes read per ESP (2853 = 100 percent): - -ESP 0 reads 2853 bytes ( 100 percent of avg) - -Number of columns to retrieve: 1 -ColNumber: 1, ColName: S_STORE_SK -<<<<<<< HEAD -hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_orc -modTSforDir_ = 1511634123, numOfPartCols_ = 0 -======= -hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_orc -modTSforDir_ = 1496437232, numOfPartCols_ = 0 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- -# of Expressions = 6 - -Expression: selectExpr_ is NULL -Expression: moveExpr_ is not NULL -Expression: convertExpr_ is NULL -Expression: moveColsConvertExpr_ is not NULL -Expression: partElimExpr_ is NULL -Expression: extOperExpr_ is NULL - ---- SQL operation complete. ->>select s_store_sk from store_orc; - -S_STORE_SK ------------ - - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - ---- 12 row(s) selected. ->> ->>-- predicate pushdown to ORC layer ->>cqd orc_pred_pushdown 'OFF'; - ---- SQL operation complete. ->>select s_store_sk from store_orc where s_store_sk = 3; - -S_STORE_SK ------------ - - 3 - ---- 1 row(s) selected. ->>select s_store_sk from store_orc where s_store_sk < 2; - -S_STORE_SK ------------ - - 1 - ---- 1 row(s) selected. ->>select s_store_sk from store_orc where s_store_sk >= 11; - -S_STORE_SK ------------ - - 11 - 12 - ---- 2 row(s) selected. ->>select s_store_sk from store_orc where s_store_sk >= 4 and s_store_sk < 6; - -S_STORE_SK ------------ - - 4 - 5 - ---- 2 row(s) selected. ->>select s_store_sk from store_orc where s_store_sk in (7,8,9); - -S_STORE_SK ------------ - - 7 - 8 - 9 - ---- 3 row(s) selected. ->>showplan option 'tr' select s_store_sk from store_orc where s_store_sk = 3; -MASTER Executor fragment -======================== - -Fragment ID: 0, Length: 14840 - -Contents of EX_ROOT [2]: ------------------------- - -For ComTdb : -Class Version = 1, Class Size = 576 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 0, queueSizeUp = 0, numBuffers = 0, bufferSize = 0 -estimatedRowUsed = 0.000000, estimatedRowsAccessed = 0.000000, expressionMode = 0 -Flag = 0x129 - -For ComTdbRoot : -FirstNRows = -1, baseTablenamePosition = -1 -queryType_ = 2, planVersion_ = 2600 -rtFlags1_ = 0x20300a00 -rtFlags2_ = 0 -rtFlags3_ = 0 -rtFlags4_ = 0x2000 -rtFlags5_ = 0 -queryType_ = 2 -inputVarsSize_ = 0 -querySimilarityInfo()->siList()->numEntries() = 1 -<<<<<<< HEAD -explain_plan_size = 3176 -======= -explain_plan_size = 3272 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- -# of Expressions = 4 - -Expression: inputExpr_ is NULL -Expression: outputExpr_ is not NULL -Expression: pkeyExpr_ is NULL -Expression: predExpr_ is NULL -Contents of EX_EXT_STORAGE_SCAN [1]: ------------------------------------- - -For ComTdb : -Class Version = 1, Class Size = 464 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 2048, queueSizeUp = 2048, numBuffers = 4, bufferSize = 32840 -estimatedRowUsed = 100.000000, estimatedRowsAccessed = 100.000000, expressionMode = 0 -Flag = 0x9 -criDescDown_->noTuples() = 3, criDescUp_->noTuples() = 4 - -For ComTdbExtStorageScan : -tableName_ = HIVE.HIVE.STORE_ORC -type_ = ORC -hostName_ = localhost, port_ = 36000 -recordDelimiter_ = 10, columnDelimiter_ = 1 -hdfsBufSize_ = 115, rangeTailIOSize_ = 20970, hdfsSqlMaxRecLen_ = 16 -tuppIndex_ = 3, workAtpIndex_ = 3 -asciiTuppIndex_ = 4, asciiRowLen_ = 8 -moveExprColsTuppIndex_ = 2, moveExprColsRowLength_ = 8 -convertSkipListSize_ = 33, convertSkipList_ = 2 -outputRowLength_ = 8 -Flag = 0xc - -Number of ranges to scan: 1 -Number of esps to scan: 1 - - Esp# Range# StripeOffset Length FileName -====== ====== ============ ============ ============================== - - 0 0 3 2853 store_orc/000000_0 - -Summary of bytes read per ESP (2853 = 100 percent): - -ESP 0 reads 2853 bytes ( 100 percent of avg) - -Number of columns to retrieve: 1 -ColNumber: 1, ColName: S_STORE_SK -<<<<<<< HEAD -hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_orc -modTSforDir_ = 1511634123, numOfPartCols_ = 0 -======= -hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_orc -modTSforDir_ = 1496437232, numOfPartCols_ = 0 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 6 - -Expression: selectExpr_ is not NULL -Expression: moveExpr_ is NULL -Expression: convertExpr_ is not NULL -Expression: moveColsConvertExpr_ is NULL -Expression: partElimExpr_ is NULL -Expression: extOperExpr_ is NULL - ---- SQL operation complete. ->> ->>cqd orc_pred_pushdown 'ON'; - ---- SQL operation complete. ->>select s_store_sk from store_orc where s_store_sk = 3; - -S_STORE_SK ------------ - - 3 - ---- 1 row(s) selected. ->>select s_store_sk from store_orc where s_store_sk < 2; - -S_STORE_SK ------------ - - 1 - ---- 1 row(s) selected. ->>select s_store_sk from store_orc where s_store_sk >= 11; - -S_STORE_SK ------------ - - 11 - 12 - ---- 2 row(s) selected. ->>select s_store_sk from store_orc where s_store_sk >= 4 and s_store_sk < 6; - -S_STORE_SK ------------ - - 4 - 5 - ---- 2 row(s) selected. ->>select s_store_sk from store_orc where s_store_sk in (7,8,9); - -S_STORE_SK ------------ - - 7 - 8 - 9 - ---- 3 row(s) selected. ->>showplan option 'tr' select s_store_sk from store_orc where s_store_sk = 3; -MASTER Executor fragment -======================== - -<<<<<<< HEAD -Fragment ID: 0, Length: 19144 -======= -Fragment ID: 0, Length: 19984 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- -Contents of EX_ROOT [2]: ------------------------- - -For ComTdb : -Class Version = 1, Class Size = 576 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 0, queueSizeUp = 0, numBuffers = 0, bufferSize = 0 -estimatedRowUsed = 0.000000, estimatedRowsAccessed = 0.000000, expressionMode = 0 -Flag = 0x129 - -For ComTdbRoot : -FirstNRows = -1, baseTablenamePosition = -1 -queryType_ = 2, planVersion_ = 2600 -rtFlags1_ = 0x20300a00 -rtFlags2_ = 0 -rtFlags3_ = 0 -rtFlags4_ = 0x2000 -rtFlags5_ = 0 -queryType_ = 2 -inputVarsSize_ = 0 -querySimilarityInfo()->siList()->numEntries() = 1 -<<<<<<< HEAD -explain_plan_size = 3264 -======= -explain_plan_size = 3368 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 4 - -Expression: inputExpr_ is NULL -Expression: outputExpr_ is not NULL -Expression: pkeyExpr_ is NULL -Expression: predExpr_ is NULL -Contents of EX_EXT_STORAGE_SCAN [1]: ------------------------------------- - -For ComTdb : -Class Version = 1, Class Size = 464 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 2048, queueSizeUp = 2048, numBuffers = 4, bufferSize = 32840 -estimatedRowUsed = 100.000000, estimatedRowsAccessed = 100.000000, expressionMode = 0 -Flag = 0x9 -criDescDown_->noTuples() = 3, criDescUp_->noTuples() = 4 - -For ComTdbExtStorageScan : -tableName_ = HIVE.HIVE.STORE_ORC -type_ = ORC -hostName_ = localhost, port_ = 36000 -recordDelimiter_ = 10, columnDelimiter_ = 1 -hdfsBufSize_ = 115, rangeTailIOSize_ = 20970, hdfsSqlMaxRecLen_ = 16 -tuppIndex_ = 3, workAtpIndex_ = 3 -asciiTuppIndex_ = 4, asciiRowLen_ = 8 -moveExprColsTuppIndex_ = 2, moveExprColsRowLength_ = 8 -convertSkipListSize_ = 33, convertSkipList_ = 2 -outputRowLength_ = 8 -Flag = 0xc - -Number of ranges to scan: 1 -Number of esps to scan: 1 - - Esp# Range# StripeOffset Length FileName -====== ====== ============ ============ ============================== - - 0 0 3 2853 store_orc/000000_0 - -Summary of bytes read per ESP (2853 = 100 percent): - -ESP 0 reads 2853 bytes ( 100 percent of avg) - -Number of columns to retrieve: 1 -ColNumber: 1, ColName: S_STORE_SK -<<<<<<< HEAD -hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_orc -modTSforDir_ = 1511634123, numOfPartCols_ = 0 -======= -hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_orc -modTSforDir_ = 1496437232, numOfPartCols_ = 0 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -Number of PPI entries: 3 -PPI: #1 - type: STARTAND(1) -PPI: #2 - type: EQUALS(5) - operAttrIndex: 0 - colName_: s_store_sk -PPI: #3 - type: END(4) -Num Of extAllColInfoList entries: 33 - -# of Expressions = 6 - -Expression: selectExpr_ is not NULL -Expression: moveExpr_ is NULL -Expression: convertExpr_ is not NULL -Expression: moveColsConvertExpr_ is NULL -Expression: partElimExpr_ is NULL -Expression: extOperExpr_ is not NULL - ---- SQL operation complete. ->> ->>-- local join predicate is not pushed down ->>explain select * from store_sales_orc where ss_sold_date_sk = ss_item_sk ; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... NOT NAMED -<<<<<<< HEAD -PLAN_ID .................. 212378682018129421 -======= -PLAN_ID .................. 212363939384659273 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
-ROWS_OUT ......... 1,440,202 -EST_TOTAL_COST ......... 191.55 -STATEMENT ................ select * - from store_sales_orc - where ss_sold_date_sk = ss_item_sk; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 3 ONLY CHILD 2 -REQUESTS_IN .............. 1 -ROWS_OUT ......... 1,440,202 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ......... 191.55 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ........... 2.8804e+06 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 132 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinal 1,440,202 - esp_2_node_map ......... (-1(4 times)) - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - SCHEMA ................. HIVE.HIVE - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_ORC_FORMAT ON - HIST_ROWCOUNT_REQU 50,000 - HIVE_USE_EXT_TABLE_ATTR ON - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - ORC_COLUMNS_PUSHDOWN ... ON - ORC_PRED_PUSHDOWN ...... ON - GENERATE_EXPLAIN ....... ON - select_list ............ HIVE.STORE_SALES_ORC.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_ORC.SS_SOLD_TIME_SK, - HIVE.STORE_SALES_ORC.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_ORC.SS_CUSTOMER_SK, - HIVE.STORE_SALES_ORC.SS_CDEMO_SK, - HIVE.STORE_SALES_ORC.SS_HDEMO_SK, - HIVE.STORE_SALES_ORC.SS_ADDR_SK, - HIVE.STORE_SALES_ORC.SS_STORE_SK, - HIVE.STORE_SALES_ORC.SS_PROMO_SK, - HIVE.STORE_SALES_ORC.SS_TICKET_NUMBER, - HIVE.STORE_SALES_ORC.SS_QUANTITY, - HIVE.STORE_SALES_ORC.SS_WHOLESALE_COST, - HIVE.STORE_SALES_ORC.SS_LIST_PRICE, - HIVE.STORE_SALES_ORC.SS_SALES_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_DISCOUNT_AMT, - HIVE.STORE_SALES_ORC.SS_EXT_SALES_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_WHOLESALE_COST, - HIVE.STORE_SALES_ORC.SS_EXT_LIST_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_TAX, - HIVE.STORE_SALES_ORC.SS_COUPON_AMT, - HIVE.STORE_SALES_ORC.SS_NET_PAID, - HIVE.STORE_SALES_ORC.SS_NET_PAID_INC_TAX, - HIVE.STORE_SALES_ORC.SS_NET_PROFIT - - -ESP_EXCHANGE ============================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ......... 1,440,202 -EST_OPER_COST ............ 0.01 -EST_TOTAL_COST ......... 191.55 -DESCRIPTION - max_card_est ........... 2.8804e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - est_memory_per_node .... 153.262 KB - record_length ........ 132 - buffer_size ....... 30,388 - parent_processes ....... 1 - child_processes ........ 4 - child_partitioning_func hash2 partitioned 4 ways on (randomNum) - - -ORC_SCAN ================================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.STORE_SALES_ORC -REQUESTS_IN .............. 1 -ROWS_OUT ......... 1,440,202 -EST_OPER_COST .......... 191.55 -EST_TOTAL_COST ......... 
191.55 -DESCRIPTION - max_card_est ........... 2.8804e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - record_length ........ 132 - scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 23 - probes ................. 1 - rows_accessed .......... 2.8804e+06 - executor_predicates .... (HIVE.STORE_SALES_ORC.SS_ITEM_SK = - HIVE.STORE_SALES_ORC.SS_SOLD_DATE_SK) - ---- SQL operation complete. ->> ->>-- Boolean constant predicate is not pushed down ->>explain select * from store_sales_orc where 1=2; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... NOT NAMED -<<<<<<< HEAD -PLAN_ID .................. 212378682018429456 -======= -PLAN_ID .................. 212363939386438156 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -ROWS_OUT ................. 1 -EST_TOTAL_COST ........... 0 -STATEMENT ................ select * from store_sales_orc where 1=2; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ................. 1 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ........... 0 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ........... 0 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 138 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinality .... 1 - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - SCHEMA ................. HIVE.HIVE - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_ORC_FORMAT ON - HIST_ROWCOUNT_REQU 50,000 - HIVE_USE_EXT_TABLE_ATTR ON - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - ORC_COLUMNS_PUSHDOWN ... ON - ORC_PRED_PUSHDOWN ...... ON - GENERATE_EXPLAIN ....... ON - select_list ............ 
HIVE.STORE_SALES_ORC.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_ORC.SS_SOLD_TIME_SK, - HIVE.STORE_SALES_ORC.SS_ITEM_SK, - HIVE.STORE_SALES_ORC.SS_CUSTOMER_SK, - HIVE.STORE_SALES_ORC.SS_CDEMO_SK, - HIVE.STORE_SALES_ORC.SS_HDEMO_SK, - HIVE.STORE_SALES_ORC.SS_ADDR_SK, - HIVE.STORE_SALES_ORC.SS_STORE_SK, - HIVE.STORE_SALES_ORC.SS_PROMO_SK, - HIVE.STORE_SALES_ORC.SS_TICKET_NUMBER, - HIVE.STORE_SALES_ORC.SS_QUANTITY, - HIVE.STORE_SALES_ORC.SS_WHOLESALE_COST, - HIVE.STORE_SALES_ORC.SS_LIST_PRICE, - HIVE.STORE_SALES_ORC.SS_SALES_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_DISCOUNT_AMT, - HIVE.STORE_SALES_ORC.SS_EXT_SALES_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_WHOLESALE_COST, - HIVE.STORE_SALES_ORC.SS_EXT_LIST_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_TAX, - HIVE.STORE_SALES_ORC.SS_COUPON_AMT, - HIVE.STORE_SALES_ORC.SS_NET_PAID, - HIVE.STORE_SALES_ORC.SS_NET_PAID_INC_TAX, - HIVE.STORE_SALES_ORC.SS_NET_PROFIT - - -ORC_SCAN ================================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.STORE_SALES_ORC -REQUESTS_IN .............. 1 -ROWS_OUT ................. 1 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ........... 0 -DESCRIPTION - max_card_est ........... 0 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 138 - scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 23 - probes ................. 1 - part_elim_compiled ..... 0. - ---- SQL operation complete. ->> ->>-- aggregate pushdown to ORC layer ->>cqd orc_aggr_pushdown 'ON'; - ---- SQL operation complete. ->>explain options 'f' select count(*) from store_orc; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -1 . 2 root 1.00E+000 -. . 1 orc_aggr 1.00E+000 - ---- SQL operation complete. ->>select count(*) from store_orc; - -(EXPR) --------------------- - - 12 - ---- 1 row(s) selected. ->>select count(*), count(*) from store_orc; - -(EXPR) (EXPR) --------------------- -------------------- - - 12 12 - ---- 1 row(s) selected. ->>select min(s_store_sk) from store_orc; - -(EXPR) ------------ - - 1 - ---- 1 row(s) selected. ->>select max(s_store_sk) from store_orc; - -(EXPR) ------------ - - 12 - ---- 1 row(s) selected. ->>select sum(s_store_sk) from store_orc; - -(EXPR) --------------------- - - 78 - ---- 1 row(s) selected. ->>select count(*), min(s_store_sk), max(s_store_sk), sum(s_store_sk) -+> from store_orc; - -(EXPR) (EXPR) (EXPR) (EXPR) --------------------- ----------- ----------- -------------------- - - 12 1 12 78 - ---- 1 row(s) selected. ->>explain options 'f' select count(*), min(s_store_sk), max(s_store_sk), -+> sum(s_store_sk) from store_orc; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -1 . 2 root 1.00E+000 -. . 1 orc_aggr 1.00E+000 - ---- SQL operation complete. ->> ->>explain options 'f' -+> select count(*) from hive.hive.store_orc union all -+> select count(*) from hive.hive.store_orc; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -3 . 4 root 2.00E+000 -1 2 3 merge_union 2.00E+000 -. . 2 orc_aggr 1.00E+000 -. . 1 orc_aggr 1.00E+000 - ---- SQL operation complete. 
->>select count(*) from hive.hive.store_orc union all -+> select count(*) from hive.hive.store_orc; - -(EXPR) --------------------- - - 12 - 12 - ---- 2 row(s) selected. ->> ->>explain options 'f' -+> select min(s_store_sk) from hive.hive.store_orc union all -+> select min(s_store_sk) from hive.hive.store_orc; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -3 . 4 root 2.00E+000 -1 2 3 merge_union 2.00E+000 -. . 2 orc_aggr 1.00E+000 -. . 1 orc_aggr 1.00E+000 - ---- SQL operation complete. ->>select min(s_store_sk) from hive.hive.store_orc union all -+> select min(s_store_sk) from hive.hive.store_orc; - -(EXPR) ------------ - - 1 - 1 - ---- 2 row(s) selected. ->> ->>explain options 'f' select count(s_store_sk) from store_orc; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -2 . 3 root 1.00E+000 -1 . 2 sort_scalar_aggr 1.00E+000 -. . 1 orc_scan STORE_ORC 1.00E+002 - ---- SQL operation complete. ->>select count(s_store_sk) from store_orc; - -(EXPR) --------------------- - - 12 - ---- 1 row(s) selected. ->> ->>explain options 'f' select count(*) from store_orc -+> having sum(s_store_sk) = 78; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -1 . 2 root 1.00E+000 -. . 1 orc_aggr 1.00E+000 - ---- SQL operation complete. ->>select count(*) from store_orc having sum(s_store_sk) = 78; - -(EXPR) --------------------- - - 12 - ---- 1 row(s) selected. ->> ->>select count(*) from store_orc having sum(s_store_sk) = 77; - ---- 0 row(s) selected. ->> ->>cqd orc_aggr_pushdown 'OFF'; - ---- SQL operation complete. ->>explain options 'f' select count(*), min(s_store_sk), max(s_store_sk), -+> sum(s_store_sk) from store_orc; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -2 . 3 root 1.00E+000 -1 . 2 sort_scalar_aggr 1.00E+000 -. . 1 orc_scan STORE_ORC 1.00E+002 - ---- SQL operation complete. ->>select count(*), min(s_store_sk), max(s_store_sk), sum(s_store_sk) -+> from store_orc; - -(EXPR) (EXPR) (EXPR) (EXPR) --------------------- ----------- ----------- -------------------- - - 12 1 12 78 - ---- 1 row(s) selected. ->> ->>-- test query cache ->>prepare xx from select cd_gender from customer_demographics_orc where cd_gender = 'male' ; - ---- SQL command prepared. ->>select num_hits, num_params from table(querycacheentries('user', 'local')) -+>where substring(text, 1, 16) = 'select cd_gender' order by 1,2; - -NUM_HITS NUM_PARAMS ----------- ---------- - - 0 1 - ---- 1 row(s) selected. ->> ->>prepare xx from select cd_gender from customer_demographics_orc where cd_gender = 'female' ; - ---- SQL command prepared. ->>select num_hits, num_params from table(querycacheentries('user', 'local')) -+>where substring(text, 1, 16) = 'select cd_gender' order by 1,2; - -NUM_HITS NUM_PARAMS ----------- ---------- - - 1 1 - ---- 1 row(s) selected. ->> ->> ->>-- test external table attributes ->>set schema trafodion.sch; - ---- SQL operation complete. ->>drop external table if exists store_sales_orc for hive.hive.store_sales_orc; - ---- SQL operation complete. ->>create external table store_sales_orc -+> for hive.hive.store_sales_orc; - ---- SQL operation complete. 
->>invoke hive.hive.store_sales_orc; - --- Definition of hive table STORE_SALES_ORC -<<<<<<< HEAD --- Definition current Wed Nov 29 02:20:31 2017 -======= --- Definition current Sun Jun 11 11:10:08 2017 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - - ( - SS_SOLD_DATE_SK INT - , SS_SOLD_TIME_SK INT - , SS_ITEM_SK INT - , SS_CUSTOMER_SK INT - , SS_CDEMO_SK INT - , SS_HDEMO_SK INT - , SS_ADDR_SK INT - , SS_STORE_SK INT - , SS_PROMO_SK INT - , SS_TICKET_NUMBER INT - , SS_QUANTITY INT - , SS_WHOLESALE_COST REAL - , SS_LIST_PRICE REAL - , SS_SALES_PRICE REAL - , SS_EXT_DISCOUNT_AMT REAL - , SS_EXT_SALES_PRICE REAL - , SS_EXT_WHOLESALE_COST REAL - , SS_EXT_LIST_PRICE REAL - , SS_EXT_TAX REAL - , SS_COUPON_AMT REAL - , SS_NET_PAID REAL - , SS_NET_PAID_INC_TAX REAL - , SS_NET_PROFIT REAL - ) - /* stored as orc */ - ---- SQL operation complete. ->> ->>set schema hive.hive; - ---- SQL operation complete. ->>prepare s from select * from store_sales_orc where ss_item_sk = 1; - ---- SQL command prepared. ->>explain s; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... S -<<<<<<< HEAD -PLAN_ID .................. 212378682032138196 -======= -PLAN_ID .................. 212363939411863426 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -ROWS_OUT ............. 1,698 -EST_TOTAL_COST ......... 191.52 -STATEMENT ................ select * from store_sales_orc where ss_item_sk = 1; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 3 ONLY CHILD 2 -REQUESTS_IN .............. 1 -ROWS_OUT ............. 1,698 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ......... 191.52 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ........... 2.73638e+06 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 138 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinality 1,698 - esp_2_node_map ......... (-1(4 times)) - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_ORC_FORMAT ON - HIST_ROWCOUNT_REQU 50,000 - HIVE_USE_EXT_TABLE_ATTR ON - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - ORC_COLUMNS_PUSHDOWN ... ON - ORC_PRED_PUSHDOWN ...... ON - ORC_AGGR_PUSHDOWN ...... OFF - SCHEMA ................. HIVE.HIVE - select_list ............ 
HIVE.STORE_SALES_ORC.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_ORC.SS_SOLD_TIME_SK, %(1), - HIVE.STORE_SALES_ORC.SS_CUSTOMER_SK, - HIVE.STORE_SALES_ORC.SS_CDEMO_SK, - HIVE.STORE_SALES_ORC.SS_HDEMO_SK, - HIVE.STORE_SALES_ORC.SS_ADDR_SK, - HIVE.STORE_SALES_ORC.SS_STORE_SK, - HIVE.STORE_SALES_ORC.SS_PROMO_SK, - HIVE.STORE_SALES_ORC.SS_TICKET_NUMBER, - HIVE.STORE_SALES_ORC.SS_QUANTITY, - HIVE.STORE_SALES_ORC.SS_WHOLESALE_COST, - HIVE.STORE_SALES_ORC.SS_LIST_PRICE, - HIVE.STORE_SALES_ORC.SS_SALES_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_DISCOUNT_AMT, - HIVE.STORE_SALES_ORC.SS_EXT_SALES_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_WHOLESALE_COST, - HIVE.STORE_SALES_ORC.SS_EXT_LIST_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_TAX, - HIVE.STORE_SALES_ORC.SS_COUPON_AMT, - HIVE.STORE_SALES_ORC.SS_NET_PAID, - HIVE.STORE_SALES_ORC.SS_NET_PAID_INC_TAX, - HIVE.STORE_SALES_ORC.SS_NET_PROFIT - input_variables ........ %(1) - - -ESP_EXCHANGE ============================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ............. 1,698 -EST_OPER_COST ............ 0.01 -EST_TOTAL_COST ......... 191.52 -DESCRIPTION - max_card_est ........... 2.73638e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - est_memory_per_node .... 29.297 KB - record_length ........ 132 - buffer_size ........ 5,000 - parent_processes ....... 1 - child_processes ........ 4 - child_partitioning_func hash2 partitioned 4 ways on (randomNum) - - -ORC_SCAN ================================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.STORE_SALES_ORC -REQUESTS_IN .............. 1 -ROWS_OUT ............. 1,698 -EST_OPER_COST .......... 191.52 -EST_TOTAL_COST ......... 191.52 -DESCRIPTION - max_card_est ........... 2.73638e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - record_length ........ 132 - scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 23 - probes ................. 1 - rows_accessed .......... 2.8804e+06 - orc_pred_pushdown ...... yes - orc_search_arguments ... and( HIVE.STORE_SALES_ORC.SS_ITEM_SK = %(1) ) - executor_predicates .... (HIVE.STORE_SALES_ORC.SS_ITEM_SK = %(1)) - ---- SQL operation complete. ->> ->>-- join with nested join ->>cqd orc_njs 'on'; - ---- SQL operation complete. ->>control query shape nested_join(scan(path 'CUSTOMER_ORC'), -+> scan(path 'STORE_SALES_ORC')); - ---- SQL operation complete. ->>prepare s from select * from customer_orc, store_sales_orc -+> where store_sales_orc.ss_item_sk = customer_orc.c_customer_sk; - ---- SQL command prepared. ->>explain options 'f' s; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -3 . 4 root 2.88E+006 -1 2 3 nested_join 2.88E+006 -. . 2 orc_scan STORE_SALES_ORC 2.88E+001 -. . 1 orc_scan CUSTOMER_ORC 1.00E+005 - ---- SQL operation complete. ->>explain s; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... S -<<<<<<< HEAD -PLAN_ID .................. 212378682032886873 -======= -PLAN_ID .................. 212363939416859647 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -ROWS_OUT ......... 
2,880,404 -EST_TOTAL_COST 23,225,420 -STATEMENT ................ select * - from customer_orc, store_sales_orc - where store_sales_orc.ss_item_sk = - customer_orc.c_customer_sk; -MUST_MATCH ............... forced nested join(forced scan, forced scan) - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 4 ONLY CHILD 3 -REQUESTS_IN .............. 1 -ROWS_OUT ......... 2,880,404 -EST_OPER_COST ............ 0 -EST_TOTAL_COST 23,225,420 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ........... 2.8804e+06 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 402 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinal 2,880,404 - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_ORC_FORMAT ON - HIST_ROWCOUNT_REQU 50,000 - HIVE_USE_EXT_TABLE_ATTR ON - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - ORC_COLUMNS_PUSHDOWN ... ON - ORC_PRED_PUSHDOWN ...... ON - ORC_AGGR_PUSHDOWN ...... OFF - SCHEMA ................. HIVE.HIVE - ORC_NJS ................ ON - select_list ............ 
HIVE.CUSTOMER_ORC.C_CUSTOMER_SK, - HIVE.CUSTOMER_ORC.C_CUSTOMER_ID, - HIVE.CUSTOMER_ORC.C_CURRENT_CDEMO_SK, - HIVE.CUSTOMER_ORC.C_CURRENT_HDEMO_SK, - HIVE.CUSTOMER_ORC.C_CURRENT_ADDR_SK, - HIVE.CUSTOMER_ORC.C_FIRST_SHIPTO_DATE_SK, - HIVE.CUSTOMER_ORC.C_FIRST_SALES_DATE_SK, - HIVE.CUSTOMER_ORC.C_SALUTATION, - HIVE.CUSTOMER_ORC.C_FIRST_NAME, - HIVE.CUSTOMER_ORC.C_LAST_NAME, - HIVE.CUSTOMER_ORC.C_PREFERRED_CUST_FLAG, - HIVE.CUSTOMER_ORC.C_BIRTH_DAY, - HIVE.CUSTOMER_ORC.C_BIRTH_MONTH, - HIVE.CUSTOMER_ORC.C_BIRTH_YEAR, - HIVE.CUSTOMER_ORC.C_BIRTH_COUNTRY, - HIVE.CUSTOMER_ORC.C_LOGIN, - HIVE.CUSTOMER_ORC.C_EMAIL_ADDRESS, - HIVE.CUSTOMER_ORC.C_LAST_REVIEW_DATE, - HIVE.STORE_SALES_ORC.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_ORC.SS_SOLD_TIME_SK, - HIVE.CUSTOMER_ORC.C_CUSTOMER_SK, - HIVE.STORE_SALES_ORC.SS_CUSTOMER_SK, - HIVE.STORE_SALES_ORC.SS_CDEMO_SK, - HIVE.STORE_SALES_ORC.SS_HDEMO_SK, - HIVE.STORE_SALES_ORC.SS_ADDR_SK, - HIVE.STORE_SALES_ORC.SS_STORE_SK, - HIVE.STORE_SALES_ORC.SS_PROMO_SK, - HIVE.STORE_SALES_ORC.SS_TICKET_NUMBER, - HIVE.STORE_SALES_ORC.SS_QUANTITY, - HIVE.STORE_SALES_ORC.SS_WHOLESALE_COST, - HIVE.STORE_SALES_ORC.SS_LIST_PRICE, - HIVE.STORE_SALES_ORC.SS_SALES_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_DISCOUNT_AMT, - HIVE.STORE_SALES_ORC.SS_EXT_SALES_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_WHOLESALE_COST, - HIVE.STORE_SALES_ORC.SS_EXT_LIST_PRICE, - HIVE.STORE_SALES_ORC.SS_EXT_TAX, - HIVE.STORE_SALES_ORC.SS_COUPON_AMT, - HIVE.STORE_SALES_ORC.SS_NET_PAID, - HIVE.STORE_SALES_ORC.SS_NET_PAID_INC_TAX, - HIVE.STORE_SALES_ORC.SS_NET_PROFIT - - -NESTED_JOIN =============================== SEQ_NO 3 CHILDREN 1, 2 -REQUESTS_IN .............. 1 -ROWS_OUT ......... 2,880,404 -EST_OPER_COST ............ 0.5 -EST_TOTAL_COST 23,225,420 -DESCRIPTION - max_card_est ........... 2.8804e+06 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 402 - join_type .............. inner - join_method ............ nested - - -ORC_SCAN ================================== SEQ_NO 2 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.STORE_SALES_ORC -REQUESTS_IN ........ 100,000 -ROWS/REQUEST ............ 28.8 -EST_OPER_COST ... 23,225,410 -EST_TOTAL_COST 23,225,410 -DESCRIPTION - max_card_est ........... 2.8804e+06 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 132 - scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 23 - probes ........... 100,000 - successful_probes 100,000 - unique_probes ...... 5,000 - duplicated_succ_pr 95,000 - rows_accessed .......... 2.8804e+06 - orc_pred_pushdown ...... yes - orc_search_arguments ... and( HIVE.STORE_SALES_ORC.SS_ITEM_SK = - HIVE.CUSTOMER_ORC.C_CUSTOMER_SK ) - executor_predicates .... (HIVE.STORE_SALES_ORC.SS_ITEM_SK = - HIVE.CUSTOMER_ORC.C_CUSTOMER_SK) - - -ORC_SCAN ================================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.CUSTOMER_ORC -REQUESTS_IN .............. 1 -ROWS_OUT ........... 100,000 -EST_OPER_COST ............ 7.71 -EST_TOTAL_COST ........... 7.71 -DESCRIPTION - max_card_est ..... 100,000 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 270 - scan_type .............. 
full scan of table HIVE.HIVE.CUSTOMER_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 18 - probes ................. 1 - rows_accessed .... 100,000 - orc_pred_pushdown ...... yes - orc_search_arguments ... and( not( HIVE.CUSTOMER_ORC.C_CUSTOMER_SK is null ) - ) - executor_predicates .... HIVE.CUSTOMER_ORC.C_CUSTOMER_SK is not null - ---- SQL operation complete. ->>cqd orc_njs reset; - ---- SQL operation complete. ->> ->>-- join with parallel nested join ->> ->>control query shape off; - ---- SQL operation complete. ->>cqd HIVE_USE_EXT_TABLE_ATTRS 'off'; - ---- SQL operation complete. ->>cqd ncm_orc_costing 'on'; - ---- SQL operation complete. ->>cqd orc_njs 'on'; - ---- SQL operation complete. ->>cqd parallel_num_esps '4'; - ---- SQL operation complete. ->> ->>prepare s from -+>select [last 0] ss_net_profit from -+>date_dim_orc dim, store_sales_sorted_orc ss -+>where -+>dim.d_date_sk = ss.ss_sold_date_sk -+>and d_year in (2001) and d_dom = 30 -- produce 12 rows -+>; - ---- SQL command prepared. ->> ->>explain options 'f' s; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -6 . 7 root 1.00E+000 -5 . 6 firstn 1.00E+000 -4 . 5 esp_exchange 1:4(hash2) 2.88E+006 -3 2 4 hybrid_hash_join 2.88E+006 -. . 3 orc_scan STORE_SALES_SORTED_O 2.88E+006 -1 . 2 esp_exchange 4(rep-b):4(hash2) 2.00E+000 -. . 1 orc_scan DATE_DIM_ORC 2.00E+000 - ---- SQL operation complete. ->> ->>cqd HIVE_USE_EXT_TABLE_ATTRS reset; - ---- SQL operation complete. ->>cqd ncm_orc_costing reset; - ---- SQL operation complete. ->>cqd orc_njs reset; - ---- SQL operation complete. ->>cqd parallel_num_esps reset; - ---- SQL operation complete. ->> ->> ->>-- more external table tests ->> ->>control query shape cut; - ---- SQL operation complete. ->>set schema trafodion.sch; - ---- SQL operation complete. ->>drop external table if exists date_dim_orc for hive.hive.date_dim_orc; - ---- SQL operation complete. ->>cqd volatile_table_find_suitable_key 'SYSTEM'; - ---- SQL operation complete. ->>create external table date_dim_orc -+> (d_date_sk int, d_date_id varchar(100 bytes) character set utf8, d_date date, -+> d_month_seq int, d_week_seq int, d_quarter_seq int, d_year int, d_dow int, -+> d_moy int, d_dom int, d_qoy int, d_fy_year int, d_fy_quarter_seq int, -+> d_fy_week_seq int, -+> d_day_name varchar(120 bytes) character set utf8, d_quarter_name varchar(200 bytes) character set utf8, d_holiday varchar(100 bytes) character set utf8, -+> d_weekend varchar(100 bytes) character set utf8, d_following_holiday varchar(100 bytes) character set utf8, -+> d_first_dom int, d_last_dom int, d_same_day_ly int, d_same_day_lq int, -+> d_current_day varchar(100 bytes) character set utf8, d_current_week varchar(111 bytes) character set utf8, -+> d_current_month varchar(200 bytes) character set utf8, d_current_quarter varchar(100 bytes) character set utf8, -+> d_current_year varchar(100 bytes) character set utf8) -+> for hive.hive.date_dim_orc; - ---- SQL operation complete. ->>invoke hive.hive.date_dim_orc; - --- Definition of hive table DATE_DIM_ORC -<<<<<<< HEAD --- Definition current Wed Nov 29 02:20:37 2017 -======= --- Definition current Sun Jun 11 11:10:29 2017 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- - ( - D_DATE_SK INT - , D_DATE_ID VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_DATE DATE - , D_MONTH_SEQ INT - , D_WEEK_SEQ INT - , D_QUARTER_SEQ INT - , D_YEAR INT - , D_DOW INT - , D_MOY INT - , D_DOM INT - , D_QOY INT - , D_FY_YEAR INT - , D_FY_QUARTER_SEQ INT - , D_FY_WEEK_SEQ INT - , D_DAY_NAME VARCHAR(120 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_QUARTER_NAME VARCHAR(200 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_HOLIDAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_WEEKEND VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_FOLLOWING_HOLIDAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_FIRST_DOM INT - , D_LAST_DOM INT - , D_SAME_DAY_LY INT - , D_SAME_DAY_LQ INT - , D_CURRENT_DAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_WEEK VARCHAR(111 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_MONTH VARCHAR(200 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_QUARTER VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_YEAR VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - ) - /* stored as orc */ - ---- SQL operation complete. ->>showddl hive.hive.date_dim_orc; - -/* Hive DDL */ -CREATE TABLE DEFAULT.DATE_DIM_ORC - ( - D_DATE_SK int - , D_DATE_ID string - , D_DATE date - , D_MONTH_SEQ int - , D_WEEK_SEQ int - , D_QUARTER_SEQ int - , D_YEAR int - , D_DOW int - , D_MOY int - , D_DOM int - , D_QOY int - , D_FY_YEAR int - , D_FY_QUARTER_SEQ int - , D_FY_WEEK_SEQ int - , D_DAY_NAME string - , D_QUARTER_NAME string - , D_HOLIDAY string - , D_WEEKEND string - , D_FOLLOWING_HOLIDAY string - , D_FIRST_DOM int - , D_LAST_DOM int - , D_SAME_DAY_LY int - , D_SAME_DAY_LQ int - , D_CURRENT_DAY string - , D_CURRENT_WEEK string - , D_CURRENT_MONTH string - , D_CURRENT_QUARTER string - , D_CURRENT_YEAR string - ) - stored as orc -; - -<<<<<<< HEAD -======= -REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_ORC; -/* ObjectUID = 3064268403396931736 */ - ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
-/* Trafodion DDL */ - -REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_ORC; -/* ObjectUID = 8954428504972159626 */ - -CREATE EXTERNAL TABLE DATE_DIM_ORC - ( - D_DATE_SK INT DEFAULT NULL - , D_DATE_ID VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_DATE DATE DEFAULT NULL - , D_MONTH_SEQ INT DEFAULT NULL - , D_WEEK_SEQ INT DEFAULT NULL - , D_QUARTER_SEQ INT DEFAULT NULL - , D_YEAR INT DEFAULT NULL - , D_DOW INT DEFAULT NULL - , D_MOY INT DEFAULT NULL - , D_DOM INT DEFAULT NULL - , D_QOY INT DEFAULT NULL - , D_FY_YEAR INT DEFAULT NULL - , D_FY_QUARTER_SEQ INT DEFAULT NULL - , D_FY_WEEK_SEQ INT DEFAULT NULL - , D_DAY_NAME VARCHAR(120 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_QUARTER_NAME VARCHAR(200 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_HOLIDAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_WEEKEND VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_FOLLOWING_HOLIDAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_FIRST_DOM INT DEFAULT NULL - , D_LAST_DOM INT DEFAULT NULL - , D_SAME_DAY_LY INT DEFAULT NULL - , D_SAME_DAY_LQ INT DEFAULT NULL - , D_CURRENT_DAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_WEEK VARCHAR(111 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_MONTH VARCHAR(200 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_QUARTER VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_YEAR VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - ) - FOR HIVE.HIVE.DATE_DIM_ORC -; - ---- SQL operation complete. ->>prepare s from select * from hive.hive.date_dim_orc where d_date = date '2016-01-27'; - ---- SQL command prepared. ->>explain s; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... S -<<<<<<< HEAD -PLAN_ID .................. 212378682040442348 -======= -PLAN_ID .................. 212363939435356144 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -ROWS_OUT ............... 271 -EST_TOTAL_COST ........... 0.66 -STATEMENT ................ select * - from hive.hive.date_dim_orc - where d_date = date '2016-01-27'; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ............... 271 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ........... 0.66 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ...... 69,397.5 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ...... 1,477 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinality 271 - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 
2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_ORC_FORMAT ON - HIST_ROWCOUNT_REQU 50,000 - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - ORC_COLUMNS_PUSHDOWN ... ON - ORC_PRED_PUSHDOWN ...... ON - ORC_AGGR_PUSHDOWN ...... OFF - SCHEMA ................. TRAFODION.SCH - VOLATILE_TABLE_FIND_SUI SYSTEM - select_list ............ HIVE.DATE_DIM_ORC.D_DATE_SK, - HIVE.DATE_DIM_ORC.D_DATE_ID, %(2016-01-27), - HIVE.DATE_DIM_ORC.D_MONTH_SEQ, - HIVE.DATE_DIM_ORC.D_WEEK_SEQ, - HIVE.DATE_DIM_ORC.D_QUARTER_SEQ, - HIVE.DATE_DIM_ORC.D_YEAR, HIVE.DATE_DIM_ORC.D_DOW, - HIVE.DATE_DIM_ORC.D_MOY, HIVE.DATE_DIM_ORC.D_DOM, - HIVE.DATE_DIM_ORC.D_QOY, - HIVE.DATE_DIM_ORC.D_FY_YEAR, - HIVE.DATE_DIM_ORC.D_FY_QUARTER_SEQ, - HIVE.DATE_DIM_ORC.D_FY_WEEK_SEQ, - HIVE.DATE_DIM_ORC.D_DAY_NAME, - HIVE.DATE_DIM_ORC.D_QUARTER_NAME, - HIVE.DATE_DIM_ORC.D_HOLIDAY, - HIVE.DATE_DIM_ORC.D_WEEKEND, - HIVE.DATE_DIM_ORC.D_FOLLOWING_HOLIDAY, - HIVE.DATE_DIM_ORC.D_FIRST_DOM, - HIVE.DATE_DIM_ORC.D_LAST_DOM, - HIVE.DATE_DIM_ORC.D_SAME_DAY_LY, - HIVE.DATE_DIM_ORC.D_SAME_DAY_LQ, - HIVE.DATE_DIM_ORC.D_CURRENT_DAY, - HIVE.DATE_DIM_ORC.D_CURRENT_WEEK, - HIVE.DATE_DIM_ORC.D_CURRENT_MONTH, - HIVE.DATE_DIM_ORC.D_CURRENT_QUARTER, - HIVE.DATE_DIM_ORC.D_CURRENT_YEAR - input_variables ........ %(2016-01-27) - - -ORC_SCAN ================================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.DATE_DIM_ORC -REQUESTS_IN .............. 1 -ROWS_OUT ............... 271 -EST_OPER_COST ............ 0.66 -EST_TOTAL_COST ........... 0.66 -DESCRIPTION - max_card_est ...... 69,397.5 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ...... 1,471 - scan_type .............. full scan of table HIVE.HIVE.DATE_DIM_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 28 - probes ................. 1 - rows_accessed ..... 73,049 - orc_pred_pushdown ...... yes - orc_search_arguments ... and( HIVE.DATE_DIM_ORC.D_DATE = %(2016-01-27) ) - executor_predicates .... (HIVE.DATE_DIM_ORC.D_DATE = %(2016-01-27)) - ---- SQL operation complete. ->> ->>drop external table if exists date_dim_orc for hive.hive.date_dim_orc; - ---- SQL operation complete. ->>create external table date_dim_orc -+> (d_date_sk int, d_date_id varchar(100 bytes) character set utf8, d_date date) -+> for hive.hive.date_dim_orc; - ---- SQL operation complete. ->>invoke hive.hive.date_dim_orc; - --- Definition of hive table DATE_DIM_ORC -<<<<<<< HEAD --- Definition current Wed Nov 29 02:20:44 2017 -======= --- Definition current Sun Jun 11 11:10:50 2017 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- - ( - D_DATE_SK INT - , D_DATE_ID VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_DATE DATE - , D_MONTH_SEQ INT - , D_WEEK_SEQ INT - , D_QUARTER_SEQ INT - , D_YEAR INT - , D_DOW INT - , D_MOY INT - , D_DOM INT - , D_QOY INT - , D_FY_YEAR INT - , D_FY_QUARTER_SEQ INT - , D_FY_WEEK_SEQ INT - , D_DAY_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_QUARTER_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_HOLIDAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_WEEKEND VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_FOLLOWING_HOLIDAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_FIRST_DOM INT - , D_LAST_DOM INT - , D_SAME_DAY_LY INT - , D_SAME_DAY_LQ INT - , D_CURRENT_DAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_WEEK VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_MONTH VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_QUARTER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_YEAR VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - ) - /* stored as orc */ - ---- SQL operation complete. ->>showddl hive.hive.date_dim_orc; - -/* Hive DDL */ -CREATE TABLE DEFAULT.DATE_DIM_ORC - ( - D_DATE_SK int - , D_DATE_ID string - , D_DATE date - , D_MONTH_SEQ int - , D_WEEK_SEQ int - , D_QUARTER_SEQ int - , D_YEAR int - , D_DOW int - , D_MOY int - , D_DOM int - , D_QOY int - , D_FY_YEAR int - , D_FY_QUARTER_SEQ int - , D_FY_WEEK_SEQ int - , D_DAY_NAME string - , D_QUARTER_NAME string - , D_HOLIDAY string - , D_WEEKEND string - , D_FOLLOWING_HOLIDAY string - , D_FIRST_DOM int - , D_LAST_DOM int - , D_SAME_DAY_LY int - , D_SAME_DAY_LQ int - , D_CURRENT_DAY string - , D_CURRENT_WEEK string - , D_CURRENT_MONTH string - , D_CURRENT_QUARTER string - , D_CURRENT_YEAR string - ) - stored as orc -; - -<<<<<<< HEAD -======= -REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_ORC; -/* ObjectUID = 3064268403396931736 */ - ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
-/* Trafodion DDL */ - -REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_ORC; -/* ObjectUID = 8954428504972159626 */ - -CREATE EXTERNAL TABLE DATE_DIM_ORC - ( - D_DATE_SK INT DEFAULT NULL - , D_DATE_ID VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_DATE DATE DEFAULT NULL - , D_MONTH_SEQ INT DEFAULT NULL - , D_WEEK_SEQ INT DEFAULT NULL - , D_QUARTER_SEQ INT DEFAULT NULL - , D_YEAR INT DEFAULT NULL - , D_DOW INT DEFAULT NULL - , D_MOY INT DEFAULT NULL - , D_DOM INT DEFAULT NULL - , D_QOY INT DEFAULT NULL - , D_FY_YEAR INT DEFAULT NULL - , D_FY_QUARTER_SEQ INT DEFAULT NULL - , D_FY_WEEK_SEQ INT DEFAULT NULL - , D_DAY_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_QUARTER_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_HOLIDAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_WEEKEND VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_FOLLOWING_HOLIDAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_FIRST_DOM INT DEFAULT NULL - , D_LAST_DOM INT DEFAULT NULL - , D_SAME_DAY_LY INT DEFAULT NULL - , D_SAME_DAY_LQ INT DEFAULT NULL - , D_CURRENT_DAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_WEEK VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_MONTH VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_QUARTER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_YEAR VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - ) - FOR HIVE.HIVE.DATE_DIM_ORC -; - ---- SQL operation complete. ->>prepare s from select * from hive.hive.date_dim_orc where d_date = date '2016-01-27'; - ---- SQL command prepared. ->>explain s; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... S -<<<<<<< HEAD -PLAN_ID .................. 212378682040442348 -======= -PLAN_ID .................. 212363939456056239 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -ROWS_OUT ............... 271 -EST_TOTAL_COST ........... 0.66 -STATEMENT ................ select * - from hive.hive.date_dim_orc - where d_date = date '2016-01-27'; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ............... 271 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ........... 0.66 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ...... 69,397.5 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ...... 1,477 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinality 271 - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 
2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_ORC_FORMAT ON - HIST_ROWCOUNT_REQU 50,000 - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - ORC_COLUMNS_PUSHDOWN ... ON - ORC_PRED_PUSHDOWN ...... ON - ORC_AGGR_PUSHDOWN ...... OFF - SCHEMA ................. TRAFODION.SCH - VOLATILE_TABLE_FIND_SUI SYSTEM - select_list ............ HIVE.DATE_DIM_ORC.D_DATE_SK, - HIVE.DATE_DIM_ORC.D_DATE_ID, %(2016-01-27), - HIVE.DATE_DIM_ORC.D_MONTH_SEQ, - HIVE.DATE_DIM_ORC.D_WEEK_SEQ, - HIVE.DATE_DIM_ORC.D_QUARTER_SEQ, - HIVE.DATE_DIM_ORC.D_YEAR, HIVE.DATE_DIM_ORC.D_DOW, - HIVE.DATE_DIM_ORC.D_MOY, HIVE.DATE_DIM_ORC.D_DOM, - HIVE.DATE_DIM_ORC.D_QOY, - HIVE.DATE_DIM_ORC.D_FY_YEAR, - HIVE.DATE_DIM_ORC.D_FY_QUARTER_SEQ, - HIVE.DATE_DIM_ORC.D_FY_WEEK_SEQ, - HIVE.DATE_DIM_ORC.D_DAY_NAME, - HIVE.DATE_DIM_ORC.D_QUARTER_NAME, - HIVE.DATE_DIM_ORC.D_HOLIDAY, - HIVE.DATE_DIM_ORC.D_WEEKEND, - HIVE.DATE_DIM_ORC.D_FOLLOWING_HOLIDAY, - HIVE.DATE_DIM_ORC.D_FIRST_DOM, - HIVE.DATE_DIM_ORC.D_LAST_DOM, - HIVE.DATE_DIM_ORC.D_SAME_DAY_LY, - HIVE.DATE_DIM_ORC.D_SAME_DAY_LQ, - HIVE.DATE_DIM_ORC.D_CURRENT_DAY, - HIVE.DATE_DIM_ORC.D_CURRENT_WEEK, - HIVE.DATE_DIM_ORC.D_CURRENT_MONTH, - HIVE.DATE_DIM_ORC.D_CURRENT_QUARTER, - HIVE.DATE_DIM_ORC.D_CURRENT_YEAR - input_variables ........ %(2016-01-27) - - -ORC_SCAN ================================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.DATE_DIM_ORC -REQUESTS_IN .............. 1 -ROWS_OUT ............... 271 -EST_OPER_COST ............ 0.66 -EST_TOTAL_COST ........... 0.66 -DESCRIPTION - max_card_est ...... 69,397.5 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ...... 1,471 - scan_type .............. full scan of table HIVE.HIVE.DATE_DIM_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 28 - probes ................. 1 - rows_accessed ..... 73,049 - orc_pred_pushdown ...... yes - orc_search_arguments ... and( HIVE.DATE_DIM_ORC.D_DATE = %(2016-01-27) ) - executor_predicates .... (HIVE.DATE_DIM_ORC.D_DATE = %(2016-01-27)) - ---- SQL operation complete. ->> ->> ->>-- error cases ->>drop external table if exists date_dim_orc for hive.hive.date_dim_orc; - ---- SQL operation complete. ->> ->>-- column d_date_skk doesn't exist in native hive table ->>create external table date_dim_orc -+> (d_date_skk int) -+> for hive.hive.date_dim_orc; - -*** ERROR[1009] Column D_DATE_SKK does not exist in the specified table. - ---- SQL operation failed with errors. ->> ->>-- del/update not supported on orc or hive ->>prepare s from delete from hive.hive.store2_sales_orc; - -*** ERROR[4223] Update/Delete on ORC table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from update hive.hive.store2_sales_orc set ss_ext_tax = 1; - -*** ERROR[4223] Update/Delete on ORC table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from delete from hive.hive.store_sales; - -*** ERROR[4223] Update/Delete on Hive table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. 
- ->>prepare s from update hive.hive.store_sales set ss_ext_tax = 1; - -*** ERROR[4223] Update/Delete on Hive table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. - ->> ->>-- upsert/insert cannot specify column list and must provide all column values. ->>prepare s from upsert into hive.hive.store2_sales_orc values (1); - -*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23). - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from upsert using load into hive.hive.store2_sales_orc values (1); - -*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23). - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from insert into hive.hive.store2_sales_orc values (1); - -*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23). - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from update hive.hive.store2_sales_orc set ss_net_paid = 1; - -*** ERROR[4223] Update/Delete on ORC table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from upsert into hive.hive.store2_sales_orc(ss_sold_date_sk) values (1); - -*** ERROR[4223] Target column list specification for insert/upsert into a Hive table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from insert into hive.hive.store2_sales_orc(ss_sold_date_sk) values (1); - -*** ERROR[4223] Target column list specification for insert/upsert into a Hive table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from upsert into hive.hive.store_sales(ss_sold_date_sk) values (1); - -*** ERROR[4223] Target column list specification for insert/upsert into a Hive table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from insert into hive.hive.store_sales(ss_sold_date_sk) values (1); - -*** ERROR[4223] Target column list specification for insert/upsert into a Hive table is not supported in this software version or edition. - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from upsert into hive.hive.store_sales values (1); - -*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23). - -*** ERROR[8822] The statement was not prepared. - ->>prepare s from insert into hive.hive.store_sales values (1); - -*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23). - -*** ERROR[8822] The statement was not prepared. - ->> ->> ->>-- test min-max optimization ->> ->>update statistics for table hive.hive.date_dim on every column sample; - ---- SQL operation complete. ->>update statistics for table hive.hive.time_dim on every column sample; - ---- SQL operation complete. ->> ->>cqd ORC_PRED_PUSHDOWN 'ON'; - ---- SQL operation complete. ->>cqd GEN_HSHJ_MIN_MAX_OPT 'on'; - ---- SQL operation complete. ->>cqd parallel_num_esps '4'; - ---- SQL operation complete. ->>cqd nested_joins 'off'; - ---- SQL operation complete. 
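The ERROR[4223]/ERROR[4023] cases above fix the DML surface for Hive and ORC tables in this build: update and delete are rejected outright, insert/upsert may not name a target column list, and any supplied row must cover every column of the table. A condensed restatement, illustrative only (store2_sales_orc and its 23 columns come from the regression setup):

  update hive.hive.store2_sales_orc set ss_net_paid = 1;               -- ERROR[4223]: update/delete not supported
  insert into hive.hive.store2_sales_orc (ss_sold_date_sk) values (1); -- ERROR[4223]: target column list not allowed
  insert into hive.hive.store2_sales_orc values (1);                   -- ERROR[4023]: all 23 column values required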
->> ->>prepare xx from select count(*) from -+>hive.hive.store2_sales_orc, -- sorted on ss_sold_date_sk -+>hive.hive.date_dim, -+>hive.hive.time_dim -+> where ss_sold_date_sk = d_date_sk and -+> ss_sold_date_sk = t_time_sk and d_year = 2001 and t_hour = 10 ; - ---- SQL command prepared. ->> ->>-- display the push-down predicates, which should include the ->>-- min/max expressions from dimension table date_dim and time_dim. ->>select cast(tokenstr('orc_search_arguments', description, 'executor_predicates') -+> as char(400)) -+> from table (explain(NULL,'XX')) -+>where position('orc_search_arguments' in description) > 0 ; - -(EXPR) ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - -and( and( and( and( and( not( HIVE.STORE2_SALES_ORC.SS_SOLD_DATE_SK is null ) not( HIVE.STORE2_SALES_ORC.SS_SOLD_DATE_SK <\:_sys_MinVal0 ) ) HIVE.STORE2_SALES_ORC.SS_SOLD_DATE_SK <= \:_sys_MaxVal0 ) not( HIVE.STORE2_SALES_ORC.SS_SOLD_DATE_SK <\:_sys_MinVal1 ) ) HIVE.STORE2_SALES_ORC.SS_SOLD_DATE_SK <= \:_sys_MaxVal1 ) ) - ---- 1 row(s) selected. ->> ->>execute xx; - -(EXPR) --------------------- - - 0 - ---- 1 row(s) selected. ->> ->>cqd parallel_num_esps reset; - ---- SQL operation complete. ->>cqd nested_joins reset; - ---- SQL operation complete. ->> ->>-- test NJ into a sorted ORC table ->> ->>cqd orc_pred_pushdown 'ON'; - ---- SQL operation complete. ->>cqd orc_njs 'ON'; - ---- SQL operation complete. ->>cqd parallel_num_esps '4'; - ---- SQL operation complete. ->>cqd HIVE_USE_EXT_TABLE_ATTRS 'off'; - ---- SQL operation complete. ->> ->>prepare xx from select count(*) from hive.hive.customer_orc, hive.hive.store_sales_sorted_orc -+> where ss_sold_date_sk = c_customer_sk -+> and c_first_sales_date_sk = 4; - ---- SQL command prepared. ->> ->>explain xx; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... XX -<<<<<<< HEAD -PLAN_ID .................. 212378682095271698 -ROWS_OUT ................. 1 -EST_TOTAL_COST .......... 17.46 -======= -PLAN_ID .................. 212363939559855112 -ROWS_OUT ................. 1 -EST_TOTAL_COST .......... 18.16 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -STATEMENT ................ select count(*) - from hive.hive.customer_orc, - hive.hive.store_sales_sorted_orc - where ss_sold_date_sk = c_customer_sk and - c_first_sales_date_sk = 4; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 8 ONLY CHILD 7 -REQUESTS_IN .............. 1 -ROWS_OUT ................. 1 -EST_OPER_COST ............ 0 -<<<<<<< HEAD -EST_TOTAL_COST .......... 17.46 -======= -EST_TOTAL_COST .......... 18.16 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.13(BMOs), 0.00(nBMOs) MB - max_card_est ........... 1 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length .......... 8 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinal 2,880,404 - esp_2_node_map ......... 
(-1(4 times)) - esp_3_node_map ......... (-1(4 times)) - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_ORC_FORMAT ON - HIST_ROWCOUNT_REQU 50,000 - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - ORC_COLUMNS_PUSHDOWN ... ON - ORC_AGGR_PUSHDOWN ...... OFF - SCHEMA ................. TRAFODION.SCH - VOLATILE_TABLE_FIND_SUI SYSTEM - GEN_HSHJ_MIN_MAX_OPT ... ON - ORC_PRED_PUSHDOWN ...... ON - ORC_NJS ................ ON - PARALLEL_NUM_ESPS ...... 4 - HIVE_USE_EXT_TABLE_ATTR OFF - select_list ............ cast(sum(count(1 ))) - input_variables ........ %(4), execution_count - - -SORT_PARTIAL_AGGR_ROOT ==================== SEQ_NO 7 ONLY CHILD 6 -REQUESTS_IN .............. 1 -ROWS_OUT ................. 1 -EST_OPER_COST ............ 0.01 -<<<<<<< HEAD -EST_TOTAL_COST .......... 17.46 -======= -EST_TOTAL_COST .......... 18.16 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -DESCRIPTION - max_card_est ........... 1 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length .......... 8 - aggregates ............. sum(count(1 )) - - -ESP_EXCHANGE ============================== SEQ_NO 6 ONLY CHILD 5 -REQUESTS_IN .............. 1 -ROWS_OUT ................. 1 -EST_OPER_COST ............ 0.01 -<<<<<<< HEAD -EST_TOTAL_COST .......... 17.46 -======= -EST_TOTAL_COST .......... 18.16 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -DESCRIPTION - max_card_est ........... 1 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - est_memory_per_node .... 29.297 KB - record_length .......... 8 - buffer_size ........ 5,000 - parent_processes ....... 1 - child_processes ........ 4 - child_partitioning_func hash2 partitioned 4 ways on (randomNum) - - -SORT_PARTIAL_AGGR_LEAF ==================== SEQ_NO 5 ONLY CHILD 4 -REQUESTS_IN .............. 1 -ROWS_OUT ................. 1 -EST_OPER_COST ............ 0.01 -<<<<<<< HEAD -EST_TOTAL_COST .......... 17.46 -======= -EST_TOTAL_COST .......... 18.16 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -DESCRIPTION - max_card_est ........... 1 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - record_length .......... 8 - aggregates ............. count(1 ) - - -HYBRID_HASH_JOIN ========================== SEQ_NO 4 CHILDREN 3, 2 -REQUESTS_IN .............. 1 -ROWS_OUT ......... 2,880,404 -EST_OPER_COST ............ 0.02 -<<<<<<< HEAD -EST_TOTAL_COST .......... 17.46 -======= -EST_TOTAL_COST .......... 18.16 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -DESCRIPTION - memory_quota_per_instan 800 MB - max_card_est ........... 2.8804e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... 
esp - est_memory_per_instance 66.631 KB - record_length .......... 0 - join_type .............. inner - join_method ............ hash - parallel_join_type ..... 2 - min_max_cols ........... HIVE.CUSTOMER_ORC.C_CUSTOMER_SK - min_max_expr ........... \:_sys_MinVal0, \:_sys_MaxVal0 - hash_join_predicates ... (HIVE.STORE_SALES_SORTED_ORC.SS_SOLD_DATE_SK = - HIVE.CUSTOMER_ORC.C_CUSTOMER_SK) - - -ORC_SCAN ================================== SEQ_NO 3 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.STORE_SALES_SORTED_ORC -REQUESTS_IN .............. 1 -ROWS_OUT ......... 2,880,404 -EST_OPER_COST ........... 16.74 -EST_TOTAL_COST .......... 16.74 -DESCRIPTION - max_card_est ........... 2.8804e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - record_length .......... 6 - scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_SORTED_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ...... 1 - probes ................. 1 - rows_accessed .......... 2.8804e+06 - min_max_hashj_cols ..... HIVE.STORE_SALES_SORTED_ORC.SS_SOLD_DATE_SK - orc_pred_pushdown ...... yes - orc_search_arguments ... and( and( and( not( HIVE.STORE_SALES_SORTED_ORC.SS_S - OLD_DATE_SK is null ) not( - HIVE.STORE_SALES_SORTED_ORC.SS_SOLD_DATE_SK - <\:_sys_MinVal0 ) ) HIVE.STORE_SALES_SORTED_ORC.SS - _SOLD_DATE_SK <= \:_sys_MaxVal0 ) ) - executor_predicates .... HIVE.STORE_SALES_SORTED_ORC.SS_SOLD_DATE_SK is not - null and (HIVE.STORE_SALES_SORTED_ORC.SS_SOLD_DATE - _SK >= \:_sys_MinVal0) and - (HIVE.STORE_SALES_SORTED_ORC.SS_SOLD_DATE_SK <= - \:_sys_MaxVal0) - - -ESP_EXCHANGE ============================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ............... 317 -EST_OPER_COST ............ 0.01 -EST_TOTAL_COST ........... 1.4 -DESCRIPTION - max_card_est ...... 95,001 - fragment_id ............ 3 - parent_frag ............ 2 - fragment_type .......... esp - est_memory_per_node .... 32.227 KB - record_length .......... 6 - buffer_size ........ 5,000 - parent_processes ....... 4 - child_processes ........ 4 - parent_partitioning_fun broadcast 4 times - child_partitioning_func hash2 partitioned 4 ways on (randomNum) - - -ORC_SCAN ================================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.CUSTOMER_ORC -REQUESTS_IN .............. 1 -ROWS_OUT ............... 317 -EST_OPER_COST ............ 1.4 -EST_TOTAL_COST ........... 1.4 -DESCRIPTION - max_card_est ...... 95,001 - fragment_id ............ 3 - parent_frag ............ 2 - fragment_type .......... esp - record_length .......... 6 - scan_type .............. full scan of table HIVE.HIVE.CUSTOMER_ORC - object_type ............ Hive_Orc - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ...... 2 - probes ................. 1 - rows_accessed .... 100,000 - orc_pred_pushdown ...... yes - orc_search_arguments ... and( and( HIVE.CUSTOMER_ORC.C_FIRST_SALES_DATE_SK = - %(4) not( HIVE.CUSTOMER_ORC.C_CUSTOMER_SK is null - ) ) ) - executor_predicates .... (HIVE.CUSTOMER_ORC.C_FIRST_SALES_DATE_SK = %(4)) and - HIVE.CUSTOMER_ORC.C_CUSTOMER_SK is not null - ---- SQL operation complete. 
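The min_max_cols and :_sys_MinVal0 / :_sys_MaxVal0 entries in the plan above are the GEN_HSHJ_MIN_MAX_OPT optimization at work: the hash join computes the min and max of the build-side join column at run time and hands them to the ORC scan as extra search arguments. A trimmed-down way to observe the same thing, assuming the hive regression tables and the CQD settings the test uses:

  cqd GEN_HSHJ_MIN_MAX_OPT 'on';
  cqd ORC_PRED_PUSHDOWN 'ON';
  cqd parallel_num_esps '4';
  cqd nested_joins 'off';
  prepare xx from
    select count(*) from hive.hive.store2_sales_orc, hive.hive.date_dim
    where ss_sold_date_sk = d_date_sk and d_year = 2001;
  select cast(tokenstr('orc_search_arguments', description, 'executor_predicates') as char(400))
    from table (explain(NULL,'XX'))
   where position('orc_search_arguments' in description) > 0;
  -- the big ORC scan's search arguments should contain bounds on ss_sold_date_sk
  -- against :_sys_MinVal0 / :_sys_MaxVal0, filled in from the date_dim build side at run time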
->>explain options 'f' xx; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -7 . 8 root 1.00E+000 -6 . 7 sort_partial_aggr_ro 1.00E+000 -5 . 6 esp_exchange 1:4(hash2) 1.00E+000 -4 . 5 sort_partial_aggr_le 1.00E+000 -3 2 4 hybrid_hash_join 2.88E+006 -. . 3 orc_scan STORE_SALES_SORTED_O 2.88E+006 -1 . 2 esp_exchange 4(rep-b):4(hash2) 3.17E+002 -. . 1 orc_scan CUSTOMER_ORC 3.17E+002 - ---- SQL operation complete. ->>execute xx; - -(EXPR) --------------------- - - 0 - ---- 1 row(s) selected. ->> ->>cqd orc_njs reset; - ---- SQL operation complete. ->>cqd parallel_num_esps reset; - ---- SQL operation complete. ->>cqd HIVE_USE_EXT_TABLE_ATTRS reset; - ---- SQL operation complete. ->> ->>-- test aggregates pushdown (orc) ->> ->>-- test aggregates pushdown ->>cqd orc_aggr_pushdown 'ON'; - ---- SQL operation complete. ->> ->>-- pushdown feasible ->>cqd attempt_esp_parallelism 'OFF'; - ---- SQL operation complete. ->>prepare xx from -+>select min(ss_sold_date_sk), max(ss_sold_date_sk) -+>from hive.hive.store2_sales_orc; - ---- SQL command prepared. ->> ->>explain options 'f' xx; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -1 . 2 root 1.00E+000 -. . 1 orc_aggr 1.00E+000 - ---- SQL operation complete. ->> ->>-- pushdown feasible ->>prepare xx from -+>select ss_sold_date_sk from hive.hive.store2_sales_orc where -+>ss_sold_date_sk > -+>(select count(*) from hive.hive.store2_sales_orc); - ---- SQL command prepared. ->> ->>explain options 'f' xx; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -3 . 4 root 9.60E+005 -2 1 3 hybrid_hash_join 9.60E+005 -. . 2 orc_scan STORE2_SALES_ORC 2.88E+006 -. . 1 orc_aggr 1.00E+000 - ---- SQL operation complete. ->> ->> ->>-- pushdown not feasible due to distinct ->>prepare xx from -+>select -+>count(distinct ss_sold_date_sk) -+>from hive.hive.store2_sales_orc; - ---- SQL command prepared. ->> ->>explain options 'f' xx; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -3 . 4 root 1.00E+000 -2 . 3 sort_scalar_aggr 1.00E+000 -1 . 2 hash_groupby 2.88E+006 -. . 1 orc_scan STORE2_SALES_ORC 2.88E+006 - ---- SQL operation complete. ->> ->>-- pushdown predicate expressions ->>set schema hive.hive; - ---- SQL operation complete. ->>process hive statement 'drop table t030hive'; - ---- SQL operation complete. ->>process hive statement 'create table t030hive (a int, b string) stored as orc tblproperties ("orc.stripe.size"="20000", "aaaaa"="bb", "orc.bloom.filter.columns"="a,b") '; - ---- SQL operation complete. ->>showddl hive.hive.t030hive; - -/* Hive DDL */ -CREATE TABLE DEFAULT.T030HIVE - ( - A int - , B string - ) - stored as orc - tblproperties - ( - "orc.bloom.filter.columns"="a,b" - ,"orc.stripe.size"="20000" - ) -; - -/* Trafodion DDL */ - ---- SQL operation complete. ->> ->>insert into hive.hive.t030hive values (1,'a'), (null, null), (5, ''), (10,'abc'); - ---- 4 row(s) inserted. ->>cqd hive_max_string_length_in_bytes '10'; - ---- SQL operation complete. ->>select * from t030hive; - -A B ------------ ---------- - - 1 a - ? ? - 5 - 10 abc - ---- 4 row(s) selected. ->>select * from t030hive where a is null; - -A B ------------ ---------- - - ? ? - ---- 1 row(s) selected. ->>select * from t030hive where b is null; - -A B ------------ ---------- - - ? ? - ---- 1 row(s) selected. 
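The t030hive block above also verifies that Hive tblproperties survive the metadata round trip. A minimal sketch using a hypothetical table name (t030demo is not part of the test):

  process hive statement 'drop table t030demo';   -- mirrors the test; skip on a fresh environment
  process hive statement 'create table t030demo (a int, b string) stored as orc tblproperties ("orc.stripe.size"="20000", "orc.bloom.filter.columns"="a,b")';
  showddl hive.hive.t030demo;
  -- the /* Hive DDL */ section of the showddl output should echo the orc.* tblproperties back;
  -- unrecognized properties (like "aaaaa"="bb" in the test) do not reappear in that output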
->> ->>prepare s from select * from hive.hive.t030hive where a = cast(? as int); - ---- SQL command prepared. ->>execute s using 1; - -A B ------------ ---------- - - 1 a - ---- 1 row(s) selected. ->>execute s using 2; - ---- 0 row(s) selected. ->>execute s using NULL; - ---- 0 row(s) selected. ->> ->>prepare s from select * from hive.hive.t030hive where a < cast(? as int); - ---- SQL command prepared. ->>execute s using 1; - ---- 0 row(s) selected. ->>execute s using 2; - -A B ------------ ---------- - - 1 a - ---- 1 row(s) selected. ->>execute s using NULL; - ---- 0 row(s) selected. ->> ->>prepare s from select * from hive.hive.t030hive where b = ?; - ---- SQL command prepared. ->>execute s using 'a'; - -A B ------------ ---------- - - 1 a - ---- 1 row(s) selected. ->>execute s using 'b'; - ---- 0 row(s) selected. ->>execute s using ''; - -A B ------------ ---------- - - 5 - ---- 1 row(s) selected. ->>execute s using NULL; - ---- 0 row(s) selected. ->> ->>prepare s from select * from hive.hive.t030hive where b < ?; - ---- SQL command prepared. ->>execute s using 'a'; - -A B ------------ ---------- - - 5 - ---- 1 row(s) selected. ->>execute s using 'b'; - -A B ------------ ---------- - - 1 a - 5 - 10 abc - ---- 3 row(s) selected. ->>execute s using ''; - ---- 0 row(s) selected. ->>execute s using NULL; - ---- 0 row(s) selected. ->> ->>cqd jdbc_process 'ON'; - ---- SQL operation complete. ->>prepare s from select * from t030hive where a = ?; - ---- SQL command prepared. ->>execute s using 0; - ---- 0 row(s) selected. ->>execute s using 5; - -A B ------------ ---------- - - 5 - ---- 1 row(s) selected. ->>execute s using NULL; - ---- 0 row(s) selected. ->> ->>prepare s from select * from t030hive where a < ?; - ---- SQL command prepared. ->>execute s using 0; - ---- 0 row(s) selected. ->>execute s using 6; - -A B ------------ ---------- - - 1 a - 5 - ---- 2 row(s) selected. ->>execute s using NULL; - ---- 0 row(s) selected. ->> ->> ->>-- parquet table with tblproperties ->>set schema hive.hive; - ---- SQL operation complete. ->>process hive statement 'drop table t030parq'; - ---- SQL operation complete. ->>process hive statement 'create table t030parq (a int, b string) partitioned by (z int) stored as parquet tblproperties ("parquet.block.size"="5000000", "parquet.page.size"="10000", "parquet.compression"="SNAPPY", "parquet.enable.dictionary"="true", "parquet.dictionary.page.size"="5000") '; - ---- SQL operation complete. ->>showddl hive.hive.t030parq; - -/* Hive DDL */ -CREATE TABLE DEFAULT.T030PARQ - ( - A int - , B string - ) - PARTITIONED BY (Z int) - stored as parquet - tblproperties - ( - "parquet.compression"="SNAPPY" - ,"parquet.dictionary.page.size"="5000" - ,"parquet.block.size"="5000000" - ,"parquet.enable.dictionary"="true" - ,"parquet.page.size"="10000" - ) -; - -/* Trafodion DDL */ - ---- SQL operation complete. ->>insert into t030parq values (1,'a',1), (2,'b', 2); - ---- 2 row(s) inserted. ->>sleep 2; ->>select * from t030parq order by 1; - -A B Z ------------ ---------- ----------- - - 1 a 1 - 2 b 2 - ---- 2 row(s) selected. ->> ->> ->> ->>log; diff --git a/core/sql/regress/hive/EXPECTED040 b/core/sql/regress/hive/EXPECTED040 deleted file mode 100644 index 39d0277a85..0000000000 --- a/core/sql/regress/hive/EXPECTED040 +++ /dev/null @@ -1,2755 +0,0 @@ ->>obey TEST040(setup); ->>-------------------------------------------------------------------------- ->> ->>set schema hive.hive; - ---- SQL operation complete. 
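Dynamic parameters follow the same pushdown path as the literals in the t030hive probes above. A condensed sketch (t030hive comes from the test; cqd jdbc_process 'ON' is only needed so an untyped ? is accepted):

  prepare s from select * from hive.hive.t030hive where a = cast(? as int);
  execute s using 5;      -- returns the row (5, '')
  execute s using NULL;   -- '= NULL' is never true, so no rows come back
  cqd jdbc_process 'ON';
  prepare s from select * from hive.hive.t030hive where a = ?;
  execute s using 5;      -- same result without the explicit cast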
->>cqd HIVE_MAX_STRING_LENGTH_IN_BYTES '20' ; - ---- SQL operation complete. ->>cqd mode_seahive 'ON'; - ---- SQL operation complete. ->>cqd traf_enable_parquet_format 'ON'; - ---- SQL operation complete. ->>cqd HIST_ROWCOUNT_REQUIRING_STATS '50000'; - ---- SQL operation complete. ->>cqd hive_use_ext_table_attrs 'ON'; - ---- SQL operation complete. ->>cqd hist_missing_stats_warning_level '0'; - ---- SQL operation complete. ->>cqd ORC_NJS_PROBES_THRESHOLD '1000000'; - ---- SQL operation complete. ->>cqd HIVE_MIN_NUM_ESPS_PER_DATANODE '0'; - ---- SQL operation complete. ->> ->>prepare explainIt from -+> select substring(cast(SEQ_NUM+100 as char(3)),2,2) s, -+> substring(operator,1,16) operator, -+> cast(LEFT_CHILD_SEQ_NUM as char(2)) lc, -+> cast(RIGHT_CHILD_SEQ_NUM as char(2)) rc, -+> substring -+> (substring(substring(tname from (1+locate('.',tname))),1,case locate(')',tname) when 0 then 0 else locate(')',substring(tname from (1+locate('.',tname))))-1 end), -+> (locate('.',substring(tname from (1+locate('.',tname)))))+1, -+> 10 -+> ) tab_name -+> from table (explain(NULL,'XX')) -+> order by 1 desc; - ---- SQL command prepared. ->> ->>obey TEST040(tests); ->>-------------------------------------------------------------------------- ->>-- PARQUET file metadata info -<<<<<<< HEAD ->>--invoke hive.hive.store_parquet; ->>-- -======= ->>invoke hive.hive.store_parquet; - --- Definition of hive table STORE_PARQUET --- Definition current Sun Jun 11 11:28:44 2017 - - ( - S_STORE_SK INT - , S_STORE_ID VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_REC_START_DATE TIMESTAMP(6) - , S_REC_END_DATE TIMESTAMP(6) - , S_CLOSED_DATE_SK INT - , S_STORE_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_NUMBER_EMPLOYEES INT - , S_FLOOR_SPACE INT - , S_HOURS VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_MANAGER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_MARKET_ID INT - , S_GEOGRAPHY_CLASS VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_MARKET_DESC VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_MARKET_MANAGER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_DIVISION_ID INT - , S_DIVISION_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_COMPANY_ID INT - , S_COMPANY_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_STREET_NUMBER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_STREET_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_STREET_TYPE VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_SUITE_NUMBER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_CITY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_COUNTY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_STATE VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_ZIP VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_COUNTRY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , S_GMT_OFFSET REAL - , S_TAX_PRECENTAGE REAL - ) - /* stored as parquet */ - ---- SQL operation complete. ->> ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
->>-- select one row from PARQUET table ->>select [first 1] * from hive.hive.store_parquet; - -S_STORE_SK S_STORE_ID S_REC_START_DATE S_REC_END_DATE S_CLOSED_DATE_SK S_STORE_NAME S_NUMBER_EMPLOYEES S_FLOOR_SPACE S_HOURS S_MANAGER S_MARKET_ID S_GEOGRAPHY_CLASS S_MARKET_DESC S_MARKET_MANAGER S_DIVISION_ID S_DIVISION_NAME S_COMPANY_ID S_COMPANY_NAME S_STREET_NUMBER S_STREET_NAME S_STREET_TYPE S_SUITE_NUMBER S_CITY S_COUNTY S_STATE S_ZIP S_COUNTRY S_GMT_OFFSET S_TAX_PRECENTAGE ------------ -------------------- -------------------------- -------------------------- ---------------- -------------------- ------------------ ------------- -------------------- -------------------- ----------- -------------------- -------------------- -------------------- ------------- -------------------- ------------ -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- --------------- ---------------- - - 1 AAAAAAAABAAAAAAA 1997-03-13 00:00:00.000000 ? 2451189 ought 245 5250760 8AM-4PM William Ward 2 Unknown Enough high areas st Charles Bartley 1 Unknown 1 Unknown 767 Spring Wy Suite 250 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - ---- 1 row(s) selected. ->> ->>-- select all rows from PARQUET table ->>select * from hive.hive.store_parquet order by s_store_sk; - -S_STORE_SK S_STORE_ID S_REC_START_DATE S_REC_END_DATE S_CLOSED_DATE_SK S_STORE_NAME S_NUMBER_EMPLOYEES S_FLOOR_SPACE S_HOURS S_MANAGER S_MARKET_ID S_GEOGRAPHY_CLASS S_MARKET_DESC S_MARKET_MANAGER S_DIVISION_ID S_DIVISION_NAME S_COMPANY_ID S_COMPANY_NAME S_STREET_NUMBER S_STREET_NAME S_STREET_TYPE S_SUITE_NUMBER S_CITY S_COUNTY S_STATE S_ZIP S_COUNTRY S_GMT_OFFSET S_TAX_PRECENTAGE ------------ -------------------- -------------------------- -------------------------- ---------------- -------------------- ------------------ ------------- -------------------- -------------------- ----------- -------------------- -------------------- -------------------- ------------- -------------------- ------------ -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- -------------------- --------------- ---------------- - - 1 AAAAAAAABAAAAAAA 1997-03-13 00:00:00.000000 ? 2451189 ought 245 5250760 8AM-4PM William Ward 2 Unknown Enough high areas st Charles Bartley 1 Unknown 1 Unknown 767 Spring Wy Suite 250 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 2 AAAAAAAACAAAAAAA 1997-03-13 00:00:00.000000 2000-03-12 00:00:00.000000 ? able 236 5285950 8AM-4PM Scott Smith 8 Unknown Parliamentary candid David Lamontagne 1 Unknown 1 Unknown 255 Sycamore Dr. Suite 410 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 3 AAAAAAAACAAAAAAA 2000-03-13 00:00:00.000000 ? ? 
able 236 7557959 8AM-4PM Scott Smith 7 Unknown Impossible, true arm David Lamontagne 1 Unknown 1 Unknown 877 Park Laurel Road Suite T Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 4 AAAAAAAAEAAAAAAA 1997-03-13 00:00:00.000000 1999-03-13 00:00:00.000000 2451044 ese 218 9341467 8AM-4PM Edwin Adams 4 Unknown Events would achieve Thomas Pollack 1 Unknown 1 Unknown 27 Lake Ln Suite 260 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 5 AAAAAAAAEAAAAAAA 1999-03-14 00:00:00.000000 2001-03-12 00:00:00.000000 2450910 anti 288 9078805 8AM-4PM Edwin Adams 8 Unknown Events would achieve Thomas Pollack 1 Unknown 1 Unknown 27 Lee 6th Court Suite 80 Fairview Williamson County TN 35709 United States -5.0000000E+000 2.9999999E-002 - 6 AAAAAAAAEAAAAAAA 2001-03-13 00:00:00.000000 ? ? cally 229 9026222 8AM-4PM Edwin Adams 10 Unknown Events would achieve Thomas Pollack 1 Unknown 1 Unknown 220 6th Lane Suite 140 Midway Williamson County TN 31904 United States -5.0000000E+000 2.9999999E-002 - 7 AAAAAAAAHAAAAAAA 1997-03-13 00:00:00.000000 ? ? ation 297 8954883 8AM-4PM David Thomas 9 Unknown Architects coul Thomas Benton 1 Unknown 1 Unknown 811 Lee Circle Suite T Midway Williamson County TN 31904 United States -5.0000000E+000 9.9999997E-003 - 8 AAAAAAAAIAAAAAAA 1997-03-13 00:00:00.000000 2000-03-12 00:00:00.000000 ? eing 278 6995995 8AM-4PM Brett Yates 2 Unknown Various bars make mo Dean Morrison 1 Unknown 1 Unknown 226 12th Lane Suite D Fairview Williamson County TN 35709 United States -5.0000000E+000 7.9999998E-002 - 9 AAAAAAAAIAAAAAAA 2000-03-13 00:00:00.000000 ? ? eing 271 6995995 8AM-4PM Brett Yates 2 Unknown Formal, psychologica Dean Morrison 1 Unknown 1 Unknown 226 Hill Boulevard Suite 190 Midway Williamson County TN 31904 United States -5.0000000E+000 7.9999998E-002 - 10 AAAAAAAAKAAAAAAA 1997-03-13 00:00:00.000000 1999-03-13 00:00:00.000000 ? bar 294 9294113 8AM-4PM Raymond Jacobs 8 Unknown Little expectations Michael Wilson 1 Unknown 1 Unknown 175 4th Court Suite C Midway Williamson County TN 31904 United States -5.0000000E+000 5.9999998E-002 - 11 AAAAAAAAKAAAAAAA 1999-03-14 00:00:00.000000 2001-03-12 00:00:00.000000 ? ought 294 9294113 8AM-4PM Raymond Jacobs 6 Unknown Mysterious employe Michael Wilson 1 Unknown 1 Unknown 175 Park Green Court Suite 160 Midway Williamson County TN 31904 United States -5.0000000E+000 1.0999999E-001 - 12 AAAAAAAAKAAAAAAA 2001-03-13 00:00:00.000000 ? ? ought 294 5219562 8AM-12AM Robert Thompson 6 Unknown Events develop i Dustin Kelly 1 Unknown 1 Unknown 337 College Boulevard Suite 100 Fairview Williamson County TN 31904 United States -5.0000000E+000 9.9999997E-003 - ---- 12 row(s) selected. ->> ->>-- select of few columns with WHERE predicate ->>select s_store_sk, left(s_store_id, 20) from hive.hive.store_parquet where s_store_sk < 7; - -S_STORE_SK (EXPR) ------------ -------------------- - - 1 AAAAAAAABAAAAAAA - 2 AAAAAAAACAAAAAAA - 3 AAAAAAAACAAAAAAA - 4 AAAAAAAAEAAAAAAA - 5 AAAAAAAAEAAAAAAA - 6 AAAAAAAAEAAAAAAA - ---- 6 row(s) selected. ->> ->>-- select count of rows ->>select count(*) from hive.hive.store_parquet; - -(EXPR) --------------------- - - 12 - ---- 1 row(s) selected. ->> ->>-- explain of join between 2 PARQUET tables ->>prepare XX from select x.s_suite_number, y.s_street_name -+> from hive.hive.store_parquet x, hive.hive.store_parquet y -+> where x.s_store_sk = y.s_store_sk; - ---- SQL command prepared. 
->>execute explainIt; - -S OPERATOR LC RC TAB_NAME --- ---------------- -- -- ---------- - -04 ROOT 3 ? -03 HYBRID_HASH_JOIN 2 1 -02 PARQUET_SCAN ? ? STORE_PARQ -01 PARQUET_SCAN ? ? STORE_PARQ - ---- 4 row(s) selected. ->> ->>-- execute of join between 2 PARQUET tables ->>execute XX; - -S_SUITE_NUMBER S_STREET_NAME --------------------- -------------------- - -Suite 250 Spring -Suite 410 Sycamore -Suite T Park Laurel -Suite 260 Lake -Suite 80 Lee 6th -Suite 140 6th -Suite T Lee -Suite D 12th -Suite 190 Hill -Suite C 4th -Suite 160 Park Green -Suite 100 College - ---- 12 row(s) selected. ->> ->>-- explain of join between hive(hdfs) and PARQUET tables ->>control query shape join(scan(path 'STORE'), scan(path 'STORE_PARQUET')); - ---- SQL operation complete. ->>prepare XX from select x.s_suite_number, y.s_street_name -+> from hive.hive.store x, hive.hive.store_parquet y -+> where x.s_store_sk = y.s_store_sk; - ---- SQL command prepared. ->>control query shape cut; - ---- SQL operation complete. ->>execute explainIt; - -S OPERATOR LC RC TAB_NAME --- ---------------- -- -- ---------- - -04 ROOT 3 ? -03 HYBRID_HASH_JOIN 2 1 -02 HIVE_SCAN ? ? STORE -01 PARQUET_SCAN ? ? STORE_PARQ - ---- 4 row(s) selected. ->> ->>-- execute of join between hive(hdfs) and PARQUET tables ->>execute XX; - -S_SUITE_NUMBER S_STREET_NAME --------------------- -------------------- - -Suite 250 Spring -Suite 410 Sycamore -Suite T Park Laurel -Suite 260 Lake -Suite 80 Lee 6th -Suite 140 6th -Suite T Lee -Suite D 12th -Suite 190 Hill -Suite C 4th -Suite 160 Park Green -Suite 100 College - ---- 12 row(s) selected. ->> ->>-- column list pushdown test ->>cqd parquet_columns_pushdown 'OFF'; - ---- SQL operation complete. ->>showplan option 'tr' select s_store_sk from store_parquet; -MASTER Executor fragment -======================== - -<<<<<<< HEAD -Fragment ID: 0, Length: 58712 -======= -Fragment ID: 0, Length: 17152 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -Contents of EX_ROOT [2]: ------------------------- - -For ComTdb : -Class Version = 1, Class Size = 576 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 0, queueSizeUp = 0, numBuffers = 0, bufferSize = 0 -estimatedRowUsed = 0.000000, estimatedRowsAccessed = 0.000000, expressionMode = 0 -Flag = 0x129 - -For ComTdbRoot : -FirstNRows = -1, baseTablenamePosition = -1 -queryType_ = 2, planVersion_ = 2600 -rtFlags1_ = 0x20300a00 -rtFlags2_ = 0 -rtFlags3_ = 0 -rtFlags4_ = 0x2000 -rtFlags5_ = 0 -queryType_ = 2 -inputVarsSize_ = 0 -querySimilarityInfo()->siList()->numEntries() = 1 -<<<<<<< HEAD -explain_plan_size = 3112 -======= -explain_plan_size = 3208 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- -# of Expressions = 4 - -Expression: inputExpr_ is NULL -Expression: outputExpr_ is not NULL -Expression: pkeyExpr_ is NULL -Expression: predExpr_ is NULL -Contents of EX_EXT_STORAGE_SCAN [1]: ------------------------------------- - -For ComTdb : -Class Version = 1, Class Size = 464 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 2048, queueSizeUp = 2048, numBuffers = 4, bufferSize = 41032 -estimatedRowUsed = 100.000000, estimatedRowsAccessed = 100.000000, expressionMode = 0 -Flag = 0x9 -criDescDown_->noTuples() = 2, criDescUp_->noTuples() = 3 - -For ComTdbExtStorageScan : -tableName_ = HIVE.HIVE.STORE_PARQUET -type_ = PARQUET -hostName_ = localhost, port_ = 36000 -recordDelimiter_ = 10, columnDelimiter_ = 124 -hdfsBufSize_ = 735, rangeTailIOSize_ = 20984, hdfsSqlMaxRecLen_ = 8 -tuppIndex_ = 2, workAtpIndex_ = 3 -asciiTuppIndex_ = 4, asciiRowLen_ = 532 -moveExprColsTuppIndex_ = 2, moveExprColsRowLength_ = 576 -convertSkipListSize_ = 33, convertSkipList_ = 3 -outputRowLength_ = 16 -Flag = 0x20c - -Number of ranges to scan: 1 -Number of esps to scan: 1 - - Esp# Range# StripeOffset Length FileName -====== ====== ============ ============ ============================== - - 0 0 4 3681 store_parquet/000000_0 - -Summary of bytes read per ESP (3681 = 100 percent): - -ESP 0 reads 3681 bytes ( 100 percent of avg) - -Number of columns to retrieve: 29 -ColNumber: 1, ColName: S_STORE_SK -ColNumber: 2, ColName: S_STORE_ID -ColNumber: 3, ColName: S_REC_START_DATE -ColNumber: 4, ColName: S_REC_END_DATE -ColNumber: 5, ColName: S_CLOSED_DATE_SK -ColNumber: 6, ColName: S_STORE_NAME -ColNumber: 7, ColName: S_NUMBER_EMPLOYEES -ColNumber: 8, ColName: S_FLOOR_SPACE -ColNumber: 9, ColName: S_HOURS -ColNumber: 10, ColName: S_MANAGER -ColNumber: 11, ColName: S_MARKET_ID -ColNumber: 12, ColName: S_GEOGRAPHY_CLASS -ColNumber: 13, ColName: S_MARKET_DESC -ColNumber: 14, ColName: S_MARKET_MANAGER -ColNumber: 15, ColName: S_DIVISION_ID -ColNumber: 16, ColName: S_DIVISION_NAME -ColNumber: 17, ColName: S_COMPANY_ID -ColNumber: 18, ColName: S_COMPANY_NAME -ColNumber: 19, ColName: S_STREET_NUMBER -ColNumber: 20, ColName: S_STREET_NAME -ColNumber: 21, ColName: S_STREET_TYPE -ColNumber: 22, ColName: S_SUITE_NUMBER -ColNumber: 23, ColName: S_CITY -ColNumber: 24, ColName: S_COUNTY -ColNumber: 25, ColName: S_STATE -ColNumber: 26, ColName: S_ZIP -ColNumber: 27, ColName: S_COUNTRY -ColNumber: 28, ColName: S_GMT_OFFSET -ColNumber: 29, ColName: S_TAX_PRECENTAGE -<<<<<<< HEAD -hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_parquet -modTSforDir_ = 1511634536, numOfPartCols_ = 0 -======= -hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_parquet -modTSforDir_ = 1496437675, numOfPartCols_ = 0 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 6 - -Expression: selectExpr_ is NULL -Expression: moveExpr_ is not NULL -Expression: convertExpr_ is NULL -Expression: moveColsConvertExpr_ is not NULL -Expression: partElimExpr_ is NULL -Expression: extOperExpr_ is NULL - ---- SQL operation complete. ->>select s_store_sk from store_parquet; - -S_STORE_SK ------------ - - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - ---- 12 row(s) selected. ->>cqd parquet_columns_pushdown 'ON'; - ---- SQL operation complete. 
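The PARQUET_COLUMNS_PUSHDOWN comparison above and below hinges on the column list printed by showplan: with the CQD off the EX_EXT_STORAGE_SCAN node reports all 29 columns of STORE_PARQUET, and with it on only the referenced column survives. A condensed sketch of the check, using the same statements the expected output captures:

cqd parquet_columns_pushdown 'OFF';
-- scan node lists 29 columns to retrieve
showplan option 'tr' select s_store_sk from store_parquet;
cqd parquet_columns_pushdown 'ON';
-- scan node lists a single column, S_STORE_SK
showplan option 'tr' select s_store_sk from store_parquet;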
->>showplan option 'tr' select s_store_sk from store_parquet; -MASTER Executor fragment -======================== - -Fragment ID: 0, Length: 14672 - -Contents of EX_ROOT [2]: ------------------------- - -For ComTdb : -Class Version = 1, Class Size = 576 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 0, queueSizeUp = 0, numBuffers = 0, bufferSize = 0 -estimatedRowUsed = 0.000000, estimatedRowsAccessed = 0.000000, expressionMode = 0 -Flag = 0x129 - -For ComTdbRoot : -FirstNRows = -1, baseTablenamePosition = -1 -queryType_ = 2, planVersion_ = 2600 -rtFlags1_ = 0x20300a00 -rtFlags2_ = 0 -rtFlags3_ = 0 -rtFlags4_ = 0x2000 -rtFlags5_ = 0 -queryType_ = 2 -inputVarsSize_ = 0 -querySimilarityInfo()->siList()->numEntries() = 1 -<<<<<<< HEAD -explain_plan_size = 3104 -======= -explain_plan_size = 3208 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 4 - -Expression: inputExpr_ is NULL -Expression: outputExpr_ is not NULL -Expression: pkeyExpr_ is NULL -Expression: predExpr_ is NULL -Contents of EX_EXT_STORAGE_SCAN [1]: ------------------------------------- - -For ComTdb : -Class Version = 1, Class Size = 464 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 2048, queueSizeUp = 2048, numBuffers = 4, bufferSize = 41032 -estimatedRowUsed = 100.000000, estimatedRowsAccessed = 100.000000, expressionMode = 0 -Flag = 0x9 -criDescDown_->noTuples() = 2, criDescUp_->noTuples() = 3 - -For ComTdbExtStorageScan : -tableName_ = HIVE.HIVE.STORE_PARQUET -type_ = PARQUET -hostName_ = localhost, port_ = 36000 -recordDelimiter_ = 10, columnDelimiter_ = 124 -hdfsBufSize_ = 115, rangeTailIOSize_ = 20984, hdfsSqlMaxRecLen_ = 8 -tuppIndex_ = 2, workAtpIndex_ = 3 -asciiTuppIndex_ = 4, asciiRowLen_ = 8 -moveExprColsTuppIndex_ = 2, moveExprColsRowLength_ = 16 -convertSkipListSize_ = 33, convertSkipList_ = 3 -outputRowLength_ = 16 -Flag = 0x20c - -Number of ranges to scan: 1 -Number of esps to scan: 1 - - Esp# Range# StripeOffset Length FileName -====== ====== ============ ============ ============================== - - 0 0 4 3681 store_parquet/000000_0 - -Summary of bytes read per ESP (3681 = 100 percent): - -ESP 0 reads 3681 bytes ( 100 percent of avg) - -Number of columns to retrieve: 1 -ColNumber: 1, ColName: S_STORE_SK -<<<<<<< HEAD -hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_parquet -modTSforDir_ = 1511634536, numOfPartCols_ = 0 -======= -hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_parquet -modTSforDir_ = 1496437675, numOfPartCols_ = 0 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 6 - -Expression: selectExpr_ is NULL -Expression: moveExpr_ is not NULL -Expression: convertExpr_ is NULL -Expression: moveColsConvertExpr_ is not NULL -Expression: partElimExpr_ is NULL -Expression: extOperExpr_ is NULL - ---- SQL operation complete. ->>select s_store_sk from store_parquet; - -S_STORE_SK ------------ - - 1 - 2 - 3 - 4 - 5 - 6 - 7 - 8 - 9 - 10 - 11 - 12 - ---- 12 row(s) selected. ->> ->>-- predicate pushdown to PARQUET layer ->>cqd parquet_pred_pushdown 'OFF'; - ---- SQL operation complete. ->>select s_store_sk from store_parquet where s_store_sk = 3; - -S_STORE_SK ------------ - - 3 - ---- 1 row(s) selected. ->>select s_store_sk from store_parquet where s_store_sk < 2; - -S_STORE_SK ------------ - - 1 - ---- 1 row(s) selected. 
->>select s_store_sk from store_parquet where s_store_sk >= 11; - -S_STORE_SK ------------ - - 11 - 12 - ---- 2 row(s) selected. ->>select s_store_sk from store_parquet where s_store_sk >= 4 and s_store_sk < 6; - -S_STORE_SK ------------ - - 4 - 5 - ---- 2 row(s) selected. ->>select s_store_sk from store_parquet where s_store_sk in (7,8,9); - -S_STORE_SK ------------ - - 7 - 8 - 9 - ---- 3 row(s) selected. ->>showplan option 'tr' select s_store_sk from store_parquet where s_store_sk = 3; -MASTER Executor fragment -======================== - -Fragment ID: 0, Length: 14848 - -Contents of EX_ROOT [2]: ------------------------- - -For ComTdb : -Class Version = 1, Class Size = 576 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 0, queueSizeUp = 0, numBuffers = 0, bufferSize = 0 -estimatedRowUsed = 0.000000, estimatedRowsAccessed = 0.000000, expressionMode = 0 -Flag = 0x129 - -For ComTdbRoot : -FirstNRows = -1, baseTablenamePosition = -1 -queryType_ = 2, planVersion_ = 2600 -rtFlags1_ = 0x20300a00 -rtFlags2_ = 0 -rtFlags3_ = 0 -rtFlags4_ = 0x2000 -rtFlags5_ = 0 -queryType_ = 2 -inputVarsSize_ = 0 -querySimilarityInfo()->siList()->numEntries() = 1 -<<<<<<< HEAD -explain_plan_size = 3208 -======= -explain_plan_size = 3320 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 4 - -Expression: inputExpr_ is NULL -Expression: outputExpr_ is not NULL -Expression: pkeyExpr_ is NULL -Expression: predExpr_ is NULL -Contents of EX_EXT_STORAGE_SCAN [1]: ------------------------------------- - -For ComTdb : -Class Version = 1, Class Size = 464 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 2048, queueSizeUp = 2048, numBuffers = 4, bufferSize = 32840 -estimatedRowUsed = 100.000000, estimatedRowsAccessed = 100.000000, expressionMode = 0 -Flag = 0x9 -criDescDown_->noTuples() = 3, criDescUp_->noTuples() = 4 - -For ComTdbExtStorageScan : -tableName_ = HIVE.HIVE.STORE_PARQUET -type_ = PARQUET -hostName_ = localhost, port_ = 36000 -recordDelimiter_ = 10, columnDelimiter_ = 124 -hdfsBufSize_ = 115, rangeTailIOSize_ = 20984, hdfsSqlMaxRecLen_ = 16 -tuppIndex_ = 3, workAtpIndex_ = 3 -asciiTuppIndex_ = 4, asciiRowLen_ = 8 -moveExprColsTuppIndex_ = 2, moveExprColsRowLength_ = 8 -convertSkipListSize_ = 33, convertSkipList_ = 2 -outputRowLength_ = 8 -Flag = 0x20c - -Number of ranges to scan: 1 -Number of esps to scan: 1 - - Esp# Range# StripeOffset Length FileName -====== ====== ============ ============ ============================== - - 0 0 4 3681 store_parquet/000000_0 - -Summary of bytes read per ESP (3681 = 100 percent): - -ESP 0 reads 3681 bytes ( 100 percent of avg) - -Number of columns to retrieve: 1 -ColNumber: 1, ColName: S_STORE_SK -<<<<<<< HEAD -hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_parquet -modTSforDir_ = 1511634536, numOfPartCols_ = 0 -======= -hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_parquet -modTSforDir_ = 1496437675, numOfPartCols_ = 0 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 6 - -Expression: selectExpr_ is not NULL -Expression: moveExpr_ is NULL -Expression: convertExpr_ is not NULL -Expression: moveColsConvertExpr_ is NULL -Expression: partElimExpr_ is NULL -Expression: extOperExpr_ is NULL - ---- SQL operation complete. ->> ->>cqd parquet_pred_pushdown 'ON'; - ---- SQL operation complete. 
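The predicate pushdown check mirrors the projection test: the same five selects and the same showplan are repeated with PARQUET_PRED_PUSHDOWN on, and the difference shows up in the scan node, where the executor-side selectExpr_ is replaced by PPI (pushed predicate) entries and an extOperExpr_. A condensed sketch of the comparison, using statements taken from the test:

cqd parquet_pred_pushdown 'OFF';
-- predicate evaluated in the executor: selectExpr_ is not NULL, no PPI entries
showplan option 'tr' select s_store_sk from store_parquet where s_store_sk = 3;
cqd parquet_pred_pushdown 'ON';
-- predicate handed to the Parquet reader: PPI entries listed, extOperExpr_ is not NULL
showplan option 'tr' select s_store_sk from store_parquet where s_store_sk = 3;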
->>select s_store_sk from store_parquet where s_store_sk = 3; - -S_STORE_SK ------------ - - 3 - ---- 1 row(s) selected. ->>select s_store_sk from store_parquet where s_store_sk < 2; - -S_STORE_SK ------------ - - 1 - ---- 1 row(s) selected. ->>select s_store_sk from store_parquet where s_store_sk >= 11; - -S_STORE_SK ------------ - - 11 - 12 - ---- 2 row(s) selected. ->>select s_store_sk from store_parquet where s_store_sk >= 4 and s_store_sk < 6; - -S_STORE_SK ------------ - - 4 - 5 - ---- 2 row(s) selected. ->>select s_store_sk from store_parquet where s_store_sk in (7,8,9); - -S_STORE_SK ------------ - - 7 - 8 - 9 - ---- 3 row(s) selected. ->>showplan option 'tr' select s_store_sk from store_parquet where s_store_sk = 3; -MASTER Executor fragment -======================== - -<<<<<<< HEAD -Fragment ID: 0, Length: 17104 -======= -Fragment ID: 0, Length: 15584 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -Contents of EX_ROOT [2]: ------------------------- - -For ComTdb : -Class Version = 1, Class Size = 576 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 0, queueSizeUp = 0, numBuffers = 0, bufferSize = 0 -estimatedRowUsed = 0.000000, estimatedRowsAccessed = 0.000000, expressionMode = 0 -Flag = 0x129 - -For ComTdbRoot : -FirstNRows = -1, baseTablenamePosition = -1 -queryType_ = 2, planVersion_ = 2600 -rtFlags1_ = 0x20300a00 -rtFlags2_ = 0 -rtFlags3_ = 0 -rtFlags4_ = 0x2000 -rtFlags5_ = 0 -queryType_ = 2 -inputVarsSize_ = 0 -querySimilarityInfo()->siList()->numEntries() = 1 -<<<<<<< HEAD -explain_plan_size = 3240 -======= -explain_plan_size = 3352 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - -# of Expressions = 4 - -Expression: inputExpr_ is NULL -Expression: outputExpr_ is not NULL -Expression: pkeyExpr_ is NULL -Expression: predExpr_ is NULL -Contents of EX_EXT_STORAGE_SCAN [1]: ------------------------------------- - -For ComTdb : -Class Version = 1, Class Size = 464 -InitialQueueSizeDown = 4, InitialQueueSizeUp = 4 -queueResizeLimit = 9, queueResizeFactor = 4 -queueSizeDown = 2048, queueSizeUp = 2048, numBuffers = 4, bufferSize = 32840 -estimatedRowUsed = 100.000000, estimatedRowsAccessed = 100.000000, expressionMode = 0 -Flag = 0x9 -criDescDown_->noTuples() = 3, criDescUp_->noTuples() = 4 - -For ComTdbExtStorageScan : -tableName_ = HIVE.HIVE.STORE_PARQUET -type_ = PARQUET -hostName_ = localhost, port_ = 36000 -recordDelimiter_ = 10, columnDelimiter_ = 124 -hdfsBufSize_ = 115, rangeTailIOSize_ = 20984, hdfsSqlMaxRecLen_ = 16 -tuppIndex_ = 3, workAtpIndex_ = 3 -asciiTuppIndex_ = 4, asciiRowLen_ = 8 -moveExprColsTuppIndex_ = 2, moveExprColsRowLength_ = 8 -convertSkipListSize_ = 33, convertSkipList_ = 2 -outputRowLength_ = 8 -Flag = 0x20c - -Number of ranges to scan: 1 -Number of esps to scan: 1 - - Esp# Range# StripeOffset Length FileName -====== ====== ============ ============ ============================== - - 0 0 4 3681 store_parquet/000000_0 - -Summary of bytes read per ESP (3681 = 100 percent): - -ESP 0 reads 3681 bytes ( 100 percent of avg) - -<<<<<<< HEAD -Number of columns to retrieve: 1 -ColNumber: 1, ColName: S_STORE_SK -hdfsRootDir: hdfs://localhost:36000/user/trafodion/hive/tpcds/store_parquet -modTSforDir_ = 1511634536, numOfPartCols_ = 0 -======= -Number of columns to retrieve: 0 -hdfsRootDir: hdfs://localhost:24200/user/trafodion/hive/tpcds/store_parquet -modTSforDir_ = 1496437675, numOfPartCols_ = 0 ->>>>>>> 1830668... 
Enhanced checktests changes. Jenkins usage not yet enabled. - -Number of PPI entries: 3 -PPI: #1 - type: STARTAND(1) -PPI: #2 - type: EQUALS(5) - operAttrIndex: 0 - colName_: s_store_sk -PPI: #3 - type: END(4) -Num Of extAllColInfoList entries: 33 - -# of Expressions = 6 - -Expression: selectExpr_ is NULL -Expression: moveExpr_ is NULL -Expression: convertExpr_ is not NULL -Expression: moveColsConvertExpr_ is NULL -Expression: partElimExpr_ is NULL -Expression: extOperExpr_ is not NULL - ---- SQL operation complete. ->> ->>-- local join predicate is not pushed down ->>explain select * from store_sales_parquet where ss_sold_date_sk = ss_item_sk ; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... NOT NAMED -<<<<<<< HEAD -PLAN_ID .................. 212378573691398125 -ROWS_OUT ......... 1,440,202 -EST_TOTAL_COST ......... 234.75 -======= -PLAN_ID .................. 212363940580483637 -ROWS_OUT ........ 81,136,280 -EST_TOTAL_COST ...... 13,852.32 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -STATEMENT ................ select * - from store_sales_parquet - where ss_sold_date_sk = ss_item_sk; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 3 ONLY CHILD 2 -REQUESTS_IN .............. 1 -ROWS_OUT ......... 1,440,202 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ......... 234.75 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ........... 2.8804e+06 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 132 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinal 1,440,202 - total_overflow_size .... 0.00 KB - esp_2_node_map ......... (-1(4 times)) - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - SCHEMA ................. HIVE.HIVE - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_PARQUET_FOR ON - HIST_ROWCOUNT_REQU 50,000 - HIVE_USE_EXT_TABLE_ATTR ON - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - PARQUET_COLUMNS_PUSHDOW ON - PARQUET_PRED_PUSHDOWN ON - GENERATE_EXPLAIN ....... ON - select_list ............ 
HIVE.STORE_SALES_PARQUET.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_PARQUET.SS_SOLD_TIME_SK, - HIVE.STORE_SALES_PARQUET.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_PARQUET.SS_CUSTOMER_SK, - HIVE.STORE_SALES_PARQUET.SS_CDEMO_SK, - HIVE.STORE_SALES_PARQUET.SS_HDEMO_SK, - HIVE.STORE_SALES_PARQUET.SS_ADDR_SK, - HIVE.STORE_SALES_PARQUET.SS_STORE_SK, - HIVE.STORE_SALES_PARQUET.SS_PROMO_SK, - HIVE.STORE_SALES_PARQUET.SS_TICKET_NUMBER, - HIVE.STORE_SALES_PARQUET.SS_QUANTITY, - HIVE.STORE_SALES_PARQUET.SS_WHOLESALE_COST, - HIVE.STORE_SALES_PARQUET.SS_LIST_PRICE, - HIVE.STORE_SALES_PARQUET.SS_SALES_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_DISCOUNT_AMT, - HIVE.STORE_SALES_PARQUET.SS_EXT_SALES_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_WHOLESALE_COST, - HIVE.STORE_SALES_PARQUET.SS_EXT_LIST_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_TAX, - HIVE.STORE_SALES_PARQUET.SS_COUPON_AMT, - HIVE.STORE_SALES_PARQUET.SS_NET_PAID, - HIVE.STORE_SALES_PARQUET.SS_NET_PAID_INC_TAX, - HIVE.STORE_SALES_PARQUET.SS_NET_PROFIT - - -ESP_EXCHANGE ============================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ......... 1,440,202 -EST_OPER_COST ............ 0.01 -EST_TOTAL_COST ......... 234.75 -DESCRIPTION - max_card_est ........... 2.8804e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - est_memory_per_node .... 153.262 KB - record_length ........ 132 - buffer_size ....... 30,388 - parent_processes ....... 1 - child_processes ........ 4 - child_partitioning_func hash2 partitioned 4 ways on (randomNum) - - -PARQUET_SCAN ============================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.STORE_SALES_PARQUET -REQUESTS_IN .............. 1 -ROWS_OUT ......... 1,440,202 -EST_OPER_COST .......... 234.75 -EST_TOTAL_COST ......... 234.75 -DESCRIPTION - max_card_est ........... 2.8804e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - record_length ........ 132 - scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_PARQUET - object_type ............ Hive_Parquet - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 23 - probes ................. 1 - rows_accessed .......... 2.8804e+06 - executor_predicates .... (HIVE.STORE_SALES_PARQUET.SS_ITEM_SK = - HIVE.STORE_SALES_PARQUET.SS_SOLD_DATE_SK) - ---- SQL operation complete. ->> ->>-- Boolean constant predicate is not pushed down ->>explain select * from store_sales_parquet where 1=2; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... NOT NAMED -<<<<<<< HEAD -PLAN_ID .................. 212378573691753629 -======= -PLAN_ID .................. 212363940580884702 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -ROWS_OUT ................. 1 -EST_TOTAL_COST ........... 0 -STATEMENT ................ select * from store_sales_parquet where 1=2; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ................. 1 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ........... 0 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ........... 0 - fragment_id ............ 0 - parent_frag ............ 
(none) - fragment_type .......... master - record_length ........ 138 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinality .... 1 - total_overflow_size .... 0.00 KB - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - SCHEMA ................. HIVE.HIVE - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_PARQUET_FOR ON - HIST_ROWCOUNT_REQU 50,000 - HIVE_USE_EXT_TABLE_ATTR ON - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - PARQUET_COLUMNS_PUSHDOW ON - PARQUET_PRED_PUSHDOWN ON - GENERATE_EXPLAIN ....... ON - select_list ............ HIVE.STORE_SALES_PARQUET.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_PARQUET.SS_SOLD_TIME_SK, - HIVE.STORE_SALES_PARQUET.SS_ITEM_SK, - HIVE.STORE_SALES_PARQUET.SS_CUSTOMER_SK, - HIVE.STORE_SALES_PARQUET.SS_CDEMO_SK, - HIVE.STORE_SALES_PARQUET.SS_HDEMO_SK, - HIVE.STORE_SALES_PARQUET.SS_ADDR_SK, - HIVE.STORE_SALES_PARQUET.SS_STORE_SK, - HIVE.STORE_SALES_PARQUET.SS_PROMO_SK, - HIVE.STORE_SALES_PARQUET.SS_TICKET_NUMBER, - HIVE.STORE_SALES_PARQUET.SS_QUANTITY, - HIVE.STORE_SALES_PARQUET.SS_WHOLESALE_COST, - HIVE.STORE_SALES_PARQUET.SS_LIST_PRICE, - HIVE.STORE_SALES_PARQUET.SS_SALES_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_DISCOUNT_AMT, - HIVE.STORE_SALES_PARQUET.SS_EXT_SALES_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_WHOLESALE_COST, - HIVE.STORE_SALES_PARQUET.SS_EXT_LIST_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_TAX, - HIVE.STORE_SALES_PARQUET.SS_COUPON_AMT, - HIVE.STORE_SALES_PARQUET.SS_NET_PAID, - HIVE.STORE_SALES_PARQUET.SS_NET_PAID_INC_TAX, - HIVE.STORE_SALES_PARQUET.SS_NET_PROFIT - - -PARQUET_SCAN ============================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.STORE_SALES_PARQUET -REQUESTS_IN .............. 1 -ROWS_OUT ................. 1 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ........... 0 -DESCRIPTION - max_card_est ........... 0 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ........ 138 - scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_PARQUET - object_type ............ Hive_Parquet - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 23 - probes ................. 1 - part_elim_compiled ..... 0. - ---- SQL operation complete. ->> ->>-- aggregate pushdown to PARQUET layer ->>cqd parquet_aggr_pushdown 'ON'; - ---- SQL operation complete. ->>explain options 'f' select count(*) from store_parquet; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -1 . 2 root 1.00E+000 -. . 1 parquet_aggr 1.00E+000 - ---- SQL operation complete. ->>select count(*) from store_parquet; - -(EXPR) --------------------- - - 12 - ---- 1 row(s) selected. 
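The aggregate pushdown block continuing below checks which scalar aggregates collapse into a parquet_aggr operator when PARQUET_AGGR_PUSHDOWN is on: count(*), count(col), and a lone min do, while the combined count/min/max/sum query and the HAVING variant still compile to sort_scalar_aggr over a parquet_scan. A condensed sketch of the contrast, using two explains that appear in the expected output:

cqd parquet_aggr_pushdown 'ON';
-- pushed: the plan is root over a single parquet_aggr node
explain options 'f' select count(*) from store_parquet;
-- not pushed even with the CQD on: sort_scalar_aggr over parquet_scan
explain options 'f' select count(*), min(s_store_sk), max(s_store_sk),
  sum(s_store_sk) from store_parquet;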
->>select count(*), count(*) from store_parquet; - -(EXPR) (EXPR) --------------------- -------------------- - - 12 12 - ---- 1 row(s) selected. ->>select min(s_store_sk) from store_parquet; - -(EXPR) ------------ - - 1 - ---- 1 row(s) selected. ->>select max(s_store_sk) from store_parquet; - -(EXPR) ------------ - - 12 - ---- 1 row(s) selected. ->>select sum(s_store_sk) from store_parquet; - -(EXPR) --------------------- - - 78 - ---- 1 row(s) selected. ->>select count(*), min(s_store_sk), max(s_store_sk), sum(s_store_sk) -+> from store_parquet; - -(EXPR) (EXPR) (EXPR) (EXPR) --------------------- ----------- ----------- -------------------- - - 12 1 12 78 - ---- 1 row(s) selected. ->>explain options 'f' select count(*), min(s_store_sk), max(s_store_sk), -+> sum(s_store_sk) from store_parquet; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -2 . 3 root 1.00E+000 -1 . 2 sort_scalar_aggr 1.00E+000 -. . 1 parquet_scan STORE_PARQUET 1.00E+002 - ---- SQL operation complete. ->> ->>explain options 'f' -+> select count(*) from hive.hive.store_parquet union all -+> select count(*) from hive.hive.store_parquet; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -3 . 4 root 2.00E+000 -1 2 3 merge_union 2.00E+000 -. . 2 parquet_aggr 1.00E+000 -. . 1 parquet_aggr 1.00E+000 - ---- SQL operation complete. ->>select count(*) from hive.hive.store_parquet union all -+> select count(*) from hive.hive.store_parquet; - -(EXPR) --------------------- - - 12 - 12 - ---- 2 row(s) selected. ->> ->>explain options 'f' -+> select min(s_store_sk) from hive.hive.store_parquet union all -+> select min(s_store_sk) from hive.hive.store_parquet; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -3 . 4 root 2.00E+000 -1 2 3 merge_union 2.00E+000 -. . 2 parquet_aggr 1.00E+000 -. . 1 parquet_aggr 1.00E+000 - ---- SQL operation complete. ->>select min(s_store_sk) from hive.hive.store_parquet union all -+> select min(s_store_sk) from hive.hive.store_parquet; - -(EXPR) ------------ - - 1 - 1 - ---- 2 row(s) selected. ->> ->>explain options 'f' select count(s_store_sk) from store_parquet; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -1 . 2 root 1.00E+000 -. . 1 parquet_aggr 1.00E+000 - ---- SQL operation complete. ->>select count(s_store_sk) from store_parquet; - -(EXPR) --------------------- - - 12 - ---- 1 row(s) selected. ->> ->>explain options 'f' select count(*) from store_parquet -+> having sum(s_store_sk) = 78; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -2 . 3 root 1.00E+000 -1 . 2 sort_scalar_aggr 1.00E+000 -. . 1 parquet_scan STORE_PARQUET 1.00E+002 - ---- SQL operation complete. ->>select count(*) from store_parquet having sum(s_store_sk) = 78; - -(EXPR) --------------------- - - 12 - ---- 1 row(s) selected. ->> ->>select count(*) from store_parquet having sum(s_store_sk) = 77; - ---- 0 row(s) selected. ->> ->>cqd parquet_aggr_pushdown 'OFF'; - ---- SQL operation complete. ->>explain options 'f' select count(*), min(s_store_sk), max(s_store_sk), -+> sum(s_store_sk) from store_parquet; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -2 . 3 root 1.00E+000 -1 . 
2 sort_scalar_aggr 1.00E+000 -. . 1 parquet_scan STORE_PARQUET 1.00E+002 - ---- SQL operation complete. ->>select count(*), min(s_store_sk), max(s_store_sk), sum(s_store_sk) -+> from store_parquet; - -(EXPR) (EXPR) (EXPR) (EXPR) --------------------- ----------- ----------- -------------------- - - 12 1 12 78 - ---- 1 row(s) selected. ->> ->>-- test query cache ->>prepare xx from select cd_gender from customer_demographics_parquet where cd_gender = 'male' ; - ---- SQL command prepared. ->>select num_hits, num_params from table(querycacheentries('user', 'local')) -+>where substring(text, 1, 16) = 'select cd_gender' order by 1,2; - -NUM_HITS NUM_PARAMS ----------- ---------- - - 0 1 - ---- 1 row(s) selected. ->> ->>prepare xx from select cd_gender from customer_demographics_parquet where cd_gender = 'female' ; - ---- SQL command prepared. ->>select num_hits, num_params from table(querycacheentries('user', 'local')) -+>where substring(text, 1, 16) = 'select cd_gender' order by 1,2; - -NUM_HITS NUM_PARAMS ----------- ---------- - - 1 1 - ---- 1 row(s) selected. ->> ->> ->>-- test external table attributes ->>set schema trafodion.sch; - ---- SQL operation complete. ->>drop external table if exists store_sales_parquet for hive.hive.store_sales_parquet; - ---- SQL operation complete. ->>create external table store_sales_parquet -+> for hive.hive.store_sales_parquet; - ---- SQL operation complete. ->>invoke hive.hive.store_sales_parquet; - --- Definition of hive table STORE_SALES_PARQUET -<<<<<<< HEAD --- Definition current Mon Nov 27 20:15:06 2017 -======= --- Definition current Sun Jun 11 11:30:31 2017 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - - ( - SS_SOLD_DATE_SK INT - , SS_SOLD_TIME_SK INT - , SS_ITEM_SK INT - , SS_CUSTOMER_SK INT - , SS_CDEMO_SK INT - , SS_HDEMO_SK INT - , SS_ADDR_SK INT - , SS_STORE_SK INT - , SS_PROMO_SK INT - , SS_TICKET_NUMBER INT - , SS_QUANTITY INT - , SS_WHOLESALE_COST REAL - , SS_LIST_PRICE REAL - , SS_SALES_PRICE REAL - , SS_EXT_DISCOUNT_AMT REAL - , SS_EXT_SALES_PRICE REAL - , SS_EXT_WHOLESALE_COST REAL - , SS_EXT_LIST_PRICE REAL - , SS_EXT_TAX REAL - , SS_COUPON_AMT REAL - , SS_NET_PAID REAL - , SS_NET_PAID_INC_TAX REAL - , SS_NET_PROFIT REAL - ) - /* stored as parquet */ - ---- SQL operation complete. ->> ->>set schema hive.hive; - ---- SQL operation complete. ->>prepare s from select * from store_sales_parquet where ss_item_sk = 1; - ---- SQL command prepared. ->>explain s; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... S -<<<<<<< HEAD -PLAN_ID .................. 212378573706907158 -ROWS_OUT ............. 1,698 -EST_TOTAL_COST ......... 234.71 -======= -PLAN_ID .................. 212363940631673658 -ROWS_OUT ............ 12,739 -EST_TOTAL_COST ...... 13,850.42 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -STATEMENT ................ select * - from store_sales_parquet - where ss_item_sk = 1; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 3 ONLY CHILD 2 -REQUESTS_IN .............. 1 -ROWS_OUT ............. 1,698 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ......... 234.71 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ........... 2.73638e+06 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... 
master - record_length ........ 138 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinality 1,698 - total_overflow_size .... 0.00 KB - esp_2_node_map ......... (-1(4 times)) - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_PARQUET_FOR ON - HIST_ROWCOUNT_REQU 50,000 - HIVE_USE_EXT_TABLE_ATTR ON - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - PARQUET_COLUMNS_PUSHDOW ON - PARQUET_PRED_PUSHDOWN ON - PARQUET_AGGR_PUSHDOWN OFF - SCHEMA ................. HIVE.HIVE - select_list ............ HIVE.STORE_SALES_PARQUET.SS_SOLD_DATE_SK, - HIVE.STORE_SALES_PARQUET.SS_SOLD_TIME_SK, %(1), - HIVE.STORE_SALES_PARQUET.SS_CUSTOMER_SK, - HIVE.STORE_SALES_PARQUET.SS_CDEMO_SK, - HIVE.STORE_SALES_PARQUET.SS_HDEMO_SK, - HIVE.STORE_SALES_PARQUET.SS_ADDR_SK, - HIVE.STORE_SALES_PARQUET.SS_STORE_SK, - HIVE.STORE_SALES_PARQUET.SS_PROMO_SK, - HIVE.STORE_SALES_PARQUET.SS_TICKET_NUMBER, - HIVE.STORE_SALES_PARQUET.SS_QUANTITY, - HIVE.STORE_SALES_PARQUET.SS_WHOLESALE_COST, - HIVE.STORE_SALES_PARQUET.SS_LIST_PRICE, - HIVE.STORE_SALES_PARQUET.SS_SALES_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_DISCOUNT_AMT, - HIVE.STORE_SALES_PARQUET.SS_EXT_SALES_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_WHOLESALE_COST, - HIVE.STORE_SALES_PARQUET.SS_EXT_LIST_PRICE, - HIVE.STORE_SALES_PARQUET.SS_EXT_TAX, - HIVE.STORE_SALES_PARQUET.SS_COUPON_AMT, - HIVE.STORE_SALES_PARQUET.SS_NET_PAID, - HIVE.STORE_SALES_PARQUET.SS_NET_PAID_INC_TAX, - HIVE.STORE_SALES_PARQUET.SS_NET_PROFIT - input_variables ........ %(1) - - -ESP_EXCHANGE ============================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ............. 1,698 -EST_OPER_COST ............ 0.01 -EST_TOTAL_COST ......... 234.71 -DESCRIPTION - max_card_est ........... 2.73638e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - est_memory_per_node .... 29.297 KB - record_length ........ 132 - buffer_size ........ 5,000 - parent_processes ....... 1 - child_processes ........ 4 - child_partitioning_func hash2 partitioned 4 ways on (randomNum) - - -PARQUET_SCAN ============================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.STORE_SALES_PARQUET -REQUESTS_IN .............. 1 -ROWS_OUT ............. 1,698 -EST_OPER_COST .......... 234.71 -EST_TOTAL_COST ......... 234.71 -DESCRIPTION - max_card_est ........... 2.73638e+06 - fragment_id ............ 2 - parent_frag ............ 0 - fragment_type .......... esp - record_length ........ 132 - scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_PARQUET - object_type ............ Hive_Parquet - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 23 - probes ................. 
1 - rows_accessed .......... 2.8804e+06 - parquet_pred_pushdown yes - parquet_search_argument and( HIVE.STORE_SALES_PARQUET.SS_ITEM_SK = %(1) ) - ---- SQL operation complete. ->> ->>-- join with nested join ->>?ignore ->>cqd orc_njs 'on'; ->>control query shape nested_join(scan(path 'CUSTOMER_PARQUET'), -+> scan(path 'STORE_SALES_PARQUET')); ->>prepare s from select * from customer_parquet, store_sales_parquet -+> where store_sales_parquet.ss_item_sk = customer_parquet.c_customer_sk; ->>explain options 'f' s; ->>explain s; ->>cqd orc_njs reset; ->>?ignore ->> ->>-- join with parallel nested join ->> ->>control query shape off; - ---- SQL operation complete. ->>cqd HIVE_USE_EXT_TABLE_ATTRS 'off'; - ---- SQL operation complete. ->>cqd ncm_orc_costing 'on'; - ---- SQL operation complete. ->>cqd orc_njs 'on'; - ---- SQL operation complete. ->>cqd parallel_num_esps '4'; - ---- SQL operation complete. ->> ->>prepare s from -+>select [last 0] ss_net_profit from -+>date_dim_parquet dim, store_sales_sorted_parquet ss -+>where -+>dim.d_date_sk = ss.ss_sold_date_sk -+>and d_year in (2001) and d_dom = 30 -- produce 12 rows -+>; - ---- SQL command prepared. ->> ->>explain options 'f' s; - -LC RC OP OPERATOR OPT DESCRIPTION CARD ----- ---- ---- -------------------- -------- -------------------- --------- - -6 . 7 root 1.00E+000 -5 . 6 firstn 1.00E+000 -4 . 5 esp_exchange 1:4(hash2) 2.88E+006 -3 2 4 hybrid_hash_join 2.88E+006 -. . 3 parquet_scan STORE_SALES_SORTED_P 2.88E+006 -1 . 2 esp_exchange 4(rep-b):4(hash2) 2.00E+000 -. . 1 parquet_scan DATE_DIM_PARQUET 2.00E+000 - ---- SQL operation complete. ->> ->>cqd HIVE_USE_EXT_TABLE_ATTRS reset; - ---- SQL operation complete. ->>cqd ncm_orc_costing reset; - ---- SQL operation complete. ->>cqd orc_njs reset; - ---- SQL operation complete. ->>cqd parallel_num_esps reset; - ---- SQL operation complete. ->> ->> ->>-- more external table tests ->> ->>control query shape cut; - ---- SQL operation complete. ->>set schema trafodion.sch; - ---- SQL operation complete. ->>drop external table if exists date_dim_parquet for hive.hive.date_dim_parquet; - ---- SQL operation complete. ->>cqd volatile_table_find_suitable_key 'SYSTEM'; - ---- SQL operation complete. ->>create external table date_dim_parquet -+> (d_date_sk int, d_date_id varchar(100 bytes) character set utf8, d_date date, -+> d_month_seq int, d_week_seq int, d_quarter_seq int, d_year int, d_dow int, -+> d_moy int, d_dom int, d_qoy int, d_fy_year int, d_fy_quarter_seq int, -+> d_fy_week_seq int, -+> d_day_name varchar(120 bytes) character set utf8, d_quarter_name varchar(200 bytes) character set utf8, d_holiday varchar(100 bytes) character set utf8, -+> d_weekend varchar(100 bytes) character set utf8, d_following_holiday varchar(100 bytes) character set utf8, -+> d_first_dom int, d_last_dom int, d_same_day_ly int, d_same_day_lq int, -+> d_current_day varchar(100 bytes) character set utf8, d_current_week varchar(111 bytes) character set utf8, -+> d_current_month varchar(200 bytes) character set utf8, d_current_quarter varchar(100 bytes) character set utf8, -+> d_current_year varchar(100 bytes) character set utf8) -+> for hive.hive.date_dim_parquet; - ---- SQL operation complete. ->>invoke hive.hive.date_dim_parquet; - --- Definition of hive table DATE_DIM_PARQUET -<<<<<<< HEAD --- Definition current Mon Nov 27 20:15:11 2017 -======= --- Definition current Sun Jun 11 11:30:49 2017 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- - ( - D_DATE_SK INT - , D_DATE_ID VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_DATE DATE - , D_MONTH_SEQ INT - , D_WEEK_SEQ INT - , D_QUARTER_SEQ INT - , D_YEAR INT - , D_DOW INT - , D_MOY INT - , D_DOM INT - , D_QOY INT - , D_FY_YEAR INT - , D_FY_QUARTER_SEQ INT - , D_FY_WEEK_SEQ INT - , D_DAY_NAME VARCHAR(120 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_QUARTER_NAME VARCHAR(200 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_HOLIDAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_WEEKEND VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_FOLLOWING_HOLIDAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_FIRST_DOM INT - , D_LAST_DOM INT - , D_SAME_DAY_LY INT - , D_SAME_DAY_LQ INT - , D_CURRENT_DAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_WEEK VARCHAR(111 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_MONTH VARCHAR(200 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_QUARTER VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_YEAR VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - ) - /* stored as parquet */ - ---- SQL operation complete. ->>showddl hive.hive.date_dim_parquet; - -/* Hive DDL */ -CREATE TABLE DEFAULT.DATE_DIM_PARQUET - ( - D_DATE_SK int - , D_DATE_ID string - , D_DATE timestamp - , D_MONTH_SEQ int - , D_WEEK_SEQ int - , D_QUARTER_SEQ int - , D_YEAR int - , D_DOW int - , D_MOY int - , D_DOM int - , D_QOY int - , D_FY_YEAR int - , D_FY_QUARTER_SEQ int - , D_FY_WEEK_SEQ int - , D_DAY_NAME string - , D_QUARTER_NAME string - , D_HOLIDAY string - , D_WEEKEND string - , D_FOLLOWING_HOLIDAY string - , D_FIRST_DOM int - , D_LAST_DOM int - , D_SAME_DAY_LY int - , D_SAME_DAY_LQ int - , D_CURRENT_DAY string - , D_CURRENT_WEEK string - , D_CURRENT_MONTH string - , D_CURRENT_QUARTER string - , D_CURRENT_YEAR string - ) - stored as parquet -; - -<<<<<<< HEAD -======= -REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_PARQUET; -/* ObjectUID = 563644711473585265 */ - ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
-/* Trafodion DDL */ - -REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_PARQUET; -/* ObjectUID = 1842681638780263725 */ - -CREATE EXTERNAL TABLE DATE_DIM_PARQUET - ( - D_DATE_SK INT DEFAULT NULL - , D_DATE_ID VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_DATE DATE DEFAULT NULL - , D_MONTH_SEQ INT DEFAULT NULL - , D_WEEK_SEQ INT DEFAULT NULL - , D_QUARTER_SEQ INT DEFAULT NULL - , D_YEAR INT DEFAULT NULL - , D_DOW INT DEFAULT NULL - , D_MOY INT DEFAULT NULL - , D_DOM INT DEFAULT NULL - , D_QOY INT DEFAULT NULL - , D_FY_YEAR INT DEFAULT NULL - , D_FY_QUARTER_SEQ INT DEFAULT NULL - , D_FY_WEEK_SEQ INT DEFAULT NULL - , D_DAY_NAME VARCHAR(120 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_QUARTER_NAME VARCHAR(200 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_HOLIDAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_WEEKEND VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_FOLLOWING_HOLIDAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_FIRST_DOM INT DEFAULT NULL - , D_LAST_DOM INT DEFAULT NULL - , D_SAME_DAY_LY INT DEFAULT NULL - , D_SAME_DAY_LQ INT DEFAULT NULL - , D_CURRENT_DAY VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_WEEK VARCHAR(111 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_MONTH VARCHAR(200 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_QUARTER VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - , D_CURRENT_YEAR VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT DEFAULT NULL - ) - FOR HIVE.HIVE.DATE_DIM_PARQUET -; - ---- SQL operation complete. ->>prepare s from select * from hive.hive.date_dim_parquet where d_date = date '2016-01-27'; - ---- SQL command prepared. ->>explain s; - ------------------------------------------------------------------- PLAN SUMMARY -MODULE_NAME .............. DYNAMICALLY COMPILED -STATEMENT_NAME ........... S -<<<<<<< HEAD -PLAN_ID .................. 212378573717690486 -ROWS_OUT ............... 271 -EST_TOTAL_COST ......... 244.13 -======= -PLAN_ID .................. 212363940664424812 -ROWS_OUT ............. 1,978 -EST_TOTAL_COST ......... 477.06 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. -STATEMENT ................ select * - from hive.hive.date_dim_parquet - where d_date = date '2016-01-27'; - - ------------------------------------------------------------------- NODE LISTING -ROOT ====================================== SEQ_NO 2 ONLY CHILD 1 -REQUESTS_IN .............. 1 -ROWS_OUT ............... 271 -EST_OPER_COST ............ 0 -EST_TOTAL_COST ......... 244.13 -DESCRIPTION - est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB - max_card_est ...... 69,397.5 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ...... 1,477 - statement_index ........ 0 - affinity_value ......... 0 - max_max_cardinality 271 - total_overflow_size .... 0.00 KB - xn_access_mode ......... read_only - xn_autoabort_interval 0 - auto_query_retry ....... enabled - plan_version ....... 2,600 - embedded_arkcmp ........ used - IS_SQLCI ............... ON - LDAP_USERNAME .......... NOT AVAILABLE -<<<<<<< HEAD - HBASE_FILTER_PREDS ..... 2 - TRAF_INDEX_CREATE_OPT ON - TRAF_USE_REGION_XN ..... ON -======= - MODE_SEABASE ........... ON - SEABASE_VOLATILE_TABLES ON - HBASE_ASYNC_DROP_TABLE OFF - HBASE_SERIALIZATION .... 
ON - HBASE_SMALL_SCANNER .... SYSTEM - HBASE_FILTER_PREDS ..... 2 - TRAF_ALIGNED_ROW_FORMAT ON - TRAF_INDEX_CREATE_OPT ON ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. - HIVE_MAX_STRING_LENGTH 20 - MODE_SEAHIVE ........... ON - TRAF_ENABLE_PARQUET_FOR ON - HIST_ROWCOUNT_REQU 50,000 - HIST_MISSING_STATS_WARN 0 - ORC_NJS_PROBES_T 1,000,000 - HIVE_MIN_NUM_ESPS_PER_D 0 - PARQUET_COLUMNS_PUSHDOW ON - PARQUET_PRED_PUSHDOWN ON - PARQUET_AGGR_PUSHDOWN OFF - SCHEMA ................. TRAFODION.SCH - VOLATILE_TABLE_FIND_SUI SYSTEM - select_list ............ HIVE.DATE_DIM_PARQUET.D_DATE_SK, - HIVE.DATE_DIM_PARQUET.D_DATE_ID, %(2016-01-27), - HIVE.DATE_DIM_PARQUET.D_MONTH_SEQ, - HIVE.DATE_DIM_PARQUET.D_WEEK_SEQ, - HIVE.DATE_DIM_PARQUET.D_QUARTER_SEQ, - HIVE.DATE_DIM_PARQUET.D_YEAR, - HIVE.DATE_DIM_PARQUET.D_DOW, - HIVE.DATE_DIM_PARQUET.D_MOY, - HIVE.DATE_DIM_PARQUET.D_DOM, - HIVE.DATE_DIM_PARQUET.D_QOY, - HIVE.DATE_DIM_PARQUET.D_FY_YEAR, - HIVE.DATE_DIM_PARQUET.D_FY_QUARTER_SEQ, - HIVE.DATE_DIM_PARQUET.D_FY_WEEK_SEQ, - HIVE.DATE_DIM_PARQUET.D_DAY_NAME, - HIVE.DATE_DIM_PARQUET.D_QUARTER_NAME, - HIVE.DATE_DIM_PARQUET.D_HOLIDAY, - HIVE.DATE_DIM_PARQUET.D_WEEKEND, - HIVE.DATE_DIM_PARQUET.D_FOLLOWING_HOLIDAY, - HIVE.DATE_DIM_PARQUET.D_FIRST_DOM, - HIVE.DATE_DIM_PARQUET.D_LAST_DOM, - HIVE.DATE_DIM_PARQUET.D_SAME_DAY_LY, - HIVE.DATE_DIM_PARQUET.D_SAME_DAY_LQ, - HIVE.DATE_DIM_PARQUET.D_CURRENT_DAY, - HIVE.DATE_DIM_PARQUET.D_CURRENT_WEEK, - HIVE.DATE_DIM_PARQUET.D_CURRENT_MONTH, - HIVE.DATE_DIM_PARQUET.D_CURRENT_QUARTER, - HIVE.DATE_DIM_PARQUET.D_CURRENT_YEAR - input_variables ........ %(2016-01-27) - - -PARQUET_SCAN ============================== SEQ_NO 1 NO CHILDREN -TABLE_NAME ............... HIVE.HIVE.DATE_DIM_PARQUET -REQUESTS_IN .............. 1 -ROWS_OUT ............... 271 -EST_OPER_COST .......... 244.13 -EST_TOTAL_COST ......... 244.13 -DESCRIPTION - max_card_est ...... 69,397.5 - fragment_id ............ 0 - parent_frag ............ (none) - fragment_type .......... master - record_length ...... 1,471 - scan_type .............. full scan of table HIVE.HIVE.DATE_DIM_PARQUET - object_type ............ Hive_Parquet - scan_direction ......... forward - lock_mode .............. not specified, defaulted to lock cursor - access_mode ............ not specified, defaulted to read committed - columns_retrieved ..... 28 - probes ................. 1 - rows_accessed ..... 73,049 - parquet_pred_pushdown yes - parquet_search_argument and( HIVE.DATE_DIM_PARQUET.D_DATE = %(2016-01-27) ) - ---- SQL operation complete. ->> ->>drop external table if exists date_dim_parquet for hive.hive.date_dim_parquet; - ---- SQL operation complete. ->>create external table date_dim_parquet -+> (d_date_sk int, d_date_id varchar(100 bytes) character set utf8, d_date date) -+> for hive.hive.date_dim_parquet; - ---- SQL operation complete. ->>invoke hive.hive.date_dim_parquet; - --- Definition of hive table DATE_DIM_PARQUET -<<<<<<< HEAD --- Definition current Mon Nov 27 20:15:28 2017 -======= --- Definition current Sun Jun 11 11:31:24 2017 ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
- - ( - D_DATE_SK INT - , D_DATE_ID VARCHAR(100 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_DATE DATE - , D_MONTH_SEQ INT - , D_WEEK_SEQ INT - , D_QUARTER_SEQ INT - , D_YEAR INT - , D_DOW INT - , D_MOY INT - , D_DOM INT - , D_QOY INT - , D_FY_YEAR INT - , D_FY_QUARTER_SEQ INT - , D_FY_WEEK_SEQ INT - , D_DAY_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_QUARTER_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_HOLIDAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_WEEKEND VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_FOLLOWING_HOLIDAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_FIRST_DOM INT - , D_LAST_DOM INT - , D_SAME_DAY_LY INT - , D_SAME_DAY_LQ INT - , D_CURRENT_DAY VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_WEEK VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_MONTH VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_QUARTER VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - , D_CURRENT_YEAR VARCHAR(20 BYTES) CHARACTER SET UTF8 - COLLATE DEFAULT - ) - /* stored as parquet */ - ---- SQL operation complete. ->>showddl hive.hive.date_dim_parquet; - -/* Hive DDL */ -CREATE TABLE DEFAULT.DATE_DIM_PARQUET - ( - D_DATE_SK int - , D_DATE_ID string - , D_DATE timestamp - , D_MONTH_SEQ int - , D_WEEK_SEQ int - , D_QUARTER_SEQ int - , D_YEAR int - , D_DOW int - , D_MOY int - , D_DOM int - , D_QOY int - , D_FY_YEAR int - , D_FY_QUARTER_SEQ int - , D_FY_WEEK_SEQ int - , D_DAY_NAME string - , D_QUARTER_NAME string - , D_HOLIDAY string - , D_WEEKEND string - , D_FOLLOWING_HOLIDAY string - , D_FIRST_DOM int - , D_LAST_DOM int - , D_SAME_DAY_LY int - , D_SAME_DAY_LQ int - , D_CURRENT_DAY string - , D_CURRENT_WEEK string - , D_CURRENT_MONTH string - , D_CURRENT_QUARTER string - , D_CURRENT_YEAR string - ) - stored as parquet -; - -<<<<<<< HEAD -======= -REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_PARQUET; -/* ObjectUID = 563644711473585265 */ - ->>>>>>> 1830668... Enhanced checktests changes. Jenkins usage not yet enabled. 
-/* Trafodion DDL */
-
-REGISTER /*INTERNAL*/ HIVE TABLE HIVE.HIVE.DATE_DIM_PARQUET;
-/* ObjectUID = 1842681638780263725 */
-
-CREATE EXTERNAL TABLE DATE_DIM_PARQUET
- (
- D_DATE_SK INT DEFAULT NULL
- , D_DATE_ID VARCHAR(100 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_DATE DATE DEFAULT NULL
- , D_MONTH_SEQ INT DEFAULT NULL
- , D_WEEK_SEQ INT DEFAULT NULL
- , D_QUARTER_SEQ INT DEFAULT NULL
- , D_YEAR INT DEFAULT NULL
- , D_DOW INT DEFAULT NULL
- , D_MOY INT DEFAULT NULL
- , D_DOM INT DEFAULT NULL
- , D_QOY INT DEFAULT NULL
- , D_FY_YEAR INT DEFAULT NULL
- , D_FY_QUARTER_SEQ INT DEFAULT NULL
- , D_FY_WEEK_SEQ INT DEFAULT NULL
- , D_DAY_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_QUARTER_NAME VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_HOLIDAY VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_WEEKEND VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_FOLLOWING_HOLIDAY VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_FIRST_DOM INT DEFAULT NULL
- , D_LAST_DOM INT DEFAULT NULL
- , D_SAME_DAY_LY INT DEFAULT NULL
- , D_SAME_DAY_LQ INT DEFAULT NULL
- , D_CURRENT_DAY VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_CURRENT_WEEK VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_CURRENT_MONTH VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_CURRENT_QUARTER VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- , D_CURRENT_YEAR VARCHAR(20 BYTES) CHARACTER SET UTF8
- COLLATE DEFAULT DEFAULT NULL
- )
- FOR HIVE.HIVE.DATE_DIM_PARQUET
-;
-
---- SQL operation complete.
->>prepare s from select * from hive.hive.date_dim_parquet where d_date = date '2016-01-27';
-
---- SQL command prepared.
->>explain s;
-
------------------------------------------------------------------- PLAN SUMMARY
-MODULE_NAME .............. DYNAMICALLY COMPILED
-STATEMENT_NAME ........... S
-PLAN_ID .................. 212378573717690486
-ROWS_OUT ............... 271
-EST_TOTAL_COST ......... 244.13
-STATEMENT ................ select *
- from hive.hive.date_dim_parquet
- where d_date = date '2016-01-27';
-
-
------------------------------------------------------------------- NODE LISTING
-ROOT ====================================== SEQ_NO 2 ONLY CHILD 1
-REQUESTS_IN .............. 1
-ROWS_OUT ............... 271
-EST_OPER_COST ............ 0
-EST_TOTAL_COST ......... 244.13
-DESCRIPTION
- est_memory_per_node .... 10240.00(Limit), 0.00(BMOs), 0.00(nBMOs) MB
- max_card_est ...... 69,397.5
- fragment_id ............ 0
- parent_frag ............ (none)
- fragment_type .......... master
- record_length ...... 1,477
- statement_index ........ 0
- affinity_value ......... 0
- max_max_cardinality 271
- total_overflow_size .... 0.00 KB
- xn_access_mode ......... read_only
- xn_autoabort_interval 0
- auto_query_retry ....... enabled
- plan_version ....... 2,600
- embedded_arkcmp ........ used
- IS_SQLCI ............... ON
- LDAP_USERNAME .......... NOT AVAILABLE
- HBASE_FILTER_PREDS ..... 2
- TRAF_INDEX_CREATE_OPT ON
- TRAF_USE_REGION_XN ..... ON
- HIVE_MAX_STRING_LENGTH 20
- MODE_SEAHIVE ........... ON
- TRAF_ENABLE_PARQUET_FOR ON
- HIST_ROWCOUNT_REQU 50,000
- HIST_MISSING_STATS_WARN 0
- ORC_NJS_PROBES_T 1,000,000
- HIVE_MIN_NUM_ESPS_PER_D 0
- PARQUET_COLUMNS_PUSHDOW ON
- PARQUET_PRED_PUSHDOWN ON
- PARQUET_AGGR_PUSHDOWN OFF
- SCHEMA ................. TRAFODION.SCH
- VOLATILE_TABLE_FIND_SUI SYSTEM
- select_list ............ HIVE.DATE_DIM_PARQUET.D_DATE_SK,
- HIVE.DATE_DIM_PARQUET.D_DATE_ID, %(2016-01-27),
- HIVE.DATE_DIM_PARQUET.D_MONTH_SEQ,
- HIVE.DATE_DIM_PARQUET.D_WEEK_SEQ,
- HIVE.DATE_DIM_PARQUET.D_QUARTER_SEQ,
- HIVE.DATE_DIM_PARQUET.D_YEAR,
- HIVE.DATE_DIM_PARQUET.D_DOW,
- HIVE.DATE_DIM_PARQUET.D_MOY,
- HIVE.DATE_DIM_PARQUET.D_DOM,
- HIVE.DATE_DIM_PARQUET.D_QOY,
- HIVE.DATE_DIM_PARQUET.D_FY_YEAR,
- HIVE.DATE_DIM_PARQUET.D_FY_QUARTER_SEQ,
- HIVE.DATE_DIM_PARQUET.D_FY_WEEK_SEQ,
- HIVE.DATE_DIM_PARQUET.D_DAY_NAME,
- HIVE.DATE_DIM_PARQUET.D_QUARTER_NAME,
- HIVE.DATE_DIM_PARQUET.D_HOLIDAY,
- HIVE.DATE_DIM_PARQUET.D_WEEKEND,
- HIVE.DATE_DIM_PARQUET.D_FOLLOWING_HOLIDAY,
- HIVE.DATE_DIM_PARQUET.D_FIRST_DOM,
- HIVE.DATE_DIM_PARQUET.D_LAST_DOM,
- HIVE.DATE_DIM_PARQUET.D_SAME_DAY_LY,
- HIVE.DATE_DIM_PARQUET.D_SAME_DAY_LQ,
- HIVE.DATE_DIM_PARQUET.D_CURRENT_DAY,
- HIVE.DATE_DIM_PARQUET.D_CURRENT_WEEK,
- HIVE.DATE_DIM_PARQUET.D_CURRENT_MONTH,
- HIVE.DATE_DIM_PARQUET.D_CURRENT_QUARTER,
- HIVE.DATE_DIM_PARQUET.D_CURRENT_YEAR
- input_variables ........ %(2016-01-27)
-
-
-PARQUET_SCAN ============================== SEQ_NO 1 NO CHILDREN
-TABLE_NAME ............... HIVE.HIVE.DATE_DIM_PARQUET
-REQUESTS_IN .............. 1
-ROWS_OUT ............... 271
-EST_OPER_COST .......... 244.13
-EST_TOTAL_COST ......... 244.13
-DESCRIPTION
- max_card_est ...... 69,397.5
- fragment_id ............ 0
- parent_frag ............ (none)
- fragment_type .......... master
- record_length ...... 1,471
- scan_type .............. full scan of table HIVE.HIVE.DATE_DIM_PARQUET
- object_type ............ Hive_Parquet
- scan_direction ......... forward
- lock_mode .............. not specified, defaulted to lock cursor
- access_mode ............ not specified, defaulted to read committed
- columns_retrieved ..... 28
- probes ................. 1
- rows_accessed ..... 73,049
- parquet_pred_pushdown yes
- parquet_search_argument and( HIVE.DATE_DIM_PARQUET.D_DATE = %(2016-01-27) )
-
---- SQL operation complete.
->>
->>
->>-- error cases
->>drop external table if exists date_dim_parquet for hive.hive.date_dim_parquet;
-
---- SQL operation complete.
->>
->>-- column d_date_skk doesn't exist in native hive table
->>create external table date_dim_parquet
-+> (d_date_skk int)
-+> for hive.hive.date_dim_parquet;
-
-*** ERROR[1009] Column D_DATE_SKK does not exist in the specified table.
-
---- SQL operation failed with errors.
->>
->>-- del/update not supported on parquet or hive
->>prepare s from delete from hive.hive.store2_sales_parquet;
-
-*** ERROR[4223] Update/Delete on PARQUET table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from update hive.hive.store2_sales_parquet set ss_ext_tax = 1;
-
-*** ERROR[4223] Update/Delete on PARQUET table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from delete from hive.hive.store_sales;
-
-*** ERROR[4223] Update/Delete on Hive table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from update hive.hive.store_sales set ss_ext_tax = 1;
-
-*** ERROR[4223] Update/Delete on Hive table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>
->>-- upsert/insert cannot specify column list and must provide all column values.
->>prepare s from upsert into hive.hive.store2_sales_parquet values (1);
-
-*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23).
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from upsert using load into hive.hive.store2_sales_parquet values (1);
-
-*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23).
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from insert into hive.hive.store2_sales_parquet values (1);
-
-*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23).
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from update hive.hive.store2_sales_parquet set ss_net_paid = 1;
-
-*** ERROR[4223] Update/Delete on PARQUET table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from upsert into hive.hive.store2_sales_parquet(ss_sold_date_sk) values (1);
-
-*** ERROR[4223] Target column list specification for insert/upsert into a Hive table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from insert into hive.hive.store2_sales_parquet(ss_sold_date_sk) values (1);
-
-*** ERROR[4223] Target column list specification for insert/upsert into a Hive table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from upsert into hive.hive.store_sales(ss_sold_date_sk) values (1);
-
-*** ERROR[4223] Target column list specification for insert/upsert into a Hive table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from insert into hive.hive.store_sales(ss_sold_date_sk) values (1);
-
-*** ERROR[4223] Target column list specification for insert/upsert into a Hive table is not supported in this software version or edition.
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from upsert into hive.hive.store_sales values (1);
-
-*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23).
-
-*** ERROR[8822] The statement was not prepared.
-
->>prepare s from insert into hive.hive.store_sales values (1);
-
-*** ERROR[4023] The degree of each row value constructor (1) must equal the degree of the target table column list (23).
-
-*** ERROR[8822] The statement was not prepared.
-
->>
->>
->>-- test min-max optimization
->>
->>update statistics for table hive.hive.date_dim on every column sample;
-
---- SQL operation complete.
->>update statistics for table hive.hive.time_dim on every column sample;
-
---- SQL operation complete.
->>
->>cqd PARQUET_PRED_PUSHDOWN 'ON';
-
---- SQL operation complete.
->>cqd GEN_HSHJ_MIN_MAX_OPT 'on';
-
---- SQL operation complete.
->>cqd parallel_num_esps '4';
-
---- SQL operation complete.
->>cqd nested_joins 'off';
-
---- SQL operation complete.
->>
->>prepare xx from select count(*) from
-+>hive.hive.store2_sales_parquet, -- sorted on ss_sold_date_sk
-+>hive.hive.date_dim,
-+>hive.hive.time_dim
-+> where ss_sold_date_sk = d_date_sk and
-+> ss_sold_date_sk = t_time_sk and d_year = 2001 and t_hour = 10 ;
-
---- SQL command prepared.
->>
->>-- display the push-down predicates, which should include the
->>-- min/max expressions from dimension table date_dim and time_dim.
->>select cast(tokenstr('parquet_search_arguments', description, 'executor_predicates')
-+> as char(400))
-+> from table (explain(NULL,'XX'))
-+>where position('parquet_search_arguments' in description) > 0 ;
-
-(EXPR)
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-
-and( and( and( and( and( not( HIVE.STORE2_SALES_PARQUET.SS_SOLD_DATE_SK is null ) not( HIVE.STORE2_SALES_PARQUET.SS_SOLD_DATE_SK <\:_sys_MinVal0 ) ) HIVE.STORE2_SALES_PARQUET.SS_SOLD_DATE_SK <= \:_sys_MaxVal0 ) not( HIVE.STORE2_SALES_PARQUET.SS_SOLD_DATE_SK <\:_sys_MinVal1 ) ) HIVE.STORE2_SALES_PARQUET.SS_SOLD_DATE_SK <= \:_sys_MaxVal1 ) )
-
---- 1 row(s) selected.
->>
->>execute xx;
-
-(EXPR)
---------------------
-
- 0
-
---- 1 row(s) selected.
->>
->>cqd parallel_num_esps reset;
-
---- SQL operation complete.
->>cqd nested_joins reset;
-
---- SQL operation complete.
->>
->>-- test NJ into a sorted PARQUET table
->>
->>cqd parquet_pred_pushdown 'ON';
-
---- SQL operation complete.
->>cqd orc_njs 'ON';
-
---- SQL operation complete.
->>cqd parallel_num_esps '4';
-
---- SQL operation complete.
->>cqd HIVE_USE_EXT_TABLE_ATTRS 'off';
-
---- SQL operation complete.
->>
->>prepare xx from select count(*) from hive.hive.customer_parquet, hive.hive.store_sales_sorted_parquet
-+> where ss_sold_date_sk = c_customer_sk
-+> and c_first_sales_date_sk = 4;
-
---- SQL command prepared.
->>
->>explain xx;
-
------------------------------------------------------------------- PLAN SUMMARY
-MODULE_NAME .............. DYNAMICALLY COMPILED
-STATEMENT_NAME ........... XX
-PLAN_ID .................. 212378573801436592
-ROWS_OUT ................. 1
-EST_TOTAL_COST .......... 23.94
-STATEMENT ................ select count(*)
- from hive.hive.customer_parquet,
- hive.hive.store_sales_sorted_parquet
- where ss_sold_date_sk = c_customer_sk and
- c_first_sales_date_sk = 4;
-
-
------------------------------------------------------------------- NODE LISTING
-ROOT ====================================== SEQ_NO 8 ONLY CHILD 7
-REQUESTS_IN .............. 1
-ROWS_OUT ................. 1
-EST_OPER_COST ............ 0
-EST_TOTAL_COST .......... 23.94
-DESCRIPTION
- est_memory_per_node .... 10240.00(Limit), 0.13(BMOs), 0.00(nBMOs) MB
- max_card_est ........... 1
- fragment_id ............ 0
- parent_frag ............ (none)
- fragment_type .......... master
- record_length .......... 8
- statement_index ........ 0
- affinity_value ......... 0
- max_max_cardinal 2,880,404
- total_overflow_size .... 0.00 KB
- esp_2_node_map ......... (-1(4 times))
- esp_3_node_map ......... (-1(4 times))
- xn_access_mode ......... read_only
- xn_autoabort_interval 0
- auto_query_retry ....... enabled
- plan_version ....... 2,600
- embedded_arkcmp ........ used
- IS_SQLCI ............... ON
- LDAP_USERNAME .......... NOT AVAILABLE
- HBASE_FILTER_PREDS ..... 2
- TRAF_INDEX_CREATE_OPT ON
- TRAF_USE_REGION_XN ..... ON
- HIVE_MAX_STRING_LENGTH 20
- MODE_SEAHIVE ........... ON
- TRAF_ENABLE_PARQUET_FOR ON
- HIST_ROWCOUNT_REQU 50,000
- HIST_MISSING_STATS_WARN 0
- ORC_NJS_PROBES_T 1,000,000
- HIVE_MIN_NUM_ESPS_PER_D 0
- PARQUET_COLUMNS_PUSHDOW ON
- PARQUET_AGGR_PUSHDOWN OFF
- SCHEMA ................. TRAFODION.SCH
- VOLATILE_TABLE_FIND_SUI SYSTEM
- GEN_HSHJ_MIN_MAX_OPT ... ON
- PARQUET_PRED_PUSHDOWN ON
- ORC_NJS ................ ON
- PARALLEL_NUM_ESPS ...... 4
- HIVE_USE_EXT_TABLE_ATTR OFF
- select_list ............ cast(sum(count(1 )))
- input_variables ........ %(4), execution_count
-
-
-SORT_PARTIAL_AGGR_ROOT ==================== SEQ_NO 7 ONLY CHILD 6
-REQUESTS_IN .............. 1
-ROWS_OUT ................. 1
-EST_OPER_COST ............ 0.01
-EST_TOTAL_COST .......... 23.94
-DESCRIPTION
- max_card_est ........... 1
- fragment_id ............ 0
- parent_frag ............ (none)
- fragment_type .......... master
- record_length .......... 8
- aggregates ............. sum(count(1 ))
-
-
-ESP_EXCHANGE ============================== SEQ_NO 6 ONLY CHILD 5
-REQUESTS_IN .............. 1
-ROWS_OUT ................. 1
-EST_OPER_COST ............ 0.01
-EST_TOTAL_COST .......... 23.94
-DESCRIPTION
- max_card_est ........... 1
- fragment_id ............ 2
- parent_frag ............ 0
- fragment_type .......... esp
- est_memory_per_node .... 29.297 KB
- record_length .......... 8
- buffer_size ........ 5,000
- parent_processes ....... 1
- child_processes ........ 4
- child_partitioning_func hash2 partitioned 4 ways on (randomNum)
-
-
-SORT_PARTIAL_AGGR_LEAF ==================== SEQ_NO 5 ONLY CHILD 4
-REQUESTS_IN .............. 1
-ROWS_OUT ................. 1
-EST_OPER_COST ............ 0.01
-EST_TOTAL_COST .......... 23.94
-DESCRIPTION
- max_card_est ........... 1
- fragment_id ............ 2
- parent_frag ............ 0
- fragment_type .......... esp
- record_length .......... 8
- aggregates ............. count(1 )
-
-
-HYBRID_HASH_JOIN ========================== SEQ_NO 4 CHILDREN 3, 2
-REQUESTS_IN .............. 1
-ROWS_OUT ......... 2,880,404
-EST_OPER_COST ............ 0.02
-EST_TOTAL_COST .......... 23.94
-DESCRIPTION
- memory_quota_per_instan 800 MB
- max_card_est ........... 2.8804e+06
- fragment_id ............ 2
- parent_frag ............ 0
- fragment_type .......... esp
- est_memory_per_instance 66.631 KB
- record_length .......... 0
- join_type .............. inner
- join_method ............ hash
- parallel_join_type ..... 2
- min_max_cols ........... HIVE.CUSTOMER_PARQUET.C_CUSTOMER_SK
- min_max_expr ........... \:_sys_MinVal0, \:_sys_MaxVal0
- hash_join_predicates ... (HIVE.STORE_SALES_SORTED_PARQUET.SS_SOLD_DATE_SK =
- HIVE.CUSTOMER_PARQUET.C_CUSTOMER_SK)
-
-
-PARQUET_SCAN ============================== SEQ_NO 3 NO CHILDREN
-TABLE_NAME ............... HIVE.HIVE.STORE_SALES_SORTED_PARQUET
-REQUESTS_IN .............. 1
-ROWS_OUT ......... 2,880,404
-EST_OPER_COST ........... 19.89
-EST_TOTAL_COST .......... 19.89
-DESCRIPTION
- max_card_est ........... 2.8804e+06
- fragment_id ............ 2
- parent_frag ............ 0
- fragment_type .......... esp
- record_length .......... 6
- scan_type .............. full scan of table HIVE.HIVE.STORE_SALES_SORTED_PARQ
- UET
- object_type ............ Hive_Parquet
- scan_direction ......... forward
- lock_mode .............. not specified, defaulted to lock cursor
- access_mode ............ not specified, defaulted to read committed
- columns_retrieved ...... 1
- probes ................. 1
- rows_accessed .......... 2.8804e+06
- min_max_hashj_cols ..... HIVE.STORE_SALES_SORTED_PARQUET.SS_SOLD_DATE_SK
- parquet_pred_pushdown yes
- parquet_search_argument and( and( and( not( HIVE.STORE_SALES_SORTED_PARQUET.
- SS_SOLD_DATE_SK is null ) not(
- HIVE.STORE_SALES_SORTED_PARQUET.SS_SOLD_DATE_SK
- <\:_sys_MinVal0 ) ) HIVE.STORE_SALES_SORTED_PARQUE
- T.SS_SOLD_DATE_SK <= \:_sys_MaxVal0 ) )
-
-
-ESP_EXCHANGE ============================== SEQ_NO 2 ONLY CHILD 1
-REQUESTS_IN .............. 1
-ROWS_OUT ............... 317
-EST_OPER_COST ............ 0.01
-EST_TOTAL_COST ........... 4.02
-DESCRIPTION
- max_card_est ...... 95,001
- fragment_id ............ 3
- parent_frag ............ 2
- fragment_type .......... esp
- est_memory_per_node .... 32.227 KB
- record_length .......... 6
- buffer_size ........ 5,000
- parent_processes ....... 4
- child_processes ........ 4
- parent_partitioning_fun broadcast 4 times
- child_partitioning_func hash2 partitioned 4 ways on (randomNum)
-
-
-PARQUET_SCAN ============================== SEQ_NO 1 NO CHILDREN
-TABLE_NAME ............... HIVE.HIVE.CUSTOMER_PARQUET
-REQUESTS_IN .............. 1
-ROWS_OUT ............... 317
-EST_OPER_COST ............ 4.02
-EST_TOTAL_COST ........... 4.02
-DESCRIPTION
- max_card_est ...... 95,001
- fragment_id ............ 3
- parent_frag ............ 2
- fragment_type .......... esp
- record_length .......... 6
- scan_type .............. full scan of table HIVE.HIVE.CUSTOMER_PARQUET
- object_type ............ Hive_Parquet
- scan_direction ......... forward
- lock_mode .............. not specified, defaulted to lock cursor
- access_mode ............ not specified, defaulted to read committed
- columns_retrieved ...... 2
- probes ................. 1
- rows_accessed .... 100,000
- parquet_pred_pushdown yes
- parquet_search_argument and( and( HIVE.CUSTOMER_PARQUET.C_FIRST_SALES_DATE_S
- K = %(4) not( HIVE.CUSTOMER_PARQUET.C_CUSTOMER_SK
- is null ) ) )
-
---- SQL operation complete.
->>explain options 'f' xx;
-
-LC RC OP OPERATOR OPT DESCRIPTION CARD
----- ---- ---- -------------------- -------- -------------------- ---------
-
-7 . 8 root 1.00E+000
-6 . 7 sort_partial_aggr_ro 1.00E+000
-5 . 6 esp_exchange 1:4(hash2) 1.00E+000
-4 . 5 sort_partial_aggr_le 1.00E+000
-3 2 4 hybrid_hash_join 2.88E+006
-. . 3 parquet_scan STORE_SALES_SORTED_P 2.88E+006
-1 . 2 esp_exchange 4(rep-b):4(hash2) 3.17E+002
-. . 1 parquet_scan CUSTOMER_PARQUET 3.17E+002
-
---- SQL operation complete.
->>execute xx;
-
-(EXPR)
---------------------
-
- 0
-
---- 1 row(s) selected.
->>
->>cqd orc_njs reset;
-
---- SQL operation complete.
->>cqd parallel_num_esps reset;
-
---- SQL operation complete.
->>cqd HIVE_USE_EXT_TABLE_ATTRS reset;
-
---- SQL operation complete.
->>
->>-- test aggregates pushdown (parquet)
->>
->>-- test aggregates pushdown
->>cqd parquet_aggr_pushdown 'ON';
-
---- SQL operation complete.
->>
->>-- pushdown feasible
->>cqd attempt_esp_parallelism 'OFF';
-
---- SQL operation complete.
->>prepare xx from
-+>select min(ss_sold_date_sk), max(ss_sold_date_sk)
-+>from hive.hive.store2_sales_parquet;
-
---- SQL command prepared.
->>
->>explain options 'f' xx;
-
-LC RC OP OPERATOR OPT DESCRIPTION CARD
----- ---- ---- -------------------- -------- -------------------- ---------
-
-1 . 2 root 1.00E+000
-. . 1 parquet_aggr 1.00E+000
-
---- SQL operation complete.
->>
->>-- pushdown feasible
->>prepare xx from
-+>select ss_sold_date_sk from hive.hive.store2_sales_parquet where
-+>ss_sold_date_sk >
-+>(select count(*) from hive.hive.store2_sales_parquet);
-
---- SQL command prepared.
->>
->>explain options 'f' xx;
-
-LC RC OP OPERATOR OPT DESCRIPTION CARD
----- ---- ---- -------------------- -------- -------------------- ---------
-
-3 . 4 root 9.60E+005
-2 1 3 hybrid_hash_join 9.60E+005
-. . 2 parquet_scan STORE2_SALES_PARQUET 2.88E+006
-. . 1 parquet_aggr 1.00E+000
-
---- SQL operation complete.
->>
->>
->>-- pushdown not feasible due to distinct
->>prepare xx from
-+>select
-+>count(distinct ss_sold_date_sk)
-+>from hive.hive.store2_sales_parquet;
-
---- SQL command prepared.
->>
->>explain options 'f' xx;
-
-LC RC OP OPERATOR OPT DESCRIPTION CARD
----- ---- ---- -------------------- -------- -------------------- ---------
-
-3 . 4 root 1.00E+000
-2 . 3 sort_scalar_aggr 1.00E+000
-1 . 2 hash_groupby 2.88E+006
-. . 1 parquet_scan STORE2_SALES_PARQUET 2.88E+006
-
---- SQL operation complete.
->>
->>--test cardinality
->>
->>set schema hive.hive;
-
---- SQL operation complete.
->>
->>-- single scan
->>prepare xx from
-+>select SS_ITEM_SK,SS_ADDR_SK from store_sales_parquet where
-+>ss_sold_date_sk = 2 and
-+>SS_ADDR_SK in (1,3,4,4);
-
---- SQL command prepared.
->>
->>explain options 'f' xx;
-
-LC RC OP OPERATOR OPT DESCRIPTION CARD
----- ---- ---- -------------------- -------- -------------------- ---------
-
-1 . 2 root 7.19E+001
-. . 1 parquet_scan STORE_SALES_PARQUET 7.19E+001
-
---- SQL operation complete.
->>
->>-- join
->>prepare xx from
-+>select a.SS_ITEM_SK, b.SS_ADDR_SK from
-+>store_sales_parquet a ,store_sales_parquet b
-+>where
-+>a.ss_sold_date_sk = b.ss_sold_date_sk
-+>;
-
---- SQL command prepared.
->>
->>explain options 'f' xx;
-
-LC RC OP OPERATOR OPT DESCRIPTION CARD
----- ---- ---- -------------------- -------- -------------------- ---------
-
-3 . 4 root 5.76E+007
-2 1 3 hybrid_hash_join 5.76E+007
-. . 2 parquet_scan STORE_SALES_PARQUET 2.88E+006
-. . 1 parquet_scan STORE_SALES_PARQUET 2.88E+006
-
---- SQL operation complete.
->>
->>update statistics for table store_sales_parquet_p on every column no sample;
-
---- SQL operation complete.
->>
->>-- compile time elimination
->>prepare xx from
-+>select SS_ITEM_SK,SS_ADDR_SK from store_sales_parquet_p where
-+>ss_sold_date_sk = 2451241
-+>;
-
---- SQL command prepared.
->>
->>explain options 'f' xx;
-
-LC RC OP OPERATOR OPT DESCRIPTION CARD
----- ---- ---- -------------------- -------- -------------------- ---------
-
-1 . 2 root 9.23E+002
-. . 1 parquet_scan STORE_SALES_PARQUET_ 9.23E+002
-
---- SQL operation complete.
->>
->>-- NJ into a parquet table
->>cqd hash_joins 'off';
-
---- SQL operation complete.
->>cqd merge_joins 'off';
-
---- SQL operation complete.
->>cqd PARQUET_NJS 'on';
-
---- SQL operation complete.
->>
->>prepare xx from
-+>select s.ss_sold_date_sk from customer_parquet c, store_sales_parquet_p s
-+>where ss_sold_date_sk = c_customer_sk
-+>and c_first_sales_date_sk = 4;
-
---- SQL command prepared.
->>
->>explain options 'f' xx;
-
-LC RC OP OPERATOR OPT DESCRIPTION CARD
----- ---- ---- -------------------- -------- -------------------- ---------
-
-3 . 4 root 3.37E+005
-1 2 3 nested_join 3.37E+005
-. . 2 parquet_scan STORE_SALES_PARQUET_ 1.06E+003
-. . 1 parquet_scan CUSTOMER_PARQUET 3.17E+002
-
---- SQL operation complete.
->>
->>log;

From a5530a7228ae08c599817ef65bfd76fe46e37fc6 Mon Sep 17 00:00:00 2001
From: Anoop Sharma
Date: Wed, 18 Apr 2018 17:58:21 +0000
Subject: [PATCH 3/3] added apache license

---
 core/sqf/sql/scripts/ilh_regrinit | 22 ++++++++++++++++++++++
 core/sqf/sql/scripts/regrinit.sql | 21 +++++++++++++++++++++
 2 files changed, 43 insertions(+)

diff --git a/core/sqf/sql/scripts/ilh_regrinit b/core/sqf/sql/scripts/ilh_regrinit
index 22ecdd3a1c..3d7bf4efae 100755
--- a/core/sqf/sql/scripts/ilh_regrinit
+++ b/core/sqf/sql/scripts/ilh_regrinit
@@ -1,3 +1,25 @@
+#!/bin/bash
+# @@@ START COPYRIGHT @@@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# @@@ END COPYRIGHT @@@
+
 echo "executing: sqlci -> regrinit.sql"
 sqlci <