You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
1. create table test_part_table(
word string,
num bigint
)partitioned by(dt string)
STORED AS ORC;
2. CREATE EXTERNAL TABLE test (
word varchar(256) NULL COMMENT "",
num bigint(11) NULL COMMENT "",
dt varchar(256) NULL COMMENT ""
) ENGINE=HIVE
COMMENT "PARTITION BY (dt)"
PROPERTIES (
"database" = "lxy",
"table" = "test_part_table",
"resource" = "hive0",
"hive.metastore.uris" = "thrift://xxx"
);
insert into test_part_table partition(dt) select '1' as word, 2 as num, '20190808' as dt;
alter table test_part_table add if not exists partition(dt='20190807') location '20190807';
Currently, if you run:
hadoop dfs -ls hdfs://172.26.92.154:10000/user/hive/warehouse/lxy.db/test_part_table
it will show only one directory, although you already created the 20190807 partition in the Hive metastore:
hdfs://xxx:10000/user/hive/warehouse/lxy.db/test_part_table/20190808
Restart the StarRocks FE,
then run:
select * from test where dt='20190807';
Expected behavior (Required)
Empty set
Real behavior (Required)
in cli
ERROR 1064 (HY000): get table lxy.test_part_table partition meta info failed.
in fe log
2021-10-15 18:14:18,145 WARN (starrocks-mysql-nio-pool-0|83) [StmtExecutor.execute():454] execute Exception, sql select * from test where dt='20190807'
com.starrocks.sql.common.StarRocksPlannerException: get table lxy.test_part_table partition meta info failed.
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.getTableRowCount(StatisticsCalculator.java:444) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.computeHiveScanNode(StatisticsCalculator.java:220) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.visitLogicalHiveScan(StatisticsCalculator.java:208) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.visitLogicalHiveScan(StatisticsCalculator.java:111) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.operator.logical.LogicalHiveScanOperator.accept(LogicalHiveScanOperator.java:105) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.estimatorStats(StatisticsCalculator.java:130) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.DeriveStatsTask.execute(DeriveStatsTask.java:90) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.SeriallyTaskScheduler.executeTasks(SeriallyTaskScheduler.java:36) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimize(Optimizer.java:150) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.createQueryPlan(StatementPlanner.java:75) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.plan(StatementPlanner.java:46) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:299) [starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:248) [starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:397) [starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:633) [starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:54) [starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener$$Lambda$51/1870280360.run(Unknown Source) [starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_41]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_41]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_41]
StarRocks version (Required)
You can get the StarRocks version by executing the SQL statement `select current_version()`.
1.19
The text was updated successfully, but these errors were encountered:
Steps to reproduce the behavior (Required)
1. create table test_part_table(
word string,
num bigint
)partitioned by(dt string)
STORED AS ORC;
2. CREATE EXTERNAL TABLE test (
word varchar(256) NULL COMMENT "",
num bigint(11) NULL COMMENT "",
dt varchar(256) NULL COMMENT ""
) ENGINE=HIVE
COMMENT "PARTITION BY (dt)"
PROPERTIES (
"database" = "lxy",
"table" = "test_part_table",
"resource" = "hive0",
"hive.metastore.uris" = "thrift://xxx"
);
insert into test_part_table partition(dt) select '1' as word, 2 as num, '20190808' as dt;
alter table test_part_table add if not exists partition(dt='20190807') location '20190807';
Currently, if you run:
hadoop dfs -ls hdfs://172.26.92.154:10000/user/hive/warehouse/lxy.db/test_part_table
it will show only one directory, although you already created the 20190807 partition in the Hive metastore:
hdfs://xxx:10000/user/hive/warehouse/lxy.db/test_part_table/20190808
Restart the StarRocks FE,
then run:
select * from test where dt='20190807';
Expected behavior (Required)
Empty set
Real behavior (Required)
in cli
ERROR 1064 (HY000): get table lxy.test_part_table partition meta info failed.
in fe log
2021-10-15 18:14:18,145 WARN (starrocks-mysql-nio-pool-0|83) [StmtExecutor.execute():454] execute Exception, sql select * from test where dt='20190807'
com.starrocks.sql.common.StarRocksPlannerException: get table lxy.test_part_table partition meta info failed.
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.getTableRowCount(StatisticsCalculator.java:444) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.computeHiveScanNode(StatisticsCalculator.java:220) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.visitLogicalHiveScan(StatisticsCalculator.java:208) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.visitLogicalHiveScan(StatisticsCalculator.java:111) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.operator.logical.LogicalHiveScanOperator.accept(LogicalHiveScanOperator.java:105) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.statistics.StatisticsCalculator.estimatorStats(StatisticsCalculator.java:130) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.DeriveStatsTask.execute(DeriveStatsTask.java:90) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.task.SeriallyTaskScheduler.executeTasks(SeriallyTaskScheduler.java:36) ~[starrocks-fe.jar:?]
at com.starrocks.sql.optimizer.Optimizer.optimize(Optimizer.java:150) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.createQueryPlan(StatementPlanner.java:75) ~[starrocks-fe.jar:?]
at com.starrocks.sql.StatementPlanner.plan(StatementPlanner.java:46) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:299) [starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.handleQuery(ConnectProcessor.java:248) [starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.dispatch(ConnectProcessor.java:397) [starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.processOnce(ConnectProcessor.java:633) [starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener.lambda$handleEvent$0(ReadListener.java:54) [starrocks-fe.jar:?]
at com.starrocks.mysql.nio.ReadListener$$Lambda$51/1870280360.run(Unknown Source) [starrocks-fe.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [?:1.8.0_41]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [?:1.8.0_41]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_41]
StarRocks version (Required)
select current_version()
1.19
The text was updated successfully, but these errors were encountered: