MDEV-8829: Assertion `0' failed in Explain_table_access::tag_to_json
- Add EXPLAIN/ANALYZE FORMAT=JSON handling for a few special cases.
spetrunia committed Sep 24, 2015
1 parent 428f03c · commit 7016621
Showing 5 changed files with 476 additions and 1 deletion.
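The diff shown below is the test-coverage side of the change; the server-side fix lives in the other changed files, which are not expanded here. As a rough illustration only, here is a minimal, self-contained sketch (hypothetical stand-in types, not MariaDB's actual Json_writer or Explain classes) of the shape of the fix: Explain_table_access::tag_to_json() gets explicit cases for the tags exercised by the new tests (const row not found, impossible ON condition, not exists, distinct, index condition pushed to a BKA join), so they no longer fall through to the assert(0) default that MDEV-8829 reports.

```cpp
// Minimal, self-contained sketch of the idea behind the fix (hypothetical
// stand-in types; this is NOT the MariaDB source or its Json_writer API).
#include <cassert>
#include <iostream>
#include <string>

enum explain_extra_tag          // only the tags exercised by the tests below
{
  ET_CONST_ROW_NOT_FOUND,
  ET_IMPOSSIBLE_ON_CONDITION,
  ET_NOT_EXISTS,
  ET_DISTINCT,
  ET_USING_INDEX_CONDITION_BKA
};

struct json_writer              // hypothetical stand-in for a JSON writer
{
  void add_bool_member(const std::string &name)
  { std::cout << "\"" << name << "\": true\n"; }
  void add_str_member(const std::string &name, const std::string &val)
  { std::cout << "\"" << name << "\": \"" << val << "\"\n"; }
};

void tag_to_json(json_writer *writer, explain_extra_tag tag,
                 const std::string &pushed_index_cond= "")
{
  switch (tag) {
  case ET_CONST_ROW_NOT_FOUND:          // "const row not found"
    writer->add_bool_member("const_row_not_found");
    break;
  case ET_IMPOSSIBLE_ON_CONDITION:      // "Impossible ON condition"
    writer->add_bool_member("impossible_on_condition");
    break;
  case ET_NOT_EXISTS:                   // "Not exists"
    writer->add_bool_member("not_exists");
    break;
  case ET_DISTINCT:                     // "Distinct"
    writer->add_bool_member("distinct");
    break;
  case ET_USING_INDEX_CONDITION_BKA:    // "Using index condition(BKA)"
    writer->add_str_member("index_condition_bka", pushed_index_cond);
    break;
  default:
    assert(0);  // an unhandled tag here is exactly the MDEV-8829 failure
  }
}

int main()
{
  json_writer w;
  tag_to_json(&w, ET_CONST_ROW_NOT_FOUND);
  tag_to_json(&w, ET_USING_INDEX_CONDITION_BKA, "(t4.b + 1) <= (t3.b + 1)");
  return 0;
}
```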
349 changes: 349 additions & 0 deletions mysql-test/r/explain_json.result
@@ -1194,3 +1194,352 @@ EXPLAIN
}
}
drop table t0, t1, t2;
#
# MDEV-8829: Assertion `0' failed in Explain_table_access::tag_to_json
#
# Check ET_CONST_ROW_NOT_FOUND
create table t1 (i int) engine=myisam;
explain
select * from t1;
id select_type table type possible_keys key key_len ref rows Extra
1 SIMPLE t1 system NULL NULL NULL NULL 0 const row not found
explain format=json
select * from t1;
EXPLAIN
{
"query_block": {
"select_id": 1,
"table": {
"table_name": "t1",
"access_type": "system",
"rows": 0,
"filtered": 0,
"const_row_not_found": true
}
}
}
analyze format=json
select * from t1;
ANALYZE
{
"query_block": {
"select_id": 1,
"table": {
"message": "no matching row in const table"
}
}
}
drop table t1;
# Check ET_IMPOSSIBLE_ON_CONDITION
create table t1 (a int);
create table t2 (pk int primary key);
insert into t1 values (1),(2);
insert into t2 values (1),(2);
explain
select * from t1 left join t2 on t2.pk > 10 and t2.pk < 0;
id select_type table type possible_keys key key_len ref rows Extra
1 SIMPLE t2 const PRIMARY NULL NULL NULL 1 Impossible ON condition
1 SIMPLE t1 ALL NULL NULL NULL NULL 2
explain format=json
select * from t1 left join t2 on t2.pk > 10 and t2.pk < 0;
EXPLAIN
{
"query_block": {
"select_id": 1,
"const_condition": "1",
"table": {
"table_name": "t2",
"access_type": "const",
"possible_keys": ["PRIMARY"],
"rows": 1,
"filtered": 100,
"impossible_on_condition": true
},
"table": {
"table_name": "t1",
"access_type": "ALL",
"rows": 2,
"filtered": 100
}
}
}
analyze format=json
select * from t1 left join t2 on t2.pk > 10 and t2.pk < 0;
ANALYZE
{
"query_block": {
"select_id": 1,
"r_loops": 1,
"r_total_time_ms": "REPLACED",
"const_condition": "1",
"table": {
"table_name": "t2",
"access_type": "const",
"possible_keys": ["PRIMARY"],
"r_loops": 0,
"rows": 1,
"r_rows": null,
"filtered": 100,
"r_filtered": null,
"impossible_on_condition": true
},
"table": {
"table_name": "t1",
"access_type": "ALL",
"r_loops": 1,
"rows": 2,
"r_rows": 2,
"r_total_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": 100
}
}
}
# Check ET_NOT_EXISTS:
explain
select * from t1 left join t2 on t2.pk=t1.a where t2.pk is null;
id select_type table type possible_keys key key_len ref rows Extra
1 SIMPLE t1 ALL NULL NULL NULL NULL 2
1 SIMPLE t2 eq_ref PRIMARY PRIMARY 4 test.t1.a 1 Using where; Using index; Not exists
explain format=json
select * from t1 left join t2 on t2.pk=t1.a where t2.pk is null;
EXPLAIN
{
"query_block": {
"select_id": 1,
"table": {
"table_name": "t1",
"access_type": "ALL",
"rows": 2,
"filtered": 100
},
"table": {
"table_name": "t2",
"access_type": "eq_ref",
"possible_keys": ["PRIMARY"],
"key": "PRIMARY",
"key_length": "4",
"used_key_parts": ["pk"],
"ref": ["test.t1.a"],
"rows": 1,
"filtered": 100,
"attached_condition": "(trigcond(isnull(t2.pk)) and trigcond(trigcond((t1.a is not null))))",
"using_index": true,
"not_exists": true
}
}
}
analyze format=json
select * from t1 left join t2 on t2.pk=t1.a where t2.pk is null;
ANALYZE
{
"query_block": {
"select_id": 1,
"r_loops": 1,
"r_total_time_ms": "REPLACED",
"table": {
"table_name": "t1",
"access_type": "ALL",
"r_loops": 1,
"rows": 2,
"r_rows": 2,
"r_total_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": 100
},
"table": {
"table_name": "t2",
"access_type": "eq_ref",
"possible_keys": ["PRIMARY"],
"key": "PRIMARY",
"key_length": "4",
"used_key_parts": ["pk"],
"ref": ["test.t1.a"],
"r_loops": 2,
"rows": 1,
"r_rows": 1,
"r_total_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": 100,
"attached_condition": "(trigcond(isnull(t2.pk)) and trigcond(trigcond((t1.a is not null))))",
"using_index": true,
"not_exists": true
}
}
}
# Check ET_DISTINCT
explain
select distinct t1.a from t1 join t2 on t2.pk=t1.a;
id select_type table type possible_keys key key_len ref rows Extra
1 SIMPLE t1 ALL NULL NULL NULL NULL 2 Using where; Using temporary
1 SIMPLE t2 eq_ref PRIMARY PRIMARY 4 test.t1.a 1 Using index; Distinct
explain format=json
select distinct t1.a from t1 join t2 on t2.pk=t1.a;
EXPLAIN
{
"query_block": {
"select_id": 1,
"temporary_table": {
"function": "buffer",
"table": {
"table_name": "t1",
"access_type": "ALL",
"rows": 2,
"filtered": 100,
"attached_condition": "(t1.a is not null)"
},
"table": {
"table_name": "t2",
"access_type": "eq_ref",
"possible_keys": ["PRIMARY"],
"key": "PRIMARY",
"key_length": "4",
"used_key_parts": ["pk"],
"ref": ["test.t1.a"],
"rows": 1,
"filtered": 100,
"using_index": true,
"distinct": true
}
}
}
}
analyze format=json
select distinct t1.a from t1 join t2 on t2.pk=t1.a;
ANALYZE
{
"query_block": {
"select_id": 1,
"r_loops": 1,
"r_total_time_ms": "REPLACED",
"temporary_table": {
"table": {
"table_name": "t1",
"access_type": "ALL",
"r_loops": 1,
"rows": 2,
"r_rows": 2,
"r_total_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": 100,
"attached_condition": "(t1.a is not null)"
},
"table": {
"table_name": "t2",
"access_type": "eq_ref",
"possible_keys": ["PRIMARY"],
"key": "PRIMARY",
"key_length": "4",
"used_key_parts": ["pk"],
"ref": ["test.t1.a"],
"r_loops": 2,
"rows": 1,
"r_rows": 1,
"r_total_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": 100,
"using_index": true,
"distinct": true
}
}
}
}
drop table t1,t2;
# Check ET_USING_INDEX_CONDITION_BKA
create table t1(a int);
insert into t1 values (0),(1),(2),(3),(4),(5),(6),(7),(8),(9);
create table t2(a int);
insert into t2 select A.a + B.a* 10 + C.a * 100 from t1 A, t1 B, t1 C;
create table t3(a int, b int);
insert into t3 select a,a from t1;
create table t4(a int, b int, c int, filler char(100), key (a,b));
insert into t4 select a,a,a, 'filler-data' from t2;
set @tmp_optimizer_switch=@@optimizer_switch;
set @tmp_join_cache_level=@@join_cache_level;
set optimizer_switch='mrr=on';
set join_cache_level=6;
explain
select * from t3,t4 where t3.a=t4.a and (t4.b+1 <= t3.b+1);
id select_type table type possible_keys key key_len ref rows Extra
1 SIMPLE t3 ALL NULL NULL NULL NULL 10 Using where
1 SIMPLE t4 ref a a 5 test.t3.a 1 Using index condition(BKA); Using join buffer (flat, BKA join); Rowid-ordered scan
explain format=json
select * from t3,t4 where t3.a=t4.a and (t4.b+1 <= t3.b+1);
EXPLAIN
{
"query_block": {
"select_id": 1,
"table": {
"table_name": "t3",
"access_type": "ALL",
"rows": 10,
"filtered": 100,
"attached_condition": "(t3.a is not null)"
},
"block-nl-join": {
"table": {
"table_name": "t4",
"access_type": "ref",
"possible_keys": ["a"],
"key": "a",
"key_length": "5",
"used_key_parts": ["a"],
"ref": ["test.t3.a"],
"rows": 1,
"filtered": 100,
"index_condition_bka": "((t4.b + 1) <= (t3.b + 1))"
},
"buffer_type": "flat",
"buffer_size": "256Kb",
"join_type": "BKA",
"mrr_type": "; Rowid-ordered scan"
}
}
}
analyze format=json
select * from t3,t4 where t3.a=t4.a and (t4.b+1 <= t3.b+1);
ANALYZE
{
"query_block": {
"select_id": 1,
"r_loops": 1,
"r_total_time_ms": "REPLACED",
"table": {
"table_name": "t3",
"access_type": "ALL",
"r_loops": 1,
"rows": 10,
"r_rows": 10,
"r_total_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": 100,
"attached_condition": "(t3.a is not null)"
},
"block-nl-join": {
"table": {
"table_name": "t4",
"access_type": "ref",
"possible_keys": ["a"],
"key": "a",
"key_length": "5",
"used_key_parts": ["a"],
"ref": ["test.t3.a"],
"r_loops": 0,
"rows": 1,
"r_rows": null,
"r_total_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": null,
"index_condition_bka": "((t4.b + 1) <= (t3.b + 1))"
},
"buffer_type": "flat",
"buffer_size": "256Kb",
"join_type": "BKA",
"mrr_type": "; Rowid-ordered scan",
"r_filtered": 100
}
}
}
set optimizer_switch=@tmp_optimizer_switch;
set join_cache_level=@tmp_join_cache_level;
drop table t1,t2,t3,t4;
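One detail visible in the ANALYZE outputs above: tables the executor never actually reads report an r_loops of 0 and null for r_rows and r_filtered (t2 under the impossible ON condition, and t4 on the BKA path). A small self-contained sketch of that convention follows, using a hypothetical stats struct rather than MariaDB's tracker classes.

```cpp
// Hypothetical stats struct (not MariaDB's tracker classes) illustrating the
// convention seen above: when a table was never read (r_loops == 0), the
// runtime columns r_rows and r_filtered are printed as null rather than 0.
#include <cstdio>
#include <cstdint>

struct table_access_stats
{
  uint64_t loops= 0;            // r_loops: how many times the table was read
  uint64_t rows_read= 0;        // total rows fetched across all loops
  uint64_t rows_after_where= 0; // rows surviving the attached condition

  void print_r_rows() const
  {
    if (loops == 0)
      std::printf("\"r_rows\": null,\n");               // never executed
    else
      std::printf("\"r_rows\": %g,\n", (double) rows_read / loops);
  }
  void print_r_filtered() const
  {
    if (loops == 0 || rows_read == 0)                   // guard: nothing read
      std::printf("\"r_filtered\": null,\n");
    else
      std::printf("\"r_filtered\": %g,\n",
                  100.0 * rows_after_where / rows_read);
  }
};

int main()
{
  table_access_stats never_read;           // e.g. t2 under the impossible ON
  never_read.print_r_rows();               // -> "r_rows": null
  never_read.print_r_filtered();           // -> "r_filtered": null

  table_access_stats scanned;              // e.g. t1: one pass over 2 rows
  scanned.loops= 1;
  scanned.rows_read= 2;
  scanned.rows_after_where= 2;
  scanned.print_r_rows();                  // -> "r_rows": 2
  scanned.print_r_filtered();              // -> "r_filtered": 100
  return 0;
}
```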