Commit 31536b24 authored by Sergei Petrunia

MDEV-30972: ANALYZE FORMAT=JSON: some time is unaccounted-for in BNL-H join

After MDEV-30830 added block-nl-join.r_unpack_time_ms, it became
apparent that some time in the BNL join operation was unaccounted
for, namely the time spent after unpacking the join buffer record.

Fix this by adding a Gap_time_tracker that tracks the time spent after
unpacking the join buffer record and before the next tracked operation.
The collected time is printed as block-nl-join.r_other_time_ms.

Reviewed by: Monty <monty@mariadb.org>
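
For readers unfamiliar with the Gap_time_tracker mechanism, here is a minimal, self-contained C++ sketch of the accounting idea (the names Exec_tracker_sketch, Gap_tracker_sketch and the free functions below are illustrative stand-ins, not MariaDB's actual API): time that elapses between one tracker's stop_tracking() and the next tracking event is billed to the gap tracker attached to the tracker that stopped last. For BNL-H, that gap is the work done after unpacking a join buffer record, such as checking the join condition, and it is what ends up in r_other_time_ms.

#include <chrono>
#include <cstdio>
#include <thread>

using Clock= std::chrono::steady_clock;

// Collects "other"/gap time (stand-in for Gap_time_tracker).
struct Gap_tracker_sketch
{
  Clock::duration total{0};
};

// Collects an operation's own time (stand-in for Exec_time_tracker).
struct Exec_tracker_sketch
{
  Clock::duration total{0};
  Gap_tracker_sketch *gap= nullptr;   // where post-stop time is billed
};

// "Who stopped last" bookkeeping (the real code keeps this per-THD).
static Gap_tracker_sketch *pending_gap= nullptr;
static Clock::time_point   pending_since;

static void bill_pending_gap(Clock::time_point now)
{
  if (pending_gap)
  {
    pending_gap->total+= now - pending_since;
    pending_gap= nullptr;
  }
}

// Any tracking event first closes an open gap, then starts timing.
static Clock::time_point start_tracking()
{
  Clock::time_point now= Clock::now();
  bill_pending_gap(now);
  return now;
}

// Stop timing; from now until the next tracking event, the elapsed time
// is billed to this tracker's attached gap tracker (if any).
static void stop_tracking(Exec_tracker_sketch *t, Clock::time_point started)
{
  Clock::time_point now= Clock::now();
  bill_pending_gap(now);
  t->total+= now - started;
  if (t->gap)
  {
    pending_gap= t->gap;
    pending_since= now;
  }
}

int main()
{
  Exec_tracker_sketch unpack;   // plays the role of jbuf_unpack_tracker
  Gap_tracker_sketch  other;    // plays the role of jbuf_extra_time_tracker
  Exec_tracker_sketch next_op;  // some later tracked operation
  unpack.gap= &other;           // what set_gap_tracker() does in this patch

  for (int i= 0; i < 3; i++)
  {
    auto t0= start_tracking();          // "unpack a join buffer record"
    std::this_thread::sleep_for(std::chrono::milliseconds(2));
    stop_tracking(&unpack, t0);

    // Work done after unpacking (e.g. evaluating the join condition).
    // Before this patch, this time was not reported anywhere.
    std::this_thread::sleep_for(std::chrono::milliseconds(3));

    auto t1= start_tracking();          // next tracked op closes the gap
    std::this_thread::sleep_for(std::chrono::milliseconds(1));
    stop_tracking(&next_op, t1);
  }

  auto ms= [](Clock::duration d)
  { return (long long) std::chrono::duration_cast<std::chrono::milliseconds>(d).count(); };
  std::printf("r_unpack_time_ms ~ %lld\n", ms(unpack.total));
  std::printf("r_other_time_ms  ~ %lld\n", ms(other.total));
  return 0;
}

Running the sketch, unpack.total accumulates only the "unpack" intervals, while other.total accumulates the otherwise-untracked intervals between stop_tracking() on unpack and the next start_tracking() call, mirroring how r_other_time_ms complements r_unpack_time_ms in the ANALYZE output.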
parent 0269d82d
......@@ -154,6 +154,7 @@ ANALYZE
"r_loops": 20,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 60
}
}
......@@ -198,6 +199,7 @@ ANALYZE
"r_loops": 20,
"r_filtered": 15.83333333,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 60
}
}
......@@ -661,6 +663,7 @@ ANALYZE
"r_loops": 2,
"r_filtered": null,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 0
},
"subqueries": [
......@@ -754,6 +757,7 @@ ANALYZE
"r_loops": 2,
"r_filtered": null,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 0
},
"subqueries": [
......@@ -789,6 +793,7 @@ ANALYZE
"r_loops": 0,
"r_filtered": null,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": null
}
}
......@@ -927,6 +932,7 @@ ANALYZE
"r_loops": 100,
"r_filtered": 0.424285714,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 700
}
}
......@@ -980,6 +986,7 @@ ANALYZE
"r_loops": 100,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 2.97
}
}
......
......@@ -77,6 +77,7 @@ X
"r_loops": 500,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 0.594
}
}
......@@ -87,3 +88,84 @@ cast(json_extract(@out,'$[0]') as DOUBLE) > 0
1
set join_cache_level=@tmp;
drop table t1, t2;
#
# MDEV-30972: ANALYZE FORMAT=JSON: some time is unaccounted-for in BNL-H join
#
create table t1 (
a int,
col1 varchar(100),
col2 varchar(100),
col3 varchar(100)
);
insert into t1 select
seq/100,
concat('col1-', seq),
concat('col1-', seq),
concat('col1-', seq)
from seq_1_to_1000;
create table t2 (
a int,
col1 varchar(100),
col2 varchar(100),
col3 varchar(100)
);
insert into t2 select
seq/100,
concat('col1-', seq),
concat('col1-', seq),
concat('col1-', seq)
from seq_1_to_2000;
set @tmp=@@join_cache_level, join_cache_level=6;
select '$out' as X;
X
{
"query_block": {
"select_id": 1,
"r_loops": 1,
"r_total_time_ms": "REPLACED",
"table": {
"table_name": "t1",
"access_type": "ALL",
"r_loops": 1,
"rows": 1000,
"r_rows": 1000,
"r_table_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": 100,
"attached_condition": "t1.a is not null"
},
"block-nl-join": {
"table": {
"table_name": "t2",
"access_type": "hash_ALL",
"key": "#hash#$hj",
"key_length": "5",
"used_key_parts": ["a"],
"ref": ["test.t1.a"],
"r_loops": 1,
"rows": 2000,
"r_rows": 2000,
"r_table_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"filtered": 100,
"r_filtered": 100
},
"buffer_type": "flat",
"buffer_size": "256Kb",
"join_type": "BNLH",
"attached_condition": "t2.a = t1.a and concat(t1.col1,t1.col2,t1.col3) = concat(t2.col1,t2.col2,t2.col3)",
"r_loops": 1000,
"r_filtered": 1.025630506,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 97.501
}
}
}
set @out=(select json_extract(@js,'$**.block-nl-join.r_other_time_ms'));
select cast(json_extract(@out,'$[0]') as DOUBLE) > 0;
cast(json_extract(@out,'$[0]') as DOUBLE) > 0
1
set join_cache_level=@tmp;
drop table t1, t2;
......@@ -75,3 +75,53 @@ select cast(json_extract(@out,'$[0]') as DOUBLE) > 0;
set join_cache_level=@tmp;
drop table t1, t2;
--echo #
--echo # MDEV-30972: ANALYZE FORMAT=JSON: some time is unaccounted-for in BNL-H join
--echo #
create table t1 (
a int,
col1 varchar(100),
col2 varchar(100),
col3 varchar(100)
);
insert into t1 select
seq/100,
concat('col1-', seq),
concat('col1-', seq),
concat('col1-', seq)
from seq_1_to_1000;
create table t2 (
a int,
col1 varchar(100),
col2 varchar(100),
col3 varchar(100)
);
insert into t2 select
seq/100,
concat('col1-', seq),
concat('col1-', seq),
concat('col1-', seq)
from seq_1_to_2000;
set @tmp=@@join_cache_level, join_cache_level=6;
let $out=`
analyze format=json
select * from t1, t2
where
t1.a=t2.a
and concat(t1.col1, t1.col2, t1.col3)= concat(t2.col1, t2.col2, t2.col3)
`;
--source include/analyze-format.inc
evalp select '$out' as X;
set @out=(select json_extract(@js,'$**.block-nl-join.r_other_time_ms'));
select cast(json_extract(@out,'$[0]') as DOUBLE) > 0;
set join_cache_level=@tmp;
drop table t1, t2;
......@@ -444,6 +444,7 @@ ANALYZE
"r_loops": 10,
"r_filtered": 10,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 10
}
}
......@@ -522,6 +523,7 @@ ANALYZE
"r_loops": 4,
"r_filtered": 21.42857143,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 7
}
}
......
......@@ -337,6 +337,7 @@ ANALYZE
"r_loops": 2,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 2
}
}
......@@ -376,6 +377,7 @@ ANALYZE
"r_loops": 2,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 2
}
}
......@@ -444,6 +446,7 @@ ANALYZE
"r_loops": 2,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 2
}
}
......@@ -483,6 +486,7 @@ ANALYZE
"r_loops": 2,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 2
}
}
......
......@@ -456,6 +456,7 @@ ANALYZE
"r_loops": 3,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 3
}
}
......@@ -495,6 +496,7 @@ ANALYZE
"r_loops": 2,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 3
}
}
......@@ -562,6 +564,7 @@ ANALYZE
"r_loops": 3,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 3
}
}
......@@ -601,6 +604,7 @@ ANALYZE
"r_loops": 2,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 3
}
}
......
......@@ -1547,6 +1547,7 @@ ANALYZE
"r_loops": 10,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 1
}
}
......
......@@ -398,6 +398,7 @@ ANALYZE
"r_loops": 3,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 3
}
}
......@@ -484,6 +485,7 @@ ANALYZE
"r_loops": 3,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 3
}
}
......
......@@ -429,6 +429,7 @@ ANALYZE
"r_loops": 5,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 7
}
}
......@@ -515,6 +516,7 @@ ANALYZE
"r_loops": 5,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 7
}
}
......
......@@ -3688,6 +3688,7 @@ ANALYZE
"r_loops": 3,
"r_filtered": 100,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 1
},
"block-nl-join": {
......@@ -3715,6 +3716,7 @@ ANALYZE
"r_loops": 3,
"r_filtered": 40,
"r_unpack_time_ms": "REPLACED",
"r_other_time_ms": "REPLACED",
"r_effective_rows": 26.66666667
}
}
......
......@@ -63,14 +63,19 @@ class Exec_time_tracker
     if (my_gap_tracker)
       attach_gap_time_tracker(thd, my_gap_tracker, end);
   }
-public:
-  Exec_time_tracker() : count(0), cycles(0), my_gap_tracker(NULL) {}
   /*
-    The time spent between stop_tracking() call on this object and any
-    other time measurement will be billed to this tracker.
+    The time spent after stop_tracking() call on this object and any
+    subsequent time tracking call will be billed to this tracker.
   */
   Gap_time_tracker *my_gap_tracker;
+public:
+  Exec_time_tracker() : count(0), cycles(0), my_gap_tracker(NULL) {}
+
+  void set_gap_tracker(Gap_time_tracker *gap_tracker)
+  {
+    my_gap_tracker= gap_tracker;
+  }
   // interface for collecting time
   void start_tracking(THD *thd)
......
......@@ -1901,6 +1901,9 @@ void Explain_table_access::print_explain_json(Explain_query *query,
writer->add_member("r_unpack_time_ms");
writer->add_double(jbuf_unpack_tracker.get_time_ms());
writer->add_member("r_other_time_ms").
add_double(jbuf_extra_time_tracker.get_time_ms());
/*
effective_rows is average number of matches we got for an incoming
row. The row is stored in the join buffer and then is read
......
......@@ -846,10 +846,15 @@ class Explain_table_access : public Sql_alloc
   /* When using join buffer: Track the reads from join buffer */
   Table_access_tracker jbuf_tracker;
-  /* When using join buffer: time spent unpacking rows from the join buffer */
-  Time_and_counter_tracker jbuf_unpack_tracker;
+  /*
+    Track the time to unpack rows from the join buffer.
+    When using join buffer: time spent after unpacking rows from the join
+    buffer. This will capture the time spent checking the Join Condition:
+    the condition that depends on this table and preceding tables.
+  */
+  Time_and_counter_tracker jbuf_unpack_tracker;
+  Gap_time_tracker jbuf_extra_time_tracker;
   /* When using join buffer: Track the number of incoming record combinations */
   Counter_tracker jbuf_loops_tracker;
......
......@@ -27402,7 +27402,9 @@ bool JOIN_TAB::save_explain_data(Explain_table_access *eta,
   if (thd->lex->analyze_stmt)
   {
     table->file->set_time_tracker(&eta->op_tracker);
-    eta->op_tracker.my_gap_tracker = &eta->extra_time_tracker;
+    eta->op_tracker.set_gap_tracker(&eta->extra_time_tracker);
+    eta->jbuf_unpack_tracker.set_gap_tracker(&eta->jbuf_extra_time_tracker);
   }
   /* No need to save id and select_type here, they are kept in Explain_select */
......