Commit 6d93260e authored Dec 03, 2020 by Varun Gupta

Refactoring

parent b79dcec6

Showing 13 changed files with 399 additions and 271 deletions
mysql-test/main/unique_packed.result   +14   -0
mysql-test/main/unique_packed.test     +12   -0
sql/field.cc                           +13   -1
sql/filesort.cc                        +101  -38
sql/item_jsonfunc.cc                   +1    -1
sql/item_sum.cc                        +103  -84
sql/item_sum.h                         +10   -6
sql/sql_class.h                        +6    -6
sql/sql_sort.h                         +1    -22
sql/sql_statistics.cc                  +26   -10
sql/sql_statistics.h                   +1    -1
sql/uniques.cc                         +37   -52
sql/uniques.h                          +74   -50
mysql-test/main/unique_packed.result
...
...
@@ -597,3 +597,17 @@ SELECT JSON_ARRAYAGG(DISTINCT a) FROM t1;
JSON_ARRAYAGG(DISTINCT a)
[null,"a","b","c"]
DROP TABLE t1;
#
# Multiple arguments to GROUP_CONCAT
#
CREATE TABLE t1 (a VARCHAR(10) NOT NULL, b INT);
INSERT INTO t1 VALUES ('a',259),('a',11);
INSERT INTO t1 VALUES ('a',259),('a',11);
INSERT INTO t1 VALUES ('b',259),('c',11);
SELECT GROUP_CONCAT(DISTINCT a, b) FROM t1;
GROUP_CONCAT(DISTINCT a, b)
a11,a259,b259,c11
SELECT GROUP_CONCAT(DISTINCT a, b+1) FROM t1;
GROUP_CONCAT(DISTINCT a, b+1)
a12,a260,b260,c12
DROP TABLE t1;
mysql-test/main/unique_packed.test
...
...
@@ -250,3 +250,15 @@ INSERT INTO t1 VALUES ('a',2),('b',2),('c',1),('a',3),('b',4),('c',4), (NULL, 5)
SELECT GROUP_CONCAT(DISTINCT a) FROM t1;
SELECT JSON_ARRAYAGG(DISTINCT a) FROM t1;
DROP TABLE t1;

--echo #
--echo # Multiple arguments to GROUP_CONCAT
--echo #

CREATE TABLE t1 (a VARCHAR(10) NOT NULL, b INT);
INSERT INTO t1 VALUES ('a',259),('a',11);
INSERT INTO t1 VALUES ('a',259),('a',11);
INSERT INTO t1 VALUES ('b',259),('c',11);
SELECT GROUP_CONCAT(DISTINCT a, b) FROM t1;
SELECT GROUP_CONCAT(DISTINCT a, b+1) FROM t1;
DROP TABLE t1;
sql/field.cc
...
...
@@ -1065,8 +1065,20 @@ Field::make_packed_sort_key_part(uchar *buff,
/*
  TODO varun: need to look what to do here
  @brief
    Create a packed sort key part

  @param  buff        buffer where values are written
  @param  sort_field  sort column structure

  @details
    This function stores the original values for the fixed size columns and
    does not convert them to mem-comparable images.

  @retval
    length of the bytes written, does not include the NULL bytes
*/

uint Field::make_packed_key_part(uchar *buff, const SORT_FIELD_ATTR *sort_field)
{
...
...
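As an aside, the packing idea behind make_packed_key_part, copying the original value bytes behind a NULL indicator and a length prefix instead of building a mem-comparable image, can be sketched standalone as follows. This is illustrative only; the helper name and layout details are assumptions, not the server's code.

#include <cstdint>
#include <cstring>
#include <cstdio>

// Pack one key part: [null byte][2-byte length][original value bytes].
// Returns the number of bytes written after the NULL byte, mirroring the
// "does not include the NULL bytes" convention described above.
static unsigned pack_key_part(unsigned char *buff, const char *val,
                              uint16_t val_len, bool is_null)
{
  *buff++ = is_null ? 0 : 1;                          // NULL indicator byte
  if (is_null)
    return 0;                                         // nothing besides the NULL byte
  std::memcpy(buff, &val_len, sizeof(val_len));       // length prefix
  std::memcpy(buff + sizeof(val_len), val, val_len);  // original value, unchanged
  return sizeof(val_len) + val_len;
}

int main()
{
  unsigned char buff[64];
  unsigned written = pack_key_part(buff, "abc", 3, false);
  std::printf("bytes written (excluding NULL byte): %u\n", written);  // prints 5
  return 0;
}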
sql/filesort.cc
...
...
@@ -1872,8 +1872,8 @@ bool merge_buffers(Sort_param *param, IO_CACHE *from_file,
      Store it also in 'to_file'.
    */
    buffpek= (Merge_chunk*) queue_top(&queue);
-   rec_length= param->get_record_length_for_unique(buffpek->current_key(),
-                                                   size_of_dupl_count);
+   rec_length= param->get_key_length_for_unique(buffpek->current_key(),
+                                                size_of_dupl_count);
    DBUG_ASSERT(rec_length <= param->sort_length);
...
...
@@ -1920,8 +1920,8 @@ bool merge_buffers(Sort_param *param, IO_CACHE *from_file,
          memcpy(&cnt, buffpek->current_key() + dupl_count_ofs, sizeof(cnt));
          dupl_count+= cnt;
        }
-       rec_length= param->get_record_length_for_unique(buffpek->current_key(),
-                                                       size_of_dupl_count);
+       rec_length= param->get_key_length_for_unique(buffpek->current_key(),
+                                                    size_of_dupl_count);
        goto skip_duplicate;
      }
...
...
@@ -1931,8 +1931,8 @@ bool merge_buffers(Sort_param *param, IO_CACHE *from_file,
          uint dupl_count_ofs= rec_length - sizeof(element_count);
          memcpy(unique_buff + dupl_count_ofs, &dupl_count, sizeof(dupl_count));
        }
-       rec_length= param->get_record_length_for_unique(unique_buff,
-                                                       size_of_dupl_count);
+       rec_length= param->get_key_length_for_unique(unique_buff,
+                                                    size_of_dupl_count);
        res_length= rec_length - size_of_dupl_count;
        src= unique_buff;
      }
...
...
@@ -1961,8 +1961,8 @@ bool merge_buffers(Sort_param *param, IO_CACHE *from_file,
      }
      if (cmp)
      {
-       rec_length= param->get_record_length_for_unique(buffpek->current_key(),
-                                                       size_of_dupl_count);
+       rec_length= param->get_key_length_for_unique(buffpek->current_key(),
+                                                    size_of_dupl_count);
        DBUG_ASSERT(rec_length <= param->sort_length);
        memcpy(unique_buff, buffpek->current_key(), rec_length);
        if (min_dupl_count)
...
...
@@ -2057,8 +2057,7 @@ bool merge_buffers(Sort_param *param, IO_CACHE *from_file,
    uchar *src= buffpek->current_key();
    if (cmp)
    {
-     rec_length= param->get_record_length_for_unique(src, size_of_dupl_count);
+     rec_length= param->get_key_length_for_unique(src, size_of_dupl_count);
      res_length= rec_length - size_of_dupl_count;
      if (check_dupl_count)
      {
...
...
@@ -2575,19 +2574,20 @@ void Sort_param::try_to_pack_sortkeys()
/*
  @brief
-   Return the length of the record in the Unique tree
+   Return the length of the key inserted in the Unique tree

  @param
    to                   key value
    size_of_dupl_count   if min_dupl_count > 0, then the record length
                         needs size_of_dupl_count to store the counter
*/

-uint32 Sort_param::get_record_length_for_unique(uchar *to,
-                                                uint size_of_dupl_count)
+uint32 Sort_param::get_key_length_for_unique(uchar *key,
+                                             uint size_of_dupl_count)
{
  if (!using_packed_sortkeys())
    return rec_length;
-  return Variable_size_keys_descriptor::read_packed_length(to) +
+  return Variable_size_keys_descriptor::read_packed_length(key) +
         size_of_dupl_count;
}
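A minimal standalone illustration of the computation above, assuming (as the packed key format used elsewhere in this commit does) that a packed key begins with a 4-byte total-length field. The helper below is hypothetical and does not reuse the server's read_packed_length.

#include <cstdint>
#include <cstring>
#include <cassert>

// Read the leading 4-byte total-length field of a packed key and add the
// optional space reserved for a duplicate counter.
static uint32_t key_length_for_unique(const unsigned char *key,
                                      uint32_t size_of_dupl_count)
{
  uint32_t total;
  std::memcpy(&total, key, sizeof(total));   // <total_key_length> prefix
  return total + size_of_dupl_count;
}

int main()
{
  unsigned char key[16] = {0};
  uint32_t stored = 9;                       // e.g. 4 length bytes + 5 value bytes
  std::memcpy(key, &stored, sizeof(stored));
  assert(key_length_for_unique(key, 4) == 13);
  return 0;
}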
...
...
@@ -2802,49 +2802,68 @@ bool SORT_FIELD_ATTR::check_if_packing_possible(THD *thd) const
/*
  @brief
-   Setup the SORT_FIELD structure
+   Setup the SORT_FIELD structure for a key part of a variable size key

  @param
    fld          field structure
-   with_suffix  TRUE if length bytes needed to store the length
-                for binary charset

- @note
+ @notes
    Currently used only by Unique object
    TODO varun: we can refactor the code for filesort to use this function.
*/

-void SORT_FIELD::setup(Field *fld, bool with_suffix)
+void SORT_FIELD::setup_key_part_for_variable_size_key(Field *fld)
{
  field= fld;
  item= NULL;
  reverse= false;
-  SORT_FIELD_ATTR::setup(fld, with_suffix);
+  SORT_FIELD_ATTR::setup_key_part_for_variable_size_key(fld);
}


-void SORT_FIELD::setup(Item *item_arg, bool with_suffix)
+/*
+  @brief
+    Setup the SORT_FIELD structure for a key part of a variable size key
+
+  @param
+    fld   Item structure
+
+  @notes
+    Currently used only by Unique object
+*/
+
+void SORT_FIELD::setup_key_part_for_variable_size_key(Item *item_arg)
{
  Field *fld= item_arg->get_tmp_table_field();
  DBUG_ASSERT(fld);
  item= item_arg;
  field= NULL;
  reverse= false;
-  SORT_FIELD_ATTR::setup(fld, with_suffix);
+  SORT_FIELD_ATTR::setup_key_part_for_variable_size_key(fld);
}


+/*
+  @brief
+    Setup the SORT_FIELD structure for a field of a fixed size key
+
+  @param
+    fld   field structure
+
+  @note
+    Currently used only by Unique object
+*/
+
-void SORT_FIELD::setup_for_fixed_size_keys(Field *fld)
+void SORT_FIELD::setup_key_part_for_fixed_size_key(Field *fld)
{
  field= fld;
  item= NULL;
  reverse= false;
-  SORT_FIELD_ATTR::setup_for_fixed_size_keys(fld);
+  SORT_FIELD_ATTR::setup_key_part_for_fixed_size_key(fld);
}


-void SORT_FIELD_ATTR::setup_for_fixed_size_keys(Field *field)
+void SORT_FIELD_ATTR::setup_key_part_for_fixed_size_key(Field *field)
{
  original_length= length= field->pack_length();
  cs= field->charset();
...
...
@@ -2854,20 +2873,17 @@ void SORT_FIELD_ATTR::setup_for_fixed_size_keys(Field *field)
  length_bytes= 0;
}


-void SORT_FIELD_ATTR::setup(Field *fld, bool with_suffix)
-{
-  original_length= length= (with_suffix ? fld->sort_length() :
-                                          fld->sort_length_without_suffix());
+void SORT_FIELD_ATTR::setup_key_part_for_variable_size_key(Field *fld)
+{
+  original_length= length= fld->sort_length_without_suffix();
  cs= fld->sort_charset();
-  suffix_length= with_suffix ? fld->sort_suffix_length() : 0;
+  suffix_length= 0;
  type= fld->is_packable() ?
        SORT_FIELD_ATTR::VARIABLE_SIZE :
        SORT_FIELD_ATTR::FIXED_SIZE;
  maybe_null= fld->maybe_null();
  length_bytes= is_variable_sized() ? number_storage_requirement(length) : 0;
}
...
...
@@ -2883,7 +2899,7 @@ qsort2_cmp get_packed_keys_compare_ptr()
/*
  @brief
-   Compare nullability of 2 keys
+   Compare null-ability of 2 keys

  @param
    a   key to be compared
...
...
@@ -2965,8 +2981,31 @@ int SORT_FIELD_ATTR::compare_packed_varstrings(uchar *a, size_t *a_len,
}


/*
  @brief
    Compare two packed varstrings

  @param
    a   key to be compared
    b   key to be compared

  @details
    This function compares packed values of two keys with a collation specific
    comparison function.

  @notes
    This function basically does the same work as compare_packed_varstring,
    the only difference being that this function is invoked when the key
    has only one key part. This is currently used by Unique only, as most
    of the cases where Unique is used involve one key component.

  @retval
    >0   key a greater than b
    =0   key a equal to b
    <0   key a less than b
*/

-int SORT_FIELD_ATTR::compare_packed_varstrings_for_single_arg(uchar *a, uchar *b)
+int SORT_FIELD_ATTR::compare_packed_varstrings(uchar *a, uchar *b)
{
  size_t a_length, b_length;
  if (maybe_null)
...
...
@@ -3018,6 +3057,30 @@ int SORT_FIELD_ATTR::compare_packed_fixed_size_vals(uchar *a, size_t *a_len,
}


/*
  @brief
    Comparison function to compare fixed size key parts via Field::cmp

  @param
    a      key for comparison
    b      key for comparison
    a_len  [OUT] length of the value for the key part in key a
    b_len  [OUT] length of the value for the key part in key b

  @details
    A value comparison function that has a signature that's suitable for
    comparing packed values, but actually compares fixed-size values with
    Field::cmp.

  @notes
    This is used for ordering fixed-size columns when the keys are added
    to the Unique tree.

  @retval
    >0   key a greater than b
    =0   key a equal to b
    <0   key a less than b
*/

int SORT_FIELD::compare_fixed_size_vals(uchar *a, size_t *a_len,
                                        uchar *b, size_t *b_len)
{
...
...
@@ -3115,7 +3178,7 @@ int Sort_keys::compare_keys(uchar *a, uchar *b)
/*
  @brief
-   Compare two packed sort keys with a single keypart
+   Compare two packed sort keys with a single key part

  @retval
    >0   key a greater than b
...
...
@@ -3126,7 +3189,7 @@ int Sort_keys::compare_keys_for_single_arg(uchar *a, uchar *b)
{
  SORT_FIELD *sort_field= begin();

-  return sort_field->compare_packed_varstrings_for_single_arg(a, b);
+  return sort_field->compare_packed_varstrings(a, b);
}
...
...
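Before moving to the next file, here is a self-contained sketch of what comparing two packed varstrings with a single key part boils down to: strip the NULL byte, read each side's length prefix, then compare the value bytes. Plain memcmp stands in for the collation-specific comparison used by the server, and all names below are made up.

#include <cstdint>
#include <cstring>

// Each packed value: [1-byte null flag][2-byte length][value bytes...]
static int compare_packed_single(const unsigned char *a, const unsigned char *b)
{
  bool a_null = a[0] == 0, b_null = b[0] == 0;
  if (a_null || b_null)                            // NULL sorts first
    return a_null == b_null ? 0 : (a_null ? -1 : 1);

  uint16_t a_len, b_len;
  std::memcpy(&a_len, a + 1, sizeof(a_len));
  std::memcpy(&b_len, b + 1, sizeof(b_len));

  size_t common = a_len < b_len ? a_len : b_len;
  int cmp = std::memcmp(a + 3, b + 3, common);     // stand-in for collation compare
  if (cmp)
    return cmp;
  return a_len < b_len ? -1 : (a_len > b_len ? 1 : 0);
}

int main()
{
  unsigned char x[16], y[16];
  uint16_t len = 3;
  x[0] = 1; std::memcpy(x + 1, &len, 2); std::memcpy(x + 3, "abc", 3);
  y[0] = 1; std::memcpy(y + 1, &len, 2); std::memcpy(y + 3, "abd", 3);
  return compare_packed_single(x, y) < 0 ? 0 : 1;   // "abc" < "abd"
}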
sql/item_jsonfunc.cc
...
...
@@ -1504,7 +1504,7 @@ static bool append_json_value_from_field(String *str,
  @param
    str      buffer to write the value
-   item     JSON_ARRAYAGG item
+   item     argument to JSON_ARRAYAGG item
    field    field whose value needs to be appended
    tmp_val  temp buffer
...
...
sql/item_sum.cc
...
...
@@ -982,11 +982,12 @@ int Aggregator_distinct::insert_record_to_unique()
{
  if (tree->is_variable_sized())
  {
-   uint packed_length;
-   if ((packed_length= tree->get_descriptor()->make_record(true)) == 0)
+   uchar *rec_ptr;
+   Descriptor *descriptor= tree->get_descriptor();
+   if ((rec_ptr= descriptor->make_record(true)) == NULL)
      return -1; // NULL value
-   DBUG_ASSERT(packed_length <= tree->get_size());
-   return tree->unique_add(tree->get_descriptor()->get_rec_ptr());
+   DBUG_ASSERT(descriptor->get_length_of_key(rec_ptr) <= tree->get_size());
+   return tree->unique_add(rec_ptr);
  }
  copy_fields(tmp_table_param);
...
...
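The shape of this change, where make_record() now returns the record pointer and signals a rejected NULL value with NULL instead of returning a length with 0 as the sentinel, can be seen in isolation in the toy sketch below (hypothetical types, not the server's Descriptor or Unique API).

#include <cstdio>

struct ToyDescriptor
{
  unsigned char buf[16];
  // Returns the encoded record, or nullptr when the value is NULL and excluded.
  unsigned char *make_record(bool value_is_null)
  {
    if (value_is_null)
      return nullptr;
    buf[0] = 42;                // pretend-encoded key
    return buf;
  }
};

static int insert_to_tree(ToyDescriptor &d, bool value_is_null)
{
  unsigned char *rec = d.make_record(value_is_null);
  if (rec == nullptr)
    return -1;                  // NULL value, record rejected
  // ... a real caller would hand rec to unique_add() here ...
  return 0;
}

int main()
{
  ToyDescriptor d;
  std::printf("%d %d\n", insert_to_tree(d, false), insert_to_tree(d, true)); // 0 -1
  return 0;
}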
@@ -3623,18 +3624,13 @@ int group_concat_key_cmp_with_distinct_with_nulls(void* arg,
/**
  Compares the packed values for fields in expr list of GROUP_CONCAT.

  @note
    GROUP_CONCAT([DISTINCT] expr [,expr ...]
                 [ORDER BY {unsigned_integer | col_name | expr}
                  [ASC | DESC] [,col_name ...]]
                 [SEPARATOR str_val])

  @return
  @retval -1 : key1 < key2
  @retval  0 : key1 = key2
  @retval  1 : key1 > key2
*/

int group_concat_packed_key_cmp_with_distinct(void *arg,
                                              const void *a_ptr,
                                              const void *b_ptr)
...
...
@@ -4640,9 +4636,12 @@ String* Item_func_group_concat::val_str(String* str)
/*
  @brief
    Get the comparator function for DISTINCT clause

  @param packed  TRUE if the record is stored in a packed format
*/

qsort_cmp2
Item_func_group_concat::get_comparator_function_for_distinct(bool packed)
{
  return packed ?
         group_concat_packed_key_cmp_with_distinct :
...
...
@@ -4676,11 +4675,9 @@ qsort_cmp2 Item_func_group_concat::get_comparator_function_for_order_by()
uchar* Item_func_group_concat::get_record_pointer()
{
-  return is_distinct_packed() ?
-         unique_filter->get_descriptor()->get_rec_ptr() :
-         (skip_nulls() ?
+  return skip_nulls() ?
          table->record[0] + table->s->null_bytes :
-          table->record[0]);
+         table->record[0];
}
...
...
@@ -4731,9 +4728,8 @@ bool Item_func_group_concat::is_distinct_packed()
bool Item_func_group_concat::is_packing_allowed(uint *total_length)
{
  /*
    TODO varun:
    Currently Unique is not packed if ORDER BY clause is used
-   This needs to be implemented when MDEV-22089 is fixed
+   This is a limitation as of now.
  */
  if (!distinct || arg_count_order)
    return false;
...
...
@@ -4749,8 +4745,11 @@ bool Item_func_group_concat::is_packing_allowed(uint* total_length)
  @param  table         Table structure
  @total_length  [OUT]  max length of the packed key (takes into account
                        the length bytes also)
+
+ @retval
+   TRUE   packing is allowed
+   FALSE  otherwise
*/

bool Item_sum::is_packing_allowed(TABLE *table, uint *total_length)
{
...
@@ -4803,7 +4802,22 @@ bool Item_sum::is_packing_allowed(TABLE *table, uint* total_length)
/*
  @brief
-   Get unique instance to filter out duplicate for AGG_FUNC(DISTINCT col....)
+   Get unique instance to filter out duplicates
+
+ @param
+   comp_func                compare function
+   comp_func_fixed_arg      arg passed to the comparison function
+   size_arg                 max length of the key
+   max_in_memory_size_arg   max memory available for Unique
+   min_dupl_count_arg       > 0, the count for each value needs
+                            to be stored also
+   allow_packing            TRUE: variable size keys are allowed
+   number_of_args           number of args involved in DISTINCT
+
+ @retval
+   NOT NULL   instance of Unique class returned
+   NULL       ERROR
*/

Unique_impl *
Item_sum::get_unique(qsort_cmp2 comp_func, void *comp_func_fixed_arg,
...
...
@@ -4814,19 +4828,9 @@ Item_sum::get_unique(qsort_cmp2 comp_func, void *comp_func_fixed_arg,
  Descriptor *desc;

  if (allow_packing)
-  {
-    if (number_of_args == 1)
-      desc= new Variable_size_keys_simple(size_arg);
-    else
-      desc= new Variable_size_composite_key_desc(size_arg);
-  }
+    desc= get_descriptor_for_variable_size_keys(number_of_args, size_arg);
  else
-  {
-    if (number_of_args == 1)
-      desc= new Fixed_size_keys_descriptor(size_arg);
-    else
-      desc= new Fixed_size_composite_keys_descriptor(size_arg);
-  }
+    desc= get_descriptor_for_fixed_size_keys(number_of_args, size_arg);

  if (!desc)
    return NULL;
...
...
@@ -4835,39 +4839,6 @@ Item_sum::get_unique(qsort_cmp2 comp_func, void *comp_func_fixed_arg,
}


-Unique_impl *
-Item_func_group_concat::get_unique(qsort_cmp2 comp_func,
-                                   void *comp_func_fixed_arg,
-                                   uint size_arg,
-                                   size_t max_in_memory_size_arg,
-                                   uint min_dupl_count_arg,
-                                   bool allow_packing,
-                                   uint number_of_args)
-{
-  Descriptor *desc;
-  if (allow_packing)
-  {
-    if (number_of_args == 1)
-      desc= new Variable_size_keys_simple(size_arg);
-    else
-      desc= new Variable_size_composite_key_desc_for_gconcat(size_arg);
-  }
-  else
-  {
-    if (number_of_args == 1 && !skip_nulls())
-      desc= new Fixed_size_keys_descriptor_with_nulls(size_arg);
-    else
-      desc= new Fixed_size_keys_for_group_concat(size_arg);
-  }
-  if (!desc)
-    return NULL;
-  return new Unique_impl(comp_func, comp_func_fixed_arg, size_arg,
-                         max_in_memory_size_arg, min_dupl_count_arg, desc);
-}


void Item_func_group_concat::print(String *str, enum_query_type query_type)
{
  str->append(func_name());
...
...
@@ -4928,21 +4899,14 @@ Item_func_group_concat::~Item_func_group_concat()
  @retval
    -1  NULL value, record rejected
-    0  record succesfully inserted into the tree
+    0  record successfully inserted into the tree
     1  error
*/

int Item_func_group_concat::insert_record_to_unique()
{
  if (unique_filter->is_variable_sized() && unique_filter->is_single_arg())
-  {
-    uint packed_length;
-    if ((packed_length=
-           unique_filter->get_descriptor()->make_record(skip_nulls())) == 0)
-      return -1; // NULL value
-    DBUG_ASSERT(packed_length <= unique_filter->get_size());
-    return unique_filter->unique_add(
-             unique_filter->get_descriptor()->get_rec_ptr());
-  }
+    return insert_packed_record_to_unique();

  copy_fields(tmp_table_param);
  if (copy_funcs(tmp_table_param->items_to_copy, table->in_use))
...
...
@@ -4960,16 +4924,71 @@ int Item_func_group_concat::insert_record_to_unique()
  */
  if (unique_filter->is_variable_sized())
  {
    DBUG_ASSERT(!unique_filter->is_single_arg());
-   uint packed_length;
-   if ((packed_length=
-          unique_filter->get_descriptor()->make_record(skip_nulls())) == 0)
-     return -1; // NULL value
-   DBUG_ASSERT(packed_length <= unique_filter->get_size());
-   return unique_filter->unique_add(
-            unique_filter->get_descriptor()->get_rec_ptr());
+   return insert_packed_record_to_unique();
  }
  return unique_filter->unique_add(get_record_pointer());
}


/*
  @brief
    Insert a packed record inside the Unique tree

  @retval
    -1  NULL value, record rejected
     0  record successfully inserted into the tree
     1  error
*/

int Item_func_group_concat::insert_packed_record_to_unique()
{
  Descriptor *descriptor= unique_filter->get_descriptor();
  uchar *rec_ptr;
  if ((rec_ptr= descriptor->make_record(skip_nulls())) == NULL)
    return -1; // NULL value
  DBUG_ASSERT(descriptor->get_length_of_key(rec_ptr) <= unique_filter->get_size());
  return unique_filter->unique_add(rec_ptr);
}


Descriptor *Item_sum::get_descriptor_for_fixed_size_keys(uint args_count,
                                                         uint size_arg)
{
  if (args_count == 1)
    return new Fixed_size_keys_descriptor(size_arg);
  else
    return new Fixed_size_composite_keys_descriptor(size_arg);
}


Descriptor *Item_sum::get_descriptor_for_variable_size_keys(uint args_count,
                                                            uint size_arg)
{
  if (args_count == 1)
    return new Variable_size_keys_simple(size_arg);
  else
    return new Variable_size_composite_key_desc(size_arg);
}


Descriptor *
Item_func_group_concat::get_descriptor_for_fixed_size_keys(uint args_count,
                                                           uint size_arg)
{
  if (args_count == 1 && !skip_nulls())
    return new Fixed_size_keys_descriptor_with_nulls(size_arg);
  else
    return new Fixed_size_keys_for_group_concat(size_arg);
}


Descriptor *
Item_func_group_concat::get_descriptor_for_variable_size_keys(uint args_count,
                                                              uint size_arg)
{
  if (args_count == 1)
    return new Variable_size_keys_simple(size_arg);
  else
    return new Variable_size_composite_key_desc_for_gconcat(size_arg);
}
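The pattern introduced here, a single shared get_unique() in the base class that delegates the descriptor choice to virtual factory methods which GROUP_CONCAT overrides, looks like this in miniature. All classes below are illustrative stand-ins rather than the real Item_sum hierarchy.

#include <memory>
#include <string>
#include <iostream>

struct Desc { virtual ~Desc() = default; virtual std::string name() const = 0; };
struct SimpleDesc : Desc { std::string name() const override { return "simple"; } };
struct GconcatDesc : Desc { std::string name() const override { return "gconcat"; } };

struct AggBase
{
  virtual ~AggBase() = default;
  // Shared driver: the only difference between aggregates is which descriptor
  // they build, so that choice is delegated to virtual factory methods.
  std::unique_ptr<Desc> build(bool packed)
  {
    return packed ? make_variable_desc() : make_fixed_desc();
  }
  virtual std::unique_ptr<Desc> make_variable_desc() { return std::make_unique<SimpleDesc>(); }
  virtual std::unique_ptr<Desc> make_fixed_desc() { return std::make_unique<SimpleDesc>(); }
};

struct GroupConcat : AggBase
{
  std::unique_ptr<Desc> make_variable_desc() override { return std::make_unique<GconcatDesc>(); }
};

int main()
{
  GroupConcat gc;
  std::cout << gc.build(true)->name() << '\n';   // gconcat
  std::cout << gc.build(false)->name() << '\n';  // simple
  return 0;
}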
sql/item_sum.h
...
...
@@ -29,6 +29,7 @@
class Item_sum;
class Aggregator_distinct;
class Aggregator_simple;
+class Descriptor;

/**
  The abstract base class for the Aggregator_* classes.
...
...
@@ -597,6 +598,10 @@ class Item_sum :public Item_func_or_sum
                          uint size_arg, size_t max_in_memory_size_arg,
                          uint min_dupl_count_arg, bool allow_packing,
                          uint number_of_args);
+  virtual Descriptor *get_descriptor_for_fixed_size_keys(uint args_count,
+                                                         uint size_arg);
+  virtual Descriptor *get_descriptor_for_variable_size_keys(uint args_count,
+                                                            uint size_arg);
};
...
...
@@ -701,7 +706,6 @@ class Aggregator_distinct : public Aggregator
  bool unique_walk_function(void *element);
  bool unique_walk_function_for_count(void *element);
  int insert_record_to_unique();
-  qsort_cmp2 get_compare_func_for_packed_keys();
  static int key_cmp(void* arg, uchar* key1, uchar* key2);
};
...
...
@@ -2034,11 +2038,11 @@ class Item_func_group_concat : public Item_sum
                               element_count __attribute__((unused)),
                               void *item_arg);
  int insert_record_to_unique();
-  Unique_impl *get_unique(qsort_cmp2 comp_func, void *comp_func_fixed_arg,
-                          uint size_arg, size_t max_in_memory_size_arg,
-                          uint min_dupl_count_arg, bool allow_packing,
-                          uint number_of_args);
+  int insert_packed_record_to_unique();
+  Descriptor *get_descriptor_for_fixed_size_keys(uint args_count,
+                                                 uint size_arg) override;
+  Descriptor *get_descriptor_for_variable_size_keys(uint args_count,
+                                                    uint size_arg) override;
};

#endif /* ITEM_SUM_INCLUDED */
sql/sql_class.h
...
...
@@ -6444,11 +6444,11 @@ struct SORT_FIELD_ATTR
                                       uchar *b, size_t *b_len);
  int compare_packed_varstrings(uchar *a, size_t *a_len,
                                uchar *b, size_t *b_len);
-  int compare_packed_varstrings_for_single_arg(uchar *a, uchar *b);
+  int compare_packed_varstrings(uchar *a, uchar *b);
  bool check_if_packing_possible(THD *thd) const;
  bool is_variable_sized() { return type == VARIABLE_SIZE; }
-  void setup(Field *fld, bool with_suffix);
-  void setup_for_fixed_size_keys(Field *fld);
+  void setup_key_part_for_variable_size_key(Field *fld);
+  void setup_key_part_for_fixed_size_key(Field *fld);
  int compare_nullability(uchar *a, uchar *b);
};
...
...
@@ -6458,9 +6458,9 @@ struct SORT_FIELD: public SORT_FIELD_ATTR
  Field *field;                         /* Field to sort */
  Item  *item;                          /* Item if not sorting fields */
  bool  reverse;                        /* if descending sort */
-  void setup(Field *fld, bool with_suffix);
-  void setup(Item *item, bool with_suffix);
-  void setup_for_fixed_size_keys(Field *fld);
+  void setup_key_part_for_variable_size_key(Field *fld);
+  void setup_key_part_for_variable_size_key(Item *item);
+  void setup_key_part_for_fixed_size_key(Field *fld);
  int compare_fixed_size_vals(uchar *a, size_t *a_len,
                              uchar *b, size_t *b_len);
};
...
...
sql/sql_sort.h
...
...
@@ -672,7 +672,7 @@ class Sort_param {
    m_packed_format= val;
  }
-  uint32 get_record_length_for_unique(uchar *to, uint size_of_dupl_count);
+  uint32 get_key_length_for_unique(uchar *to, uint size_of_dupl_count);
private:
  uint m_packable_length;
...
...
@@ -697,25 +697,4 @@ int merge_index(Sort_param *param, Sort_buffer sort_buffer,
                IO_CACHE *tempfile, IO_CACHE *outfile);
void reuse_freed_buff(QUEUE *queue, Merge_chunk *reuse, uint key_length);

-/*
-  An interface to handle variable sized records.
-  The primary use of this class is to create a record for a key
-  which has variable sized values for its keyparts.
-
-  The format used for the record is:
-  <total_key_length> <keypart1_null_byte> <keypart1_length> <keypart1_value> ... <keypartN_value>
-
-  <total_key_length>   : 4 bytes used to store the length of the key
-  <keypart1_null_byte> : 1 byte used to store the nullability of a keypart;
-                         no byte is used if the keypart is defined as NOT NULL
-  <keypart1_length>    : length of the value of the keypart. This is optional and is
-                         only stored for keyparts that can have variable sized values,
-                         e.g. VARCHAR and CHAR will have this length, but integers,
-                         being fixed size, will not have these additional length bytes.
-  <keypart1_value>     : the value for the keypart
-*/

#endif /* SQL_SORT_INCLUDED */
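The record layout described in the comment above (a 4-byte total length, then per key part an optional NULL byte, an optional length, and the value) can be produced by something as small as the following standalone sketch; it is illustrative only and does not use the server's classes.

#include <cstdint>
#include <cstring>
#include <cstdio>

// Append one nullable, variable-length key part and return the new write position.
static unsigned char *append_part(unsigned char *to, const char *val, uint16_t len,
                                  bool is_null)
{
  *to++ = is_null ? 0 : 1;                  // <keypart_null_byte>
  if (is_null)
    return to;
  std::memcpy(to, &len, sizeof(len));       // <keypart_length>
  to += sizeof(len);
  std::memcpy(to, val, len);                // <keypart_value>
  return to + len;
}

int main()
{
  unsigned char rec[64];
  unsigned char *to = rec + 4;              // reserve <total_key_length>
  to = append_part(to, "abc", 3, false);
  to = append_part(to, nullptr, 0, true);   // a NULL key part
  uint32_t total = (uint32_t)(to - rec);    // length includes the 4 length bytes
  std::memcpy(rec, &total, sizeof(total));
  std::printf("total key length: %u\n", total);
  return 0;
}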
sql/sql_statistics.cc
...
...
@@ -1524,7 +1524,11 @@ class Stat_table_write_iter
  @param
    field   Field structure

+ @retval
+   offset of the value from the start of the buffer
*/

uint get_offset_to_value(Field *field)
{
  return Variable_size_keys_descriptor::size_of_length_field +
...
...
@@ -1535,10 +1539,16 @@ uint get_offset_to_value(Field *field)
/*
  @brief
    Get the end of the buffer storing the value for the field

  @param
    buffer   buffer storing the value

  @retval
    return end of the buffer
*/

-uchar *get_buffer_end(Field *field, uchar *to)
+uchar *get_buffer_end(uchar *buffer)
{
-  return to + Variable_size_keys_descriptor::read_packed_length(to);
+  return buffer + Variable_size_keys_descriptor::read_packed_length(buffer);
}
...
...
@@ -1600,7 +1610,7 @@ class Histogram_builder
    {
      column->unpack(column->ptr, to + get_offset_to_value(column),
-                    get_buffer_end(column, to), 0);
+                    get_buffer_end(to), 0);
    }
    else
      column->store_field_value(to, col_length);
...
...
@@ -1710,7 +1720,10 @@ class Count_distinct_field: public Sql_alloc
  @param
    field   Field structure

+ @retval
+   Return the max length for a packable field
*/

uint compute_packable_length(Field *field)
{
  return table_field->max_packed_col_length(table_field->pack_length()) +
...
...
@@ -1726,7 +1739,12 @@ class Count_distinct_field: public Sql_alloc
  @param
    thd                   Thread structure
    max_heap_table_size   max allowed size of the unique tree

+ @retval
+   TRUE    ERROR
+   FALSE   SUCCESS
*/

virtual bool setup(THD *thd, size_t max_heap_table_size)
{
  Descriptor *desc;
...
...
@@ -1759,14 +1777,12 @@ class Count_distinct_field: public Sql_alloc
  {
    table_field->mark_unused_memory_as_defined();
    DBUG_ASSERT(tree);
-   uint length= tree->get_size();
    if (tree->is_variable_sized())
    {
-     length= tree->get_descriptor()->make_record(true);
-     DBUG_ASSERT(length != 0);
-     DBUG_ASSERT(length <= tree->get_size());
-     return tree->unique_add(tree->get_descriptor()->get_rec_ptr());
+     Descriptor *descriptor= tree->get_descriptor();
+     uchar *rec_ptr= descriptor->make_record(true);
+     DBUG_ASSERT(descriptor->get_length_of_key(rec_ptr) <= tree->get_size());
+     return tree->unique_add(rec_ptr);
    }
    return tree->unique_add(table_field->ptr);
  }
...
...
@@ -1867,7 +1883,7 @@ class Count_distinct_field_bit: public Count_distinct_field
    longlong val= table_field->val_int();
    return tree->unique_add(&val);
  }

-  bool setup(THD *thd, size_t max_heap_table_size)
+  bool setup(THD *thd, size_t max_heap_table_size) override
  {
    tree_key_length= sizeof(ulonglong);
    Descriptor *desc= new Fixed_size_keys_mem_comparable(tree_key_length);
...
...
sql/sql_statistics.h
...
...
@@ -140,7 +140,7 @@ double get_column_range_cardinality(Field *field,
bool is_stat_table(const LEX_CSTRING *db, LEX_CSTRING *table);
bool is_eits_usable(Field* field);
uint get_offset_to_value(Field *field);
-uchar *get_buffer_end(Field *field, uchar *to);
+uchar *get_buffer_end(uchar *to);

class Histogram
{
...
...
sql/uniques.cc
...
...
@@ -569,8 +569,8 @@ static bool merge_walk(uchar *merge_buffer, size_t merge_buffer_size,
      read next key from the cache or from the file and push it to the
      queue; this gives new top.
    */
-   key_length= sort_param.get_record_length_for_unique((uchar*)old_key,
-                                                       size_of_dupl_count);
+   key_length= sort_param.get_key_length_for_unique((uchar*)old_key,
+                                                    size_of_dupl_count);
    cnt_ofs= key_length - (with_counters ? sizeof(element_count) : 0);
    top->advance_current_key(key_length);
...
...
@@ -622,8 +622,8 @@ static bool merge_walk(uchar *merge_buffer, size_t merge_buffer_size,
  {
    do
    {
-     key_length= sort_param.get_record_length_for_unique(top->current_key(),
-                                                         size_of_dupl_count);
+     key_length= sort_param.get_key_length_for_unique(top->current_key(),
+                                                      size_of_dupl_count);
      cnt_ofs= key_length - (with_counters ? sizeof(element_count) : 0);
      cnt= with_counters ?
           get_counter_from_merged_element(top->current_key(), cnt_ofs) : 1;
...
...
@@ -882,27 +882,27 @@ int Unique_impl::write_record_to_file(uchar *key)
Variable_size_keys_descriptor::Variable_size_keys_descriptor(uint length)
{
  max_length= length;
-  flags= (1 << VARIABLE_SIZED_KEYS_WITH_ORIGINAL_VALUES);
+  flags= (1 << VARIABLE_SIZED_KEYS);
  sort_keys= NULL;
  sortorder= NULL;
}


Variable_size_composite_key_desc::Variable_size_composite_key_desc(uint length)
-  : Variable_size_keys_descriptor(length), Encode_record()
+  : Variable_size_keys_descriptor(length), Encode_key()
{
}


Variable_size_composite_key_desc_for_gconcat::
Variable_size_composite_key_desc_for_gconcat(uint length)
-  : Variable_size_keys_descriptor(length), Encode_record_for_group_concat()
+  : Variable_size_keys_descriptor(length), Encode_key_for_group_concat()
{
}


Variable_size_keys_simple::Variable_size_keys_simple(uint length)
-  : Variable_size_keys_descriptor(length), Encode_record()
+  : Variable_size_keys_descriptor(length), Encode_key()
{
}
...
...
@@ -951,10 +951,10 @@ Variable_size_keys_descriptor::setup_for_item(THD *thd, Item_sum *item,
    if (arg->type() == Item::FIELD_ITEM)
    {
      Field *field= ((Item_field*)arg)->field;
-     pos->setup(field, false);
+     pos->setup_key_part_for_variable_size_key(field);
    }
    else
-     pos->setup(arg, false);
+     pos->setup_key_part_for_variable_size_key(arg);
    pos++;
  }
  return false;
...
...
@@ -988,7 +988,7 @@ bool Variable_size_keys_descriptor::setup_for_field(THD *thd, Field *field)
  if (!sort_keys)
    return true;
  sort= pos= sortorder;
-  pos->setup(field, false);                           // Nulls are always excluded
+  pos->setup_key_part_for_variable_size_key(field);   // Nulls are always excluded
  return false;
}
...
...
@@ -1041,22 +1041,15 @@ int Variable_size_composite_key_desc_for_gconcat::compare_keys(uchar *a_ptr,
  for (SORT_FIELD *sort_field= sort_keys->begin();
       sort_field != sort_keys->end(); sort_field++)
  {
-   if (sort_field->is_variable_sized())
-   {
-     retval= sort_field->compare_packed_varstrings(a, &a_len, b, &b_len);
-     a+= a_len;
-     b+= b_len;
-   }
-   else
-   {
-     DBUG_ASSERT(sort_field->field);
-     retval= sort_field->field->cmp(a, b);
-     a+= sort_field->length;
-     b+= sort_field->length;
-   }
+   retval= sort_field->is_variable_sized() ?
+           sort_field->compare_packed_varstrings(a, &a_len, b, &b_len) :
+           sort_field->compare_fixed_size_vals(a, &a_len, b, &b_len);
    if (retval)
      return sort_field->reverse ? -retval : retval;
+   a+= a_len;
+   b+= b_len;
  }
  return retval;
}
...
...
@@ -1069,19 +1062,18 @@ int Variable_size_keys_simple::compare_keys(uchar *a, uchar *b)
}


-uint Variable_size_composite_key_desc::make_record(bool exclude_nulls)
+uchar* Variable_size_composite_key_desc::make_record(bool exclude_nulls)
{
  return make_encoded_record(sort_keys, exclude_nulls);
}


-uint Variable_size_composite_key_desc_for_gconcat::make_record(bool exclude_nulls)
+uchar* Variable_size_composite_key_desc_for_gconcat::make_record(bool exclude_nulls)
{
  return make_encoded_record(sort_keys, exclude_nulls);
}


-uint Variable_size_keys_simple::make_record(bool exclude_nulls)
+uchar* Variable_size_keys_simple::make_record(bool exclude_nulls)
{
  return make_encoded_record(sort_keys, exclude_nulls);
}
...
...
@@ -1089,19 +1081,19 @@ uint Variable_size_keys_simple::make_record(bool exclude_nulls)
bool Variable_size_composite_key_desc::init()
{
-  return Encode_record::init(max_length);
+  return Encode_key::init(max_length);
}


bool Variable_size_composite_key_desc_for_gconcat::init()
{
-  return Encode_record::init(max_length);
+  return Encode_key::init(max_length);
}


bool Variable_size_keys_simple::init()
{
-  return Encode_record::init(max_length);
+  return Encode_key::init(max_length);
}
...
...
@@ -1130,7 +1122,7 @@ Variable_size_composite_key_desc_for_gconcat::setup_for_item(THD *thd,
      continue;
    Field *field= arg->get_tmp_table_field();
-   pos->setup(field, false);
+   pos->setup_key_part_for_variable_size_key(field);
    pos++;
  }
  return false;
...
...
@@ -1184,7 +1176,7 @@ Fixed_size_keys_descriptor::setup_for_item(THD *thd, Item_sum *item,
    Field *field= arg->get_tmp_table_field();
    DBUG_ASSERT(field);
-   pos->setup_for_fixed_size_keys(field);
+   pos->setup_key_part_for_fixed_size_key(field);
    pos++;
  }
  return false;
...
...
@@ -1207,7 +1199,7 @@ Fixed_size_keys_descriptor::setup_for_field(THD *thd, Field *field)
  if (!sort_keys)
    return true;
  sort= pos= sortorder;
-  pos->setup_for_fixed_size_keys(field);
+  pos->setup_key_part_for_fixed_size_key(field);
  return false;
}
...
...
@@ -1295,7 +1287,7 @@ int Fixed_size_keys_for_group_concat::compare_keys(uchar *key1, uchar *key2)
}


-bool Encode_record::init(uint length)
+bool Encode_key::init(uint length)
{
  if (tmp_buffer.alloc(length))
    return true;
...
...
@@ -1306,7 +1298,7 @@ bool Encode_record::init(uint length)
}


-Encode_record::~Encode_record()
+Encode_key::~Encode_key()
{
  my_free(rec_ptr);
}
...
...
@@ -1320,8 +1312,8 @@ Encode_record::~Encode_record()
    0   NULL value
    >0  length of the packed record
*/

-uint Encode_record::make_encoded_record(Sort_keys *sort_keys,
-                                        bool exclude_nulls)
+uchar* Encode_key::make_encoded_record(Sort_keys *sort_keys,
+                                       bool exclude_nulls)
{
  Field *field;
  SORT_FIELD *sort_field;
...
...
@@ -1352,7 +1344,7 @@ uint Encode_record::make_encoded_record(Sort_keys *sort_keys,
    if ((maybe_null= sort_field->maybe_null))
    {
      if (exclude_nulls && length == 0)  // rejecting NULLS
-       return 0;
+       return NULL;
      to++;
    }
    to+= length;
...
...
@@ -1360,13 +1352,13 @@ uint Encode_record::make_encoded_record(Sort_keys *sort_keys,
  length= static_cast<uint>(to - orig_to);
  Variable_size_keys_descriptor::store_packed_length(orig_to, length);
-  return length;
+  return rec_ptr;
}


-uint Encode_record_for_group_concat::make_encoded_record(Sort_keys *sort_keys,
-                                                         bool exclude_nulls)
+uchar* Encode_key_for_group_concat::make_encoded_record(Sort_keys *sort_keys,
+                                                        bool exclude_nulls)
{
  Field *field;
  SORT_FIELD *sort_field;
...
...
@@ -1388,7 +1380,7 @@ Encode_record_for_group_concat::make_encoded_record(Sort_keys *sort_keys,
    if ((maybe_null= sort_field->maybe_null))
    {
      if (exclude_nulls && length == 0)  // rejecting NULLS
-       return 0;
+       return NULL;
      to++;
    }
    to+= length;
...
...
@@ -1396,12 +1388,5 @@ Encode_record_for_group_concat::make_encoded_record(Sort_keys *sort_keys,
  length= static_cast<uint>(to - orig_to);
  Variable_size_keys_descriptor::store_packed_length(orig_to, length);
-  return length;
-}
-
-
-bool Descriptor::is_single_arg()
-{
-  DBUG_ASSERT(sort_keys);
-  return !(sort_keys->size() > 1);
-}
+  return rec_ptr;
+}
\ No newline at end of file
sql/uniques.h
...
...
@@ -28,13 +28,21 @@
class Descriptor : public Sql_alloc
{
protected:
  /* maximum possible size of any key, in bytes */
  uint max_length;
  enum attributes
  {
    FIXED_SIZED_KEYS= 0,
-   VARIABLE_SIZED_KEYS_WITH_ORIGINAL_VALUES
+   VARIABLE_SIZED_KEYS
  };

  /*
    Storing information about the attributes for the keys.
    Each bit of the flag points to an attribute.
    Currently only 2 bits are used, so the remaining bits can be used
    in the future if some extension is required for descriptors.
  */
  uint flags;

  /*
    Array of SORT_FIELD structure storing the information about the key parts
...
...
@@ -53,7 +61,7 @@ class Descriptor : public Sql_alloc
  virtual uint get_length_of_key(uchar *ptr) = 0;
  bool is_variable_sized()
  {
-   return flags & (1 << VARIABLE_SIZED_KEYS_WITH_ORIGINAL_VALUES);
+   return flags & (1 << VARIABLE_SIZED_KEYS);
  }
  virtual int compare_keys(uchar *a, uchar *b) = 0;
...
...
@@ -62,13 +70,47 @@ class Descriptor : public Sql_alloc
                              uint non_const_args, uint arg_count)
  { return false; }
  virtual bool setup_for_field(THD *thd, Field *field) { return false; }

  virtual Sort_keys *get_keys() { return sort_keys; }
  SORT_FIELD *get_sortorder() { return sortorder; }

-  /* need to be moved to a separate class */
-  virtual uchar* get_rec_ptr() { return NULL; }
-  virtual uint make_record(bool exclude_nulls) { return 0; }
-  virtual bool is_single_arg();
+  virtual uchar* make_record(bool exclude_nulls) { return NULL; }
+  virtual bool is_single_arg() = 0;
};


/*
  Class to encode a key into a particular format. The format depends on whether
  the key is of fixed size or variable size.

  @note
    Currently this encoding is only done for variable size keys
*/
class Encode_key
{
protected:
  /*
    Packed record ptr for a record of the table, the packed value in this
    record is added to the unique tree
  */
  uchar* rec_ptr;

  String tmp_buffer;
public:
  Encode_key() : rec_ptr(NULL) {}
  virtual ~Encode_key();
  virtual uchar* make_encoded_record(Sort_keys *keys, bool exclude_nulls);
  bool init(uint length);
  uchar *get_rec_ptr() { return rec_ptr; }
};


class Encode_key_for_group_concat : public Encode_key
{
public:
  Encode_key_for_group_concat() : Encode_key() {}
  ~Encode_key_for_group_concat() {}
  uchar* make_encoded_record(Sort_keys *keys, bool exclude_nulls) override;
};
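For readers new to this hierarchy: the variable-size descriptors further down combine the Descriptor interface with the Encode_key buffer handling through multiple inheritance, roughly in the shape of this reduced sketch (illustrative names only, not the real classes).

#include <cstdio>

struct KeyDescriptor                 // stands in for Descriptor
{
  virtual ~KeyDescriptor() = default;
  virtual unsigned char *make_record(bool exclude_nulls) = 0;
};

struct KeyEncoder                    // stands in for Encode_key
{
  unsigned char buf[32];
  unsigned char *encode() { buf[0] = 1; return buf; }   // owns the scratch buffer
};

// A concrete descriptor reuses the encoder's buffer logic via the second base.
struct VarSizeSimple : KeyDescriptor, KeyEncoder
{
  unsigned char *make_record(bool) override { return encode(); }
};

int main()
{
  VarSizeSimple d;
  std::printf("%p\n", (void *) d.make_record(true));
  return 0;
}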
...
...
@@ -86,6 +128,7 @@ class Fixed_size_keys_descriptor : public Descriptor
  bool setup_for_item(THD *thd, Item_sum *item,
                      uint non_const_args, uint arg_count);
  virtual int compare_keys(uchar *a, uchar *b) override;
+  virtual bool is_single_arg() override { return true; }
};
...
...
@@ -160,39 +203,12 @@ class Fixed_size_composite_keys_descriptor : public Fixed_size_keys_descriptor
    : Fixed_size_keys_descriptor(length) {}
  ~Fixed_size_composite_keys_descriptor() {}
  int compare_keys(uchar *a, uchar *b) override;
+  bool is_single_arg() override { return false; }
};


-class Encode_record
-{
-protected:
-  /*
-    Packed record ptr for a record of the table, the packed value in this
-    record is added to the unique tree
-  */
-  uchar* rec_ptr;
-
-  String tmp_buffer;
-public:
-  Encode_record() : rec_ptr(NULL) {}
-  virtual ~Encode_record();
-  virtual uint make_encoded_record(Sort_keys *keys, bool exclude_nulls);
-  bool init(uint length);
-};
-
-
-class Encode_record_for_group_concat : public Encode_record
-{
-public:
-  Encode_record_for_group_concat() : Encode_record() {}
-  ~Encode_record_for_group_concat() {}
-  uint make_encoded_record(Sort_keys *keys, bool exclude_nulls) override;
-};


/*
-  Descriptor for variable size keys
+  Base class for the descriptor for variable size keys
*/
class Variable_size_keys_descriptor : public Descriptor
...
...
@@ -204,13 +220,14 @@ class Variable_size_keys_descriptor : public Descriptor
  {
    return read_packed_length(ptr);
  }
-  Sort_keys *get_keys() { return sort_keys; }
-  SORT_FIELD *get_sortorder() { return sortorder; }
  virtual int compare_keys(uchar *a, uchar *b) override { return 0; }
  virtual bool init() { return false; }
-  bool setup_for_item(THD *thd, Item_sum *item,
-                      uint non_const_args, uint arg_count) override;
-  bool setup_for_field(THD *thd, Field *field) override;
+  virtual bool is_single_arg() override { return false; }
+  virtual bool setup_for_item(THD *thd, Item_sum *item,
+                              uint non_const_args, uint arg_count) override;
+  virtual bool setup_for_field(THD *thd, Field *field) override;

  // All need to be moved to some new class
  // returns the length of the key along with the length bytes for the key
  static uint read_packed_length(uchar *p)
...
...
@@ -226,24 +243,23 @@ class Variable_size_keys_descriptor : public Descriptor
/*
  Descriptor for variable size keys with only one component

  Used by EITS, JSON_ARRAYAGG,
  COUNT(DISTINCT col1) and GROUP_CONCAT(DISTINCT col1) => only one item is allowed
*/
class Variable_size_keys_simple : public Variable_size_keys_descriptor,
-                                 public Encode_record
+                                 public Encode_key
{
public:
  Variable_size_keys_simple(uint length);
  virtual ~Variable_size_keys_simple() {}
  int compare_keys(uchar *a, uchar *b) override;
-  uint make_record(bool exclude_nulls) override;
-  uchar* get_rec_ptr() { return rec_ptr; }
+  uchar* make_record(bool exclude_nulls) override;
  bool init() override;
  bool is_single_arg() override { return true; }
};
...
...
@@ -251,27 +267,30 @@ class Variable_size_keys_simple : public Variable_size_keys_descriptor,
  Descriptor for variable sized keys with multiple key parts
*/
class Variable_size_composite_key_desc : public Variable_size_keys_descriptor,
-                                         public Encode_record
+                                         public Encode_key
{
public:
  Variable_size_composite_key_desc(uint length);
  virtual ~Variable_size_composite_key_desc() {}
  int compare_keys(uchar *a, uchar *b) override;
-  uint make_record(bool exclude_nulls) override;
-  uchar* get_rec_ptr() { return rec_ptr; }
+  uchar* make_record(bool exclude_nulls) override;
  bool init() override;
};


/*
  Descriptor for variable sized keys with multiple key parts for GROUP_CONCAT
*/
class Variable_size_composite_key_desc_for_gconcat :
-        public Variable_size_keys_descriptor, public Encode_record_for_group_concat
+        public Variable_size_keys_descriptor, public Encode_key_for_group_concat
{
public:
  Variable_size_composite_key_desc_for_gconcat(uint length);
  virtual ~Variable_size_composite_key_desc_for_gconcat() {}
  int compare_keys(uchar *a, uchar *b) override;
-  uint make_record(bool exclude_nulls) override;
-  uchar* get_rec_ptr() { return rec_ptr; }
+  uchar* make_record(bool exclude_nulls) override;
  bool init() override;
  bool setup_for_item(THD *thd, Item_sum *item,
                      uint non_const_args, uint arg_count) override;
...
...
@@ -285,6 +304,11 @@ class Variable_size_composite_key_desc_for_gconcat :
class Unique : public Sql_alloc
{
protected:
+  /*
+    Storing all relevant information of the expressions whose values are
+    being added to the Unique tree
+  */
  Descriptor *m_descriptor;
public:
...
...