Commit 6c573a91 authored by Alexey Botchkov

MDEV-22844 JSON_ARRAYAGG is limited by group_concat_max_len.

Warning message and function result fixed
parent 30d41c81
@@ -1308,6 +1308,18 @@ SELECT JSON_ARRAYAGG(a ORDER BY a ASC) FROM t1;
JSON_ARRAYAGG(a ORDER BY a ASC)
[null,"blue","red"]
DROP TABLE t1;
set group_concat_max_len=64;
create table t1 (a varchar(254));
insert into t1 values (concat('x64-', repeat('a', 60)));
insert into t1 values (concat('x64-', repeat('b', 60)));
insert into t1 values (concat('x64-', repeat('c', 60)));
select json_arrayagg(a) from t1;
json_arrayagg(a)
["x64-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]
Warnings:
Warning 1260 Row 1 was cut by JSON_ARRAYAGG()
drop table t1;
SET group_concat_max_len= default;
#
# End of 10.5 tests
#
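A rough sketch, not part of the commit's test suite (the table and data below are hypothetical), of how the same length limit now plays out for the two aggregates: GROUP_CONCAT() still cuts the raw text at group_concat_max_len, while JSON_ARRAYAGG() keeps the closing quote of the cut string value, and each warning names the aggregate that did the cutting:

set group_concat_max_len=64;
create table t_demo (a varchar(254));
insert into t_demo values (concat('x64-', repeat('a', 60)));
insert into t_demo values (concat('x64-', repeat('b', 60)));
-- plain byte cut; warning should read "Row <n> was cut by GROUP_CONCAT()"
select group_concat(a) from t_demo;
-- cut string value stays quoted; warning should read "Row <n> was cut by JSON_ARRAYAGG()"
select json_arrayagg(a) from t_demo;
drop table t_demo;
set group_concat_max_len= default;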
@@ -812,6 +812,14 @@ SELECT JSON_ARRAYAGG(a ORDER BY a DESC) FROM t1;
SELECT JSON_ARRAYAGG(a ORDER BY a ASC) FROM t1;
DROP TABLE t1;
set group_concat_max_len=64;
create table t1 (a varchar(254));
insert into t1 values (concat('x64-', repeat('a', 60)));
insert into t1 values (concat('x64-', repeat('b', 60)));
insert into t1 values (concat('x64-', repeat('c', 60)));
select json_arrayagg(a) from t1;
drop table t1;
SET group_concat_max_len= default;
--echo #
--echo # End of 10.5 tests
--echo #
......
@@ -3689,6 +3689,24 @@ String *Item_func_json_arrayagg::get_str_from_field(Item *i,Field *f,
}
void Item_func_json_arrayagg::cut_max_length(String *result,
                                             uint old_length, uint max_length) const
{
  if (result->length() == 0)
    return;
  if (result->ptr()[result->length() - 1] != '"' ||
      max_length == 0)
  {
    Item_func_group_concat::cut_max_length(result, old_length, max_length);
    return;
  }
  Item_func_group_concat::cut_max_length(result, old_length, max_length-1);
  result->append('"');
}
String* Item_func_json_arrayagg::val_str(String *str)
{
  if ((str= Item_func_group_concat::val_str(str)))
......
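The new Item_func_json_arrayagg::cut_max_length above only re-appends the closing '"' when the last byte of the accumulated result is a double quote, i.e. when the cut lands inside a JSON string value (and max_length is non-zero); in every other case it delegates unchanged to Item_func_group_concat::cut_max_length. A minimal sketch of the two branches, with hypothetical tables and without asserting exact output:

set group_concat_max_len=64;
create table t_str (a varchar(254));
insert into t_str values (repeat('a', 100));
-- cut lands inside a quoted string value: trimmed one byte early, '"' re-appended
select json_arrayagg(a) from t_str;
set group_concat_max_len=8;
create table t_num (n bigint);
insert into t_num values (1234567890), (1234567890);
-- last byte of the cut result is a digit, not '"': falls back to the plain byte cut
select json_arrayagg(n) from t_num;
drop table t_str, t_num;
set group_concat_max_len= default;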
@@ -546,7 +546,8 @@ class Item_func_json_arrayagg : public Item_func_group_concat
  String *get_str_from_item(Item *i, String *tmp);
  String *get_str_from_field(Item *i, Field *f, String *tmp,
                             const uchar *key, size_t offset);
  void cut_max_length(String *result,
                      uint old_length, uint max_length) const;
public:
  String m_tmp_json; /* Used in get_str_from_*.. */
  Item_func_json_arrayagg(THD *thd, Name_resolution_context *context_arg,
......
@@ -3733,6 +3733,37 @@ int group_concat_key_cmp_with_order_with_nulls(void *arg, const void *key1_arg,
}
static void report_cut_value_error(THD *thd, uint row_count, const char *fname)
{
  size_t fn_len= strlen(fname);
  char *fname_upper= (char *) my_alloca(fn_len + 1);
  fname_upper[fn_len]= 0;
  for (; fn_len; fn_len--)
    fname_upper[fn_len-1]= my_toupper(&my_charset_latin1, fname[fn_len-1]);
  push_warning_printf(thd, Sql_condition::WARN_LEVEL_WARN,
                      ER_CUT_VALUE_GROUP_CONCAT,
                      ER_THD(thd, ER_CUT_VALUE_GROUP_CONCAT),
                      row_count, fname_upper);
}

void Item_func_group_concat::cut_max_length(String *result,
                                            uint old_length, uint max_length) const
{
  const char *ptr= result->ptr();
  /*
    It's ok to use item->result.length() as the fourth argument
    as this is never used to limit the length of the data.
    Cut is done with the third argument.
  */
  size_t add_length= Well_formed_prefix(collation.collation,
                                        ptr + old_length,
                                        ptr + max_length,
                                        result->length()).length();
  result->length(old_length + add_length);
}
/**
  Append data from current leaf to item->result.
*/
@@ -3812,24 +3843,10 @@ int dump_leaf_key(void* key_arg, element_count count __attribute__((unused)),
  /* stop if length of result more than max_length */
  if (result->length() > max_length)
  {
-    CHARSET_INFO *cs= item->collation.collation;
-    const char *ptr= result->ptr();
    THD *thd= current_thd;
-    /*
-      It's ok to use item->result.length() as the fourth argument
-      as this is never used to limit the length of the data.
-      Cut is done with the third argument.
-    */
-    size_t add_length= Well_formed_prefix(cs,
-                                          ptr + old_length,
-                                          ptr + max_length,
-                                          result->length()).length();
-    result->length(old_length + add_length);
+    item->cut_max_length(result, old_length, max_length);
    item->warning_for_row= TRUE;
-    push_warning_printf(thd, Sql_condition::WARN_LEVEL_WARN,
-                        ER_CUT_VALUE_GROUP_CONCAT,
-                        ER_THD(thd, ER_CUT_VALUE_GROUP_CONCAT),
-                        item->row_count);
+    report_cut_value_error(thd, item->row_count, item->func_name());
    /**
      To avoid duplicated warnings in Item_func_group_concat::val_str()
@@ -4427,9 +4444,7 @@ String* Item_func_group_concat::val_str(String* str)
      table->blob_storage->is_truncated_value())
  {
    warning_for_row= true;
-    push_warning_printf(current_thd, Sql_condition::WARN_LEVEL_WARN,
-                        ER_CUT_VALUE_GROUP_CONCAT, ER(ER_CUT_VALUE_GROUP_CONCAT),
-                        row_count);
+    report_cut_value_error(current_thd, row_count, func_name());
  }
  return &result;
......
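report_cut_value_error() upper-cases func_name() byte by byte with my_toupper(); for these aggregates func_name() already ends with the opening parenthesis (e.g. "json_arrayagg("), which is why the reworded ER_CUT_VALUE_GROUP_CONCAT message in the hunk below ends in a bare closing ")" after "%s". A quick, hypothetical way to see the resulting warning text (session and table are illustrative, not part of the commit):

set group_concat_max_len=64;
create table t_warn (a varchar(254));
insert into t_warn values (repeat('a', 100));
select json_arrayagg(a) from t_warn;
-- expected along the lines of: Warning 1260 Row 1 was cut by JSON_ARRAYAGG()
show warnings;
drop table t_warn;
set group_concat_max_len= default;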
@@ -1941,6 +1941,8 @@ class Item_func_group_concat : public Item_sum
  virtual String *get_str_from_field(Item *i, Field *f, String *tmp,
                                     const uchar *key, size_t offset)
  { return f->val_str(tmp, key + offset); }
  virtual void cut_max_length(String *result,
                              uint old_length, uint max_length) const;
public:
  // Methods used by ColumnStore
  bool get_distinct() const { return distinct; }
......
@@ -4940,7 +4940,7 @@ ER_ZLIB_Z_DATA_ERROR
        por "ZLIB: Dados de entrada está corrupto"
        spa "ZLIB: Dato de entrada fué corrompido para zlib"
ER_CUT_VALUE_GROUP_CONCAT
-        eng "Row %u was cut by GROUP_CONCAT()"
+        eng "Row %u was cut by %s)"
ER_WARN_TOO_FEW_RECORDS 01000
        eng "Row %lu doesn't contain data for all columns"
        ger "Zeile %lu enthält nicht für alle Felder Daten"
......