Skip to content

Commit

Permalink
MDEV-22844 JSON_ARRAYAGG is limited by group_concat_max_len.
Browse files Browse the repository at this point in the history
Warning message and function result fixed
  • Loading branch information
Alexey Botchkov committed Jun 15, 2020
1 parent 30d41c8 commit 6c573a9
Show file tree
Hide file tree
Showing 7 changed files with 77 additions and 21 deletions.
12 changes: 12 additions & 0 deletions mysql-test/main/func_json.result
Original file line number Diff line number Diff line change
Expand Up @@ -1308,6 +1308,18 @@ SELECT JSON_ARRAYAGG(a ORDER BY a ASC) FROM t1;
JSON_ARRAYAGG(a ORDER BY a ASC)
[null,"blue","red"]
DROP TABLE t1;
set group_concat_max_len=64;
create table t1 (a varchar(254));
insert into t1 values (concat('x64-', repeat('a', 60)));
insert into t1 values (concat('x64-', repeat('b', 60)));
insert into t1 values (concat('x64-', repeat('c', 60)));
select json_arrayagg(a) from t1;
json_arrayagg(a)
["x64-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]
Warnings:
Warning 1260 Row 1 was cut by JSON_ARRAYAGG()
drop table t1;
SET group_concat_max_len= default;
#
# End of 10.5 tests
#
8 changes: 8 additions & 0 deletions mysql-test/main/func_json.test
Original file line number Diff line number Diff line change
Expand Up @@ -812,6 +812,14 @@ SELECT JSON_ARRAYAGG(a ORDER BY a DESC) FROM t1;
SELECT JSON_ARRAYAGG(a ORDER BY a ASC) FROM t1;
DROP TABLE t1;

set group_concat_max_len=64;
create table t1 (a varchar(254));
insert into t1 values (concat('x64-', repeat('a', 60)));
insert into t1 values (concat('x64-', repeat('b', 60)));
insert into t1 values (concat('x64-', repeat('c', 60)));
select json_arrayagg(a) from t1;
drop table t1;
SET group_concat_max_len= default;

--echo #
--echo # End of 10.5 tests
--echo #
Expand Down
18 changes: 18 additions & 0 deletions sql/item_jsonfunc.cc
Original file line number Diff line number Diff line change
Expand Up @@ -3689,6 +3689,24 @@ String *Item_func_json_arrayagg::get_str_from_field(Item *i,Field *f,
}


/*
  Truncate an over-long JSON array element while keeping the value a
  valid JSON string: if the accumulated text ends with a closing '"',
  cut one byte earlier and re-append the quote afterwards.

  @param result      accumulated value, truncated in place
  @param old_length  length of *result before the last appended row
  @param max_length  byte limit (group_concat_max_len)
*/
void Item_func_json_arrayagg::cut_max_length(String *result,
                                             uint old_length,
                                             uint max_length) const
{
  size_t len= result->length();
  if (len == 0)
    return;                                     /* nothing to cut */

  bool quoted= (result->ptr()[len - 1] == '"');
  if (quoted && max_length > 0)
  {
    /* Reserve one byte for the closing quote, cut, then restore it. */
    Item_func_group_concat::cut_max_length(result, old_length, max_length - 1);
    result->append('"');
  }
  else
    Item_func_group_concat::cut_max_length(result, old_length, max_length);
}


String* Item_func_json_arrayagg::val_str(String *str)
{
if ((str= Item_func_group_concat::val_str(str)))
Expand Down
3 changes: 2 additions & 1 deletion sql/item_jsonfunc.h
Original file line number Diff line number Diff line change
Expand Up @@ -546,7 +546,8 @@ class Item_func_json_arrayagg : public Item_func_group_concat
String *get_str_from_item(Item *i, String *tmp);
String *get_str_from_field(Item *i, Field *f, String *tmp,
const uchar *key, size_t offset);

void cut_max_length(String *result,
uint old_length, uint max_length) const;
public:
String m_tmp_json; /* Used in get_str_from_*.. */
Item_func_json_arrayagg(THD *thd, Name_resolution_context *context_arg,
Expand Down
53 changes: 34 additions & 19 deletions sql/item_sum.cc
Original file line number Diff line number Diff line change
Expand Up @@ -3733,6 +3733,37 @@ int group_concat_key_cmp_with_order_with_nulls(void *arg, const void *key1_arg,
}


/**
  Push an ER_CUT_VALUE_GROUP_CONCAT warning naming the aggregate function.

  @param thd        current session
  @param row_count  1-based number of the row that was truncated
  @param fname      function name as returned by func_name(); upper-cased
                    here for the warning text (e.g. "JSON_ARRAYAGG(")
*/
static void report_cut_value_error(THD *thd, uint row_count, const char *fname)
{
  size_t fn_len= strlen(fname);
  char *fname_upper= (char *) my_alloca(fn_len + 1);
  fname_upper[fn_len]= 0;
  for (size_t i= 0; i < fn_len; i++)
    fname_upper[i]= my_toupper(&my_charset_latin1, fname[i]);
  push_warning_printf(thd, Sql_condition::WARN_LEVEL_WARN,
                      ER_CUT_VALUE_GROUP_CONCAT,
                      ER_THD(thd, ER_CUT_VALUE_GROUP_CONCAT),
                      row_count, fname_upper);
  /*
    my_alloca() falls back to malloc() on platforms without alloca();
    release explicitly (my_afree is a no-op in the alloca() case).
  */
  my_afree(fname_upper);
}


/*
  Cut the accumulated value in *result back to at most max_length bytes
  without splitting a multi-byte character of the collation's charset.

  @param result      accumulated value; its length is reset in place
  @param old_length  length of *result before the last appended row
  @param max_length  byte limit (group_concat_max_len)
*/
void Item_func_group_concat::cut_max_length(String *result,
                                        uint old_length, uint max_length) const
{
  const char *ptr= result->ptr();
  /*
    It's ok to use result->length() as the fourth argument of
    Well_formed_prefix as that argument is never used to limit the
    length of the data; the cut point is the third argument
    (ptr + max_length). The prefix length returned is the longest
    well-formed byte count of the last row's data that fits the limit.
  */
  size_t add_length= Well_formed_prefix(collation.collation,
                                        ptr + old_length,
                                        ptr + max_length,
                                        result->length()).length();
  result->length(old_length + add_length);
}


/**
Append data from current leaf to item->result.
*/
Expand Down Expand Up @@ -3812,24 +3843,10 @@ int dump_leaf_key(void* key_arg, element_count count __attribute__((unused)),
/* stop if length of result more than max_length */
if (result->length() > max_length)
{
CHARSET_INFO *cs= item->collation.collation;
const char *ptr= result->ptr();
THD *thd= current_thd;
/*
It's ok to use item->result.length() as the fourth argument
as this is never used to limit the length of the data.
Cut is done with the third argument.
*/
size_t add_length= Well_formed_prefix(cs,
ptr + old_length,
ptr + max_length,
result->length()).length();
result->length(old_length + add_length);
item->cut_max_length(result, old_length, max_length);
item->warning_for_row= TRUE;
push_warning_printf(thd, Sql_condition::WARN_LEVEL_WARN,
ER_CUT_VALUE_GROUP_CONCAT,
ER_THD(thd, ER_CUT_VALUE_GROUP_CONCAT),
item->row_count);
report_cut_value_error(thd, item->row_count, item->func_name());

/**
To avoid duplicated warnings in Item_func_group_concat::val_str()
Expand Down Expand Up @@ -4427,9 +4444,7 @@ String* Item_func_group_concat::val_str(String* str)
table->blob_storage->is_truncated_value())
{
warning_for_row= true;
push_warning_printf(current_thd, Sql_condition::WARN_LEVEL_WARN,
ER_CUT_VALUE_GROUP_CONCAT, ER(ER_CUT_VALUE_GROUP_CONCAT),
row_count);
report_cut_value_error(current_thd, row_count, func_name());
}

return &result;
Expand Down
2 changes: 2 additions & 0 deletions sql/item_sum.h
Original file line number Diff line number Diff line change
Expand Up @@ -1941,6 +1941,8 @@ class Item_func_group_concat : public Item_sum
virtual String *get_str_from_field(Item *i, Field *f, String *tmp,
const uchar *key, size_t offset)
{ return f->val_str(tmp, key + offset); }
virtual void cut_max_length(String *result,
uint old_length, uint max_length) const;
public:
// Methods used by ColumnStore
bool get_distinct() const { return distinct; }
Expand Down
2 changes: 1 addition & 1 deletion sql/share/errmsg-utf8.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4940,7 +4940,7 @@ ER_ZLIB_Z_DATA_ERROR
por "ZLIB: Dados de entrada está corrupto"
spa "ZLIB: Dato de entrada fué corrompido para zlib"
ER_CUT_VALUE_GROUP_CONCAT
eng "Row %u was cut by GROUP_CONCAT()"
eng "Row %u was cut by %s)"
ER_WARN_TOO_FEW_RECORDS 01000
eng "Row %lu doesn't contain data for all columns"
ger "Zeile %lu enthält nicht für alle Felder Daten"
Expand Down

0 comments on commit 6c573a9

Please sign in to comment.