Mirror of https://github.com/MariaDB/server.git, synced 2025-07-29 05:21:33 +03:00
MDEV-5542: GROUP_CONCAT truncates output to 65,536 chars when using DISTINCT or ORDER BY
Port of the corresponding MySQL fix (WL#6098).
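For context, a minimal SQL sketch of the behaviour this patch targets, condensed from the test case added below (the table and column names t_demo, txt and ord are illustrative, not part of the patch). Before the fix, GROUP_CONCAT with DISTINCT or ORDER BY spooled its intermediate values through a VARCHAR-converted temporary field, so the result was silently cut near Field_varstring::MAX_SIZE no matter how large group_concat_max_len was; after the fix, BLOB values go into the new Blob_mem_storage and only group_concat_max_len limits the result.

SET group_concat_max_len = 1000000;
CREATE TABLE t_demo (txt LONGTEXT, ord INT);
INSERT INTO t_demo VALUES (REPEAT('x', 100000), 1), (REPEAT('y', 100000), 2);
-- Before the fix this came back truncated despite the raised limit;
-- with the fix it reports 200001 (two 100000-char values plus the separator).
SELECT LENGTH(GROUP_CONCAT(txt ORDER BY ord)) FROM t_demo;
DROP TABLE t_demo;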
@@ -1119,3 +1119,84 @@ GROUP_CONCAT(t1a.a ORDER BY 1, t1a.a=0)
1,1
2,2
DROP TABLE t1;
#
# WL#6098 Eliminate GROUP_CONCAT intermediate result limitation.
# Bug#13387020 GROUP_CONCAT WITH ORDER BY RESULTS ARE TRUNCATED.
#
SET group_concat_max_len= 9999999;
CREATE TABLE t1 (f1 LONGTEXT , f2 INTEGER);
INSERT INTO t1 VALUES (REPEAT('a', 500000), 0), (REPEAT('b', 500000), 1), (REPEAT('c', 500000), 2);
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1;
LENGTH(GROUP_CONCAT(f1 ORDER BY f2))
1500002
SELECT LENGTH(GROUP_CONCAT(DISTINCT f1 ORDER BY f1 DESC)) FROM t1;
LENGTH(GROUP_CONCAT(DISTINCT f1 ORDER BY f1 DESC))
1500002
SELECT SUBSTRING(GROUP_CONCAT(DISTINCT f1 ORDER BY f1 DESC), 1, 5) FROM t1;
SUBSTRING(GROUP_CONCAT(DISTINCT f1 ORDER BY f1 DESC), 1, 5)
ccccc
SELECT LENGTH(GROUP_CONCAT(DISTINCT f1)) FROM t1;
LENGTH(GROUP_CONCAT(DISTINCT f1))
1500002
SELECT LENGTH(GROUP_CONCAT(UPPER(f1) ORDER BY f2)) FROM t1;
LENGTH(GROUP_CONCAT(UPPER(f1) ORDER BY f2))
1500002
SELECT LENGTH(GROUP_CONCAT(DISTINCT UPPER(f1) ORDER BY f1)) FROM t1;
LENGTH(GROUP_CONCAT(DISTINCT UPPER(f1) ORDER BY f1))
1500002
SELECT SUBSTRING(GROUP_CONCAT(DISTINCT UPPER(f1) ORDER BY f1), 1, 5) FROM t1;
SUBSTRING(GROUP_CONCAT(DISTINCT UPPER(f1) ORDER BY f1), 1, 5)
AAAAA
SELECT LENGTH(GROUP_CONCAT(DISTINCT UPPER(f1))) FROM t1;
LENGTH(GROUP_CONCAT(DISTINCT UPPER(f1)))
1500002
CREATE TABLE t2 SELECT GROUP_CONCAT(f1 order by f2) FROM t1;
SHOW CREATE TABLE t2;
Table Create Table
t2 CREATE TABLE `t2` (
`GROUP_CONCAT(f1 order by f2)` mediumtext
) ENGINE=MyISAM DEFAULT CHARSET=latin1
DROP TABLE t2;
CREATE TABLE t2 SELECT GROUP_CONCAT(UPPER(f1) ORDER BY f2) FROM t1;
SHOW CREATE TABLE t2;
Table Create Table
t2 CREATE TABLE `t2` (
`GROUP_CONCAT(UPPER(f1) ORDER BY f2)` mediumtext
) ENGINE=MyISAM DEFAULT CHARSET=latin1
DROP TABLE t2;
SET group_concat_max_len= DEFAULT;
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1;
LENGTH(GROUP_CONCAT(f1 ORDER BY f2))
1024
Warnings:
Warning 1260 Row 2 was cut by GROUP_CONCAT()
SET group_concat_max_len= 499999;
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1 WHERE f2 = 0;
LENGTH(GROUP_CONCAT(f1 ORDER BY f2))
499999
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1 GROUP BY f2;
LENGTH(GROUP_CONCAT(f1 ORDER BY f2))
499999
499999
499999
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
INSERT INTO t1 VALUES (REPEAT('a', 499999), 3), (REPEAT('b', 500000), 4);
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1 GROUP BY f2;
LENGTH(GROUP_CONCAT(f1 ORDER BY f2))
499999
499999
499999
499999
499999
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
Warning 1260 Row 5 was cut by GROUP_CONCAT()
DROP TABLE t1;
SET group_concat_max_len= DEFAULT;
@@ -762,6 +762,10 @@ select left(a1,7), left(a2,7)
from t1_512
where a1 in (select group_concat(b1) from t2_512 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
set @@group_concat_max_len = 256;
explain extended select left(a1,7), left(a2,7)
from t1_512
@@ -775,6 +779,10 @@ select left(a1,7), left(a2,7)
from t1_512
where a1 in (select group_concat(b1) from t2_512 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
drop table t1_512, t2_512, t3_512;
set @blob_len = 1024;
set @suffix_len = @blob_len - @prefix_len;
@@ -855,6 +863,10 @@ select left(a1,7), left(a2,7)
from t1_1024
where a1 in (select group_concat(b1) from t2_1024 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
set @@group_concat_max_len = 256;
explain extended select left(a1,7), left(a2,7)
from t1_1024
@@ -868,6 +880,10 @@ select left(a1,7), left(a2,7)
from t1_1024
where a1 in (select group_concat(b1) from t2_1024 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
drop table t1_1024, t2_1024, t3_1024;
set @blob_len = 1025;
set @suffix_len = @blob_len - @prefix_len;
@@ -948,6 +964,10 @@ select left(a1,7), left(a2,7)
from t1_1025
where a1 in (select group_concat(b1) from t2_1025 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
set @@group_concat_max_len = 256;
explain extended select left(a1,7), left(a2,7)
from t1_1025
@@ -961,6 +981,10 @@ select left(a1,7), left(a2,7)
from t1_1025
where a1 in (select group_concat(b1) from t2_1025 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
drop table t1_1025, t2_1025, t3_1025;
create table t1bit (a1 bit(3), a2 bit(3));
create table t2bit (b1 bit(3), b2 bit(3));
@@ -787,6 +787,10 @@ select left(a1,7), left(a2,7)
from t1_512
where a1 in (select group_concat(b1) from t2_512 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
set @@group_concat_max_len = 256;
explain extended select left(a1,7), left(a2,7)
from t1_512
@@ -801,6 +805,10 @@ select left(a1,7), left(a2,7)
from t1_512
where a1 in (select group_concat(b1) from t2_512 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
drop table t1_512, t2_512, t3_512;
set @blob_len = 1024;
set @suffix_len = @blob_len - @prefix_len;
@@ -882,6 +890,10 @@ select left(a1,7), left(a2,7)
from t1_1024
where a1 in (select group_concat(b1) from t2_1024 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
set @@group_concat_max_len = 256;
explain extended select left(a1,7), left(a2,7)
from t1_1024
@@ -896,6 +908,10 @@ select left(a1,7), left(a2,7)
from t1_1024
where a1 in (select group_concat(b1) from t2_1024 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
drop table t1_1024, t2_1024, t3_1024;
set @blob_len = 1025;
set @suffix_len = @blob_len - @prefix_len;
@@ -977,6 +993,10 @@ select left(a1,7), left(a2,7)
from t1_1025
where a1 in (select group_concat(b1) from t2_1025 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
set @@group_concat_max_len = 256;
explain extended select left(a1,7), left(a2,7)
from t1_1025
@@ -991,6 +1011,10 @@ select left(a1,7), left(a2,7)
from t1_1025
where a1 in (select group_concat(b1) from t2_1025 group by b2);
left(a1,7) left(a2,7)
Warnings:
Warning 1260 Row 1 was cut by GROUP_CONCAT()
Warning 1260 Row 2 was cut by GROUP_CONCAT()
Warning 1260 Row 3 was cut by GROUP_CONCAT()
drop table t1_1025, t2_1025, t3_1025;
create table t1bit (a1 bit(3), a2 bit(3));
create table t2bit (b1 bit(3), b2 bit(3));
@@ -832,3 +832,44 @@ PREPARE stmt FROM "SELECT GROUP_CONCAT(t1a.a ORDER BY 1, t1a.a=0) FROM t1 AS t1a
EXECUTE stmt;
EXECUTE stmt;
DROP TABLE t1;

--echo #
--echo # WL#6098 Eliminate GROUP_CONCAT intermediate result limitation.
--echo # Bug#13387020 GROUP_CONCAT WITH ORDER BY RESULTS ARE TRUNCATED.
--echo #


SET group_concat_max_len= 9999999;
CREATE TABLE t1 (f1 LONGTEXT , f2 INTEGER);
INSERT INTO t1 VALUES (REPEAT('a', 500000), 0), (REPEAT('b', 500000), 1), (REPEAT('c', 500000), 2);

SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1;
SELECT LENGTH(GROUP_CONCAT(DISTINCT f1 ORDER BY f1 DESC)) FROM t1;
SELECT SUBSTRING(GROUP_CONCAT(DISTINCT f1 ORDER BY f1 DESC), 1, 5) FROM t1;
SELECT LENGTH(GROUP_CONCAT(DISTINCT f1)) FROM t1;

SELECT LENGTH(GROUP_CONCAT(UPPER(f1) ORDER BY f2)) FROM t1;
SELECT LENGTH(GROUP_CONCAT(DISTINCT UPPER(f1) ORDER BY f1)) FROM t1;
SELECT SUBSTRING(GROUP_CONCAT(DISTINCT UPPER(f1) ORDER BY f1), 1, 5) FROM t1;
SELECT LENGTH(GROUP_CONCAT(DISTINCT UPPER(f1))) FROM t1;

CREATE TABLE t2 SELECT GROUP_CONCAT(f1 order by f2) FROM t1;
SHOW CREATE TABLE t2;
DROP TABLE t2;

CREATE TABLE t2 SELECT GROUP_CONCAT(UPPER(f1) ORDER BY f2) FROM t1;
SHOW CREATE TABLE t2;
DROP TABLE t2;

SET group_concat_max_len= DEFAULT;
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1;

SET group_concat_max_len= 499999;
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1 WHERE f2 = 0;
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1 GROUP BY f2;

INSERT INTO t1 VALUES (REPEAT('a', 499999), 3), (REPEAT('b', 500000), 4);
SELECT LENGTH(GROUP_CONCAT(f1 ORDER BY f2)) FROM t1 GROUP BY f2;

DROP TABLE t1;
SET group_concat_max_len= DEFAULT;
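As a side note before the server-side changes, a small SQL sketch (again with illustrative names) of the truncation warning these tests still expect when the configured limit is genuinely too small: the value is cut to group_concat_max_len and warning 1260 ("Row N was cut by GROUP_CONCAT()") is raised for each affected row.

SET group_concat_max_len = 499999;
CREATE TABLE t_demo (txt LONGTEXT, ord INT);
INSERT INTO t_demo VALUES (REPEAT('a', 500000), 0);
-- The single 500000-char value exceeds the limit, so the result length is 499999
-- and SHOW WARNINGS reports: Warning 1260 Row 1 was cut by GROUP_CONCAT().
SELECT LENGTH(GROUP_CONCAT(txt ORDER BY ord)) FROM t_demo;
SHOW WARNINGS;
DROP TABLE t_demo;
SET group_concat_max_len = DEFAULT;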
sql/field.cc (31 changed lines)
@@ -7845,7 +7845,7 @@ int Field_blob::store(const char *from,uint length,CHARSET_INFO *cs)
ASSERT_COLUMN_MARKED_FOR_WRITE_OR_COMPUTED;
uint copy_length, new_length;
String_copier copier;
const char *tmp;
char *tmp;
char buff[STRING_BUFFER_USUAL_SIZE];
String tmpstr(buff,sizeof(buff), &my_charset_bin);

@@ -7855,6 +7855,29 @@ int Field_blob::store(const char *from,uint length,CHARSET_INFO *cs)
return 0;
}

if (table->blob_storage) // GROUP_CONCAT with ORDER BY | DISTINCT
{
DBUG_ASSERT(!f_is_hex_escape(flags));
DBUG_ASSERT(field_charset == cs);
DBUG_ASSERT(length <= max_data_length());

new_length= length;
copy_length= table->in_use->variables.group_concat_max_len;
if (new_length > copy_length)
{
int well_formed_error;
new_length= cs->cset->well_formed_len(cs, from, from + copy_length,
new_length, &well_formed_error);
table->blob_storage->set_truncated_value(true);
}
if (!(tmp= table->blob_storage->store(from, new_length)))
goto oom_error;

Field_blob::store_length(new_length);
bmove(ptr + packlength, (uchar*) &tmp, sizeof(char*));
return 0;
}

/*
If the 'from' address is in the range of the temporary 'value'-
object we need to copy the content to a different location or it will be
@@ -7881,15 +7904,14 @@ int Field_blob::store(const char *from,uint length,CHARSET_INFO *cs)
new_length= MY_MIN(max_data_length(), field_charset->mbmaxlen * length);
if (value.alloc(new_length))
goto oom_error;

tmp= const_cast<char*>(value.ptr());

if (f_is_hex_escape(flags))
{
copy_length= my_copy_with_hex_escaping(field_charset,
(char*) value.ptr(), new_length,
tmp, new_length,
from, length);
Field_blob::store_length(copy_length);
tmp= value.ptr();
bmove(ptr + packlength, (uchar*) &tmp, sizeof(char*));
return 0;
}
@@ -7897,7 +7919,6 @@ int Field_blob::store(const char *from,uint length,CHARSET_INFO *cs)
(char*) value.ptr(), new_length,
cs, from, length);
Field_blob::store_length(copy_length);
tmp= value.ptr();
bmove(ptr+packlength,(uchar*) &tmp,sizeof(char*));

return check_conversion_status(&copier, from + length, cs, true);
@@ -629,9 +629,6 @@ void Copy_field::set(uchar *to,Field *from)
Field_blob::store. Is this in order to trigger the call to
well_formed_copy_nchars, by changing the pointer copy->tmp.ptr()?
That call will take place anyway in all known cases.

- The above causes a truncation to MAX_FIELD_WIDTH. Is this the intended
effect? Truncation is handled by well_formed_copy_nchars anyway.
*/
void Copy_field::set(Field *to,Field *from,bool save)
{
@@ -496,8 +496,7 @@ Item *Item_sum::get_tmp_table_item(THD *thd)
}


Field *Item_sum::create_tmp_field(bool group, TABLE *table,
uint convert_blob_length)
Field *Item_sum::create_tmp_field(bool group, TABLE *table)
{
Field *UNINIT_VAR(field);
MEM_ROOT *mem_root= table->in_use->mem_root;
@@ -512,13 +511,7 @@ Field *Item_sum::create_tmp_field(bool group, TABLE *table,
Field_longlong(max_length, maybe_null, name, unsigned_flag);
break;
case STRING_RESULT:
if (max_length/collation.collation->mbmaxlen <= 255 ||
convert_blob_length > Field_varstring::MAX_SIZE ||
!convert_blob_length)
return make_string_field(table);
field= new (mem_root) Field_varstring(convert_blob_length, maybe_null,
name, table->s, collation.collation);
break;
case DECIMAL_RESULT:
field= Field_new_decimal::create_from_item(mem_root, this);
break;
@@ -1264,8 +1257,7 @@ void Item_sum_hybrid::setup_hybrid(THD *thd, Item *item, Item *value_arg)
}


Field *Item_sum_hybrid::create_tmp_field(bool group, TABLE *table,
uint convert_blob_length)
Field *Item_sum_hybrid::create_tmp_field(bool group, TABLE *table)
{
Field *field;
MEM_ROOT *mem_root;
@@ -1275,7 +1267,7 @@ Field *Item_sum_hybrid::create_tmp_field(bool group, TABLE *table,
field= ((Item_field*) args[0])->field;

if ((field= create_tmp_field_from_field(table->in_use, field, name, table,
NULL, convert_blob_length)))
NULL)))
field->flags&= ~NOT_NULL_FLAG;
return field;
}
@@ -1301,7 +1293,7 @@ Field *Item_sum_hybrid::create_tmp_field(bool group, TABLE *table,
Field::NONE, name, decimals);
break;
default:
return Item_sum::create_tmp_field(group, table, convert_blob_length);
return Item_sum::create_tmp_field(group, table);
}
if (field)
field->init(table);
@@ -1658,8 +1650,7 @@ Item *Item_sum_avg::copy_or_same(THD* thd)
}


Field *Item_sum_avg::create_tmp_field(bool group, TABLE *table,
uint convert_blob_len)
Field *Item_sum_avg::create_tmp_field(bool group, TABLE *table)
{
Field *field;
MEM_ROOT *mem_root= table->in_use->mem_root;
@@ -1885,8 +1876,7 @@ Item *Item_sum_variance::copy_or_same(THD* thd)
If we're grouping, then we need some space to serialize variables into, to
pass around.
*/
Field *Item_sum_variance::create_tmp_field(bool group, TABLE *table,
uint convert_blob_len)
Field *Item_sum_variance::create_tmp_field(bool group, TABLE *table)
{
Field *field;
if (group)
@@ -3067,6 +3057,11 @@ int dump_leaf_key(void* key_arg, element_count count __attribute__((unused)),
ER_THD(thd, ER_CUT_VALUE_GROUP_CONCAT),
item->row_count);

/**
To avoid duplicated warnings in Item_func_group_concat::val_str()
*/
if (table && table->blob_storage)
table->blob_storage->set_truncated_value(false);
return 1;
}
return 0;
@@ -3204,6 +3199,8 @@ void Item_func_group_concat::cleanup()
if (table)
{
THD *thd= table->in_use;
if (table->blob_storage)
delete table->blob_storage;
free_tmp_table(thd, table);
table= 0;
if (tree)
@@ -3271,6 +3268,8 @@ void Item_func_group_concat::clear()
reset_tree(tree);
if (unique_filter)
unique_filter->reset();
if (table && table->blob_storage)
table->blob_storage->reset();
/* No need to reset the table as we never call write_row */
}

@@ -3397,6 +3396,7 @@ bool Item_func_group_concat::setup(THD *thd)
{
List<Item> list;
SELECT_LEX *select_lex= thd->lex->current_select;
const bool order_or_distinct= MY_TEST(arg_count_order > 0 || distinct);
DBUG_ENTER("Item_func_group_concat::setup");

/*
@@ -3409,9 +3409,6 @@ bool Item_func_group_concat::setup(THD *thd)
if (!(tmp_table_param= new TMP_TABLE_PARAM))
DBUG_RETURN(TRUE);

/* We'll convert all blobs to varchar fields in the temporary table */
tmp_table_param->convert_blob_length= max_length *
collation.collation->mbmaxlen;
/* Push all not constant fields to the list and create a temp table */
always_null= 0;
for (uint i= 0; i < arg_count_field; i++)
@@ -3451,17 +3448,8 @@ bool Item_func_group_concat::setup(THD *thd)
count_field_types(select_lex, tmp_table_param, all_fields, 0);
tmp_table_param->force_copy_fields= force_copy_fields;
DBUG_ASSERT(table == 0);
if (arg_count_order > 0 || distinct)
if (order_or_distinct)
{
/*
Currently we have to force conversion of BLOB values to VARCHAR's
if we are to store them in TREE objects used for ORDER BY and
DISTINCT. This leads to truncation if the BLOB's size exceeds
Field_varstring::MAX_SIZE.
*/
set_if_smaller(tmp_table_param->convert_blob_length,
Field_varstring::MAX_SIZE);

/*
Force the create_tmp_table() to convert BIT columns to INT
as we cannot compare two table records containg BIT fields
@@ -3495,6 +3483,13 @@ bool Item_func_group_concat::setup(THD *thd)
table->file->extra(HA_EXTRA_NO_ROWS);
table->no_rows= 1;

/**
Initialize blob_storage if GROUP_CONCAT is used
with ORDER BY | DISTINCT and BLOB field count > 0.
*/
if (order_or_distinct && table->s->blob_fields)
table->blob_storage= new Blob_mem_storage();

/*
Need sorting or uniqueness: init tree and choose a function to sort.
Don't reserve space for NULLs: if any of gconcat arguments is NULL,
@@ -3547,6 +3542,16 @@ String* Item_func_group_concat::val_str(String* str)
if (no_appended && tree)
/* Tree is used for sorting as in ORDER BY */
tree_walk(tree, &dump_leaf_key, this, left_root_right);

if (table && table->blob_storage &&
table->blob_storage->is_truncated_value())
{
warning_for_row= true;
push_warning_printf(current_thd, Sql_condition::WARN_LEVEL_WARN,
ER_CUT_VALUE_GROUP_CONCAT, ER(ER_CUT_VALUE_GROUP_CONCAT),
row_count);
}

return &result;
}
@@ -481,8 +481,7 @@ public:
}
virtual void make_unique() { force_copy_fields= TRUE; }
Item *get_tmp_table_item(THD *thd);
virtual Field *create_tmp_field(bool group, TABLE *table,
uint convert_blob_length);
virtual Field *create_tmp_field(bool group, TABLE *table);
virtual bool collect_outer_ref_processor(uchar *param);
bool init_sum_func_check(THD *thd);
bool check_sum_func(THD *thd, Item **ref);
@@ -845,7 +844,7 @@ public:
return has_with_distinct() ? "avg(distinct " : "avg(";
}
Item *copy_or_same(THD* thd);
Field *create_tmp_field(bool group, TABLE *table, uint convert_blob_length);
Field *create_tmp_field(bool group, TABLE *table);
void cleanup()
{
count= 0;
@@ -901,7 +900,7 @@ public:
const char *func_name() const
{ return sample ? "var_samp(" : "variance("; }
Item *copy_or_same(THD* thd);
Field *create_tmp_field(bool group, TABLE *table, uint convert_blob_length);
Field *create_tmp_field(bool group, TABLE *table);
enum Item_result result_type () const { return REAL_RESULT; }
enum_field_types field_type() const { return MYSQL_TYPE_DOUBLE;}
void cleanup()
@@ -979,8 +978,7 @@ protected:
bool any_value() { return was_values; }
void no_rows_in_result();
void restore_to_before_no_rows_in_result();
Field *create_tmp_field(bool group, TABLE *table,
uint convert_blob_length);
Field *create_tmp_field(bool group, TABLE *table);
};
@@ -4529,8 +4529,6 @@ public:
uint group_parts,group_length,group_null_parts;
uint quick_group;
bool using_indirect_summary_function;
/* If >0 convert all blob fields to varchar(convert_blob_length) */
uint convert_blob_length;
CHARSET_INFO *table_charset;
bool schema_table;
/* TRUE if the temp table is created for subquery materialization. */
@@ -4559,7 +4557,7 @@ public:

TMP_TABLE_PARAM()
:copy_field(0), group_parts(0),
group_length(0), group_null_parts(0), convert_blob_length(0),
group_length(0), group_null_parts(0),
schema_table(0), materialized_subquery(0), force_not_null_cols(0),
precomputed_group_by(0),
force_copy_fields(0), bit_fields_as_long(0), skip_create_table(0)
@@ -3893,7 +3893,7 @@ Field *Item::create_field_for_create_select(THD *thd, TABLE *table)
{
Field *def_field, *tmp_field;
return create_tmp_field(thd, table, this, type(),
(Item ***) 0, &tmp_field, &def_field, 0, 0, 0, 0, 0);
(Item ***) 0, &tmp_field, &def_field, 0, 0, 0, 0);
}
@@ -15751,8 +15751,6 @@ const_expression_in_where(COND *cond, Item *comp_item, Field *comp_field,
the record in the original table.
If item == NULL then fill_record() will update
the temporary table
@param convert_blob_length If >0 create a varstring(convert_blob_length)
field instead of blob.

@retval
NULL on error
@@ -15762,21 +15760,10 @@ const_expression_in_where(COND *cond, Item *comp_item, Field *comp_field,

Field *create_tmp_field_from_field(THD *thd, Field *org_field,
const char *name, TABLE *table,
Item_field *item, uint convert_blob_length)
Item_field *item)
{
Field *new_field;

/*
Make sure that the blob fits into a Field_varstring which has
2-byte lenght.
*/
if (convert_blob_length && convert_blob_length <= Field_varstring::MAX_SIZE &&
(org_field->flags & BLOB_FLAG))
new_field= new Field_varstring(convert_blob_length,
org_field->maybe_null(),
org_field->field_name, table->s,
org_field->charset());
else
new_field= org_field->make_new_field(thd->mem_root, table,
table == org_field->table);
if (new_field)
@@ -15820,8 +15807,6 @@ Field *create_tmp_field_from_field(THD *thd, Field *org_field,
update the record in the original table.
If modify_item is 0 then fill_record() will
update the temporary table
@param convert_blob_length If >0 create a varstring(convert_blob_length)
field instead of blob.

@retval
0 on error
@@ -15830,8 +15815,7 @@ Field *create_tmp_field_from_field(THD *thd, Field *org_field,
*/

static Field *create_tmp_field_from_item(THD *thd, Item *item, TABLE *table,
Item ***copy_func, bool modify_item,
uint convert_blob_length)
Item ***copy_func, bool modify_item)
{
bool maybe_null= item->maybe_null;
Field *UNINIT_VAR(new_field);
@@ -15869,17 +15853,6 @@ static Field *create_tmp_field_from_item(THD *thd, Item *item, TABLE *table,
if (item->cmp_type() == TIME_RESULT ||
item->field_type() == MYSQL_TYPE_GEOMETRY)
new_field= item->tmp_table_field_from_field_type(table, true, false);
/*
Make sure that the blob fits into a Field_varstring which has
2-byte lenght.
*/
else if (item->max_length/item->collation.collation->mbmaxlen > 255 &&
convert_blob_length <= Field_varstring::MAX_SIZE &&
convert_blob_length)
new_field= new (mem_root)
Field_varstring(convert_blob_length, maybe_null,
item->name, table->s,
item->collation.collation);
else
new_field= item->make_string_field(table);
new_field->set_derivation(item->collation.derivation);
@@ -15958,8 +15931,6 @@ Field *Item::create_field_for_schema(THD *thd, TABLE *table)
the record in the original table.
If modify_item is 0 then fill_record() will update
the temporary table
@param convert_blob_length If >0 create a varstring(convert_blob_length)
field instead of blob.

@retval
0 on error
@@ -15972,8 +15943,7 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
Field **default_field,
bool group, bool modify_item,
bool table_cant_handle_bit_fields,
bool make_copy_field,
uint convert_blob_length)
bool make_copy_field)
{
Field *result;
Item::Type orig_type= type;
@@ -15991,7 +15961,7 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
case Item::SUM_FUNC_ITEM:
{
Item_sum *item_sum=(Item_sum*) item;
result= item_sum->create_tmp_field(group, table, convert_blob_length);
result= item_sum->create_tmp_field(group, table);
if (!result)
my_error(ER_OUT_OF_RESOURCES, MYF(ME_FATALERROR));
return result;
@@ -16027,7 +15997,7 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
item->maybe_null= orig_item->maybe_null;
}
result= create_tmp_field_from_item(thd, item, table, NULL,
modify_item, convert_blob_length);
modify_item);
*from_field= field->field;
if (result && modify_item)
field->result_field= result;
@@ -16039,7 +16009,7 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
{
*from_field= field->field;
result= create_tmp_field_from_item(thd, item, table, copy_func,
modify_item, convert_blob_length);
modify_item);
if (result && modify_item)
field->result_field= result;
}
@@ -16049,8 +16019,7 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
item->name,
table,
modify_item ? field :
NULL,
convert_blob_length);
NULL);
if (orig_type == Item::REF_ITEM && orig_modify)
((Item_ref*)orig_item)->set_result_field(result);
/*
@@ -16084,8 +16053,7 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
sp_result_field,
item_func_sp->name,
table,
NULL,
convert_blob_length);
NULL);

if (modify_item)
item->set_result_field(result_field);
@@ -16117,7 +16085,7 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
}
return create_tmp_field_from_item(thd, item, table,
(make_copy_field ? 0 : copy_func),
modify_item, convert_blob_length);
modify_item);
case Item::TYPE_HOLDER:
result= ((Item_type_holder *)item)->make_field_by_type(table);
result->set_derivation(item->collation.derivation);
@@ -16420,8 +16388,7 @@ create_tmp_table(THD *thd, TMP_TABLE_PARAM *param, List<Item> &fields,
create_tmp_field(thd, table, arg, arg->type(), &copy_func,
tmp_from_field, &default_field[fieldnr],
group != 0,not_all_columns,
distinct, 0,
param->convert_blob_length);
distinct, false);
if (!new_field)
goto err; // Should be OOM
tmp_from_field++;
@@ -16491,8 +16458,7 @@ create_tmp_table(THD *thd, TMP_TABLE_PARAM *param, List<Item> &fields,
to be usable in this case too.
*/
item->marker == 4 || param->bit_fields_as_long,
force_copy_fields,
param->convert_blob_length);
force_copy_fields);

if (!new_field)
{
@@ -1590,7 +1590,7 @@ bool copy_funcs(Item **func_ptr, const THD *thd);
uint find_shortest_key(TABLE *table, const key_map *usable_keys);
Field* create_tmp_field_from_field(THD *thd, Field* org_field,
const char *name, TABLE *table,
Item_field *item, uint convert_blob_length);
Item_field *item);

bool is_indexed_agg_distinct(JOIN *join, List<Item_field> *out_args);

@@ -1849,8 +1849,7 @@ Field *create_tmp_field(THD *thd, TABLE *table,Item *item, Item::Type type,
Field **def_field,
bool group, bool modify_item,
bool table_cant_handle_bit_fields,
bool make_copy_field,
uint convert_blob_length);
bool make_copy_field);

/*
General routine to change field->ptr of a NULL-terminated array of Field
sql/table.h (57 changed lines)
@@ -977,6 +977,57 @@ struct TABLE_SHARE
};


/**
Class is used as a BLOB field value storage for
intermediate GROUP_CONCAT results. Used only for
GROUP_CONCAT with DISTINCT or ORDER BY options.
*/

class Blob_mem_storage: public Sql_alloc
{
private:
MEM_ROOT storage;
/**
Sign that some values were cut
during saving into the storage.
*/
bool truncated_value;
public:
Blob_mem_storage() :truncated_value(false)
{
init_alloc_root(&storage, MAX_FIELD_VARCHARLENGTH, 0, MYF(0));
}
~ Blob_mem_storage()
{
free_root(&storage, MYF(0));
}
void reset()
{
free_root(&storage, MYF(MY_MARK_BLOCKS_FREE));
truncated_value= false;
}
/**
Fuction creates duplicate of 'from'
string in 'storage' MEM_ROOT.

@param from string to copy
@param length string length

@retval Pointer to the copied string.
@retval 0 if an error occured.
*/
char *store(const char *from, uint length)
{
return (char*) memdup_root(&storage, from, length);
}
void set_truncated_value(bool is_truncated_value)
{
truncated_value= is_truncated_value;
}
bool is_truncated_value() { return truncated_value; }
};


/* Information for one open table */
enum index_hint_type
{
@@ -1247,6 +1298,12 @@ public:

REGINFO reginfo; /* field connections */
MEM_ROOT mem_root;
/**
Initialized in Item_func_group_concat::setup for appropriate
temporary table if GROUP_CONCAT is used with ORDER BY | DISTINCT
and BLOB field count > 0.
*/
Blob_mem_storage *blob_storage;
GRANT_INFO grant;
Filesort_info sort;
/*