Commit f458acc8 authored by Marko Mäkelä

MDEV-27160 Out of memory in main.long_unique

A part of the test main.long_unique attempts to insert records
with two 60,000,001-byte columns. Let us move that test into
a separate file main.long_unique_big, declared as a big test,
so that it can be skipped in environments with limited memory.
parent 658a1e1f
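For context on the 60,000,001-byte figure: each value the moved test inserts is concat(repeat('sachin',10000000),'<digit>'), i.e. a 6-byte string repeated 10,000,000 times plus a trailing digit. A minimal sketch of the arithmetic, assuming max_allowed_packet has already been raised as the test does (string functions whose result would exceed it return NULL with a warning instead):

select length(concat(repeat('sachin',10000000),'1'));
-- 6 * 10000000 + 1 = 60000001 bytes; two such columns per row are well above the
-- server's usual max_allowed_packet limit, hence the test's
-- SET GLOBAL max_allowed_packet=67108864 (64 MiB) and a fresh connection so the
-- raised global value applies to the session doing the inserts.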
mysql-test/main/long_unique.result
@@ -3,7 +3,6 @@
#table containing single unique column
#table containing keys like unique(a,b,c,d) etc
#then table containing 2 blob unique etc
set @allowed_packet= @@max_allowed_packet;
#table with single long blob column;
create table t1(a blob unique );
insert into t1 values(1),(2),(3),(56),('sachin'),('maria'),(123456789034567891),(null),(null),(123456789034567890);
@@ -1225,44 +1224,6 @@ DB_ROW_HASH_1
33
44
drop table t1,t2;
#very long blob entry;
SET @@GLOBAL.max_allowed_packet=67108864;
connect 'newcon', localhost, root,,;
connection newcon;
show variables like 'max_allowed_packet';
Variable_name Value
max_allowed_packet 67108864
create table t1(a longblob unique, b longblob , c longblob , unique(b,c));
desc t1;
Field Type Null Key Default Extra
a longblob YES UNI NULL
b longblob YES MUL NULL
c longblob YES NULL
show create table t1;
Table Create Table
t1 CREATE TABLE `t1` (
`a` longblob DEFAULT NULL,
`b` longblob DEFAULT NULL,
`c` longblob DEFAULT NULL,
UNIQUE KEY `a` (`a`) USING HASH,
UNIQUE KEY `b` (`b`,`c`) USING HASH
) ENGINE=MyISAM DEFAULT CHARSET=latin1
show keys from t1;
Table Non_unique Key_name Seq_in_index Column_name Collation Cardinality Sub_part Packed Null Index_type Comment Index_comment
t1 0 a 1 a A NULL NULL NULL YES HASH
t1 0 b 1 b A NULL NULL NULL YES HASH
t1 0 b 2 c A NULL NULL NULL YES HASH
insert into t1 values(concat(repeat('sachin',10000000),'1'),concat(repeat('sachin',10000000),'1'),
concat(repeat('sachin',10000000),'1'));
insert into t1 values(concat(repeat('sachin',10000000),'2'),concat(repeat('sachin',10000000),'2'),
concat(repeat('sachin',10000000),'1'));
insert into t1 values(concat(repeat('sachin',10000000),'2'),concat(repeat('sachin',10000000),'2'),
concat(repeat('sachin',10000000),'4'));
ERROR 23000: Duplicate entry 'sachinsachinsachinsachinsachinsachinsachinsachinsachinsachins...' for key 'a'
insert into t1 values(concat(repeat('sachin',10000000),'3'),concat(repeat('sachin',10000000),'1'),
concat(repeat('sachin',10000000),'1'));
ERROR 23000: Duplicate entry 'sachinsachinsachinsachinsachinsachinsachinsachinsachinsachins...' for key 'b'
drop table t1;
#long key unique with different key length
create table t1(a blob, unique(a(3000)));
desc t1;
@@ -1307,9 +1268,6 @@ t1 0 a 2 b A NULL NULL NULL YES HASH
t1 0 c 1 c A NULL 4500 NULL YES HASH
t1 0 c 2 d A NULL NULL NULL YES HASH
drop table t1;
disconnect newcon;
connection default;
SET @@GLOBAL.max_allowed_packet=4194304;
#ext bug
create table t1(a int primary key, b blob unique, c int, d blob , index(c));
show create table t1;
@@ -1477,5 +1435,4 @@ id select_type table type possible_keys key key_len ref rows Extra
SELECT t2.b FROM t1 JOIN t2 ON t1.d = t2.f WHERE t2.pk >= 20;
b
drop table t1,t2;
set @@GLOBAL.max_allowed_packet= @allowed_packet;
# End of 10.4 tests
mysql-test/main/long_unique.test
@@ -10,7 +10,7 @@ let datadir=`select @@datadir`;
--echo #table containing single unique column
--echo #table containing keys like unique(a,b,c,d) etc
--echo #then table containing 2 blob unique etc
set @allowed_packet= @@max_allowed_packet;
--echo #table with single long blob column;
create table t1(a blob unique );
insert into t1 values(1),(2),(3),(56),('sachin'),('maria'),(123456789034567891),(null),(null),(123456789034567890);
@@ -396,28 +396,6 @@ select DB_ROW_HASH_1 from t1,t2 where t1.DB_ROW_HASH_1 = t2.DB_ROW_HASH_2;
select DB_ROW_HASH_1 from t1 inner join t2 on t1.a = t2.DB_ROW_HASH_2;
drop table t1,t2;
--echo #very long blob entry;
SET @@GLOBAL.max_allowed_packet=67108864;
connect ('newcon', localhost, root,,);
--connection newcon
show variables like 'max_allowed_packet';
create table t1(a longblob unique, b longblob , c longblob , unique(b,c));
desc t1;
show create table t1;
show keys from t1;
insert into t1 values(concat(repeat('sachin',10000000),'1'),concat(repeat('sachin',10000000),'1'),
concat(repeat('sachin',10000000),'1'));
insert into t1 values(concat(repeat('sachin',10000000),'2'),concat(repeat('sachin',10000000),'2'),
concat(repeat('sachin',10000000),'1'));
--error ER_DUP_ENTRY
insert into t1 values(concat(repeat('sachin',10000000),'2'),concat(repeat('sachin',10000000),'2'),
concat(repeat('sachin',10000000),'4'));
--error ER_DUP_ENTRY
insert into t1 values(concat(repeat('sachin',10000000),'3'),concat(repeat('sachin',10000000),'1'),
concat(repeat('sachin',10000000),'1'));
drop table t1;
--echo #long key unique with different key length
create table t1(a blob, unique(a(3000)));
desc t1;
@@ -435,9 +413,7 @@ desc t1;
show create table t1;
show keys from t1;
drop table t1;
disconnect newcon;
--connection default
SET @@GLOBAL.max_allowed_packet=4194304;
--echo #ext bug
create table t1(a int primary key, b blob unique, c int, d blob , index(c));
show create table t1;
@@ -556,5 +532,4 @@ SELECT t2.b FROM t1 JOIN t2 ON t1.d = t2.f WHERE t2.pk >= 20;
SELECT t2.b FROM t1 JOIN t2 ON t1.d = t2.f WHERE t2.pk >= 20;
drop table t1,t2;
set @@GLOBAL.max_allowed_packet= @allowed_packet;
--echo # End of 10.4 tests
mysql-test/main/long_unique_big.result
set @allowed_packet= @@max_allowed_packet;
SET GLOBAL max_allowed_packet=67108864;
connect con1, localhost, root,,;
create table t1(a longblob unique, b longblob , c longblob , unique(b,c));
desc t1;
Field Type Null Key Default Extra
a longblob YES UNI NULL
b longblob YES MUL NULL
c longblob YES NULL
show create table t1;
Table Create Table
t1 CREATE TABLE `t1` (
`a` longblob DEFAULT NULL,
`b` longblob DEFAULT NULL,
`c` longblob DEFAULT NULL,
UNIQUE KEY `a` (`a`) USING HASH,
UNIQUE KEY `b` (`b`,`c`) USING HASH
) ENGINE=MyISAM DEFAULT CHARSET=latin1
show keys from t1;
Table Non_unique Key_name Seq_in_index Column_name Collation Cardinality Sub_part Packed Null Index_type Comment Index_comment
t1 0 a 1 a A NULL NULL NULL YES HASH
t1 0 b 1 b A NULL NULL NULL YES HASH
t1 0 b 2 c A NULL NULL NULL YES HASH
insert into t1 values(concat(repeat('sachin',10000000),'1'),concat(repeat('sachin',10000000),'1'),
concat(repeat('sachin',10000000),'1'));
insert into t1 values(concat(repeat('sachin',10000000),'2'),concat(repeat('sachin',10000000),'2'),
concat(repeat('sachin',10000000),'1'));
insert into t1 values(concat(repeat('sachin',10000000),'2'),concat(repeat('sachin',10000000),'2'),
concat(repeat('sachin',10000000),'4'));
ERROR 23000: Duplicate entry 'sachinsachinsachinsachinsachinsachinsachinsachinsachinsachins...' for key 'a'
insert into t1 values(concat(repeat('sachin',10000000),'3'),concat(repeat('sachin',10000000),'1'),
concat(repeat('sachin',10000000),'1'));
ERROR 23000: Duplicate entry 'sachinsachinsachinsachinsachinsachinsachinsachinsachinsachins...' for key 'b'
drop table t1;
disconnect con1;
connection default;
set @@GLOBAL.max_allowed_packet= @allowed_packet;
# End of 10.4 tests
mysql-test/main/long_unique_big.test
# This test may run out of memory in some environments.
--source include/big_test.inc
set @allowed_packet= @@max_allowed_packet;
SET GLOBAL max_allowed_packet=67108864;
connect (con1, localhost, root,,);
create table t1(a longblob unique, b longblob , c longblob , unique(b,c));
desc t1;
show create table t1;
show keys from t1;
insert into t1 values(concat(repeat('sachin',10000000),'1'),concat(repeat('sachin',10000000),'1'),
concat(repeat('sachin',10000000),'1'));
insert into t1 values(concat(repeat('sachin',10000000),'2'),concat(repeat('sachin',10000000),'2'),
concat(repeat('sachin',10000000),'1'));
--error ER_DUP_ENTRY
insert into t1 values(concat(repeat('sachin',10000000),'2'),concat(repeat('sachin',10000000),'2'),
concat(repeat('sachin',10000000),'4'));
--error ER_DUP_ENTRY
insert into t1 values(concat(repeat('sachin',10000000),'3'),concat(repeat('sachin',10000000),'1'),
concat(repeat('sachin',10000000),'1'));
drop table t1;
disconnect con1;
connection default;
set @@GLOBAL.max_allowed_packet= @allowed_packet;
--echo # End of 10.4 tests
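Because the new test sources include/big_test.inc, it is skipped in default test runs. A usage sketch, assuming the standard mysql-test-run.pl driver invoked from the mysql-test directory:

# run tests marked as big, including the moved one
./mysql-test-run.pl --big-test main.long_unique_big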