Commit 39fb7b12 authored by unknown

Merge work:/home/bk/mysql-4.0 into hundin.mysql.fi:/my/bk/mysql-4.0


Docs/manual.texi:
  Auto merged
parents 13ef5bab fd30e5e7
......@@ -7753,6 +7753,10 @@ version 4.0;
@itemize @bullet
@item
In 3.23, @code{INSERT INTO ... SELECT} always had @code{IGNORE} enabled.
As of 4.0.1, MySQL will stop (and possibly roll back) if an error occurs
and you don't specify @code{IGNORE}.
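For example, assuming illustrative tables where @code{t2.payoutID} is a
@code{PRIMARY KEY} and the @code{SELECT} produces values already present
in @code{t2}, the first statement below now aborts with a duplicate-key
error, while the second skips the duplicates as in 3.23:

@example
INSERT INTO t2 (payoutID) SELECT payoutID+10 FROM t1;
INSERT IGNORE INTO t2 (payoutID) SELECT payoutID+10 FROM t1;
@end example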
@item
@file{safe_mysqld} has been renamed to @file{mysqld_safe}.
@item
The old C API functions @code{mysql_drop_db}, @code{mysql_create_db} and
......@@ -46487,6 +46491,8 @@ Our TODO section contains what we plan to have in 4.0. @xref{TODO MySQL 4.0}.
@itemize @bullet
@item
Changed @code{INSERT INTO ... SELECT} to stop on errors by default.
@item
Ignore @code{DATA DIRECTORY} and @code{INDEX DIRECTORY} directives on Windows.
@item
Added boolean fulltext search code. It should be considered early alpha.
......@@ -4,6 +4,8 @@ insert into t1 (bandID,payoutID) VALUES (1,6),(2,6),(3,4),(4,9),(5,10),(6,1),(7,
create table t2 (payoutID SMALLINT UNSIGNED NOT NULL PRIMARY KEY);
insert into t2 (payoutID) SELECT DISTINCT payoutID FROM t1;
insert into t2 (payoutID) SELECT payoutID+10 FROM t1;
Duplicate entry '16' for key 1
insert ignore into t2 (payoutID) SELECT payoutID+10 FROM t1;
select * from t2;
payoutID
1
......
......@@ -7,7 +7,9 @@ create table t1 (bandID MEDIUMINT UNSIGNED NOT NULL PRIMARY KEY, payoutID SMALLI
insert into t1 (bandID,payoutID) VALUES (1,6),(2,6),(3,4),(4,9),(5,10),(6,1),(7,12),(8,12);
create table t2 (payoutID SMALLINT UNSIGNED NOT NULL PRIMARY KEY);
insert into t2 (payoutID) SELECT DISTINCT payoutID FROM t1;
--error 1062
insert into t2 (payoutID) SELECT payoutID+10 FROM t1;
insert ignore into t2 (payoutID) SELECT payoutID+10 FROM t1;
select * from t2;
drop table t1,t2;
#
......
......@@ -121,6 +121,7 @@ sub new
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'vacuum'} = 1; # When using with --fast
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$limits{'NEG'} = 1; # Supports -id
$limits{'alter_add_multi_col'}= 1; #Have ALTER TABLE t add a int,add b int;
......@@ -370,6 +371,7 @@ sub new
$self->{'limits'} = \%limits;
$self->{'double_quotes'} = 0;
$self->{'drop_attr'} = "";
$self->{'transactions'} = 0; # No transactions
$self->{'blob'} = "text(" . $limits{'max_text_size'} .")";
$self->{'text'} = "text(" . $limits{'max_text_size'} .")";
......@@ -565,6 +567,7 @@ sub new
$self->{'text'} = "text";
$self->{'double_quotes'} = 1;
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$self->{"vacuum"} = 1;
$limits{'join_optimizer'} = 1; # Can optimize FROM tables
$limits{'load_data_infile'} = 0;
......@@ -854,6 +857,7 @@ sub new
$self->{'text'} = "long varchar";
$self->{'double_quotes'} = 1;
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$limits{'max_conditions'} = 9999; # Probably big enough
$limits{'max_columns'} = 2000; # From crash-me
......@@ -1095,6 +1099,7 @@ sub new
$self->{'text'} = "text";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$limits{'max_conditions'} = 1258;
$limits{'max_columns'} = 226; # server is disconnecting????
......@@ -1377,6 +1382,7 @@ sub new
$self->{'text'} = "long";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$self->{"vacuum"} = 1;
$limits{'max_conditions'} = 9999; # (Actually not a limit)
......@@ -1633,6 +1639,7 @@ sub new
$self->{'text'} = "byte in table";
$self->{'double_quotes'} = 0; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$self->{'host'} = $host;
$limits{'NEG'} = 1; # Supports -id
......@@ -1834,6 +1841,7 @@ sub new
$self->{'text'} = "blob"; # text ?
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$limits{'max_conditions'} = 97; # We get 'Query is too complex'
$limits{'max_columns'} = 255; # Max number of columns in table
......@@ -2010,6 +2018,7 @@ sub new
$self->{'text'} = "text";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$limits{'max_conditions'} = 1030; # We get 'Query is too complex'
$limits{'max_columns'} = 250; # Max number of columns in table
......@@ -2197,6 +2206,7 @@ sub new
$self->{'text'} = "text";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$self->{"vacuum"} = 1;
$limits{'max_conditions'} = 1030; # We get 'Query is too complex'
......@@ -2389,6 +2399,7 @@ sub new
$self->{'text'} = "long";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$limits{'max_conditions'} = 50; # (Actually not a limit)
$limits{'max_columns'} = 254; # Max number of columns in table
......@@ -2603,6 +2614,7 @@ sub new
$self->{'text'} = "varchar(255)";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$limits{'max_conditions'} = 418; # We get 'Query is too complex'
$limits{'max_columns'} = 500; # Max number of columns in table
......@@ -2772,6 +2784,7 @@ sub new
$self->{'text'} = "character varying(15000)";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$self->{'char_null'} = "cast(NULL as char(1))";
$self->{'numeric_null'} = "cast(NULL as int)";
......@@ -2972,6 +2985,7 @@ sub new
$self->{'text'} = "";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = "";
$self->{'transactions'} = 1; # Transactions enabled
$self->{'char_null'} = "";
$self->{'numeric_null'} = "";
......@@ -3184,6 +3198,7 @@ sub new
$self->{'text'} = "varchar(8000000)";
$self->{'double_quotes'} = 1; # Can handle: 'Walker''s'
$self->{'drop_attr'} = ' restrict';
$self->{'transactions'} = 1; # Transactions enabled
$self->{'error_on_execute_means_zero_rows'}=1;
$limits{'max_conditions'} = 5427; # (Actually not a limit)
......
......@@ -106,6 +106,12 @@ if (!$opt_skip_create)
}
else
{
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
print "Transactions enabled\n" if ($opt_debug);
}
for ($ti = 0; $ti <= $#table_names; $ti++)
{
my $table_name = $table_names[$ti];
......@@ -128,8 +134,14 @@ if (!$opt_skip_create)
$row_count++;
}
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
close(DATA);
}
if ($opt_lock_tables)
{
$dbh->do("UNLOCK TABLES");
......
......@@ -74,11 +74,25 @@ do_many($dbh,$server->create("bench",\@fields,\@index));
print "Insert data into the table\n";
$loop_time=new Benchmark;
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
print "Transactions enabled\n" if ($opt_debug);
}
for ($i=0 ; $i < $opt_row_count ; $i++)
{
$query="insert into bench values ( " . ("$i," x ($opt_start_field_count-1)) . "$i)";
$dbh->do($query) or die $DBI::errstr;
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print "Time for insert ($opt_row_count)",
......
......@@ -127,14 +127,28 @@ end_benchmark($start_time);
sub test_query
{
my($test_text,$result_text,$query,$dbh,$count)=@_;
my($i,$loop_time,$end_time);
my($i,$loop_time,$end_time, $using_transactions);
print $test_text . "\n";
$loop_time=new Benchmark;
$using_transactions=0;
if ($opt_fast && $server->{transactions} && $query=~ /^insert /i)
{
$using_transactions=1;
$dbh->{AutoCommit} = 0;
print "Transactions enabled\n" if ($opt_debug);
}
for ($i=0 ; $i < $count ; $i++)
{
defined(fetch_all_rows($dbh,$query)) or die $DBI::errstr;
}
if ($using_transactions)
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print $result_text . "($count): " .
timestr(timediff($end_time, $loop_time),"all") . "\n\n";
......
......@@ -29,6 +29,7 @@
use DBI;
use Benchmark;
use Data::Dumper;
$opt_loop_count=100000; # number of rows/3
$small_loop_count=10; # Loop for full table retrieval
......@@ -132,6 +133,12 @@ else
$query="insert into bench1 (id,id2,id3,dummy1) values ";
}
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
print "Transactions enabled\n" if ($opt_debug);
}
if (($opt_fast || $opt_fast_insert) && $server->{'limits'}->{'insert_multi_value'})
{
$query_size=$server->{'limits'}->{'query_size'};
......@@ -209,6 +216,12 @@ else
}
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print "Time for insert (" . ($total_rows) . "): " .
timestr(timediff($end_time, $loop_time),"all") . "\n\n";
......@@ -234,6 +247,12 @@ if ($limits->{'unique_index'})
{
print "Testing insert of duplicates\n";
$loop_time=new Benchmark;
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
}
for ($i=0 ; $i < $opt_loop_count ; $i++)
{
$tmpvar^= ((($tmpvar + 63) + $i)*3 % $opt_loop_count);
......@@ -244,6 +263,11 @@ if ($limits->{'unique_index'})
die "Didn't get an error when inserting duplicate record $tmp\n";
}
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print "Time for insert_duplicates (" . ($opt_loop_count) . "): " .
......@@ -577,7 +601,6 @@ if ($limits->{'group_functions'})
}
$sth->finish;
$count++;
$sth=$dbh->prepare($query="select count(*),sum(id+0.0),min(id),max(id),avg(id-0.0) from bench1") or die $DBI::errstr;
$sth->execute or die $DBI::errstr;
......@@ -1250,6 +1273,11 @@ if ($server->small_rollback_segment())
}
$loop_time=new Benchmark;
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
}
$fields=$#fields;
if (($opt_fast || $opt_fast_insert) && $server->{'limits'}->{'insert_multi_value'})
{
......@@ -1297,6 +1325,13 @@ else
$dbh->do($query) or die "Got error $DBI::errstr with query: $query\n";
}
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print "Time for insert_key ($many_keys_loop_count): " .
timestr(timediff($end_time, $loop_time),"all") . "\n\n";
......@@ -1325,11 +1360,24 @@ if ($opt_fast && defined($server->{vacuum}))
print "Testing update of keys\n";
$loop_time=new Benchmark;
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
}
for ($i=0 ; $i< 256; $i++)
{
$dbh->do("update bench1 set field5=1 where field_search=$i")
or die "Got error $DBI::errstr with query: update bench1 set field5=1 where field_search=$i\n";
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print "Time for update_of_primary_key_many_keys (256): " .
timestr(timediff($end_time, $loop_time),"all") . "\n\n";
......@@ -1420,12 +1468,18 @@ if ($limits->{'insert_multi_value'})
"dummy1 char(30)"],
["primary key (id,id2)",
"index index_id3 (id3)"]));
$loop_time=new Benchmark;
if ($opt_lock_tables)
{
$sth = $dbh->do("LOCK TABLES bench1 write") || die $DBI::errstr;
}
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
}
$loop_time=new Benchmark;
print "Inserting $opt_loop_count rows with multiple values\n";
$query="insert into bench1 values ";
$res=$query;
......@@ -1448,6 +1502,11 @@ if ($limits->{'insert_multi_value'})
{
$sth = $dbh->do("UNLOCK TABLES ") || die $DBI::errstr;
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print "Time for multiple_value_insert (" . ($opt_loop_count) . "): " .
......
......@@ -92,6 +92,12 @@ if ($opt_fast && defined($server->{vacuum}))
print "Inserting $opt_loop_count rows\n";
$loop_time=new Benchmark;
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
}
$query="insert into bench1 values (";
$half_done=$opt_loop_count/2;
for ($id=0,$rev_id=$opt_loop_count-1 ; $id < $opt_loop_count ; $id++,$rev_id--)
......@@ -105,6 +111,12 @@ for ($id=0,$rev_id=$opt_loop_count-1 ; $id < $opt_loop_count ; $id++,$rev_id--)
}
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print "Time to insert ($opt_loop_count): " .
timestr(timediff($end_time, $loop_time),"all") . "\n\n";
......
......@@ -109,6 +109,11 @@ if ($opt_fast && $server->{'limits'}->{'load_data_infile'})
}
else
{
if ($opt_fast && $server->{transactions})
{
$dbh->{AutoCommit} = 0;
}
for ($ti = 0; $ti <= $#table_names; $ti++)
{
my $table_name = $table_names[$ti];
......@@ -133,10 +138,17 @@ else
}
close(DATA);
}
if ($opt_lock_tables)
{
do_query($dbh,"UNLOCK TABLES");
}
if ($opt_fast && $server->{transactions})
{
$dbh->commit;
$dbh->{AutoCommit} = 1;
}
$end_time=new Benchmark;
print "Time to insert ($row_count): " .
timestr(timediff($end_time, $loop_time),"all") . "\n";
......
......@@ -1626,8 +1626,7 @@ mysql_execute_command(void)
if (!(res=open_and_lock_tables(thd, tables)))
{
if ((result=new select_insert(tables->table,&lex->field_list,
lex->sql_command == SQLCOM_REPLACE_SELECT ?
DUP_REPLACE : DUP_IGNORE)))
lex->duplicates)))
res=handle_select(thd,lex,result);
}
else
......
......@@ -2188,7 +2188,13 @@ insert:
INSERT { Lex->sql_command = SQLCOM_INSERT; } insert_lock_option opt_ignore insert2 insert_field_spec
replace:
REPLACE { Lex->sql_command = SQLCOM_REPLACE; } replace_lock_option insert2 insert_field_spec
REPLACE
{
LEX *lex=Lex;
lex->sql_command = SQLCOM_REPLACE;
lex->duplicates= DUP_REPLACE;
}
replace_lock_option insert2 insert_field_spec
insert_lock_option:
/* empty */ { Lex->lock_option= TL_WRITE_CONCURRENT_INSERT; }
......