
Commit deb8e78

MDEV-37207: dumping tables for multi delete query doesn't always work
It was observed that, for a multi-table DELETE:

- if there were duplicate rows to delete, no DDLs were dumped to the trace;
- however, when no data was deleted from the tables, the DDLs of the tables were dumped to the trace.

The problem is that store_tables_context_in_trace() is not invoked from mysql_execute_command() in all situations. The reason is that the multi-table DELETE returned an error when it encountered duplicate rows to delete. A multi-table DELETE finds record combinations to delete; when it has found, say, {t1.rowX, t2.rowY, t3.rowZ}, it attempts to save t1.rowX in the temporary table for t1, t2.rowY in the temporary table for t2, and so forth. Saving a row to be deleted into the temporary table can fail with error 121 (HA_ERR_FOUND_DUPP_KEY), and that error was propagated to the caller.

As a fix, multi_delete::send_data() now treats HA_ERR_FOUND_DUPP_KEY as no error and clears it.
1 parent: 71dba7d · commit: deb8e78
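The mechanism described in the commit message can be illustrated with a small stand-alone sketch. This is not MariaDB source: only the error code 121 (HA_ERR_FOUND_DUPP_KEY) and the role of multi_delete::send_data() come from the commit itself; TmpTable, write_tmp_row() and send_data_sketch() are hypothetical stand-ins. The sketch assumes the per-table temporary table keeps one entry per rowid, so a second join match for the same row reports a duplicate key that is safe to ignore rather than a real failure.

// Hedged sketch, not MariaDB source: TmpTable, write_tmp_row() and
// send_data_sketch() are hypothetical; only error code 121
// (HA_ERR_FOUND_DUPP_KEY) is taken from the commit.
#include <cstdio>
#include <set>
#include <vector>

static const int HA_ERR_FOUND_DUPP_KEY= 121;  // handler error for a duplicate key

// Stand-in for the per-table temporary table collecting rows to delete.
struct TmpTable
{
  std::set<int> rowids;                        // unique key on the rowid
  int write_tmp_row(int rowid)                 // hypothetical analogue of a tmp-table write
  {
    return rowids.insert(rowid).second ? 0 : HA_ERR_FOUND_DUPP_KEY;
  }
};

// Analogue of multi_delete::send_data(): called once per joined row combination.
// A duplicate rowid only means the row is already scheduled for deletion,
// so the duplicate-key error is cleared instead of being propagated.
int send_data_sketch(TmpTable &tmp, int rowid)
{
  int error= tmp.write_tmp_row(rowid);
  if (error == HA_ERR_FOUND_DUPP_KEY)
    error= 0;                                  // the gist of the fix
  return error;                                // any other error still propagates
}

int main()
{
  // t1 holds rows 1 and 2; t2 holds 1,1,2,2, so the join matches each t1 row twice.
  TmpTable t1_tmp;
  for (int rowid : std::vector<int>{1, 1, 2, 2})
    if (send_data_sketch(t1_tmp, rowid) != 0)
      return 1;                                // would previously happen on the 2nd match
  std::printf("%zu distinct rows scheduled for deletion\n", t1_tmp.rowids.size());
  return 0;
}

With the duplicate-key error cleared, the DELETE statement completes without error, so mysql_execute_command() goes on to call store_tables_context_in_trace() and the DDLs show up in the optimizer trace, as the new test below expects.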

3 files changed: 66 additions & 0 deletions

mysql-test/main/opt_trace_store_ddls.result

Lines changed: 36 additions & 0 deletions
@@ -477,4 +477,40 @@ CREATE TABLE `t1` (
 ) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci
 drop table t1;
 drop table t2;
+#
+# MDEV-37207: test multi delete of 2 tables
+# trace result should have the ddls for both the tables
+#
+create table t1(id1 int not null auto_increment primary key);
+create table t2(id2 int not null);
+insert into t1 values (1),(2);
+insert into t2 values (1),(1),(2),(2);
+delete t1.*, t2.* from t1, t2 where t1.id1 = t2.id2;
+set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace);
+select ddl
+from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt;
+ddl
+CREATE TABLE `t2` (
+  `id2` int(11) NOT NULL
+) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci
+CREATE TABLE `t1` (
+  `id1` int(11) NOT NULL AUTO_INCREMENT,
+  PRIMARY KEY (`id1`)
+) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci
+# rerun the same delete query
+# Now, trace result should have the ddls for all 2 tables,
+# even though no data is deleted
+delete t1.*, t2.* from t1, t2 where t1.id1 = t2.id2;
+set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace);
+select ddl
+from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt;
+ddl
+CREATE TABLE `t2` (
+  `id2` int(11) NOT NULL
+) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci
+CREATE TABLE `t1` (
+  `id1` int(11) NOT NULL AUTO_INCREMENT,
+  PRIMARY KEY (`id1`)
+) ENGINE=MyISAM AUTO_INCREMENT=3 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_uca1400_ai_ci
+drop table t1, t2;
 drop database db1;

mysql-test/main/opt_trace_store_ddls.test

Lines changed: 28 additions & 0 deletions
@@ -361,4 +361,32 @@ select ddl from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt;
 drop table t1;
 drop table t2;
 
+--echo #
+--echo # MDEV-37207: test multi delete of 2 tables
+--echo # trace result should have the ddls for both the tables
+--echo #
+
+create table t1(id1 int not null auto_increment primary key);
+create table t2(id2 int not null);
+
+insert into t1 values (1),(2);
+insert into t2 values (1),(1),(2),(2);
+
+delete t1.*, t2.* from t1, t2 where t1.id1 = t2.id2;
+
+set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace);
+select ddl
+from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt;
+
+--echo # rerun the same delete query
+--echo # Now, trace result should have the ddls for all 2 tables,
+--echo # even though no data is deleted
+delete t1.*, t2.* from t1, t2 where t1.id1 = t2.id2;
+
+set @ddls= (select json_detailed(json_extract(trace, '$**.ddl')) from information_schema.optimizer_trace);
+select ddl
+from json_table(@ddls, '$[*]' columns(ddl text path '$')) as jt;
+
+drop table t1, t2;
+
 drop database db1;

sql/sql_delete.cc

Lines changed: 2 additions & 0 deletions
@@ -1419,6 +1419,8 @@ int multi_delete::send_data(List<Item> &values)
         }
         found++;
       }
+      else
+        error= 0; /* Clear HA_ERR_FOUND_DUPP_{KEY,UNIQUE} error */
     }
   }
 }
