Skip to content

Commit

Permalink
MDEV-10508 Mariadb crash on out of disk space during dump import
Browse files Browse the repository at this point in the history
Update info->write_end and info->write_pos together, with no
"return on error" in between; otherwise write_end might end up being
smaller than write_pos.
  • Loading branch information
vuvova committed Sep 28, 2016
1 parent 794c826 commit 735a4a1
Show file tree
Hide file tree
Showing 3 changed files with 86 additions and 2 deletions.
60 changes: 60 additions & 0 deletions mysql-test/r/slowlog_enospace-10508.result
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
call mtr.add_suppression('Error writing file.*errno: 28 ');
create table t1 (a int, b int) engine=memory;
insert t1 select seq, seq+1 from seq_1_to_1000;
set global general_log=0;
set global log_queries_not_using_indexes=1;
set debug_dbug='+d,simulate_file_write_error';
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
select * from t1 where a>10;
set debug_dbug='';
set global general_log=1;
set global log_queries_not_using_indexes=default;
drop table t1;
24 changes: 24 additions & 0 deletions mysql-test/t/slowlog_enospace-10508.test
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
#
# MDEV-10508 Mariadb crash on out of disk space during dump import
#
# Regression test: repeatedly force slow-log writes while every file write
# is simulated to fail (as on ENOSPC, errno 28). Before the fix in
# mysys/mf_iocache.c, my_b_flush_io_cache() updated write_end before a
# possible error return, leaving write_end smaller than write_pos and
# crashing the server on a later log write.
#
# seq_1_to_1000 engine needed to populate t1; debug build needed for
# the simulate_file_write_error injection point.
--source include/have_sequence.inc
--source include/have_debug.inc

# errno 28 (ENOSPC) write errors are expected and must not fail the test.
call mtr.add_suppression('Error writing file.*errno: 28 ');
create table t1 (a int, b int) engine=memory;
insert t1 select seq, seq+1 from seq_1_to_1000;
# Route everything to the slow log only: disable the general log and make
# the unindexed SELECT below qualify for slow-log recording.
set global general_log=0;
set global log_queries_not_using_indexes=1;
# From here on, every file write (including slow-log flushes) fails.
set debug_dbug='+d,simulate_file_write_error';
--disable_result_log
# 50 iterations reliably filled the IO_CACHE buffer and triggered the
# pre-fix crash; any single iteration may or may not hit a flush.
--let $run= 50
while ($run)
{
select * from t1 where a>10;
dec $run;
}
--enable_result_log
# Restore normal logging before cleanup so teardown writes succeed.
set debug_dbug='';
set global general_log=1;
set global log_queries_not_using_indexes=default;
drop table t1;
4 changes: 2 additions & 2 deletions mysys/mf_iocache.c
Original file line number Diff line number Diff line change
Expand Up @@ -1825,8 +1825,6 @@ int my_b_flush_io_cache(IO_CACHE *info, int need_append_buffer_lock)

if ((length=(size_t) (info->write_pos - info->write_buffer)))
{
info->write_end= (info->write_buffer + info->buffer_length -
((info->pos_in_file + length) & (IO_SIZE - 1)));
if (append_cache)
{

Expand All @@ -1848,6 +1846,8 @@ int my_b_flush_io_cache(IO_CACHE *info, int need_append_buffer_lock)

set_if_bigger(info->end_of_file, info->pos_in_file);
}
info->write_end= (info->write_buffer + info->buffer_length -
((info->pos_in_file + length) & (IO_SIZE - 1)));
info->write_pos= info->write_buffer;
++info->disk_writes;
UNLOCK_APPEND_BUFFER;
Expand Down

0 comments on commit 735a4a1

Please sign in to comment.