Diffstat (limited to 'scripts/mysqlhotcopy.sh')
-rw-r--r--  scripts/mysqlhotcopy.sh  70
1 file changed, 49 insertions(+), 21 deletions(-)
diff --git a/scripts/mysqlhotcopy.sh b/scripts/mysqlhotcopy.sh
index 98a5c5b9f85..f23955da06a 100644
--- a/scripts/mysqlhotcopy.sh
+++ b/scripts/mysqlhotcopy.sh
@@ -569,22 +569,22 @@ sub copy_files {
print "Copying ".@$files." files...\n" unless $opt{quiet};
if ($method =~ /^s?cp\b/) { # cp or scp with optional flags
- my @cp = ($method);
+ my $cp = $method;
# add option to preserve mod time etc of copied files
# not critical, but nice to have
- push @cp, "-p" if $^O =~ m/^(solaris|linux|freebsd|darwin)$/;
+ $cp.= " -p" if $^O =~ m/^(solaris|linux|freebsd|darwin)$/;
# add recursive option for scp
- push @cp, "-r" if $^O =~ /m^(solaris|linux|freebsd|darwin)$/ && $method =~ /^scp\b/;
+ $cp.= " -r" if $^O =~ /m^(solaris|linux|freebsd|darwin)$/ && $method =~ /^scp\b/;
my @non_raid = map { "'$_'" } grep { ! m:/\d{2}/[^/]+$: } @$files;
# add files to copy and the destination directory
- safe_system( @cp, @non_raid, "'$target'" ) if (@non_raid);
+ safe_system( $cp, @non_raid, "'$target'" ) if (@non_raid);
foreach my $rd ( @$raid_dirs ) {
my @raid = map { "'$_'" } grep { m:$rd/: } @$files;
- safe_system( @cp, @raid, "'$target'/$rd" ) if ( @raid );
+ safe_system( $cp, @raid, "'$target'/$rd" ) if ( @raid );
}
}
else
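
The hunk above flattens the copy command from a Perl list (@cp) into a single string ($cp). That matters because the reworked safe_system in the next hunk treats its first argument as the whole command and its last argument as the destination, with everything in between taken as source files. A minimal sketch of that calling convention, using an illustrative helper name that is not part of the patch:

#!/usr/bin/perl
use strict;
use warnings;

# Illustrative only: split an argument list the same way the new
# safe_system does -- command string first, target last, sources between.
sub split_copy_args {
    my @args   = @_;
    my $method = shift @args;   # e.g. "scp -p -r", kept as one string
    my $target = pop @args;     # quoted destination directory
    return ($method, \@args, $target);
}

my ($method, $sources, $target) =
    split_copy_args("scp -p -r", "'db/t1.MYD'", "'db/t1.frm'", "'host:/backup'");
print "$method | @$sources | $target\n";
# prints: scp -p -r | 'db/t1.MYD' 'db/t1.frm' | 'host:/backup'
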
@@ -646,26 +646,54 @@ sub copy_index
}
-sub safe_system
-{
- my @cmd= @_;
-
- if ( $opt{dryrun} )
- {
- print "@cmd\n";
- return;
+sub safe_system {
+ my @sources= @_;
+ my $method= shift @sources;
+ my $target= pop @sources;
+ ## @sources = list of source file names
+
+ ## We have to deal with very long command lines, otherwise they may generate
+ ## "Argument list too long".
+ ## With 10000 tables the command line can be around 1MB, much more than 128kB
+ ## which is the common limit on Linux (can be read from
+ ## /usr/src/linux/include/linux/binfmts.h
+ ## see http://www.linuxjournal.com/article.php?sid=6060).
+
+ my $chunk_limit= 100 * 1024; # 100 kB
+ my @chunk= ();
+ my $chunk_length= 0;
+ foreach (@sources) {
+ push @chunk, $_;
+ $chunk_length+= length($_);
+ if ($chunk_length > $chunk_limit) {
+ safe_simple_system($method, @chunk, $target);
+ @chunk=();
+ $chunk_length= 0;
+ }
}
-
- ## for some reason system fails but backticks works ok for scp...
- print "Executing '@cmd'\n" if $opt{debug};
- my $cp_status = system "@cmd > /dev/null";
- if ($cp_status != 0) {
- warn "Executing command failed ($cp_status). Trying backtick execution...\n";
- ## try something else
- `@cmd` || die "Error: @cmd failed ($?) while copying files.\n";
+ if ($chunk_length > 0) { # do not forget last small chunk
+ safe_simple_system($method, @chunk, $target);
}
}
+sub safe_simple_system {
+ my @cmd= @_;
+
+ if ( $opt{dryrun} ) {
+ print "@cmd\n";
+ }
+ else {
+ ## for some reason system fails but backticks works ok for scp...
+ print "Executing '@cmd'\n" if $opt{debug};
+ my $cp_status = system "@cmd > /dev/null";
+ if ($cp_status != 0) {
+ warn "Executing command failed ($cp_status). Trying backtick execution...\n";
+ ## try something else
+ `@cmd` || die "Error: @cmd failed ($?) while copying files.\n";
+ }
+ }
+}
+
sub retire_directory {
my ( @dir ) = @_;
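
The comments in the new safe_system give the motivation: with around 10000 tables the generated cp/scp command line can grow to roughly 1 MB, well past the common ~128 kB argument-size limit on Linux, so the patch flushes the file list in chunks of about 100 kB and hands each chunk to safe_simple_system. A standalone sketch of the same chunking idea, with safe_simple_system replaced by a caller-supplied code reference (names here are illustrative, not part of mysqlhotcopy):

#!/usr/bin/perl
use strict;
use warnings;

# Flush the accumulated file names whenever their combined length crosses a
# budget kept well below the ~128 kB argument-size limit cited in the patch.
sub run_in_chunks {
    my ($run, $method, $target, @sources) = @_;
    my $chunk_limit  = 100 * 1024;          # same 100 kB budget as the patch
    my @chunk        = ();
    my $chunk_length = 0;
    foreach my $file (@sources) {
        push @chunk, $file;
        $chunk_length += length($file) + 1; # +1 for the separating space
        if ($chunk_length > $chunk_limit) {
            $run->($method, @chunk, $target);
            @chunk        = ();
            $chunk_length = 0;
        }
    }
    $run->($method, @chunk, $target) if @chunk;   # do not forget the last chunk
}

# Example: count how many copy commands 10000 table files would produce.
my $calls = 0;
run_in_chunks(sub { $calls++ }, "cp -p", "'/backup'",
              map { "'/var/lib/mysql/db/t$_.MYD'" } 1 .. 10000);
print "would run $calls copy commands\n";
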