MySQL Dump Splitter
After watching imports of massive database dumps take forever, I came up with a script that chops a MySQL dump up at the table level. This lets me import the dump table by table, so several tables can be loaded in parallel rather than through one single-threaded import.
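For example, assuming the script below is saved as splitdump.php (the name is just a placeholder) and made executable, it takes the dump file as its first argument and an optional gzip flag as its second:

./splitdump.php dump.sql
./splitdump.php dump.sql.gz gzip

The script itself: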
#!/usr/bin/php
<?php
$start = time();
echo "MySQL Dump Split to Tables \r\n";
set_time_limit(600);
if (!isset($argv[1]))
{
    echo "Please provide the dump file as an argument \r\n";
    echo "If the 2nd argument is gzip it will compress the sql dumps of the tables \r\n";
    exit(1);
}
// Compress the per-table dumps if the 2nd argument is "gzip".
$gzipoutput = false;
if (isset($argv[2]) && $argv[2] == "gzip")
{
    $gzipoutput = true;
}
// Detect whether the input is a plain sql file or a gzip compressed one.
$filetype = mime_content_type($argv[1]);
if ($filetype == "text/plain")
{
    $handle = @fopen($argv[1], "r");
}
else if ($filetype == "application/x-gzip" || $filetype == "application/gzip")
{
    $handle = @gzopen($argv[1], "r");
}
else
{
    echo "Please provide a sql or gzip compressed sql file \r\n";
    exit(1);
}
$header = true;
$headersstuff = '';
if ($handle)
{
    while (!feof($handle))
    {
        $line = fgets($handle);
        if ($line === false)
        {
            break; // end of file or read error
        }
        // Everything before the first table (SET statements, charset, etc.)
        // is collected so it can be replayed at the top of every table dump.
        if ($header == true && !strstr($line, 'Table structure for table'))
        {
            $headersstuff = $headersstuff . $line;
        }
        if (strstr($line, 'Table structure for table'))
        {
            $header = false;
            // Close the previous table's output file, if any.
            if (isset($out))
            {
                if ($gzipoutput == true)
                {
                    gzclose($out);
                }
                else
                {
                    fclose($out);
                }
                unset($out);
            }
            // Pull the table name out of mysqldump's comment line.
            preg_match('%Table structure for table `(.+?)`%', $line, $matches);
            $table_name = $matches[1];
            if ($gzipoutput == true)
            {
                $out = gzopen($table_name . '.sql.gz', 'w');
            }
            else
            {
                $out = fopen($table_name . '.sql', 'w');
            }
            echo "Creating dump for $table_name \r\n";
            // fgets() keeps the trailing newline, so lines are written as-is.
            fwrite($out, $headersstuff);
            fwrite($out, $line);
        }
        else
        {
            if (isset($out))
            {
                fwrite($out, $line);
            }
        }
    }
    // Close the last table's output file, if one was opened.
    if (isset($out))
    {
        if ($gzipoutput == true)
        {
            gzclose($out);
        }
        else
        {
            fclose($out);
        }
        unset($out);
    }
    if ($filetype == "text/plain")
    {
        fclose($handle);
    }
    else
    {
        gzclose($handle);
    }
}
else
{
    echo "Unable to open file \r\n";
    exit(1);
}
$end = time();
$total = $end - $start;
echo "\r\nTotal time taken in seconds: " . $total . "\r\n";
echo "Done \r\n";
?>
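Once the dump is split, the per-table files can each be fed to their own mysql client process. Below is a minimal sketch of that parallel import, assuming the split .sql files are in the current directory and the mysql client is on the PATH; the database name, credentials, and worker count of 4 are placeholders to adapt.

#!/usr/bin/php
<?php
// Parallel import sketch: run up to $workers mysql imports at once.
// USER, PASS and mydb below are placeholders.
$files = glob('*.sql');
$workers = 4;
$running = array();
while ($files || $running)
{
    // Start new imports while there is capacity and work left.
    while (count($running) < $workers && $files)
    {
        $file = array_shift($files);
        $cmd = 'mysql --user=USER --password=PASS mydb < ' . escapeshellarg($file);
        echo "Importing $file \r\n";
        $proc = proc_open($cmd, array(), $pipes);
        if ($proc)
        {
            $running[] = $proc;
        }
    }
    // Reap any imports that have finished.
    foreach ($running as $i => $proc)
    {
        $status = proc_get_status($proc);
        if (!$status['running'])
        {
            proc_close($proc);
            unset($running[$i]);
        }
    }
    usleep(100000); // don't busy-wait
}
echo "All tables imported \r\n";
?>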