/184.168.182.1/wp-content/plugins/updraftplus/backup.php
PHP | 1119 lines | 875 code | 157 blank | 87 comment | 264 complexity | cca8e75544e52dca3e10c02014d9ea3a MD5 | raw file
<?php
// UpdraftPlus backup engine. This file is only loaded while a backup is actually
// running, which reduces memory usage on ordinary site page-loads.
if (!defined('UPDRAFTPLUS_DIR')) die('No direct access allowed');
if (!class_exists('UpdraftPlus_PclZip')) require(UPDRAFTPLUS_DIR.'/class-zip.php');
// This file contains functions that are only needed/loaded when a backup is running (reduces memory usage on other site pages)
class UpdraftPlus_Backup {

	// Index of the zip part currently being produced (0-based; part 0 carries no
	// numeric suffix in the file name, part N is suffixed N+1)
	public $index = 0;

	private $zipfiles_added;
	private $zipfiles_added_thisrun = 0;
	private $zipfiles_dirbatched;
	private $zipfiles_batched;

	// Maximum size of a single zip part, in bytes, before splitting into a new part
	// (overridden from the job's 'split_every' setting in create_zip())
	private $zip_split_every = 838860800; # 800Mb

	private $zip_last_ratio = 1;

	// Which entity ('plugins', 'themes', 'uploads', 'others', ...) is being zipped
	private $whichone;

	private $zip_basename = '';
	private $zipfiles_lastwritetime;

	// Path to a working binary zip executable once found.
	// 0 = unknown; false = failed
	public $binzip = 0;

	private $dbhandle;
	private $dbhandle_isgz;

	// Class name of the zip engine in use: UpdraftPlus_ZipArchive (default),
	// UpdraftPlus_BinZip or UpdraftPlus_PclZip (fallback)
	private $use_zip_object = 'UpdraftPlus_ZipArchive';

	public $debug = false;

	// Filesystem path of the directory where backup files are written
	private $updraft_dir;

	// Per-entity job state (e.g. current part index), persisted in job data
	private $job_file_entities = array();
public function __construct($backup_files) {
	// Decide which zip engine to begin with. $backup_files === 'no' means this run
	// will not back up files, so the cheapest engine is selected and probing skipped.
	global $updraftplus;
	$this->debug = UpdraftPlus_Options::get_updraft_option('updraft_debug_mode');
	$this->updraft_dir = $updraftplus->backups_dir_location();

	if ('no' === $backup_files) {
		$this->use_zip_object = 'UpdraftPlus_PclZip';
		return;
	}

	// false means 'tried + failed'; whereas 0 means 'not yet tried'
	$may_try_binzip = (!defined('UPDRAFTPLUS_PREFERPCLZIP') || UPDRAFTPLUS_PREFERPCLZIP != true) && (!defined('UPDRAFTPLUS_NO_BINZIP') || !UPDRAFTPLUS_NO_BINZIP) && $updraftplus->current_resumption < 9;

	if (0 === $this->binzip && $may_try_binzip) {
		// Disallow binzip on OpenVZ when we're not sure there's plenty of memory.
		// A very large reading probably comes from the hypervisor, so the VPS's real
		// memory is unknown - treat that the same as low memory.
		$vz_memory_doubtful = false;
		if (@file_exists('/proc/user_beancounters') && @file_exists('/proc/meminfo') && @is_readable('/proc/meminfo')) {
			$meminfo = @file_get_contents('/proc/meminfo', false, null, -1, 200);
			if (is_string($meminfo) && preg_match('/MemTotal:\s+(\d+) kB/', $meminfo, $matches)) {
				$memory_mb = $matches[1]/1024;
				$vz_log = "OpenVZ; reported memory: ".round($memory_mb, 1)." Mb";
				if ($memory_mb < 1024 || $memory_mb > 8192) {
					$vz_memory_doubtful = true;
					$vz_log .= " (will not use BinZip)";
				}
				$updraftplus->log($vz_log);
			}
		}
		if (!$vz_memory_doubtful) {
			$updraftplus->log('Checking if we have a zip executable available');
			$found_binzip = $updraftplus->find_working_bin_zip();
			if (is_string($found_binzip)) {
				$updraftplus->log("Zip engine: found/will use a binary zip: $found_binzip");
				$this->binzip = $found_binzip;
				$this->use_zip_object = 'UpdraftPlus_BinZip';
			}
		}
	}

	# In tests, PclZip was found to be 25% slower than ZipArchive
	$ziparchive_unavailable = (defined('UPDRAFTPLUS_PREFERPCLZIP') && UPDRAFTPLUS_PREFERPCLZIP == true) || !class_exists('ZipArchive') || !class_exists('UpdraftPlus_ZipArchive') || (!extension_loaded('zip') && !method_exists('ZipArchive', 'AddFile'));
	if ('UpdraftPlus_PclZip' != $this->use_zip_object && empty($this->binzip) && $ziparchive_unavailable) {
		$updraftplus->log("Zip engine: ZipArchive is not available or is disabled (will use PclZip if needed)");
		$this->use_zip_object = 'UpdraftPlus_PclZip';
	}
}
public function create_zip($create_from_dir, $whichone, $backup_file_basename, $index) {
	// Create one or more zip parts for the entity $whichone, starting at part $index.
	// Note: $create_from_dir can be an array or a string.
	// Returns an array of created zip basenames (just the new ones), an array of the
	// already-existing parts if this entity is apparently finished, or false on error.
	// The caller is required to update its own $index from $this->index afterwards.
	@set_time_limit(900);
	$original_index = $index;
	$this->index = $index;
	$this->whichone = $whichone;
	global $updraftplus;
	// Split threshold (bytes), from the user's setting with an enforced minimum
	$this->zip_split_every = max((int)$updraftplus->jobdata_get('split_every'), UPDRAFTPLUS_SPLIT_MIN)*1048576;
	if ('others' != $whichone) $updraftplus->log("Beginning creation of dump of $whichone (split every: ".round($this->zip_split_every/1048576,1)." Mb)");
	if (is_string($create_from_dir) && !file_exists($create_from_dir)) {
		$flag_error = true;
		$updraftplus->log("Does not exist: $create_from_dir");
		if ('mu-plugins' == $whichone) {
			// A missing mu-plugins directory is normal on sites with none installed
			if (!function_exists('get_mu_plugins')) require_once(ABSPATH.'wp-admin/includes/plugin.php');
			$mu_plugins = get_mu_plugins();
			if (count($mu_plugins) == 0) {
				$updraftplus->log("There appear to be no mu-plugins to back up. Will not raise an error.");
				$flag_error = false;
			}
		}
		if ($flag_error) $updraftplus->log(sprintf(__("%s - could not back this entity up; the corresponding directory does not exist (%s)", 'updraftplus'), $whichone, $create_from_dir), 'error');
		return false;
	}
	// Naming convention: part 0 has no suffix; part N (N>0) is suffixed N+1
	$itext = (empty($index)) ? '' : ($index+1);
	$base_path = $backup_file_basename.'-'.$whichone.$itext.'.zip';
	$full_path = $this->updraft_dir.'/'.$base_path;
	$time_now = time();
	if (file_exists($full_path)) {
		# Gather any further files that may also exist
		$files_existing = array();
		while (file_exists($full_path)) {
			$files_existing[] = $base_path;
			$time_mod = (int)@filemtime($full_path);
			$updraftplus->log($base_path.": this file has already been created (age: ".round($time_now-$time_mod,1)." s)");
			if ($time_mod>100 && ($time_now-$time_mod)<30) {
				// Recently modified: another run is probably still active on it
				$updraftplus->terminate_due_to_activity($base_path, $time_now, $time_mod);
			}
			$index++;
			// Bug-fix: subsequent parts use suffix $index+1 (part 1 is "-{$whichone}2.zip"),
			// so probe with that convention; probing suffix $index ("-{$whichone}1.zip")
			// would never match and later parts were not being gathered
			$base_path = $backup_file_basename.'-'.$whichone.($index+1).'.zip';
			$full_path = $this->updraft_dir.'/'.$base_path;
		}
	}
	// Temporary file, to be able to detect actual completion (upon which, it is renamed)
	// New (Jun-13) - be more aggressive in removing temporary files from earlier attempts - anything >=600 seconds old of this kind
	$updraftplus->clean_temporary_files('_'.$updraftplus->nonce."-$whichone", 600);
	// Firstly, make sure that the temporary file is not already being written to - which can happen if a resumption takes place whilst an old run is still active
	$zip_name = $full_path.'.tmp';
	$time_mod = (int)@filemtime($zip_name);
	if (file_exists($zip_name) && $time_mod>100 && ($time_now-$time_mod)<30) {
		$updraftplus->terminate_due_to_activity($zip_name, $time_now, $time_mod);
	} elseif (file_exists($zip_name)) {
		$updraftplus->log("File exists ($zip_name), but was apparently not modified within the last 30 seconds, so we assume that any previous run has now terminated (time_mod=$time_mod, time_now=$time_now, diff=".($time_now-$time_mod).")");
	}
	// Now, check for other forms of temporary file, which would indicate that some activity is going on (even if it hasn't made it into the main zip file yet)
	// Note: this doesn't catch PclZip temporary files
	$d = dir($this->updraft_dir);
	$match = '_'.$updraftplus->nonce."-".$whichone;
	while (false !== ($e = $d->read())) {
		if ('.' == $e || '..' == $e || !is_file($this->updraft_dir.'/'.$e)) continue;
		$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.([A-Za-z0-9]){6}?$/i", $e);
		$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $e);
		if ($time_now-filemtime($this->updraft_dir.'/'.$e) < 30 && ($ziparchive_match || (0 != $updraftplus->current_resumption && $binzip_match))) {
			$updraftplus->terminate_due_to_activity($this->updraft_dir.'/'.$e, $time_now, filemtime($this->updraft_dir.'/'.$e));
		}
	}
	@$d->close();
	clearstatcache();
	if (isset($files_existing)) {
		# Because of zip-splitting, the mere fact that files exist is not enough to indicate that the entity is finished. For that, we need to also see that no subsequent file has been started.
		# Q. What if the previous runner died in between zips, and it is our job to start the next one? A. The next temporary file is created before finishing the former zip, so we are safe (and we are also safe-guarded by the updated value of the index being stored in the database).
		return $files_existing;
	}
	$this->log_account_space();
	$this->zip_microtime_start = microtime(true);
	# The paths in the zip should then begin with '$whichone', having removed WP_CONTENT_DIR from the front
	$zipcode = $this->make_zipfile($create_from_dir, $backup_file_basename, $whichone);
	if ($zipcode !== true) {
		$updraftplus->log("ERROR: Zip failure: Could not create $whichone zip (".$this->index." / $index)");
		$updraftplus->log(sprintf(__("Could not create %s zip. Consult the log file for more information.",'updraftplus'),$whichone), 'error');
		# The caller is required to update $index from $this->index
		return false;
	} else {
		$itext = (empty($this->index)) ? '' : ($this->index+1);
		$full_path = $this->updraft_dir.'/'.$backup_file_basename.'-'.$whichone.$itext.'.zip';
		if (file_exists($full_path.'.tmp')) {
			if (@filesize($full_path.'.tmp') === 0) {
				$updraftplus->log("Did not create $whichone zip (".$this->index.") - not needed");
				@unlink($full_path.'.tmp');
			} else {
				// Record a checksum, then move the finished .tmp into its final name
				$sha = sha1_file($full_path.'.tmp');
				$updraftplus->jobdata_set('sha1-'.$whichone.$this->index, $sha);
				@rename($full_path.'.tmp', $full_path);
				$timetaken = max(microtime(true)-$this->zip_microtime_start, 0.000001);
				$kbsize = filesize($full_path)/1024;
				$rate = round($kbsize/$timetaken, 1);
				$updraftplus->log("Created $whichone zip (".$this->index.") - ".round($kbsize,1)." Kb in ".round($timetaken,1)." s ($rate Kb/s) (SHA1 checksum: $sha)");
				// We can now remove any left-over temporary files from this job
			}
		} elseif ($this->index > $original_index) {
			$updraftplus->log("Did not create $whichone zip (".$this->index.") - not needed");
			# Added 12-Feb-2014 (to help multiple morefiles)
			$this->index--;
		} else {
			$updraftplus->log("Looked-for $whichone zip (".$this->index.") was not found (".basename($full_path).".tmp)", 'warning');
		}
		$updraftplus->clean_temporary_files('_'.$updraftplus->nonce."-$whichone", 0);
	}
	# Create the results array to send back (just the new ones, not any prior ones)
	$files_existing = array();
	$res_index = 0;
	for ($i = $original_index; $i <= $this->index; $i++) {
		$itext = (empty($i)) ? '' : ($i+1);
		$full_path = $this->updraft_dir.'/'.$backup_file_basename.'-'.$whichone.$itext.'.zip';
		if (file_exists($full_path)) {
			$files_existing[$res_index] = $backup_file_basename.'-'.$whichone.$itext.'.zip';
		}
		$res_index++;
	}
	return $files_existing;
}
// Dispatch to the relevant function
public function cloud_backup($backup_array) {
	// Upload the finished backup files ($backup_array: file names keyed by entity)
	// to every configured remote storage service, then run the retention pruning.
	global $updraftplus;
	$services = $updraftplus->just_one($updraftplus->jobdata_get('service'));
	if (!is_array($services)) $services = array($services);
	$updraftplus->jobdata_set('jobstatus', 'clouduploading');
	add_action('http_api_curl', array($updraftplus, 'add_curl_capath'));
	// Initialise the upload progress indicator ('t' = total uploads expected)
	$upload_status = $updraftplus->jobdata_get('uploading_substatus');
	if (!is_array($upload_status) || !isset($upload_status['t'])) {
		$upload_status = array('i' => 0, 'p' => 0, 't' => max(1, count($services))*count($backup_array));
		$updraftplus->jobdata_set('uploading_substatus', $upload_status);
	}
	$do_prune = array();
	# If there was no check-in last time, then attempt a different service first - in case a time-out on the attempted service leads to no activity and everything stopping
	if (count($services) > 1 && !empty($updraftplus->no_checkin_last_time)) {
		$updraftplus->log('No check-in last time: will try a different remote service first');
		array_push($services, array_shift($services));
		// Rotate by one more on alternate resumptions, so each service gets a turn at going first
		if (1 == ($updraftplus->current_resumption % 2) && count($services) > 2) array_push($services, array_shift($services));
	}
	$errors_before_uploads = $updraftplus->error_count();
	foreach ($services as $ind => $service) {
		# Used for logging by record_upload_chunk()
		$this->current_service = $service;
		# Used when deciding whether to delete the local file
		$this->last_service = ($ind+1 >= count($services) && $errors_before_uploads == $updraftplus->error_count()) ? true : false;
		$updraftplus->log("Cloud backup selection: ".$service);
		@set_time_limit(900);
		$method_include = UPDRAFTPLUS_DIR.'/methods/'.$service.'.php';
		if (file_exists($method_include)) require_once($method_include);
		if ($service == "none" || $service == "") {
			$updraftplus->log("No remote despatch: user chose no remote backup service");
			// Still run the retention/pruning operation on the purely-local backups
			$this->prune_retained_backups(array("none" => array(null, null)));
		} else {
			$updraftplus->log("Beginning dispatch of backup to remote ($service)");
			// Build the subset of files not already uploaded to this service
			$sarray = array();
			foreach ($backup_array as $bind => $file) {
				if ($updraftplus->is_uploaded($file, $service)) {
					$updraftplus->log("Already uploaded to $service: $file");
				} else {
					$sarray[$bind] = $file;
				}
			}
			if (count($sarray) > 0) {
				$objname = "UpdraftPlus_BackupModule_".$service;
				if (class_exists($objname)) {
					$remote_obj = new $objname;
					// Bug-fix: upload only the not-yet-uploaded files ($sarray). Previously the
					// whole $backup_array was passed, which made the is_uploaded() filtering
					// above pointless and re-sent files already present on the remote storage.
					$pass_to_prune = $remote_obj->backup($sarray);
					$do_prune[$service] = array($remote_obj, $pass_to_prune);
				} else {
					$updraftplus->log("Unexpected error: no class '$objname' was found ($method_include)");
					$updraftplus->log(__("Unexpected error: no class '$objname' was found (your UpdraftPlus installation seems broken - try re-installing)",'updraftplus'), 'error');
				}
			}
		}
	}
	if (!empty($do_prune)) $this->prune_retained_backups($do_prune);
	remove_action('http_api_curl', array($updraftplus, 'add_curl_capath'));
}
// Carries out retain behaviour. Pass in a valid S3 or FTP object and path if relevant.
// Services *must* be an array
// Keys are service identifiers; each value is array($method_object, $passback), as
// assembled by cloud_backup(). Deletes backup sets beyond the configured retain
// counts (files and database counted separately), then saves the pruned history.
public function prune_retained_backups($services) {
	global $updraftplus;
	// If they turned off deletion on local backups, then there is nothing to do
	// NOTE(review): in_array() tests array *values*, but the caller uses 'none' as a
	// *key* (cloud_backup passes array("none" => array(null, null))), so this
	// early-return may never fire - verify against the caller
	if (UpdraftPlus_Options::get_updraft_option('updraft_delete_local') == 0 && count($services) == 1 && in_array('none', $services)) {
		$updraftplus->log("Prune old backups from local store: nothing to do, since the user disabled local deletion and we are using local backups");
		return;
	}
	$updraftplus->jobdata_set('jobstatus', 'pruning');
	// Number of backups to retain - files
	$updraft_retain = UpdraftPlus_Options::get_updraft_option('updraft_retain', 2);
	$updraft_retain = (is_numeric($updraft_retain)) ? $updraft_retain : 1;
	// Number of backups to retain - db
	$updraft_retain_db = UpdraftPlus_Options::get_updraft_option('updraft_retain_db', $updraft_retain);
	$updraft_retain_db = (is_numeric($updraft_retain_db)) ? $updraft_retain_db : 1;
	$updraftplus->log("Retain: beginning examination of existing backup sets; user setting: retain_files=$updraft_retain, retain_db=$updraft_retain_db");
	// Returns an array, most recent first, of backup sets
	$backup_history = $updraftplus->get_backup_history();
	// These two counters appear unused below (superseded by the per-key arrays)
	$db_backups_found = 0;
	$file_backups_found = 0;
	$updraftplus->log("Number of backup sets in history: ".count($backup_history));
	$backupable_entities = $updraftplus->get_backupable_file_entities(true);
	// Running counts of how many backups of each kind have been seen so far; since the
	// history is iterated most-recent-first, anything past the retain count is oldest
	$database_backups_found = array();
	$file_entities_backups_found = array();
	foreach ($backupable_entities as $entity => $info) {
		$file_entities_backups_found[$entity] = 0;
	}
	foreach ($backup_history as $backup_datestamp => $backup_to_examine) {
		$files_to_prune = array();
		// $backup_to_examine is an array of file names, keyed on db/plugins/themes/uploads
		// The new backup_history array is saved afterwards, so remember to unset the ones that are to be deleted
		$updraftplus->log(sprintf("Examining backup set with datestamp: %s (%s)", $backup_datestamp, gmdate('M d Y H:i:s', $backup_datestamp)));
		# Databases
		foreach ($backup_to_examine as $key => $data) {
			// Database entries have keys beginning 'db'; skip the '-size' bookkeeping keys
			if ('db' != strtolower(substr($key, 0, 2)) || '-size' == substr($key, -5, 5)) continue;
			$database_backups_found[$key] = (empty($database_backups_found[$key])) ? 1 : $database_backups_found[$key] + 1;
			$fname = (is_string($data)) ? $data : $data[0];
			$updraftplus->log("$backup_datestamp: $key: this set includes a database (".$fname."); db count is now ".$database_backups_found[$key]);
			if ($database_backups_found[$key] > $updraft_retain_db) {
				$updraftplus->log("$backup_datestamp: $key: over retain limit ($updraft_retain_db); will delete this database");
				if (!empty($data)) {
					foreach ($services as $service => $sd) $this->prune_file($service, $data, $sd[0], $sd[1]);
				}
				unset($backup_to_examine[$key]);
				$updraftplus->record_still_alive();
			}
		}
		# File entities (plugins/themes/uploads/others/...)
		foreach ($backupable_entities as $entity => $info) {
			if (!empty($backup_to_examine[$entity])) {
				$file_entities_backups_found[$entity]++;
				if ($file_entities_backups_found[$entity] > $updraft_retain) {
					$prune_this = $backup_to_examine[$entity];
					if (is_string($prune_this)) $prune_this = array($prune_this);
					foreach ($prune_this as $prune_file) {
						$updraftplus->log("$entity: $backup_datestamp: over retain limit ($updraft_retain); will delete this file ($prune_file)");
						$files_to_prune[] = $prune_file;
					}
					unset($backup_to_examine[$entity]);
				}
			}
		}
		# Actually delete the files
		foreach ($services as $service => $sd) {
			$this->prune_file($service, $files_to_prune, $sd[0], $sd[1]);
			$updraftplus->record_still_alive();
		}
		// Get new result, post-deletion; anything left in this set?
		$contains_files = 0;
		foreach ($backupable_entities as $entity => $info) {
			if (isset($backup_to_examine[$entity])) {
				$contains_files = 1;
				break;
			}
		}
		$contains_db = 0;
		foreach ($backup_to_examine as $key => $data) {
			if ('db' == strtolower(substr($key, 0, 2)) && '-size' != substr($key, -5, 5)) {
				$contains_db = 1;
				break;
			}
		}
		// Delete backup set completely if empty, o/w just remove DB
		// We search on the four keys which represent data, allowing other keys to be used to track other things
		if (!$contains_files && !$contains_db) {
			$updraftplus->log("$backup_datestamp: this backup set is now empty; will remove from history");
			unset($backup_history[$backup_datestamp]);
			// Also remove the job's log file, if identifiable via the stored nonce
			if (isset($backup_to_examine['nonce'])) {
				$fullpath = $this->updraft_dir.'/log.'.$backup_to_examine['nonce'].'.txt';
				if (is_file($fullpath)) {
					$updraftplus->log("$backup_datestamp: deleting log file (log.".$backup_to_examine['nonce'].".txt)");
					@unlink($fullpath);
				} else {
					$updraftplus->log("$backup_datestamp: corresponding log file not found - must have already been deleted");
				}
			} else {
				$updraftplus->log("$backup_datestamp: no nonce record found in the backup set, so cannot delete any remaining log file");
			}
		} else {
			$updraftplus->log("$backup_datestamp: this backup set remains non-empty ($contains_files/$contains_db); will retain in history");
			$backup_history[$backup_datestamp] = $backup_to_examine;
		}
	# Loop over backup sets
	}
	$updraftplus->log("Retain: saving new backup history (sets now: ".count($backup_history).") and finishing retain operation");
	UpdraftPlus_Options::update_updraft_option('updraft_backup_history', $backup_history, false);
}
# $dofiles: An array of files (or a single string for one file)
private function prune_file($service, $dofiles, $method_object = null, $object_passback = null) {
	// Delete the given backup file(s) from local storage, then hand the list to the
	// remote storage method's own deletion routine (when one was supplied).
	global $updraftplus;
	$dofiles = is_array($dofiles) ? $dofiles : array($dofiles);
	foreach ($dofiles as $dofile) {
		if (empty($dofile)) continue;
		$updraftplus->log("Delete file: $dofile, service=$service");
		$local_path = $this->updraft_dir.'/'.$dofile;
		// delete it if it's locally available
		if (file_exists($local_path)) {
			$updraftplus->log("Deleting local copy ($dofile)");
			@unlink($local_path);
		}
	}
	// Despatch to the particular method's deletion routine
	if (null !== $method_object) $method_object->delete($dofiles, $object_passback);
}
public function send_results_email($final_message) {
	// E-mail a report of the backup run to the configured address(es), attaching the
	// log file when appropriate (compressed first, if large). $final_message is the
	// human-readable status line to embed in the message body.
	global $updraftplus;
	$debug_mode = UpdraftPlus_Options::get_updraft_option('updraft_debug_mode');
	$sendmail_to = $updraftplus->just_one_email(UpdraftPlus_Options::get_updraft_option('updraft_email'));
	if (is_string($sendmail_to)) $sendmail_to = array($sendmail_to);
	// Describe what this backup contained, from the recorded job statuses
	$backup_files = $updraftplus->jobdata_get('backup_files');
	$backup_db = $updraftplus->jobdata_get('backup_database');
	if ('finished' == $backup_files && ('finished' == $backup_db || 'encrypted' == $backup_db)) {
		$backup_contains = __("Files and database", 'updraftplus');
	} elseif ('finished' == $backup_files) {
		$backup_contains = ($backup_db == "begun") ? __("Files (database backup has not completed)", 'updraftplus') : __("Files only (database was not part of this particular schedule)", 'updraftplus');
	} elseif ($backup_db == 'finished' || $backup_db == 'encrypted') {
		$backup_contains = ($backup_files == "begun") ? __("Database (files backup has not completed)", 'updraftplus') : __("Database only (files were not part of this particular schedule)", 'updraftplus');
	} else {
		$backup_contains = __("Unknown/unexpected error - please raise a support request", 'updraftplus');
	}
	$append_log = '';
	$attachments = array();
	$error_count = 0;
	if ($updraftplus->error_count() > 0) {
		$append_log .= __('Errors encountered:', 'updraftplus')."\r\n";
		$attachments[0] = $updraftplus->logfile_name;
		foreach ($updraftplus->errors as $err) {
			if (is_wp_error($err)) {
				foreach ($err->get_error_messages() as $msg) {
					$append_log .= "* ".rtrim($msg)."\r\n";
				}
			} elseif (is_array($err) && 'error' == $err['level']) {
				$append_log .= "* ".rtrim($err['message'])."\r\n";
			} elseif (is_string($err)) {
				$append_log .= "* ".rtrim($err)."\r\n";
			}
			$error_count++;
		}
		$append_log .= "\r\n";
	}
	// Robustness fix: jobdata_get() may return a non-array (e.g. false); normalise here,
	// since count($warnings) is also used below in the subject and the send filter
	$warnings = $updraftplus->jobdata_get('warnings');
	if (!is_array($warnings)) $warnings = array();
	if (count($warnings) > 0) {
		$append_log .= __('Warnings encountered:', 'updraftplus')."\r\n";
		$attachments[0] = $updraftplus->logfile_name;
		foreach ($warnings as $err) {
			$append_log .= "* ".rtrim($err)."\r\n";
		}
		$append_log .= "\r\n";
	}
	if ($debug_mode && '' != $updraftplus->logfile_name && !in_array($updraftplus->logfile_name, $attachments)) {
		$append_log .= "\r\n".__('The log file has been attached to this email.', 'updraftplus');
		$attachments[0] = $updraftplus->logfile_name;
	}
	// We have to use the action in order to set the MIME type on the attachment - by default, WordPress just puts application/octet-stream
	$subject = apply_filters('updraft_report_subject', sprintf(__('Backed up: %s', 'updraftplus'), get_bloginfo('name')).' (UpdraftPlus '.$updraftplus->version.') '.get_date_from_gmt(gmdate('Y-m-d H:i:s', time()), 'Y-m-d H:i'), $error_count, count($warnings));
	$body = apply_filters('updraft_report_body', __('Backup of:').' '.site_url()."\r\nUpdraftPlus ".__('WordPress backup is complete','updraftplus').".\r\n".__('Backup contains:','updraftplus').' '.$backup_contains."\r\n".__('Latest status:', 'updraftplus').' '.$final_message."\r\n\r\n".$updraftplus->wordshell_random_advert(0)."\r\n".$append_log, $final_message, $backup_contains, $updraftplus->errors, $warnings);
	$this->attachments = apply_filters('updraft_report_attachments', $attachments);
	if (count($this->attachments) > 0) add_action('phpmailer_init', array($this, 'phpmailer_init'));
	$attach_size = 0;
	$unlink_files = array();
	foreach ($this->attachments as $ind => $attach) {
		// Large log files (>6Mb) are gzip-compressed before attaching
		if ($attach == $updraftplus->logfile_name && filesize($attach) > 6*1048576) {
			$updraftplus->log("Log file is large (".round(filesize($attach)/1024, 1)." Kb): will compress before e-mailing");
			if (!$handle = fopen($attach, "r")) {
				$updraftplus->log("Error: Failed to open log file for reading: ".$attach);
			} else {
				// Bug-fix: this message previously said "for reading", but this handle is opened for writing
				if (!$whandle = gzopen($attach.'.gz', 'w')) {
					$updraftplus->log("Error: Failed to open log file for writing: ".$attach.".gz");
				} else {
					while (false !== ($line = @stream_get_line($handle, 131072, "\n"))) {
						@gzwrite($whandle, $line."\n");
					}
					fclose($handle);
					gzclose($whandle);
					$this->attachments[$ind] = $attach.'.gz';
					// The temporary .gz is removed again after the e-mails are sent
					$unlink_files[] = $attach.'.gz';
				}
			}
		}
		$attach_size += filesize($this->attachments[$ind]);
	}
	foreach ($sendmail_to as $ind => $mailto) {
		// The filter allows add-ons to veto individual recipients
		if (false === apply_filters('updraft_report_sendto', true, $mailto, $error_count, count($warnings), $ind)) continue;
		foreach (explode(',', $mailto) as $sendmail_addr) {
			$updraftplus->log("Sending email ('$backup_contains') report (attachments: ".count($attachments).", size: ".round($attach_size/1024, 1)." Kb) to: ".substr($sendmail_addr, 0, 5)."...");
			wp_mail(trim($sendmail_addr), $subject, $body);
		}
	}
	foreach ($unlink_files as $file) @unlink($file);
	do_action('updraft_report_finished');
	if (count($this->attachments) > 0) remove_action('phpmailer_init', array($this, 'phpmailer_init'));
}
// The purpose of this function is to make sure that the options table is put in the database first, then the users table, then the usermeta table; and after that the core WP tables - so that when restoring we restore the core tables first
private function backup_db_sorttables($a, $b) {
	// usort()-style comparator over table names: options, users, usermeta come first
	// (in that order), then other WP core tables, then everything else alphabetically.
	global $updraftplus, $wpdb;
	if ($a == $b) return 0;
	$our_table_prefix = $this->table_prefix;
	// Highest-priority tables, most important first; the earlier equality check
	// guarantees $a and $b cannot both match the same entry
	foreach (array('options', 'users', 'usermeta') as $priority_table) {
		if ($a == $our_table_prefix.$priority_table) return -1;
		if ($b == $our_table_prefix.$priority_table) return 1;
	}
	if (empty($our_table_prefix)) return strcmp($a, $b);
	try {
		$core_tables = array_merge($wpdb->tables, $wpdb->global_tables, $wpdb->ms_global_tables);
	} catch (Exception $e) {
	}
	// Fallback list of core tables, for when $wpdb did not supply them
	if (empty($core_tables)) $core_tables = array('terms', 'term_taxonomy', 'term_relationships', 'commentmeta', 'comments', 'links', 'postmeta', 'posts', 'site', 'sitemeta', 'blogs', 'blogversions');
	$stripped_a = $updraftplus->str_replace_once($our_table_prefix, '', $a);
	$stripped_b = $updraftplus->str_replace_once($our_table_prefix, '', $b);
	$a_is_core = in_array($stripped_a, $core_tables);
	$b_is_core = in_array($stripped_b, $core_tables);
	// Core tables sort before non-core; otherwise, plain alphabetical order
	if ($a_is_core && !$b_is_core) return -1;
	if ($b_is_core && !$a_is_core) return 1;
	return strcmp($a, $b);
}
private function log_account_space() {
	// Log the hosting account's free disk quota, when the information is obtainable.
	# Don't waste time if space is huge
	if (!empty($this->account_space_oodles)) return;
	global $updraftplus;
	$quota_info = $updraftplus->get_hosting_disk_quota_free();
	if (!is_array($quota_info)) return;
	// Indices per get_hosting_disk_quota_free(): [1] used, [2] total, [3] free - TODO confirm
	$percent_used = round(100*$quota_info[1]/(max($quota_info[2], 1)), 1);
	$updraftplus->log(sprintf('Free disk space in account: %s (%s used)', round($quota_info[3]/1048576, 1)." Mb", "$percent_used %"));
}
// This function is resumable
// Creates (or resumes creating) the zip files for every enabled file entity, and
// returns a $backup_array describing the zips produced: entity => file name (string)
// or array of part file names, plus 'entity[N]-size' keys holding byte sizes.
// $job_status: 'finished' means only assemble the array from already-existing files;
// anything else means actually (resume) creating the zips.
public function backup_dirs($job_status) {
	global $updraftplus;
	if(!$updraftplus->backup_time) $updraftplus->backup_time_nonce();
	//get the blog name and rip out all non-alphanumeric chars other than _
	$blog_name = preg_replace('/[^A-Za-z0-9_]/','', str_replace(' ','_', substr(get_bloginfo(), 0, 32)));
	if (!$blog_name) $blog_name = 'non_alpha_name';
	$blog_name = apply_filters('updraftplus_blog_name', $blog_name);
	$backup_file_basename = 'backup_'.get_date_from_gmt(gmdate('Y-m-d H:i:s', $updraftplus->backup_time), 'Y-m-d-Hi').'_'.$blog_name.'_'.$updraftplus->nonce;
	$backup_array = array();
	$possible_backups = $updraftplus->get_backupable_file_entities(true);
	// Was there a check-in last time? If not, then reduce the amount of data attempted
	if ($job_status != 'finished' && $updraftplus->current_resumption >= 2 && $updraftplus->current_resumption<=10) {
		$maxzipbatch = $updraftplus->jobdata_get('maxzipbatch', 26214400);
		if ((int)$maxzipbatch < 1) $maxzipbatch = 26214400;
		# NOTYET: Possible amendment to original algorithm; not just no check-in, but if the check in was very early (can happen if we get a very early checkin for some trivial operation, then attempt something too big)
		if (!empty($updraftplus->no_checkin_last_time)) {
			if ($updraftplus->current_resumption - $updraftplus->last_successful_resumption > 2) {
				$this->try_split = true;
			} else {
				// Reduce the batch to 75% of its previous value, but never below 20Mb
				$new_maxzipbatch = max(floor($maxzipbatch * 0.75), 20971520);
				if ($new_maxzipbatch < $maxzipbatch) {
					$updraftplus->log("No check-in was detected on the previous run - as a result, we are reducing the batch amount (old=$maxzipbatch, new=$new_maxzipbatch)");
					$updraftplus->jobdata_set('maxzipbatch', $new_maxzipbatch);
					$updraftplus->jobdata_set('maxzipbatch_ceiling', $new_maxzipbatch);
				}
			}
		}
	}
	if($job_status != 'finished' && !$updraftplus->really_is_writable($this->updraft_dir)) {
		$updraftplus->log("Backup directory (".$this->updraft_dir.") is not writable, or does not exist");
		$updraftplus->log(sprintf(__("Backup directory (%s) is not writable, or does not exist.", 'updraftplus'), $this->updraft_dir), 'error');
		return array();
	}
	$this->job_file_entities = $updraftplus->jobdata_get('job_file_entities');
	# This is just used for the visual feedback (via the 'substatus' key)
	$which_entity = 0;
	# e.g. plugins, themes, uploads, others
	# $whichdir might be an array (if $youwhat is 'more')
	foreach ($possible_backups as $youwhat => $whichdir) {
		if (isset($this->job_file_entities[$youwhat])) {
			$index = (int)$this->job_file_entities[$youwhat]['index'];
			if (empty($index)) $index=0;
			// Zip-part naming convention: part 0 has no suffix; part N (N>0) is suffixed N+1
			$indextext = (0 == $index) ? '' : (1+$index);
			$zip_file = $this->updraft_dir.'/'.$backup_file_basename.'-'.$youwhat.$indextext.'.zip';
			# Split needed?
			$split_every=max((int)$updraftplus->jobdata_get('split_every'), 250);
			if (file_exists($zip_file) && filesize($zip_file) > $split_every*1048576) {
				$index++;
				$this->job_file_entities[$youwhat]['index'] = $index;
				$updraftplus->jobdata_set('job_file_entities', $this->job_file_entities);
			}
			// Populate prior parts of array, if we're on a subsequent zip file
			if ($index >0) {
				for ($i=0; $i<$index; $i++) {
					// First $itext: file-name suffix ('' for part 0, then 2, 3, ...)
					$itext = (0 == $i) ? '' : ($i+1);
					$backup_array[$youwhat][$i] = $backup_file_basename.'-'.$youwhat.$itext.'.zip';
					$z = $this->updraft_dir.'/'.$backup_file_basename.'-'.$youwhat.$itext.'.zip';
					// Second $itext: suffix for the '-size' bookkeeping key ('' for part 0, then 1, 2, ...)
					$itext = (0 == $i) ? '' : $i;
					if (file_exists($z)) $backup_array[$youwhat.$itext.'-size'] = filesize($z);
				}
			}
			if ('finished' == $job_status) {
				// Add the final part of the array
				if ($index >0) {
					$fbase = $backup_file_basename.'-'.$youwhat.($index+1).'.zip';
					$z = $this->updraft_dir.'/'.$fbase;
					if (file_exists($z)) {
						$backup_array[$youwhat][$index] = $fbase;
						$backup_array[$youwhat.$index.'-size'] = filesize($z);
					}
				} else {
					$backup_array[$youwhat] = $backup_file_basename.'-'.$youwhat.'.zip';
					if (file_exists($zip_file)) $backup_array[$youwhat.'-size'] = filesize($zip_file);
				}
			} else {
				$which_entity++;
				$updraftplus->jobdata_set('filecreating_substatus', array('e' => $youwhat, 'i' => $which_entity, 't' => count($this->job_file_entities)));
				if ('others' == $youwhat) $updraftplus->log("Beginning backup of other directories found in the content directory (index: $index)");
				# Apply a filter to allow add-ons to provide their own method for creating a zip of the entity
				$created = apply_filters('updraftplus_backup_makezip_'.$youwhat, $whichdir, $backup_file_basename, $index);
				# If the filter did not lead to something being created, then use the default method
				if ($created === $whichdir) {
					// http://www.phpconcept.net/pclzip/user-guide/53
					/* First parameter to create is:
						An array of filenames or dirnames,
						or
						A string containing the filename or a dirname,
						or
						A string containing a list of filename or dirname separated by a comma.
					*/
					if ('others' == $youwhat) {
						$dirlist = $updraftplus->backup_others_dirlist(true);
					} elseif ('uploads' == $youwhat) {
						$dirlist = $updraftplus->backup_uploads_dirlist(true);
					} else {
						$dirlist = $whichdir;
						// NOTE(review): array_shift() keeps only the FIRST directory here; and if
						// $dirlist becomes a string, the count() below relies on count() of a
						// non-array returning 1 (pre-PHP-7.2 behaviour) - verify before porting
						if (is_array($dirlist)) $dirlist=array_shift($dirlist);
					}
					if (count($dirlist)>0) {
						$created = $this->create_zip($dirlist, $youwhat, $backup_file_basename, $index);
						# Now, store the results
						if (!is_string($created) && !is_array($created)) $updraftplus->log("$youwhat: create_zip returned an error");
					} else {
						$updraftplus->log("No backup of $youwhat: there was nothing found to back up");
					}
				}
				if ($created != $whichdir && (is_string($created) || is_array($created))) {
					if (is_string($created)) $created=array($created);
					foreach ($created as $findex => $fname) {
						$backup_array[$youwhat][$index] = $fname;
						// '-size' key suffix: '' for part 0, then the (0-based) part number
						$itext = ($index == 0) ? '' : $index;
						$index++;
						$backup_array[$youwhat.$itext.'-size'] = filesize($this->updraft_dir.'/'.$fname);
					}
				}
				// create_zip() leaves the last-used part index in $this->index; persist it
				// so a later resumption carries on from the right part
				$this->job_file_entities[$youwhat]['index'] = $this->index;
				$updraftplus->jobdata_set('job_file_entities', $this->job_file_entities);
			}
		} else {
			$updraftplus->log("No backup of $youwhat: excluded by user's options");
		}
	}
	return $backup_array;
}
// Resumable entry point for the file-backup phase. A saved status flag ('begun'/'finished')
// indicates *total* completion only; resuming when not strictly needed wastes time but is
// harmless, so the flag simply saves resources.
public function resumable_backup_of_files($resumption_no) {
	global $updraftplus;

	// Backup the configured directories, returning a numerically indexed array of backup file paths
	$status = $updraftplus->jobdata_get('backup_files');

	// Guard clause: this run may not be meant to contain files at all
	if ('finished' != $status && 'begun' != $status) {
		$updraftplus->log('This backup run is not intended for files - skipping');
		return array();
	}

	if ('finished' == $status) {
		$updraftplus->log("Creation of backups of directories: already finished");
		$backup_array = $updraftplus->jobdata_get('backup_files_array');
		if (!is_array($backup_array)) $backup_array = array();
		// Check each previously-created archive for recent activity (another process may still be writing)
		foreach ($backup_array as $files) {
			$files_list = is_array($files) ? $files : array($files);
			foreach ($files_list as $file) {
				$updraftplus->check_recent_modification($this->updraft_dir.'/'.$file);
			}
		}
		return $backup_array;
	}

	// $status is 'begun': either a fresh start or a resumption of an interrupted run
	if ($resumption_no > 0) {
		$updraftplus->log("Creation of backups of directories: had begun; will resume");
	} else {
		$updraftplus->log("Creation of backups of directories: beginning");
	}
	$updraftplus->jobdata_set('jobstatus', 'filescreating');
	$backup_array = $this->backup_dirs($status);
	$updraftplus->jobdata_set('backup_files_array', $backup_array);
	$updraftplus->jobdata_set('backup_files', 'finished');
	$updraftplus->jobdata_set('jobstatus', 'filescreated');

	// NOTE: renaming archives to their N-of-M names here is NOT crash-safe; that has to happen
	// at cloud-upload time instead (a previous in-place renaming attempt was removed for this reason).
	return $backup_array;
}
/* Creates the database backup. This function is resumable, using the following method:
- Each table is written out to ($final_filename).table.tmp
- When the writing finishes, it is renamed to ($final_filename).table
- When all tables are finished, they are concatenated into the final file

@param string $already_done Job progress indicator: 'begun' (default), 'finished' or 'encrypted';
                            for the latter two, only the expected final filename is returned.
@return string|false Basename of the final database dump, or false on failure.
*/
public function backup_db($already_done = 'begun') {

	global $updraftplus, $wpdb;

	$this->table_prefix = $updraftplus->get_table_prefix(true);
	$this->table_prefix_raw = $updraftplus->get_table_prefix(false);

	$errors = 0;

	// Make sure a backup time/nonce exists and the log file is open before doing any work
	if (!$updraftplus->backup_time) $updraftplus->backup_time_nonce();
	if (!$updraftplus->opened_log_time) $updraftplus->logfile_open($updraftplus->nonce);

	// Get the blog name and rip out all non-alphanumeric chars other than _
	$blog_name = preg_replace('/[^A-Za-z0-9_]/','', str_replace(' ','_', substr(get_bloginfo(), 0, 32)));
	if (!$blog_name) $blog_name = 'non_alpha_name';
	$blog_name = apply_filters('updraftplus_blog_name', $blog_name);

	$file_base = 'backup_'.get_date_from_gmt(gmdate('Y-m-d H:i:s', $updraftplus->backup_time), 'Y-m-d-Hi').'_'.$blog_name.'_'.$updraftplus->nonce;
	$backup_file_base = $this->updraft_dir.'/'.$file_base;

	if ('finished' == $already_done) return basename($backup_file_base.'-db.gz');
	if ('encrypted' == $already_done) return basename($backup_file_base.'-db.gz.crypt');

	$updraftplus->jobdata_set('jobstatus', 'dbcreating');

	$binsqldump = $updraftplus->find_working_sqldump();

	$total_tables = 0;

	$all_tables = $wpdb->get_results("SHOW TABLES", ARRAY_N);
	// Fix: create_function() was deprecated in PHP 7.2 and removed in PHP 8.0; an anonymous
	// function is the direct equivalent (extract the first column of each SHOW TABLES row).
	$all_tables = array_map(function ($a) { return $a[0]; }, $all_tables);

	if (0 == count($all_tables)) {
		$extra = ($updraftplus->newresumption_scheduled) ? ' - '.__('please wait for the rescheduled attempt', 'updraftplus') : '';
		$updraftplus->log("Error: No database tables found (SHOW TABLES returned nothing)".$extra);
		$updraftplus->log(__("No database tables found", 'updraftplus').$extra, 'error');
		die;
	}

	// Put the options table first
	usort($all_tables, array($this, 'backup_db_sorttables'));

	if (!$updraftplus->really_is_writable($this->updraft_dir)) {
		$updraftplus->log("The backup directory (".$this->updraft_dir.") is not writable.");
		$updraftplus->log($this->updraft_dir.": ".__('The backup directory is not writable - the database backup is expected to shortly fail.','updraftplus'), 'warning');
		# Why not just fail now? We saw a bizarre case when the results of really_is_writable() changed during the run.
	}

	$stitch_files = array();
	$how_many_tables = count($all_tables);
	$found_options_table = false;

	foreach ($all_tables as $table) {

		$manyrows_warning = false;
		// Fix: $rows previously leaked from the prior iteration; a table whose SHOW TABLE STATUS
		// lacked a Rows value would inherit the previous table's count when deciding whether to
		// use the binary mysqldump path. isset(null) is false, so first-iteration semantics hold.
		$rows = null;
		$total_tables++;

		// Increase script execution time-limit to 15 min for every table.
		@set_time_limit(900);
		// The table file may already exist if we have produced it on a previous run
		$table_file_prefix = $file_base.'-db-table-'.$table.'.table';

		if ($this->table_prefix_raw.'options' == $table) $found_options_table = true;

		if (file_exists($this->updraft_dir.'/'.$table_file_prefix.'.gz')) {
			$updraftplus->log("Table $table: corresponding file already exists; moving on");
			$stitch_files[] = $table_file_prefix;
		} else {
			# === is needed, otherwise 'false' matches (i.e. prefix does not match)
			if (empty($this->table_prefix) || strpos($table, $this->table_prefix) === 0 ) {

				// Open file, store the handle
				$opened = $this->backup_db_open($this->updraft_dir.'/'.$table_file_prefix.'.tmp.gz', true);
				if (false === $opened) return false;

				// Create the SQL statements
				$this->stow("# " . sprintf('Table: %s' ,$updraftplus->backquote($table)) . "\n");
				$updraftplus->jobdata_set('dbcreating_substatus', array('t' => $table, 'i' => $total_tables, 'a' => $how_many_tables));

				$table_status = $wpdb->get_row("SHOW TABLE STATUS WHERE Name='$table'");
				if (isset($table_status->Rows)) {
					$rows = $table_status->Rows;
					$updraftplus->log("Table $table: Total expected rows (approximate): ".$rows);
					$this->stow("# Approximate rows expected in table: $rows\n");
					if ($rows > UPDRAFTPLUS_WARN_DB_ROWS) {
						$manyrows_warning = true;
						$updraftplus->log(sprintf(__("Table %s has very many rows (%s) - we hope your web hosting company gives you enough resources to dump out that table in the backup", 'updraftplus'), $table, $rows), 'warning', 'manyrows_'.$table);
					}
				}

				# Don't include the job data for any backups - so that when the database is restored, it doesn't continue an apparently incomplete backup
				if (!empty($this->table_prefix) && $this->table_prefix.'sitemeta' == $table) {
					$where = 'meta_key NOT LIKE "updraft_jobdata_%"';
				} elseif (!empty($this->table_prefix) && $this->table_prefix.'options' == $table) {
					$where = 'option_name NOT LIKE "updraft_jobdata_%"';
				} else {
					$where = '';
				}

				# TODO: If no check-in last time, then try the other method (but - any point in retrying slow method on large tables??)
				# TODO: Lower this from 10,000 if the feedback is good
				// Large tables go via the (much faster) binary mysqldump when one is available;
				// anything else, or a bindump failure, falls back to the PHP-level dumper.
				$bindump = (isset($rows) && $rows>10000 && is_string($binsqldump)) ? $this->backup_table_bindump($binsqldump, $table, $where) : false;
				if (true !== $bindump) $this->backup_table($table, $where);

				if (!empty($manyrows_warning)) $updraftplus->log_removewarning('manyrows_'.$table);

				// Close file, then atomically rename .tmp.gz -> .gz to mark this table as complete
				// (PHP 8.2 deprecated "${var}" interpolation; "{$var}" produces identical output)
				$updraftplus->log("Table $table: finishing file ({$table_file_prefix}.gz - ".round(filesize($this->updraft_dir.'/'.$table_file_prefix.'.tmp.gz')/1024,1)." Kb)");
				$this->close($this->dbhandle);
				rename($this->updraft_dir.'/'.$table_file_prefix.'.tmp.gz', $this->updraft_dir.'/'.$table_file_prefix.'.gz');
				$updraftplus->something_useful_happened();
				$stitch_files[] = $table_file_prefix;
			} else {
				$total_tables--;
				$updraftplus->log("Skipping table (lacks our prefix (".$this->table_prefix.")): $table");
			}

		}
	}

	if (!$found_options_table) {
		$updraftplus->log(__('The database backup appears to have failed - the options table was not found', 'updraftplus'), 'warning', 'optstablenotfound');
		$time_this_run = time()-$updraftplus->opened_log_time;
		if ($time_this_run > 2000) {
			# Have seen this happen; not sure how, but it was apparently deterministic; if the current process had been running for a long time, then apparently all database commands silently failed.
			# If we have been running that long, then the resumption may be far off; bring it closer
			$updraftplus->reschedule(60);
			$updraftplus->log("Have been running very long, and it seems the database went away; terminating");
			$updraftplus->record_still_alive();
			die;
		}
	} else {
		$updraftplus->log_removewarning('optstablenotfound');
	}

	// Race detection - with zip files now being resumable, these can more easily occur, with two running side-by-side
	$backup_final_file_name = $backup_file_base.'-db.gz';
	$time_now = time();
	$time_mod = (int)@filemtime($backup_final_file_name);
	if (file_exists($backup_final_file_name) && $time_mod>100 && ($time_now-$time_mod)<30) {
		$updraftplus->terminate_due_to_activity($backup_final_file_name, $time_now, $time_mod);
	} elseif (file_exists($backup_final_file_name)) {
		$updraftplus->log("The final database file ($backup_final_file_name) exists, but was apparently not modified within the last 30 seconds (time_mod=$time_mod, time_now=$time_now, diff=".($time_now-$time_mod)."). Thus we assume that another UpdraftPlus terminated; thus we will continue.");
	}

	// Finally, stitch the files together
	$opendb = $this->backup_db_open($backup_final_file_name, true);
	if (false === $opendb) return false;
	$this->backup_db_header();

	// We delay the unlinking because if two runs go concurrently and fail to detect each other (should not happen, but there's no harm in assuming the detection failed) then that leads to files missing from the db dump
	$unlink_files = array();

	$sind = 1;
	foreach ($stitch_files as $table_file) {
		$updraftplus->log("{$table_file}.gz ($sind/$how_many_tables): adding to final database dump");
		if (!$handle = gzopen($this->updraft_dir.'/'.$table_file.'.gz', "r")) {
			$updraftplus->log("Error: Failed to open database file for reading: {$table_file}.gz");
			$updraftplus->log(__("Failed to open database file for reading:", 'updraftplus').' '.$table_file.'.gz', 'error');
			$errors++;
		} else {
			while ($line = gzgets($handle, 2048)) { $this->stow($line); }
			gzclose($handle);
			$unlink_files[] = $this->updraft_dir.'/'.$table_file.'.gz';
		}
		$sind++;
	}

	if (defined("DB_CHARSET")) {
		$this->stow("/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;\n/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;\n/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;\n");
	}

	$updraftplus->log($file_base.'-db.gz: finished writing out complete database file ('.round(filesize($backup_final_file_name)/1024,1).' Kb)');
	if (!$this->close($this->dbhandle)) {
		$updraftplus->log('An error occurred whilst closing the final database file');
		$updraftplus->log(__('An error occurred whilst closing the final database file', 'updraftplus'), 'error');
		$errors++;
	}

	// Per-table files are only removed once the stitched dump has been fully written and closed
	foreach ($unlink_files as $unlink_file) @unlink($unlink_file);

	if ($errors > 0) {
		return false;
	} else {
		# We no longer encrypt here - because the operation can take long, we made it resumable and moved it to the upload loop
		$updraftplus->jobdata_set('jobstatus', 'dbcreated');
		$sha = sha1_file($backup_final_file_name);
		$updraftplus->jobdata_set('sha1-db0', $sha);
		$updraftplus->log("Total database tables backed up: $total_tables (".basename($backup_final_file_name).": checksum (SHA1): $sha)");
		return basename($backup_file_base.'-db.gz');
	}

} //wp_db_backup
/**
 * Dumps a single table using the binary mysqldump executable, streaming its stdout
 * into the currently-open backup file via $this->stow().
 *
 * @param string $potsql     Path to a working mysqldump binary (from find_working_sqldump())
 * @param string $table_name Name of the table to dump
 * @param string $where      Optional SQL WHERE clause (without the 'WHERE' keyword); '' for none
 * @return bool|int true on success with output; otherwise false or mysqldump's non-zero exit code
 */
private function backup_table_bindump($potsql, $table_name, $where) {

	$microtime = microtime(true);

	global $updraftplus;

	// Pass the password via a temporary defaults file, so it never appears on the
	// command line (where it would be visible to other users via ps).
	$pfile = md5(time().rand()).'.tmp';
	file_put_contents($this->updraft_dir.'/'.$pfile, "[mysqldump]\npassword=".DB_PASSWORD."\n");
	// The file contains the database password; restrict its permissions for the short time it exists
	@chmod($this->updraft_dir.'/'.$pfile, 0600);

	// Fix: escapeshellarg() already wraps its result in single quotes. The previous code added a
	// second pair of quotes around it ("--where='".escapeshellarg($where)."'"), which cancelled the
	// quoting and split any WHERE clause containing spaces into multiple shell arguments.
	if ($where) $where = "--where=".escapeshellarg($where);

	// DB_NAME is now escaped too, consistent with the other shell arguments
	$exec = "cd ".escapeshellarg($this->updraft_dir)."; $potsql --defaults-file=$pfile $where --max_allowed_packet=1M --quote-names --add-drop-table --skip-comments --skip-set-charset --allow-keywords --dump-date --extended-insert --user=".escapeshellarg(DB_USER)." --host=".escapeshellarg(DB_HOST)." ".escapeshellarg(DB_NAME)." ".escapeshellarg($table_name);

	$ret = false;
	$any_output = false;
	$writes = 0;
	$handle = popen($exec, "r");
	if ($handle) {
		// Stream mysqldump's output line-by-line into the backup file
		while (!feof($handle)) {
			$w = fgets($handle);
			if ($w) {
				$this->stow($w);
				$writes++;
				$any_output = true;
			}
		}
		$ret = pclose($handle);
		if ($ret != 0) {
			$updraftplus->log("Binary mysqldump: error (code: $ret)");
			// Keep counter of failures? Change value of binsqldump?
		} else {
			// Only report success if mysqldump actually produced output; a silent zero-exit
			// run leaves $ret === 0 (not true), so the caller falls back to the PHP dumper
			if ($any_output) {
				$updraftplus->log("Table $table_name: binary mysqldump finished (writes: $writes) in ".sprintf("%.02f",max(microtime(true)-$microtime,0.00001))." seconds");
				$ret = true;
			}
		}
	} else {
		$updraftplus->log("Binary mysqldump error: bindump popen failed");
	}

	# Clean temporary files
	@unlink($this->updraft_dir.'/'.$pfile);

	return $ret;
}
- /**
- * Taken partially from phpMyAdmin and partially from
- * Alain Wolf, Zurich - Switzerland
- * Website: http://restkultur.ch/personal/wolf/scripts/db_backup/
- * Modified by Scott Merrill (http://www.skippy.net/)
- * to use the WordPress $wpdb object
- * @param string $table
- * @param string $segment
- * @return void
- */
- private function backup_table($table, $where = '', $segment = 'none') {
- global $wpdb, $updraftplus;
- $microtime = microtime(true);
- $total_rows = 0;
- $table_structure = $wpdb->get_results("DESCRIBE $table");
- if (! $table_structure) {
- //$updraftplus->log(__('Error getting table details','wp-db-backup') . ": $table", 'error');
- return false;
- }
-
- if($segment == 'none' || $segment == 0) {
- // Add SQL statement to drop existing table
- $this->stow("\n# " . sprintf(__('Delete any existing table %s','wp-db-backup'),$updraftplus->backquote($table)) . "\n\n");
- $this->stow("DROP TABLE IF EXISTS " . $updraftplus->backquote($table) . ";\n");
-
- // Table structure
- // Comment in SQL-file
- $this->stow("\n# " . sprintf(__('Table structure of table %s','wp-db-backup'),$updraftplus->backquote($table)) . "\n\n");
-
- $create_table = $wpdb->get_results("SHOW CREATE TABLE `$table`", ARRAY_N);
- if (false === $create_table) {
- $err_msg = sprintf(__('Error with SHOW CREATE TABLE for %s.','wp-db-backup'), $table);
- //$updraftplus->log($err_msg, 'error');
- $this->stow("#\n# $err_msg\n#\n");
- }
- $create_line = $updraftplus->str_lreplace('TYPE=', 'ENGINE=', $create_table[0][1]);
- # Remove PAGE_CHECKSUM parameter from MyISAM - was internal, undocumented, later removed (so causes errors on import)
- if (preg_match('/ENGINE=([^\s;]+)/', $create_line, $eng_match)) {
- $engine = $eng_match[1];
- if ('myisam' == strtolower($engine)) {
- $create_line = preg_replace('/PAGE_CHECKSUM=\d\s?/', '', $create_line, 1);
- }
- }
- $this->stow($create_line.' ;');
-
- if (false === $table_structure) {
- $err_msg = sprintf('Error getting table structure of %s', $table);
- $this->stow("#\n# $err_msg\n#\n");
- }
-
- // Comment in SQL-file
- $this->stow("\n\n# " . sprintf('Data contents of table %s',$updraftplus->backquote($table)) . "\n\n");
- }
- # Some tables have optional data, and should be skipped if they do not work
- $table_sans_prefix = substr($table, strlen($this->table_prefix_raw));
- $data_optional_tables = apply_filters('updraftplus_data_optional_tables', explode(',', UPDRAFTPLUS_DATA_OPTIONAL_TABLES));
- if (in_array($table_sans_prefix, $data_optional_tables)) {
- if (!$updraftplus->something_useful_happened && !empty($updraftplus->current_resumption) && ($updraftplus->current_resumption - $updraftplus->last_successful_resumption > 2)) {
- $updraftplus->log("Table $table: Data skipped (previous attempts failed, and table is marked as n