/wp-content/plugins/updraftplus/updraftplus.php
PHP | 2610 lines | 1792 code | 376 blank | 442 comment | 666 complexity | e74b0c70da6434e710abff8c9bd3b97a MD5 | raw file
Possible License(s): GPL-2.0, GPL-3.0, Apache-2.0, LGPL-2.1
Large files are truncated, but you can click here to view the full file
- <?php
- /*
- Plugin Name: UpdraftPlus - Backup/Restore
- Plugin URI: http://updraftplus.com
- Description: Backup and restore: take backups locally, or backup to Amazon S3, Dropbox, Google Drive, Rackspace, (S)FTP, WebDAV & email, on automatic schedules.
- Author: UpdraftPlus.Com, DavidAnderson
- Version: 1.9.13
- Donate link: http://david.dw-perspective.org.uk/donate
- License: GPLv3 or later
- Text Domain: updraftplus
- Domain Path: /languages
- Author URI: http://updraftplus.com
- */
- /*
- TODO - some of these are out of date/done, needs pruning
- // On free version, add note to restore page/to "delete-old-dirs" section
- // Make SFTP chunked (there is a new stream wrapper)
- // Store/show current Dropbox account
- // On plugins restore, don't let UD over-write itself - because this usually means a down-grade. Since upgrades are db-compatible, there's no reason to downgrade.
- // Renewal links should redirect to login and redirect to relevant page after
- // Alert user if they enter http(s):(etc) as their Dropbox path - seen one user do it
- // Schedule a task to report on failure
- // Copy.Com, Box
- // Switch 'Backup Now' to call the WP action via AJAX instead of via Cron - then test on hosts who deny all cron (e.g. Heart)
- // Get something to parse the 'Backups in progress' data, and if the 'next resumption' is far negative, and if also cron jobs appear to be not running, then call the action directly.
- // If ionice is available, then use it to limit I/O usage
- // Check the timestamps used in filenames - they should be UTC
- // Get user to confirm if they check both the search/replace and wp-config boxes
- // Tweak the display so that users seeing resumption messages don't think it's stuck
- // A search/replace console without needing to restore
- // On restore, check for some 'standard' PHP modules (prevents support requests related to them) -e.g. GD, Curl
- // Recognise known huge non-core tables on restore, and postpone them to the end (AJAX method?)
- // Add a cart notice if people have DBSF=quantity1
- // Pre-restore actually unpack the zips if they are not insanely big (to prevent the restore crashing at this stage if there's a problem)
- // Include in email report the list of "more" directories: http://updraftplus.com/forums/support-forum-group1/paid-support-forum-forum2/wordpress-multi-sites-thread121/
- // Integrate jstree for a nice files-chooser; use https://wordpress.org/plugins/dropbox-photo-sideloader/ to see how it's done
- // Verify that attempting to bring back a MS backup on a non-MS install warns the user
- // Pre-schedule resumptions that we know will be scheduled later
- // Change add-ons screen, to be less confusing for people who haven't yet updated but have connected
- // Change migrate window: 1) Retain link to article 2) Have selector to choose which backup set to migrate - or a fresh one 3) Have option for FTP/SFTP/SCP despatch 4) Have big "Go" button. Have some indication of what happens next. Test the login first. Have the remote site auto-scan its directory + pick up new sets. Have a way of querying the remote site for its UD-dir. Have a way of saving the settings as a 'profile'. Or just save the last set of settings (since mostly will be just one place to send to). Implement an HTTP/JSON method for sending files too.
- // Post restore, do an AJAX get for the site; if this results in a 500, then auto-turn-on WP_DEBUG
- // Place in maintenance mode during restore - ?
- // Test Azure: https://blogs.technet.com/b/blainbar/archive/2013/08/07/article-create-a-wordpress-site-using-windows-azure-read-on.aspx?Redirected=true
- // Seen during autobackup on 1.8.2: Warning: Invalid argument supplied for foreach() in /home/infinite/public_html/new/wp-content/plugins/updraftplus/updraftplus.php on line 1652
- // Add some kind of automated scan for post content (e.g. images) that has the same URL base, but is not part of WP. There's an example of such a site in tmp-rich.
- // Free/premium comparison page
- // Complete the tweak to bring the delete-old-dirs within a dialog (just needed to deal with case of needing credentials more elegantly).
- // Add note to support page requesting that non-English be translated
- // More locking: lock the resumptions too (will need to manage keys to make sure junk data is not left behind)
- // See: ftp-logins.log - would help if we retry FTP logins after 10 second delay (not on testing), to lessen chances of 'too many users - try again later' being terminal. Also, can we log the login error?
- // Deal with missing plugins/themes/uploads directory when installing
- // Bring down interval if we are already in upload time (since zip delays are no longer possible). See: options-general-11-23.txt
- // Add FAQ - can I get it to save automatically to my computer?
- // Pruner assumes storage is same as current - ?
- // Include blog feed in basic email report
- // Detect, and show prominent error in admin area, if the slug is not updraftplus/updraftplus.php (one Mac user in the wild managed to upload as updraftplus-2).
- // Pre-schedule future resumptions that we know will be scheduled; helps deal with WP's dodgy scheduler skipping some. (Then need to un-schedule if job finishes).
- // Dates in the progress box are apparently untranslated
- // Add-on descriptions are not internationalised
- // Nicer in-dashboard log: show log + option to download; also (if 'reporting' add-on available) show the HTML report from that
- // Take a look at logfile-to-examine.txt (stored), and the pattern of detection of zipfile contents
- // http://www.phpclasses.org/package/8269-PHP-Send-MySQL-database-backup-files-to-Ubuntu-One.html
- // Put the -old directories in updraft_dir instead of present location. Prevents file perms issues, and also will be automatically excluded from backups.
- // Test restores via cloud service for small $??? (Relevant: http://browshot.com/features) (per-day? per-install?)
- // Warn/prevent if trying to migrate between sub-domain/sub-folder based multisites
- // Don't perform pruning when doing auto-backup?
- // Post-migrate, notify the user if on Apache but without mod_rewrite (has been seen in the wild)
- // Pre-check the search/replace box if migration detected
- // Can some tables be omitted from the search/replace on a migrate? i.e. Special knowledge?
- // Put a 'what do I get if I upgrade?' link into the mix
- // Add to admin bar (and make it something that can be turned off)
- // If migrated database from somewhere else, then add note about revising UD settings
- // Strategy for what to do if the updraft_dir contains untracked backups. Automatically rescan?
- // MySQL manual: See Section 8.2.2.1, Speed of INSERT Statements.
- // Exempt UD itself from a plugins restore? (will options be out-of-sync? exempt options too?)
- // Post restore/migrate, check updraft_dir, and reset if non-existent
- // Auto-empty caches post-restore/post-migration (prevent support requests from people with stale/wrong caching data)
- // Show 'Migrate' instead of 'Restore' on the button if relevant
- // Test with: http://wordpress.org/plugins/wp-db-driver/
- // Backup notes
- // Automatically re-count folder usage after doing a delete
- // Switch zip engines earlier if no progress - see log.cfd793337563_hostingfails.txt
- // The delete-em at the end needs to be made resumable. And to only run on last run-through (i.e. no errors, or no resumption)
- // Incremental - can leverage some of the multi-zip work???
- // Put in a help link to explain what WordPress core (including any additions to your WordPress root directory) does (was asked for support)
- // More databases
- // Multiple files in more-files
- // On multisite, the settings should be in the network panel. Connection settings need migrating into site options.
- // On restore, raise a warning for ginormous zips
- // Detect double-compressed files when they are uploaded (need a way to detect gz compression in general)
- // Log migrations/restores, and have an option for auto-emailing the log
- # Email backup method should be able to force split limit down to something manageable - or at least, should make the option display. (Put it in email class. Tweak the storage dropdown to not hide stuff also in expert class if expert is shown).
- // What happens if you restore with a database that then changes the setting for updraft_dir ? Should be safe, as the setting is cached during a run: double-check.
- // Multi-site manager at updraftplus.com
- // Import/slurp backups from other sites. See: http://www.skyverge.com/blog/extending-the-wordpress-xml-rpc-api/
- // More sophisticated options for retaining/deleting (e.g. 4/day for X days, then 7/week for Z weeks, then 1/month for Y months)
- // Unpack zips via AJAX? Do bit-by-bit to allow enormous opens a better chance? (have a huge one in Dropbox)
- // Put in a maintenance-mode detector
- // Add update warning if they've got an add-on but not connected account
- // Detect CloudFlare output in attempts to connect - detecting cloudflare.com should be sufficient
- // Bring multisite shop page up to date
- // Re-do pricing + support packages
- // More files: back up multiple directories, not just one
- // Give a help page to go with the message: A zip error occurred - check your log for more details (reduce support requests)
- // Exclude .git and .svn by default from wpcore
- // Add option to add, not just replace entities on restore/migrate
- // Add warning to backup run at beginning if -old dirs exist
- // Auto-alert if disk usage passes user-defined threshold / or an automatically computed one. Auto-alert if more backups are known than should be (usually a sign of incompleteness). Actually should just delete unknown backups over a certain age.
- // Generic S3 provider: add page to site. S3-compatible storage providers: http://www.dragondisk.com/s3-storage-providers.html
- // Importer - import backup sets from another WP site directly via HTTP
- // Option to create new user for self post-restore
- // Auto-disable certain cacheing/minifying plugins post-restore
- // Add note post-DB backup: you will need to log in using details from newly-imported DB
- // Make search+replace two-pass to deal with moving between exotic non-default moved-directory setups
- // Get link - http://www.rackspace.com/knowledge_center/article/how-to-use-updraftplus-to-back-up-cloud-sites-to-cloud-files
- // 'Delete from your webserver' should trigger a rescan if the backup was local-only
- // Option for additive restores - i.e. add content (themes, plugins,...) instead of replacing
- // Testing framework - automated testing of all file upload / download / deletion methods
- // Ginormous tables - need to make sure we "touch" the being-written-out-file (and double-check that we check for that) every 15 seconds - https://friendpaste.com/697eKEcWib01o6zT1foFIn
- // With ginormous tables, log how many times they've been attempted: after 3rd attempt, log a warning and move on. But first, batch ginormous tables (resumable)
- // Import single site into a multisite: http://codex.wordpress.org/Migrating_Multiple_Blogs_into_WordPress_3.0_Multisite, http://wordpress.org/support/topic/single-sites-to-multisite?replies=5, http://wpmu.org/import-export-wordpress-sites-multisite/
- // Selective restores - some resources
- // When you migrate/restore, if there is a .htaccess, warn/give option about it.
- // 'Show log' should be done in a nice pop-out, with a button to download the raw
- // delete_old_dirs() needs to use WP_Filesystem in a more user-friendly way when errors occur
- // Bulk download of entire set at once (not have to click 7 times).
- // Restoration should also clear all common cache locations (or just not back them up)
- // Deal with gigantic database tables - e.g. those over a million rows on cheap hosting.
- // When restoring core, need an option to retain database settings / exclude wp-config.php
- // If migrating, warn about consequences of over-writing wp-config.php
- // Produce a command-line version of the restorer (so that people with shell access are immune from server-enforced timeouts)
- // Restorations should be logged also
- // Migrator - list+download from remote, kick-off backup remotely
- // Search for other TODO-s in the code
- // Opt-in non-personal stats + link to aggregated results
- // Stand-alone installer - take a look at this: http://wordpress.org/extend/plugins/duplicator/screenshots/
- // More DB add-on (other non-WP tables; even other databases)
- // Unlimited customers should be auto-emailed each time they add a site (security)
- // Update all-features page at updraftplus.com (not updated after 1.5.5)
- // Save database encryption key inside backup history on per-db basis, so that if it changes we can still decrypt
- // AJAX-ify restoration
- // Warn Premium users before de-activating not to update whilst inactive
- // Ability to re-scan existing cloud storage
- // Dropbox uses one mcrypt function - port to phpseclib for more portability
- // Store meta-data on which version of UD the backup was made with (will help if we ever introduce quirks that need ironing)
- // Send the user an email upon their first backup with tips on what to do (e.g. support/improve) (include legacy check to not bug existing users)
- // Rackspace folders
- //Do an automated test periodically for the success of loop-back connections
- //When a manual backup is run, use a timer to update the 'Download backups and logs' section, just like 'Last finished backup run'. Beware of over-writing anything that's in there from a resumable downloader.
- //Change DB encryption to not require whole gzip in memory (twice) http://www.frostjedi.com/phpbb3/viewtopic.php?f=46&t=168508&p=391881&e=391881
- //Add YouSendIt/Hightail, Copy.Com, Box.Net, SugarSync, Me.Ga support??
- //Make it easier to find add-ons
- // On restore, move in data, not the whole directory (gives more flexibility on file permissions)
- // Move the inclusion, cloud and retention data into the backup job (i.e. don't read current config, make it an attribute of each job). In fact, everything should be. So audit all code for where get_option is called inside a backup run: it shouldn't happen.
- // Should we resume if the only errors were upon deletion (i.e. the backup itself was fine?) Presently we do, but it displays errors for the user to confuse them. Perhaps better to make pruning a separate scheduled task??
- // Create a "Want Support?" button/console, that leads them through what is needed, and performs some basic tests...
- // Add-on to check integrity of backups
- // Add-on to manage all your backups from a single dashboard
- // Provide backup/restoration for UpdraftPlus's settings, to allow 'bootstrap' on a fresh WP install - some kind of single-use code which a remote UpdraftPlus can use to authenticate
- // Multiple schedules
- // Allow connecting to remote storage, scanning + populating backup history from it
- // Multisite add-on should allow restoring of each blog individually
- // Remove the recurrence of admin notices when settings are saved due to _wp_referer
- // New sub-module to verify that the backups are there, independently of backup thread
- */
- /*
- Portions copyright 2011-14 David Anderson
- Portions copyright 2010 Paul Kehrer
- Other portions copyright as indicated authors in the relevant files
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 3 of the License, or
- (at your option) any later version.
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
- */
// Filesystem path of the plugin directory, and its URL (for loading assets)
define('UPDRAFTPLUS_DIR', dirname(__FILE__));
define('UPDRAFTPLUS_URL', plugins_url('', __FILE__));
// Default exclusion patterns for the "others" and "uploads" backup entities
define('UPDRAFT_DEFAULT_OTHERS_EXCLUDE','upgrade,cache,updraft,backup*,*backups');
define('UPDRAFT_DEFAULT_UPLOADS_EXCLUDE','backup*,*backups,backwpup*,wp-clone');
# The following constants are overridable: define them in wp-config.php to change the defaults
# Tables whose data can be safed without significant loss, if (and only if) the attempt to back them up fails (e.g. bwps_log, from WordPress Better Security, is log data; but individual entries can be huge and cause out-of-memory fatal errors on low-resource environments). Comma-separate the table names (without the WordPress table prefix).
if (!defined('UPDRAFTPLUS_DATA_OPTIONAL_TABLES')) define('UPDRAFTPLUS_DATA_OPTIONAL_TABLES', 'bwps_log,statpress,slim_stats,redirection_logs,Counterize,Counterize_Referers,Counterize_UserAgents');
# Comma-separated candidate paths searched for the zip and mysqldump binaries
if (!defined('UPDRAFTPLUS_ZIP_EXECUTABLE')) define('UPDRAFTPLUS_ZIP_EXECUTABLE', "/usr/bin/zip,/bin/zip,/usr/local/bin/zip,/usr/sfw/bin/zip,/usr/xdg4/bin/zip,/opt/bin/zip");
if (!defined('UPDRAFTPLUS_MYSQLDUMP_EXECUTABLE')) define('UPDRAFTPLUS_MYSQLDUMP_EXECUTABLE', "/usr/bin/mysqldump,/bin/mysqldump,/usr/local/bin/mysqldump,/usr/sfw/bin/mysqldump,/usr/xdg4/bin/mysqldump,/opt/bin/mysqldump");
# If any individual file size is greater than this (bytes, here 250MB), then a warning is given
if (!defined('UPDRAFTPLUS_WARN_FILE_SIZE')) define('UPDRAFTPLUS_WARN_FILE_SIZE', 1024*1024*250);
# Warn above this many rows in a table. On a test on a Pentium laptop, 100,000 rows needed ~ 1 minute to write out - so 150,000 is around the CPanel default of 90 seconds execution time.
if (!defined('UPDRAFTPLUS_WARN_DB_ROWS')) define('UPDRAFTPLUS_WARN_DB_ROWS', 150000);
# The smallest value (in megabytes) that the "split zip files at" setting is allowed to be set to
if (!defined('UPDRAFTPLUS_SPLIT_MIN')) define('UPDRAFTPLUS_SPLIT_MIN', 25);
# The maximum number of files to batch at one time when writing to the backup archive. You'd only be likely to want to raise (not lower) this.
if (!defined('UPDRAFTPLUS_MAXBATCHFILES')) define('UPDRAFTPLUS_MAXBATCHFILES', 500);
// Load add-ons and various files that may or may not be present, depending on where the plugin was distributed
if (is_file(UPDRAFTPLUS_DIR.'/premium.php')) require_once(UPDRAFTPLUS_DIR.'/premium.php');
if (is_file(UPDRAFTPLUS_DIR.'/autoload.php')) require_once(UPDRAFTPLUS_DIR.'/autoload.php');
if (is_file(UPDRAFTPLUS_DIR.'/udaddons/updraftplus-addons.php')) include_once(UPDRAFTPLUS_DIR.'/udaddons/updraftplus-addons.php');
// Scan the addons directory (if present), loading every PHP add-on whose declared
// "RequiresPHP" header (if any) is satisfied by the running PHP version. An add-on
// may also declare "IncludePHP" to pull in a shared support file first.
$updraftplus_have_addons = 0;
$addons_dir = UPDRAFTPLUS_DIR.'/addons';
if (is_dir($addons_dir) && $dh = opendir($addons_dir)) {
	while (false !== ($addon_file = readdir($dh))) {
		if (!is_file($addons_dir.'/'.$addon_file) || !preg_match('/\.php$/', $addon_file)) continue;
		// The headers live at the top of the file; 1kb is enough to find them
		$header = file_get_contents($addons_dir.'/'.$addon_file, false, null, -1, 1024);
		$phprequires = preg_match("/RequiresPHP: (\d[\d\.]+)/", $header, $matches) ? $matches[1] : false;
		$phpinclude = preg_match("/IncludePHP: (\S+)/", $header, $matches) ? $matches[1] : false;
		// Skip add-ons whose minimum PHP version is not met
		if (false !== $phprequires && !version_compare(PHP_VERSION, $phprequires, '>=')) continue;
		$updraftplus_have_addons++;
		if ($phpinclude) require_once(UPDRAFTPLUS_DIR.'/'.$phpinclude);
		include_once($addons_dir.'/'.$addon_file);
	}
	@closedir($dh);
}
$updraftplus = new UpdraftPlus();
$updraftplus->have_addons = $updraftplus_have_addons;
// If less than 192MB is available, try raising the limit towards WP_MAX_MEMORY_LIMIT.
// Experience appears to show that the memory limit is only likely to be hit (unless it is
// very low) by single files that are larger than available memory (when compressed).
if (!$updraftplus->memory_check(192)) {
	# Sanity checks needed - found someone who'd set WP_MAX_MEMORY_LIMIT to 256K!
	$wp_max_mb = $updraftplus->memory_check_current(WP_MAX_MEMORY_LIMIT);
	if (!$updraftplus->memory_check($wp_max_mb)) {
		$new_limit_mb = absint($wp_max_mb);
		// Only apply plausible values - neither too small to help, nor absurdly large
		if ($new_limit_mb > 32 && $new_limit_mb < 100000) {
			// Raise the memory limit to the maximum WordPress allows, for large backup files
			@ini_set('memory_limit', $new_limit_mb.'M');
		}
	}
}
if (!class_exists('UpdraftPlus_Options')) require_once(UPDRAFTPLUS_DIR.'/options.php');
// Core plugin class: registers hooks, runs/resumes backups, and manages logs and job data.
// NOTE: the class body continues beyond this excerpt.
class UpdraftPlus {

	// Plugin version, parsed from this file's plugin header in the constructor
	public $version;

	public $plugin_title = 'UpdraftPlus Backup/Restore';

	// Remote storage choices; will be shown in the admin menu in the order used here
	public $backup_methods = array(
		's3' => 'Amazon S3',
		'dropbox' => 'Dropbox',
		'cloudfiles' => 'Rackspace Cloud Files',
		'googledrive' => 'Google Drive',
		'ftp' => 'FTP',
		'sftp' => 'SFTP / SCP',
		'webdav' => 'WebDAV',
		'bitcasa' => 'Bitcasa',
		's3generic' => 'S3-Compatible (Generic)',
		'openstack' => 'OpenStack (Swift)',
		'dreamobjects' => 'DreamObjects',
		'email' => 'Email'
	);

	// Accumulated errors for the current operation
	public $errors = array();

	// Job identifier; presumably a per-backup nonce used in log/backup filenames - see log.<nonce>.txt usage below
	public $nonce;

	// Current log file path and open file handle (false when no log is open)
	public $logfile_name = "";
	public $logfile_handle = false;

	// Timing state for the current job (set elsewhere in the class - not visible in this excerpt)
	public $backup_time;
	public $job_time_ms;
	public $opened_log_time;

	// Cached backup directory location and per-job data
	private $backup_dir;
	private $jobdata;

	public $something_useful_happened = false;

	// Count of loaded add-ons, assigned at load time from $updraftplus_have_addons
	public $have_addons = false;

	// Used to schedule resumption attempts beyond the tenth, if needed
	public $current_resumption;
	public $newresumption_scheduled = false;
// Initialisation actions - takes place on plugin load
public function __construct() {

	// Parse our version number from the plugin header at the top of this file
	$file_data = @file_get_contents(__FILE__, false, null, 0, 1024);
	if (is_string($file_data) && preg_match("/Version: ([\d\.]+)(\r|\n)/", $file_data, $matches)) {
		$this->version = $matches[1];
	}

	# Handle OAuth callbacks / log downloads passed via URL parameters
	add_action('init', array($this, 'handle_url_actions'));

	// Run earlier than default - hence earlier than other components
	// admin_menu runs earlier, and we need it because options.php wants to use $updraftplus_admin before admin_init happens
	add_action(apply_filters('updraft_admin_menu_hook', 'admin_menu'), array($this, 'admin_menu'), 9);
	# Not a mistake: admin-ajax.php calls only admin_init and not admin_menu
	add_action('admin_init', array($this, 'admin_menu'), 9);

	# Backup entry points. 'updraft_backup_all' is legacy (Oct 2013) - some people wrote cron scripts to use it.
	$backup_hooks = array(
		'updraft_backup' => 'backup_files',
		'updraft_backup_database' => 'backup_database',
		'updraft_backupnow_backup' => 'backupnow_files',
		'updraft_backupnow_backup_database' => 'backupnow_database',
		'updraft_backupnow_backup_all' => 'backup_all',
		'updraft_backup_all' => 'backup_all'
	);
	foreach ($backup_hooks as $hook => $callback) {
		add_action($hook, array($this, $callback));
	}

	# Our runs-after-backup event, whose purpose is to see if it succeeded or failed, and resume/mop-up etc.
	add_action('updraft_backup_resume', array($this, 'backup_resume'), 10, 3);

	# http://codex.wordpress.org/Plugin_API/Filter_Reference/cron_schedules. Raised priority because some plugins wrongly over-write all prior schedule changes (including BackupBuddy!)
	add_filter('cron_schedules', array($this, 'modify_cron_schedules'), 30);
	add_action('plugins_loaded', array($this, 'load_translations'));

	# Prevent iThemes Security from telling people that they have no backups (and advertising them another product on that basis!)
	add_filter('itsec_has_external_backup', array($this, 'return_true'), 999);
	add_filter('itsec_external_backup_link', array($this, 'itsec_external_backup_link'), 999);
	add_filter('itsec_scheduled_external_backup', array($this, 'itsec_scheduled_external_backup'), 999);

	register_deactivation_hook(__FILE__, array($this, 'deactivation'));
}
// Filter callback for iThemes Security: report whether an UpdraftPlus backup is scheduled
public function itsec_scheduled_external_backup($x) {
	// wp_next_scheduled() returns a timestamp when scheduled, false otherwise
	return wp_next_scheduled('updraft_backup') ? true : false;
}
// Filter callback for iThemes Security: link to the UpdraftPlus settings page
public function itsec_external_backup_link($x) {
	$admin_url = UpdraftPlus_Options::admin_page_url();
	return $admin_url.'?page=updraftplus';
}
// Unconditionally-true filter callback (used for itsec_has_external_backup)
public function return_true($x) {
	return true;
}
// Make the bundled phpseclib library loadable: adds its directory to the include
// path (phpseclib resolves its own internal requires that way) and optionally
// requires a specific class file. No-op when $class is already loaded.
public function ensure_phpseclib($class = false, $class_path = false) {
	if ($class && class_exists($class)) return;
	$phpseclib_dir = UPDRAFTPLUS_DIR.'/includes/phpseclib';
	$include_path = get_include_path();
	if (false === strpos($include_path, $phpseclib_dir)) {
		set_include_path($include_path.PATH_SEPARATOR.$phpseclib_dir);
	}
	if ($class_path) require_once($phpseclib_dir.'/'.$class_path.'.php');
}
- // Returns the number of bytes free, if it can be detected; otherwise, false
- // Presently, we only detect CPanel. If you know of others, then feel free to contribute!
- public function get_hosting_disk_quota_free() {
- if (!@is_dir('/usr/local/cpanel') || $this->detect_safe_mode() || !function_exists('popen') || (!@is_executable('/usr/local/bin/perl') && !@is_executable('/usr/local/cpanel/3rdparty/bin/perl'))) return false;
- $perl = (@is_executable('/usr/local/cpanel/3rdparty/bin/perl')) ? '/usr/local/cpanel/3rdparty/bin/perl' : '/usr/local/bin/perl';
- $exec = "UPDRAFTPLUSKEY=updraftplus $perl ".UPDRAFTPLUS_DIR."/includes/get-cpanel-quota-usage.pl";
- $handle = @popen($exec, 'r');
- if (!is_resource($handle)) return false;
- $found = false;
- $lines = 0;
- while (false === $found && !feof($handle) && $lines<100) {
- $lines++;
- $w = fgets($handle);
- # Used, limit, remain
- if (preg_match('/RESULT: (\d+) (\d+) (\d+) /', $w, $matches)) { $found = true; }
- }
- $ret = pclose($handle);
- if (false === $found ||$ret != 0) return false;
- if ((int)$matches[2]<100 || ($matches[1] + $matches[3] != $matches[2])) return false;
- return $matches;
- }
- // This function may get called multiple times, so write accordingly
- public function admin_menu() {
- // We are in the admin area: now load all that code
- global $updraftplus_admin;
- if (empty($updraftplus_admin)) require_once(UPDRAFTPLUS_DIR.'/admin.php');
- if (isset($_GET['wpnonce']) && isset($_GET['page']) && isset($_GET['action']) && $_GET['page'] == 'updraftplus' && $_GET['action'] == 'downloadlatestmodlog' && wp_verify_nonce($_GET['wpnonce'], 'updraftplus_download')) {
- $updraft_dir = $this->backups_dir_location();
- $log_file = '';
- $mod_time = 0;
- if ($handle = @opendir($updraft_dir)) {
- while (false !== ($entry = readdir($handle))) {
- // The latter match is for files created internally by zipArchive::addFile
- if (preg_match('/^log\.[a-z0-9]+\.txt$/i', $entry)) {
- $mtime = filemtime($updraft_dir.'/'.$entry);
- if ($mtime > $mod_time) {
- $mod_time = $mtime;
- $log_file = $updraft_dir.'/'.$entry;
- }
- }
- }
- @closedir($handle);
- }
- if ($mod_time >0) {
- if (is_readable($log_file)) {
- header('Content-type: text/plain');
- readfile($log_file);
- exit;
- } else {
- add_action('all_admin_notices', array($this,'show_admin_warning_unreadablelog') );
- }
- } else {
- add_action('all_admin_notices', array($this,'show_admin_warning_nolog') );
- }
- }
- }
// http_api_curl callback: unless the user opted to trust the server's own CA
// certificates, point cURL at the CA bundle we ship.
public function add_curl_capath($handle) {
	$use_server_certs = UpdraftPlus_Options::get_updraft_option('updraft_ssl_useservercerts');
	if (!$use_server_certs) {
		curl_setopt($handle, CURLOPT_CAINFO, UPDRAFTPLUS_DIR.'/includes/cacert.pem');
	}
}
// Handle actions passed on to method plugins; e.g. Google OAuth 2.0 - ?action=updraftmethod-googledrive-auth&page=updraftplus
// Nov 2013: Google's new cloud console, for reasons as yet unknown, only allows you to enter a redirect_uri with a single URL parameter... thus, we put page second, and re-add it if necessary. Apr 2014: Bitcasa already do this, so perhaps it is part of the OAuth2 standard or best practice somewhere.
// Also handles action=downloadlog and action=downloadfile
public function handle_url_actions() {
	// First, basic security check: must be an admin page, with ability to manage options, with the right parameters
	// Also, only on GET because WordPress on the options page repeats parameters sometimes when POST-ing via the _wp_referer field
	if (isset($_SERVER['REQUEST_METHOD']) && 'GET' == $_SERVER['REQUEST_METHOD'] && isset($_GET['action'])) {
		if (preg_match("/^updraftmethod-([a-z]+)-([a-z]+)$/", $_GET['action'], $matches) && file_exists(UPDRAFTPLUS_DIR.'/methods/'.$matches[1].'.php') && UpdraftPlus_Options::user_can_manage()) {
			// Re-add the page parameter (see OAuth redirect_uri note above)
			$_GET['page'] = 'updraftplus';
			$_REQUEST['page'] = 'updraftplus';
			$method = $matches[1];
			require_once(UPDRAFTPLUS_DIR.'/methods/'.$method.'.php');
			$call_class = "UpdraftPlus_BackupModule_".$method;
			$call_method = "action_".$matches[2];
			$backup_obj = new $call_class;
			// Ensure any HTTP calls the module makes use our CA bundle
			add_action('http_api_curl', array($this, 'add_curl_capath'));
			try {
				if (method_exists($backup_obj, $call_method)) {
					call_user_func(array($backup_obj, $call_method));
				} elseif (method_exists($backup_obj, 'action_handler')) {
					call_user_func(array($backup_obj, 'action_handler'), $matches[2]);
				}
			} catch (Exception $e) {
				// Bug fix: 'error' was previously a stray fourth argument to sprintf() (where it was
				// silently discarded) instead of the level argument to log(), so exceptions were
				// not being recorded at error level.
				$this->log(sprintf(__("%s error: %s", 'updraftplus'), $method, $e->getMessage().' ('.$e->getCode().')'), 'error');
			}
			remove_action('http_api_curl', array($this, 'add_curl_capath'));
		} elseif (isset($_GET['page']) && $_GET['page'] == 'updraftplus' && $_GET['action'] == 'downloadlog' && isset($_GET['updraftplus_backup_nonce']) && preg_match("/^[0-9a-f]{12}$/",$_GET['updraftplus_backup_nonce']) && UpdraftPlus_Options::user_can_manage()) {
			// No WordPress nonce is needed here or for the next, since the backup is already nonce-based
			$updraft_dir = $this->backups_dir_location();
			$log_file = $updraft_dir.'/log.'.$_GET['updraftplus_backup_nonce'].'.txt';
			if (is_readable($log_file)) {
				header('Content-type: text/plain');
				readfile($log_file);
				exit;
			} else {
				add_action('all_admin_notices', array($this,'show_admin_warning_unreadablelog') );
			}
		} elseif (isset($_GET['page']) && $_GET['page'] == 'updraftplus' && $_GET['action'] == 'downloadfile' && isset($_GET['updraftplus_file']) && preg_match('/^backup_([\-0-9]{15})_.*_([0-9a-f]{12})-db([0-9]+)?+\.(gz\.crypt)$/i', $_GET['updraftplus_file']) && UpdraftPlus_Options::user_can_manage()) {
			// Only encrypted database archives matching our filename pattern may be spooled; basename() guards against path traversal
			$updraft_dir = $this->backups_dir_location();
			$spool_file = $updraft_dir.'/'.basename($_GET['updraftplus_file']);
			if (is_readable($spool_file)) {
				$dkey = (isset($_GET['decrypt_key'])) ? $_GET['decrypt_key'] : "";
				$this->spool_file('db', $spool_file, $dkey);
				exit;
			} else {
				add_action('all_admin_notices', array($this,'show_admin_warning_unreadablefile') );
			}
		}
	}
}
// Returns the WordPress table prefix, optionally filterable via 'updraftplus_get_table_prefix'
public function get_table_prefix($allow_override = false) {
	global $wpdb;
	if (is_multisite() && !defined('MULTISITE')) {
		# In this case (which should only be possible on installs upgraded from pre WP 3.0 WPMU), $wpdb->get_blog_prefix() cannot be made to return the right thing. $wpdb->base_prefix is not explicitly marked as public, so we prefer to use get_blog_prefix if we can, for future compatibility.
		$prefix = $wpdb->base_prefix;
	} else {
		$prefix = $wpdb->get_blog_prefix(0);
	}
	if ($allow_override) $prefix = apply_filters('updraftplus_get_table_prefix', $prefix);
	return $prefix;
}
// Admin notice: the requested log file exists but is not readable
public function show_admin_warning_unreadablelog() {
	global $updraftplus_admin;
	$notice = '<strong>'.__('UpdraftPlus notice:','updraftplus').'</strong> '.__('The log file could not be read.','updraftplus');
	$updraftplus_admin->show_admin_warning($notice);
}
// Admin notice: no log files were found in the backup directory
public function show_admin_warning_nolog() {
	global $updraftplus_admin;
	$notice = '<strong>'.__('UpdraftPlus notice:','updraftplus').'</strong> '.__('No log files were found.','updraftplus');
	$updraftplus_admin->show_admin_warning($notice);
}
// Admin notice: the requested backup file exists but is not readable
public function show_admin_warning_unreadablefile() {
	global $updraftplus_admin;
	$notice = '<strong>'.__('UpdraftPlus notice:','updraftplus').'</strong> '.__('The given file could not be read.','updraftplus');
	$updraftplus_admin->show_admin_warning($notice);
}
// Loads this plugin's translations, and works around third-party plugins that
// break our admin pages or cron runs.
public function load_translations() {
	// Tell WordPress where to find the translations
	load_plugin_textdomain('updraftplus', false, basename(dirname(__FILE__)).'/languages/');
	# The Google Analyticator plugin does something horrible: loads an old version of the Google SDK on init, always - which breaks us
	if ((defined('DOING_CRON') && DOING_CRON) || (isset($_GET['page']) && $_GET['page'] == 'updraftplus')) {
		remove_action('init', 'ganalyticator_stats_init');
		# Appointments+ does the same; but provides a cleaner way to disable it
		# Bug fix: guard the define() - redefining an already-defined constant raises a PHP notice
		if (!defined('APP_GCAL_DISABLE')) define('APP_GCAL_DISABLE', true);
	}
}
// Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
// Always cleans up temporary files over 12 hours old.
// With parameters, also cleans up those matching $match that are older than $older_than seconds.
// Also cleans out old job data older than 24 hours (immutable value) whenever $older_than > 10000.
// $match      - filename prefix interpolated into the match regexes (expected to be a plain prefix, not a pattern)
// $older_than - minimum age in seconds for matched files to be deleted; 0 means "delete regardless of age"
public function clean_temporary_files($match = '', $older_than = 43200) {
	# Clean out old job data (only on the default/long-age invocations, so that
	# frequent targeted clean-ups don't pay this cost)
	if ($older_than >10000) {
		global $wpdb;
		$all_jobs = $wpdb->get_results("SELECT option_name, option_value FROM $wpdb->options WHERE option_name LIKE 'updraft_jobdata_%'", ARRAY_A);
		foreach ($all_jobs as $job) {
			$val = maybe_unserialize($job['option_value']);
			# TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
			# TODO: This will need changing when incremental backups are introduced
			# Delete job data that started more than 24 hours ago (either timestamp key),
			# or has no timestamp at all and is not a backup job
			if (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
				delete_option($job['option_name']);
			} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
				delete_option($job['option_name']);
			} elseif (empty($val['backup_time_ms']) && empty($val['job_time_ms']) && !empty($val['job_type']) && $val['job_type'] != 'backup') {
				delete_option($job['option_name']);
			}
		}

	}
	$updraft_dir = $this->backups_dir_location();
	$now_time=time();
	if ($handle = opendir($updraft_dir)) {
		while (false !== ($entry = readdir($handle))) {
			// This match is for files created internally by zipArchive::addFile
			$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.([A-Za-z0-9]){6}?$/i", $entry);
			// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
			$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
			# Temporary files from the database dump process - not needed, as is caught by the catch-all
			# $table_match = preg_match("/${match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
			# The gz goes in with the txt, because we *don't* want to reap the raw .txt files
			if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $ziparchive_match || $binzip_match) && is_file($updraft_dir.'/'.$entry)) {
				// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
				if (($match && ($ziparchive_match || $binzip_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
					$this->log("Deleting old temporary file: $entry");
					@unlink($updraft_dir.'/'.$entry);
				}
			}
		}
		@closedir($handle);
	}
	# Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
	foreach (array(ABSPATH, ABSPATH.'wp-admin/') as $path) {
		if ($handle = opendir($path)) {
			while (false !== ($entry = readdir($handle))) {
				# With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
				if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
					$this->log("Deleting old PclZip temporary file: $entry");
					@unlink($path.$entry);
				}
			}
			@closedir($handle);
		}
	}
}
public function backup_time_nonce($nonce = false) {
	// Stamp the job's start: high-resolution (for relative log timings) and
	// whole-second (used in backup filenames) clocks.
	$this->job_time_ms = microtime(true);
	$this->backup_time = time();
	// Use the caller-supplied nonce, or mint a fresh 12-character hex identifier
	$this->nonce = (false === $nonce) ? substr(md5(time().rand()), 20) : $nonce;
}
public function logfile_open($nonce) {
	// Open (in append mode) the log file for the job identified by $nonce, then write a
	// header line describing the environment. On the first resumption it also logs
	// warnings for low memory, low max_execution_time and low disk/account space.
	// Side effects: sets $this->logfile_name, $this->logfile_handle, $this->opened_log_time,
	// and possibly $this->backup_is_already_complete.
	$updraft_dir = $this->backups_dir_location();
	$this->logfile_name = $updraft_dir."/log.$nonce.txt";
	if (file_exists($this->logfile_name)) {
		// Peek at the tail (~340 bytes) of an existing log to detect a job that already finished
		$seek_to = max((filesize($this->logfile_name) - 340), 1);
		$handle = fopen($this->logfile_name, 'r');
		if (is_resource($handle)) {
			# fseek returns 0 on success
			if (0 === @fseek($handle, $seek_to)) {
				$bytes_back = filesize($this->logfile_name) - $seek_to;
				$read_recent = fread($handle, $bytes_back);
				# Both success markers must be present for the backup to count as complete
				if (false !== strpos($read_recent, 'The backup apparently succeeded') && false !== strpos($read_recent, 'and is now complete')) {
					$this->backup_is_already_complete = true;
				}
			}
			fclose($handle);
		}
	}
	$this->logfile_handle = fopen($this->logfile_name, 'a');
	$this->opened_log_time = microtime(true);
	$this->log('Opened log file at time: '.date('r').' on '.site_url());
	global $wp_version;
	@include(ABSPATH.'wp-includes/version.php');
	// Will need updating when WP stops being just plain MySQL
	$mysql_version = (function_exists('mysql_get_server_info')) ? @mysql_get_server_info() : '?';
	$safe_mode = $this->detect_safe_mode();
	$memory_limit = ini_get('memory_limit');
	$memory_usage = round(@memory_get_usage(false)/1048576, 1);
	$memory_usage2 = round(@memory_get_usage(true)/1048576, 1);
	# Attempt to raise limit to avoid false positives
	@set_time_limit(900);
	$max_execution_time = (int)@ini_get("max_execution_time");
	# Note: uses {$var} interpolation rather than ${var}, which is deprecated as of PHP 8.2
	$logline = "UpdraftPlus WordPress backup plugin (http://updraftplus.com): ".$this->version." WP: ".$wp_version." PHP: ".phpversion()." (".@php_uname().") MySQL: $mysql_version Server: ".$_SERVER["SERVER_SOFTWARE"]." safe_mode: $safe_mode max_execution_time: $max_execution_time memory_limit: $memory_limit (used: {$memory_usage}M | {$memory_usage2}M) multisite: ".((is_multisite()) ? 'Y' : 'N')." mcrypt: ".((function_exists('mcrypt_encrypt')) ? 'Y' : 'N')." ZipArchive::addFile: ";
	// method_exists causes some faulty PHP installations to segfault, leading to support requests
	if (version_compare(phpversion(), '5.2.0', '>=') && extension_loaded('zip')) {
		$logline .= 'Y';
	} else {
		$logline .= (class_exists('ZipArchive') && method_exists('ZipArchive', 'addFile')) ? "Y" : "N";
	}
	$w3oc = 'N';
	if (0 === $this->current_resumption) {
		$memlim = $this->memory_check_current();
		if ($memlim<65) {
			$this->log(sprintf(__('The amount of memory (RAM) allowed for PHP is very low (%s Mb) - you should increase it to avoid failures due to insufficient memory (consult your web hosting company for more help)', 'updraftplus'), round($memlim, 1)), 'warning', 'lowram');
		}
		if ($max_execution_time>0 && $max_execution_time<20) {
			$this->log(sprintf(__('The amount of time allowed for WordPress plugins to run is very low (%s seconds) - you should increase it to avoid backup failures due to time-outs (consult your web hosting company for more help - it is the max_execution_time PHP setting; the recommended value is %s seconds or more)', 'updraftplus'), $max_execution_time, 90), 'warning', 'lowmaxexecutiontime');
		}
		# Record whether W3 Total Cache's object cache module is active
		if (defined('W3TC') && W3TC == true && function_exists('w3_instance')) {
			$modules = w3_instance('W3_ModuleStatus');
			if ($modules->is_enabled('objectcache')) {
				$w3oc = 'Y';
			}
		}
		$logline .= " W3TC/ObjectCache: $w3oc";
	}
	$this->log($logline);
	$hosting_bytes_free = $this->get_hosting_disk_quota_free();
	if (is_array($hosting_bytes_free)) {
		$perc = round(100*$hosting_bytes_free[1]/(max($hosting_bytes_free[2], 1)), 1);
		$quota_free = ' / '.sprintf('Free disk space in account: %s (%s used)', round($hosting_bytes_free[3]/1048576, 1)." Mb", "$perc %");
		# Warn when under 50Mb of hosting-account quota remains
		if ($hosting_bytes_free[3] < 1048576*50) {
			$quota_free_mb = round($hosting_bytes_free[3]/1048576, 1);
			$this->log(sprintf(__('Your free space in your hosting account is very low - only %s Mb remain', 'updraftplus'), $quota_free_mb), 'warning', 'lowaccountspace'.$quota_free_mb);
		}
	} else {
		$quota_free = '';
	}
	$disk_free_space = @disk_free_space($updraft_dir);
	if ($disk_free_space === false) {
		$this->log("Free space on disk containing Updraft's temporary directory: Unknown".$quota_free);
	} else {
		$this->log("Free space on disk containing Updraft's temporary directory: ".round($disk_free_space/1048576,1)." Mb".$quota_free);
		$disk_free_mb = round($disk_free_space/1048576, 1);
		# Warn when under 50Mb of filesystem space remains
		if ($disk_free_space < 50*1048576) $this->log(sprintf(__('Your free disk space is very low - only %s Mb remain', 'updraftplus'), round($disk_free_space/1048576, 1)), 'warning', 'lowdiskspace'.$disk_free_mb);
	}
}
/* Logs the given line, adding (relative) time stamp and newline
Note these subtleties of log handling:
- Messages at level 'error' are not logged to file - it is assumed that a separate call to log() at another level will take place. This is because at level 'error', messages are translated; whereas the log file is for developers who may not know the translated language. Messages at level 'error' are for the user.
- Messages at level 'error' do not persist through the job (they are only saved with save_backup_history(), and never restored from there - so only the final save_backup_history() errors persist); we presume that either a) they will be cleared on the next attempt, or b) they will occur again on the final attempt (at which point they will go to the user). But...
- ... messages at level 'warning' persist. These are conditions that are unlikely to be cleared, not-fatal, but the user should be informed about. The $uniq_id field (which should not be numeric) can then be used for warnings that should only be logged once
$skip_dblog = true is suitable when there's a risk of excessive logging, and the information is not important for the user to see in the browser on the settings page
*/
public function log($line, $level = 'notice', $uniq_id = false, $skip_dblog = false) {
	if ('error' == $level || 'warning' == $level) {
		# On the first error of the job, note the fact in the (developer-facing) log
		if ('error' == $level && 0 == $this->error_count()) $this->log('An error condition has occurred for the first time during this job');
		# Record in the in-memory error list; $uniq_id keys de-duplicate repeats
		if ($uniq_id) {
			$this->errors[$uniq_id] = array('level' => $level, 'message' => $line);
		} else {
			$this->errors[] = array('level' => $level, 'message' => $line);
		}
		# Errors are logged separately
		if ('error' == $level) return;
		# It's a warning - persist it in the job data (see the comment block above)
		$warnings = $this->jobdata_get('warnings');
		if (!is_array($warnings)) $warnings=array();
		if ($uniq_id) {
			$warnings[$uniq_id] = $line;
		} else {
			$warnings[] = $line;
		}
		$this->jobdata_set('warnings', $warnings);
	}
	# Allow add-ons (e.g. extra log destinations) to observe every log line
	do_action('updraftplus_logline', $line, $this->nonce, $level, $uniq_id);
	if ($this->logfile_handle) {
		# Record log file times relative to the backup start, if possible
		$rtime = (!empty($this->job_time_ms)) ? microtime(true)-$this->job_time_ms : microtime(true)-$this->opened_log_time;
		fwrite($this->logfile_handle, sprintf("%08.03f", round($rtime, 3))." (".$this->current_resumption.") ".(('notice' != $level) ? '['.ucfirst($level).'] ' : '').$line."\n");
	}
	switch ($this->jobdata_get('job_type')) {
		case 'download':
		// Download messages are keyed on the job (since they could be running several), and type
		// The values of the POST array were checked before
		$findex = (!empty($_POST['findex'])) ? $_POST['findex'] : 0;
		$this->jobdata_set('dlmessage_'.$_POST['timestamp'].'_'.$_POST['type'].'_'.$findex, $line);
		break;
		case 'restore':
		#if ('debug' != $level) echo $line."\n";
		break;
		default:
		# Surface the latest non-debug message on the settings page
		if (!$skip_dblog && 'debug' != $level) UpdraftPlus_Options::update_updraft_option('updraft_lastmessage', $line." (".date_i18n('M d H:i:s').")", false);
		break;
	}
	# Optional direct output, enabled via constants (CLI debugging / browser debugging)
	if (defined('UPDRAFTPLUS_CONSOLELOG')) print $line."\n";
	if (defined('UPDRAFTPLUS_BROWSERLOG')) print htmlentities($line)."<br>\n";
}
public function log_removewarning($uniq_id) {
	// Remove a previously-recorded warning (keyed by $uniq_id) from both the
	// persisted job data and the in-memory error list.
	$warnings = $this->jobdata_get('warnings');
	if (is_array($warnings)) {
		unset($warnings[$uniq_id]);
	} else {
		$warnings = array();
	}
	$this->jobdata_set('warnings', $warnings);
	unset($this->errors[$uniq_id]);
}
# For efficiency, you can also feed false or a string into this function
public function log_wp_error($err, $echo = false, $logerror = false) {
	if (false === $err) return false;
	// A plain string carries one message; a WP_Error may carry several
	$messages = is_string($err) ? array($err) : $err->get_error_messages();
	foreach ($messages as $msg) {
		$this->log("Error message: $msg");
		if ($echo) echo sprintf(__('Error: %s', 'updraftplus'), htmlspecialchars($msg))."<br>";
		if ($logerror) $this->log($msg, 'error');
	}
	// Only WP_Error objects can carry per-code extra data worth logging
	if (!is_string($err)) {
		$codes = $err->get_error_codes();
		if (is_array($codes)) {
			foreach ($codes as $code) {
				$data = $err->get_error_data($code);
				if (!empty($data)) {
					$ll = (is_string($data)) ? $data : serialize($data);
					$this->log("Error data (".$code."): ".$ll);
				}
			}
		}
	}
	# Returns false so that callers can return with false more efficiently if they wish
	return false;
}
public function get_max_packet_size() {
	// Returns the session's max_allowed_packet (bytes), first trying to raise it to
	// 32Mb if it is lower. Logging goes via $this->log, consistent with the rest of
	// this class (previously this went through the global $updraftplus needlessly).
	global $wpdb;
	$mp = (int)$wpdb->get_var("SELECT @@session.max_allowed_packet");
	# Default to 1Mb
	$mp = (is_numeric($mp) && $mp > 0) ? $mp : 1048576;
	# 32Mb
	if ($mp < 33554432) {
		# Suppress error display while attempting the raise (needs elevated MySQL privileges, so often fails)
		$save = $wpdb->show_errors(false);
		$req = $wpdb->query("SET GLOBAL max_allowed_packet=33554432");
		$wpdb->show_errors($save);
		if (!$req) $this->log("Tried to raise max_allowed_packet from ".round($mp/1048576,1)." Mb to 32 Mb, but failed (".$wpdb->last_error.", ".serialize($req).")");
		# Re-read the session value after the attempted raise
		# NOTE(review): SET GLOBAL normally only affects new connections, not the current session - confirm whether this re-read can ever see the new value
		$mp = (int)$wpdb->get_var("SELECT @@session.max_allowed_packet");
		# Default to 1Mb
		$mp = (is_numeric($mp) && $mp > 0) ? $mp : 1048576;
	}
	$this->log("Max packet size: ".round($mp/1048576, 1)." Mb");
	return $mp;
}
# Q. Why is this abstracted into a separate function? A. To allow poedit and other parsers to pick up the need to translate strings passed to it (and not pick up all of those passed to log()).
# 1st argument = the line to be logged (obligatory)
# Further arguments = parameters for sprintf()
public function log_e() {
	$params = func_get_args();
	$format = array_shift($params);
	# A WP_Error is delegated wholesale; nothing is echoed in that case
	if (is_wp_error($format)) {
		$this->log_wp_error($format);
		return;
	}
	# Log whilst still in English (the log file is for developers) ...
	$this->log(vsprintf($format, $params));
	# ... but echo the translated form for the user
	echo vsprintf(__($format, 'updraftplus'), $params).'<br>';
}
// This function is used by cloud methods to provide standardised logging, but more importantly to help us detect that meaningful activity took place during a resumption run, so that we can schedule further resumptions if it is worthwhile
public function record_uploaded_chunk($percent, $extra = '', $file_path = false) {
	// Touch the original file, which helps prevent overlapping runs
	if ($file_path) touch($file_path);
	// A run counts as "useful" if overall progress keeps an average of at least
	// 0.7% per resumption after a 9-run grace period (wrapping past 100% is OK).
	// At 5-minute spacing that caps out around 150 runs (~12h40m); the scheduler
	// shortens intervals when it can, so this limit rarely bites in practice.
	$threshold = 0.7 * ($this->current_resumption - max($this->jobdata_get('uploaded_lastreset'), 9));
	if ($percent > $threshold) $this->something_useful_happened();
	// Standardised progress logging, attributed to the active cloud service
	global $updraftplus_backup;
	if (!empty($updraftplus_backup->current_service)) {
		$message = ucfirst($updraftplus_backup->current_service)." chunked upload: $percent % uploaded";
		if ($extra) $message .= " ($extra)";
		$this->log($message);
	}
	// Mirror the fraction complete into the upload sub-status for the UI
	$upload_status = $this->jobdata_get('uploading_substatus');
	if (is_array($upload_status)) {
		$upload_status['p'] = $percent/100;
		$this->jobdata_set('uploading_substatus', $upload_status);
	}
}
- function chunked_upload($caller, $file, $cloudpath, $logname, $chunk_size, $uploaded_size) {
- $fullpath = $this->backups_dir_location().'/'.$file;
- $orig_file_size = filesize($fullpath);
- if ($uploaded_size >= $orig_file_size) return true;
- $fp = @fopen($fullpath, 'rb');
- if (!$fp) {
- $this->log("$logname: failed to open file: $fullpath");
- $this->log("$file: ".sprintf(__('%s Error: Failed to open local file','updraftplus'), $logname), 'error');
- return false;
- }
- $chunks = floor($orig_file_size / $chunk_size);
- // There will be a remnant unless the file size was exactly on a 5Mb boundary
- if ($orig_file_size % $chunk_size > 0 ) $chunks++;
- $this->log("$logname upload: $file (chunks: $chunks) -> $cloudpath ($uploaded_size)");
- if ($chunks < 2) {
- return 1;
- } else {
- $errors_so_far = 0;
- for ($i = 1 ; $i <= $chunks; $i++) {
- $upload_start = ($i-1)*$chunk_size;
- // The file size -1 equals the byte offset of the final byte
- $upload_end = min($i*$chunk_size-1, $orig_file_size-1);
- // Don't forget the +1; otherwise the last byte is omitted
- $upload_size = $upload_end - $upload_start + 1;
- fseek($fp, $upload_start);
- $uploaded = $caller->chunked_upload($file, $fp, $i, $upload_size, $upload_start, $upload_end);
- if ($uploaded) {
- $perc = round(100*((($i-1) * $chunk_size) + $upload_size)/max($orig_file_size, 1), 1);
- # $perc = round(100*$i/$chunks,1); # Takes no notice of last chunk likely being smaller
- $this->record_uploaded_chunk($perc, $i, $fullpath);
- } else {
- $errors_so_far++;
- if ($errors_so_far>=3) return false;
- }
- }
- if ($errors_so_far) return false;
- // All chunks are uploaded - now combine the chunks
- $ret = true;
- if (method_exists($caller, 'chunked_upload_finish')) {
- $ret = $caller->chunked_upload_finish($file);
- if (!$ret) {
- $this->log("$logname - failed to re-assemb…
Large files files are truncated, but you can click here to view the full file