#!/bin/bash
#
# Script to backup Hall-D wiki
#
# May 15, 2008    David Lawrence
# April 18, 2014 - Updated    David Lawrence
#
# This script can be used to make a backup of the Hall-D wiki.
# It can be run in 2 modes. The first will simply create a
# full backup (as a tarball) in the current working directory.
# If, however, the "-c" command-line switch is used, then the
# tarball will be created in the BACKUPS directory on the halld
# work disk and older backups removed such that only the 5
# most recent ones are kept.
#

export DATABASE_HOST=cnidb.jlab.org
export DATABASE_NAME=halldwikidb
export DATABASE_USER=halldwiki
export DATABASE_PASS=wiki
export BACKUPDIR=wiki_backup_`date +%y-%m-%d`
export WIKI_ROOT=/group/halld/www/halldweb1/html/wiki
export CRON_BACKUP_DIR=/work/halld/BACKUPS/wiki
export CRON_BACKUP=0

# Check command line arguments
while getopts ":c" Option
do
    case $Option in
        c ) CRON_BACKUP=1;;
        * ) echo "Unimplemented option chosen.";;   # DEFAULT
    esac
done
shift $(($OPTIND - 1))

# In cron mode, work in the BACKUPS directory on the halld work disk.
# Bail out if we cannot get there so that neither the backup nor the
# cleanup below ever runs in the wrong directory.
if [ $CRON_BACKUP -eq 1 ]
then
    mkdir -p $CRON_BACKUP_DIR
    cd $CRON_BACKUP_DIR || exit 1
fi

echo "Working in directory:"
pwd

# Create directory to hold everything we want to tarball
mkdir $BACKUPDIR

# Dump the wiki database. "--databases" makes the dump include the
# CREATE DATABASE and USE statements needed to restore it directly.
# Note that passing the password with -p on the command line makes it
# visible in the process list while mysqldump runs.
echo "Dumping MySQL database from $DATABASE_HOST ..."
mysqldump -h $DATABASE_HOST -u $DATABASE_USER -p$DATABASE_PASS --add-drop-table --databases $DATABASE_NAME > $BACKUPDIR/halldwikidb.sql

echo "Copying extensions ..."
cp -rp --no-dereference $WIKI_ROOT/extensions $BACKUPDIR/extensions.JLab
cp -p --no-dereference $WIKI_ROOT/README.JLab $BACKUPDIR

echo "Copying images ..."
cp -rp --no-dereference $WIKI_ROOT/images $BACKUPDIR

echo "Copying LocalSettings.php ..."
cp -p $WIKI_ROOT/LocalSettings.php $BACKUPDIR

# Create the tarball, and only remove the staging directory if the
# tarball was written successfully
echo "Creating tarball ..."
tar czf ${BACKUPDIR}.tgz $BACKUPDIR && rm -rf $BACKUPDIR

# If the "-c" option was specified, then remove backup files
# until we have no more than 5. Note that this part is only
# slightly modified from a script copied from:
# http://www.mediawiki.org/wiki/User:Flominator/Backup_MW
if [ $CRON_BACKUP -eq 1 ]
then
    # Count files in directory
    file_count=`ls -1A wiki_backup*.tgz | wc -l`

    # Do until there are fewer than 6 files present
    while [ $file_count -ge 6 ]
    do
        # Delete oldest file (the wiki_backup_YY-MM-DD names
        # sort alphabetically in date order)
        rm `ls -1Ar wiki_backup*.tgz | tail -1`

        # Count files again
        file_count=`ls -1A wiki_backup*.tgz | wc -l`
    done
fi
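
# Rotation note: the while loop above keeps only the 5 newest tarballs
# by repeatedly deleting the alphabetically oldest one. A more compact
# sketch of the same idea, assuming GNU coreutils and sorting by file
# modification time rather than by name:
#
#   ls -1t wiki_backup*.tgz | tail -n +6 | xargs -r rm --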
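
# Usage sketch (illustrative; the script filename and crontab path
# below are hypothetical, not taken from this repository):
#
#   ./backup_halld_wiki.sh       # write tarball to the current directory
#   ./backup_halld_wiki.sh -c    # write to $CRON_BACKUP_DIR, keep 5 newest
#
# A nightly crontab entry for the "-c" mode might look like:
#
#   0 3 * * * /path/to/backup_halld_wiki.sh -c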
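
# Restore sketch (illustrative, not executed by this script; replace
# YY-MM-DD with an actual backup date). Because the dump was made with
# --databases, it carries its own CREATE DATABASE and USE statements,
# so the database name need not be given to the mysql client:
#
#   tar xzf wiki_backup_YY-MM-DD.tgz
#   mysql -h cnidb.jlab.org -u halldwiki -p < wiki_backup_YY-MM-DD/halldwikidb.sql
#   cp -rp wiki_backup_YY-MM-DD/images $WIKI_ROOT/
#   cp -p wiki_backup_YY-MM-DD/LocalSettings.php $WIKI_ROOT/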