wiki:S3QLBackup

Version 5 (modified by chris, 3 years ago) (diff)

--

Table of Contents

  1. s3ql_backup

S3QL Backup Script

Following is a modified version of the s3ql_backup.sh script that was provided by the Debian package at /usr/share/doc/s3ql/examples/s3ql_backup.sh.

It expects a /etc/s3ql/SERVERNAME file listing the directories to be backed up for each server; the SERVERNAME is expected to match the bucket name.

s3ql_backup

#!/bin/bash

# Abort entire script if any command fails
set -e

# This script assumes that the bucket name and the directory under
# /media match, and it also assumes that a file with the same name,
# containing a list of directories to back up, exists in /etc/s3ql.
CONFIG_DIR="/etc/s3ql"
# S3 server URL
SERVER="s3c://s.qstack.advania.com:443"
# mount directory for s3ql file systems
MOUNT="/media/s3ql"
# Base source directory for the files to back up; each server is
# expected to be sshfs-mounted under /media/<bucketname>.
# rsync command
RSYNC="rsync -aHAXx --delete-during --delete-excluded --partial -v"

# Determine the bucket name / directory: use the first positional
# argument when given, otherwise fall back to an interactive prompt.
if [[ $1 ]]; then
  BUCKET=$1
else
  echo "Type the bucketname and then [ENTER]:"
  # -r: keep backslashes in the typed name literal
  read -r BUCKET
fi

# sshfs mounted at /media/server-name
SOURCE="/media/$BUCKET"

# A list of directories to back up must exist at $CONFIG_DIR/$BUCKET,
# one directory per line.
if [[ ! -f "$CONFIG_DIR/$BUCKET" ]]; then
  echo "You need to create $CONFIG_DIR/$BUCKET with a list of directories to backup" >&2
  # non-zero status so callers (e.g. cron) can detect the failure
  exit 1
else
  BACKUP_LIST="$CONFIG_DIR/$BUCKET"
fi

# The following two commands are commented out as the mnt-s3ql script covers
# this and is run via cron before this script is run

# Recover cache if e.g. system was shut down while fs was mounted
#fsck.s3ql --backend-options="dumb-copy" --batch "$SERVER/$BUCKET"

# Mount file system
#mount.s3ql --backend-options="dumb-copy" "$SERVER/$BUCKET" "$MOUNT/$BUCKET"

# Figure out the most recent backup.  Backup directories are named
# YYYY-MM-DD_HH:MM:SS, so a lexical sort is also a chronological one.
# Using $( ) with a quoted heredoc avoids the double-escaping that the
# old backtick form needed (\\d), and python3 replaces the EOL python2.
cd "$MOUNT/$BUCKET"
LAST_BACKUP=$(python3 <<'EOF'
import os
import re
backups = sorted(x for x in os.listdir('.') if re.match(r'^[\d-]{10}_[\d:]{8}$', x))
if backups:
    print(backups[-1])
EOF
)

# Duplicate the most recent backup unless this is the first backup.
# $( ) is used instead of the deprecated backtick form.
NEW_BACKUP=$(date "+%Y-%m-%d_%H:%M:%S")
if [ -n "$LAST_BACKUP" ]; then
    echo "Copying $LAST_BACKUP to $NEW_BACKUP..."
    s3qlcp "$LAST_BACKUP" "$NEW_BACKUP"

    # Make the last backup immutable
    # (in case the previous backup was interrupted prematurely)
    s3qllock "$LAST_BACKUP"
fi

# ..and update the copy
#rsync -aHAXx --delete-during --delete-excluded --partial -v \
#    --exclude /.cache/ \
#    --exclude /.s3ql/ \
#    --exclude /.thumbnails/ \
#    --exclude /tmp/ \
#    "/home/my_username/" "./$NEW_BACKUP/"

if [[ -d "$MOUNT/$BUCKET" ]]; then
  if [[ ! -d "$MOUNT/$BUCKET/$NEW_BACKUP" ]]; then
    echo "$MOUNT/$BUCKET/$NEW_BACKUP doesn't exist so creating it"
    mkdir -p "$MOUNT/$BUCKET/$NEW_BACKUP"
  fi
  # Read the backup list line by line so directory names containing
  # whitespace survive intact; the old "for dir in $(<file)" word-split
  # every line.  The "|| [[ -n "$dir" ]]" keeps a final unterminated
  # line from being dropped.
  while IFS= read -r dir || [[ -n "$dir" ]]; do
    # skip blank lines in the list (the old word-splitting ignored them)
    [[ -n "$dir" ]] || continue
    mkdir -p "$MOUNT/$BUCKET/$NEW_BACKUP$dir/"
    $RSYNC "$SOURCE$dir/" "$MOUNT/$BUCKET/$NEW_BACKUP$dir/"
  done < "$BACKUP_LIST"
else
  echo "$MOUNT/$BUCKET doesn't exist - something has gone horribly wrong" >&2
  # non-zero status so the failure is visible to cron / callers
  exit 1
fi

# Make the new backup immutable.  $NEW_BACKUP is relative to the
# current directory, which is still $MOUNT/$BUCKET from the cd above.
s3qllock "$NEW_BACKUP"

# Expire old backups

# Note that expire_backups.py comes from contrib/ and is not installed
# by default when you install from the source tarball. If you have
# installed an S3QL package for your distribution, this script *may*
# be installed, and it *may* also not have the .py ending.
#expire_backups --use-s3qlrm 1 7 14 31 90 180 360