#!/bin/bash
#
# Backup script: rsync the configured paths into a local clone directory,
# create a dated tar.gz snapshot (tagged "-week-NN" once a week), prune old
# snapshots locally and on S3 according to the retention settings, and
# upload the new snapshot with s3cmd. Settings are read from "$0.cfg".

s3cmd=/home/iet/bin/s3cmd

# Defaults; override them in the config file sourced below.
BACKUP_PATHS=""           # space-separated list of paths to back up
CLONE_PATH=""             # local rsync mirror of the backup paths
STORE_PATH=""             # where snapshot archives are stored locally
REMOTE_S3_PATH=""         # s3cmd destination (s3:// URL, no trailing slash)
BACKUP_NAME="some-account"
STORE_DAILY_REMOTE=7      # daily snapshots to keep on S3
STORE_WEEKLY_REMOTE=5     # weekly snapshots to keep on S3
STORE_DAILY_LOCAL=30      # daily snapshots to keep locally
STORE_WEEKLY_LOCAL=30     # weekly snapshots to keep locally

cfg="$0.cfg"
if [ ! -f "$cfg" ]; then
    echo "Missing config file '$cfg'"
    exit 1
fi
. "$cfg"

check_config() {
    if [ -z "$BACKUP_PATHS" ] || [ -z "$CLONE_PATH" ] || [ -z "$STORE_PATH" ] || [ -z "$REMOTE_S3_PATH" ]; then
        echo "Invalid config in '$cfg'"
        exit 1
    fi
}

log() {
    d=$(date +"%Y-%m-%d %H:%M:%S")
    echo "$d - $*"
}

# Print every entry after the first $2 entries of the newline-separated
# list $1. The list must already be sorted newest-first; filenames must
# not contain whitespace.
getFilesToDelete() {
    local files=$1 keepCount=$2 keep=0 f
    for f in $files; do
        ((keep++))
        if (( keep > keepCount )); then
            echo "$f"
        fi
    done
}

check_config

mkdir -p "$CLONE_PATH"
mkdir -p "$STORE_PATH"

# Mirror all configured paths into the clone directory.
for p in $BACKUP_PATHS; do
    echo "rsync item '$p'"
    rsync -av "$p" "$CLONE_PATH"
done

log Creating snapshot archive

today=$(date +%Y-%m-%d)
weekly_day=7    # ISO weekday (%u): 7 = Sunday
weekly=""
if [ "$(date +%u)" == "$weekly_day" ]; then
    # Tag the Sunday snapshot with the week number so it falls under the
    # weekly retention rules instead of the daily ones.
    weekly="-week-$(date +%U)"
fi
dstfile="$today$weekly-$BACKUP_NAME.tar.gz"
dstfilepath="$STORE_PATH/$dstfile"

# Prune local snapshots. The YYYY-MM-DD filename prefix makes a reverse
# lexical sort return newest-first.
filesLocalDaily=$(ls -1 "$STORE_PATH" | grep -v -- "-week-" | sort -r)
filesLocalWeekly=$(ls -1 "$STORE_PATH" | grep -- "-week-" | sort -r)
filesDelLocal=$(
    getFilesToDelete "$filesLocalDaily" "$STORE_DAILY_LOCAL"
    getFilesToDelete "$filesLocalWeekly" "$STORE_WEEKLY_LOCAL"
)
for f in $filesDelLocal; do
    fp="$STORE_PATH/$f"
    echo "removing $fp"
    rm -f "$fp"
done

log Compressing files
cd "$CLONE_PATH" || exit 1
#tar -czf $dstfilepath ./*
tar -czf "$dstfilepath" .

log Uploading to S3
# "s3cmd ls" prints the object URL in the fourth column.
filesRemoteDaily=$($s3cmd ls "$REMOTE_S3_PATH" | awk '{print $4}' | grep -v -- "-week-" | sort -r)
filesRemoteWeekly=$($s3cmd ls "$REMOTE_S3_PATH" | awk '{print $4}' | grep -- "-week-" | sort -r)
filesDelRemote=$(
    getFilesToDelete "$filesRemoteDaily" "$STORE_DAILY_REMOTE"
    getFilesToDelete "$filesRemoteWeekly" "$STORE_WEEKLY_REMOTE"
)
for f in $filesDelRemote; do
    echo "removing $f"
    $s3cmd del "$f"
done
$s3cmd put "$dstfilepath" "$REMOTE_S3_PATH/$dstfile"

log Backup finished
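
# --- Example configuration ---
# The script sources "$0.cfg", i.e. a file named after the script with a
# ".cfg" suffix (for backup.sh this would be backup.sh.cfg in the same
# directory). A minimal sketch of such a file follows; every path, bucket
# URL, and name below is a hypothetical placeholder, not a value taken
# from this script:
#
#   BACKUP_PATHS="/etc /home/iet/projects"         # space-separated source dirs
#   CLONE_PATH="/var/backups/clone"                # local rsync mirror
#   STORE_PATH="/var/backups/archives"             # local snapshot archive dir
#   REMOTE_S3_PATH="s3://example-bucket/backups"   # s3cmd destination, no trailing slash
#   BACKUP_NAME="my-server"                        # used in snapshot filenames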