#!/bin/sh
# Back up the local ganglia rrds directory to S3, coordinating with the
# other ganglia hosts via a per-weekday lock object in the bucket.
#
# Requires: s3cmd configured through ~/.s3cfg; write access to the bucket.
# Exits 0 on a completed backup, 1 when skipped (already done / in
# progress / unconfigured) or on error.

weekday=$(date '+%u')
ganglia_dir="/var/lib/ganglia"
backup_file="$ganglia_dir/ganglia_backup.tar.$weekday"
log_file="$ganglia_dir/ganglia_backup.log.$weekday"
lock_file="backup.lock.$weekday"
bucket="s3://ganglia-backups"

if [ ! -e "$HOME/.s3cfg" ]; then
  echo "bucket not configured..." > "$log_file"
  exit 1
fi

# In case the cronjob completed elsewhere already: any listing entry
# carrying today's date means another host finished this run.
if s3cmd ls "$bucket" | grep -q "$(date '+%Y-%m-%d')"; then
  echo "backup already done..." > "$log_file"
  exit 1
fi

# Check for a backup in progress on another host.
# -F: the lock name contains '.' which grep would otherwise treat as regex.
if s3cmd ls "$bucket" | grep -q -F "$lock_file"; then
  s3cmd get "$bucket/$lock_file" "$ganglia_dir/$lock_file"
  echo "backup already in progress on $(cat "$ganglia_dir/$lock_file") ... bailing" > "$log_file"
  rm -f "$ganglia_dir/$lock_file"
  exit 1
fi

# Lock file so that no other ganglia boxes start backing up while this is
# going on. The trap guarantees the lock (and scratch files) are removed
# on EVERY exit path — previously a failed tar/put left the lock object in
# the bucket, permanently blocking this weekday's backup on all hosts.
cleanup() {
  s3cmd rm "$bucket/$lock_file" >/dev/null 2>&1
  rm -f "$ganglia_dir/$lock_file" "$backup_file"
}
trap cleanup EXIT

hostname -f > "$ganglia_dir/$lock_file"
s3cmd put "$ganglia_dir/$lock_file" "$bucket"

# BUGFIX: the original '2>&1 > file' duplicated stderr onto the terminal
# BEFORE redirecting stdout, so tar/s3cmd errors never reached the log.
# '> file 2>&1' captures both streams.
tar -cf "$backup_file" "$ganglia_dir/rrds" > "$log_file" 2>&1
s3cmd --no-check-md5 --multipart-chunk-size-mb=3000 put "$backup_file" "$bucket" >> "$log_file" 2>&1
exit 0