I'm working on a script that backs up an S3 bucket to Google Cloud Storage every day. I'm not sure whether the script has any potential bug or error that might destroy data on either side. The data is quite sensitive and I don't want to mess it up. Any feedback is welcome.
#!/bin/sh
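# Back up an S3 bucket to Google Cloud Storage as a dated .tar.gz archive.
# Requires awscli and gsutil on the PATH with credentials already configured,
# plus the environment variables S3_BUCKET, GC_BUCKET, and BACKUP_TO_EMAIL.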
if hash aws 2>/dev/null; then
    echo 'awscli is installed.'
else
    echo 'Please install awscli by running: sudo pip install awscli'
    exit 1
fi
if hash gsutil 2>/dev/null; then
    echo 'gsutil is installed.'
else
    echo 'Please install gsutil by running: sudo pip install gsutil'
    exit 1
fi
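# Validate the required configuration before touching either bucket.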
# A parameter test also sees variables that are set but not exported,
# which 'env | grep' would miss.
if [ -n "${BACKUP_TO_EMAIL:-}" ]; then
    echo "After the backup is done an email will be sent to $BACKUP_TO_EMAIL"
else
    echo 'BACKUP_TO_EMAIL is not set; please set it.'
    exit 1
fi
if [ -n "${S3_BUCKET:-}" ]; then
    echo "Checking if $S3_BUCKET exists"
    # 'aws s3 ls' exits non-zero when the bucket is missing or inaccessible;
    # checking the exit status is more reliable than grepping the error text.
    if ! aws s3 ls "s3://$S3_BUCKET" >/dev/null 2>&1; then
        echo "Bucket $S3_BUCKET doesn't exist or is not accessible; please check again."
        exit 1
    fi
else
    echo 'S3_BUCKET is not set; please set it.'
    exit 1
fi
if [ -n "${GC_BUCKET:-}" ]; then
    echo "Checking if $GC_BUCKET exists"
    # 'gsutil ls' likewise exits non-zero when the bucket is missing or inaccessible.
    if ! gsutil ls "gs://$GC_BUCKET" >/dev/null 2>&1; then
        echo "Bucket $GC_BUCKET doesn't exist or is not accessible; please check again."
        exit 1
    fi
else
    echo 'GC_BUCKET is not set; please set it.'
    exit 1
fi
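# The steps below stage a dated local copy, compress it, upload the archive
# to GCS, and clean up, so each run leaves one self-contained object behind.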
echo "Backing up now..."
# Alternative: mirror the buckets directly without a local staging copy:
# gsutil -m rsync -r "s3://$S3_BUCKET" "gs://$GC_BUCKET"
echo "Creating new backup folder"
datestamp=$(date +%m%d%y)
mkdir $datestamp
echo "Downloading backup"
aws s3 sync "s3://$S3_BUCKET" $datestamp
echo "Compressing site backup"
tar -zcvf $datestamp.tar.gz $datestamp
file=$datestamp.tar.gz
rm -rf $datestamp
echo "Uploading to GC"
gsutil cp $file "gs://$GC_BUCKET"
echo "Deleting temporary files"
rm $file
echo "Sending email"
body="Backup is complete and the file is in gs://$GC_BUCKET/$file"
echo $body | mail $BACKUP_TO_EMAIL -s "S3 to GS backup"
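For context, the script is meant to run unattended from cron once a day, roughly like the entry below (the schedule, bucket names, and paths are placeholders, not my real values):

# Run the backup every day at 02:00 and append all output to a log file.
0 2 * * * S3_BUCKET=my-s3-bucket GC_BUCKET=my-gcs-bucket BACKUP_TO_EMAIL=me@example.com /home/me/backup-s3-to-gcs.sh >>/var/log/s3-backup.log 2>&1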