backup.sh

#!/bin/bash
#
# Rclone script to archive Jeff Geerling's most important data to an Amazon S3
# Glacier Deep Archive-backed bucket.
#
# Basic usage:
#   ./backup.sh
#
# Script requires valid credentials - set up with `rclone config`.
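#
# A minimal S3 remote entry in ~/.config/rclone/rclone.conf looks roughly
# like this (a sketch; the `personal` name matches the remote variable below,
# and DEEP_ARCHIVE is the storage class this script targets):
#
#   [personal]
#   type = s3
#   provider = AWS
#   env_auth = true
#   storage_class = DEEP_ARCHIVE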

RCLONE=/usr/local/bin/rclone

# Check if rclone is installed.
if ! [ -x "$(command -v $RCLONE)" ]; then
  echo 'Error: rclone is not installed.' >&2
  exit 1
fi

# Variables.
rclone_remote=personal
rclone_s3_bucket=jg-archive
show_progress=true
bandwidth_limit=23M
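
# Optional sanity check: fail early if the remote above isn't configured yet.
# `rclone listremotes` prints each configured remote with a trailing colon,
# e.g. `personal:`.
if ! $RCLONE listremotes | grep -q "^${rclone_remote}:$"; then
  echo "Error: rclone remote '$rclone_remote' is not configured." >&2
  exit 1
fi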

# Make sure bucket exists.
$RCLONE mkdir "$rclone_remote:$rclone_s3_bucket"

# List of directories to clone. Each MUST be an absolute path, beginning with /.
declare -a dirs=(
"/Volumes/Brachiosaur/App Data"
"/Volumes/Brachiosaur/Presentation Recordings"
"/Volumes/Brachiosaur/Timelapses"
"/Volumes/Brachiosaur/YouTube Videos"
"/Volumes/Brachiosaur/Old School Files"
"/Volumes/Brachiosaur/Old Websites"
"/Volumes/Media/Movies"
"/Volumes/Media/TV Shows"
)

# Clone each directory. `--progress` gives nicer (but more verbose) output.
progress_flag=""
[ "$show_progress" = true ] && progress_flag="--progress"
for i in "${dirs[@]}"; do
  echo "Syncing Directory: $i"
  despaced="${i// /_}"
  $RCLONE sync "$i" "$rclone_remote:$rclone_s3_bucket$despaced" \
    $progress_flag --skip-links --bwlimit "$bandwidth_limit"
done
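
# To preview a run without uploading anything, append rclone's `--dry-run`
# flag to the sync command above. To spot-check the archive afterwards,
# something like `rclone size personal:jg-archive` reports the object count
# and total size.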