#!/bin/bash
set -euo pipefail
# -------------------------------------------------------------------
# Panels Folder Backup Script
#
# Description:
# - Creates a compressed archive of the specified folder (default: ~/data)
# - Optionally excludes specific files/folders using a config file
# (backup_panels_excludes.conf, stored in the same directory as the script)
# The excludes file follows a simple format similar to .gitignore:
# * Lines starting with '#' are treated as comments
# * Blank lines are ignored
# * Wildcards (e.g. *.log) are supported
# * A trailing slash indicates a directory (e.g. cache/)
# * Directory patterns are excluded at any depth in the folder tree
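# Example backup_panels_excludes.conf (hypothetical patterns; adjust to your data):
# # rotated logs, anywhere in the tree
# *.log
# # any cache/ directory, at any depth
# cache/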
# - Temporarily stores the archive as .tar, compresses to .tar.gz using pigz
# - Uploads the compressed file to an rclone remote (see "Remote" below)
# - Deletes the local archive after upload to save space
# - Removes remote backups older than 30 days to control storage use
#
# Usage:
# ./backup_panels.sh [source_directory]
# - Defaults to "$HOME/data" if no source_directory is provided
# - Logs are stored in backup_logs/ next to the script
# - The backup archive is staged in $HOME/panels_backup before upload
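# Example (hypothetical path):
# ./backup_panels.sh /srv/panels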
#
# Cron Example:
# # Run every Sunday at 02:00
# 0 2 * * 0 /bin/bash /home/youruser/github/ida-scripts/backup_panels.sh
#
# Requirements:
# - tar (to create the archive)
# - pv (to monitor progress during compression)
# - pigz (parallel gzip compression)
# - rclone (configured with the remote synology)
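# (e.g. on Debian/Ubuntu: sudo apt install pv pigz rclone; tar is usually preinstalled)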
#
# Remote:
# By default, uploads to the rclone remote "synology:backups/panels_backup"
# To change, edit the RCLONE_REMOTE variable in the script
# -------------------------------------------------------------------
SOURCE_DIR="${1:-$HOME/data}"
TIMESTAMP=$(date +'%Y-%m-%d_%H-%M-%S')
# Get absolute path to the script's directory
SCRIPT_DIR="$(dirname "$(realpath "$0")")"
# Config & logs relative to the script directory
CONFIG_FILE="$SCRIPT_DIR/backup_panels_excludes.conf"
LOG_DIR="$SCRIPT_DIR/backup_logs"
LOGFILE="$LOG_DIR/panels_backup.log"
# Staging, remote and naming
BACKUP_DIR="$HOME/panels_backup"
ARCHIVE_NAME="$(basename "$SOURCE_DIR")_${TIMESTAMP}.tar"
ARCHIVE_PATH="$BACKUP_DIR/$ARCHIVE_NAME"
GZ_PATH="${ARCHIVE_PATH}.gz"
RCLONE_REMOTE="synology:backups/panels_backup"
LOG_MAX_MB=10
mkdir -p "$BACKUP_DIR" "$LOG_DIR"
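# Optional sanity check (a minimal sketch): fail fast with a clear error if any
# tool from the Requirements section above is missing.
for tool in tar pv pigz rclone ionice; do
command -v "$tool" >/dev/null 2>&1 || { echo "Error: required tool '$tool' not found in PATH" >&2; exit 1; }
done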
# If the log has grown past $LOG_MAX_MB MB, keep only its most recent $LOG_MAX_MB MB
if [ -f "$LOGFILE" ] && [ "$(stat -c%s "$LOGFILE")" -gt $((LOG_MAX_MB * 1024 * 1024)) ]; then
tail -c "${LOG_MAX_MB}M" "$LOGFILE" > "${LOGFILE}.tmp" && mv "${LOGFILE}.tmp" "$LOGFILE"
fi
{
echo "[$(date)] Starting backup for folder: $SOURCE_DIR"
# Build tar command with robust excludes
TAR_CMD=(tar --wildcards --wildcards-match-slash -cf "$ARCHIVE_PATH" -C "$(dirname "$SOURCE_DIR")")
if [ -f "$CONFIG_FILE" ]; then
echo "[$(date)] Using exclusions from: $CONFIG_FILE"
while IFS= read -r pattern || [[ -n "$pattern" ]]; do
# strip CR (Windows line endings) and trim surrounding whitespace
# (printf rather than echo, so patterns such as "-n" are not swallowed)
pattern="${pattern%$'\r'}"
pattern="$(printf '%s' "$pattern" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
[[ -z "$pattern" || "$pattern" =~ ^# ]] && continue
if [[ "$pattern" == */ ]]; then
# Directory pattern: exclude it anywhere in the tree
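# e.g. "cache/" becomes --exclude="*/cache" --exclude="*/cache/*", which
# (with --wildcards-match-slash) matches the directory at any depth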
name="${pattern%/}"
TAR_CMD+=( --exclude="*/$name" --exclude="*/$name/*" )
else
# File/filename glob pattern: pass through
TAR_CMD+=( --exclude="$pattern" )
fi
done < "$CONFIG_FILE"
fi
TAR_CMD+=( "$(basename "$SOURCE_DIR")" )
"${TAR_CMD[@]}"
echo "[$(date)] Archive created: $ARCHIVE_PATH"
# Compress with pigz (streamed via pv)
echo "[$(date)] Starting compression..."
ionice -c2 -n7 nice -n19 pv --force -i 60 "$ARCHIVE_PATH" | pigz -6 > "$GZ_PATH"
echo "[$(date)] Compression completed: $GZ_PATH"
# Remove original tar
rm "$ARCHIVE_PATH"
echo "[$(date)] Original TAR file deleted: $ARCHIVE_PATH"
# Upload to the configured rclone remote
/usr/bin/rclone copy -v --progress "$GZ_PATH" "$RCLONE_REMOTE"
echo "[$(date)] Rclone upload completed: $RCLONE_REMOTE"
# Remove local .gz
rm "$GZ_PATH"
echo "[$(date)] Local .gz file deleted: $GZ_PATH"
# Remote retention: remove archives older than 30 days
/usr/bin/rclone delete --min-age 30d "$RCLONE_REMOTE"
echo "[$(date)] Remote retention cleanup completed (older than 30 days)"
echo "[$(date)] Backup routine completed successfully"
} >> "$LOGFILE" 2>&1