Automated Backup Systems for Web Servers
Implement comprehensive automated backup solutions:
#!/bin/bash
# /usr/local/bin/webserver-backup.sh
#
# Automated web-server backup: configurations, SSL certificates, web data
# and databases, with remote sync and retention-based cleanup.
#
# Fail fast on errors, unset variables and mid-pipeline failures.
# NOTE: -e is suppressed inside the trailing `{ main; } || { ... }` guard,
# so per-step failures there remain best-effort.
set -euo pipefail

# Configuration (constants — marked readonly so later code cannot clobber them)
readonly BACKUP_ROOT="/backup"
readonly REMOTE_BACKUP="backup-server:/remote-backup"
readonly S3_BUCKET="s3://company-webserver-backups"
readonly RETENTION_DAYS=30
readonly ENCRYPTION_KEY="/etc/backup/.encryption-key"
readonly LOG_FILE="/var/log/webserver-backup.log"

# Per-run backup directory, timestamped to the second.
# Assignment kept separate from readonly so a `date` failure is not masked.
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
readonly TIMESTAMP
readonly BACKUP_DIR="$BACKUP_ROOT/$TIMESTAMP"
mkdir -p "$BACKUP_DIR"/{configs,data,ssl,databases}
# Logging function
# Write a timestamped message to stdout and append it to $LOG_FILE.
log() {
  local stamp
  stamp=$(date '+%Y-%m-%d %H:%M:%S')
  printf '[%s] %s\n' "$stamp" "$1" | tee -a "$LOG_FILE"
}
# Backup web server configurations
# Purpose: snapshot Apache/Nginx/system configuration and firewall rules
# into "$BACKUP_DIR/configs". Best-effort: absent services are skipped and
# tar errors are deliberately silenced so one missing path does not abort
# the whole configuration backup.
backup_configs() {
log "Starting configuration backup..."
# Apache configuration (only when the config directory exists)
if [ -d /etc/apache2 ]; then
tar -czf "$BACKUP_DIR/configs/apache2.tar.gz" \
--exclude='*.log' \
/etc/apache2 2>/dev/null
# Export enabled sites and modules as human-readable text.
# NOTE(review): presumes apache2ctl is on PATH whenever /etc/apache2
# exists — a leftover config dir without the binary leaves shell error
# text in these files instead. Confirm acceptable.
apache2ctl -S > "$BACKUP_DIR/configs/apache-sites.txt" 2>&1
apache2ctl -M > "$BACKUP_DIR/configs/apache-modules.txt" 2>&1
fi
# Nginx configuration
if [ -d /etc/nginx ]; then
tar -czf "$BACKUP_DIR/configs/nginx.tar.gz" \
--exclude='*.log' \
/etc/nginx 2>/dev/null
# nginx -T dumps the full resolved configuration (all includes expanded)
nginx -T > "$BACKUP_DIR/configs/nginx-config-dump.txt" 2>&1
fi
# System configurations: hostname/network identity, mounts, cron jobs,
# security policy and custom systemd units.
tar -czf "$BACKUP_DIR/configs/system.tar.gz" \
/etc/hosts \
/etc/hostname \
/etc/network \
/etc/resolv.conf \
/etc/fstab \
/etc/crontab \
/etc/cron.d \
/etc/security \
/etc/systemd/system/*.service \
2>/dev/null
# Firewall rules, exported as restorable text dumps
if command -v ufw &> /dev/null; then
ufw status verbose > "$BACKUP_DIR/configs/ufw-rules.txt"
fi
if command -v iptables &> /dev/null; then
iptables-save > "$BACKUP_DIR/configs/iptables-rules.txt"
ip6tables-save > "$BACKUP_DIR/configs/ip6tables-rules.txt"
fi
log "Configuration backup completed"
}
# Backup SSL certificates and keys, encrypted at rest.
backup_ssl() {
  log "Starting SSL backup..."

  # Stream the archive of certificate material straight into an encrypted
  # blob so plaintext private keys never touch the backup directory.
  # NOTE(review): openssl enc without -pbkdf2 uses the weak legacy key
  # derivation; the generated restore.sh decrypts with identical
  # parameters, so harden both sides together if changing this.
  tar -czf - \
    /etc/letsencrypt \
    /etc/ssl/certs \
    /etc/ssl/private \
    2>/dev/null | \
    openssl enc -aes-256-cbc -salt -pass file:"$ENCRYPTION_KEY" \
      -out "$BACKUP_DIR/ssl/ssl-certificates-encrypted.tar.gz.enc"

  # Record per-domain certificate metadata (subject + expiry) so renewal
  # status can be checked without decrypting the archive. Fixes the
  # original, which computed $domain but never wrote it anywhere.
  local cert domain
  for cert in /etc/letsencrypt/live/*/cert.pem; do
    if [ -f "$cert" ]; then
      domain=$(basename "$(dirname "$cert")")
      {
        echo "Domain: $domain"
        openssl x509 -in "$cert" -noout -subject -enddate
      } >> "$BACKUP_DIR/ssl/certificate-info.txt"
    fi
  done

  log "SSL backup completed"
}
# Archive each existing web document root into "$BACKUP_DIR/data",
# skipping logs and cache/temp directories.
backup_webdata() {
  log "Starting web data backup..."

  # Candidate web roots; only those that actually exist are archived.
  local -a web_roots=(
    "/var/www"
    "/usr/share/nginx/html"
    "/opt/web-apps"
  )

  local root archive_name
  for root in "${web_roots[@]}"; do
    [ -d "$root" ] || continue
    # Derive a flat archive name from the path: /var/www -> _var_www
    archive_name=$(echo "$root" | tr '/' '_')
    tar -czf "$BACKUP_DIR/data/${archive_name}.tar.gz" \
      --exclude='*.log' \
      --exclude='*/cache/*' \
      --exclude='*/temp/*' \
      --exclude='*/tmp/*' \
      "$root" 2>/dev/null
  done

  log "Web data backup completed"
}
# Backup databases
# Dump MySQL/MariaDB, PostgreSQL and MongoDB databases (whichever client
# tools are installed) into "$BACKUP_DIR/databases", gzip-compressed.
backup_databases() {
  log "Starting database backup..."

  # MySQL/MariaDB: requires credentials in /root/.my.cnf so no password
  # ever appears on the command line or in `ps` output.
  if command -v mysqldump &> /dev/null; then
    if [ -f /root/.my.cnf ]; then
      # Full dump of every database; --single-transaction gives a
      # consistent InnoDB snapshot without locking tables.
      mysqldump --all-databases --single-transaction \
        --routines --triggers --events \
        | gzip > "$BACKUP_DIR/databases/mysql-all-databases.sql.gz"

      # Per-database dumps for selective restores. The exclusion pattern
      # is anchored so user databases such as "mysql_app" or "sysdata"
      # are not skipped (the original unanchored grep excluded any name
      # merely containing "mysql"/"sys"/etc.).
      mysql -e "SHOW DATABASES;" \
        | grep -Ev "^(Database|information_schema|performance_schema|mysql|sys)$" \
        | while read -r db; do
            mysqldump --single-transaction --routines --triggers --events \
              "$db" | gzip > "$BACKUP_DIR/databases/mysql-${db}.sql.gz"
          done
    fi
  fi

  # PostgreSQL: dump the entire cluster as the postgres superuser.
  if command -v pg_dump &> /dev/null; then
    sudo -u postgres pg_dumpall | gzip > "$BACKUP_DIR/databases/postgresql-all.sql.gz"
  fi

  # MongoDB: native dump with built-in gzip compression.
  if command -v mongodump &> /dev/null; then
    mongodump --out "$BACKUP_DIR/databases/mongodb" --gzip
  fi

  log "Database backup completed"
}
# Create recovery script
# Generate a self-contained restore.sh inside the backup directory.
create_recovery_script() {
  # Header heredoc is UNQUOTED so $TIMESTAMP expands now, stamping the
  # restore script with the backup it belongs to. (The original used a
  # quoted delimiter for the whole script, leaving a literal "$TIMESTAMP"
  # that would be empty when restore.sh runs.)
  cat > "$BACKUP_DIR/restore.sh" <<EOF
#!/bin/bash
# Automated recovery script
echo "Web Server Recovery Script"
echo "========================="
echo "Backup Date: $TIMESTAMP"
echo
EOF

  # Body heredoc is QUOTED so $password, $archive, $choice etc. are
  # evaluated at restore time, not while creating the script.
  cat >> "$BACKUP_DIR/restore.sh" <<'EOF'
# Function to restore configurations
restore_configs() {
echo "Restoring configurations..."
# Stop services
systemctl stop apache2 nginx
# Restore Apache
if [ -f configs/apache2.tar.gz ]; then
tar -xzf configs/apache2.tar.gz -C /
apache2ctl configtest && systemctl start apache2
fi
# Restore Nginx
if [ -f configs/nginx.tar.gz ]; then
tar -xzf configs/nginx.tar.gz -C /
nginx -t && systemctl start nginx
fi
# Restore firewall rules
if [ -f configs/iptables-rules.txt ]; then
iptables-restore < configs/iptables-rules.txt
fi
}
# Function to restore SSL certificates
restore_ssl() {
echo "Restoring SSL certificates..."
read -sp "Enter encryption password: " password
echo
openssl enc -d -aes-256-cbc -pass pass:"$password" \
-in ssl/ssl-certificates-encrypted.tar.gz.enc | \
tar -xzf - -C /
}
# Function to restore web data
restore_webdata() {
echo "Restoring web data..."
for archive in data/*.tar.gz; do
if [ -f "$archive" ]; then
echo "Extracting $archive..."
tar -xzf "$archive" -C /
fi
done
}
# Main menu
echo "Select recovery options:"
echo "1. Full recovery"
echo "2. Configurations only"
echo "3. SSL certificates only"
echo "4. Web data only"
echo "5. Exit"
read -p "Choice: " choice
case $choice in
1)
restore_configs
restore_ssl
restore_webdata
;;
2) restore_configs ;;
3) restore_ssl ;;
4) restore_webdata ;;
5) exit 0 ;;
esac
echo "Recovery completed!"
EOF
  chmod +x "$BACKUP_DIR/restore.sh"
}
# Sync to remote locations
# Bundle this run's backup directory into one archive and push it to the
# configured rsync host and S3 bucket.
sync_backups() {
  log "Syncing backups to remote locations..."

  # Single compressed archive for transfer; -C keeps paths relative.
  local archive_name="webserver-backup-$TIMESTAMP.tar.gz"
  tar -czf "$BACKUP_ROOT/$archive_name" -C "$BACKUP_ROOT" "$TIMESTAMP" || {
    log "ERROR: failed to create archive $archive_name"
    return 1
  }

  # Push to the rsync backup host, if configured. Failures are logged
  # instead of silently discarded (the original ignored rsync's status).
  if [ -n "$REMOTE_BACKUP" ]; then
    rsync -avz --progress "$BACKUP_ROOT/$archive_name" "$REMOTE_BACKUP/" \
      || log "WARNING: rsync to $REMOTE_BACKUP failed"
  fi

  # Push to S3 with infrequent-access storage and server-side encryption.
  if command -v aws &> /dev/null && [ -n "$S3_BUCKET" ]; then
    aws s3 cp "$BACKUP_ROOT/$archive_name" "$S3_BUCKET/" \
      --storage-class STANDARD_IA \
      --server-side-encryption AES256 \
      || log "WARNING: S3 upload to $S3_BUCKET failed"
  fi

  log "Remote sync completed"
}
# Cleanup old backups
# Delete local and S3 backup archives older than $RETENTION_DAYS days.
cleanup_old_backups() {
  log "Cleaning up old backups..."

  # Local cleanup: prune archives past the retention window.
  find "$BACKUP_ROOT" -name "webserver-backup-*.tar.gz" \
    -mtime +"$RETENTION_DAYS" -delete

  # S3 cleanup: list objects and remove those past retention.
  # NOTE(review): relies on GNU `date -d`; will not work with BSD date.
  if command -v aws &> /dev/null && [ -n "$S3_BUCKET" ]; then
    local cutoff line object_date object_name
    # Cutoff is loop-invariant; the original recomputed it per object.
    cutoff=$(date -d "$RETENTION_DAYS days ago" +%s)
    aws s3 ls "$S3_BUCKET/" | \
    while read -r line; do
      object_date=$(echo "$line" | awk '{print $1" "$2}')
      # Skip rows whose first columns are not a parseable timestamp
      # (e.g. "PRE prefix/" directory markers).
      object_date=$(date -d "$object_date" +%s 2>/dev/null) || continue
      if [[ $object_date -lt $cutoff ]]; then
        object_name=$(echo "$line" | awk '{print $4}')
        # Guard against empty names so we never issue `aws s3 rm bucket/`.
        [ -n "$object_name" ] && aws s3 rm "$S3_BUCKET/$object_name"
      fi
    done
  fi

  log "Cleanup completed"
}
# Main execution
# Orchestrate the full backup run: verify prerequisites, run each backup
# stage, generate the recovery script, publish remotely, prune old copies,
# and send a notification.
main() {
  log "Starting backup process..."

  # The encryption key is mandatory for the SSL archive; abort early.
  if [ ! -f "$ENCRYPTION_KEY" ]; then
    log "ERROR: Encryption key not found at $ENCRYPTION_KEY"
    exit 1
  fi

  # Run backups
  backup_configs
  backup_ssl
  backup_webdata
  backup_databases
  create_recovery_script

  # Sync and cleanup
  sync_backups
  cleanup_old_backups

  # Report total on-disk size of this run's backup directory
  BACKUP_SIZE=$(du -sh "$BACKUP_DIR" | cut -f1)
  log "Backup completed successfully. Size: $BACKUP_SIZE"

  # Send success notification. Guarded so a missing or failing mailer
  # cannot turn a successful backup into a reported failure (in the
  # original, mail's exit status was main's status, tripping the
  # top-level failure handler on hosts without `mail`).
  if command -v mail >/dev/null 2>&1; then
    echo "Backup completed on $(hostname) at $(date)" | \
      mail -s "Backup Success: $(hostname)" [email protected] \
      || log "WARNING: success notification mail failed"
  else
    log "WARNING: 'mail' not available; skipping notification"
  fi
}
# Run with error handling
# If main fails, log it and send a best-effort failure notification.
# (mail is guarded so its absence does not mask the exit 1.)
{
  main
} || {
  log "ERROR: Backup failed!"
  if command -v mail >/dev/null 2>&1; then
    echo "Backup failed on $(hostname) at $(date)" | \
      mail -s "BACKUP FAILURE: $(hostname)" [email protected]
  fi
  exit 1
}