# Mirror metadata (not part of the script): cloned from
# https://github.com/johndoe6345789/metabuilder.git, synced 2026-04-25 22:34:56 +00:00.
# (876 lines, 27 KiB, Bash, executable.)
#!/bin/bash
|
|
|
|
################################################################################
|
|
# Phase 8: Comprehensive Email Client Backup Script
|
|
# Complete backup solution with PostgreSQL, Redis, Postfix, and Dovecot support
|
|
#
|
|
# Usage:
|
|
# ./backup.sh [--full|--incremental|--verify|--list|--upload] [--no-encrypt]
|
|
#
|
|
# Features:
|
|
# - Daily PostgreSQL dumps with point-in-time recovery support
|
|
# - Redis snapshot backups with integrity verification
|
|
# - Postfix mail spool and queue backups
|
|
# - Dovecot mail storage backups
|
|
# - Automatic compression and encryption
|
|
# - 30-day rolling backup window with retention management
|
|
# - Backup verification and health checks
|
|
# - S3 upload for off-site storage
|
|
# - Detailed logging and manifest generation
|
|
#
|
|
# Environment Variables:
|
|
# BACKUP_DIR Base backup directory (default: ./backups)
|
|
# S3_BUCKET S3 bucket for remote backups (optional)
|
|
# AWS_REGION AWS region (default: us-east-1)
|
|
# ENCRYPTION_KEY Base64-encoded encryption key for sensitive data
|
|
# RETENTION_DAYS Days to keep backups (default: 30)
|
|
# PARALLEL_JOBS Number of parallel backup jobs (default: 4)
|
|
#
|
|
# Requirements:
|
|
# - Docker / docker-compose installed
|
|
# - pg_dump, pg_basebackup available
|
|
# - openssl for encryption
|
|
# - aws-cli for S3 uploads (optional)
|
|
# - jq for JSON processing
|
|
#
|
|
################################################################################
|
|
|
|
# Strict mode: abort on errors (-e), unset variables (-u), and mid-pipeline
# failures (pipefail). Note: several functions below rely on being invoked
# inside `|| ...` lists, which suspends -e within them.
set -euo pipefail

# --- Script location and layout ---------------------------------------------
# SCRIPT_DIR resolves to the directory containing this script even when it is
# invoked through a relative path.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="${PROJECT_ROOT:-.}"
# BACKUP_DIR (env) overrides the default ./backups tree.
BACKUP_BASE_DIR="${BACKUP_DIR:-${PROJECT_ROOT}/backups}"
COMPOSE_FILE="${PROJECT_ROOT}/deployment/docker-compose.yml"

# --- Timestamp and naming ---------------------------------------------------
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_DATE=$(date +%Y-%m-%d)
# NOTE(review): $RANDOM spans only 0-32767, so two runs on the same day can
# collide; consider deriving the ID from TIMESTAMP instead.
BACKUP_ID="${BACKUP_DATE}_${RANDOM}"
BACKUP_MANIFEST="${BACKUP_BASE_DIR}/manifests/manifest_${TIMESTAMP}.json"

# --- Component toggles ------------------------------------------------------
# NOTE(review): these flags are declared but never consulted by main_backup,
# which backs up all four components unconditionally — confirm intent.
BACKUP_POSTGRESQL=1
BACKUP_REDIS=1
BACKUP_POSTFIX=1
BACKUP_DOVECOT=1

# --- Run-mode flags (overridable via environment or CLI; see parse_arguments)
FULL_BACKUP=${FULL_BACKUP:-0}
INCREMENTAL_BACKUP=${INCREMENTAL_BACKUP:-0}
VERIFY_ONLY=${VERIFY_ONLY:-0}
LIST_ONLY=${LIST_ONLY:-0}
UPLOAD_TO_S3=${UPLOAD_TO_S3:-0}
ENCRYPT_BACKUPS=${ENCRYPT_BACKUPS:-1}
DRY_RUN=${DRY_RUN:-0}

# --- Tunables ----------------------------------------------------------------
S3_BUCKET="${S3_BUCKET:-}"
AWS_REGION="${AWS_REGION:-us-east-1}"
RETENTION_DAYS="${RETENTION_DAYS:-30}"
# NOTE(review): PARALLEL_JOBS and COMPRESSION_LEVEL are never referenced in
# this script — confirm whether a sourcing script consumes them before removal.
PARALLEL_JOBS="${PARALLEL_JOBS:-4}"
COMPRESSION_LEVEL="${COMPRESSION_LEVEL:-6}"

# --- Container names (must match deployment/docker-compose.yml) --------------
DB_CONTAINER="emailclient-postgres"
REDIS_CONTAINER="emailclient-redis"
POSTFIX_CONTAINER="emailclient-postfix"
DOVECOT_CONTAINER="emailclient-dovecot"

# --- Service credentials (passed through from the environment) ---------------
# NOTE(review): DB_PASSWORD and REDIS_PASSWORD are never used below —
# psql/redis-cli are invoked without auth inside the containers.
DB_USER="${DB_USER:-emailclient}"
DB_PASSWORD="${DB_PASSWORD:-}"
DB_NAME="${DB_NAME:-emailclient_db}"
REDIS_PASSWORD="${REDIS_PASSWORD:-}"

# --- ANSI color codes for the log helpers ------------------------------------
# Stored as literal backslash sequences; interpreted by 'echo -e' in log_*().
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
NC='\033[0m'
|
|
|
|
# Logging functions
|
|
# Informational message with a blue timestamp prefix.
log_info() {
  local stamp
  stamp=$(date +'%Y-%m-%d %H:%M:%S')
  printf '%b[%s]%b %b\n' "$BLUE" "$stamp" "$NC" "$*"
}
|
|
|
|
# Success message: green timestamp plus a check mark.
log_success() {
  local stamp
  stamp=$(date +'%Y-%m-%d %H:%M:%S')
  printf '%b[%s] ✓%b %b\n' "$GREEN" "$stamp" "$NC" "$*"
}
|
|
|
|
# Warning message: yellow timestamp plus a warning sign.
log_warning() {
  local stamp
  stamp=$(date +'%Y-%m-%d %H:%M:%S')
  printf '%b[%s] ⚠%b %b\n' "$YELLOW" "$stamp" "$NC" "$*"
}
|
|
|
|
# Error message: red timestamp plus a cross mark.
log_error() {
  local stamp
  stamp=$(date +'%Y-%m-%d %H:%M:%S')
  printf '%b[%s] ✗%b %b\n' "$RED" "$stamp" "$NC" "$*"
}
|
|
|
|
# Debug message; emitted only when DEBUG=1 (env or --debug flag).
log_debug() {
  if [[ "${DEBUG:-0}" != "1" ]]; then
    return 0
  fi
  printf '%b[DEBUG]%b %b\n' "$CYAN" "$NC" "$*"
}
|
|
|
|
# Utility functions
|
|
#######################################
# Verify every external tool and config file this script depends on.
# Globals:   COMPOSE_FILE (read)
# Returns:   0 when everything is present, 1 otherwise
#######################################
check_prerequisites() {
  log_info "Checking prerequisites..."

  local tool
  local -a absent=()
  for tool in docker docker-compose openssl jq date; do
    command -v "$tool" > /dev/null 2>&1 || absent+=("$tool")
  done

  if (( ${#absent[@]} > 0 )); then
    log_error "Missing required tools: ${absent[*]}"
    return 1
  fi

  if [[ ! -f "$COMPOSE_FILE" ]]; then
    log_error "docker-compose.yml not found at $COMPOSE_FILE"
    return 1
  fi

  log_success "All prerequisites checked"
  return 0
}
|
|
|
|
#######################################
# Ensure the on-disk backup directory tree exists.
# Globals: BACKUP_BASE_DIR, BACKUP_DATE (read)
#######################################
create_backup_directories() {
  log_info "Creating backup directories..."

  local sub
  for sub in postgresql redis postfix dovecot manifests logs \
             "daily/${BACKUP_DATE}" exports; do
    mkdir -p "${BACKUP_BASE_DIR}/${sub}"
  done

  log_success "Backup directories ready"
}
|
|
|
|
#######################################
# Check that the filesystem holding a directory has enough free space.
# BUGFIX: the old `local available_kb=$(df ...)` masked df failures, and a
# failing/odd df left available_kb empty, crashing the numeric comparison.
# `df -Pk` forces POSIX single-line output in 1024-byte blocks on every
# platform (plain `df` wraps long device names and uses 512-byte blocks
# on some BSDs).
# Arguments: $1 - directory to check
#            $2 - required free space in KB (default: 5 GB)
# Returns:   0 if enough space is available, 1 otherwise
#######################################
check_disk_space() {
  local backup_dir="$1"
  local required_kb="${2:-5242880}" # 5GB default

  local available_kb
  available_kb=$(df -Pk "$backup_dir" 2>/dev/null | awk 'NR==2 {print $4}') || available_kb=""

  if [ -z "$available_kb" ]; then
    log_error "Unable to determine free space for $backup_dir"
    return 1
  fi

  if [ "$available_kb" -lt "$required_kb" ]; then
    log_error "Insufficient disk space. Available: $((available_kb / 1024))MB, Required: $((required_kb / 1024))MB"
    return 1
  fi

  log_success "Disk space check passed: $((available_kb / 1024))MB available"
  return 0
}
|
|
|
|
#######################################
# Report the run state of each service container. The database and Redis
# containers are mandatory; the mail containers may be down (warn only).
# Globals: DB_CONTAINER, REDIS_CONTAINER, POSTFIX_CONTAINER,
#          DOVECOT_CONTAINER (read)
# Returns: 0 when all mandatory containers are running, 1 otherwise
#######################################
check_containers() {
  log_info "Checking container health..."

  local all_ok=1
  local name state
  for name in "$DB_CONTAINER" "$REDIS_CONTAINER" "$POSTFIX_CONTAINER" "$DOVECOT_CONTAINER"; do
    if docker ps --format '{{.Names}}' | grep -q "^${name}$"; then
      state=$(docker inspect --format '{{.State.Health.Status}}' "$name" 2>/dev/null || echo "unknown")
      case "$state" in
        healthy|unknown) log_success "Container $name is running" ;;
        *)               log_warning "Container $name health: $state" ;;
      esac
    else
      log_warning "Container $name is not running"
      if [[ "$name" == "$DB_CONTAINER" || "$name" == "$REDIS_CONTAINER" ]]; then
        log_error "Required container $name must be running"
        all_ok=0
      fi
    fi
  done

  # Exit status mirrors the mandatory-container check
  [ "$all_ok" -eq 1 ]
}
|
|
|
|
# PostgreSQL Backup Functions
|
|
#######################################
# Dump the PostgreSQL database in both custom and compressed-SQL formats.
# BUGFIX: the old code redirected pg_dump with `> file 2>&1`, which merged
# the --verbose progress output (stderr) INTO the dump files, corrupting
# both archives. Verbose output now goes to a separate log file.
# Globals: BACKUP_BASE_DIR, TIMESTAMP, DB_CONTAINER, DB_USER, DB_NAME,
#          DRY_RUN, ENCRYPT_BACKUPS (read)
# Returns: 0 on success, 1 on failure
#######################################
backup_postgresql() {
  log_info "Starting PostgreSQL backup..."

  local backup_dir="${BACKUP_BASE_DIR}/postgresql"
  local backup_file="${backup_dir}/dump_${TIMESTAMP}.sql.gz"
  local backup_custom="${backup_dir}/dump_${TIMESTAMP}.custom"
  local dump_log="${BACKUP_BASE_DIR}/logs/pg_dump_${TIMESTAMP}.log"

  check_disk_space "$backup_dir" 3145728 || return 1

  if [ "$DRY_RUN" == "1" ]; then
    log_info "[DRY RUN] Would create: $backup_file"
    return 0
  fi

  mkdir -p "$(dirname "$dump_log")"

  # Create base backup label directory for PITR support
  log_info "Creating base backup for PITR support..."
  local backup_label="${backup_dir}/backup_label_${TIMESTAMP}"
  mkdir -p "$backup_label"

  # WAL archiving setup (if not already enabled). Best-effort: ALTER SYSTEM
  # may be rejected on hosted or replica setups, hence the `|| true`.
  log_info "Ensuring WAL archiving is enabled..."
  docker exec "$DB_CONTAINER" psql -U "$DB_USER" -d "$DB_NAME" \
    -c "ALTER SYSTEM SET wal_level = replica;" 2>/dev/null || true
  docker exec "$DB_CONTAINER" psql -U "$DB_USER" -d "$DB_NAME" \
    -c "SELECT pg_reload_conf();" 2>/dev/null || true

  # Dump in custom format (allows selective restore); stderr to log file,
  # NOT into the archive.
  log_info "Dumping PostgreSQL database in custom format..."
  docker exec "$DB_CONTAINER" pg_dump \
    -U "$DB_USER" \
    -d "$DB_NAME" \
    --format=custom \
    --compress=6 \
    --verbose \
    --blobs \
    --no-privileges \
    --no-owner \
    > "$backup_custom" 2>> "$dump_log"

  if [ ! -s "$backup_custom" ]; then
    log_error "PostgreSQL dump failed or produced empty file"
    return 1
  fi

  # Gzip-compressed plain SQL dump for human readability
  log_info "Creating SQL dump for reference..."
  docker exec "$DB_CONTAINER" pg_dump \
    -U "$DB_USER" \
    -d "$DB_NAME" \
    --compress=6 \
    --verbose \
    > "$backup_file" 2>> "$dump_log"

  if [ ! -f "$backup_file" ]; then
    log_error "PostgreSQL SQL dump failed"
    return 1
  fi

  # Backup statistics for the log
  local dump_size custom_size row_count
  dump_size=$(du -h "$backup_file" | awk '{print $1}')
  custom_size=$(du -h "$backup_custom" | awk '{print $1}')
  row_count=$(docker exec "$DB_CONTAINER" psql -U "$DB_USER" -d "$DB_NAME" \
    -t -c "SELECT SUM(n_live_tup) FROM pg_stat_user_tables;" 2>/dev/null || true)

  log_success "PostgreSQL backup completed"
  log_info " - SQL dump: $backup_file ($dump_size)"
  log_info " - Custom format: $backup_custom ($custom_size)"
  log_info " - Total rows: ${row_count:-unknown}"

  # Optional at-rest encryption (replaces the plaintext files with .enc)
  if [ "$ENCRYPT_BACKUPS" == "1" ]; then
    encrypt_file "$backup_file"
    encrypt_file "$backup_custom"
  fi

  # Record produced artifacts for the manifest generator
  echo "$backup_file" >> "${BACKUP_BASE_DIR}/postgresql_backups.txt"
  echo "$backup_custom" >> "${BACKUP_BASE_DIR}/postgresql_backups.txt"

  return 0
}
|
|
|
|
#######################################
# Verify the most recent PostgreSQL dump: gzip integrity plus a trial
# restore into a scratch database.
# BUGFIX: $latest_dump is a HOST path — the old code ran `gunzip -c` on it
# INSIDE the container, where the file does not exist, so the trial restore
# could never work. The dump is now streamed from the host into psql via
# `docker exec -i`. The ls pipeline also gets `|| true` so running under
# `set -euo pipefail` (the --verify entry path) does not abort when no
# backups exist.
# Returns: 0 (restore problems only warn), 1 if the file is corrupt
#######################################
verify_postgresql_backup() {
  log_info "Verifying PostgreSQL backup integrity..."

  local latest_dump
  latest_dump=$(ls -t "${BACKUP_BASE_DIR}/postgresql"/dump_*.sql.gz 2>/dev/null | head -1 || true)

  if [ -z "$latest_dump" ]; then
    log_warning "No PostgreSQL backups found to verify"
    return 0
  fi

  # Compressed-file integrity check
  if ! gzip -t "$latest_dump" 2>/dev/null; then
    log_error "PostgreSQL backup file is corrupted"
    return 1
  fi

  # Trial restore into a throwaway database, then drop it
  log_info "Testing restore into temporary database..."
  local test_db="backup_verify_${RANDOM}"

  if docker exec "$DB_CONTAINER" createdb -U "$DB_USER" "$test_db" 2>/dev/null; then
    if gunzip -c "$latest_dump" \
        | docker exec -i "$DB_CONTAINER" psql -U "$DB_USER" -d "$test_db" > /dev/null 2>&1; then
      log_success "PostgreSQL backup verified successfully"
      docker exec "$DB_CONTAINER" dropdb -U "$DB_USER" "$test_db"
      return 0
    fi
    docker exec "$DB_CONTAINER" dropdb -U "$DB_USER" "$test_db" 2>/dev/null || true
  fi

  log_warning "PostgreSQL backup verification completed with warnings"
  return 0
}
|
|
|
|
# Redis Backup Functions
|
|
#######################################
# Snapshot Redis via BGSAVE and copy the resulting RDB file out.
# BUGFIX: the old completion check compared LASTSAVE against host wall-clock
# time (`now - lastsave < 10`), which races — a previous save finished < 10 s
# earlier passes immediately, before the new snapshot lands — and breaks if
# host and container clocks differ. We now record LASTSAVE before BGSAVE and
# wait for the counter to change.
# Globals: BACKUP_BASE_DIR, TIMESTAMP, REDIS_CONTAINER, DRY_RUN,
#          ENCRYPT_BACKUPS, BLUE, NC (read)
# Returns: 0 on success, 1 on failure
#######################################
backup_redis() {
  log_info "Starting Redis backup..."

  local backup_dir="${BACKUP_BASE_DIR}/redis"
  local backup_file="${backup_dir}/dump_${TIMESTAMP}.rdb"

  check_disk_space "$backup_dir" 1048576 || return 1

  if [ "$DRY_RUN" == "1" ]; then
    log_info "[DRY RUN] Would create: $backup_file"
    return 0
  fi

  # Record the save counter BEFORE triggering BGSAVE so we can detect
  # exactly when the new snapshot is written.
  local save_before
  save_before=$(docker exec "$REDIS_CONTAINER" redis-cli LASTSAVE 2>/dev/null || echo "0")

  # Trigger a non-blocking background save (zero-downtime)
  log_info "Triggering BGSAVE on Redis..."
  docker exec "$REDIS_CONTAINER" redis-cli BGSAVE

  log_info "Waiting for background save to complete..."
  local max_wait=300
  local elapsed=0
  local wait_interval=5

  while [ $elapsed -lt $max_wait ]; do
    local save_now
    save_now=$(docker exec "$REDIS_CONTAINER" redis-cli LASTSAVE 2>/dev/null || echo "0")
    if [ "$save_now" != "$save_before" ]; then
      log_success "Background save completed"
      break
    fi
    elapsed=$((elapsed + wait_interval))
    echo -ne "${BLUE}.${NC}"
    sleep $wait_interval
  done
  echo ""

  # Copy the RDB file out of the container (cat avoids needing docker cp
  # permissions on /data)
  log_info "Copying RDB file from Redis..."
  docker exec "$REDIS_CONTAINER" cat /data/dump.rdb > "$backup_file"

  if [ ! -s "$backup_file" ]; then
    log_error "Redis backup failed"
    return 1
  fi

  local backup_size db_size
  backup_size=$(du -h "$backup_file" | awk '{print $1}')
  db_size=$(docker exec "$REDIS_CONTAINER" redis-cli DBSIZE 2>/dev/null || true)

  log_success "Redis backup completed"
  log_info " - Backup file: $backup_file ($backup_size)"
  log_info " - Database size: $db_size"

  # Optional at-rest encryption (replaces the plaintext file with .enc)
  if [ "$ENCRYPT_BACKUPS" == "1" ]; then
    encrypt_file "$backup_file"
  fi

  # Record the produced artifact for the manifest generator
  echo "$backup_file" >> "${BACKUP_BASE_DIR}/redis_backups.txt"
  return 0
}
|
|
|
|
#######################################
# Sanity-check the most recent Redis RDB backup: minimum size plus the
# "REDIS" magic header (header check is skipped for encrypted .enc files,
# which are opaque ciphertext).
# Returns: 0 when plausible or nothing to verify, 1 when corrupt
#######################################
verify_redis_backup() {
  log_info "Verifying Redis backup..."

  local latest_backup
  # `|| true` so an empty directory does not abort under set -e/pipefail
  latest_backup=$(ls -t "${BACKUP_BASE_DIR}/redis"/dump_*.rdb* 2>/dev/null | head -1 || true)

  if [ -z "$latest_backup" ]; then
    log_warning "No Redis backups found to verify"
    return 0
  fi

  # Size check: a valid RDB snapshot is never this small.
  # (stat -f%z is BSD/macOS, stat -c%s is GNU — try both.)
  local file_size
  file_size=$(stat -f%z "$latest_backup" 2>/dev/null || stat -c%s "$latest_backup")
  if [ "$file_size" -lt 100 ]; then
    log_error "Redis backup appears corrupted (too small)"
    return 1
  fi

  # Magic-header check: every RDB file begins with the ASCII bytes "REDIS"
  case "$latest_backup" in
    *.enc) ;; # encrypted — header not inspectable
    *)
      if [ "$(head -c 5 "$latest_backup")" != "REDIS" ]; then
        log_error "Redis backup appears corrupted (bad RDB header)"
        return 1
      fi
      ;;
  esac

  log_success "Redis backup verified"
  return 0
}
|
|
|
|
# Postfix Backup Functions
|
|
#######################################
# Archive the Postfix mail spool and queue out of the container.
# Missing or empty spool paths are tolerated (warn and continue) — tar and
# docker-cp errors are deliberately suppressed for that reason.
# Globals: BACKUP_BASE_DIR, TIMESTAMP, POSTFIX_CONTAINER, DRY_RUN,
#          ENCRYPT_BACKUPS (read)
# Returns: 0 (best-effort), 1 only on disk-space failure
#######################################
backup_postfix() {
  log_info "Starting Postfix backup..."

  local backup_dir="${BACKUP_BASE_DIR}/postfix"
  local backup_file="${backup_dir}/spool_${TIMESTAMP}.tar.gz"
  local tmp_archive="/tmp/postfix_backup_${TIMESTAMP}.tar.gz"

  check_disk_space "$backup_dir" 2097152 || return 1

  if [[ "$DRY_RUN" == "1" ]]; then
    log_info "[DRY RUN] Would create: $backup_file"
    return 0
  fi

  # Build the archive inside the container, then copy it out
  log_info "Backing up Postfix mail spool and queue..."
  docker exec "$POSTFIX_CONTAINER" tar --exclude='*.lock' \
    -czf "$tmp_archive" \
    -C /var spool/postfix \
    -C /var mail \
    2>/dev/null || true

  if ! docker cp "${POSTFIX_CONTAINER}:${tmp_archive}" "$backup_file" 2>/dev/null; then
    log_warning "Could not backup Postfix spool (may not be available in container)"
    return 0
  fi

  # Remove the in-container temp archive
  docker exec "$POSTFIX_CONTAINER" rm -f "$tmp_archive" 2>/dev/null || true

  if [[ ! -s "$backup_file" ]]; then
    log_warning "Postfix backup produced empty file (spool may be empty)"
    return 0
  fi

  local archive_size
  archive_size=$(du -h "$backup_file" | awk '{print $1}')
  log_success "Postfix backup completed ($archive_size)"

  if [[ "$ENCRYPT_BACKUPS" == "1" ]]; then
    encrypt_file "$backup_file"
  fi

  echo "$backup_file" >> "${BACKUP_BASE_DIR}/postfix_backups.txt"
  return 0
}
|
|
|
|
# Dovecot Backup Functions
|
|
#######################################
# Archive the Dovecot mail storage out of the container.
# Missing mail paths are tolerated (warn and continue) — tar and docker-cp
# errors are deliberately suppressed for that reason.
# Globals: BACKUP_BASE_DIR, TIMESTAMP, DOVECOT_CONTAINER, DRY_RUN,
#          ENCRYPT_BACKUPS (read)
# Returns: 0 (best-effort), 1 only on disk-space failure
#######################################
backup_dovecot() {
  log_info "Starting Dovecot backup..."

  local backup_dir="${BACKUP_BASE_DIR}/dovecot"
  local backup_file="${backup_dir}/mail_${TIMESTAMP}.tar.gz"
  local tmp_archive="/tmp/dovecot_backup_${TIMESTAMP}.tar.gz"

  check_disk_space "$backup_dir" 2097152 || return 1

  if [[ "$DRY_RUN" == "1" ]]; then
    log_info "[DRY RUN] Would create: $backup_file"
    return 0
  fi

  # Build the archive inside the container, then copy it out
  log_info "Backing up Dovecot mail storage..."
  docker exec "$DOVECOT_CONTAINER" tar \
    --exclude='*.lock' \
    --exclude='*.tmp' \
    -czf "$tmp_archive" \
    -C /var mail \
    2>/dev/null || true

  if ! docker cp "${DOVECOT_CONTAINER}:${tmp_archive}" "$backup_file" 2>/dev/null; then
    log_warning "Could not backup Dovecot mail (may not be available)"
    return 0
  fi

  # Remove the in-container temp archive
  docker exec "$DOVECOT_CONTAINER" rm -f "$tmp_archive" 2>/dev/null || true

  if [[ ! -s "$backup_file" ]]; then
    log_warning "Dovecot backup produced empty file"
    return 0
  fi

  local archive_size
  archive_size=$(du -h "$backup_file" | awk '{print $1}')
  log_success "Dovecot backup completed ($archive_size)"

  if [[ "$ENCRYPT_BACKUPS" == "1" ]]; then
    encrypt_file "$backup_file"
  fi

  echo "$backup_file" >> "${BACKUP_BASE_DIR}/dovecot_backups.txt"
  return 0
}
|
|
|
|
# Encryption Functions
|
|
#######################################
# Encrypt a file in place with AES-256-CBC, producing <file>.enc and
# removing the plaintext on success.
# BUGFIX: the emptiness test referenced $ENCRYPTION_KEY bare — the script
# runs under `set -u`, so any run without ENCRYPTION_KEY exported aborted
# with "unbound variable" instead of skipping encryption. Now uses
# ${ENCRYPTION_KEY:-}. The key is also passed via the environment
# ('-pass env:') instead of argv ('-k'), so it cannot leak through `ps`;
# the derived key (and therefore the ciphertext format) is unchanged.
# Globals:   ENCRYPTION_KEY (read; empty/unset => skip encryption)
# Arguments: $1 - path of the file to encrypt
# Returns:   0 on success or skip, 1 on failure
#######################################
encrypt_file() {
  local file="$1"

  if [ ! -f "$file" ]; then
    log_error "File not found for encryption: $file"
    return 1
  fi

  if [ -z "${ENCRYPTION_KEY:-}" ]; then
    log_warning "ENCRYPTION_KEY not set, skipping encryption for $file"
    return 0
  fi

  log_info "Encrypting file: $(basename "$file")"

  # AES-256-CBC via OpenSSL; -md sha256 pins the digest for key derivation
  # so old and new OpenSSL versions interoperate.
  # NOTE(review): consider adding '-pbkdf2 -iter N' once restore.sh is
  # updated to match — the legacy EVP key derivation is weak.
  ENCRYPTION_KEY="$ENCRYPTION_KEY" openssl enc -aes-256-cbc -salt \
    -in "$file" \
    -out "${file}.enc" \
    -pass env:ENCRYPTION_KEY \
    -md sha256 2>/dev/null

  if [ -f "${file}.enc" ]; then
    # Remove the plaintext only after the ciphertext exists
    rm -f "$file"
    log_success "File encrypted: ${file}.enc"
    return 0
  else
    log_error "Encryption failed for $file"
    return 1
  fi
}
|
|
|
|
# Manifest Generation
|
|
#######################################
# Print the most recently recorded backup path for a component, or nothing
# when its record file does not exist yet.
# Arguments: $1 - component name (postgresql|redis|postfix|dovecot)
#######################################
_manifest_latest() {
  local record="${BACKUP_BASE_DIR}/$1_backups.txt"
  if [ -f "$record" ]; then
    tail -1 "$record"
  fi
}

#######################################
# Print a human-readable size for a file, or N/A when it is absent.
# Arguments: $1 - file path (may be empty)
#######################################
_manifest_size() {
  if [ -n "$1" ] && [ -f "$1" ]; then
    du -h "$1" | awk '{print $1}'
  else
    echo 'N/A'
  fi
}

#######################################
# Write a JSON manifest describing the newest backup of each component.
# BUGFIXES: every "size" field previously ended with a doubled quote
# ("...')""), producing invalid JSON; the `[ -f ... ] && var=$(...)` lists
# returned non-zero when a record file was missing, aborting the run under
# `set -e`; and `date -d` is GNU-only (BSD fallback added).
# Note: "encryption.enabled" is emitted as the raw 0/1 flag value for
# compatibility with existing consumers.
# Globals: BACKUP_BASE_DIR, BACKUP_MANIFEST, BACKUP_ID, TIMESTAMP,
#          BACKUP_DATE, ENCRYPT_BACKUPS, RETENTION_DAYS (read)
#######################################
generate_backup_manifest() {
  log_info "Generating backup manifest..."

  local manifest_file="$BACKUP_MANIFEST"
  mkdir -p "$(dirname "$manifest_file")"

  # Newest recorded backup path per component (empty when none)
  local postgresql_backup redis_backup postfix_backup dovecot_backup
  postgresql_backup=$(_manifest_latest postgresql)
  redis_backup=$(_manifest_latest redis)
  postfix_backup=$(_manifest_latest postfix)
  dovecot_backup=$(_manifest_latest dovecot)

  # Retention expiry timestamp: GNU date first, BSD/macOS fallback
  local expires_at
  expires_at=$(date -d "+${RETENTION_DAYS} days" +'%Y-%m-%dT%H:%M:%SZ' 2>/dev/null \
    || date -v "+${RETENTION_DAYS}d" +'%Y-%m-%dT%H:%M:%SZ')

  cat > "$manifest_file" << EOF
{
  "backup_id": "${BACKUP_ID}",
  "timestamp": "${TIMESTAMP}",
  "backup_date": "${BACKUP_DATE}",
  "hostname": "$(hostname)",
  "backup_type": "full",
  "components": {
    "postgresql": {
      "enabled": true,
      "backup_file": "${postgresql_backup}",
      "size": "$(_manifest_size "$postgresql_backup")"
    },
    "redis": {
      "enabled": true,
      "backup_file": "${redis_backup}",
      "size": "$(_manifest_size "$redis_backup")"
    },
    "postfix": {
      "enabled": true,
      "backup_file": "${postfix_backup}",
      "size": "$(_manifest_size "$postfix_backup")"
    },
    "dovecot": {
      "enabled": true,
      "backup_file": "${dovecot_backup}",
      "size": "$(_manifest_size "$dovecot_backup")"
    }
  },
  "encryption": {
    "enabled": $ENCRYPT_BACKUPS,
    "algorithm": "AES-256-CBC"
  },
  "retention": {
    "days": $RETENTION_DAYS,
    "expires_at": "${expires_at}"
  },
  "total_size": "$(du -sh "$BACKUP_BASE_DIR" | awk '{print $1}')",
  "status": "completed",
  "version": "1.0"
}
EOF

  log_success "Manifest created: $manifest_file"
  return 0
}
|
|
|
|
# Cleanup Functions
|
|
#######################################
# Remove one component's backups older than RETENTION_DAYS.
# Fixes over the old copy-pasted loops: removals are logged for every
# component (previously only PostgreSQL logged), sizes use the portable
# `wc -c` (du -b is GNU-only), and a missing directory is skipped quietly.
# Arguments: $1 - component backup directory
#            $2 - filename glob pattern for that component
# Side effects: increments CLEANUP_COUNT and CLEANUP_FREED_BYTES (globals,
#               since bash functions cannot return multiple values cleanly)
#######################################
_cleanup_component() {
  local dir="$1" pattern="$2"
  local victim size
  [ -d "$dir" ] || return 0
  while IFS= read -r victim; do
    if [ -f "$victim" ]; then
      size=$(wc -c < "$victim")
      CLEANUP_FREED_BYTES=$((CLEANUP_FREED_BYTES + size))
      rm -f "$victim"
      log_warning "  Removed old backup: $(basename "$victim")"
      CLEANUP_COUNT=$((CLEANUP_COUNT + 1))
    fi
  done < <(find "$dir" -name "$pattern" -type f -mtime "+${RETENTION_DAYS}")
}

#######################################
# Delete all component backups older than RETENTION_DAYS and report how
# much space was reclaimed. Also covers the PostgreSQL custom-format dumps,
# which the old code never cleaned up.
# Globals: BACKUP_BASE_DIR, RETENTION_DAYS (read)
#######################################
cleanup_old_backups() {
  log_info "Cleaning up backups older than $RETENTION_DAYS days..."

  CLEANUP_COUNT=0
  CLEANUP_FREED_BYTES=0

  _cleanup_component "${BACKUP_BASE_DIR}/postgresql" 'dump_*.sql.gz*'
  _cleanup_component "${BACKUP_BASE_DIR}/postgresql" 'dump_*.custom*'
  _cleanup_component "${BACKUP_BASE_DIR}/redis"      'dump_*.rdb*'
  _cleanup_component "${BACKUP_BASE_DIR}/postfix"    'spool_*.tar.gz*'
  _cleanup_component "${BACKUP_BASE_DIR}/dovecot"    'mail_*.tar.gz*'

  if [ "$CLEANUP_COUNT" -gt 0 ]; then
    log_success "Removed $CLEANUP_COUNT old backup(s), freed $((CLEANUP_FREED_BYTES / 1024 / 1024))MB"
  else
    log_info "No old backups to remove"
  fi
}
|
|
|
|
# S3 Upload Functions
|
|
#######################################
# Push the manifest plus this run's PostgreSQL and Redis backups to S3.
# Skips quietly when S3_BUCKET is unset or aws-cli is missing; individual
# upload failures only warn.
# Globals: S3_BUCKET, AWS_REGION, BACKUP_BASE_DIR, BACKUP_DATE, TIMESTAMP,
#          BACKUP_MANIFEST (read)
# Returns: 0 always (uploads are best-effort)
#######################################
upload_to_s3() {
  if [ -z "$S3_BUCKET" ]; then
    log_warning "S3_BUCKET not configured, skipping S3 upload"
    return 0
  fi

  if ! command -v aws &> /dev/null; then
    log_warning "aws-cli not installed, skipping S3 upload"
    return 0
  fi

  log_info "Uploading backups to S3..."

  local dest="s3://${S3_BUCKET}/backups/${BACKUP_DATE}"
  local item

  # Manifest first, so a partially-uploaded run is still discoverable
  if aws s3 cp "$BACKUP_MANIFEST" "${dest}/manifest_${TIMESTAMP}.json" \
      --region "$AWS_REGION" 2>/dev/null; then
    log_success "Manifest uploaded to S3"
  else
    log_warning "Failed to upload manifest to S3"
  fi

  # This run's PostgreSQL artifacts (plain + custom, possibly .enc)
  for item in "${BACKUP_BASE_DIR}"/postgresql/dump_${TIMESTAMP}*; do
    [ -f "$item" ] || continue
    if aws s3 cp "$item" "${dest}/postgresql/$(basename "$item")" \
        --region "$AWS_REGION" 2>/dev/null; then
      log_success "PostgreSQL backup uploaded to S3"
    else
      log_warning "Failed to upload PostgreSQL backup to S3"
    fi
  done

  # This run's Redis snapshot
  for item in "${BACKUP_BASE_DIR}"/redis/dump_${TIMESTAMP}*; do
    [ -f "$item" ] || continue
    if aws s3 cp "$item" "${dest}/redis/$(basename "$item")" \
        --region "$AWS_REGION" 2>/dev/null; then
      log_success "Redis backup uploaded to S3"
    else
      log_warning "Failed to upload Redis backup to S3"
    fi
  done

  return 0
}
|
|
|
|
# List Backups
|
|
#######################################
# Print one backup category: header, then "  path (size)" per file or
# "  None" when nothing matches.
# BUGFIX: the old `ls ... | awk ... || echo "  None"` never printed "None"
# because awk exits 0 even on empty input; the files are now tested
# explicitly before printing.
# Arguments: $1 - category label, $@ - glob expansion of candidate files
#######################################
_list_category() {
  local label="$1"; shift
  echo -e "${CYAN}${label}:${NC}"
  local found=0 entry
  for entry in "$@"; do
    if [ -f "$entry" ]; then
      printf '  %s (%s)\n' "$entry" "$(du -h "$entry" | awk '{print $1}')"
      found=1
    fi
  done
  if [ "$found" -eq 0 ]; then
    echo "  None"
  fi
}

#######################################
# Print a human-readable summary of all on-disk backups and the total size.
# Globals: BACKUP_BASE_DIR, CYAN, NC (read)
#######################################
list_backups() {
  log_info "Available backups:"
  echo ""

  _list_category "PostgreSQL Backups" "${BACKUP_BASE_DIR}/postgresql"/dump_*.sql.gz*
  _list_category "Redis Backups"      "${BACKUP_BASE_DIR}/redis"/dump_*.rdb*
  _list_category "Postfix Backups"    "${BACKUP_BASE_DIR}/postfix"/spool_*.tar.gz*
  _list_category "Dovecot Backups"    "${BACKUP_BASE_DIR}/dovecot"/mail_*.tar.gz*

  echo ""
  echo -e "${CYAN}Backup Manifests:${NC}"
  if ls "${BACKUP_BASE_DIR}/manifests"/manifest_*.json > /dev/null 2>&1; then
    # Show only the five most recent manifests
    ls -lh "${BACKUP_BASE_DIR}/manifests"/manifest_*.json | tail -5 | awk '{print "  " $9 " (" $5 ")"}'
  else
    echo "  None"
  fi

  echo ""
  echo -e "${CYAN}Total backup size: $(du -sh "$BACKUP_BASE_DIR" | awk '{print $1}')${NC}"
}
|
|
|
|
# Main backup workflow
|
|
#######################################
# Full backup workflow: preflight checks, per-component backups,
# verification, retention cleanup, manifest generation, and optional S3
# upload. Component failures only warn; preflight failures abort (exit 1).
# Globals: BACKUP_BASE_DIR, UPLOAD_TO_S3, BACKUP_ID, BACKUP_MANIFEST (read)
#######################################
main_backup() {
  log_info "============================================================"
  log_info "Phase 8: Email Client Backup Process"
  log_info "============================================================"

  # Preflight: tools, directory tree, disk space, and mandatory containers.
  # Any failure here aborts before touching existing backups.
  check_prerequisites || exit 1
  create_backup_directories
  check_disk_space "$BACKUP_BASE_DIR" 5242880 || exit 1
  check_containers || exit 1

  # Per-component backups — each is best-effort: a failure is logged but
  # does not stop the remaining components.
  backup_postgresql || log_warning "PostgreSQL backup failed"
  backup_redis || log_warning "Redis backup failed"
  backup_postfix || log_warning "Postfix backup failed"
  backup_dovecot || log_warning "Dovecot backup failed"

  # Verify the two datastores (the mail archives are not verified here)
  verify_postgresql_backup || log_warning "PostgreSQL verification failed"
  verify_redis_backup || log_warning "Redis verification failed"

  # Retention cleanup, then record what this run produced
  cleanup_old_backups
  generate_backup_manifest

  # Optional off-site copy (enabled by --upload / UPLOAD_TO_S3=1)
  if [ "$UPLOAD_TO_S3" == "1" ]; then
    upload_to_s3
  fi

  log_info "============================================================"
  log_success "Backup process completed successfully"
  log_info "Backup ID: $BACKUP_ID"
  log_info "Manifest: $BACKUP_MANIFEST"
  log_info "============================================================"
}
|
|
|
|
# Usage information
|
|
#######################################
# Print CLI help text to stdout.
# The unquoted EOF delimiter means $0 and the color variables ARE expanded.
# NOTE(review): this heredoc is emitted via plain 'cat', which does NOT
# interpret the '\033[' escape sequences stored in ${CYAN}/${NC} (unlike
# the 'echo -e' used by the log helpers) — the usage text will show raw
# escape codes; confirm and consider printf '%b'.
#######################################
show_usage() {
  cat << EOF
${CYAN}Phase 8: Email Client Backup Script${NC}
Comprehensive backup solution for PostgreSQL, Redis, Postfix, and Dovecot

${CYAN}Usage:${NC}
  $0 [OPTIONS]

${CYAN}Options:${NC}
  --full           Perform full backup (default)
  --incremental    Perform incremental backup (PostgreSQL WAL only)
  --verify         Verify existing backups
  --list           List available backups
  --upload         Upload backups to S3
  --no-encrypt     Disable backup encryption
  --dry-run        Show what would be done without making changes
  --help           Show this help message

${CYAN}Environment Variables:${NC}
  BACKUP_DIR       Base backup directory (default: ./backups)
  S3_BUCKET        S3 bucket for remote backups (optional)
  AWS_REGION       AWS region (default: us-east-1)
  ENCRYPTION_KEY   Encryption key for sensitive data
  RETENTION_DAYS   Days to keep backups (default: 30)
  PARALLEL_JOBS    Number of parallel jobs (default: 4)
  DEBUG            Set to 1 for debug output

${CYAN}Examples:${NC}
  # Full backup with encryption
  $0 --full

  # Backup and upload to S3
  ENCRYPTION_KEY=mykey S3_BUCKET=my-bucket $0 --full --upload

  # Verify last backup
  $0 --verify

  # List all available backups
  $0 --list

${CYAN}Features:${NC}
  ✓ Daily PostgreSQL dumps with point-in-time recovery support
  ✓ Redis snapshot backups with integrity verification
  ✓ Postfix mail spool and queue backups
  ✓ Dovecot mail storage backups
  ✓ Automatic compression (gzip, level 6)
  ✓ Optional AES-256-CBC encryption
  ✓ 30-day rolling backup window
  ✓ Detailed logging and JSON manifests
  ✓ S3 off-site storage integration
  ✓ Backup verification and health checks
  ✓ Zero-downtime backup (background save)

${CYAN}Recovery:${NC}
  Use restore.sh to restore from backups with zero-downtime capability

EOF
}
|
|
|
|
# Parse command line arguments
|
|
#######################################
# Translate CLI flags into the global run-mode variables.
# Arguments: all CLI args ("$@")
# Side effects: sets FULL_BACKUP, INCREMENTAL_BACKUP, VERIFY_ONLY,
#               LIST_ONLY, UPLOAD_TO_S3, ENCRYPT_BACKUPS, DRY_RUN, DEBUG;
#               exits 0 on --help, exits 1 on an unknown option.
#######################################
parse_arguments() {
  while [ $# -gt 0 ]; do
    case "$1" in
      --full)
        FULL_BACKUP=1
        INCREMENTAL_BACKUP=0
        ;;
      --incremental)
        INCREMENTAL_BACKUP=1
        FULL_BACKUP=0
        ;;
      --verify)     VERIFY_ONLY=1 ;;
      --list)       LIST_ONLY=1 ;;
      --upload)     UPLOAD_TO_S3=1 ;;
      --no-encrypt) ENCRYPT_BACKUPS=0 ;;
      --dry-run)
        DRY_RUN=1
        log_warning "DRY RUN MODE - No files will be modified"
        ;;
      --debug)      DEBUG=1 ;;
      -h|--help)
        show_usage
        exit 0
        ;;
      *)
        log_error "Unknown option: $1"
        show_usage
        exit 1
        ;;
    esac
    shift
  done
}
|
|
|
|
# Entry point
|
|
# ---- Entry point ------------------------------------------------------------
# Dispatch on the run mode set by the CLI flags; the default path performs
# a full backup run.
parse_arguments "$@"

if [[ "$LIST_ONLY" == "1" ]]; then
  list_backups
  exit 0
fi

if [[ "$VERIFY_ONLY" == "1" ]]; then
  verify_postgresql_backup
  verify_redis_backup
  exit 0
fi

main_backup
|