aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorLibravatarLibravatar Biswa Kalyan Bhuyan <[email protected]> 2025-06-06 15:05:28 +0530
committerLibravatarLibravatar Biswa Kalyan Bhuyan <[email protected]> 2025-06-06 15:05:28 +0530
commit6e8fc92c6f81fc3b80f0aee18459feaf858b4949 (patch)
tree8e67e4caa920be7ba0e0327ddc0d0065f601e532
downloadbackupbot-6e8fc92c6f81fc3b80f0aee18459feaf858b4949.tar.gz
backupbot-6e8fc92c6f81fc3b80f0aee18459feaf858b4949.tar.bz2
backupbot-6e8fc92c6f81fc3b80f0aee18459feaf858b4949.zip
feat: created new script for backing up server using rclone
-rw-r--r--README.md273
-rwxr-xr-xbackup.sh701
2 files changed, 974 insertions, 0 deletions
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..4fa6160
--- /dev/null
+++ b/README.md
@@ -0,0 +1,273 @@
+# OpenBSD Server Backup Script
+
+A robust, production-ready backup solution for OpenBSD servers that compresses directories and uploads them to cloud storage with comprehensive logging and email notifications.
+
+## Author
+
+**Biswa Kalyan Bhuyan**
+
+## Features
+
+- ✅ **Individual tar archives** for each directory
+- ✅ **Maximum gzip compression** (level 9)
+- ✅ **Cloud upload** via rclone
+- ✅ **Automatic cleanup** after successful upload
+- ✅ **Email notifications** with detailed logs
+- ✅ **Directory validation** before backup
+- ✅ **Disk space checking** (1GB minimum requirement)
+- ✅ **Log rotation** (keeps last 10 logs)
+- ✅ **Permission validation** with root privilege warnings
+- ✅ **Dry-run mode** for testing
+- ✅ **Server-optimized** with production-ready configurations
+
+## Requirements
+
+### System Requirements
+- **OpenBSD** (tested and optimized for OpenBSD)
+- **Root access** (recommended for complete system backups)
+- **Minimum 1GB free space** in `/tmp`
+
+### Dependencies
+```bash
+# Install required packages
+pkg_add rclone mailx
+
+# Configure rclone (setup 'cf' remote)
+rclone config
+```
+
+## Installation
+
+### 1. Clone Repository
+```bash
+git clone <repository-url>
+cd openbsd-backup-script
+```
+
+### 2. Deploy to Server
+```bash
+# Copy script to system location
+sudo cp backup.sh /usr/local/bin/
+sudo chmod +x /usr/local/bin/backup.sh
+sudo chown root:wheel /usr/local/bin/backup.sh
+```
+
+### 3. Configure
+- Setup rclone remote named 'cf' pointing to your cloud storage
+- Verify email functionality: `echo "Test" | mail -s "Test" [email protected]`
+- Edit script to change `EMAIL_RECIPIENT` if needed
+
+## Usage
+
+### Basic Commands
+
+```bash
+# Test mode (no actual upload)
+sudo backup.sh --dir /etc --dry-run
+
+# Backup single directory
+sudo backup.sh --dir /etc
+
+# Backup multiple directories
+sudo backup.sh --dir /etc,/var/www,/var/log
+
+# Process existing tarball
+sudo backup.sh /path/to/existing.tar
+
+# Show help
+backup.sh --help
+```
+
+### Server-Specific Examples
+
+#### Web Server Backup
+```bash
+sudo backup.sh --dir /etc,/var/www,/var/log
+```
+
+#### Mail Server Backup
+```bash
+sudo backup.sh --dir /etc,/var/mail,/var/spool/mail,/usr/local/etc
+```
+
+#### Database Server Backup
+```bash
+sudo backup.sh --dir /etc,/var/lib/mysql,/var/postgresql,/usr/local/etc
+```
+
+#### Complete System Backup
+```bash
+sudo backup.sh --dir /etc,/var/www,/var/log,/home,/usr/local,/var/mail
+```
+
+## Automation with Cron
+
+### Daily Backup (2:00 AM)
+```bash
+# Edit root's crontab
+sudo crontab -e
+
+# Add daily backup
+0 2 * * * /usr/local/bin/backup.sh --dir /etc,/var/www,/var/log >/dev/null 2>&1
+```
+
+### Weekly Full Backup (Sunday 3:00 AM)
+```bash
+# Weekly comprehensive backup
+0 3 * * 0 /usr/local/bin/backup.sh --dir /etc,/var/www,/var/log,/home,/usr/local/etc >/dev/null 2>&1
+```
+
+### Multiple Schedules
+```bash
+# Daily configs (2 AM)
+0 2 * * * /usr/local/bin/backup.sh --dir /etc,/usr/local/etc >/dev/null 2>&1
+
+# Daily web content (3 AM)
+0 3 * * * /usr/local/bin/backup.sh --dir /var/www >/dev/null 2>&1
+
+# Weekly logs (Sunday 4 AM)
+0 4 * * 0 /usr/local/bin/backup.sh --dir /var/log >/dev/null 2>&1
+```
+
+## Configuration
+
+### Script Configuration
+Edit these variables in `backup.sh`:
+```bash
+RCLONE_REMOTE="cf:backups/" # Cloud storage remote
+EMAIL_RECIPIENT="[email protected]" # Email for notifications
+COMPRESSION_LEVEL=9 # Gzip compression level
+MIN_FREE_SPACE_MB=1024 # Minimum required space (MB)
+MAX_LOG_FILES=10 # Log retention count
+```
+
+### File Naming Convention
+- **Archive files**: `etc-20250606.tar.gz`, `var-www-20250606.tar.gz`
+- **Log files**: `/var/log/backup-log-20250606.log`
+- **Backup staging**: `/tmp/backups-20250606/`
+
+## Monitoring & Logs
+
+### Log Locations
+- **Primary**: `/var/log/backup-log-YYYYMMDD.log`
+- **Fallback**: `/tmp/backup-log-YYYYMMDD.log` (if `/var/log/` not writable)
+
+### Email Reports
+- **Success**: `[SUCCESS] Backup Script Log - hostname - date`
+- **Failure**: `[FAILED] Backup Script Log - hostname - date`
+- **Content**: Complete execution log with statistics
+
+### Log Rotation
+- Automatically keeps the last 10 log files (after a successful run the day's log is emailed and then removed; only failed-run logs accumulate on disk)
+- Older logs are automatically removed
+
+## Output Examples
+
+### Archive Creation
+```
+[INFO] Creating tar archive for: /var/www
+[INFO] Directory size: 486M
+[INFO] Available space in backup location: 5229 MB
+[INFO] Creating archive: /tmp/backups-20250606/var-www-20250606.tar
+[SUCCESS] Tar archive created in 3 seconds
+[INFO] Archive size: 484 MB
+```
+
+### Compression & Upload
+```
+[INFO] Compressing 'var-www-20250606.tar' with maximum compression (level 9)...
+[SUCCESS] Compression completed in 45 seconds
+[INFO] Original size: 484 MB
+[INFO] Compressed size: 156 MB
+[INFO] Compression ratio: 67%
+[INFO] Space saved: 328 MB
+[INFO] Uploading 'var-www-20250606.tar.gz' to cf:backups/...
+[SUCCESS] Upload completed successfully in 23 seconds
+```
+
+## Error Handling
+
+### Common Issues & Solutions
+
+#### Permission Denied
+```bash
+# Run as root for system directories
+sudo backup.sh --dir /etc
+```
+
+#### Insufficient Disk Space
+```bash
+# Check available space
+df -h /tmp
+# Clean up old files or increase disk space
+```
+
+#### Directory Not Found
+```bash
+# Script validates directories before starting
+[ERROR] Directory does not exist: /var/www
+```
+
+#### rclone Not Configured
+```bash
+# Configure rclone remote
+sudo rclone config
+# Setup remote named 'cf'
+```
+
+## Security Considerations
+
+- **Run as root** for complete system access
+- **Sensitive files** (like `/etc/shadow`) may show permission warnings (normal behavior)
+- **Cloud credentials** stored in rclone config
+- **Log files** contain backup statistics but not file contents
+- **Email logs** sent to configured recipient
+
+## Performance
+
+### Typical Performance
+- **Compression**: ~10-15MB/s (depends on data type and CPU)
+- **Upload**: Depends on network bandwidth and cloud provider
+- **Disk Usage**: Temporary files in `/tmp/` (auto-cleaned)
+
+### Optimization Tips
+- **Schedule during off-peak hours** (2-4 AM)
+- **Separate large directories** into different backup jobs
+- **Monitor disk space** in `/tmp/`
+- **Use SSD storage** for better compression performance
+
+## Contributing
+
+1. Fork the repository
+2. Create a feature branch (`git checkout -b feature/amazing-feature`)
+3. Commit your changes (`git commit -m 'Add amazing feature'`)
+4. Push to the branch (`git push origin feature/amazing-feature`)
+5. Open a Pull Request
+
+## License
+
+This project is open source and available under the [MIT License](LICENSE).
+
+## Support
+
+For issues, questions, or contributions:
+- **Email**: [[email protected]](mailto:[email protected])
+- **Issues**: Use GitHub Issues for bug reports and feature requests
+
+## Changelog
+
+### v1.0.0
+- Initial release
+- OpenBSD optimized backup script
+- Cloud upload via rclone
+- Email notifications
+- Comprehensive logging
+- Directory validation
+- Dry-run mode
+- Automatic cleanup
+- Log rotation
+
+---
+
+**Made with ❤️ for OpenBSD servers** \ No newline at end of file
diff --git a/backup.sh b/backup.sh
new file mode 100755
index 0000000..754cb2c
--- /dev/null
+++ b/backup.sh
@@ -0,0 +1,701 @@
#!/bin/sh

# backup.sh - OpenBSD Server Backup Script
# Author: Biswa Kalyan Bhuyan ([email protected])
#
# DEPLOYMENT INSTRUCTIONS:
# 1. Copy this script to your OpenBSD server: /usr/local/bin/backup.sh
# 2. Make it executable: chmod +x /usr/local/bin/backup.sh
# 3. Install dependencies: pkg_add rclone mailx
# 4. Configure rclone: rclone config (setup 'cf' remote)
# 5. Test: backup.sh --dir /etc --dry-run
# 6. Run: backup.sh --dir /etc,/var/www,/var/log
# 7. Add to crontab for automation
#
# Usage: ./backup.sh <tarball_file>
#        ./backup.sh --dir /etc,/var/www,/home
#        ./backup.sh --dir /etc,/var/log --dry-run

# Abort on the first unhandled command failure (the EXIT trap below still
# runs, so a failure is logged and emailed).
set -e

# ANSI color escape codes for terminal output; NC resets the color.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Configuration variables
RCLONE_REMOTE="cf:backups/"                           # rclone destination (remote:path)
COMPRESSION_LEVEL=9                                   # gzip level; 9 = maximum compression
EMAIL_RECIPIENT="[email protected]"                 # recipient of the run log
LOG_FILE="/var/log/backup-log-$(date +%Y%m%d).log"    # may fall back to /tmp if /var/log is not writable
HOSTNAME=$(hostname -s)
BACKUP_BASE_DIR="/tmp/backups-$(date +%Y%m%d)"        # staging area; removed by the EXIT trap
DRY_RUN=false                                         # set by --dry-run; skips the cloud upload

# Server settings
BACKUP_USER="root"        # NOTE(review): informational only -- never read elsewhere in the script
MIN_FREE_SPACE_MB=1024    # refuse to archive with less free space than this
MAX_LOG_FILES=10          # log-rotation retention count (see cleanup_old_logs)
+
# ---- Logging helpers -------------------------------------------------------
# All human-readable output is written to stderr so that functions which
# return values on stdout (e.g. create_tar_archive) are not polluted.

# Append one timestamped entry to the log file.
log_with_timestamp() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >> "$LOG_FILE"
}

# Shared printer: log "[TAG] text", then print the colorized form to stderr.
_print_tagged() {
    local color="$1" tag="$2" text="$3"
    log_with_timestamp "[$tag] $text"
    printf "%s\n" "${color}[$tag]${NC} $text" >&2
}

# Print informational message (blue) and log it.
print_status() {
    _print_tagged "$BLUE" "INFO" "$1"
}

# Print success message (green) and log it.
print_success() {
    _print_tagged "$GREEN" "SUCCESS" "$1"
}

# Print warning message (yellow) and log it.
print_warning() {
    _print_tagged "$YELLOW" "WARNING" "$1"
}

# Print error message (red) and log it.
print_error() {
    _print_tagged "$RED" "ERROR" "$1"
}
+
# Send email with backup log details
# Subject is "[SUCCESS|FAILED] Backup Script Log - <host> - <timestamp>",
# chosen from the exit status in $1; the body is a short header plus the
# complete log file.  Falls back to a stderr notice when mail(1) is absent.
send_log_email() {
    local exit_status="$1"
    local subject_prefix="SUCCESS"

    if [ "$exit_status" -ne 0 ]; then
        subject_prefix="FAILED"
    fi

    local subject="[$subject_prefix] Backup Script Log - $HOSTNAME - $(date '+%Y-%m-%d %H:%M:%S')"

    if command -v mail >/dev/null 2>&1; then
        # Group the header lines and the log dump into one mail body.
        {
            echo "Backup script execution log from $HOSTNAME"
            echo "=========================================="
            echo "Execution time: $(date)"
            echo "Exit status: $exit_status"
            echo "Script arguments: $ORIGINAL_ARGS"
            echo ""
            echo "Full log output:"
            echo "----------------"
            cat "$LOG_FILE" 2>/dev/null || echo "Log file not found"
        } | mail -s "$subject" "$EMAIL_RECIPIENT"

        echo "Log email sent to $EMAIL_RECIPIENT" >&2
    else
        echo "Warning: mail command not available. Log saved to: $LOG_FILE" >&2
    fi
}
+
# Clean up temporary files and send email on script exit
# EXIT-trap handler: emails the log, removes the staging directory and, on
# success, removes the day's log file as well.
cleanup_and_email() {
    local exit_code=$?    # status of the command that triggered the exit
    # NOTE(review): the email is sent before the cleanup below, so the
    # "Cleaned up temporary directory" line never appears in the emailed log.
    send_log_email "$exit_code"
    if [ -d "$BACKUP_BASE_DIR" ]; then
        rm -rf "$BACKUP_BASE_DIR"
        print_status "Cleaned up temporary directory: $BACKUP_BASE_DIR"
    fi
    # Successful runs delete the log (it was just emailed); failed runs keep
    # it on disk.  NOTE(review): this means MAX_LOG_FILES rotation only ever
    # applies to failed-run logs -- confirm that is intended.
    if [ "$exit_code" -eq 0 ] && [ -f "$LOG_FILE" ]; then
        rm -f "$LOG_FILE"
    fi
    exit $exit_code
}

# Run the handler on every exit path, normal or via set -e.
trap cleanup_and_email EXIT
+
# Warn (without aborting) when the script lacks root privileges, and fall
# back to /tmp for logging when the configured log directory is unwritable.
check_user_permissions() {
    local log_dir

    # Root only matters for real backups of protected paths; dry runs are
    # exempt from the warning.
    if [ "$(id -u)" -ne 0 ] && [ "$DRY_RUN" = "false" ]; then
        print_warning "Not running as root. Some system directories may not be accessible."
        print_warning "For complete server backups, consider running as root: sudo $0 $ORIGINAL_ARGS"
    fi

    log_dir=$(dirname "$LOG_FILE")
    if [ ! -w "$log_dir" ]; then
        print_warning "Cannot write to log directory: $log_dir"
        LOG_FILE="/tmp/backup-log-$(date +%Y%m%d).log"
        print_status "Using alternative log location: $LOG_FILE"
    fi
}
+
# Remove old log files keeping only the latest MAX_LOG_FILES
# Relies on the YYYYMMDD suffix in the names: a reverse lexical sort puts
# the newest logs first, and everything after the first MAX_LOG_FILES
# entries is deleted.
cleanup_old_logs() {
    local log_dir=$(dirname "$LOG_FILE")
    local log_pattern="backup-log-*.log"

    # Only prune when the directory exists and we can actually delete files.
    if [ -d "$log_dir" ] && [ -w "$log_dir" ]; then
        find "$log_dir" -name "$log_pattern" -type f 2>/dev/null | \
        sort -r | \
        tail -n +$((MAX_LOG_FILES + 1)) | \
        while read -r old_log; do
            rm -f "$old_log" 2>/dev/null
            print_status "Cleaned up old log file: $(basename "$old_log")"
        done
    fi
}
+
# Initialize log file with header information
# Creates the log directory if needed (falling back to /tmp on failure),
# truncates/starts the day's log with a run header, then prunes old logs.
initialize_log() {
    local log_dir=$(dirname "$LOG_FILE")
    if [ ! -d "$log_dir" ]; then
        mkdir -p "$log_dir" 2>/dev/null || {
            # NOTE(review): this branch runs when mkdir FAILED; the message
            # wording ("Created log in /tmp") describes the fallback that is
            # about to be used, not a successful mkdir.
            LOG_FILE="/tmp/backup-log-$(date +%Y%m%d).log"
            print_warning "Created log in /tmp instead: $LOG_FILE"
        }
    fi

    # '>' truncates: each run of the day starts the header fresh.
    echo "==================================================" > "$LOG_FILE"
    echo "Server Backup Script Execution Log" >> "$LOG_FILE"
    echo "==================================================" >> "$LOG_FILE"
    echo "Start time: $(date)" >> "$LOG_FILE"
    echo "Hostname: $HOSTNAME" >> "$LOG_FILE"
    echo "Script: $0" >> "$LOG_FILE"
    echo "Arguments: $ORIGINAL_ARGS" >> "$LOG_FILE"
    echo "User: $(whoami) (UID: $(id -u))" >> "$LOG_FILE"
    echo "Log file: $LOG_FILE" >> "$LOG_FILE"
    echo "Server info: $(uname -a)" >> "$LOG_FILE"
    echo "==================================================" >> "$LOG_FILE"
    echo "" >> "$LOG_FILE"

    cleanup_old_logs
}
+
# Verify the external tools this script depends on are available.
# tar and gzip are hard requirements; rclone is required unless --dry-run;
# mail is optional (without it, logs stay on disk only).
check_dependencies() {
    local tool

    print_status "Checking dependencies..."

    # Hard requirements: bail out immediately if either is missing.
    for tool in tar gzip; do
        if ! command -v "$tool" >/dev/null 2>&1; then
            print_error "$tool is not installed or not in PATH"
            exit 1
        fi
    done

    if [ "$DRY_RUN" = "true" ]; then
        print_warning "DRY RUN mode enabled - skipping rclone check"
    elif ! command -v rclone >/dev/null 2>&1; then
        print_error "rclone is not installed or not in PATH"
        print_error "Please install rclone: pkg_add rclone"
        print_error "Or use --dry-run flag to test without uploading"
        exit 1
    fi

    # mail is best-effort: warn but carry on without it.
    if ! command -v mail >/dev/null 2>&1; then
        print_warning "mail command not available - logs will be saved locally only"
        log_with_timestamp "[WARNING] Consider installing mailx: pkg_add mailx"
    fi

    print_success "All essential dependencies found"
}
+
# Create an uncompressed tar archive of one directory under BACKUP_BASE_DIR.
# $1 = source directory, $2 = archive base name (without .tar).
# Prints the archive path on stdout (the function's "return value");
# all status messages go to stderr/log.  Returns 0 on success, 1 otherwise.
create_tar_archive() {
    local source_dir="$1"
    local archive_name="$2"
    local tar_file="$BACKUP_BASE_DIR/$archive_name.tar"

    print_status "Creating tar archive for: $source_dir"

    if [ ! -d "$source_dir" ]; then
        print_error "Directory does not exist: $source_dir"
        return 1
    fi

    if [ ! -r "$source_dir" ]; then
        print_error "Directory is not readable: $source_dir"
        print_error "Try running as root or check permissions"
        return 1
    fi

    local dir_size
    dir_size=$(du -sh "$source_dir" 2>/dev/null | cut -f1)
    print_status "Directory size: $dir_size"

    # Free-space check on the filesystem that holds the staging area.
    local backup_dir_parent available_space available_space_mb
    backup_dir_parent=$(dirname "$BACKUP_BASE_DIR")
    available_space=$(df "$backup_dir_parent" | awk 'NR==2 {print $4}')
    available_space_mb=$(( available_space / 1024 ))
    print_status "Available space in backup location: ${available_space_mb} MB"

    if [ "$available_space_mb" -lt "$MIN_FREE_SPACE_MB" ]; then
        print_error "Insufficient disk space. Available: ${available_space_mb}MB, Required: ${MIN_FREE_SPACE_MB}MB"
        return 1
    fi

    local start_time parent_dir dir_name
    start_time=$(date +%s)
    # Archive from the parent so members are stored relative (no absolute paths).
    parent_dir=$(dirname "$source_dir")
    dir_name=$(basename "$source_dir")

    print_status "Creating archive: $tar_file"

    # BUGFIX: the original piped tar through 'tee -a', so the 'if' tested
    # tee's exit status (and the later $? capture was meaningless) -- a tar
    # failure was silently treated as success.  Append diagnostics to the
    # log by redirection instead, so tar's own status drives the branch.
    if (cd "$parent_dir" && tar -cf "$tar_file" "$dir_name") >> "$LOG_FILE" 2>&1; then
        local end_time duration
        end_time=$(date +%s)
        duration=$(( end_time - start_time ))

        if [ ! -f "$tar_file" ]; then
            print_error "Tar file was not created: $tar_file"
            return 1
        fi

        # BSD stat (-f%z) first, GNU stat (-c%s) as fallback.
        local tar_size
        tar_size=$(stat -f%z "$tar_file" 2>/dev/null || stat -c%s "$tar_file" 2>/dev/null)

        if [ "$tar_size" -eq 0 ]; then
            print_warning "Tar file is empty, this might indicate an issue"
        fi

        print_success "Tar archive created in ${duration} seconds"
        print_status "Archive size: $(( tar_size / 1024 / 1024 )) MB"

        echo "$tar_file"
        return 0
    else
        local tar_exit_code=$?
        print_error "Failed to create tar archive for: $source_dir (exit code: $tar_exit_code)"
        print_error "Check the log file for detailed error messages: $LOG_FILE"

        if [ ! -w "$backup_dir_parent" ]; then
            print_error "Backup directory is not writable: $backup_dir_parent"
        fi

        return 1
    fi
}
+
# Turn an absolute path into a safe archive base name:
# strip leading slash(es), then map the remaining '/' to '-'.
# e.g. /var/www -> var-www
sanitize_filename() {
    printf '%s\n' "$1" | sed -e 's|^/*||' -e 's|/|-|g'
}
+
# Compress a tar file with gzip at COMPRESSION_LEVEL.
# $1 = input tar path.  Prints the .gz path on stdout on success;
# returns 1 (and removes any partial output) on failure.
compress_tarball() {
    local input_file="$1"
    local output_file="${input_file}.gz"

    print_status "Compressing '$(basename "$input_file")' with maximum compression (level $COMPRESSION_LEVEL)..."

    # BSD stat (-f%z) first, GNU stat (-c%s) as fallback.
    local original_size
    original_size=$(stat -f%z "$input_file" 2>/dev/null || stat -c%s "$input_file" 2>/dev/null)
    log_with_timestamp "Starting compression - Original size: $original_size bytes"

    local start_time
    start_time=$(date +%s)

    if gzip -"${COMPRESSION_LEVEL}" -c "$input_file" > "$output_file"; then
        local end_time duration compressed_size compression_ratio
        end_time=$(date +%s)
        duration=$(( end_time - start_time ))
        compressed_size=$(stat -f%z "$output_file" 2>/dev/null || stat -c%s "$output_file" 2>/dev/null)

        # BUGFIX: guard the ratio computation -- the original divided by
        # original_size unconditionally, which is a division-by-zero for a
        # zero-byte input (create_tar_archive only warns on empty archives).
        if [ "$original_size" -gt 0 ]; then
            compression_ratio=$(( (original_size - compressed_size) * 100 / original_size ))
        else
            compression_ratio=0
        fi

        print_success "Compression completed in ${duration} seconds"
        print_status "Original size: $(( original_size / 1024 / 1024 )) MB"
        print_status "Compressed size: $(( compressed_size / 1024 / 1024 )) MB"
        print_status "Compression ratio: ${compression_ratio}%"
        print_status "Space saved: $(( (original_size - compressed_size) / 1024 / 1024 )) MB"

        echo "$output_file"
        return 0
    else
        # Do not leave a truncated .gz behind for a later upload to pick up.
        rm -f "$output_file"
        print_error "Failed to compress '$(basename "$input_file")'"
        return 1
    fi
}
+
# Upload one file to the configured rclone remote ($RCLONE_REMOTE).
# In --dry-run mode the upload is only simulated.
# Returns 0 on (simulated) success, 1 on any failure.
upload_to_cloud() {
    local file_to_upload="$1"
    local remote_name file_size start_time end_time duration

    print_status "Uploading '$(basename "$file_to_upload")' to $RCLONE_REMOTE..."

    if [ "$DRY_RUN" = "true" ]; then
        print_warning "DRY RUN: Skipping actual upload"
        file_size=$(stat -f%z "$file_to_upload" 2>/dev/null || stat -c%s "$file_to_upload" 2>/dev/null)
        print_status "Would upload file size: $(( file_size / 1024 / 1024 )) MB"
        sleep 1
        print_success "DRY RUN: Upload simulation completed"
        return 0
    fi

    if ! command -v rclone >/dev/null 2>&1; then
        print_error "rclone is not installed or not in PATH"
        print_error "Please install rclone: pkg_add rclone"
        return 1
    fi

    # BUGFIX: derive the remote name from RCLONE_REMOTE instead of the
    # hard-coded "cf:", so changing the config variable keeps this check in
    # sync; use grep -x so a remote merely *containing* the name won't match
    # (rclone listremotes prints one "name:" per line).
    remote_name="${RCLONE_REMOTE%%:*}:"
    if ! rclone listremotes | grep -qx "$remote_name"; then
        print_error "rclone remote '${RCLONE_REMOTE%%:*}' is not configured"
        print_error "Please configure rclone first: rclone config"
        return 1
    fi

    file_size=$(stat -f%z "$file_to_upload" 2>/dev/null || stat -c%s "$file_to_upload" 2>/dev/null)
    print_status "Upload file size: $(( file_size / 1024 / 1024 )) MB"

    start_time=$(date +%s)

    print_status "Starting upload with progress monitoring..."
    if rclone copy "$file_to_upload" "$RCLONE_REMOTE" --progress --stats=1s >> "$LOG_FILE" 2>&1; then
        end_time=$(date +%s)
        duration=$(( end_time - start_time ))
        print_success "Upload completed successfully in ${duration} seconds"
        return 0
    else
        print_error "Upload failed"
        return 1
    fi
}
+
# Delete one local file and report the space reclaimed; warn if removal
# fails.  $1 = path, $2 = human-readable label used in messages.
_remove_and_report() {
    local target="$1" label="$2"
    [ -f "$target" ] || return 0
    local size
    size=$(stat -f%z "$target" 2>/dev/null || stat -c%s "$target" 2>/dev/null)
    if rm "$target"; then
        print_success "Removed $label: $(basename "$target") ($(( size / 1024 / 1024 )) MB freed)"
    else
        print_warning "Failed to remove $label: $(basename "$target")"
    fi
}

# Remove the local tar and compressed files after a successful upload.
cleanup_files() {
    local original_tar="$1"
    local compressed_file="$2"

    print_status "Cleaning up local files..."
    _remove_and_report "$original_tar" "original tar file"
    _remove_and_report "$compressed_file" "compressed file"
}
+
# Process one tar file: compress it, upload the result, and (optionally)
# delete both local copies afterwards.
# $1 = tar path, $2 = "true" to clean up local files after upload.
process_tar_file() {
    local tarball="$1"
    local auto_cleanup="${2:-false}"
    local compressed_file

    print_status "Processing tar file: $(basename "$tarball")"

    # Guard-clause style: fail fast at each stage.
    if ! compressed_file=$(compress_tarball "$tarball"); then
        print_error "Failed to compress: $(basename "$tarball")"
        return 1
    fi

    if ! upload_to_cloud "$compressed_file"; then
        print_error "Failed to upload: $(basename "$tarball")"
        return 1
    fi

    print_success "Successfully processed: $(basename "$tarball")"

    if [ "$auto_cleanup" = "true" ]; then
        cleanup_files "$tarball" "$compressed_file"
    fi
    return 0
}
+
# Validate that every directory in the comma-separated list exists and is
# readable before any backup work starts.  Returns 1 if any entry fails.
validate_backup_directories() {
    local dir_list="$1"
    local valid_count=0
    local invalid_count=0
    local dir old_ifs

    print_status "Validating directories before backup..."

    # SECURITY/ROBUSTNESS: split on commas in-shell.  The original wrote the
    # list to a predictably named file in world-writable /tmp
    # (/tmp/dir-validation-$$), a classic /tmp race; no temp file is needed.
    # set -f suppresses globbing of the unquoted expansion.
    old_ifs=$IFS
    set -f
    IFS=','
    set -- $dir_list
    set +f
    IFS=$old_ifs

    for dir in "$@"; do
        # Trim surrounding whitespace from each entry.
        dir=$(echo "$dir" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')

        if [ -z "$dir" ]; then
            continue
        fi

        if [ ! -d "$dir" ]; then
            print_error "Directory does not exist: $dir"
            invalid_count=$((invalid_count + 1))
        elif [ ! -r "$dir" ]; then
            print_error "Directory is not readable: $dir"
            invalid_count=$((invalid_count + 1))
        else
            print_status "✓ Valid directory: $dir"
            valid_count=$((valid_count + 1))
        fi
    done

    if [ $invalid_count -gt 0 ]; then
        print_error "Found $invalid_count invalid directories. Aborting backup."
        return 1
    fi

    print_success "All $valid_count directories validated successfully"
    return 0
}
+
# Back up every directory in the comma-separated list: validate, create a
# tar per directory, then compress/upload/clean up each one.
# Returns 1 if any directory failed.
backup_directories() {
    local dir_list="$1"
    local failed_count=0
    local success_count=0
    local dir old_ifs sanitized_name timestamp archive_name tar_file

    print_status "Starting directory backup mode"
    print_status "Target directories: $dir_list"

    if ! validate_backup_directories "$dir_list"; then
        exit 1
    fi

    if ! mkdir -p "$BACKUP_BASE_DIR"; then
        print_error "Failed to create backup directory: $BACKUP_BASE_DIR"
        exit 1
    fi
    print_status "Created temporary backup directory: $BACKUP_BASE_DIR"

    # SECURITY/ROBUSTNESS: split the list in-shell instead of routing it
    # through a predictably named temp file (/tmp/dir-list-$$) -- same /tmp
    # race as in validate_backup_directories.  set -f suppresses globbing.
    old_ifs=$IFS
    set -f
    IFS=','
    set -- $dir_list
    set +f
    IFS=$old_ifs

    for dir in "$@"; do
        # Trim surrounding whitespace from each entry.
        dir=$(echo "$dir" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')

        if [ -z "$dir" ]; then
            continue
        fi

        echo >&2
        print_status "=== Processing directory: $dir ==="

        sanitized_name=$(sanitize_filename "$dir")
        timestamp=$(date +%Y%m%d)
        archive_name="${sanitized_name}-${timestamp}"

        if tar_file=$(create_tar_archive "$dir" "$archive_name"); then
            if process_tar_file "$tar_file" "true"; then
                success_count=$((success_count + 1))
                print_success "Completed backup of: $dir"
            else
                failed_count=$((failed_count + 1))
                print_error "Failed backup of: $dir"
            fi
        else
            failed_count=$((failed_count + 1))
            print_error "Failed to create archive for: $dir"
        fi
    done

    echo >&2
    print_status "=== Backup Summary ==="
    print_status "Successful backups: $success_count"
    if [ "$failed_count" -gt 0 ]; then
        print_error "Failed backups: $failed_count"
        return 1
    else
        print_success "All directory backups completed successfully!"
        return 0
    fi
}
+
# Validate input tar file exists and is readable
# Also sanity-checks the type with file(1); the user may interactively
# override the check for input that does not look like a tar archive.
validate_input() {
    local tarball="$1"

    if [ -z "$tarball" ]; then
        print_error "No tarball file specified"
        exit 1
    fi

    if [ ! -f "$tarball" ]; then
        print_error "File '$tarball' does not exist"
        exit 1
    fi

    # BSD stat (-f%z) first, GNU stat (-c%s) as fallback.
    local file_size=$(stat -f%z "$tarball" 2>/dev/null || stat -c%s "$tarball" 2>/dev/null)
    print_status "Input file: $tarball"
    print_status "File size: $(( file_size / 1024 / 1024 )) MB"

    if ! file "$tarball" | grep -q "tar archive\|POSIX tar archive"; then
        print_warning "File '$tarball' might not be a valid tar archive"
        # NOTE(review): 'echo -n' is not portable across all /bin/sh
        # implementations; printf '%s' would be safer. Works on OpenBSD sh.
        echo -n "Continue anyway? (y/N): " >&2
        read -r response
        case "$response" in
            [yY]|[yY][eE][sS])
                log_with_timestamp "User chose to continue with non-tar file"
                ;;
            *)
                print_error "Operation cancelled by user"
                exit 1
                ;;
        esac
    else
        print_success "File validated as tar archive"
    fi
}
+
# Parse the command line into the global DIRECTORY_MODE / DIRECTORY_LIST /
# DRY_RUN / TARBALL_FILE variables.  Exits with usage on bad input.
parse_arguments() {
    while [ $# -gt 0 ]; do
        case "$1" in
            --dir)
                if [ -z "$2" ]; then
                    print_error "--dir flag requires directory list"
                    exit 1
                fi
                DIRECTORY_MODE=true
                DIRECTORY_LIST=$2
                shift 2
                ;;
            --dry-run)
                DRY_RUN=true
                shift
                ;;
            -h|--help)
                show_usage
                exit 0
                ;;
            -*)
                print_error "Unknown option: $1"
                show_usage
                exit 1
                ;;
            *)
                # First bare argument is the tarball; a second one is an error.
                if [ -n "$TARBALL_FILE" ]; then
                    print_error "Multiple file arguments not supported"
                    exit 1
                fi
                TARBALL_FILE=$1
                shift
                ;;
        esac
    done
}
+
# Display usage information and examples
# The unquoted EOF delimiter is intentional: $0, $EMAIL_RECIPIENT,
# $MAX_LOG_FILES, $MIN_FREE_SPACE_MB and $LOG_FILE expand in the help text.
show_usage() {
    cat >&2 << EOF
OpenBSD Server Backup Script
============================

Usage:
  $0 <tarball_file>                    # Process existing tarball
  $0 --dir /etc,/var/www,/home         # Backup server directories
  $0 --dir /etc,/var/log,/usr/local    # System directories backup

Options:
  --dir DIRS     Comma-separated list of directories to backup
  --dry-run      Test mode - skip actual upload to cloud storage
  --help, -h     Show this help message

Common Server Backup Examples:
  sudo $0 --dir /etc,/var/www,/var/log          # Web server
  sudo $0 --dir /etc,/var/mail,/home            # Mail server
  sudo $0 --dir /etc,/var/lib/mysql,/var/www    # Database + Web
  sudo $0 --dir /etc,/usr/local/etc,/var/log    # System config

This script will:
  1. Validate directories and permissions
  2. Create individual tar archives for each directory
  3. Compress with maximum gzip compression (level 9)
  4. Upload to cloud storage using rclone
  5. Clean up local files after successful upload
  6. Send detailed logs via email to $EMAIL_RECIPIENT
  7. Maintain log rotation (keeps last $MAX_LOG_FILES logs)

Server Requirements:
  - Run as root for complete system backups
  - tar (pre-installed on OpenBSD)
  - gzip (pre-installed on OpenBSD)
  - rclone (install with: pkg_add rclone)
  - mailx (install with: pkg_add mailx) for email notifications
  - Configured rclone remote named 'cf'
  - Minimum $MIN_FREE_SPACE_MB MB free space in /tmp

Log Location: $LOG_FILE
EOF
}
+
# Main script execution function
# Orchestrates a run: banner, permission and dependency checks, then either
# directory-backup mode (--dir) or single-tarball mode.  Exits non-zero on
# any failure; the EXIT trap handles emailing and cleanup.
main() {
    print_status "Starting server backup process on $HOSTNAME..."
    if [ "$DRY_RUN" = "true" ]; then
        print_warning "DRY RUN MODE ENABLED - No files will be uploaded"
    fi
    print_status "Email notifications will be sent to: $EMAIL_RECIPIENT"
    print_status "Logs will be saved to: $LOG_FILE"
    echo >&2

    check_user_permissions
    echo >&2

    check_dependencies
    echo >&2

    if [ "$DIRECTORY_MODE" = "true" ]; then
        if backup_directories "$DIRECTORY_LIST"; then
            print_success "All directory backups completed successfully!"
        else
            print_error "Some directory backups failed"
            exit 1
        fi
    else
        # Tarball mode: note that process_tar_file is called without the
        # auto-cleanup flag here, so the input tarball is left in place.
        validate_input "$TARBALL_FILE"
        echo >&2

        if process_tar_file "$TARBALL_FILE"; then
            print_success "Tarball backup completed successfully!"
        else
            print_error "Tarball backup failed"
            exit 1
        fi
    fi

    echo >&2
    print_success "All operations completed!"
}
+
# Global variables for argument parsing
ORIGINAL_ARGS="$*"        # preserved verbatim for logging and the sudo hint
DIRECTORY_MODE=false      # true when --dir was given
DIRECTORY_LIST=""         # comma-separated directory list from --dir
TARBALL_FILE=""           # positional tarball argument (non --dir mode)

# Parse command line arguments
parse_arguments "$@"

# Initialize logging
initialize_log

# Validate arguments
# Require either --dir or a tarball argument; otherwise print help and fail.
if [ "$DIRECTORY_MODE" = "false" ] && [ -z "$TARBALL_FILE" ]; then
    show_usage
    exit 1
fi

# Run main function
main