Python Snippets

Automatic Database Backup with Rotation and Compression

import sqlite3
import shutil
import gzip
import os
from datetime import datetime, timedelta
from pathlib import Path

def backup_database_with_rotation(db_path, backup_dir, max_backups=7, compress=True):
    """
    Create a timestamped backup of a SQLite database with automatic rotation and optional compression.

    Args:
        db_path (str): Path to the source database file
        backup_dir (str): Directory where backups will be stored
        max_backups (int): Maximum number of backups to keep (default: 7)
        compress (bool): Whether to compress backups with gzip (default: True)

    Returns:
        str: Path to the created backup file

    Raises:
        sqlite3.Error: If the source database cannot be read.
        OSError: If the backup file cannot be written.
    """
    # Ensure backup directory exists
    backup_path = Path(backup_dir)
    backup_path.mkdir(parents=True, exist_ok=True)

    # Create timestamped backup filename
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    db_name = Path(db_path).stem
    extension = "db.gz" if compress else "db"
    backup_file_path = backup_path / f"{db_name}_backup_{timestamp}.{extension}"

    try:
        if compress:
            # Write a gzip-compressed SQL dump. NOTE: sqlite3's connection
            # context manager only manages transactions and does NOT close
            # the connection, so close it explicitly in a finally block.
            src_conn = sqlite3.connect(db_path)
            try:
                with gzip.open(backup_file_path, 'wb') as backup_file:
                    for line in src_conn.iterdump():
                        backup_file.write(line.encode('utf-8') + b'\n')
            finally:
                src_conn.close()
        else:
            # Use SQLite's online backup API rather than a raw file copy:
            # copying the file of a live database can capture an
            # inconsistent snapshot if another connection is writing.
            src_conn = sqlite3.connect(db_path)
            try:
                dst_conn = sqlite3.connect(backup_file_path)
                try:
                    src_conn.backup(dst_conn)
                finally:
                    dst_conn.close()
            finally:
                src_conn.close()

        print(f"Backup created: {backup_file_path}")

        # Rotate old backups
        rotate_backups(backup_path, db_name, max_backups)

        return str(backup_file_path)

    except Exception as e:
        # Remove a partially written backup file so a later rotation pass
        # never counts a corrupt artifact as a valid backup.
        if backup_file_path.exists():
            backup_file_path.unlink()
        print(f"Backup failed: {e}")
        raise

def rotate_backups(backup_path, db_name, max_backups):
    """
    Prune old backups for a database, retaining only the newest ones.

    Args:
        backup_path (Path): Directory containing the backup files
        db_name (str): Database name prefix used to match backup files
        max_backups (int): Number of most-recent backups to keep
    """
    # Collect every backup for this database, newest first by mtime.
    candidates = sorted(
        backup_path.glob(f"{db_name}_backup_*"),
        key=lambda p: p.stat().st_mtime,
        reverse=True,
    )

    # Anything beyond the first max_backups entries is stale — delete it.
    for stale in candidates[max_backups:]:
        stale.unlink()
        print(f"Removed old backup: {stale}")

# Example usage
if __name__ == "__main__":
    # Source database and destination folder for this demo run.
    database_file = "example.db"
    backup_directory = "./backups"

    # Seed a tiny demo database on first run so the backup has content.
    if not os.path.exists(database_file):
        demo_conn = sqlite3.connect(database_file)
        demo_conn.execute(
            "CREATE TABLE IF NOT EXISTS users (id INTEGER PRIMARY KEY, name TEXT)"
        )
        demo_conn.execute("INSERT INTO users (name) VALUES ('Alice'), ('Bob')")
        demo_conn.commit()
        demo_conn.close()

    # Keep only the five most recent compressed backups.
    backup_file = backup_database_with_rotation(
        db_path=database_file,
        backup_dir=backup_directory,
        max_backups=5,
        compress=True,
    )

    print(f"Database backed up to: {backup_file}")

This code snippet provides a robust solution for automatically backing up SQLite databases with several important features:

What it does:

  1. Creates timestamped backups of SQLite databases with unique filenames
  2. Supports compression using gzip to save disk space
  3. Implements backup rotation to automatically delete old backups and maintain only a specified number
  4. Handles errors gracefully with proper exception handling
  5. Works with any SQLite database and creates organized backup directories

Why it’s useful:

  Manual backups are easy to forget, and an unmanaged backup folder grows without bound. Running this script on a schedule (e.g. via cron or Task Scheduler) produces timestamped, optionally compressed backups, while the rotation step keeps disk usage bounded to the configured number of copies.
How to run it:

  1. Save the code to a Python file (e.g., database_backup.py)
  2. Modify the database_file and backup_directory variables to match your setup
  3. Run the script: python database_backup.py

For production use:

The script creates missing backup directories automatically and surfaces failures by re-raising the original exception, so a scheduler can detect a failed run. Before relying on it unattended, review concurrency (backing up while another process is writing to the database) and file-permission behavior for your environment, and consider verifying backups by restoring them periodically.