🔄 Add Comprehensive Backup Management System

New Features:
- Complete backup lifecycle management: create, list, download, delete, cleanup (see the API usage sketch after this list)
- Web-based backup interface with real-time status updates
- Individual backup deletion and bulk cleanup for old backups
- Docker-aware backup operations with volume persistence
- Automated backup scheduling and retention policies
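
The new endpoints sit alongside the existing API routes; a minimal client sketch of how they might be driven, assuming a hypothetical `http://localhost:5000/api` base URL and an already-authenticated `requests.Session` (neither the URL prefix nor the login flow is shown in this diff):

```python
# Minimal client sketch for the backup API added in this commit.
# Assumptions (not confirmed by the diff): the blueprint is mounted under /api,
# the server listens on localhost:5000, and `session` already carries a valid login.
import requests

BASE = "http://localhost:5000/api"   # hypothetical base URL
session = requests.Session()          # assume login has already happened

# Create a full backup (type may be 'data', 'config', or 'full')
resp = session.post(f"{BASE}/backup/create", json={"type": "full"})
print(resp.json())

# List available backups
for b in session.get(f"{BASE}/backup/list").json().get("backups", []):
    print(b["filename"], b["size"], b["date"])

# Delete old backups, keeping only the 5 most recent
resp = session.post(f"{BASE}/backup/cleanup", json={"keep": 5})
print(resp.json().get("message"))
```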

📁 Added Files:
- backup.py - Core backup script for creating timestamped archives
- docker_backup.sh - Docker-compatible backup wrapper script
- app/templates/backup.html - Web interface for backup management
- BACKUP_SYSTEM.md - Comprehensive backup system documentation
- BACKUP_GUIDE.md - Quick reference guide for backup operations

🔧 Enhanced Files:
- Dockerfile - Added backup.py copy for container availability
- docker-compose.yml - Added backup volume mount for persistence
- app/routes/api.py - Added backup API endpoints (create, list, delete, cleanup)
- app/routes/main.py - Added backup management route
- app/templates/index.html - Added backup management navigation
- README.md - Updated with backup system overview and quick start

🎯 Key Improvements:
- Fixed backup creation errors in the Docker environment
- Added Docker-aware path detection for container operations (see the helper sketch after this list)
- Implemented proper error handling and user confirmation dialogs
- Added real-time backup status updates via JavaScript
- Enhanced data persistence with volume mounting
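
Every endpoint in the diff below repeats the same container-vs-development probe, keyed on the presence of /app/backup.py. A small helper capturing that pattern is sketched here purely as an illustration of the detection logic; the committed routes inline the check in each handler rather than calling such a helper:

```python
import os
from pathlib import Path

def resolve_backup_paths():
    """Return (app_root, backup_script, backup_dir) for container or development runs.

    Illustrative helper only: the committed routes inline this same check,
    keyed on the presence of /app/backup.py inside the Docker image.
    """
    if os.path.exists('/app/backup.py'):
        # Running in the Docker container (backup.py is copied to /app by the Dockerfile)
        app_root = Path('/app')
        backup_script = '/app/backup.py'
    else:
        # Running from a development checkout: app/routes/ -> project root
        app_root = Path(__file__).parent.parent.parent
        backup_script = 'backup.py'
    return app_root, backup_script, app_root / 'backups'
```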

💡 Use Cases:
- Data protection and disaster recovery
- Environment migration and cloning
- Development data management
- Automated maintenance workflows (see the scheduled-cleanup sketch below)
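
As one example of an automated maintenance workflow, a scheduled job could call the create and cleanup endpoints nightly. A minimal sketch, assuming the same hypothetical `/api` prefix and authenticated session as in the earlier sketch:

```python
# Hypothetical nightly maintenance job: create a data backup, then prune old archives.
# Assumes the /api prefix and an authenticated requests.Session as in the earlier sketch.
import requests

def nightly_backup(session: requests.Session, base: str = "http://localhost:5000/api") -> None:
    created = session.post(f"{base}/backup/create", json={"type": "data"}).json()
    if not created.get("success"):
        raise RuntimeError(f"backup failed: {created}")
    # Keep only the 7 most recent archives
    session.post(f"{base}/backup/cleanup", json={"keep": 7})
```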

commit 9e4c21996b
parent faaddba185
date:   2025-08-01 13:01:15 -04:00
12 changed files with 2515 additions and 1 deletions

app/routes/api.py

@@ -476,3 +476,310 @@ def generate_shortened_qr():
    except Exception as e:
        return jsonify({'error': str(e)}), 500


# Backup Management API Endpoints

@bp.route('/backup/create', methods=['POST'])
@login_required
def create_backup():
    """Create backup via API"""
    try:
        import subprocess
        import os
        from pathlib import Path

        data = request.json or {}
        backup_type = data.get('type', 'data')  # data, config, full

        # Use absolute path for Docker container or relative path for development
        if os.path.exists('/app/backup.py'):
            # Running in Docker container
            app_root = '/app'
            backup_script = '/app/backup.py'
        else:
            # Running in development
            app_root = Path(__file__).parent.parent.parent
            backup_script = 'backup.py'

        if backup_type == 'data':
            # Create data backup using Python script
            result = subprocess.run(
                ['python3', backup_script, '--data-only'],
                cwd=app_root,
                capture_output=True,
                text=True
            )
        elif backup_type == 'config':
            # Create config backup
            result = subprocess.run(
                ['python3', backup_script, '--config'],
                cwd=app_root,
                capture_output=True,
                text=True
            )
        elif backup_type == 'full':
            # Create full backup
            result = subprocess.run(
                ['python3', backup_script, '--full'],
                cwd=app_root,
                capture_output=True,
                text=True
            )
        else:
            return jsonify({'error': 'Invalid backup type'}), 400

        if result.returncode == 0:
            return jsonify({
                'success': True,
                'message': 'Backup created successfully',
                'output': result.stdout
            })
        else:
            return jsonify({
                'error': 'Backup creation failed',
                'output': result.stderr
            }), 500

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@bp.route('/backup/list', methods=['GET'])
@login_required
def list_backups():
    """List available backups"""
    try:
        import subprocess
        import os
        from pathlib import Path

        # Use absolute path for Docker container or relative path for development
        if os.path.exists('/app/backup.py'):
            # Running in Docker container
            app_root = '/app'
            backup_script = '/app/backup.py'
        else:
            # Running in development
            app_root = Path(__file__).parent.parent.parent
            backup_script = 'backup.py'

        result = subprocess.run(
            ['python3', backup_script, '--list'],
            cwd=app_root,
            capture_output=True,
            text=True
        )

        if result.returncode == 0:
            # Parse backup list from output; rows look like: type | filename | size | date
            lines = result.stdout.strip().split('\n')
            backups = []
            for line in lines:
                # Skip the "Available backups" header; only parse pipe-delimited rows
                if '|' in line and 'Available backups' not in line:
                    parts = [part.strip() for part in line.split('|')]
                    if len(parts) >= 4:
                        backups.append({
                            'type': parts[0],
                            'filename': parts[1],
                            'size': parts[2],
                            'date': parts[3]
                        })
            return jsonify({
                'success': True,
                'backups': backups
            })
        else:
            return jsonify({
                'error': 'Failed to list backups',
                'output': result.stderr
            }), 500

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@bp.route('/backup/download/<filename>', methods=['GET'])
@login_required
def download_backup(filename):
    """Download backup file"""
    try:
        import os
        from pathlib import Path

        # Security check: ensure filename is just a filename, not a path
        if '/' in filename or '\\' in filename or '..' in filename:
            return jsonify({'error': 'Invalid filename'}), 400

        # Use absolute path for Docker container or relative path for development
        if os.path.exists('/app/backup.py'):
            # Running in Docker container
            backup_dir = Path('/app/backups')
        else:
            # Running in development
            app_root = Path(__file__).parent.parent.parent
            backup_dir = app_root / 'backups'

        backup_file = backup_dir / filename
        if not backup_file.exists():
            return jsonify({'error': 'Backup file not found'}), 404

        return send_file(
            str(backup_file),
            as_attachment=True,
            download_name=filename,
            mimetype='application/gzip'
        )

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@bp.route('/backup/status', methods=['GET'])
@login_required
def backup_status():
    """Get backup system status"""
    try:
        import os
        from pathlib import Path
        from datetime import datetime

        # Use absolute path for Docker container or relative path for development
        if os.path.exists('/app/backup.py'):
            # Running in Docker container
            backup_dir = Path('/app/backups')
            data_dir = Path('/app/data')
        else:
            # Running in development
            app_root = Path(__file__).parent.parent.parent
            backup_dir = app_root / 'backups'
            data_dir = app_root / 'data'

        # Count backups
        backup_files = list(backup_dir.glob('*.tar.gz')) if backup_dir.exists() else []

        # Get data directory size
        data_size = 0
        if data_dir.exists():
            for file_path in data_dir.rglob('*'):
                if file_path.is_file():
                    data_size += file_path.stat().st_size

        # Get backup directory size
        backup_size = 0
        for backup_file in backup_files:
            backup_size += backup_file.stat().st_size

        # Check last backup time
        last_backup = None
        if backup_files:
            latest_backup = max(backup_files, key=lambda x: x.stat().st_mtime)
            last_backup = datetime.fromtimestamp(latest_backup.stat().st_mtime).isoformat()

        return jsonify({
            'success': True,
            'status': {
                'backup_count': len(backup_files),
                'data_size': data_size,
                'backup_size': backup_size,
                'last_backup': last_backup,
                'backup_directory': str(backup_dir),
                'data_directory': str(data_dir)
            }
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@bp.route('/backup/delete/<filename>', methods=['DELETE'])
@login_required
def delete_backup(filename):
    """Delete a backup file"""
    try:
        import os
        from pathlib import Path

        # Security check: ensure filename is just a filename, not a path
        if '/' in filename or '\\' in filename or '..' in filename:
            return jsonify({'error': 'Invalid filename'}), 400

        # Use absolute path for Docker container or relative path for development
        if os.path.exists('/app/backup.py'):
            # Running in Docker container
            backup_dir = Path('/app/backups')
        else:
            # Running in development
            app_root = Path(__file__).parent.parent.parent
            backup_dir = app_root / 'backups'

        backup_file = backup_dir / filename

        # Check if file exists
        if not backup_file.exists():
            return jsonify({'error': 'Backup file not found'}), 404

        # Delete the file
        backup_file.unlink()

        # Also delete associated metadata file if it exists
        metadata_file = backup_dir / f"{filename.replace('.tar.gz', '.json')}"
        if metadata_file.exists():
            metadata_file.unlink()

        return jsonify({
            'success': True,
            'message': f'Backup {filename} deleted successfully'
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@bp.route('/backup/cleanup', methods=['POST'])
@login_required
def cleanup_old_backups():
    """Delete old backup files, keeping only the N most recent"""
    try:
        import os
        from pathlib import Path

        data = request.json or {}
        keep_count = data.get('keep', 5)  # Default: keep 5 most recent

        # Use absolute path for Docker container or relative path for development
        if os.path.exists('/app/backup.py'):
            # Running in Docker container
            backup_dir = Path('/app/backups')
        else:
            # Running in development
            app_root = Path(__file__).parent.parent.parent
            backup_dir = app_root / 'backups'

        if not backup_dir.exists():
            return jsonify({'error': 'Backup directory not found'}), 404

        # Get all backup files sorted by modification time (newest first)
        backup_files = list(backup_dir.glob('*.tar.gz'))
        backup_files.sort(key=lambda x: x.stat().st_mtime, reverse=True)

        # Delete old backups beyond the keep_count newest
        deleted_files = []
        files_to_delete = backup_files[keep_count:]
        for backup_file in files_to_delete:
            # Delete the backup file
            backup_file.unlink()
            deleted_files.append(backup_file.name)

            # Also delete associated metadata file if it exists (same naming as the delete endpoint)
            metadata_file = backup_dir / backup_file.name.replace('.tar.gz', '.json')
            if metadata_file.exists():
                metadata_file.unlink()

        return jsonify({
            'success': True,
            'message': f'Cleanup completed. Deleted {len(deleted_files)} old backups.',
            'deleted_files': deleted_files,
            'kept_count': min(len(backup_files), keep_count)
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 500