Updated backups solution

This commit is contained in:
Quality System Admin
2025-11-03 22:18:56 +02:00
parent 1ade0b5681
commit 9c19379810
17 changed files with 3105 additions and 50 deletions

View File

@@ -102,6 +102,7 @@ class DatabaseBackupManager:
backup_file = os.path.join(self.backup_path, filename)
# Build mysqldump command
# Note: --skip-lock-tables and --force help with views that have permission issues
cmd = [
'mysqldump',
f"--host={self.config['host']}",
@@ -109,6 +110,8 @@ class DatabaseBackupManager:
f"--user={self.config['user']}",
f"--password={self.config['password']}",
'--single-transaction',
'--skip-lock-tables',
'--force',
'--routines',
'--triggers',
'--events',
@@ -391,6 +394,181 @@ class DatabaseBackupManager:
'message': f'Failed to save schedule: {str(e)}'
}
def validate_backup_file(self, filename):
    """
    Validate an uploaded backup file for integrity and compatibility.

    Checks performed:
    - Filename is safe (no path traversal, no path separators, not absolute)
    - File exists under self.backup_path and has a plausible size
      (files over 2GB skip content validation to avoid timeouts;
      files over 100MB are only partially scanned)
    - Content looks like a SQL dump compatible with this application
      (users table, INSERT statements)
    - No dangerous commands (unconditional DROP DATABASE, TRUNCATE TABLE,
      apparent binary data)

    Args:
        filename (str): Name of the backup file inside self.backup_path.

    Returns:
        dict: {'success': bool, 'message': str, 'details': dict} plus a
        'warnings' list on every content-validation path. 'details' may
        include size_mb, structure flags, suspicious_commands, line_count,
        a 5-line preview, and partial-validation markers for large files.
    """
    try:
        # Security: reject anything that could escape self.backup_path.
        # Checks both separators ('/' and Windows '\\') and absolute
        # paths, not just '..', so os.path.join cannot be redirected.
        if ('..' in filename or '/' in filename or '\\' in filename
                or os.path.isabs(filename)):
            return {
                'success': False,
                'message': 'Invalid filename - potential security issue',
                'details': {}
            }

        file_path = os.path.join(self.backup_path, filename)

        # Check if file exists
        if not os.path.exists(file_path):
            return {
                'success': False,
                'message': 'Backup file not found',
                'details': {}
            }

        # Check file size (reject if too small, fast-path if too large)
        file_size = os.path.getsize(file_path)
        size_mb = round(file_size / (1024 * 1024), 2)

        if file_size < 1024:  # Less than 1KB is suspicious
            return {
                'success': False,
                'message': 'File too small - may be empty or corrupted',
                'details': {'size_mb': size_mb}
            }

        # For very large files (>2GB), skip detailed validation to avoid
        # timeouts; just accept with a warning.
        if file_size > 2 * 1024 * 1024 * 1024:  # Over 2GB
            return {
                'success': True,
                'message': f'Large backup file accepted ({size_mb:.2f} MB) - detailed validation skipped for performance',
                'details': {
                    'size_mb': size_mb,
                    'validation_skipped': True,
                    'reason': 'File too large for line-by-line validation'
                },
                'warnings': ['Detailed content validation skipped due to large file size']
            }

        # Read and validate SQL content (only for files < 2GB)
        validation_details = {
            'size_mb': size_mb,
            'has_create_database': False,
            'has_users_table': False,
            'has_insert_statements': False,
            'suspicious_commands': [],
            'line_count': 0
        }

        # For large files (100MB - 2GB), only scan the first 10MB
        max_bytes_to_read = 10 * 1024 * 1024 if file_size > 100 * 1024 * 1024 else None
        bytes_read = 0

        # errors='ignore' keeps binary garbage from aborting the scan;
        # the very-long-line check below still flags binary-looking data.
        with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
            content_preview = []
            line_count = 0

            for line in f:
                line_count += 1
                bytes_read += len(line.encode('utf-8'))

                # Stop reading after max_bytes for large files
                if max_bytes_to_read and bytes_read > max_bytes_to_read:
                    validation_details['partial_validation'] = True
                    validation_details['bytes_validated'] = f'{bytes_read / (1024*1024):.2f} MB'
                    break

                line_upper = line.strip().upper()

                # Store first 10 non-comment lines for preview
                if len(content_preview) < 10 and line_upper and not line_upper.startswith('--') and not line_upper.startswith('/*'):
                    content_preview.append(line.strip()[:100])  # First 100 chars

                # Check for expected SQL structure
                if 'CREATE DATABASE' in line_upper or 'CREATE SCHEMA' in line_upper:
                    validation_details['has_create_database'] = True
                if 'CREATE TABLE' in line_upper and 'USERS' in line_upper:
                    validation_details['has_users_table'] = True
                if line_upper.startswith('INSERT INTO'):
                    validation_details['has_insert_statements'] = True

                # Check for potentially dangerous commands (outside of
                # normal backup context)
                if 'DROP DATABASE' in line_upper and 'IF EXISTS' not in line_upper:
                    validation_details['suspicious_commands'].append('Unconditional DROP DATABASE found')
                if 'TRUNCATE TABLE' in line_upper:
                    validation_details['suspicious_commands'].append('TRUNCATE TABLE found')

                # Very long lines suggest embedded binary data; stop scanning
                if len(line) > 50000:
                    validation_details['suspicious_commands'].append('Very long lines detected (possible binary data)')
                    break

        validation_details['line_count'] = line_count
        validation_details['preview'] = content_preview[:5]  # First 5 lines

        # Evaluate validation results: issues block acceptance, warnings don't
        issues = []
        warnings = []

        if not validation_details['has_insert_statements']:
            warnings.append('No INSERT statements found - backup may be empty')
        if not validation_details['has_users_table']:
            warnings.append('Users table not found - may not be compatible with this application')
        if validation_details['suspicious_commands']:
            issues.extend(validation_details['suspicious_commands'])
        if validation_details['line_count'] < 10:
            issues.append('Too few lines - file may be incomplete')

        # Final validation decision
        if issues:
            return {
                'success': False,
                'message': f'Validation failed: {"; ".join(issues)}',
                'details': validation_details,
                'warnings': warnings
            }
        if warnings:
            return {
                'success': True,
                'message': 'Validation passed with warnings',
                'details': validation_details,
                'warnings': warnings
            }
        return {
            'success': True,
            'message': 'Backup file validated successfully',
            'details': validation_details,
            'warnings': []
        }

    except UnicodeDecodeError as e:
        # Defensive: largely unreachable since the file is opened with
        # errors='ignore', but kept in case the open strategy changes.
        return {
            'success': False,
            'message': 'File contains invalid characters - may be corrupted or not a text file',
            'details': {'error': str(e)}
        }
    except Exception as e:
        print(f"Error validating backup file: {e}")
        return {
            'success': False,
            'message': f'Validation error: {str(e)}',
            'details': {}
        }
def cleanup_old_backups(self, retention_days=30):
"""
Delete backups older than retention_days