Fix the scan error and the backup problems
This commit is contained in:
@@ -281,6 +281,95 @@ class DatabaseBackupManager:
|
||||
except Exception as e:
|
||||
print(f"Error removing backup metadata: {e}")
|
||||
|
||||
def create_data_only_backup(self, backup_name=None):
    """
    Create a data-only backup (no schema, triggers, or structure).

    Runs mysqldump so that only INSERT statements for existing tables
    are exported.

    Args:
        backup_name (str, optional): Custom name for the backup file.

    Returns:
        dict: Result with success status, message, and backup file path.
    """
    try:
        if not self.config:
            return {
                'success': False,
                'message': 'Database configuration not loaded'
            }

        # Generate a filename with a data_only prefix so these files are
        # distinguishable from full backups.
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        if backup_name:
            filename = f"data_only_{backup_name}_{timestamp}.sql"
        else:
            filename = f"data_only_{self.config['database']}_{timestamp}.sql"

        backup_file = os.path.join(self.backup_path, filename)

        # mysqldump flags:
        #   --no-create-info    skip CREATE TABLE statements
        #   --skip-triggers     skip trigger definitions
        #   --no-create-db      skip CREATE DATABASE statement
        #   --complete-insert   include column names in INSERTs (more reliable)
        #   --extended-insert   multi-row INSERTs for efficiency
        cmd = [
            'mysqldump',
            f"--host={self.config['host']}",
            f"--port={self.config['port']}",
            f"--user={self.config['user']}",
            f"--password={self.config['password']}",
            '--no-create-info',      # Skip table structure
            '--skip-triggers',       # Skip triggers
            '--no-create-db',        # Skip database creation
            '--complete-insert',     # Include column names
            '--extended-insert',     # Multi-row INSERTs
            '--single-transaction',
            '--skip-lock-tables',
            self.config['database']
        ]

        # Stream mysqldump output straight into the backup file.
        with open(backup_file, 'w') as f:
            result = subprocess.run(
                cmd,
                stdout=f,
                stderr=subprocess.PIPE,
                text=True
            )

        if result.returncode == 0:
            file_size = os.path.getsize(backup_file)
            file_size_mb = file_size / (1024 * 1024)

            # Record the backup in the metadata store.
            self._save_backup_metadata(filename, file_size)

            return {
                'success': True,
                'message': 'Data-only backup created successfully',
                'filename': filename,
                'file_path': backup_file,
                'size': f"{file_size_mb:.2f} MB",
                'timestamp': timestamp
            }

        error_msg = result.stderr
        print(f"Data backup error: {error_msg}")
        # FIX: remove the partial dump so a failed run does not leave a
        # truncated .sql file behind in the backup directory.
        try:
            os.remove(backup_file)
        except OSError:
            pass
        return {
            'success': False,
            'message': f'Data backup failed: {error_msg}'
        }

    except Exception as e:
        print(f"Exception during data backup: {e}")
        return {
            'success': False,
            'message': f'Data backup failed: {str(e)}'
        }
|
||||
|
||||
def restore_backup(self, filename):
|
||||
"""
|
||||
Restore database from a backup file
|
||||
@@ -345,6 +434,127 @@ class DatabaseBackupManager:
|
||||
'message': f'Restore failed: {str(e)}'
|
||||
}
|
||||
|
||||
def restore_data_only(self, filename):
    """
    Restore data from a data-only backup file.

    Assumes the database schema already exists. Tables are truncated
    before the import so the restore does not create duplicate rows.

    Args:
        filename (str): Name of the data-only backup file to restore.

    Returns:
        dict: Result with success status and message.
    """
    def _open_connection():
        # Helper: fresh connection to the configured database
        # (deduplicates the two identical connect blocks).
        return mariadb.connect(
            host=self.config['host'],
            port=int(self.config['port']),
            user=self.config['user'],
            password=self.config['password'],
            database=self.config['database']
        )

    try:
        # Security: reject path traversal attempts in the filename.
        if '..' in filename or '/' in filename:
            return {
                'success': False,
                'message': 'Invalid filename'
            }

        file_path = os.path.join(self.backup_path, filename)

        if not os.path.exists(file_path):
            return {
                'success': False,
                'message': 'Backup file not found'
            }

        # Truncate all tables with foreign key checks disabled so the
        # import starts from a clean slate without constraint violations.
        try:
            conn = _open_connection()
            cursor = conn.cursor()
            cursor.execute("SET FOREIGN_KEY_CHECKS = 0;")
            cursor.execute("SHOW TABLES;")
            for (table_name,) in cursor.fetchall():
                # Keep the backup bookkeeping tables intact.
                if table_name not in ['backups_metadata', 'backup_schedule']:
                    try:
                        cursor.execute(f"TRUNCATE TABLE `{table_name}`;")
                        print(f"Truncated table: {table_name}")
                    except Exception as e:
                        print(f"Warning: Could not truncate {table_name}: {e}")
            conn.commit()
            cursor.close()
            conn.close()
        except Exception as e:
            print(f"Warning during table truncation: {e}")
            # Continue anyway - the restore might still work

        # Pipe the dump file into the mysql client.
        cmd = [
            'mysql',
            f"--host={self.config['host']}",
            f"--port={self.config['port']}",
            f"--user={self.config['user']}",
            f"--password={self.config['password']}",
            self.config['database']
        ]
        with open(file_path, 'r') as f:
            result = subprocess.run(
                cmd,
                stdin=f,
                stderr=subprocess.PIPE,
                text=True
            )

        # Re-enable foreign key checks regardless of the import outcome.
        try:
            conn = _open_connection()
            cursor = conn.cursor()
            cursor.execute("SET FOREIGN_KEY_CHECKS = 1;")
            conn.commit()
            cursor.close()
            conn.close()
        except Exception as e:
            print(f"Warning: Could not re-enable foreign key checks: {e}")

        if result.returncode == 0:
            # FIX: the success message contained a mangled placeholder
            # ("(unknown)") inside an f-string with no fields; report the
            # actual restored filename instead.
            return {
                'success': True,
                'message': f'Data restored successfully from {filename}'
            }

        error_msg = result.stderr
        print(f"Data restore error: {error_msg}")
        return {
            'success': False,
            'message': f'Data restore failed: {error_msg}'
        }

    except Exception as e:
        print(f"Exception during data restore: {e}")
        return {
            'success': False,
            'message': f'Data restore failed: {str(e)}'
        }
|
||||
|
||||
def get_backup_schedule(self):
|
||||
"""Get current backup schedule configuration"""
|
||||
try:
|
||||
@@ -352,13 +562,18 @@ class DatabaseBackupManager:
|
||||
|
||||
if os.path.exists(schedule_file):
|
||||
with open(schedule_file, 'r') as f:
|
||||
return json.load(f)
|
||||
schedule = json.load(f)
|
||||
# Ensure backup_type exists (for backward compatibility)
|
||||
if 'backup_type' not in schedule:
|
||||
schedule['backup_type'] = 'full'
|
||||
return schedule
|
||||
|
||||
# Default schedule
|
||||
return {
|
||||
'enabled': False,
|
||||
'time': '02:00', # 2 AM
|
||||
'frequency': 'daily', # daily, weekly, monthly
|
||||
'backup_type': 'full', # full or data-only
|
||||
'retention_days': 30 # Keep backups for 30 days
|
||||
}
|
||||
|
||||
@@ -607,3 +822,112 @@ class DatabaseBackupManager:
|
||||
'success': False,
|
||||
'message': f'Cleanup failed: {str(e)}'
|
||||
}
|
||||
|
||||
def upload_backup(self, uploaded_file):
    """
    Upload and validate an external backup file.

    Args:
        uploaded_file: Werkzeug FileStorage object from request.files.

    Returns:
        dict: Result with success status, filename, and validation details.
    """
    try:
        # FIX: validate the extension BEFORE importing werkzeug, so a
        # bad upload fails fast with the precise format message instead
        # of a generic 'Upload failed: No module named ...' if the
        # import ever fails.
        if not uploaded_file.filename.lower().endswith('.sql'):
            return {
                'success': False,
                'message': 'Invalid file format. Only .sql files are allowed.'
            }

        from werkzeug.utils import secure_filename
        from pathlib import Path

        # Ensure the backup directory exists.
        backup_path = Path(self.backup_path)
        backup_path.mkdir(parents=True, exist_ok=True)

        # Sanitize the client-supplied name and add a timestamp so
        # repeated uploads never collide.
        original_filename = secure_filename(uploaded_file.filename)
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')

        # If the name already starts with "backup_", keep it; otherwise
        # add an "uploaded" prefix.
        if original_filename.startswith('backup_'):
            new_filename = f"{original_filename.rsplit('.', 1)[0]}_{timestamp}.sql"
        else:
            new_filename = f"backup_uploaded_{timestamp}_{original_filename}"

        # Save the file into the backup directory.
        file_path = backup_path / new_filename
        uploaded_file.save(str(file_path))

        file_size = file_path.stat().st_size
        size_mb = round(file_size / (1024 * 1024), 2)

        # Check integrity/compatibility; reject and clean up on failure.
        validation_result = self.validate_backup_file(new_filename)
        if not validation_result['success']:
            file_path.unlink()  # Delete the invalid file
            return {
                'success': False,
                'message': f'Validation failed: {validation_result["message"]}',
                'validation_details': validation_result.get('details', {}),
                'warnings': validation_result.get('warnings', [])
            }

        # Build the response with validation details attached.
        response = {
            'success': True,
            'message': 'Backup file uploaded and validated successfully',
            'filename': new_filename,
            'size': f'{size_mb} MB',
            'path': str(file_path),
            'validation': {
                'status': 'passed',
                'message': validation_result['message'],
                'details': validation_result.get('details', {}),
                'warnings': validation_result.get('warnings', [])
            }
        }

        # Surface non-fatal validation warnings in the message.
        if validation_result.get('warnings'):
            response['message'] = f'Backup uploaded with warnings: {"; ".join(validation_result["warnings"])}'

        # Record the backup in the metadata store.
        self._save_backup_metadata(new_filename, file_size)

        return response

    except Exception as e:
        print(f"Error uploading backup: {e}")
        return {
            'success': False,
            'message': f'Upload failed: {str(e)}'
        }
|
||||
|
||||
def get_backup_file_path(self, filename):
    """
    Get the full path to a backup file (with security validation).

    Args:
        filename (str): Name of the backup file.

    Returns:
        str or None: Full file path if valid, None if the security check
            fails or the file does not exist.
    """
    # Security: reject path traversal and absolute/qualified paths.
    # FIX: the previous '/'-only check let through backslashes,
    # absolute paths, and the empty string (which made the function
    # return the backup directory itself).
    if (not filename or '..' in filename or '/' in filename
            or '\\' in filename or os.path.isabs(filename)):
        return None

    file_path = os.path.join(self.backup_path, filename)

    if os.path.exists(file_path):
        return file_path

    return None
|
||||
|
||||
|
||||
Reference in New Issue
Block a user