Initial commit — Server_Monitorizare_v2
This commit is contained in:
201
app/__init__.py
Normal file
201
app/__init__.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""
|
||||
Flask application factory and initialization
|
||||
"""
|
||||
from flask import Flask, request, jsonify, render_template
|
||||
import os
|
||||
import logging
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from config.config import get_config
|
||||
|
||||
def create_app(config_name=None):
    """Application factory: build and configure the Flask app.

    Args:
        config_name: optional configuration profile name forwarded to
            ``get_config``; ``None`` selects the default profile.

    Returns:
        A fully configured :class:`flask.Flask` instance.
    """
    # Templates and static assets live at the project root, one level
    # above the app/ package that contains this module.
    root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

    app = Flask(
        __name__,
        template_folder=os.path.join(root, 'templates'),
        static_folder=os.path.join(root, 'static'),
    )

    # Load the selected configuration object.
    app.config.from_object(get_config(config_name))

    # Wire the remaining pieces in dependency order: directories first,
    # then logging, then routes/handlers/template context.
    _ensure_directories()
    _setup_logging(app)
    _register_blueprints(app)
    _register_error_handlers(app)
    _register_context_processors(app)

    return app
|
||||
|
||||
def _ensure_directories():
|
||||
"""Ensure required directories exist"""
|
||||
directories = [
|
||||
'data',
|
||||
'data/uploads',
|
||||
'data/backups',
|
||||
'logs',
|
||||
'ansible/inventory',
|
||||
'ansible/playbooks',
|
||||
'ansible/roles'
|
||||
]
|
||||
|
||||
for directory in directories:
|
||||
os.makedirs(directory, exist_ok=True)
|
||||
|
||||
def _setup_logging(app):
    """Attach rotating file logging for non-debug, non-testing runs.

    A RotatingFileHandler (10MB x 10 backups) is added only when the
    LOG_FILE config key is set; the app logger level is raised to INFO
    for every production run regardless.
    """
    if not app.debug and not app.testing:
        # File logging for production
        if app.config.get('LOG_FILE'):
            # NOTE(review): 'logs' is created relative to the current
            # working directory while LOG_FILE comes from config — these
            # may point to different places; confirm the deployment CWD.
            if not os.path.exists('logs'):
                os.mkdir('logs')

            file_handler = RotatingFileHandler(
                app.config['LOG_FILE'],
                maxBytes=10240000,  # 10MB
                backupCount=10
            )
            file_handler.setFormatter(logging.Formatter(
                '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'
            ))
            file_handler.setLevel(logging.INFO)
            app.logger.addHandler(file_handler)

        app.logger.setLevel(logging.INFO)
        app.logger.info('Enhanced monitoring server startup')
|
||||
|
||||
def _register_blueprints(app):
    """Register all API and web blueprints on *app*.

    Blueprints are imported lazily inside this function to avoid
    circular imports between the app package and its sub-modules.
    """
    # Import blueprints here to avoid circular imports
    from app.api.logs import logs_bp
    from app.api.ansible import ansible_bp
    from app.api.wmt import wmt_api_bp
    from app.web.main import main_bp
    from app.web.ansible import ansible_web_bp
    from app.web.wmt import wmt_web_bp

    app.register_blueprint(logs_bp)
    app.register_blueprint(ansible_bp)
    app.register_blueprint(wmt_api_bp)
    app.register_blueprint(main_bp)
    app.register_blueprint(ansible_web_bp)
    app.register_blueprint(wmt_web_bp)

    # Add compatibility routes for old clients
    @app.route('/logs', methods=['POST'])
    def compatibility_logs():
        """Compatibility endpoint for old prezenta clients.

        Forwards the request to the new API endpoint unchanged.
        (Removed unused imports: request/redirect/url_for/re were
        imported here but never referenced.)
        """
        # Import inside function to avoid circular imports
        from app.api.logs import submit_log
        return submit_log()
|
||||
|
||||
def _register_error_handlers(app):
    """Register HTTP error handlers for 400/401/403/404/500.

    Every handler returns JSON when the client sent JSON
    (request.is_json) and an HTML error page otherwise; 404 and 500
    additionally fall back to inline HTML if their error template
    cannot be rendered.
    """

    @app.errorhandler(400)
    def bad_request(error):
        if request.is_json:
            return jsonify({
                'error': 'Bad request',
                'message': 'The request could not be understood by the server'
            }), 400
        return render_template('errors/400.html'), 400

    @app.errorhandler(401)
    def unauthorized(error):
        if request.is_json:
            return jsonify({
                'error': 'Unauthorized',
                'message': 'Authentication required'
            }), 401
        return render_template('errors/401.html'), 401

    @app.errorhandler(403)
    def forbidden(error):
        if request.is_json:
            return jsonify({
                'error': 'Forbidden',
                'message': 'Insufficient permissions'
            }), 403
        return render_template('errors/403.html'), 403

    @app.errorhandler(404)
    def not_found(error):
        if request.is_json:
            return jsonify({
                'error': 'Not found',
                'message': 'The requested resource was not found'
            }), 404
        try:
            return render_template('errors/404.html'), 404
        except Exception as e:
            # Fallback if template cannot be loaded
            app.logger.error(f'Error loading 404 template: {e}')
            return '''
            <!DOCTYPE html>
            <html>
            <head><title>404 - Page Not Found</title></head>
            <body>
                <h1>404 - Page Not Found</h1>
                <p>The requested resource was not found.</p>
                <a href="/">← Back to Dashboard</a>
            </body>
            </html>
            ''', 404

    @app.errorhandler(500)
    def internal_error(error):
        # Log every 500 so production failures are traceable.
        app.logger.error(f'Internal server error: {error}')
        if request.is_json:
            return jsonify({
                'error': 'Internal server error',
                'message': 'An unexpected error occurred'
            }), 500
        try:
            return render_template('errors/500.html'), 500
        except Exception as e:
            app.logger.error(f'Error loading 500 template: {e}')
            return '''
            <!DOCTYPE html>
            <html>
            <head><title>500 - Internal Server Error</title></head>
            <body>
                <h1>500 - Internal Server Error</h1>
                <p>An unexpected error occurred.</p>
                <a href="/">← Back to Dashboard</a>
            </body>
            </html>
            ''', 500
|
||||
|
||||
def _register_context_processors(app):
    """Register template context processors on *app*."""

    @app.context_processor
    def inject_config():
        """Expose app metadata and the pending WMT request count to all templates."""
        from app.models import WMTUpdateRequest
        from config.database_config import get_db

        # Best effort: a database failure must never break page rendering.
        try:
            with get_db().get_session() as session:
                pending = (
                    session.query(WMTUpdateRequest)
                    .filter_by(status='pending')
                    .count()
                )
        except Exception:
            pending = 0

        return {
            'app_name': 'Enhanced Server Monitoring',
            'app_version': '2.0.0',
            'pending_wmt_count': pending,
        }
|
||||
0
app/api/__init__.py
Normal file
0
app/api/__init__.py
Normal file
454
app/api/ansible.py
Normal file
454
app/api/ansible.py
Normal file
@@ -0,0 +1,454 @@
|
||||
"""
|
||||
Ansible and SSH management API endpoints
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
from app.services.ansible_service import AnsibleService
|
||||
from app.models import Device, AnsibleExecution
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
# Create blueprint
|
||||
ansible_bp = Blueprint('ansible', __name__, url_prefix='/api/ansible')
|
||||
|
||||
# Initialize service
|
||||
ansible_service = AnsibleService()
|
||||
|
||||
@ansible_bp.route('/inventory', methods=['GET'])
def get_inventory():
    """Return the parsed (structured) Ansible inventory as JSON."""
    try:
        inventory = ansible_service.get_inventory_data()
    except Exception as exc:
        logging.error(f"Error getting inventory: {exc}")
        return jsonify({'error': str(exc), 'success': False}), 500
    return jsonify({'success': True, 'inventory': inventory})
|
||||
|
||||
@ansible_bp.route('/inventory/raw', methods=['GET'])
def get_inventory_raw():
    """Return the raw YAML text of the inventory file."""
    try:
        raw = ansible_service.get_inventory_data().get('raw_yaml', '')
    except Exception as exc:
        return jsonify({'error': str(exc), 'success': False}), 500
    return jsonify({'success': True, 'yaml': raw})
|
||||
|
||||
@ansible_bp.route('/inventory/sync', methods=['POST'])
def sync_inventory():
    """Sync all active app devices into the monitoring_devices inventory group."""
    try:
        outcome = ansible_service.sync_devices_to_inventory()
    except Exception as exc:
        logging.error(f"Error syncing inventory: {exc}")
        return jsonify({'error': str(exc), 'success': False}), 500
    # Service reports success/failure in the payload; map it to the status code.
    return jsonify(outcome), (200 if outcome.get('success') else 400)
|
||||
|
||||
@ansible_bp.route('/inventory/group/add', methods=['POST'])
def add_inventory_group():
    """Create a new inventory group named by the JSON field ``group_name``."""
    try:
        payload = request.get_json() or {}
        name = payload.get('group_name', '').strip()
        if not name:
            return jsonify({'success': False, 'error': 'group_name is required'}), 400
        outcome = ansible_service.add_group_to_inventory(name)
        return jsonify(outcome), (200 if outcome.get('success') else 400)
    except Exception as exc:
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/group/remove', methods=['POST'])
def remove_inventory_group():
    """Delete an inventory group named by the JSON field ``group_name``."""
    try:
        payload = request.get_json() or {}
        name = payload.get('group_name', '').strip()
        if not name:
            return jsonify({'success': False, 'error': 'group_name is required'}), 400
        outcome = ansible_service.remove_group_from_inventory(name)
        return jsonify(outcome), (200 if outcome.get('success') else 400)
    except Exception as exc:
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/host/add', methods=['POST'])
def add_inventory_host():
    """Add a host to an inventory group.

    Expected JSON fields: group, hostname, ip (required); ssh_user,
    ssh_port, use_key, password (optional).

    Returns 400 for validation problems, 500 for unexpected failures.
    """
    try:
        data = request.get_json()
        if not data:
            return jsonify({'success': False, 'error': 'JSON body required'}), 400

        group = data.get('group', '').strip()
        hostname = data.get('hostname', '').strip()
        ip = data.get('ip', '').strip()
        ssh_user = data.get('ssh_user', 'pi').strip() or 'pi'
        # Validate ssh_port explicitly: a non-numeric value is a client
        # error (400), not an internal server error (500) as before.
        try:
            ssh_port = int(data.get('ssh_port', 22))
        except (TypeError, ValueError):
            return jsonify({'success': False, 'error': 'ssh_port must be an integer'}), 400
        use_key = bool(data.get('use_key', True))
        password = data.get('password', None)

        if not group or not hostname or not ip:
            return jsonify({'success': False, 'error': 'group, hostname and ip are required'}), 400

        result = ansible_service.add_host_to_inventory(
            group=group, hostname=hostname, ip=ip,
            ssh_user=ssh_user, ssh_port=ssh_port,
            use_key=use_key, password=password
        )
        return jsonify(result), 200 if result.get('success') else 400
    except Exception as e:
        return jsonify({'error': str(e), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/host/remove', methods=['POST'])
def remove_inventory_host():
    """Remove a host from an inventory group (JSON fields: group, hostname)."""
    try:
        payload = request.get_json() or {}
        group_name = payload.get('group', '').strip()
        host_name = payload.get('hostname', '').strip()
        if not group_name or not host_name:
            return jsonify({'success': False, 'error': 'group and hostname are required'}), 400
        outcome = ansible_service.remove_host_from_inventory(group=group_name, hostname=host_name)
        return jsonify(outcome), (200 if outcome.get('success') else 400)
    except Exception as exc:
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/refresh', methods=['POST'])
def refresh_inventory():
    """Refresh the Ansible inventory from the database (legacy alias for /sync)."""
    try:
        outcome = ansible_service.sync_devices_to_inventory()
        return jsonify(outcome), (200 if outcome.get('success') else 400)
    except Exception as exc:
        logging.error(f"Error refreshing inventory: {exc}")
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/playbooks', methods=['GET'])
def list_playbooks():
    """List playbooks found on disk plus the built-in ones."""
    try:
        directory = ansible_service.playbook_dir

        # Scan the playbook directory for *.yml files, if it exists.
        found = []
        if directory.exists():
            for path in directory.glob('*.yml'):
                found.append({
                    'name': path.stem,
                    'filename': path.name,
                    'path': str(path),
                    'modified': datetime.fromtimestamp(path.stat().st_mtime).isoformat(),
                })

        # Built-in playbooks are always advertised alongside on-disk ones.
        builtin = [
            {
                'name': 'update_devices',
                'description': 'Update all packages on monitoring devices',
                'builtin': True,
            },
            {
                'name': 'restart_service',
                'description': 'Restart monitoring services on devices',
                'builtin': True,
            },
        ]

        return jsonify({
            'success': True,
            'playbooks': found,
            'builtin_playbooks': builtin,
        })
    except Exception as exc:
        logging.error(f"Error listing playbooks: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False,
        }), 500
|
||||
|
||||
@ansible_bp.route('/execute', methods=['POST'])
def execute_playbook():
    """
    Execute Ansible playbook

    Expected JSON:
    {
        "playbook": "update_devices",
        "limit_hosts": ["device-01", "device-02"],  # optional
        "extra_vars": {"key": "value"},  # optional
        "create_builtin": True  # optional, create builtin playbook if needed
    }

    Returns 400 if no playbook name is supplied, 500 when the service
    reports failure or an unexpected exception occurs.
    """
    try:
        data = request.get_json()

        if not data or not data.get('playbook'):
            return jsonify({
                'error': 'Playbook name is required',
                'success': False
            }), 400

        playbook_name = data['playbook']
        limit_hosts = data.get('limit_hosts')
        extra_vars = data.get('extra_vars', {})
        create_builtin = data.get('create_builtin', True)

        # Create builtin playbooks if they don't exist
        if create_builtin:
            if playbook_name == 'update_devices':
                ansible_service.create_update_playbook()
            elif playbook_name == 'restart_service':
                ansible_service.create_restart_service_playbook()

        # Add controller IP to extra vars for callbacks
        # NOTE(review): request.host may include the port (host:port) —
        # confirm consumers of ansible_controller_ip expect that form.
        extra_vars['ansible_controller_ip'] = request.host

        # Execute playbook
        result = ansible_service.execute_playbook(
            playbook_name=playbook_name,
            limit_hosts=limit_hosts,
            extra_vars=extra_vars
        )

        if result['success']:
            return jsonify({
                'success': True,
                'message': 'Playbook execution started',
                'execution_id': result['execution_id'],
                'log_file': result['log_file']
            })
        else:
            return jsonify(result), 500

    except Exception as e:
        logging.error(f"Error executing playbook: {e}")
        return jsonify({
            'error': str(e),
            'success': False
        }), 500
|
||||
|
||||
@ansible_bp.route('/executions', methods=['GET'])
def get_executions():
    """Return the Ansible execution history (limit capped at 200 entries)."""
    try:
        requested = int(request.args.get('limit', 50))
        history = ansible_service.get_execution_history(limit=min(requested, 200))
        return jsonify({'success': True, 'executions': history})
    except Exception as exc:
        logging.error(f"Error getting executions: {exc}")
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
|
||||
@ansible_bp.route('/executions/<execution_id>/live', methods=['GET'])
def get_execution_live(execution_id):
    """Poll the current log and status for a running or finished execution (UUID)."""
    result = ansible_service.get_live_execution(execution_id)
    # Unknown execution IDs surface as 404; everything else is 200.
    status = 200 if result.get('success') else 404
    return jsonify(result), status
|
||||
|
||||
@ansible_bp.route('/executions/<int:execution_id>', methods=['GET'])
def get_execution_details(execution_id):
    """Get detailed execution information

    Loads one AnsibleExecution row by primary key and serializes every
    field; if the recorded log file still exists on disk its full
    contents are included as 'full_log'.
    """
    try:
        with get_db().get_session() as session:
            # NOTE(review): Query.get() is the legacy SQLAlchemy API;
            # newer versions prefer Session.get(AnsibleExecution, id).
            execution = session.query(AnsibleExecution).get(execution_id)

            if not execution:
                return jsonify({
                    'error': 'Execution not found',
                    'success': False
                }), 404

            # Serialize inside the session so lazy attributes resolve.
            execution_data = {
                'id': execution.id,
                'playbook_name': execution.playbook_name,
                'target_devices': json.loads(execution.target_devices) if execution.target_devices else [],
                'command_line': execution.command_line,
                'start_time': execution.start_time.isoformat() if execution.start_time else None,
                'end_time': execution.end_time.isoformat() if execution.end_time else None,
                'status': execution.status,
                'exit_code': execution.exit_code,
                'stdout_log': execution.stdout_log,
                'stderr_log': execution.stderr_log,
                'successful_hosts': execution.successful_hosts,
                'failed_hosts': execution.failed_hosts,
                'unreachable_hosts': execution.unreachable_hosts
            }

            # Read log file if it exists
            if execution.ansible_log_file and os.path.exists(execution.ansible_log_file):
                with open(execution.ansible_log_file, 'r') as f:
                    execution_data['full_log'] = f.read()

            return jsonify({
                'success': True,
                'execution': execution_data
            })

    except Exception as e:
        logging.error(f"Error getting execution details: {e}")
        return jsonify({
            'error': str(e),
            'success': False
        }), 500
|
||||
|
||||
@ansible_bp.route('/ssh/test', methods=['POST'])
def test_ssh_connectivity():
    """
    Test SSH connectivity to devices

    Expected JSON:
    {
        "device_ips": ["192.168.1.100", "192.168.1.101"],
        "username": "pi"  # optional, defaults to "pi"
    }
    """
    try:
        payload = request.get_json()

        if not payload or not payload.get('device_ips'):
            return jsonify({
                'error': 'device_ips list is required',
                'success': False
            }), 400

        ips = payload['device_ips']
        user = payload.get('username', 'pi')

        if len(ips) == 1:
            # Single device: test it directly and key the result by its IP.
            outcome = ansible_service.test_ssh_connectivity(ips[0], user)
            return jsonify({
                'success': True,
                'results': {ips[0]: outcome}
            })

        # Multiple devices: run the bulk test and summarize the outcomes.
        results = ansible_service.bulk_ssh_test(ips)
        ok_count = sum(1 for entry in results.values() if entry.get('success'))
        total_count = len(results)

        return jsonify({
            'success': True,
            'results': results,
            'summary': {
                'successful': ok_count,
                'failed': total_count - ok_count,
                'total': total_count,
            },
        })

    except Exception as exc:
        logging.error(f"Error testing SSH connectivity: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False,
        }), 500
|
||||
|
||||
@ansible_bp.route('/ssh/keys/setup', methods=['POST'])
def setup_ssh_keys():
    """Generate/configure the SSH keys used for Ansible authentication."""
    try:
        return jsonify(ansible_service.setup_ssh_keys())
    except Exception as exc:
        logging.error(f"Error setting up SSH keys: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False,
        }), 500
|
||||
|
||||
@ansible_bp.route('/ssh/keys/public', methods=['GET'])
def get_public_key():
    """Return the controller's SSH public key for distribution to devices."""
    try:
        pub_path = ansible_service.ssh_key_path.with_suffix('.pub')

        if not pub_path.exists():
            return jsonify({
                'error': 'Public key not found. Run SSH setup first.',
                'success': False
            }), 404

        # Read and strip the single-line key material.
        key_text = pub_path.read_text().strip()

        return jsonify({
            'success': True,
            'public_key': key_text,
            'key_path': str(pub_path)
        })
    except Exception as exc:
        logging.error(f"Error getting public key: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False
        }), 500
|
||||
|
||||
@ansible_bp.route('/devices/status', methods=['GET'])
def get_devices_status():
    """Return every device record in the shape used by Ansible operations pages."""
    try:
        with get_db().get_session() as session:
            rows = session.query(Device).all()

            # Serialize inside the session so ORM attributes are loaded.
            payload = [
                {
                    'id': row.id,
                    'hostname': row.hostname,
                    'device_ip': row.device_ip,
                    'nume_masa': row.nume_masa,
                    'status': row.status,
                    'last_seen': row.last_seen.isoformat() if row.last_seen else None,
                    'device_type': row.device_type,
                    'os_version': row.os_version,
                    'location': row.location,
                }
                for row in rows
            ]

            return jsonify({
                'success': True,
                'devices': payload,
                'total_count': len(payload)
            })

    except Exception as exc:
        logging.error(f"Error getting devices status: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False
        }), 500
|
||||
|
||||
# Callback endpoints for Ansible playbooks
|
||||
@ansible_bp.route('/callback/update_complete', methods=['POST'])
def update_complete_callback():
    """Callback hit by playbooks when a device update finishes.

    Accepts any JSON payload; missing or invalid JSON is tolerated
    (previously request.get_json() returned None for non-JSON POSTs and
    data.get(...) raised AttributeError, turning the callback into a 500).
    """
    try:
        # silent=True returns None instead of raising on a bad body.
        data = request.get_json(silent=True) or {}
        logging.info(f"Update completed for {data.get('hostname')}: {data}")

        # You could update device status, send notifications, etc.
        return jsonify({'success': True})
    except Exception as e:
        logging.error(f"Error in update callback: {e}")
        return jsonify({'error': str(e)}), 500
|
||||
|
||||
@ansible_bp.route('/callback/service_restarted', methods=['POST'])
def service_restart_callback():
    """Callback hit by playbooks when a service restart completes.

    Accepts any JSON payload; missing or invalid JSON is tolerated
    (previously a non-JSON POST made data None and data.get(...) raised
    AttributeError, producing a 500).
    """
    try:
        # silent=True returns None instead of raising on a bad body.
        data = request.get_json(silent=True) or {}
        logging.info(f"Service restarted for {data.get('hostname')}: {data}")

        return jsonify({'success': True})
    except Exception as e:
        logging.error(f"Error in service restart callback: {e}")
        return jsonify({'error': str(e)}), 500
|
||||
373
app/api/logs.py
Normal file
373
app/api/logs.py
Normal file
@@ -0,0 +1,373 @@
|
||||
"""
|
||||
Enhanced API endpoints for logs with compression and file support
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
from werkzeug.utils import secure_filename
|
||||
import os
|
||||
import hashlib
|
||||
from datetime import datetime
|
||||
from app.services.log_service import LogCompressionService
|
||||
from app.services.file_service import FileUploadService
|
||||
from app.models import Device, LogEntry, FileUpload
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
# Create blueprint
|
||||
logs_bp = Blueprint('logs', __name__, url_prefix='/api/logs')
|
||||
|
||||
# Initialize services
|
||||
log_service = LogCompressionService()
|
||||
file_service = FileUploadService()
|
||||
|
||||
@logs_bp.route('/', methods=['POST'])
@logs_bp.route('/submit', methods=['POST'])
def submit_log():
    """
    Enhanced log submission with compression support

    Expected JSON:
    {
        "hostname": "device-01",
        "device_ip": "192.168.1.100",
        "nume_masa": "Masa-01",
        "log_message": "Card detected: ABC123",
        "severity": "info",  # optional: debug, info, warning, error, critical
        "source_file": "/path/to/logfile.log",  # optional
        "metadata": {}  # optional additional metadata
    }

    Returns 201 with compression info on success, 400 for validation
    errors, 500 when the log service fails or an exception escapes.
    """
    try:
        # Validate content type
        if not request.is_json:
            return jsonify({
                'error': 'Content-Type must be application/json',
                'success': False
            }), 400

        data = request.get_json()

        # Validate required fields
        required_fields = ['hostname', 'device_ip', 'nume_masa', 'log_message']
        missing_fields = [field for field in required_fields if not data.get(field)]

        if missing_fields:
            return jsonify({
                'error': f'Missing required fields: {", ".join(missing_fields)}',
                'success': False
            }), 400

        # Prepare device info. Optional attributes may arrive either at
        # the top level or nested under 'metadata'; top level wins.
        device_info = {
            'hostname': data['hostname'],
            'device_ip': data['device_ip'],
            'nume_masa': data['nume_masa'],
            # Optional – clients can send these to keep device records up to date
            'device_type': data.get('device_type') or (data.get('metadata') or {}).get('device_type'),
            'os_version': data.get('os_version') or (data.get('metadata') or {}).get('os_version'),
            'location': data.get('location') or (data.get('metadata') or {}).get('location'),
            'mac_address': data.get('mac_address') or (data.get('metadata') or {}).get('mac_address'),
        }

        # Process log with compression
        result = log_service.process_log_message(
            device_info=device_info,
            message=data['log_message'],
            severity=data.get('severity', 'info')
        )

        if result['success']:
            # Prepare response with compression info
            response_data = {
                'success': True,
                'message': 'Log processed successfully',
                'log_id': result['log_id'],
                'device_id': result['device_id'],
                'compression_info': result['compression']
            }

            # Add alias info if template was used
            if result['compression'].get('used_template'):
                response_data['template_alias'] = result['compression']['template_alias']

            # For clients: suggest using alias in future requests
            if result['compression'].get('new_template'):
                response_data['suggestion'] = f"For similar messages, you can use template alias: {result['compression']['template_alias']}"

            return jsonify(response_data), 201
        else:
            return jsonify(result), 500

    except Exception as e:
        logging.error(f"Error in submit_log: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/template/<alias>', methods=['POST'])
def submit_templated_log(alias):
    """
    Submit log using template alias (smaller payload)

    Expected JSON:
    {
        "alias": "CD001",
        "variables": {"card_id": "ABC123"},
        "device_info": {
            "hostname": "device-01",
            "device_ip": "192.168.1.100",
            "nume_masa": "Masa-01"
        }
    }

    Args:
        alias: template alias captured from the URL rule. BUGFIX: Flask
            passes URL rule variables as keyword arguments, so the view
            must accept ``alias``; the previous zero-argument signature
            raised TypeError (HTTP 500) on every request before the
            request.view_args fallback could run.
    """
    try:
        data = request.get_json()

        # Validate required fields
        if not data or not data.get('device_info'):
            return jsonify({
                'error': 'device_info is required',
                'success': False
            }), 400

        # Get template message
        variables = data.get('variables', {})
        full_message = log_service.get_message_by_alias(alias, variables)

        if not full_message:
            return jsonify({
                'error': f'Template alias {alias} not found',
                'success': False
            }), 404

        # Process as regular log
        result = log_service.process_log_message(
            device_info=data['device_info'],
            message=full_message,
            severity=data.get('severity', 'info')
        )

        return jsonify(result), 201 if result['success'] else 500

    except Exception as e:
        logging.error(f"Error in submit_templated_log: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/file', methods=['POST'])
def upload_log_file():
    """
    Upload log file for processing

    Expects multipart/form-data with:
    - file: log file
    - device_info: JSON string with device information
    """
    try:
        # Both a missing file part and an empty filename are client errors.
        if 'file' not in request.files:
            return jsonify({
                'error': 'No file uploaded',
                'success': False
            }), 400

        upload = request.files['file']
        if upload.filename == '':
            return jsonify({
                'error': 'No file selected',
                'success': False
            }), 400

        # device_info arrives as a JSON string in the form data.
        raw_info = request.form.get('device_info')
        if not raw_info:
            return jsonify({
                'error': 'device_info is required',
                'success': False
            }), 400

        import json
        info = json.loads(raw_info)

        # Hand the file and its device metadata to the upload service.
        outcome = file_service.process_uploaded_file(upload, info)

        return jsonify(outcome), 201 if outcome['success'] else 500

    except Exception as exc:
        logging.error(f"Error in upload_log_file: {exc}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/query', methods=['GET'])
def query_logs():
    """
    Query logs with filters and pagination

    Query parameters:
    - device_id: Filter by device ID
    - hostname: Filter by hostname
    - severity: Filter by severity level
    - start_time: Start time (ISO format)
    - end_time: End time (ISO format)
    - limit: Number of results (default 100)
    - offset: Offset for pagination (default 0)
    - include_template: Include resolved template messages (default true)

    Returns newest-first log entries plus pagination metadata. Malformed
    numeric/date parameters surface as a 500 via the generic handler.
    """
    try:
        with get_db().get_session() as session:
            # Build query
            query = session.query(LogEntry).join(Device)

            # Apply filters
            if request.args.get('device_id'):
                query = query.filter(LogEntry.device_id == int(request.args.get('device_id')))

            if request.args.get('hostname'):
                query = query.filter(Device.hostname == request.args.get('hostname'))

            if request.args.get('severity'):
                query = query.filter(LogEntry.severity == request.args.get('severity'))

            if request.args.get('start_time'):
                start_time = datetime.fromisoformat(request.args.get('start_time'))
                query = query.filter(LogEntry.timestamp >= start_time)

            if request.args.get('end_time'):
                end_time = datetime.fromisoformat(request.args.get('end_time'))
                query = query.filter(LogEntry.timestamp <= end_time)

            # Pagination
            limit = min(int(request.args.get('limit', 100)), 1000)  # Max 1000
            offset = int(request.args.get('offset', 0))

            # Order by timestamp descending
            query = query.order_by(LogEntry.timestamp.desc())

            # Get total count (before limit/offset so has_more is correct)
            total_count = query.count()

            # Apply pagination
            logs = query.limit(limit).offset(offset).all()

            # Format response
            include_template = request.args.get('include_template', 'true').lower() == 'true'
            log_data = []

            for log in logs:
                log_item = {
                    'id': log.id,
                    'device': {
                        'id': log.device.id,
                        'hostname': log.device.hostname,
                        'device_ip': log.device.device_ip,
                        'nume_masa': log.device.nume_masa
                    },
                    'timestamp': log.timestamp.isoformat(),
                    'severity': log.severity
                }

                # Template-backed entries expose the alias/category;
                # otherwise fall back to the stored full message.
                if include_template and log.template:
                    log_item['message'] = log.resolved_message
                    log_item['template_alias'] = log.template.alias
                    log_item['template_category'] = log.template.category
                else:
                    log_item['message'] = log.full_message or log.resolved_message

                log_data.append(log_item)

            return jsonify({
                'success': True,
                'logs': log_data,
                'pagination': {
                    'total_count': total_count,
                    'limit': limit,
                    'offset': offset,
                    'has_more': offset + limit < total_count
                }
            })

    except Exception as e:
        logging.error(f"Error in query_logs: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/stats', methods=['GET'])
def get_log_stats():
    """Get logging and compression statistics.

    Combines the log service's compression metrics with device counts and
    the number of log entries received in the last hour.
    """
    try:
        compression_stats = log_service.get_compression_stats()

        with get_db().get_session() as session:
            from datetime import datetime, timedelta

            # Device counts, grouped for the response payload.
            device_totals = {
                'active': session.query(Device).filter_by(status='active').count(),
                'total': session.query(Device).count(),
            }

            # Activity over the trailing hour.
            cutoff = datetime.utcnow() - timedelta(hours=1)
            logs_last_hour = (
                session.query(LogEntry)
                .filter(LogEntry.timestamp >= cutoff)
                .count()
            )

            return jsonify({
                'success': True,
                'compression': compression_stats,
                'devices': device_totals,
                'activity': {
                    'logs_last_hour': logs_last_hour,
                },
            })

    except Exception as e:
        logging.error(f"Error in get_log_stats: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/templates', methods=['GET'])
def get_templates():
    """Get available message templates and aliases.

    Templates are returned most-used first so the common aliases appear at
    the top of the list.
    """
    try:
        with get_db().get_session() as session:
            from app.models import MessageTemplate

            rows = (
                session.query(MessageTemplate)
                .order_by(MessageTemplate.usage_count.desc())
                .all()
            )

            def _serialize(tpl):
                """Flatten one template row into a JSON-safe dict."""
                return {
                    'alias': tpl.alias,
                    'category': tpl.category,
                    'template_text': tpl.template_text,
                    'usage_count': tpl.usage_count,
                    'created_at': tpl.created_at.isoformat(),
                }

            template_data = [_serialize(tpl) for tpl in rows]

            return jsonify({
                'success': True,
                'templates': template_data,
                'total_count': len(template_data),
            })

    except Exception as e:
        logging.error(f"Error in get_templates: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
170
app/api/wmt.py
Normal file
170
app/api/wmt.py
Normal file
@@ -0,0 +1,170 @@
|
||||
"""
|
||||
WMT (Workstation Management Terminal) configuration API
|
||||
Handles config distribution and device update requests from WMT clients.
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
from datetime import datetime
|
||||
from app.models import WMTGlobalConfig, Device, WMTUpdateRequest
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
wmt_api_bp = Blueprint('wmt_api', __name__, url_prefix='/api/wmt')
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _get_or_create_global_config(session):
    """Return the single WMTGlobalConfig row, creating it with defaults if absent.

    The table is intended to hold exactly one row shared by all devices;
    flush() is used (not commit) so the caller's session controls the
    transaction boundary.
    """
    config = session.query(WMTGlobalConfig).first()
    if config is not None:
        return config

    # First access ever: materialize the defaults row.
    config = WMTGlobalConfig()
    session.add(config)
    session.flush()
    return config
|
||||
|
||||
|
||||
def _latest_config_ts(session, mac_address):
    """Return (global_ts, device_ts, latest) for config-freshness checks.

    ``global_ts`` is the global config's last update, ``device_ts`` is the
    device's admin-review timestamp (``info_reviewed_at``), and ``latest``
    is the newer of the two.  The Unix epoch stands in for "never set".
    """
    epoch = datetime(1970, 1, 1)

    global_cfg = session.query(WMTGlobalConfig).first()
    global_ts = epoch
    if global_cfg is not None and global_cfg.updated_at:
        global_ts = global_cfg.updated_at

    # info_reviewed_at is the authoritative device-level timestamp.
    device = session.query(Device).filter_by(mac_address=mac_address).first()
    device_ts = epoch
    if device is not None and device.info_reviewed_at:
        device_ts = device.info_reviewed_at

    return global_ts, device_ts, max(global_ts, device_ts)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@wmt_api_bp.route('/config/timestamp', methods=['GET'])
def get_config_timestamp():
    """
    Returns the last-modified timestamps for global config and this device's config.
    Query param: mac=<mac_address>

    Response:
    {
        "global_updated_at": "2026-04-22T10:00:00",         // null if never updated
        "device_info_reviewed_at": "2026-04-22T09:00:00",   // null if device unknown
        "latest_updated_at": "2026-04-22T10:00:00"
    }
    """
    mac = request.args.get('mac', '').strip().lower()
    if not mac:
        return jsonify({'error': 'mac query parameter is required'}), 400

    # _latest_config_ts uses the Unix epoch as a "never set" sentinel;
    # render it as null to the client instead of a fake 1970 timestamp.
    epoch = datetime(1970, 1, 1)
    try:
        with get_db().get_session() as session:
            global_ts, device_info_reviewed_ts, latest = _latest_config_ts(session, mac)

            return jsonify({
                'global_updated_at': global_ts.isoformat() if global_ts != epoch else None,
                'device_info_reviewed_at': device_info_reviewed_ts.isoformat() if device_info_reviewed_ts != epoch else None,
                'latest_updated_at': latest.isoformat(),
            }), 200
    except Exception as e:
        logger.error(f'Error getting WMT config timestamp: {e}')
        return jsonify({'error': str(e)}), 500
|
||||
|
||||
|
||||
@wmt_api_bp.route('/config/<mac_address>', methods=['GET'])
def get_device_config(mac_address):
    """
    Return the merged configuration (global settings plus device-specific
    fields) for the device with the given MAC address.

    The WMT client calls this at startup to refresh its local config.txt;
    the response is a flat dict consumable by the client's config writer.
    """
    mac = mac_address.strip().lower()
    try:
        with get_db().get_session() as session:
            global_cfg = _get_or_create_global_config(session)
            device = session.query(Device).filter_by(mac_address=mac).first()

            if device is not None:
                # Serving config counts as a heartbeat from the client.
                device.last_seen = datetime.utcnow()

            _, device_ts, latest_ts = _latest_config_ts(session, mac)

            def _dev(attr):
                """Device attribute, or '' when the device is unregistered."""
                return getattr(device, attr) if device is not None else ''

            # Admin-review timestamp; the epoch string is the client's
            # "never reviewed" sentinel stored in its [device] section.
            reviewed_at = '1970-01-01T00:00:00'
            if device is not None and device.info_reviewed_at:
                reviewed_at = device.info_reviewed_at.isoformat()

            payload = {
                # Global settings
                'chrome_url': global_cfg.chrome_url,
                'chrome_local_url': global_cfg.chrome_local_url or '',
                'chrome_insecure_origin': global_cfg.chrome_insecure_origin,
                'card_api_base_url': global_cfg.card_api_base_url,
                'server_log_url': global_cfg.server_log_url,
                'internet_check_host': global_cfg.internet_check_host,
                'update_host': global_cfg.update_host,
                'update_user': global_cfg.update_user,
                # Device-specific settings (empty string if unknown)
                'device_name': _dev('device_name'),
                'hostname': _dev('hostname'),
                'device_ip': _dev('device_ip'),
                'location': _dev('location'),
                'info_reviewed_at': reviewed_at,
                # Sync metadata
                'config_updated_at': latest_ts.isoformat(),
            }
            return jsonify(payload), 200
    except Exception as e:
        logger.error(f'Error fetching WMT config for {mac}: {e}')
        return jsonify({'error': str(e)}), 500
|
||||
|
||||
|
||||
@wmt_api_bp.route('/config/update_request', methods=['POST'])
def submit_update_request():
    """
    Accept a WMT client's current device info as an update request that an
    admin must approve before it is applied.

    Expected JSON:
    {
        "mac_address": "b8:27:eb:aa:bb:cc",
        "device_name": "Masa-01",
        "hostname": "rpi-masa01",
        "device_ip": "192.168.1.100",
        "client_config_mtime": "2026-04-22T09:30:00"  // optional
    }
    """
    # Guard clauses: reject non-JSON bodies and missing MAC up front.
    if not request.is_json:
        return jsonify({'error': 'Content-Type must be application/json'}), 400

    data = request.get_json()
    mac = (data.get('mac_address') or '').strip().lower()
    if not mac:
        return jsonify({'error': 'mac_address is required'}), 400

    try:
        with get_db().get_session() as session:
            device = session.query(Device).filter_by(mac_address=mac).first()

            session.add(WMTUpdateRequest(
                mac_address=mac,
                # The device may not be registered yet, hence nullable FK.
                device_id=None if device is None else device.id,
                proposed_device_name=data.get('device_name'),
                proposed_hostname=data.get('hostname'),
                proposed_device_ip=data.get('device_ip'),
                client_config_mtime=data.get('client_config_mtime'),
                submitted_at=datetime.utcnow(),
                status='pending',
            ))

            if device is not None:
                # Any contact from the client counts as a heartbeat.
                device.last_seen = datetime.utcnow()

            logger.info(f'WMT update request received from {mac}')
            return jsonify({'status': 'received', 'message': 'Update request queued for admin review'}), 201
    except Exception as e:
        logger.error(f'Error saving WMT update request from {mac}: {e}')
        return jsonify({'error': str(e)}), 500
|
||||
629
app/models/__init__.py
Normal file
629
app/models/__init__.py
Normal file
@@ -0,0 +1,629 @@
|
||||
"""
|
||||
Database models for enhanced server monitoring system
|
||||
"""
|
||||
from sqlalchemy import Column, Integer, String, DateTime, Text, Boolean, ForeignKey, LargeBinary, Float, Table
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.orm import relationship
|
||||
from datetime import datetime
|
||||
import json
|
||||
import hashlib
|
||||
from cryptography.fernet import Fernet
|
||||
import base64
|
||||
import os
|
||||
|
||||
# Shared declarative base for every model in this module.
Base = declarative_base()

# Association table for many-to-many relationship between inventory groups
# and devices.  The composite primary key prevents duplicate pairings.
device_inventory_association = Table(
    'device_inventory_groups',
    Base.metadata,
    Column('device_id', Integer, ForeignKey('devices.id'), primary_key=True),
    Column('group_id', Integer, ForeignKey('inventory_groups.id'), primary_key=True)
)

# Export all models (public API of `app.models`).
__all__ = [
    'Base', 'Device', 'MessageTemplate', 'LogEntry', 'FileUpload',
    'AnsibleExecution', 'SystemStats', 'InventoryGroup', 'PlaybookExecution',
    'PlaybookHostResult', 'ExecutionQueue', 'device_inventory_association',
    'WMTGlobalConfig', 'WMTUpdateRequest',
]
|
||||
|
||||
class Device(Base):
    """Device information and metadata.

    Central registry row for a monitored workstation; log entries, file
    uploads, inventory-group membership and WMT update requests all
    reference this table.
    """
    __tablename__ = 'devices'

    id = Column(Integer, primary_key=True)
    hostname = Column(String(255), nullable=False)
    device_ip = Column(String(45), nullable=False)  # Support IPv6 (45 chars covers mapped forms)
    nume_masa = Column(String(100), nullable=False)  # Workstation label ("nume masa" = table name, Romanian)

    # Enhanced device metadata
    device_type = Column(String(50), default='unknown')
    os_version = Column(String(100))
    last_seen = Column(DateTime, default=datetime.utcnow)  # Refreshed whenever the device contacts the server
    status = Column(String(20), default='active')  # active, inactive, maintenance
    location = Column(String(200))
    description = Column(Text)

    # WMT (Workstation Management Terminal) integration fields
    mac_address = Column(String(17), unique=True, nullable=True, index=True)  # Colon-separated MAC; WMT lookup key
    config_updated_at = Column(DateTime)
    # Epoch default = "never reviewed"; WMT sync treats it as stale.
    info_reviewed_at = Column(DateTime, default=lambda: datetime(1970, 1, 1))

    # Relationships
    logs = relationship("LogEntry", back_populates="device")
    files = relationship("FileUpload", back_populates="device")
    inventory_groups = relationship("InventoryGroup", secondary=device_inventory_association, back_populates="devices")
    # Newest requests first; deleting a device deletes its pending requests.
    update_requests = relationship('WMTUpdateRequest', back_populates='device',
                                   cascade='all, delete-orphan',
                                   order_by='WMTUpdateRequest.submitted_at.desc()')

    @property
    def device_name(self):
        """Alias for nume_masa - used by the WMT module."""
        return self.nume_masa

    def __repr__(self):
        return f"<Device(hostname='{self.hostname}', ip='{self.device_ip}')>"
|
||||
|
||||
class MessageTemplate(Base):
    """Message templates for compression and aliases.

    Repeated log messages are stored once here; LogEntry rows then reference
    the template instead of duplicating the text.
    """
    __tablename__ = 'message_templates'

    id = Column(Integer, primary_key=True)
    template_hash = Column(String(64), unique=True, nullable=False)  # SHA-256 hash (hex) for dedup lookup
    template_text = Column(Text, nullable=False)
    category = Column(String(50), nullable=False)  # error, info, warning, system
    alias = Column(String(20), unique=True)  # Short alias like "SYS001"
    usage_count = Column(Integer, default=0)  # How many log entries reference this template
    created_at = Column(DateTime, default=datetime.utcnow)

    # Relationships
    log_entries = relationship("LogEntry", back_populates="template")

    @staticmethod
    def create_hash(message: str) -> str:
        """Return the SHA-256 hex digest used as the dedup key for *message*."""
        return hashlib.sha256(message.encode('utf-8')).hexdigest()

    def __repr__(self):
        return f"<MessageTemplate(alias='{self.alias}', category='{self.category}')>"
|
||||
|
||||
class LogEntry(Base):
    """Compressed log entries with template references.

    A row stores either a reference to a MessageTemplate (plus optional
    substitution variables) or, for unique messages, the full text.
    """
    __tablename__ = 'log_entries'

    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey('devices.id'), nullable=False)
    template_id = Column(Integer, ForeignKey('message_templates.id'))  # Null when full_message is used

    # Original fields
    timestamp = Column(DateTime, default=datetime.utcnow, nullable=False)
    severity = Column(String(20), default='info')  # debug, info, warning, error, critical

    # Compressed message storage
    full_message = Column(Text)  # Only for unique messages not in templates
    template_variables = Column(Text)  # JSON dict for template variable substitution

    # Enhanced metadata
    source_file = Column(String(255))  # If log comes from an uploaded file
    line_number = Column(Integer)
    process_id = Column(Integer)
    thread_id = Column(String(50))

    # Relationships
    device = relationship("Device", back_populates="logs")
    template = relationship("MessageTemplate", back_populates="log_entries")

    @property
    def resolved_message(self):
        """Return the full human-readable message.

        Prefers the template (with variables substituted) and falls back to
        the stored full_message.
        NOTE(review): .format(**variables) will raise KeyError/ValueError if
        the stored variables don't match the template placeholders — callers
        appear to rely on route-level exception handlers; confirm.
        """
        if self.template:
            if self.template_variables:
                variables = json.loads(self.template_variables)
                return self.template.template_text.format(**variables)
            return self.template.template_text
        return self.full_message

    def __repr__(self):
        return f"<LogEntry(device_id={self.device_id}, timestamp='{self.timestamp}')>"
|
||||
|
||||
class FileUpload(Base):
    """File upload tracking and metadata.

    One row per file received from a device, covering storage location,
    provenance, and the asynchronous processing state.
    """
    __tablename__ = 'file_uploads'

    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey('devices.id'), nullable=False)

    # File metadata
    filename = Column(String(255), nullable=False)  # Name as stored on the server
    original_filename = Column(String(255), nullable=False)  # Name as sent by the client
    file_path = Column(String(500), nullable=False)  # Server storage path
    file_size = Column(Integer, nullable=False)  # Bytes
    file_hash = Column(String(64))  # SHA-256 for deduplication
    mime_type = Column(String(100))

    # Upload metadata
    upload_date = Column(DateTime, default=datetime.utcnow)
    upload_ip = Column(String(45))
    upload_user_agent = Column(String(500))

    # Processing status
    processed = Column(Boolean, default=False)
    processing_status = Column(String(50), default='pending')  # pending, processing, completed, error
    processing_error = Column(Text)

    # File content analysis (for logs/config files)
    is_log_file = Column(Boolean, default=False)
    log_entries_extracted = Column(Integer, default=0)  # LogEntry rows parsed out of this file

    # Relationships
    device = relationship("Device", back_populates="files")

    def __repr__(self):
        return f"<FileUpload(filename='{self.filename}', device_id={self.device_id})>"
|
||||
|
||||
class AnsibleExecution(Base):
    """
    DEPRECATED MODEL - DO NOT USE FOR NEW DEVELOPMENT

    This model is kept only for backward compatibility and data migration.
    All new automation functionality should use PlaybookExecution instead.

    This table will be removed in a future version after data migration.
    """
    __tablename__ = 'ansible_executions'

    id = Column(Integer, primary_key=True)
    playbook_name = Column(String(200), nullable=False)
    target_devices = Column(Text)  # JSON list of device IDs/IPs

    # Execution details
    command_line = Column(Text, nullable=False)
    execution_user = Column(String(100))
    start_time = Column(DateTime, default=datetime.utcnow)
    end_time = Column(DateTime)
    status = Column(String(20), default='running')  # running, completed, failed, cancelled
    exit_code = Column(Integer)

    # Output and logs
    stdout_log = Column(Text)
    stderr_log = Column(Text)
    ansible_log_file = Column(String(500))

    # Results summary
    successful_hosts = Column(Integer, default=0)
    failed_hosts = Column(Integer, default=0)
    unreachable_hosts = Column(Integer, default=0)

    # Relationships - Note: This class is deprecated, use PlaybookExecution instead

    @classmethod
    def migrate_to_new_model(cls, session):
        """Migrate this execution to the new PlaybookExecution model.

        Placeholder hook used by migration scripts; intentionally a no-op
        here.
        """
        # This method is used by migration scripts
        pass

    def __repr__(self):
        return f"<AnsibleExecution(DEPRECATED)(playbook='{self.playbook_name}')>"
|
||||
|
||||
class SystemStats(Base):
    """System statistics and metrics.

    Point-in-time resource snapshot reported by a device; one row per
    sample.
    """
    __tablename__ = 'system_stats'

    id = Column(Integer, primary_key=True)
    device_id = Column(Integer, ForeignKey('devices.id'), nullable=False)

    # System metrics
    timestamp = Column(DateTime, default=datetime.utcnow)
    cpu_usage = Column(Float)  # presumably percent — TODO confirm against reporter
    memory_usage = Column(Float)
    disk_usage = Column(Float)
    network_in = Column(Integer)
    network_out = Column(Integer)
    load_average = Column(Float)
    uptime = Column(Integer)  # seconds

    # Process counts
    total_processes = Column(Integer)
    running_processes = Column(Integer)

    # Temperature (for Raspberry Pi)
    cpu_temperature = Column(Float)

    # Relationships
    device = relationship("Device")

    def __repr__(self):
        return f"<SystemStats(device_id={self.device_id}, timestamp='{self.timestamp}')>"
|
||||
|
||||
class InventoryGroup(Base):
    """Ansible inventory groups with encrypted SSH credentials.

    SSH passwords are stored Fernet-encrypted; the symmetric key lives in
    ``data/.ssh_encrypt_key`` and is generated on first use.
    """
    __tablename__ = 'inventory_groups'

    id = Column(Integer, primary_key=True)
    name = Column(String(100), unique=True, nullable=False)
    description = Column(Text)

    # SSH Connection details
    ssh_user = Column(String(100), default='pi')
    ssh_port = Column(Integer, default=22)
    ssh_key_file = Column(String(500))  # Path to SSH key
    ssh_password_encrypted = Column(LargeBinary)  # Fernet-encrypted password

    # Group settings
    ansible_vars = Column(Text)  # JSON for group variables
    is_enabled = Column(Boolean, default=True)
    created_at = Column(DateTime, default=datetime.utcnow)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Relationships
    devices = relationship("Device", secondary=device_inventory_association, back_populates="inventory_groups")
    executions = relationship("PlaybookExecution", back_populates="inventory_group")

    def set_ssh_password(self, password: str):
        """Encrypt and store the SSH password (no-op for empty/None input)."""
        if password:
            key = self._get_encryption_key()
            f = Fernet(key)
            self.ssh_password_encrypted = f.encrypt(password.encode())

    def get_ssh_password(self):
        """Decrypt and return the SSH password, or None if none is stored."""
        if self.ssh_password_encrypted:
            key = self._get_encryption_key()
            f = Fernet(key)
            return f.decrypt(self.ssh_password_encrypted).decode()
        return None

    def _get_encryption_key(self) -> bytes:
        """Get or generate the Fernet encryption key.

        The key protects SSH passwords, so on first generation it is written
        with owner-only permissions (0o600); the containing directory is
        created if missing.
        """
        key_file = 'data/.ssh_encrypt_key'
        if os.path.exists(key_file):
            with open(key_file, 'rb') as f:
                return f.read()

        # Generate new key; restrict permissions so other local users
        # cannot read it, and make sure data/ exists first.
        os.makedirs(os.path.dirname(key_file), exist_ok=True)
        key = Fernet.generate_key()
        fd = os.open(key_file, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd, 'wb') as f:
            f.write(key)
        return key

    def __repr__(self):
        return f"<InventoryGroup(name='{self.name}', devices={len(self.devices)})>"
|
||||
|
||||
class PlaybookExecution(Base):
    """
    Enhanced playbook execution with queue management and comprehensive tracking.

    This is the primary model for all automation execution tracking.
    Replaces the deprecated AnsibleExecution model.
    """
    __tablename__ = 'playbook_executions'

    id = Column(Integer, primary_key=True)
    execution_id = Column(String(36), unique=True, nullable=False)  # UUID; also the FK target for host results/queue
    playbook_name = Column(String(200), nullable=False)
    playbook_description = Column(Text)  # User-friendly description
    inventory_group_id = Column(Integer, ForeignKey('inventory_groups.id'))
    target_hosts = Column(Text)  # JSON list of specific hosts

    # Execution details
    command_line = Column(Text)
    extra_vars = Column(Text)  # JSON
    execution_user = Column(String(100))
    execution_ip = Column(String(45))

    # Enhanced timing
    queued_at = Column(DateTime, default=datetime.utcnow)
    started_at = Column(DateTime)
    completed_at = Column(DateTime)
    estimated_duration = Column(Integer)  # Estimated seconds

    # Enhanced status tracking
    status = Column(String(20), default='queued')  # queued, running, completed, failed, cancelled, timeout
    queue_position = Column(Integer, default=0)
    priority = Column(Integer, default=5)  # 1-10, higher = more priority
    pid = Column(Integer)  # Process ID when running
    exit_code = Column(Integer)
    retry_count = Column(Integer, default=0)
    max_retries = Column(Integer, default=0)  # Default 0 means no automatic retries

    # Enhanced output and logs
    stdout_log = Column(Text)
    stderr_log = Column(Text)
    ansible_log_file = Column(String(500))
    summary_message = Column(Text)  # User-friendly summary, maintained by update_summary()

    # Enhanced results summary
    total_hosts = Column(Integer, default=0)
    successful_hosts = Column(Integer, default=0)
    failed_hosts = Column(Integer, default=0)
    unreachable_hosts = Column(Integer, default=0)
    skipped_hosts = Column(Integer, default=0)
    changed_hosts = Column(Integer, default=0)  # Hosts where changes were made

    # Relationships
    inventory_group = relationship("InventoryGroup", back_populates="executions")
    host_results = relationship("PlaybookHostResult", back_populates="execution", cascade="all, delete-orphan")

    # Properties for better UX
    @property
    def duration(self):
        """Execution duration in seconds, or None while not finished."""
        if self.started_at and self.completed_at:
            return (self.completed_at - self.started_at).total_seconds()
        return None

    @property
    def duration_formatted(self):
        """Human-readable duration ("42s", "3m 10s", "1h 5m"), or "N/A"."""
        duration = self.duration
        if duration is None:
            return "N/A"

        if duration < 60:
            return f"{int(duration)}s"
        elif duration < 3600:
            mins = int(duration // 60)
            secs = int(duration % 60)
            return f"{mins}m {secs}s"
        else:
            hours = int(duration // 3600)
            mins = int((duration % 3600) // 60)
            return f"{hours}h {mins}m"

    @property
    def success_rate(self):
        """Success rate as a percentage (0 when no hosts were targeted)."""
        if self.total_hosts > 0:
            return round((self.successful_hosts / self.total_hosts) * 100, 1)
        return 0

    @property
    def status_display(self):
        """User-friendly status display (emoji + label); raw status if unknown."""
        status_map = {
            'queued': '⏳ Queued',
            'running': '🔄 Running',
            'completed': '✅ Completed',
            'failed': '❌ Failed',
            'cancelled': '🚫 Cancelled',
            'timeout': '⏰ Timeout'
        }
        return status_map.get(self.status, self.status)

    @property
    def is_running(self):
        """True while the execution is queued or actively running."""
        return self.status in ['queued', 'running']

    @property
    def is_finished(self):
        """True once the execution reached a terminal status."""
        return self.status in ['completed', 'failed', 'cancelled', 'timeout']

    @property
    def can_retry(self):
        """True when a failed/timed-out execution still has retries left."""
        return (self.status in ['failed', 'timeout'] and
                self.retry_count < self.max_retries)

    def get_host_summary(self):
        """Return the per-outcome host counters as a plain dict."""
        return {
            'total': self.total_hosts,
            'successful': self.successful_hosts,
            'failed': self.failed_hosts,
            'unreachable': self.unreachable_hosts,
            'skipped': self.skipped_hosts,
            'changed': self.changed_hosts
        }

    def get_failed_hosts(self):
        """Return the PlaybookHostResult rows whose status is 'failed'."""
        return [result for result in self.host_results
                if result.status == 'failed']

    def get_status_color(self):
        """Return the Bootstrap text-color class matching the current status."""
        color_map = {
            'queued': 'text-warning',
            'running': 'text-info',
            'completed': 'text-success',
            'failed': 'text-danger',
            'cancelled': 'text-secondary',
            'timeout': 'text-warning'
        }
        return color_map.get(self.status, 'text-secondary')

    def update_summary(self):
        """Refresh summary_message from the current status and host counters.

        NOTE(review): 'cancelled' and 'timeout' statuses leave the previous
        summary in place — confirm that is intentional.
        """
        if self.status == 'completed':
            if self.failed_hosts == 0:
                self.summary_message = f"✅ Successfully executed on all {self.successful_hosts} hosts"
            else:
                self.summary_message = f"⚠️ Completed with {self.failed_hosts} failures out of {self.total_hosts} hosts"
        elif self.status == 'failed':
            self.summary_message = f"❌ Execution failed: {self.failed_hosts}/{self.total_hosts} hosts failed"
        elif self.status == 'running':
            self.summary_message = f"🔄 Executing on {self.total_hosts} hosts..."
        elif self.status == 'queued':
            self.summary_message = f"⏳ Queued for execution on {self.total_hosts} hosts"

    def __repr__(self):
        return f"<PlaybookExecution(id='{self.execution_id}', playbook='{self.playbook_name}', status='{self.status}')>"
|
||||
|
||||
class PlaybookHostResult(Base):
    """Individual host results for playbook executions.

    One row per (execution, host) pair, linked to PlaybookExecution via the
    UUID execution_id.
    """
    __tablename__ = 'playbook_host_results'

    id = Column(Integer, primary_key=True)
    execution_id = Column(String(36), ForeignKey('playbook_executions.execution_id'), nullable=False)
    device_id = Column(Integer, ForeignKey('devices.id'), nullable=False)
    hostname = Column(String(255), nullable=False)

    # Result details
    status = Column(String(20), nullable=False)  # ok, failed, unreachable, skipped
    changed = Column(Boolean, default=False)  # True if any task changed state on this host
    failed_tasks = Column(Integer, default=0)
    total_tasks = Column(Integer, default=0)

    # Timing
    start_time = Column(DateTime)
    end_time = Column(DateTime)

    # Output specific to this host
    host_output = Column(Text)
    error_message = Column(Text)

    # Task results summary
    task_results = Column(Text)  # JSON with per-task results

    # Relationships
    execution = relationship("PlaybookExecution", back_populates="host_results")
    device = relationship("Device")

    @property
    def duration(self):
        """Host execution duration in seconds, or None while not finished."""
        if self.start_time and self.end_time:
            return (self.end_time - self.start_time).total_seconds()
        return None

    @property
    def success_rate(self):
        """Task success rate for this host as a percentage (0 with no tasks)."""
        if self.total_tasks > 0:
            successful_tasks = self.total_tasks - self.failed_tasks
            return (successful_tasks / self.total_tasks) * 100
        return 0

    def __repr__(self):
        return f"<PlaybookHostResult(hostname='{self.hostname}', status='{self.status}')>"
|
||||
|
||||
class ExecutionQueue(Base):
    """Queue management for background playbook executions.

    Holds ordering, priority, scheduling and dependency metadata for
    PlaybookExecution rows waiting to run.
    """
    __tablename__ = 'execution_queue'

    id = Column(Integer, primary_key=True)
    execution_id = Column(String(36), ForeignKey('playbook_executions.execution_id'), nullable=False)
    queue_position = Column(Integer, nullable=False, default=0)
    priority = Column(Integer, default=5)  # 1-10, higher = more priority

    # Queue metadata
    queued_by = Column(String(100))
    queued_at = Column(DateTime, default=datetime.utcnow)
    scheduled_for = Column(DateTime)  # For scheduled executions

    # Dependencies
    depends_on = Column(String(36), ForeignKey('playbook_executions.execution_id'))  # Wait for this execution

    # Status
    is_active = Column(Boolean, default=True)

    # Relationships — two FKs point at the same table, so foreign_keys
    # disambiguates which column each relationship uses.
    execution = relationship("PlaybookExecution", foreign_keys=[execution_id])
    dependency = relationship("PlaybookExecution", foreign_keys=[depends_on])

    def __repr__(self):
        return f"<ExecutionQueue(execution_id='{self.execution_id}', position={self.queue_position})>"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# WMT (Workstation Management Terminal) configuration models
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class WMTGlobalConfig(Base):
    """Global WMT application settings - one row shared by all devices.

    The WMT API layer treats this as a singleton table (see
    _get_or_create_global_config in app/api/wmt.py).
    """
    __tablename__ = 'wmt_global_config'

    id = Column(Integer, primary_key=True)

    # Chrome launch URLs
    chrome_url = Column(String(500), nullable=False,
                        default='http://10.76.140.17/iweb_v2/index.php/traceability/production')
    chrome_local_url = Column(String(500))  # optional local / fallback URL
    chrome_insecure_origin = Column(String(200), default='http://10.76.140.17')

    # Card API
    card_api_base_url = Column(String(500), nullable=False,
                               default='https://dataswsibiusb01.sibiusb.harting.intra/RO_Quality_PRD/api/record')

    # Server connectivity
    server_log_url = Column(String(500), default='http://rpi-ansible:80/logs')
    internet_check_host = Column(String(200), default='10.76.140.17')
    update_host = Column(String(200), default='rpi-ansible')
    update_user = Column(String(100), default='pi')

    # Metadata
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)  # Drives client config-sync checks
    updated_by = Column(String(100), default='admin')
    notes = Column(Text)

    def to_dict(self):
        """Return a JSON-serializable dict of every configurable field."""
        return {
            'id': self.id,
            'chrome_url': self.chrome_url,
            'chrome_local_url': self.chrome_local_url,
            'chrome_insecure_origin': self.chrome_insecure_origin,
            'card_api_base_url': self.card_api_base_url,
            'server_log_url': self.server_log_url,
            'internet_check_host': self.internet_check_host,
            'update_host': self.update_host,
            'update_user': self.update_user,
            'updated_at': self.updated_at.isoformat() if self.updated_at else None,
            'updated_by': self.updated_by,
            'notes': self.notes,
        }

    def __repr__(self):
        return f"<WMTGlobalConfig(chrome_url='{self.chrome_url}')>"
|
||||
|
||||
|
||||
class WMTUpdateRequest(Base):
    """Device-initiated update request awaiting admin approval."""
    __tablename__ = 'wmt_update_requests'

    id = Column(Integer, primary_key=True)

    # Foreign key to Device (nullable – the device may not be registered yet);
    # the MAC address is always stored so the request can be matched later.
    device_id = Column(Integer, ForeignKey('devices.id'), nullable=True)
    mac_address = Column(String(17), nullable=False, index=True)

    # Data proposed by the device
    proposed_device_name = Column(String(100))
    proposed_hostname = Column(String(255))
    proposed_device_ip = Column(String(45))

    # Request metadata
    submitted_at = Column(DateTime, default=datetime.utcnow)
    client_config_mtime = Column(String(30))  # ISO timestamp from the client

    # Admin decision: pending | accepted | rejected
    status = Column(String(20), default='pending')
    admin_reviewed_at = Column(DateTime)
    admin_notes = Column(Text)

    # Relationship back to the registered device (if any)
    device = relationship('Device', back_populates='update_requests')

    def to_dict(self):
        """Serialise the row for JSON responses (timestamps as ISO strings)."""
        plain_fields = (
            'id', 'device_id', 'mac_address', 'proposed_device_name',
            'proposed_hostname', 'proposed_device_ip',
        )
        payload = {name: getattr(self, name) for name in plain_fields}
        payload['submitted_at'] = self.submitted_at.isoformat() if self.submitted_at else None
        payload['client_config_mtime'] = self.client_config_mtime
        payload['status'] = self.status
        payload['admin_reviewed_at'] = self.admin_reviewed_at.isoformat() if self.admin_reviewed_at else None
        payload['admin_notes'] = self.admin_notes
        return payload

    def __repr__(self):
        """Identify the request by MAC address and review status."""
        return "<WMTUpdateRequest(mac='{}', status='{}')>".format(self.mac_address, self.status)
|
||||
1
app/models/device.py
Normal file
1
app/models/device.py
Normal file
@@ -0,0 +1 @@
|
||||
# Enhanced Server Monitoring System v2.0 - Models Package
|
||||
0
app/services/__init__.py
Normal file
0
app/services/__init__.py
Normal file
925
app/services/ansible_service.py
Normal file
925
app/services/ansible_service.py
Normal file
@@ -0,0 +1,925 @@
|
||||
"""
|
||||
SSH and Ansible management service for remote device operations
|
||||
"""
|
||||
import json
import logging
import os
import re
import subprocess
import tempfile
import threading
import uuid
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional, Tuple

import paramiko
import yaml

from app.models import Device, AnsibleExecution, PlaybookExecution
from config.database_config import get_db
|
||||
|
||||
class AnsibleService:
|
||||
"""Service for managing remote devices via SSH and Ansible"""
|
||||
|
||||
SETTINGS_FILE = Path("data/ansible_settings.json")
DEFAULT_SETTINGS = {
    "ssh_fallback_password": "raspberry",
}

def __init__(self):
    """Bind the DB handle and make sure the on-disk Ansible layout exists."""
    self.db = get_db()

    # Ansible directory layout (relative to the project working directory).
    self.ansible_dir = Path("ansible")
    self.inventory_file = self.ansible_dir / "inventory" / "dynamic_inventory.yaml"
    self.playbook_dir = self.ansible_dir / "playbooks"
    self.ssh_key_path = Path.home() / ".ssh" / "ansible_key"

    # Create the directory skeleton up-front so later writes never fail
    # on a missing parent directory.
    for sub in ("inventory", "playbooks", "roles"):
        (self.ansible_dir / sub).mkdir(parents=True, exist_ok=True)
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Settings helpers #
|
||||
# ------------------------------------------------------------------ #
|
||||
|
||||
def load_settings(self) -> Dict:
    """Return the persisted ansible settings merged over DEFAULT_SETTINGS.

    A missing or unreadable settings file is tolerated: the defaults are
    returned and the problem is logged, never raised.
    """
    merged = dict(self.DEFAULT_SETTINGS)
    if not self.SETTINGS_FILE.exists():
        return merged
    try:
        with open(self.SETTINGS_FILE, 'r') as fh:
            merged.update(json.load(fh))
    except Exception as e:
        logging.error(f"Error reading ansible settings: {e}")
    return merged
|
||||
|
||||
def save_settings(self, settings: Dict):
    """Merge *settings* into the stored settings and persist them as JSON."""
    self.SETTINGS_FILE.parent.mkdir(parents=True, exist_ok=True)
    merged = self.load_settings()
    merged.update(settings)
    with open(self.SETTINGS_FILE, 'w') as fh:
        json.dump(merged, fh, indent=2)
    logging.info("Ansible settings saved")
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Inventory file helpers #
|
||||
# ------------------------------------------------------------------ #
|
||||
|
||||
def _read_inventory(self) -> Dict:
    """Parse the inventory YAML file, guaranteeing the 'all.children' skeleton.

    Any read or parse failure falls back to an empty skeleton instead of
    raising, so callers can always index data['all']['children'].
    """
    if not self.inventory_file.exists():
        return {'all': {'children': {}}}
    try:
        with open(self.inventory_file, 'r') as fh:
            parsed = yaml.safe_load(fh) or {}
    except Exception as e:
        logging.error(f"Error reading inventory file: {e}")
        return {'all': {'children': {}}}
    # Normalise the skeleton: 'all' may be absent, or present but empty/None.
    if 'all' not in parsed:
        parsed['all'] = {'children': {}}
    if 'children' not in (parsed['all'] or {}):
        parsed['all']['children'] = {}
    return parsed
|
||||
|
||||
def _write_inventory(self, data: Dict):
    """Serialise *data* to the inventory YAML file, creating parent dirs."""
    self.inventory_file.parent.mkdir(parents=True, exist_ok=True)
    with open(self.inventory_file, 'w') as fh:
        yaml.dump(data, fh, default_flow_style=False, allow_unicode=True)
|
||||
|
||||
def get_inventory_data(self) -> Dict:
    """Build a display-friendly view of the inventory.

    Returns a dict with:
      'groups'   – {group_name: {'hosts': [{'hostname': ..., **host_vars}],
                                 'vars': {...}}}
      'raw_yaml' – the raw inventory file text ('' if missing/unreadable).
    """
    inventory = self._read_inventory()
    children = inventory.get('all', {}).get('children', {}) or {}

    groups = {}
    for name, body in children.items():
        body = body or {}
        host_list = []
        for host, host_vars in (body.get('hosts') or {}).items():
            record = {'hostname': host}
            record.update(host_vars or {})
            host_list.append(record)
        groups[name] = {'hosts': host_list, 'vars': body.get('vars', {}) or {}}

    raw = ''
    if self.inventory_file.exists():
        try:
            raw = self.inventory_file.read_text()
        except Exception:
            raw = ''
    return {'groups': groups, 'raw_yaml': raw}
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Inventory CRUD #
|
||||
# ------------------------------------------------------------------ #
|
||||
|
||||
def sync_devices_to_inventory(self) -> Dict:
    """Sync all active DB devices into the 'monitoring_devices' group.

    Only the monitoring_devices group is rebuilt; every other (custom)
    group already present in the inventory file is preserved.

    Returns:
        {'success': True, 'synced': N, 'message': ...} on success,
        {'success': False, 'error': ...} on failure.
    """
    # FIX: removed the unused function-scope `import re as _re` the
    # original carried (nothing in this method uses regexes).
    try:
        data = self._read_inventory()
        children = data['all'].setdefault('children', {})

        # Rebuild only the managed group; custom groups stay untouched.
        children['monitoring_devices'] = {'hosts': {}}
        synced = 0
        with self.db.get_session() as session:
            devices = session.query(Device).filter_by(status='active').all()
            for device in devices:
                if device.device_ip == '127.0.0.1' or device.hostname == 'localhost':
                    # The controller itself: local connection, no SSH needed.
                    hvars = {
                        'ansible_connection': 'local',
                        'ansible_host': '127.0.0.1'
                    }
                else:
                    hvars = {
                        'ansible_host': device.device_ip,
                        'ansible_user': 'pi',
                        'ansible_ssh_private_key_file': str(self.ssh_key_path),
                        'ansible_ssh_common_args': '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
                    }
                children['monitoring_devices']['hosts'][device.hostname] = hvars
                synced += 1
        self._write_inventory(data)
        return {'success': True, 'synced': synced,
                'message': f'Synced {synced} device(s) to monitoring_devices group'}
    except Exception as e:
        logging.error(f"Error syncing devices to inventory: {e}")
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def add_group_to_inventory(self, group_name: str) -> Dict:
    """Create a new, empty host group in the inventory.

    The name is validated against a conservative character set so it is
    safe to embed in YAML and on ansible command lines.

    Returns {'success': True, 'message': ...} or {'success': False, 'error': ...}.
    """
    # FIX: use the module-level `re` import instead of a function-scope
    # aliased `import re as _re` (idiomatic; imports belong at file top).
    if not re.match(r'^[a-zA-Z0-9_-]+$', group_name):
        return {'success': False,
                'error': 'Group name may only contain letters, numbers, underscores and hyphens'}
    try:
        data = self._read_inventory()
        children = data['all'].setdefault('children', {})
        if group_name in children:
            return {'success': False, 'error': f'Group "{group_name}" already exists'}
        children[group_name] = {'hosts': {}}
        self._write_inventory(data)
        return {'success': True, 'message': f'Group "{group_name}" created'}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def remove_group_from_inventory(self, group_name: str) -> Dict:
    """Delete a custom group from the inventory.

    The built-in monitoring_devices group is protected and can never be
    removed through this method.
    """
    if group_name == 'monitoring_devices':
        return {'success': False,
                'error': 'Cannot remove the default monitoring_devices group'}
    try:
        inventory = self._read_inventory()
        children = inventory['all'].get('children', {}) or {}
        if group_name not in children:
            return {'success': False, 'error': f'Group "{group_name}" not found'}
        del children[group_name]
        self._write_inventory(inventory)
        return {'success': True, 'message': f'Group "{group_name}" removed'}
    except Exception as exc:
        return {'success': False, 'error': str(exc)}
|
||||
|
||||
def add_host_to_inventory(self, group: str, hostname: str, ip: str,
                          ssh_user: str = 'pi', ssh_port: int = 22,
                          use_key: bool = True, password: str = None) -> Dict:
    """Manually add a host entry to a specific inventory group.

    Args:
        group: target group name (created on the fly if missing).
        hostname: inventory host name (validated against a safe charset).
        ip: value stored as ansible_host.
        ssh_user, ssh_port: SSH connection parameters.
        use_key: when True the service SSH key is used; otherwise
            *password* (if given) is stored as ansible_password.

    Returns {'success': True, 'message': ...} or {'success': False, 'error': ...}.
    """
    # FIX: module-level `re` instead of a function-scope aliased import.
    if not re.match(r'^[a-zA-Z0-9_.-]+$', hostname):
        return {'success': False, 'error': 'Invalid hostname (letters, digits, dot, hyphen, underscore only)'}
    try:
        data = self._read_inventory()
        children = data['all'].setdefault('children', {})

        # FIX: the original performed the same None-normalisation twice in
        # a row; collapse it. The group entry may be missing or explicitly
        # None (an empty YAML mapping), and its 'hosts' key may be None too.
        group_data = children.get(group) or {}
        children[group] = group_data
        hosts = group_data.get('hosts') or {}
        group_data['hosts'] = hosts

        hvars = {
            'ansible_host': ip,
            'ansible_user': ssh_user,
            'ansible_port': ssh_port,
            'ansible_ssh_common_args': '-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null'
        }
        if use_key:
            hvars['ansible_ssh_private_key_file'] = str(self.ssh_key_path)
        elif password:
            # SECURITY NOTE: the password ends up in plaintext in the
            # inventory YAML file; prefer use_key=True wherever possible.
            hvars['ansible_password'] = password

        hosts[hostname] = hvars
        self._write_inventory(data)
        return {'success': True, 'message': f'Host "{hostname}" added to group "{group}"'}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def remove_host_from_inventory(self, group: str, hostname: str) -> Dict:
    """Delete *hostname* from *group* in the inventory file."""
    try:
        inventory = self._read_inventory()
        children = inventory['all'].get('children', {}) or {}
        hosts = (children.get(group) or {}).get('hosts') or {}
        if hostname not in hosts:
            return {'success': False,
                    'error': f'Host "{hostname}" not found in group "{group}"'}
        del hosts[hostname]
        self._write_inventory(inventory)
        return {'success': True, 'message': f'Host "{hostname}" removed from "{group}"'}
    except Exception as exc:
        return {'success': False, 'error': str(exc)}
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Legacy / compatibility #
|
||||
# ------------------------------------------------------------------ #
|
||||
|
||||
def generate_dynamic_inventory(self) -> Dict:
    """Refresh the inventory from the device DB, then return it parsed.

    Legacy/compatibility entry point: equivalent to calling
    sync_devices_to_inventory() followed by a fresh read.
    """
    self.sync_devices_to_inventory()
    return self._read_inventory()
|
||||
|
||||
def create_update_playbook(self) -> str:
    """Write the 'update_devices' playbook to disk and return its path.

    The play updates the apt cache, dist-upgrades every host, reboots
    when the upgrade changed anything, ensures prezenta.service is
    running, and finally POSTs a completion report to the controller.
    """
    tasks = [
        {
            'name': 'Update apt cache',
            'apt': {'update_cache': True, 'cache_valid_time': 3600},
        },
        {
            'name': 'Upgrade all packages',
            'apt': {'upgrade': 'dist', 'autoremove': True, 'autoclean': True},
            'register': 'upgrade_result',
        },
        {
            'name': 'Restart device if required',
            'reboot': {'reboot_timeout': 600},
            'when': 'upgrade_result.changed',
        },
        {
            'name': 'Check service status',
            'systemd': {'name': 'prezenta.service', 'state': 'started'},
        },
        {
            'name': 'Report update completion',
            'uri': {
                'url': 'http://{{ ansible_controller_ip }}/api/update_complete',
                'method': 'POST',
                'body_format': 'json',
                'body': {
                    'hostname': '{{ inventory_hostname }}',
                    'device_ip': '{{ ansible_host }}',
                    'status': 'completed',
                    'packages_updated': '{{ upgrade_result.stdout_lines | length }}',
                },
            },
        },
    ]
    play = {
        'name': 'Update monitoring devices',
        'hosts': 'all',
        'become': True,
        'gather_facts': True,
        'tasks': tasks,
    }

    target = self.playbook_dir / "update_devices.yml"
    with open(target, 'w') as fh:
        yaml.dump([play], fh, default_flow_style=False)
    return str(target)
|
||||
|
||||
def create_restart_service_playbook(self) -> str:
    """Write the 'restart_service' playbook and return its file path.

    The play stops prezenta.service, waits briefly, starts (and enables)
    it again, verifies its state and reports back to the controller.
    """
    tasks = [
        {
            'name': 'Stop prezenta service',
            'systemd': {'name': 'prezenta.service', 'state': 'stopped'},
        },
        {
            'name': 'Wait for service to stop',
            'wait_for': {'timeout': 10},
        },
        {
            'name': 'Start prezenta service',
            'systemd': {'name': 'prezenta.service', 'state': 'started', 'enabled': True},
        },
        {
            'name': 'Verify service is running',
            'systemd': {'name': 'prezenta.service'},
            'register': 'service_status',
        },
        {
            'name': 'Report service restart',
            'uri': {
                'url': 'http://{{ ansible_controller_ip }}/api/service_restarted',
                'method': 'POST',
                'body_format': 'json',
                'body': {
                    'hostname': '{{ inventory_hostname }}',
                    'device_ip': '{{ ansible_host }}',
                    'service_status': '{{ service_status.status.ActiveState }}',
                },
            },
        },
    ]
    play = {
        'name': 'Restart monitoring service',
        'hosts': 'all',
        'become': True,
        'tasks': tasks,
    }

    target = self.playbook_dir / "restart_service.yml"
    with open(target, 'w') as fh:
        yaml.dump([play], fh, default_flow_style=False)
    return str(target)
|
||||
|
||||
def execute_playbook(self, playbook_name: str, limit_hosts: List[str] = None,
                     extra_vars: Dict = None, priority: int = 5, max_retries: int = 0) -> Dict:
    """Run an Ansible playbook synchronously and record it in the DB.

    Args:
        playbook_name: base name of a .yml file in the playbooks dir.
        limit_hosts: optional host names passed to --limit.
        extra_vars: optional variables passed as one JSON --extra-vars blob.
        priority, max_retries: stored on the PlaybookExecution record.

    Returns a dict with success flag, execution_id, captured stdout/stderr,
    exit_code and log file path; {'success': False, 'error': ...} on failure.
    """
    try:
        # Regenerate the inventory so the run sees the current device list.
        self.generate_dynamic_inventory()

        playbook_path = self.playbook_dir / f"{playbook_name}.yml"
        if not playbook_path.exists():
            return {
                'success': False,
                'error': f'Playbook {playbook_name} not found'
            }

        cmd = [
            'ansible-playbook',
            str(playbook_path.resolve()),
            '-i', str(self.inventory_file.resolve()),
            '-v'  # verbose output for the stored logs
        ]
        if limit_hosts:
            cmd.extend(['--limit', ','.join(limit_hosts)])
        if extra_vars:
            cmd.extend(['--extra-vars', json.dumps(extra_vars)])

        # Create the execution record up-front so the run is visible
        # in the UI while it is still active.
        execution_id = str(uuid.uuid4())
        with self.db.get_session() as session:
            execution = PlaybookExecution(
                execution_id=execution_id,
                playbook_name=playbook_name,
                playbook_description=self._get_playbook_description(playbook_name),
                target_hosts=json.dumps(limit_hosts or []),
                command_line=' '.join(cmd),
                extra_vars=json.dumps(extra_vars or {}),
                queued_at=datetime.utcnow(),
                started_at=datetime.utcnow(),
                status='running',
                priority=priority,
                max_retries=max_retries,
                total_hosts=len(limit_hosts) if limit_hosts else 0
            )
            session.add(execution)
            session.flush()
            execution_db_id = execution.id

        # FIX: allocate the persistent log file with mkstemp (consistent
        # with execute_playbook_async). The original opened a
        # NamedTemporaryFile purely to obtain a name, which left an extra
        # open handle inside the `with` for no reason.
        log_fd, log_file_path = tempfile.mkstemp(suffix='.log', prefix='ansible_')
        os.close(log_fd)

        process = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            cwd=str(self.ansible_dir)
        )
        stdout, stderr = process.communicate()

        # Persist results on the execution record.
        with self.db.get_session() as session:
            execution = session.query(PlaybookExecution).get(execution_db_id)
            execution.completed_at = datetime.utcnow()
            execution.exit_code = process.returncode
            execution.stdout_log = stdout
            execution.stderr_log = stderr
            execution.ansible_log_file = log_file_path

            if process.returncode == 0:
                execution.status = 'completed'
                execution.summary_message = 'Playbook executed successfully'
                # Derive per-host statistics from the PLAY RECAP output.
                self._parse_ansible_results_enhanced(execution, stdout)
            else:
                execution.status = 'failed'
                execution.summary_message = f'Playbook failed with exit code {process.returncode}'
                # Flag for a retry if the budget allows it.
                if execution.retry_count < max_retries:
                    execution.status = 'retry_pending'

        # Write the combined logs to the persistent file.
        with open(log_file_path, 'w') as f:
            f.write(f"STDOUT:\n{stdout}\n\nSTDERR:\n{stderr}\n")

        return {
            'success': process.returncode == 0,
            'execution_id': execution_id,
            'stdout': stdout,
            'stderr': stderr,
            'exit_code': process.returncode,
            'log_file': log_file_path,
            'error': stderr if process.returncode != 0 else None
        }

    except Exception as e:
        logging.error(f"Error executing playbook {playbook_name}: {e}")
        return {
            'success': False,
            'error': str(e)
        }
|
||||
|
||||
# ------------------------------------------------------------------ #
|
||||
# Async execution (background thread + live log streaming) #
|
||||
# ------------------------------------------------------------------ #
|
||||
|
||||
def execute_playbook_async(self, playbook_name: str, limit_hosts: List[str] = None,
                           extra_vars: Dict = None, priority: int = 5,
                           max_retries: int = 0) -> Dict:
    """Launch a playbook in a background daemon thread and return at once.

    The returned execution_id can be polled via get_live_execution() for
    live log output and final status.
    """
    try:
        self.generate_dynamic_inventory()

        playbook_file = self.playbook_dir / f"{playbook_name}.yml"
        if not playbook_file.exists():
            return {'success': False, 'error': f'Playbook {playbook_name} not found'}

        command = [
            'ansible-playbook',
            str(playbook_file.resolve()),
            '-i', str(self.inventory_file.resolve()),
            '-v',
        ]
        if limit_hosts:
            command.extend(['--limit', ','.join(limit_hosts)])
        if extra_vars:
            # One JSON blob avoids per-value shell-quoting issues.
            command.extend(['--extra-vars', json.dumps(extra_vars)])

        # mkstemp yields a log file that survives after close, so both the
        # worker thread and the live view can open it by path.
        fd, log_path = tempfile.mkstemp(suffix='.log', prefix='ansible_')
        os.close(fd)

        run_id = str(uuid.uuid4())
        with self.db.get_session() as session:
            record = PlaybookExecution(
                execution_id=run_id,
                playbook_name=playbook_name,
                playbook_description=self._get_playbook_description(playbook_name),
                target_hosts=json.dumps(limit_hosts or []),
                command_line=' '.join(command),
                extra_vars=json.dumps(extra_vars or {}),
                queued_at=datetime.utcnow(),
                started_at=datetime.utcnow(),
                status='running',
                priority=priority,
                max_retries=max_retries,
                total_hosts=len(limit_hosts) if limit_hosts else 0,
                ansible_log_file=log_path,
            )
            session.add(record)
            session.flush()
            db_id = record.id

        worker = threading.Thread(
            target=self._run_playbook_thread,
            args=(db_id, run_id, command, log_path, max_retries),
            daemon=True,
        )
        worker.start()

        return {'success': True, 'execution_id': run_id}

    except Exception as e:
        logging.error(f"Error starting async playbook {playbook_name}: {e}")
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def _run_playbook_thread(self, execution_db_id: int, execution_id: str,
                         cmd: List[str], log_file_path: str, max_retries: int):
    """Background worker for execute_playbook_async.

    Streams the merged stdout/stderr into the log file line by line
    (flushed per line so get_live_execution sees output immediately) and
    writes the final status back to the PlaybookExecution row.
    """
    try:
        # PYTHONUNBUFFERED makes the (Python-based) ansible process flush
        # each line instead of block-buffering through the pipe; colour
        # codes are disabled so the stored log stays plain text.
        child_env = os.environ.copy()
        child_env['PYTHONUNBUFFERED'] = '1'
        child_env['ANSIBLE_FORCE_COLOR'] = '0'
        child_env['ANSIBLE_NOCOLOR'] = '1'

        proc = subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,  # single merged stream
            text=True,
            bufsize=1,                 # line-buffered reads
            cwd=str(self.ansible_dir),
            env=child_env,
        )

        with open(log_file_path, 'w') as log:
            # Startup banner so the live view is never empty.
            log.write(f'--- ansible-playbook started (pid {proc.pid}) ---\n')
            log.write(f'Command: {" ".join(cmd)}\n')
            log.write('---\n')
            log.flush()

            # Explicit readline() loop rather than iterating the stream:
            # the iterator's read-ahead buffer would delay live output.
            while True:
                text_line = proc.stdout.readline()
                if text_line:
                    log.write(text_line)
                    log.flush()  # flush every line for the live view
                elif proc.poll() is not None:
                    break

        proc.wait()

        # Re-read the whole log so the DB keeps a full copy.
        with open(log_file_path, 'r') as log:
            captured = log.read()

        with self.db.get_session() as session:
            record = session.query(PlaybookExecution).get(execution_db_id)
            if record:
                record.completed_at = datetime.utcnow()
                record.exit_code = proc.returncode
                record.stdout_log = captured
                if proc.returncode == 0:
                    record.status = 'completed'
                    record.summary_message = 'Playbook executed successfully'
                    self._parse_ansible_results_enhanced(record, captured)
                else:
                    record.status = 'failed'
                    record.summary_message = f'Playbook failed (exit {proc.returncode})'
                    if record.retry_count < max_retries:
                        record.status = 'retry_pending'

    except Exception as e:
        logging.error(f"Background playbook thread error [{execution_id}]: {e}")
        try:
            # Best effort: mark the record failed so it never hangs in
            # 'running'; swallow secondary DB errors.
            with self.db.get_session() as session:
                record = session.query(PlaybookExecution).get(execution_db_id)
                if record:
                    record.status = 'failed'
                    record.summary_message = str(e)
                    record.completed_at = datetime.utcnow()
        except Exception:
            pass
|
||||
|
||||
def get_live_execution(self, execution_id: str) -> Dict:
    """Return status plus current log text for a running or finished execution."""
    try:
        with self.db.get_session() as session:
            record = session.query(PlaybookExecution).filter_by(
                execution_id=execution_id
            ).first()

            if not record:
                return {'success': False, 'error': 'Execution not found'}

            # Prefer the live log file; fall back to the DB copy.
            log_text = ''
            path = record.ansible_log_file
            if path and os.path.exists(path):
                try:
                    with open(path, 'r') as fh:
                        log_text = fh.read()
                except Exception:
                    log_text = record.stdout_log or ''
            else:
                log_text = record.stdout_log or ''

            # A still-running job may not have produced output yet.
            if not log_text and record.status == 'running':
                log_text = f'Waiting for ansible-playbook to produce output...\nCommand: {record.command_line or ""}'

            return {
                'success': True,
                'execution_id': execution_id,
                'status': record.status,
                'playbook_name': record.playbook_name,
                'target_hosts': json.loads(record.target_hosts) if record.target_hosts else [],
                'started_at': record.started_at.isoformat() if record.started_at else None,
                'completed_at': record.completed_at.isoformat() if record.completed_at else None,
                'successful_hosts': record.successful_hosts,
                'failed_hosts': record.failed_hosts,
                'unreachable_hosts': record.unreachable_hosts,
                'exit_code': record.exit_code,
                'summary_message': record.summary_message,
                'log': log_text,
            }
    except Exception as e:
        logging.error(f"Error fetching live execution {execution_id}: {e}")
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def _parse_ansible_results_enhanced(self, execution: PlaybookExecution, output: str):
    """Parse the ansible PLAY RECAP section and store per-host statistics.

    Counts hosts, not tasks: a host contributes 1 to each counter whose
    recap field is non-zero, and to successful_hosts only when it both
    finished with failed=0 AND was reachable.
    """
    successful_hosts = 0
    failed_hosts = 0
    unreachable_hosts = 0
    skipped_hosts = 0
    changed_hosts = 0

    def _field(line: str, key: str) -> int:
        """Extract an integer 'key=N' field from a recap line (0 if absent/bad)."""
        marker = key + '='
        if marker not in line:
            return 0
        try:
            return int(line.split(marker)[1].split()[0])
        except (ValueError, IndexError):
            return 0

    for line in output.split('\n'):
        # Recap lines look like:
        #   "host1 : ok=4 changed=2 unreachable=0 failed=0 skipped=1"
        if 'ok=' not in line or 'changed=' not in line:
            continue

        failed = _field(line, 'failed')
        unreachable = _field(line, 'unreachable')

        if unreachable > 0:
            unreachable_hosts += 1
        if failed > 0:
            failed_hosts += 1
        elif unreachable == 0:
            # FIX: the original counted any 'failed=0' line as successful,
            # so an unreachable host (failed=0, unreachable=1) was counted
            # both successful AND unreachable. An unreachable host never
            # ran its tasks, so it is not a success.
            successful_hosts += 1

        if _field(line, 'skipped') > 0:
            skipped_hosts += 1
        if _field(line, 'changed') > 0:
            changed_hosts += 1

    # Persist the aggregated per-host counters on the execution record.
    execution.successful_hosts = successful_hosts
    execution.failed_hosts = failed_hosts
    execution.unreachable_hosts = unreachable_hosts
    execution.skipped_hosts = skipped_hosts
    execution.changed_hosts = changed_hosts
|
||||
|
||||
def _get_playbook_description(self, playbook_name: str) -> str:
    """Map a playbook file name to a human-friendly description."""
    known = {
        'update_devices': 'Update all packages and monitoring software on devices',
        'restart_service': 'Restart monitoring services on selected devices',
        'system_health': 'Check system health and monitoring status',
        'maintenance_mode': 'Put devices in maintenance mode',
    }
    # Unknown playbooks get a generic, still-informative label.
    return known.get(playbook_name, f'Execute {playbook_name} playbook')
|
||||
|
||||
def create_system_health_playbook(self) -> str:
    """Write the 'system_health' check playbook and return its file path.

    The play gathers disk, memory, uptime, service and network snapshots
    via shell commands and prints a consolidated debug summary.
    """
    tasks = [
        {'name': 'Check disk usage', 'shell': 'df -h', 'register': 'disk_usage'},
        {'name': 'Check memory usage', 'shell': 'free -m', 'register': 'memory_usage'},
        {'name': 'Check system uptime', 'shell': 'uptime', 'register': 'system_uptime'},
        {
            'name': 'Check running services',
            'shell': 'systemctl list-units --type=service --state=running | grep -E "(ssh|monitoring|python)"',
            'register': 'running_services',
            'ignore_errors': True,
        },
        {
            'name': 'Check network connectivity',
            'shell': 'ping -c 3 8.8.8.8',
            'register': 'network_test',
            'ignore_errors': True,
        },
        {
            'name': 'Display health summary',
            'debug': {
                'msg': [
                    '=== SYSTEM HEALTH REPORT ===',
                    'Disk Usage: {{ disk_usage.stdout_lines[0] if disk_usage.stdout_lines else "N/A" }}',
                    'Memory: {{ memory_usage.stdout_lines[1] if memory_usage.stdout_lines|length > 1 else "N/A" }}',
                    'Uptime: {{ system_uptime.stdout if system_uptime.stdout else "N/A" }}',
                    'Network: {{ "OK" if network_test.rc == 0 else "FAILED" }}',
                    'Services: {{ running_services.stdout_lines|length if running_services.stdout_lines else 0 }} monitoring services running',
                ]
            },
        },
    ]
    play = {
        'name': 'System Health Check',
        'hosts': 'all',
        'become': True,
        'gather_facts': True,
        'tasks': tasks,
    }

    self.playbook_dir.mkdir(exist_ok=True)
    target = self.playbook_dir / "system_health.yml"
    with open(target, 'w') as fh:
        yaml.dump([play], fh, default_flow_style=False)
    return str(target)
|
||||
|
||||
def _parse_ansible_results(self, execution: AnsibleExecution, output: str):
    """Legacy recap parser: increments counters on an AnsibleExecution row.

    Kept for backward compatibility with AnsibleExecution records; new
    code paths use _parse_ansible_results_enhanced instead. Note this
    variant *increments* counters (caller owns initialisation).
    """
    for line in output.split('\n'):
        if 'ok=' not in line or 'changed=' not in line:
            continue
        # Recap line: "host1 : ok=4 changed=2 unreachable=0 failed=0"
        # FIX: the original tested "'failed=0' in line or 'failed=0 ' in line";
        # the second clause is a substring superset of the first and could
        # never change the outcome, so it has been dropped (same behaviour).
        if 'failed=0' in line:
            execution.successful_hosts += 1
        else:
            execution.failed_hosts += 1
        if 'unreachable=' in line:
            # FIX: guard the int() parse so one malformed recap line cannot
            # abort processing of the remaining lines.
            try:
                unreachable = int(line.split('unreachable=')[1].split()[0])
            except (ValueError, IndexError):
                unreachable = 0
            execution.unreachable_hosts += unreachable
|
||||
|
||||
def test_ssh_connectivity(self, device_ip: str, username: str = 'pi') -> Dict:
    """Test SSH connectivity to a device.

    Tries key-based authentication first, then falls back to the
    configurable password from settings. On success, runs ``uptime`` on the
    remote host to confirm command execution works, not just the handshake.

    Args:
        device_ip: Target host IP address.
        username: SSH login user (default 'pi').

    Returns:
        Dict with 'success': True plus 'message' and 'uptime' on success,
        or 'success': False plus 'error' on failure.
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        # Try with SSH key first, then password
        try:
            client.connect(
                device_ip,
                username=username,
                key_filename=str(self.ssh_key_path),
                timeout=10
            )
        except paramiko.AuthenticationException:
            # Fallback to configurable password
            fallback_pw = self.load_settings().get('ssh_fallback_password', 'raspberry')
            client.connect(
                device_ip,
                username=username,
                password=fallback_pw,
                timeout=10
            )

        # Test command execution
        stdin, stdout, stderr = client.exec_command('uptime')
        uptime_output = stdout.read().decode()

        return {
            'success': True,
            'message': 'SSH connection successful',
            'uptime': uptime_output.strip()
        }

    except Exception as e:
        return {
            'success': False,
            'error': str(e)
        }
    finally:
        # Always release the connection — the original only closed the
        # client on the success path, leaking it on any exception.
        client.close()
|
||||
|
||||
def bulk_ssh_test(self, device_ips: List[str]) -> Dict:
|
||||
"""Test SSH connectivity to multiple devices in parallel"""
|
||||
results = {}
|
||||
|
||||
with ThreadPoolExecutor(max_workers=10) as executor:
|
||||
future_to_ip = {
|
||||
executor.submit(self.test_ssh_connectivity, ip): ip
|
||||
for ip in device_ips
|
||||
}
|
||||
|
||||
for future in as_completed(future_to_ip):
|
||||
ip = future_to_ip[future]
|
||||
try:
|
||||
result = future.result()
|
||||
results[ip] = result
|
||||
except Exception as e:
|
||||
results[ip] = {
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
return results
|
||||
|
||||
def setup_ssh_keys(self) -> Dict:
    """Ensure an RSA key pair exists for Ansible SSH authentication.

    Generates a passphrase-less 4096-bit RSA key at ``self.ssh_key_path``
    when missing; otherwise reports the existing pair.

    Returns:
        Dict with 'success', a human-readable 'message' and both key paths,
        or 'success': False plus 'error' on failure.
    """
    try:
        key_path = Path(self.ssh_key_path)
        # parents=True so a missing intermediate directory (e.g. on a fresh
        # deployment) does not make key generation fail — the original
        # mkdir(exist_ok=True) raised FileNotFoundError in that case.
        key_path.parent.mkdir(parents=True, exist_ok=True)

        if not key_path.exists():
            # Generate new SSH key pair (no passphrase: used non-interactively)
            subprocess.run([
                'ssh-keygen',
                '-t', 'rsa',
                '-b', '4096',
                '-f', str(key_path),
                '-N', '',  # No passphrase
                '-C', 'ansible@monitoring-server'
            ], check=True)

            # Restrict private-key permissions (required by OpenSSH)
            key_path.chmod(0o600)

            return {
                'success': True,
                'message': 'SSH key pair generated',
                'public_key_path': f"{key_path}.pub",
                'private_key_path': str(key_path)
            }
        else:
            return {
                'success': True,
                'message': 'SSH key already exists',
                'public_key_path': f"{key_path}.pub",
                'private_key_path': str(key_path)
            }

    except Exception as e:
        logging.error(f"Error setting up SSH keys: {e}")
        return {
            'success': False,
            'error': str(e)
        }
|
||||
|
||||
def get_execution_history(self, limit: int = 50) -> List[Dict]:
    """Return recent Ansible playbook executions, newest first.

    Args:
        limit: Maximum number of history rows to return.

    Returns:
        List of JSON-serializable dicts describing each execution;
        empty list on error.
    """
    try:
        with self.db.get_session() as session:
            executions = session.query(PlaybookExecution).order_by(
                PlaybookExecution.queued_at.desc()
            ).limit(limit).all()

            # Renamed loop variable: the original used 'exec', which
            # shadows the builtin of the same name.
            return [self._execution_to_dict(execution) for execution in executions]

    except Exception as e:
        logging.error(f"Error getting execution history: {e}")
        return []

def _execution_to_dict(self, execution) -> Dict:
    """Serialize one PlaybookExecution row into a JSON-friendly dict."""
    return {
        'id': execution.id,
        'execution_id': execution.execution_id,
        'playbook_name': execution.playbook_name,
        'playbook_description': execution.playbook_description,
        # Timestamps become ISO-8601 strings (or None when unset)
        'queued_at': execution.queued_at.isoformat() if execution.queued_at else None,
        'started_at': execution.started_at.isoformat() if execution.started_at else None,
        'completed_at': execution.completed_at.isoformat() if execution.completed_at else None,
        'status': execution.status,
        'priority': execution.priority,
        'retry_count': execution.retry_count,
        'max_retries': execution.max_retries,
        'exit_code': execution.exit_code,
        'total_hosts': execution.total_hosts,
        'successful_hosts': execution.successful_hosts,
        'failed_hosts': execution.failed_hosts,
        'unreachable_hosts': execution.unreachable_hosts,
        'skipped_hosts': execution.skipped_hosts,
        'changed_hosts': execution.changed_hosts,
        'summary_message': execution.summary_message,
        'duration': execution.duration,
        'duration_formatted': execution.duration_formatted
    }
|
||||
324
app/services/device_service.py
Normal file
324
app/services/device_service.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
Device management service with CRUD operations and device monitoring
|
||||
"""
|
||||
import logging
|
||||
from typing import List, Optional, Dict, Any
|
||||
from datetime import datetime, timedelta
|
||||
from sqlalchemy.orm import Session, joinedload
|
||||
from sqlalchemy import desc, func, and_
|
||||
from app.models import Device, LogEntry, FileUpload, InventoryGroup
|
||||
from config.database_config import get_db
|
||||
|
||||
class DeviceService:
    """Service for managing devices and device-related operations.

    All public methods open a short-lived session via ``self.db.get_session()``
    and swallow database errors, logging them and returning a neutral value
    (None / [] / {} / 0 / False); ``create_device`` and ``update_device`` are
    the exceptions — they log and re-raise.

    NOTE(review): ORM objects returned from these methods are detached once
    the session context exits — confirm get_session() uses
    expire_on_commit=False so attribute access still works in callers.
    """

    def __init__(self):
        # Project-level database accessor (session factory wrapper)
        self.db = get_db()
        self.logger = logging.getLogger(__name__)

    # Basic CRUD Operations

    def create_device(self, hostname: str, device_ip: str, nume_masa: str, **kwargs) -> Device:
        """Create a new device.

        Args:
            hostname: Unique device hostname.
            device_ip: Unique device IP address.
            nume_masa: Human-readable station/table name.
            **kwargs: Optional fields: device_type, os_version, status,
                location, description.

        Returns:
            The persisted Device.

        Raises:
            ValueError: If a device with the same hostname or IP exists.
            Exception: Database errors are logged and re-raised.
        """
        try:
            with self.db.get_session() as session:
                # Check if device already exists (hostname OR IP must be unique)
                existing = session.query(Device).filter(
                    (Device.hostname == hostname) | (Device.device_ip == device_ip)
                ).first()

                if existing:
                    raise ValueError(f"Device with hostname '{hostname}' or IP '{device_ip}' already exists")

                device = Device(
                    hostname=hostname,
                    device_ip=device_ip,
                    nume_masa=nume_masa,
                    device_type=kwargs.get('device_type', 'unknown'),
                    os_version=kwargs.get('os_version'),
                    status=kwargs.get('status', 'active'),
                    location=kwargs.get('location'),
                    description=kwargs.get('description'),
                    last_seen=datetime.utcnow()
                )

                session.add(device)
                session.commit()
                # Refresh to populate server-generated fields (id)
                session.refresh(device)

                self.logger.info(f"Created device: {hostname} ({device_ip})")
                return device

        except Exception as e:
            self.logger.error(f"Error creating device: {e}")
            raise

    def get_device_by_id(self, device_id: int) -> Optional[Device]:
        """Get device by ID with relationships loaded.

        Eager-loads logs, files and inventory_groups so the relationships
        remain accessible after the session closes.
        """
        try:
            with self.db.get_session() as session:
                return session.query(Device).options(
                    joinedload(Device.logs),
                    joinedload(Device.files),
                    joinedload(Device.inventory_groups)
                ).filter(Device.id == device_id).first()
        except Exception as e:
            self.logger.error(f"Error getting device {device_id}: {e}")
            return None

    def get_device_by_hostname(self, hostname: str) -> Optional[Device]:
        """Get device by hostname (None when absent or on DB error)."""
        try:
            with self.db.get_session() as session:
                return session.query(Device).filter(Device.hostname == hostname).first()
        except Exception as e:
            self.logger.error(f"Error getting device by hostname {hostname}: {e}")
            return None

    def get_device_by_ip(self, device_ip: str) -> Optional[Device]:
        """Get device by IP address (None when absent or on DB error)."""
        try:
            with self.db.get_session() as session:
                return session.query(Device).filter(Device.device_ip == device_ip).first()
        except Exception as e:
            self.logger.error(f"Error getting device by IP {device_ip}: {e}")
            return None

    def get_all_devices(self, status: Optional[str] = None, limit: Optional[int] = None) -> List[Device]:
        """Get all devices with optional filtering.

        Args:
            status: Only devices with this exact status, when given.
            limit: Cap on the number of rows, when given.

        Returns:
            Devices ordered by most recently seen first; [] on error.
        """
        try:
            with self.db.get_session() as session:
                query = session.query(Device).order_by(desc(Device.last_seen))

                if status:
                    query = query.filter(Device.status == status)

                if limit:
                    query = query.limit(limit)

                return query.all()
        except Exception as e:
            self.logger.error(f"Error getting devices: {e}")
            return []

    def update_device(self, device_id: int, **kwargs) -> Optional[Device]:
        """Update device information.

        Only whitelisted fields are applied; unknown kwargs are silently
        ignored. Returns None when the device does not exist; re-raises
        database errors.
        """
        try:
            with self.db.get_session() as session:
                device = session.query(Device).filter(Device.id == device_id).first()
                if not device:
                    return None

                # Update allowed fields only — prevents callers from
                # touching internal columns via arbitrary kwargs
                allowed_fields = [
                    'hostname', 'device_ip', 'nume_masa', 'device_type',
                    'os_version', 'status', 'location', 'description'
                ]

                for field, value in kwargs.items():
                    if field in allowed_fields and hasattr(device, field):
                        setattr(device, field, value)

                session.commit()
                session.refresh(device)

                self.logger.info(f"Updated device {device_id}")
                return device

        except Exception as e:
            self.logger.error(f"Error updating device {device_id}: {e}")
            raise

    def delete_device(self, device_id: int) -> bool:
        """Delete device (soft delete by setting status to inactive).

        Returns True when the status was flipped, False when the device
        is missing or a DB error occurred (logged, not raised).
        """
        try:
            with self.db.get_session() as session:
                device = session.query(Device).filter(Device.id == device_id).first()
                if not device:
                    return False

                # Soft delete - set status to inactive (row is kept so logs
                # and uploads referencing it stay valid)
                device.status = 'inactive'
                session.commit()

                self.logger.info(f"Soft deleted device {device_id}")
                return True

        except Exception as e:
            self.logger.error(f"Error deleting device {device_id}: {e}")
            return False

    # Device Monitoring Functions

    def update_device_last_seen(self, hostname: Optional[str] = None, device_ip: Optional[str] = None) -> Optional[Device]:
        """Update device last-seen timestamp.

        Looks the device up by hostname first, then by IP. Returns the
        updated Device, or None when no identifier matched or on error.
        """
        try:
            with self.db.get_session() as session:
                device = None

                if hostname:
                    device = session.query(Device).filter(Device.hostname == hostname).first()
                elif device_ip:
                    device = session.query(Device).filter(Device.device_ip == device_ip).first()

                if device:
                    device.last_seen = datetime.utcnow()
                    session.commit()

                return device

        except Exception as e:
            self.logger.error(f"Error updating last seen: {e}")
            return None

    def get_device_statistics(self, device_id: int) -> Dict[str, Any]:
        """Get comprehensive statistics for a device.

        Returns {} when the device is missing or on error; otherwise a dict
        with the device row, log/file counts, per-severity counts, 24h
        activity and the most recent log entry.
        """
        try:
            with self.db.get_session() as session:
                device = session.query(Device).filter(Device.id == device_id).first()
                if not device:
                    return {}

                # Log statistics
                total_logs = session.query(LogEntry).filter(LogEntry.device_id == device_id).count()

                # Logs by severity: list of (severity, count) pairs
                severity_counts = session.query(
                    LogEntry.severity,
                    func.count(LogEntry.id)
                ).filter(
                    LogEntry.device_id == device_id
                ).group_by(LogEntry.severity).all()

                # Recent activity (last 24 hours)
                last_24h = datetime.utcnow() - timedelta(hours=24)
                recent_logs = session.query(LogEntry).filter(
                    and_(LogEntry.device_id == device_id, LogEntry.timestamp >= last_24h)
                ).count()

                # File uploads
                total_files = session.query(FileUpload).filter(
                    FileUpload.device_id == device_id
                ).count()

                # Last log
                last_log = session.query(LogEntry).filter(
                    LogEntry.device_id == device_id
                ).order_by(desc(LogEntry.timestamp)).first()

                return {
                    'device': device,
                    'total_logs': total_logs,
                    'severity_counts': dict(severity_counts),
                    'recent_logs_24h': recent_logs,
                    'total_files': total_files,
                    'last_log': last_log,
                    # NOTE(review): named 'uptime_days' but computed as days
                    # since the device was last seen — confirm intended meaning
                    'uptime_days': (datetime.utcnow() - device.last_seen).days if device.last_seen else 0
                }

        except Exception as e:
            self.logger.error(f"Error getting device statistics: {e}")
            return {}

    def get_inactive_devices(self, hours: int = 24) -> List[Device]:
        """Get devices that haven't been seen recently.

        Only considers devices that should be reporting (status 'active'
        or 'maintenance'); 'inactive' devices are excluded by design.
        """
        try:
            with self.db.get_session() as session:
                cutoff_time = datetime.utcnow() - timedelta(hours=hours)

                return session.query(Device).filter(
                    and_(
                        Device.last_seen < cutoff_time,
                        Device.status.in_(['active', 'maintenance'])
                    )
                ).order_by(desc(Device.last_seen)).all()

        except Exception as e:
            self.logger.error(f"Error getting inactive devices: {e}")
            return []

    def get_device_logs(self, device_id: int, limit: int = 100, severity: Optional[str] = None) -> List[LogEntry]:
        """Get logs for a specific device, newest first.

        Args:
            device_id: Device primary key.
            limit: Maximum rows to return.
            severity: Optional exact severity filter.
        """
        try:
            with self.db.get_session() as session:
                query = session.query(LogEntry).filter(
                    LogEntry.device_id == device_id
                ).order_by(desc(LogEntry.timestamp))

                if severity:
                    query = query.filter(LogEntry.severity == severity)

                return query.limit(limit).all()

        except Exception as e:
            self.logger.error(f"Error getting device logs: {e}")
            return []

    def search_devices(self, search_term: str) -> List[Device]:
        """Search devices by hostname, IP, name, location or description.

        Uses SQL LIKE with the term wrapped in wildcards; matching any one
        of the five columns is enough.
        """
        try:
            with self.db.get_session() as session:
                search_pattern = f"%{search_term}%"

                return session.query(Device).filter(
                    (Device.hostname.like(search_pattern)) |
                    (Device.device_ip.like(search_pattern)) |
                    (Device.nume_masa.like(search_pattern)) |
                    (Device.location.like(search_pattern)) |
                    (Device.description.like(search_pattern))
                ).order_by(desc(Device.last_seen)).all()

        except Exception as e:
            self.logger.error(f"Error searching devices: {e}")
            return []

    # Bulk Operations

    def bulk_update_status(self, device_ids: List[int], status: str) -> int:
        """Update status for multiple devices in one statement.

        Returns the number of rows updated (0 on error).
        synchronize_session=False: in-memory ORM objects are not refreshed.
        """
        try:
            with self.db.get_session() as session:
                updated = session.query(Device).filter(
                    Device.id.in_(device_ids)
                ).update({Device.status: status}, synchronize_session=False)

                session.commit()
                self.logger.info(f"Updated status for {updated} devices")
                return updated

        except Exception as e:
            self.logger.error(f"Error bulk updating status: {e}")
            return 0

    def get_device_summary(self) -> Dict[str, Any]:
        """Get summary statistics for all devices.

        Returns counts grouped by status and by type, plus the number of
        devices seen in the last 24 hours; {} on error.
        """
        try:
            with self.db.get_session() as session:
                # Device status counts: list of (status, count)
                status_counts = session.query(
                    Device.status,
                    func.count(Device.id)
                ).group_by(Device.status).all()

                # Device type counts: list of (device_type, count)
                type_counts = session.query(
                    Device.device_type,
                    func.count(Device.id)
                ).group_by(Device.device_type).all()

                # Recent activity
                last_24h = datetime.utcnow() - timedelta(hours=24)
                devices_seen_24h = session.query(Device).filter(
                    Device.last_seen >= last_24h
                ).count()

                return {
                    'total_devices': session.query(Device).count(),
                    'status_counts': dict(status_counts),
                    'type_counts': dict(type_counts),
                    'devices_seen_24h': devices_seen_24h
                }

        except Exception as e:
            self.logger.error(f"Error getting device summary: {e}")
            return {}
|
||||
256
app/services/file_service.py
Normal file
256
app/services/file_service.py
Normal file
@@ -0,0 +1,256 @@
|
||||
"""
|
||||
File upload and processing service
|
||||
"""
|
||||
import os
|
||||
import hashlib
|
||||
import mimetypes
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from werkzeug.utils import secure_filename
|
||||
from app.models import Device, FileUpload
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
class FileUploadService:
    """Service for handling file uploads and processing.

    Validates incoming files (extension allow-list, 50MB size cap),
    deduplicates by SHA-256 hash, stores them under data/uploads and, for
    log-like files, feeds their lines into LogCompressionService.
    """

    def __init__(self):
        self.db = get_db()
        self.upload_folder = Path("data/uploads")
        # parents=True: also create the 'data' directory on a fresh install
        # (plain exist_ok=True raised FileNotFoundError when 'data' was missing)
        self.upload_folder.mkdir(parents=True, exist_ok=True)

        # Allowed file extensions (lower-case, without the dot)
        self.allowed_extensions = {
            'txt', 'log', 'conf', 'cfg', 'json', 'yml', 'yaml',
            'py', 'sh', 'service', 'env', 'ini'
        }

        # Max file size (50MB)
        self.max_file_size = 50 * 1024 * 1024

    def process_uploaded_file(self, file, device_info):
        """Process uploaded file from device.

        Args:
            file: Werkzeug FileStorage-like object (has .filename, .read,
                .seek, .tell).
            device_info: Dict with at least hostname, device_ip, nume_masa.

        Returns:
            Dict with 'success' plus details ('file_id', 'filename', 'size',
            'hash', 'processed' or 'duplicate'), or 'error' on failure.
        """
        try:
            # Validate file presence
            if not file or file.filename == '':
                return {'success': False, 'error': 'No file provided'}

            # Check file extension against the allow-list
            filename = secure_filename(file.filename)
            if not self._allowed_file(filename):
                return {
                    'success': False,
                    'error': f'File type not allowed. Allowed: {", ".join(self.allowed_extensions)}'
                }

            # Determine size without loading: seek to end, tell, rewind
            file.seek(0, 2)  # Seek to end
            file_size = file.tell()
            file.seek(0)  # Reset position

            if file_size > self.max_file_size:
                return {
                    'success': False,
                    'error': f'File too large. Max size: {self.max_file_size // (1024*1024)}MB'
                }

            # Hash the content for de-duplication
            file_content = file.read()
            file.seek(0)  # Reset in case the caller reuses the stream
            file_hash = hashlib.sha256(file_content).hexdigest()

            with self.db.get_session() as session:
                # Get or create device record for the sender
                device = self._get_or_create_device(session, device_info)

                # Duplicate upload? Report success without storing again.
                existing_file = session.query(FileUpload).filter_by(file_hash=file_hash).first()
                if existing_file:
                    return {
                        'success': True,
                        'message': 'File already exists (duplicate detected)',
                        'file_id': existing_file.id,
                        'duplicate': True
                    }

                # Unique storage name: host + timestamp + sanitized original
                # name (keeps the extension; the original code built the name
                # without the filename, so every upload lost its extension)
                timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
                new_filename = f"{device.hostname}_{timestamp}_{filename}"
                file_path = self.upload_folder / new_filename

                # Save file to disk
                with open(file_path, 'wb') as f:
                    f.write(file_content)

                # Get MIME type from the original name
                mime_type, _ = mimetypes.guess_type(filename)

                # Create file record
                file_upload = FileUpload(
                    device_id=device.id,
                    filename=new_filename,
                    original_filename=filename,
                    file_path=str(file_path),
                    file_size=file_size,
                    file_hash=file_hash,
                    mime_type=mime_type,
                    upload_date=datetime.utcnow(),
                    upload_ip=device_info.get('device_ip'),
                    processed=False,
                    processing_status='pending'
                )

                session.add(file_upload)
                session.flush()  # assign file_upload.id before processing

                # Extract log entries from log-like files right away
                if self._is_log_file(filename, mime_type):
                    self._process_log_file(file_upload, file_content)

                return {
                    'success': True,
                    'message': 'File uploaded successfully',
                    'file_id': file_upload.id,
                    'filename': new_filename,
                    'size': file_size,
                    'hash': file_hash,
                    'processed': file_upload.processed
                }

        except Exception as e:
            logging.error(f"Error processing uploaded file: {e}")
            return {
                'success': False,
                'error': str(e)
            }

    def _allowed_file(self, filename):
        """Check if the file's extension is in the allow-list."""
        return '.' in filename and \
               filename.rsplit('.', 1)[1].lower() in self.allowed_extensions

    def _get_or_create_device(self, session, device_info):
        """Get existing device (matched by hostname+IP) or create a new one."""
        device = session.query(Device).filter_by(
            hostname=device_info['hostname'],
            device_ip=device_info['device_ip']
        ).first()

        if not device:
            device = Device(
                hostname=device_info['hostname'],
                device_ip=device_info['device_ip'],
                nume_masa=device_info['nume_masa'],
                last_seen=datetime.utcnow(),
                status='active'
            )
            session.add(device)
            session.flush()  # assign device.id for the FK on the upload row
        else:
            # Refresh liveness and keep the display name in sync
            device.last_seen = datetime.utcnow()
            if device.nume_masa != device_info['nume_masa']:
                device.nume_masa = device_info['nume_masa']

        return device

    def _is_log_file(self, filename, mime_type):
        """Heuristic: should this file be parsed for log entries?

        True when the extension is log/txt, the filename contains a
        log-ish keyword, or the MIME type is textual.
        """
        log_extensions = {'log', 'txt'}
        log_keywords = ['log', 'error', 'debug', 'trace', 'audit']

        # 1. Extension match
        if '.' in filename:
            ext = filename.rsplit('.', 1)[1].lower()
            if ext in log_extensions:
                return True

        # 2. Keyword in the filename itself
        filename_lower = filename.lower()
        for keyword in log_keywords:
            if keyword in filename_lower:
                return True

        # 3. Any text MIME type
        if mime_type and 'text' in mime_type:
            return True

        return False

    def _process_log_file(self, file_upload, content):
        """Process log file content to extract per-line log entries.

        Marks the upload as a log file, pushes every non-empty line through
        LogCompressionService and records how many entries were extracted.
        Errors are captured on the upload row instead of being raised.
        """
        try:
            # Mark as log file
            file_upload.is_log_file = True

            # Tolerate mixed/unknown encodings rather than failing the upload
            lines = content.decode('utf-8', errors='ignore').split('\n')
            entries_extracted = 0

            # Local import avoids a circular import at module load time
            from app.services.log_service import LogCompressionService
            log_service = LogCompressionService()

            device_info = {
                'hostname': file_upload.device.hostname,
                'device_ip': file_upload.device.device_ip,
                'nume_masa': file_upload.device.nume_masa
            }

            for line_num, line in enumerate(lines, 1):
                line = line.strip()
                if not line:
                    continue

                # Prefix with source file + line number for traceability
                message = f"[File: {file_upload.original_filename}:{line_num}] {line}"

                # Process through log compression service
                result = log_service.process_log_message(
                    device_info=device_info,
                    message=message,
                    severity='info'
                )

                if result['success']:
                    entries_extracted += 1

            # Update file record
            file_upload.log_entries_extracted = entries_extracted
            file_upload.processed = True
            file_upload.processing_status = 'completed'

            logging.info(f"Processed log file {file_upload.filename}: {entries_extracted} entries extracted")

        except Exception as e:
            logging.error(f"Error processing log file content: {e}")
            file_upload.processing_status = 'error'
            file_upload.processing_error = str(e)

    def get_upload_stats(self):
        """Get file upload statistics (counts, total size, processing state)."""
        try:
            # Imported here because the module does not import sqlalchemy at
            # top level. The original 'session.func.sum(...)' was a bug —
            # Session has no 'func' attribute; func lives in sqlalchemy.
            from sqlalchemy import func

            with self.db.get_session() as session:
                total_files = session.query(FileUpload).count()
                log_files = session.query(FileUpload).filter_by(is_log_file=True).count()

                # Calculate total size of all stored uploads
                total_size = session.query(
                    func.sum(FileUpload.file_size)
                ).scalar() or 0

                # Count by processing status
                processed = session.query(FileUpload).filter_by(processed=True).count()
                pending = session.query(FileUpload).filter(
                    FileUpload.processing_status == 'pending'
                ).count()

                return {
                    'total_files': total_files,
                    'log_files': log_files,
                    'total_size': total_size,
                    'processed': processed,
                    'pending': pending
                }
        except Exception as e:
            logging.error(f"Error getting upload stats: {e}")
            return {'error': str(e)}
|
||||
378
app/services/log_service.py
Normal file
378
app/services/log_service.py
Normal file
@@ -0,0 +1,378 @@
|
||||
"""
|
||||
Log processing service with message compression and aliasing
|
||||
"""
|
||||
import json
|
||||
import re
|
||||
import hashlib
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
from datetime import datetime
|
||||
from sqlalchemy.orm import Session
|
||||
from app.models import Device, LogEntry, MessageTemplate
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
class LogCompressionService:
|
||||
"""Service for compressing log messages using templates and aliases"""
|
||||
|
||||
def __init__(self):
    # Project-level database accessor (session factory wrapper)
    self.db = get_db()
    # Built-in regex patterns used to recognise compressible messages;
    # loaded once per service instance
    self.template_patterns = self._load_common_patterns()
|
||||
|
||||
def _load_common_patterns(self) -> List[Dict]:
|
||||
"""Load common log message patterns for template matching"""
|
||||
return [
|
||||
{
|
||||
'pattern': r'Card detected: ([A-F0-9]+)',
|
||||
'template': 'Card detected: {card_id}',
|
||||
'category': 'card_detection',
|
||||
'alias_prefix': 'CD'
|
||||
},
|
||||
{
|
||||
'pattern': r'Connection failed: (.+)',
|
||||
'template': 'Connection failed: {error}',
|
||||
'category': 'connection_error',
|
||||
'alias_prefix': 'CE'
|
||||
},
|
||||
{
|
||||
'pattern': r'System startup completed in ([0-9.]+)s',
|
||||
'template': 'System startup completed in {time}s',
|
||||
'category': 'system_startup',
|
||||
'alias_prefix': 'SS'
|
||||
},
|
||||
{
|
||||
'pattern': r'Auto-update: (.+)',
|
||||
'template': 'Auto-update: {message}',
|
||||
'category': 'auto_update',
|
||||
'alias_prefix': 'AU'
|
||||
},
|
||||
{
|
||||
'pattern': r'Command \'([^\']+)\' (SUCCESS|FAILED)',
|
||||
'template': 'Command \'{command}\' {status}',
|
||||
'category': 'command_execution',
|
||||
'alias_prefix': 'EX'
|
||||
},
|
||||
{
|
||||
'pattern': r'Temperature: ([0-9.]+)°C',
|
||||
'template': 'Temperature: {temp}°C',
|
||||
'category': 'temperature',
|
||||
'alias_prefix': 'TM'
|
||||
}
|
||||
]
|
||||
|
||||
def process_log_message(self, device_info: Dict, message: str, severity: str = 'info') -> Dict:
    """
    Process incoming log message with compression.

    Storage strategy, in order of preference:
      1. Message matches an existing MessageTemplate -> store template id
         plus extracted variables (cheapest).
      2. Message matches one of the built-in patterns -> create a new
         template on the fly, then store against it.
      3. No pattern matched -> store the raw message text.

    Args:
        device_info: Dict with hostname, device_ip, nume_masa
        message: Log message text
        severity: Message severity level

    Returns:
        Dict with processing results and storage info ('success',
        'log_id', 'device_id', 'compression', 'message'), or
        'success': False plus 'error' on failure.
    """
    try:
        with self.db.get_session() as session:
            # Get or create device
            device = self._get_or_create_device(session, device_info)

            # Try to match message to existing template
            template, variables = self._match_message_template(session, message)

            if template:
                # Case 1: existing template — store only the template id
                # and the extracted variables instead of the full text
                log_entry = LogEntry(
                    device_id=device.id,
                    template_id=template.id,
                    template_variables=json.dumps(variables) if variables else None,
                    severity=severity,
                    timestamp=datetime.utcnow()
                )
                # Update template usage count
                template.usage_count += 1

                # Calculate size savings (alias + serialized variables vs
                # the original UTF-8 message).
                # NOTE(review): assumes message is non-empty — an empty
                # message would divide by zero below; confirm callers
                # never pass '' here.
                original_size = len(message.encode('utf-8'))
                compressed_size = len(template.alias.encode('utf-8')) + \
                                  len(json.dumps(variables or {}).encode('utf-8'))

                compression_info = {
                    'used_template': True,
                    'template_alias': template.alias,
                    'original_size': original_size,
                    'compressed_size': compressed_size,
                    'savings_percent': ((original_size - compressed_size) / original_size) * 100
                }
            else:
                # Case 2: try to create a new template if the message
                # matches one of the built-in patterns
                template = self._create_new_template(session, message)

                if template:
                    # New template created — extract its variables and
                    # store against it with an initial usage count of 1
                    variables = self._extract_variables(message, template.template_text)
                    log_entry = LogEntry(
                        device_id=device.id,
                        template_id=template.id,
                        template_variables=json.dumps(variables) if variables else None,
                        severity=severity,
                        timestamp=datetime.utcnow()
                    )
                    template.usage_count = 1

                    compression_info = {
                        'used_template': True,
                        'template_alias': template.alias,
                        'new_template': True,
                        'original_size': len(message.encode('utf-8')),
                        'compressed_size': len(template.alias.encode('utf-8'))
                    }
                else:
                    # Case 3: no pattern matched — store the raw message
                    log_entry = LogEntry(
                        device_id=device.id,
                        full_message=message,
                        severity=severity,
                        timestamp=datetime.utcnow()
                    )

                    compression_info = {
                        'used_template': False,
                        'stored_full': True,
                        'original_size': len(message.encode('utf-8'))
                    }

            session.add(log_entry)
            session.flush()  # Get the log entry ID

            return {
                'success': True,
                'log_id': log_entry.id,
                'device_id': device.id,
                'compression': compression_info,
                'message': 'Log processed successfully'
            }

    except Exception as e:
        logging.error(f"Error processing log message: {e}")
        return {
            'success': False,
            'error': str(e),
            'message': 'Log processing failed'
        }
|
||||
|
||||
@staticmethod
|
||||
def _infer_device_type(hostname: str) -> str:
|
||||
"""Guess device type from hostname pattern."""
|
||||
h = hostname.upper()
|
||||
if any(k in h for k in ('RPI', 'PI', 'RASP')):
|
||||
return 'Raspberry Pi'
|
||||
if any(k in h for k in ('SRV', 'SERVER')):
|
||||
return 'Server'
|
||||
if any(k in h for k in ('PC', 'DESK', 'WRK')):
|
||||
return 'PC'
|
||||
if any(k in h for k in ('LAPTOP', 'NB')):
|
||||
return 'Laptop'
|
||||
return 'unknown'
|
||||
|
||||
def _get_or_create_device(self, session: Session, device_info: Dict) -> Device:
    """Get existing device or create new one.

    Lookup priority:
    1. MAC address (most reliable – survives IP/hostname changes)
    2. hostname + device_ip (legacy fallback)

    On a hit, refreshes last_seen and opportunistically syncs fields the
    client reported (name, MAC, type, OS, location) without clobbering
    manually-edited values with None.
    """
    mac = device_info.get('mac_address')
    device = None

    # 1. Try MAC lookup first
    if mac:
        device = session.query(Device).filter_by(mac_address=mac).first()

    # 2. Fall back to hostname+IP
    if not device:
        device = session.query(Device).filter_by(
            hostname=device_info['hostname'],
            device_ip=device_info['device_ip']
        ).first()

    if not device:
        device = Device(
            hostname=device_info['hostname'],
            device_ip=device_info['device_ip'],
            nume_masa=device_info['nume_masa'],
            # Type falls back to a hostname-based guess when not reported
            device_type=device_info.get('device_type') or self._infer_device_type(device_info['hostname']),
            os_version=device_info.get('os_version'),
            location=device_info.get('location'),
            mac_address=mac or None,
            last_seen=datetime.utcnow(),
            status='active'
        )
        session.add(device)
        session.flush()  # assign device.id so callers can reference it as a FK
    else:
        # Always update last_seen and nume_masa
        device.last_seen = datetime.utcnow()
        if device.nume_masa != device_info['nume_masa']:
            device.nume_masa = device_info['nume_masa']

        # Sync MAC address if we now know it and device doesn't have one
        if mac and not device.mac_address:
            device.mac_address = mac

        # Update type from hostname if still unknown
        if not device.device_type or device.device_type == 'unknown':
            device.device_type = device_info.get('device_type') or self._infer_device_type(device_info['hostname'])

        # Update OS / location only when client sends them (don't overwrite manual edits with None)
        if device_info.get('os_version'):
            device.os_version = device_info['os_version']
        if device_info.get('location'):
            device.location = device_info['location']

    return device
|
||||
|
||||
def _match_message_template(self, session: Session, message: str) -> Tuple[Optional[MessageTemplate], Optional[Dict]]:
    """Try to match *message* to an existing template.

    Returns a ``(template, variables)`` pair:
      * ``(template, None)``  — exact hash match; no variable extraction needed.
      * ``(template, {...})`` — pattern match with extracted variables.
      * ``(None, None)``      — no match at all.
    """
    # First, try exact template match (hash of the complete message text).
    message_hash = MessageTemplate.create_hash(message)
    template = session.query(MessageTemplate).filter_by(template_hash=message_hash).first()

    if template:
        return template, None

    # Try pattern matching with variable extraction.
    # NOTE(review): patterns are tried in declaration order; the first
    # matching pattern wins even if a later one is more specific.
    for pattern_info in self.template_patterns:
        match = re.match(pattern_info['pattern'], message)
        if match:
            # Look for a stored template with this pattern's text/category.
            template_text = pattern_info['template']
            template = session.query(MessageTemplate).filter_by(
                template_text=template_text,
                category=pattern_info['category']
            ).first()

            if template:
                # Extract variables: each regex capture group is assigned to
                # a placeholder key based on which placeholders appear in the
                # template text and, for command/status, on the group index.
                variables = {}
                for i, group in enumerate(match.groups(), 1):
                    # Map to variable names based on template.
                    # NOTE(review): for single-placeholder templates every
                    # group writes the same key, so the last group wins.
                    if '{card_id}' in template_text and pattern_info['category'] == 'card_detection':
                        variables['card_id'] = group
                    elif '{error}' in template_text and pattern_info['category'] == 'connection_error':
                        variables['error'] = group
                    elif '{time}' in template_text and pattern_info['category'] == 'system_startup':
                        variables['time'] = group
                    elif '{message}' in template_text:
                        variables['message'] = group
                    elif '{command}' in template_text and i == 1:
                        variables['command'] = group
                    elif '{status}' in template_text and i == 2:
                        variables['status'] = group
                    elif '{temp}' in template_text:
                        variables['temp'] = group

                return template, variables

    return None, None
|
||||
|
||||
def _create_new_template(self, session: Session, message: str) -> Optional[MessageTemplate]:
    """Create a new template when *message* matches one of the known patterns.

    Returns the already-stored template if one exists for the matching
    pattern, a freshly persisted one otherwise, or ``None`` when no
    pattern matches the message at all.
    """
    for pattern_info in self.template_patterns:
        if not re.match(pattern_info['pattern'], message):
            continue

        # Reuse an already-stored template for this pattern, if any.
        existing = session.query(MessageTemplate).filter_by(
            template_text=pattern_info['template'],
            category=pattern_info['category']
        ).first()
        if existing:
            return existing

        # Persist a brand-new template with a freshly generated alias.
        new_template = MessageTemplate(
            template_hash=MessageTemplate.create_hash(pattern_info['template']),
            template_text=pattern_info['template'],
            category=pattern_info['category'],
            alias=self._generate_alias(session, pattern_info['alias_prefix']),
            created_at=datetime.utcnow()
        )
        session.add(new_template)
        session.flush()
        return new_template

    return None
|
||||
|
||||
def _generate_alias(self, session: Session, prefix: str) -> str:
    """Return the next free alias of the form ``<prefix>NNN`` (zero-padded)."""
    # All aliases already allocated under this prefix, e.g. ("ERR001",).
    rows = session.query(MessageTemplate.alias).filter(
        MessageTemplate.alias.like(f"{prefix}%")
    ).all()

    highest = 0
    for (existing_alias,) in rows:
        suffix = existing_alias[len(prefix):]
        try:
            highest = max(highest, int(suffix))
        except ValueError:
            # Alias shares the prefix but has a non-numeric tail; ignore it.
            continue

    return f"{prefix}{highest + 1:03d}"
|
||||
|
||||
def _extract_variables(self, message: str, template: str) -> Dict:
    """Extract template variables from *message* given *template*.

    NOTE(review): placeholder implementation — always returns an empty
    dict. Actual variable extraction currently happens inside
    ``_match_message_template``; a more sophisticated template matcher
    could live here later.
    """
    return {}
|
||||
|
||||
def get_compression_stats(self) -> Dict:
    """Return log-compression statistics, or ``{'error': ...}`` on failure."""
    try:
        with self.db.get_session() as session:
            total_logs = session.query(LogEntry).count()
            templated_logs = session.query(LogEntry).filter(
                LogEntry.template_id.isnot(None)
            ).count()
            total_templates = session.query(MessageTemplate).count()

            # Share of log entries that reference a template, in percent.
            ratio = templated_logs / total_logs * 100 if total_logs > 0 else 0

            return {
                'total_logs': total_logs,
                'templated_logs': templated_logs,
                'total_templates': total_templates,
                'compression_ratio': round(ratio, 2),
                # Rough estimate: each templated entry saves ~60% of its size.
                'estimated_savings': round(ratio * 0.6, 2)
            }
    except Exception as e:
        logging.error(f"Error getting compression stats: {e}")
        return {'error': str(e)}
|
||||
|
||||
def get_message_by_alias(self, alias: str, variables: Dict = None) -> Optional[str]:
    """Resolve *alias* back to its full message text.

    When *variables* is given, the template's placeholders are filled in.
    Returns ``None`` if the alias is unknown or on any error.
    """
    try:
        with self.db.get_session() as session:
            template = session.query(MessageTemplate).filter_by(alias=alias).first()
            if template is None:
                return None
            text = template.template_text
            return text.format(**variables) if variables else text
    except Exception as e:
        logging.error(f"Error retrieving message by alias: {e}")
        return None
|
||||
0
app/utils/__init__.py
Normal file
0
app/utils/__init__.py
Normal file
0
app/web/__init__.py
Normal file
0
app/web/__init__.py
Normal file
599
app/web/ansible.py
Normal file
599
app/web/ansible.py
Normal file
@@ -0,0 +1,599 @@
|
||||
"""
|
||||
Web routes for Ansible management interface
|
||||
"""
|
||||
from flask import Blueprint, render_template, request, redirect, url_for, flash, jsonify
|
||||
from app.services.ansible_service import AnsibleService
|
||||
from app.models import Device, AnsibleExecution, PlaybookExecution
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
# Create blueprint
|
||||
ansible_web_bp = Blueprint('ansible_web', __name__, url_prefix='/ansible')
|
||||
|
||||
# Initialize service
|
||||
ansible_service = AnsibleService()
|
||||
|
||||
@ansible_web_bp.route('/')
def index():
    """Ansible landing page — forward straight to the playbooks view."""
    target = url_for('ansible_web.playbooks')
    return redirect(target)
|
||||
|
||||
@ansible_web_bp.route('/devices')
def devices():
    """Ansible inventory management interface.

    Renders the inventory page from two data sources side by side: the
    on-disk Ansible inventory and the devices known to the database,
    flagging which DB devices have not been synced into the inventory.
    """
    try:
        # Load current inventory from file
        inventory_data = ansible_service.get_inventory_data()

        # Load all DB devices so the user can see which haven't been synced yet
        with get_db().get_session() as session:
            db_devices = session.query(Device).all()
            # Copy into plain dicts so the data is safe to use after the
            # session closes (no lazy attribute loading on detached objects).
            db_devices_list = [
                {
                    'hostname': d.hostname,
                    'device_ip': d.device_ip,
                    'status': d.status,
                    'device_type': d.device_type,
                    'location': d.location
                }
                for d in db_devices
            ]

            # Collect all hostnames already present in inventory
            all_inv_hosts = set()
            for group_data in inventory_data.get('groups', {}).values():
                for h in group_data.get('hosts', []):
                    all_inv_hosts.add(h['hostname'])

            # Mark which DB devices are in inventory
            for d in db_devices_list:
                d['in_inventory'] = d['hostname'] in all_inv_hosts

            return render_template(
                'ansible/devices.html',
                inventory=inventory_data,
                db_devices=db_devices_list,
                all_inv_hosts=all_inv_hosts
            )
    except Exception as e:
        logging.error(f"Error loading inventory page: {e}")
        flash(f'Error loading inventory: {e}', 'error')
        # Render an empty page rather than a 500 so the UI stays usable.
        return render_template(
            'ansible/devices.html',
            inventory={'groups': {}, 'raw_yaml': ''},
            db_devices=[],
            all_inv_hosts=set()
        )
|
||||
|
||||
@ansible_web_bp.route('/playbooks')
def playbooks():
    """Playbook management interface.

    Lists custom playbooks found on disk alongside the application's
    built-in playbook entries.
    """
    try:
        # Custom playbooks: every *.yml file in the playbook directory.
        playbook_dir = ansible_service.playbook_dir
        custom_playbooks = []
        if playbook_dir.exists():
            custom_playbooks = [
                {'name': f.stem, 'filename': f.name, 'path': str(f)}
                for f in playbook_dir.glob('*.yml')
            ]

        # Playbooks shipped with the application (materialized on demand).
        builtin_playbooks = [
            {
                'name': 'update_devices',
                'description': 'Update all packages on monitoring devices',
                'builtin': True
            },
            {
                'name': 'restart_service',
                'description': 'Restart monitoring services on devices',
                'builtin': True
            },
        ]

        return render_template('ansible/playbooks.html',
                               playbooks=custom_playbooks,
                               builtin_playbooks=builtin_playbooks)
    except Exception as e:
        logging.error(f"Error loading playbooks: {e}")
        flash(f'Error loading playbooks: {e}', 'error')
        return render_template('ansible/playbooks.html', playbooks=[], builtin_playbooks=[])
|
||||
|
||||
@ansible_web_bp.route('/execute', methods=['GET', 'POST'])
def execute():
    """Execute playbook interface.

    GET  — render the execution form (playbook picker + inventory hosts).
    POST — queue an asynchronous playbook run; responds with JSON for
           AJAX callers and with flash + redirect for plain form posts.
    """
    if request.method == 'GET':
        try:
            # Optional ?playbook=... query arg pre-selects a playbook.
            preselect = request.args.get('playbook', '')
            inventory_data = ansible_service.get_inventory_data()

            # Flatten all unique hosts from inventory for the host picker
            seen = set()
            all_inv_hosts = []
            for group in inventory_data.get('groups', {}).values():
                for h in group.get('hosts', []):
                    if h['hostname'] not in seen:
                        all_inv_hosts.append({
                            'hostname': h['hostname'],
                            'ip': h.get('ansible_host', '')
                        })
                        seen.add(h['hostname'])

            return render_template('ansible/execute.html',
                                   inventory=inventory_data,
                                   all_inv_hosts=all_inv_hosts,
                                   preselect_playbook=preselect)
        except Exception as e:
            logging.error(f"Error loading execute form: {e}")
            flash(f'Error loading form: {e}', 'error')
            return render_template('ansible/execute.html',
                                   inventory={'groups': {}},
                                   all_inv_hosts=[],
                                   preselect_playbook='')

    elif request.method == 'POST':
        # Execute playbook
        try:
            import json as _json
            # AJAX callers receive JSON; form posts get flash + redirect.
            is_ajax = request.headers.get('X-Requested-With') == 'XMLHttpRequest'

            playbook_name = request.form.get('playbook')
            selected_hosts = request.form.getlist('hosts')
            priority = int(request.form.get('priority', 5))
            max_retries = int(request.form.get('max_retries', 0))
            check_mode = bool(request.form.get('check_mode'))
            extra_vars = {}

            # Parse extra variables if provided (must be a JSON object)
            extra_vars_str = request.form.get('extra_vars', '').strip()
            if extra_vars_str:
                try:
                    extra_vars = _json.loads(extra_vars_str)
                except _json.JSONDecodeError:
                    if is_ajax:
                        return jsonify({'success': False, 'error': 'Invalid JSON in extra variables'}), 400
                    flash('Invalid JSON format for extra variables', 'error')
                    return redirect(url_for('ansible_web.execute'))

            # Add check mode to extra vars if enabled
            if check_mode:
                extra_vars['check_mode'] = True

            if not playbook_name:
                if is_ajax:
                    return jsonify({'success': False, 'error': 'Playbook selection is required'}), 400
                flash('Playbook selection is required', 'error')
                return redirect(url_for('ansible_web.execute'))

            if not selected_hosts:
                if is_ajax:
                    return jsonify({'success': False, 'error': 'At least one device must be selected'}), 400
                flash('At least one device must be selected', 'error')
                return redirect(url_for('ansible_web.execute'))

            # Create builtin playbooks if needed (materialized on demand)
            if playbook_name == 'update_devices':
                ansible_service.create_update_playbook()
            elif playbook_name == 'restart_service':
                ansible_service.create_restart_service_playbook()
            elif playbook_name == 'system_health':
                ansible_service.create_system_health_playbook()

            # Add controller IP for callbacks
            extra_vars['ansible_controller_ip'] = request.host

            # Use async execution (returns immediately with execution_id)
            result = ansible_service.execute_playbook_async(
                playbook_name=playbook_name,
                limit_hosts=selected_hosts,
                extra_vars=extra_vars,
                priority=priority,
                max_retries=max_retries
            )

            if result['success']:
                if is_ajax:
                    return jsonify({'success': True, 'execution_id': result['execution_id']})
                flash(f'Playbook "{playbook_name}" started! Monitoring execution...', 'success')
                return redirect(url_for('ansible_web.execution_details',
                                        execution_id=result['execution_id']))
            else:
                error_msg = result.get('error', 'Unknown error')
                if is_ajax:
                    return jsonify({'success': False, 'error': error_msg}), 500
                flash(f'Playbook execution failed: {error_msg}', 'error')
                return redirect(url_for('ansible_web.execute'))

        except Exception as e:
            logging.error(f"Error executing playbook: {e}")
            if request.headers.get('X-Requested-With') == 'XMLHttpRequest':
                return jsonify({'success': False, 'error': str(e)}), 500
            flash(f'Error executing playbook: {e}', 'error')
            return redirect(url_for('ansible_web.execute'))
|
||||
|
||||
@ansible_web_bp.route('/executions')
def executions():
    """Execution history interface (most recent 100 runs)."""
    try:
        history = ansible_service.get_execution_history(limit=100)
        return render_template('ansible/executions.html', executions=history)
    except Exception as e:
        logging.error(f"Error loading executions: {e}")
        flash(f'Error loading executions: {e}', 'error')
        return render_template('ansible/executions.html', executions=[])
|
||||
|
||||
@ansible_web_bp.route('/executions/<execution_id>')
def execution_details(execution_id):
    """View detailed results for one playbook execution."""
    try:
        with get_db().get_session() as session:
            execution = session.query(PlaybookExecution).filter_by(
                execution_id=execution_id
            ).first()

            if execution is None:
                flash('Execution not found', 'error')
                return redirect(url_for('ansible_web.executions'))

            # Attach the raw Ansible log when the execution recorded one.
            log_content = None
            if execution.ansible_log_file:
                try:
                    with open(execution.ansible_log_file, 'r') as log_file:
                        log_content = log_file.read()
                except FileNotFoundError:
                    log_content = "Log file not found"

            return render_template('ansible/execution_details.html',
                                   execution=execution,
                                   log_content=log_content)
    except Exception as e:
        logging.error(f"Error loading execution details: {e}")
        flash(f'Error loading execution details: {e}', 'error')
        return redirect(url_for('ansible_web.executions'))
|
||||
|
||||
@ansible_web_bp.route('/ssh/setup')
def ssh_setup():
    """SSH key setup interface.

    Shows whether a key pair exists, the public key (if present) and the
    stored SSH settings.
    """
    try:
        public_key = None
        key_exists = ansible_service.ssh_key_path.exists()

        if key_exists:
            # The public half lives next to the private key as *.pub.
            pub_path = ansible_service.ssh_key_path.with_suffix('.pub')
            if pub_path.exists():
                with open(pub_path, 'r') as pub_file:
                    public_key = pub_file.read().strip()

        settings = ansible_service.load_settings()

        return render_template('ansible/ssh_setup.html',
                               key_exists=key_exists,
                               public_key=public_key,
                               settings=settings)
    except Exception as e:
        logging.error(f"Error in SSH setup page: {e}")
        flash(f'Error loading SSH setup: {e}', 'error')
        return render_template('ansible/ssh_setup.html', key_exists=False, public_key=None, settings={})
|
||||
|
||||
|
||||
@ansible_web_bp.route('/ssh/settings', methods=['POST'])
def save_ssh_settings():
    """Save SSH settings (fallback password etc.); always returns to setup page."""
    try:
        password = request.form.get('ssh_fallback_password', '').strip()
        if not password:
            flash('Fallback password cannot be empty.', 'error')
            return redirect(url_for('ansible_web.ssh_setup'))

        ansible_service.save_settings({'ssh_fallback_password': password})
        flash('SSH settings saved successfully.', 'success')
    except Exception as e:
        logging.error(f"Error saving SSH settings: {e}")
        flash(f'Error saving SSH settings: {e}', 'error')
    return redirect(url_for('ansible_web.ssh_setup'))
|
||||
|
||||
@ansible_web_bp.route('/ssh/generate', methods=['POST'])
def generate_ssh_keys():
    """Generate a new SSH key pair; always returns to the setup page."""
    try:
        outcome = ansible_service.setup_ssh_keys()
        if outcome['success']:
            flash('SSH keys generated successfully!', 'success')
        else:
            flash(f'Error generating SSH keys: {outcome.get("error")}', 'error')
    except Exception as e:
        logging.error(f"Error generating SSH keys: {e}")
        flash(f'Error generating SSH keys: {e}', 'error')
    return redirect(url_for('ansible_web.ssh_setup'))
|
||||
|
||||
@ansible_web_bp.route('/ssh/test', methods=['POST'])
def test_ssh():
    """Test SSH connectivity to the selected devices and render the results."""
    try:
        device_ips = request.form.getlist('device_ips')
        if not device_ips:
            flash('Please select at least one device to test', 'error')
            return redirect(url_for('ansible_web.devices'))

        results = ansible_service.bulk_ssh_test(device_ips)

        # Summarize reachable-vs-total for the flash banner.
        successful = sum(1 for outcome in results.values() if outcome.get('success'))
        total = len(results)
        flash(f'SSH test completed: {successful}/{total} devices reachable',
              'success' if successful == total else 'warning')

        return render_template('ansible/ssh_test_results.html', results=results)

    except Exception as e:
        logging.error(f"Error testing SSH: {e}")
        flash(f'Error testing SSH: {e}', 'error')
        return redirect(url_for('ansible_web.devices'))
|
||||
|
||||
# API endpoints for AJAX calls
|
||||
@ansible_web_bp.route('/api/refresh_inventory', methods=['POST'])
def api_refresh_inventory():
    """AJAX endpoint to regenerate the dynamic inventory."""
    try:
        inventory = ansible_service.generate_dynamic_inventory()
        # Hosts live under all -> children -> monitoring_devices -> hosts.
        hosts = (inventory.get('all', {})
                          .get('children', {})
                          .get('monitoring_devices', {})
                          .get('hosts', {}))
        return jsonify({
            'success': True,
            'message': f'Inventory refreshed with {len(hosts)} devices'
        })
    except Exception as e:
        logging.error(f"Error refreshing inventory: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
|
||||
|
||||
@ansible_web_bp.route('/api/execution_status/<execution_id>')
def api_execution_status(execution_id):
    """AJAX endpoint returning the live status of one playbook execution."""
    try:
        with get_db().get_session() as session:
            execution = session.query(PlaybookExecution).filter_by(
                execution_id=execution_id
            ).first()

            if execution is None:
                return jsonify({'error': 'Execution not found'}), 404

            def _iso(ts):
                # Timestamps serialize as ISO-8601 strings; missing ones as null.
                return ts.isoformat() if ts else None

            return jsonify({
                'id': execution.id,
                'status': execution.status,
                'start_time': _iso(execution.start_time),
                'end_time': _iso(execution.end_time),
                'exit_code': execution.exit_code,
                'successful_hosts': execution.successful_hosts,
                'failed_hosts': execution.failed_hosts,
                'unreachable_hosts': execution.unreachable_hosts
            })
    except Exception as e:
        logging.error(f"Error getting execution status: {e}")
        return jsonify({'error': str(e)}), 500
|
||||
|
||||
@ansible_web_bp.route('/upload_playbook', methods=['POST'])
def upload_playbook():
    """Upload a custom playbook file.

    Accepts a multipart form with a 'playbook_file' (.yml/.yaml) and an
    optional 'playbook_name'; the name is sanitized before the file is
    saved into the Ansible playbook directory. Always redirects back to
    the playbooks page.
    """
    try:
        if 'playbook_file' not in request.files:
            flash('No playbook file selected', 'error')
            return redirect(url_for('ansible_web.playbooks'))

        file = request.files['playbook_file']
        if file.filename == '':
            flash('No playbook file selected', 'error')
            return redirect(url_for('ansible_web.playbooks'))

        if file and file.filename.lower().endswith(('.yml', '.yaml')):
            # Fall back to the uploaded file's basename when no name given.
            playbook_name = request.form.get('playbook_name', '').strip()
            if not playbook_name:
                playbook_name = file.filename.rsplit('.', 1)[0]

            # Sanitize: only alphanumerics, underscore and dash survive.
            import re
            safe_filename = re.sub(r'[^a-zA-Z0-9_-]', '_', playbook_name)
            filename = f"{safe_filename}.yml"

            # Save file into the playbook directory
            playbook_path = ansible_service.playbook_dir / filename
            file.save(str(playbook_path))

            # BUG FIX: the message previously printed the literal text
            # "(unknown)" instead of the uploaded playbook's name.
            flash(f'Playbook "{playbook_name}" uploaded successfully!', 'success')
        else:
            flash('Invalid file type. Please upload a .yml or .yaml file.', 'error')

        return redirect(url_for('ansible_web.playbooks'))

    except Exception as e:
        logging.error(f"Error uploading playbook: {e}")
        flash(f'Error uploading playbook: {e}', 'error')
        return redirect(url_for('ansible_web.playbooks'))
|
||||
|
||||
@ansible_web_bp.route('/playbook/content')
def playbook_content():
    """Return a playbook's raw content as plain text for viewing."""
    try:
        raw_path = request.args.get('path')
        if not raw_path:
            return "No playbook path provided", 400

        from pathlib import Path
        candidate = Path(raw_path)
        # Relative paths are resolved against the playbook directory.
        if not candidate.is_absolute():
            candidate = ansible_service.playbook_dir / candidate

        # Security: reject any path that escapes the playbook directory.
        try:
            candidate.resolve().relative_to(ansible_service.playbook_dir.resolve())
        except ValueError:
            return "Invalid playbook path", 400

        if not candidate.exists():
            return "Playbook file not found", 404

        with open(candidate, 'r', encoding='utf-8') as f:
            content = f.read()

        return content, 200, {'Content-Type': 'text/plain'}

    except Exception as e:
        logging.error(f"Error reading playbook content: {e}")
        return f"Error reading playbook: {e}", 500
|
||||
|
||||
@ansible_web_bp.route('/playbook/save', methods=['POST'])
def save_playbook():
    """Save a new or existing playbook.

    Expects JSON ``{'name', 'content', 'is_new'}``. Validates the YAML
    syntax, sanitizes the filename, and writes the file into the Ansible
    playbook directory. Returns JSON with 'success'/'error'.
    """
    try:
        data = request.get_json()
        if not data:
            return jsonify({'error': 'No data provided'}), 400

        name = data.get('name', '').strip()
        content = data.get('content', '').strip()
        is_new = data.get('is_new', False)

        if not name:
            return jsonify({'error': 'Playbook name is required'}), 400

        if not content:
            return jsonify({'error': 'Playbook content is required'}), 400

        # Reject content that isn't even valid YAML before touching disk.
        try:
            import yaml
            yaml.safe_load(content)
        except yaml.YAMLError as e:
            return jsonify({'error': f'Invalid YAML syntax: {e}'}), 400

        # Sanitize: only alphanumerics, underscore and dash survive.
        import re
        safe_filename = re.sub(r'[^a-zA-Z0-9_-]', '_', name)
        filename = f"{safe_filename}.yml"

        playbook_path = ansible_service.playbook_dir / filename

        # BUG FIX: the two messages below previously printed the literal
        # text "(unknown)" instead of the playbook's filename.
        # Refuse to overwrite when the caller asked for a brand-new file.
        if is_new and playbook_path.exists():
            return jsonify({'error': f'Playbook {filename} already exists'}), 400

        with open(playbook_path, 'w', encoding='utf-8') as f:
            f.write(content)

        return jsonify({
            'success': True,
            'message': f'Playbook {filename} saved successfully',
            'filename': filename
        })

    except Exception as e:
        logging.error(f"Error saving playbook: {e}")
        return jsonify({'error': str(e)}), 500
|
||||
|
||||
@ansible_web_bp.route('/playbook/validate', methods=['POST'])
def validate_playbook():
    """Validate playbook YAML syntax and basic Ansible structure."""
    try:
        data = request.get_json()
        if not data:
            return jsonify({'error': 'No data provided'}), 400

        content = data.get('content', '').strip()
        if not content:
            return jsonify({'error': 'No content to validate'}), 400

        # Step 1: the document must parse as YAML at all.
        try:
            import yaml
            parsed = yaml.safe_load(content)
        except yaml.YAMLError as e:
            return jsonify({'valid': False, 'error': f'YAML syntax error: {e}'})

        # Step 2: an Ansible playbook is a list of play dicts, each with
        # 'hosts' and at least one of 'tasks' / 'roles'.
        if not isinstance(parsed, list):
            return jsonify({'valid': False, 'error': 'Playbook must be a list of plays'})

        for i, play in enumerate(parsed):
            if not isinstance(play, dict):
                return jsonify({'valid': False, 'error': f'Play {i+1} must be a dictionary'})
            if 'hosts' not in play:
                return jsonify({'valid': False, 'error': f'Play {i+1} is missing required "hosts" field'})
            if 'tasks' not in play and 'roles' not in play:
                return jsonify({'valid': False, 'error': f'Play {i+1} must have either "tasks" or "roles"'})

        return jsonify({'valid': True, 'message': 'Playbook is valid'})

    except Exception as e:
        logging.error(f"Error validating playbook: {e}")
        return jsonify({'valid': False, 'error': str(e)})
|
||||
|
||||
@ansible_web_bp.route('/playbook/delete', methods=['POST'])
def delete_playbook():
    """Delete a custom playbook identified by name (JSON body)."""
    try:
        data = request.get_json()
        if not data:
            return jsonify({'error': 'No data provided'}), 400

        playbook_name = data.get('playbook_name', '').strip()
        if not playbook_name:
            return jsonify({'error': 'Playbook name is required'}), 400

        # The upload/save paths sanitize names, so look for both the
        # sanitized and the raw spelling, with either YAML extension.
        import re
        safe_filename = re.sub(r'[^a-zA-Z0-9_-]', '_', playbook_name)
        candidates = [
            ansible_service.playbook_dir / f"{safe_filename}.yml",
            ansible_service.playbook_dir / f"{safe_filename}.yaml",
            ansible_service.playbook_dir / f"{playbook_name}.yml",
            ansible_service.playbook_dir / f"{playbook_name}.yaml",
        ]
        playbook_path = next((p for p in candidates if p.exists()), None)

        if not playbook_path:
            return jsonify({'error': f'Playbook {playbook_name} not found'}), 404

        # Security: never delete anything outside the playbook directory.
        try:
            playbook_path.resolve().relative_to(ansible_service.playbook_dir.resolve())
        except ValueError:
            return jsonify({'error': 'Invalid playbook path'}), 400

        playbook_path.unlink()

        return jsonify({
            'success': True,
            'message': f'Playbook {playbook_name} deleted successfully'
        })

    except Exception as e:
        logging.error(f"Error deleting playbook: {e}")
        return jsonify({'error': str(e)}), 500
|
||||
578
app/web/main.py
Normal file
578
app/web/main.py
Normal file
@@ -0,0 +1,578 @@
|
||||
"""
|
||||
Main web routes for dashboard and device management
|
||||
"""
|
||||
from flask import Blueprint, render_template, request, redirect, url_for, flash, jsonify
|
||||
from app.models import Device, LogEntry, MessageTemplate, AnsibleExecution, WMTUpdateRequest, InventoryGroup, device_inventory_association
|
||||
from config.database_config import get_db
|
||||
from app.services.log_service import LogCompressionService
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from sqlalchemy import text, func
|
||||
import logging
|
||||
import yaml
|
||||
|
||||
# Create blueprint
|
||||
main_bp = Blueprint('main', __name__)
|
||||
|
||||
# Initialize services
|
||||
log_service = LogCompressionService()
|
||||
|
||||
@main_bp.route('/')
def index():
    """Site root — the devices page is the effective home page."""
    devices_url = url_for('main.devices')
    return redirect(devices_url)
|
||||
|
||||
@main_bp.route('/dashboard')
def dashboard():
    """Legacy /dashboard URL — kept as a redirect to the devices page."""
    devices_url = url_for('main.devices')
    return redirect(devices_url)
|
||||
|
||||
@main_bp.route('/devices')
def devices():
    """Device management page: all devices, per-device log counts, pending updates."""
    try:
        with get_db().get_session() as session:
            all_devices = session.query(Device).order_by(Device.last_seen.desc()).all()

            # Per-device log totals, keyed by device id.
            device_log_counts = {
                d.id: session.query(LogEntry).filter_by(device_id=d.id).count()
                for d in all_devices
            }

            pending_count = session.query(WMTUpdateRequest).filter_by(status='pending').count()

            return render_template('device_management.html',
                                   devices=all_devices,
                                   device_log_counts=device_log_counts,
                                   pending_count=pending_count)
    except Exception as e:
        logging.error(f"Error loading devices: {e}")
        flash(f'Error loading devices: {e}', 'error')
        return render_template('device_management.html', devices=[], device_log_counts={}, pending_count=0)
|
||||
|
||||
@main_bp.route('/device/<int:device_id>')
def device_detail(device_id):
    """Device detail page with logs and stats.

    Shows the device record, its 100 most recent log entries and summary
    statistics (per-severity counts and last-24h count) computed over
    those fetched entries only.
    """
    try:
        with get_db().get_session() as session:
            device = session.query(Device).get(device_id)

            if not device:
                flash('Device not found', 'error')
                return redirect(url_for('main.devices'))

            # Get device logs (last 100, newest first)
            logs = session.query(LogEntry).filter_by(device_id=device_id).order_by(
                LogEntry.timestamp.desc()
            ).limit(100).all()

            # Get log statistics.
            # NOTE(review): stats are computed over the 100 fetched logs,
            # not the device's full history.
            log_stats = {
                'total': len(logs),
                'by_severity': {},
                'last_24h': 0
            }

            last_24h = datetime.utcnow() - timedelta(hours=24)
            for log in logs:
                # Count by severity
                severity = log.severity
                log_stats['by_severity'][severity] = log_stats['by_severity'].get(severity, 0) + 1

                # Count last 24h
                if log.timestamp >= last_24h:
                    log_stats['last_24h'] += 1

            return render_template('device_detail.html',
                                   device=device,
                                   logs=logs,
                                   log_stats=log_stats)

    except Exception as e:
        logging.error(f"Error loading device detail: {e}")
        flash(f'Error loading device detail: {e}', 'error')
        return redirect(url_for('main.devices'))
|
||||
|
||||
@main_bp.route('/logs')
def logs():
    """Log viewer with filtering and pagination.

    Query parameters: ``device_id`` (int), ``severity``, ``search``
    (substring match on the full message), ``page`` (1-based) and
    ``per_page`` (capped at 200).
    """
    try:
        # Get filter parameters
        device_id = request.args.get('device_id', type=int)
        severity = request.args.get('severity')
        search = request.args.get('search', '')
        # Clamp pagination inputs: page < 1 would produce a negative OFFSET,
        # and per_page < 1 a ZeroDivisionError in the total_pages math below.
        page = max(request.args.get('page', 1, type=int), 1)
        per_page = min(max(request.args.get('per_page', 50, type=int), 1), 200)

        with get_db().get_session() as session:
            # Build query
            query = session.query(LogEntry).join(Device)

            # Apply filters
            if device_id:
                query = query.filter(LogEntry.device_id == device_id)
            if severity:
                query = query.filter(LogEntry.severity == severity)
            if search:
                # Search in resolved message or full message.
                # This is simplified - in production you'd want full-text search.
                query = query.filter(LogEntry.full_message.contains(search))

            # Order by timestamp desc
            query = query.order_by(LogEntry.timestamp.desc())

            # Get total count for pagination
            total = query.count()

            # Apply pagination
            offset = (page - 1) * per_page
            logs = query.offset(offset).limit(per_page).all()

            # Calculate pagination info (ceiling division)
            total_pages = (total + per_page - 1) // per_page
            has_prev = page > 1
            has_next = page < total_pages

            # Get device list for filter dropdown
            devices = session.query(Device).order_by(Device.hostname).all()

            pagination = {
                'page': page,
                'per_page': per_page,
                'total': total,
                'total_pages': total_pages,
                'has_prev': has_prev,
                'has_next': has_next,
                'prev_num': page - 1 if has_prev else None,
                'next_num': page + 1 if has_next else None
            }

            return render_template('logs.html',
                                   logs=logs,
                                   pagination=pagination,
                                   devices=devices,
                                   current_device_id=device_id,
                                   current_severity=severity,
                                   current_search=search)

    except Exception as e:
        logging.error(f"Error loading logs: {e}")
        flash(f'Error loading logs: {e}', 'error')
        return render_template('logs.html',
                               logs=[],
                               pagination={},
                               devices=[],
                               current_device_id=None,
                               current_severity=None,
                               current_search='')
|
||||
|
||||
@main_bp.route('/templates')
def templates():
    """Message templates management"""
    try:
        with get_db().get_session() as session:
            # Most-used templates first.
            all_templates = (
                session.query(MessageTemplate)
                .order_by(MessageTemplate.usage_count.desc())
                .all()
            )

            # Per-category counts plus overall totals.
            category_counts = {}
            for tpl in all_templates:
                category_counts[tpl.category] = category_counts.get(tpl.category, 0) + 1

            template_stats = {
                'total': len(all_templates),
                'by_category': category_counts,
                'total_usage': sum(t.usage_count for t in all_templates),
            }

            return render_template('templates.html',
                                   templates=all_templates,
                                   template_stats=template_stats)

    except Exception as e:
        logging.error(f"Error loading templates: {e}")
        flash(f'Error loading templates: {e}', 'error')
        return render_template('templates.html', templates=[], template_stats={})
|
||||
|
||||
@main_bp.route('/stats')
def stats():
    """System statistics and analytics"""
    try:
        with get_db().get_session() as session:
            # Compression figures come from the log service, not the DB.
            compression_stats = log_service.get_compression_stats()

            # Device counts, total plus one bucket per status value.
            device_query = session.query(Device)
            device_stats = {
                'total': device_query.count(),
                'active': device_query.filter_by(status='active').count(),
                'inactive': device_query.filter_by(status='inactive').count(),
                'maintenance': device_query.filter_by(status='maintenance').count(),
            }

            # Log volume over several trailing windows.
            now = datetime.utcnow()
            periods = {
                'last_hour': now - timedelta(hours=1),
                'last_24h': now - timedelta(hours=24),
                'last_week': now - timedelta(days=7),
                'last_month': now - timedelta(days=30),
            }
            log_stats = {
                name: session.query(LogEntry).filter(LogEntry.timestamp >= start).count()
                for name, start in periods.items()
            }

            # Ansible execution outcomes.
            exec_query = session.query(AnsibleExecution)
            exec_stats = {
                'total': exec_query.count(),
                'successful': exec_query.filter_by(status='completed').count(),
                'failed': exec_query.filter_by(status='failed').count(),
                'running': exec_query.filter_by(status='running').count(),
            }

            return render_template('stats.html',
                                   compression_stats=compression_stats,
                                   device_stats=device_stats,
                                   log_stats=log_stats,
                                   exec_stats=exec_stats)

    except Exception as e:
        logging.error(f"Error loading stats: {e}")
        flash(f'Error loading stats: {e}', 'error')
        return render_template('stats.html',
                               compression_stats={},
                               device_stats={},
                               log_stats={},
                               exec_stats={})
|
||||
|
||||
# API Endpoints for Device Management
|
||||
|
||||
@main_bp.route('/api/devices/add', methods=['POST'])
def api_add_device():
    """API endpoint to add a device manually.

    Expects a JSON body with at least ``hostname``, ``device_ip`` and
    ``nume_masa``.  If a device with the same MAC (preferred) or the same
    hostname/IP already exists, that record is updated instead of rejected.

    Returns:
        201 with the new device id, 200 when an existing device was updated,
        400 on a missing/invalid body or missing required field, 500 on error.
    """
    try:
        # silent=True: return None instead of raising on a missing or
        # non-JSON body, so the client gets a 400 rather than a 500.
        data = request.get_json(silent=True)
        if not data:
            return jsonify({
                'success': False,
                'message': 'Request body must be valid JSON'
            }), 400

        # Validate required fields
        required_fields = ['hostname', 'device_ip', 'nume_masa']
        for field in required_fields:
            if not data.get(field):
                return jsonify({
                    'success': False,
                    'message': f'Missing required field: {field}'
                }), 400

        # Check if device already exists (MAC-first, then hostname/IP)
        with get_db().get_session() as session:
            # Normalize the MAC once and reuse it everywhere below.
            mac_input = data.get('mac_address', '').strip().lower() or None

            existing_device = None
            if mac_input:
                existing_device = session.query(Device).filter_by(mac_address=mac_input).first()

            if not existing_device:
                existing_device = session.query(Device).filter(
                    (Device.hostname == data['hostname']) |
                    (Device.device_ip == data['device_ip'])
                ).first()

            if existing_device:
                # If found by MAC or hostname/IP, update it rather than reject
                if mac_input and not existing_device.mac_address:
                    existing_device.mac_address = mac_input
                existing_device.hostname = data['hostname']
                existing_device.device_ip = data['device_ip']
                existing_device.nume_masa = data['nume_masa']
                # Optional fields only overwrite when supplied and non-empty.
                if data.get('device_type'):
                    existing_device.device_type = data['device_type']
                if data.get('os_version'):
                    existing_device.os_version = data['os_version']
                if data.get('location'):
                    existing_device.location = data['location']
                if data.get('status'):
                    existing_device.status = data['status']
                existing_device.config_updated_at = datetime.utcnow()
                existing_device.info_reviewed_at = datetime.utcnow()
                session.flush()
                return jsonify({
                    'success': True,
                    'message': 'Device already existed – record updated',
                    'device_id': existing_device.id
                }), 200

            # Create new device
            new_device = Device(
                hostname=data['hostname'],
                device_ip=data['device_ip'],
                nume_masa=data['nume_masa'],
                mac_address=mac_input,  # already normalized above
                device_type=data.get('device_type', 'unknown'),
                os_version=data.get('os_version'),
                status=data.get('status', 'active'),
                location=data.get('location'),
                description=data.get('description'),
                config_updated_at=datetime.utcnow(),
                info_reviewed_at=datetime.utcnow(),
                last_seen=datetime.utcnow()
            )

            session.add(new_device)
            session.commit()

            # Refresh ansible inventory (best effort — failure only logged)
            try:
                from app.services.ansible_service import AnsibleService
                ansible_service = AnsibleService()
                ansible_service.generate_dynamic_inventory()
            except Exception as e:
                logging.warning(f"Failed to update ansible inventory: {e}")

            return jsonify({
                'success': True,
                'message': 'Device added successfully',
                'device_id': new_device.id
            }), 201

    except Exception as e:
        logging.error(f"Error adding device: {e}")
        return jsonify({
            'success': False,
            'message': f'Error adding device: {str(e)}'
        }), 500
|
||||
|
||||
@main_bp.route('/api/devices/<int:device_id>/execute', methods=['POST'])
def api_execute_device_command(device_id):
    """API endpoint to execute commands on devices.

    Only ``ping`` is actually executed (one ICMP echo, 5 s timeout, argument
    list form so no shell is involved); any other command returns a
    placeholder response.

    Returns:
        JSON with the command result; 400 on a missing body/command,
        404 for an unknown device, 500 on unexpected errors.
    """
    try:
        # silent=True: tolerate a missing/invalid JSON body so the guard
        # below answers 400 instead of crashing with a 500.
        data = request.get_json(silent=True) or {}
        command = data.get('command')

        if not command:
            return jsonify({
                'success': False,
                'message': 'Command is required'
            }), 400

        with get_db().get_session() as session:
            device = session.query(Device).get(device_id)

            if not device:
                return jsonify({
                    'success': False,
                    'message': 'Device not found'
                }), 404

            # Mock implementation - in production this would execute actual commands
            if command == 'ping':
                # Simulate ping command
                import subprocess
                try:
                    result = subprocess.run(['ping', '-c', '1', device.device_ip],
                                            capture_output=True, text=True, timeout=5)
                    success = result.returncode == 0
                    output = result.stdout if success else result.stderr

                    return jsonify({
                        'success': success,
                        'command': command,
                        'output': output,
                        'device': device.hostname
                    })
                except subprocess.TimeoutExpired:
                    return jsonify({
                        'success': False,
                        'message': 'Command timed out',
                        'command': command
                    })
            else:
                # For other commands, return placeholder
                return jsonify({
                    'success': True,
                    'message': f'Command "{command}" would be executed on {device.hostname}',
                    'command': command,
                    'note': 'This is a placeholder implementation'
                })

    except Exception as e:
        logging.error(f"Error executing device command: {e}")
        return jsonify({
            'success': False,
            'message': f'Error executing command: {str(e)}'
        }), 500
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Device edit / delete (unified – includes WMT fields)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@main_bp.route('/devices/<int:device_id>/edit', methods=['GET', 'POST'])
def device_edit(device_id):
    """Edit a device record (monitoring + WMT fields)."""
    try:
        with get_db().get_session() as session:
            device = session.query(Device).filter_by(id=device_id).first()
            if not device:
                flash('Device not found.', 'error')
                return redirect(url_for('main.devices'))

            if request.method == 'POST':
                form = request.form

                # Identity fields keep their previous value when left blank.
                device.hostname = form.get('hostname', '').strip() or device.hostname
                device.device_ip = form.get('device_ip', '').strip() or device.device_ip
                device.nume_masa = form.get('nume_masa', '').strip() or device.nume_masa

                new_mac = form.get('mac_address', '').strip().lower() or None
                # Only assign MAC if no other device owns it
                if new_mac and new_mac != device.mac_address:
                    owner = session.query(Device).filter(
                        Device.mac_address == new_mac, Device.id != device_id
                    ).first()
                    if owner:
                        flash(f'MAC {new_mac} is already assigned to {owner.hostname}.', 'error')
                        return render_template('device_edit.html', device=device)
                    device.mac_address = new_mac

                device.status = form.get('status', 'active')
                device.location = form.get('location', '').strip() or None
                device.device_type = form.get('device_type', '').strip() or 'unknown'
                device.description = form.get('description', '').strip() or None
                device.os_version = form.get('os_version', '').strip() or None
                device.config_updated_at = datetime.utcnow()
                device.info_reviewed_at = datetime.utcnow()
                flash('Device updated.', 'success')
                return redirect(url_for('main.devices'))

            return render_template(
                'device_edit.html',
                device=device,
                breadcrumbs=[
                    {'url': url_for('main.dashboard'), 'title': 'Dashboard'},
                    {'url': url_for('main.devices'), 'title': 'Devices'},
                    {'url': '#', 'title': f'Edit {device.hostname}'},
                ],
            )
    except Exception as e:
        logging.error(f'Device edit error: {e}')
        flash(f'Error: {e}', 'error')
        return redirect(url_for('main.devices'))
|
||||
|
||||
|
||||
@main_bp.route('/devices/<int:device_id>/delete', methods=['POST'])
def device_delete(device_id):
    """Delete a device and all its logs."""
    try:
        with get_db().get_session() as session:
            target = session.query(Device).filter_by(id=device_id).first()
            if target is None:
                flash('Device not found.', 'error')
            else:
                # Capture the hostname before the row is gone.
                hostname = target.hostname
                session.delete(target)
                flash(f'Device {hostname} deleted.', 'success')
    except Exception as e:
        logging.error(f'Device delete error: {e}')
        flash(f'Error deleting device: {e}', 'error')
    return redirect(url_for('main.devices'))
|
||||
|
||||
|
||||
# ── Admin page ────────────────────────────────────────────────────────
|
||||
|
||||
# Location of the generated Ansible dynamic inventory file; read by the admin
# page stats and reset by /admin/clear/inventory.
INVENTORY_FILE = Path('ansible/inventory/dynamic_inventory.yaml')
|
||||
|
||||
@main_bp.route('/admin')
def admin():
    """Admin / maintenance page with DB and inventory stats."""
    stats = {}

    # Row counts per table; a DB failure is logged and the page still renders
    # with whatever stats were gathered.
    try:
        with get_db().get_session() as session:
            count_columns = {
                'devices': Device.id,
                'logs': LogEntry.id,
                'templates': MessageTemplate.id,
                'inventory_groups': InventoryGroup.id,
                'wmt_requests': WMTUpdateRequest.id,
            }
            for key, column in count_columns.items():
                stats[key] = session.query(func.count(column)).scalar()
    except Exception as e:
        logging.error(f'Admin stats error: {e}')

    # Host and group counts straight from the inventory YAML file.
    try:
        if INVENTORY_FILE.exists():
            inventory = yaml.safe_load(INVENTORY_FILE.read_text()) or {}
            children = inventory.get('all', {}).get('children', {})
            hosts = set()
            for group in children.values():
                hosts.update((group or {}).get('hosts', {}).keys())
            stats['inventory_hosts'] = len(hosts)
            stats['inventory_groups_yaml'] = len(children)
        else:
            stats['inventory_hosts'] = 0
            stats['inventory_groups_yaml'] = 0
    except Exception as e:
        logging.error(f'Admin inventory stats error: {e}')
        stats['inventory_hosts'] = '?'
        stats['inventory_groups_yaml'] = '?'

    return render_template('admin.html', stats=stats)
|
||||
|
||||
|
||||
@main_bp.route('/admin/clear/logs', methods=['POST'])
def admin_clear_logs():
    """Delete all log entries from the database."""
    try:
        with get_db().get_session() as session:
            removed = session.query(LogEntry).delete()
            session.commit()
            return jsonify({'success': True, 'deleted': removed})
    except Exception as e:
        logging.error(f'Admin clear logs error: {e}')
        return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
|
||||
@main_bp.route('/admin/clear/devices', methods=['POST'])
def admin_clear_devices():
    """Delete all devices (and their log entries) from the database."""
    try:
        with get_db().get_session() as session:
            # Association rows first, then logs, then the devices themselves.
            session.execute(text('DELETE FROM device_inventory_groups'))
            deleted_logs = session.query(LogEntry).delete()
            deleted_devices = session.query(Device).delete()
            session.commit()
            return jsonify({
                'success': True,
                'deleted_devices': deleted_devices,
                'deleted_logs': deleted_logs,
            })
    except Exception as e:
        logging.error(f'Admin clear devices error: {e}')
        return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
|
||||
@main_bp.route('/admin/clear/inventory', methods=['POST'])
def admin_clear_inventory():
    """Reset the Ansible inventory file to a completely empty state."""
    try:
        # Write an empty-but-valid dynamic inventory skeleton.
        blank_inventory = {'_meta': {'hostvars': {}}, 'all': {'hosts': {}, 'children': {}}}
        INVENTORY_FILE.parent.mkdir(parents=True, exist_ok=True)
        INVENTORY_FILE.write_text(yaml.dump(blank_inventory, default_flow_style=False))

        # Also clear inventory_groups table
        with get_db().get_session() as session:
            session.execute(text('DELETE FROM device_inventory_groups'))
            removed_groups = session.query(InventoryGroup).delete()
            session.commit()
            return jsonify({'success': True, 'groups_deleted': removed_groups})
    except Exception as e:
        logging.error(f'Admin clear inventory error: {e}')
        return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
|
||||
@main_bp.route('/admin/clear/wmt', methods=['POST'])
def admin_clear_wmt():
    """Delete all WMT update requests."""
    try:
        with get_db().get_session() as session:
            removed = session.query(WMTUpdateRequest).delete()
            session.commit()
            return jsonify({'success': True, 'deleted': removed})
    except Exception as e:
        logging.error(f'Admin clear WMT requests error: {e}')
        return jsonify({'success': False, 'error': str(e)}), 500
|
||||
204
app/web/wmt.py
Normal file
204
app/web/wmt.py
Normal file
@@ -0,0 +1,204 @@
|
||||
"""
|
||||
WMT management web routes – global settings, device registry, update requests.
|
||||
"""
|
||||
from flask import Blueprint, render_template, request, redirect, url_for, flash
|
||||
from datetime import datetime
|
||||
from app.models import WMTGlobalConfig, Device, WMTUpdateRequest
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
wmt_web_bp = Blueprint('wmt_web', __name__, url_prefix='/wmt')
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _get_or_create_global_config(session):
    """Return the singleton WMTGlobalConfig row, creating it on first use."""
    config = session.query(WMTGlobalConfig).first()
    if config is not None:
        return config
    config = WMTGlobalConfig()
    session.add(config)
    session.flush()  # assign a primary key before callers read it
    return config
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Dashboard
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@wmt_web_bp.route('/')
def index():
    """WMT management dashboard."""
    try:
        with get_db().get_session() as session:
            cfg = _get_or_create_global_config(session)
            # Only devices with a known MAC address participate in WMT.
            wmt_devices = (
                session.query(Device)
                .filter(Device.mac_address.isnot(None))
                .order_by(Device.nume_masa)
                .all()
            )
            pending = session.query(WMTUpdateRequest).filter_by(status='pending').count()
            latest_requests = (
                session.query(WMTUpdateRequest)
                .order_by(WMTUpdateRequest.submitted_at.desc())
                .limit(5)
                .all()
            )
            return render_template(
                'wmt/index.html',
                global_cfg=cfg,
                devices=wmt_devices,
                pending_count=pending,
                recent_requests=latest_requests,
                breadcrumbs=[{'url': url_for('wmt_web.index'), 'title': 'WMT Management'}],
            )
    except Exception as e:
        logger.error(f'WMT dashboard error: {e}')
        flash(f'Error loading dashboard: {e}', 'error')
        return render_template('wmt/index.html', global_cfg=None, devices=[],
                               pending_count=0, recent_requests=[])
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Global settings
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@wmt_web_bp.route('/settings', methods=['GET', 'POST'])
def settings():
    """View and edit global WMT configuration."""
    try:
        with get_db().get_session() as session:
            cfg = _get_or_create_global_config(session)

            if request.method == 'POST':
                # Stripped-string fields (empty string is stored as-is).
                for field in ('chrome_url', 'chrome_insecure_origin',
                              'card_api_base_url', 'server_log_url',
                              'internet_check_host', 'update_host', 'update_user'):
                    setattr(cfg, field, request.form.get(field, '').strip())
                # Nullable fields: an empty value becomes NULL.
                for field in ('chrome_local_url', 'notes'):
                    setattr(cfg, field, request.form.get(field, '').strip() or None)
                cfg.updated_at = datetime.utcnow()
                cfg.updated_by = 'admin'
                flash('Global settings saved.', 'success')
                return redirect(url_for('wmt_web.settings'))

            return render_template(
                'wmt/settings.html',
                cfg=cfg,
                breadcrumbs=[
                    {'url': url_for('wmt_web.index'), 'title': 'WMT Management'},
                    {'url': url_for('wmt_web.settings'), 'title': 'Global Settings'},
                ],
            )
    except Exception as e:
        logger.error(f'WMT settings error: {e}')
        flash(f'Error: {e}', 'error')
        return redirect(url_for('wmt_web.index'))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Update requests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@wmt_web_bp.route('/requests')
def update_requests():
    """List all device update requests."""
    status_filter = request.args.get('status', 'pending')
    try:
        with get_db().get_session() as session:
            base_query = session.query(WMTUpdateRequest)
            if status_filter != 'all':
                base_query = base_query.filter_by(status=status_filter)
            matching = base_query.order_by(WMTUpdateRequest.submitted_at.desc()).all()
            pending = session.query(WMTUpdateRequest).filter_by(status='pending').count()

            return render_template(
                'wmt/requests.html',
                requests=matching,
                status_filter=status_filter,
                pending_count=pending,
                breadcrumbs=[
                    {'url': url_for('wmt_web.index'), 'title': 'WMT Management'},
                    {'url': url_for('wmt_web.update_requests'), 'title': 'Update Requests'},
                ],
            )
    except Exception as e:
        logger.error(f'WMT requests list error: {e}')
        flash(f'Error: {e}', 'error')
        return redirect(url_for('wmt_web.index'))
|
||||
|
||||
|
||||
@wmt_web_bp.route('/requests/<int:req_id>/accept', methods=['POST'])
def accept_request(req_id):
    """Accept an update request: apply proposed values to WMTDevice."""
    try:
        with get_db().get_session() as session:
            update_req = session.query(WMTUpdateRequest).filter_by(id=req_id).first()
            if update_req is None:
                flash('Request not found.', 'error')
                return redirect(url_for('wmt_web.update_requests'))

            # Find or create the Device the request refers to (keyed by MAC).
            device = session.query(Device).filter_by(mac_address=update_req.mac_address).first()
            if device is None:
                device = Device(
                    mac_address=update_req.mac_address,
                    hostname=update_req.proposed_hostname or '',
                    device_ip=update_req.proposed_device_ip or '',
                    nume_masa=update_req.proposed_device_name or '',
                )
                session.add(device)
                session.flush()
                update_req.device_id = device.id

            # Copy over every proposed value that was actually supplied.
            if update_req.proposed_device_name is not None:
                device.nume_masa = update_req.proposed_device_name
            if update_req.proposed_hostname is not None:
                device.hostname = update_req.proposed_hostname
            if update_req.proposed_device_ip is not None:
                device.device_ip = update_req.proposed_device_ip
            device.config_updated_at = datetime.utcnow()
            device.info_reviewed_at = datetime.utcnow()  # admin reviewed → push timestamp to devices

            # Mark request as accepted
            update_req.status = 'accepted'
            update_req.admin_reviewed_at = datetime.utcnow()
            update_req.admin_notes = request.form.get('admin_notes', '').strip() or None

            flash('Request accepted and device record updated.', 'success')
    except Exception as e:
        logger.error(f'WMT accept request error: {e}')
        flash(f'Error accepting request: {e}', 'error')
    return redirect(url_for('wmt_web.update_requests'))
|
||||
|
||||
|
||||
@wmt_web_bp.route('/requests/<int:req_id>/reject', methods=['POST'])
def reject_request(req_id):
    """Reject an update request (updates reviewed_at so WMT client won't re-submit)."""
    try:
        with get_db().get_session() as session:
            update_req = session.query(WMTUpdateRequest).filter_by(id=req_id).first()
            if update_req is None:
                flash('Request not found.', 'error')
                return redirect(url_for('wmt_web.update_requests'))

            update_req.status = 'rejected'
            update_req.admin_reviewed_at = datetime.utcnow()
            update_req.admin_notes = request.form.get('admin_notes', '').strip() or None

            # Update device info_reviewed_at even though data didn't change –
            # this signals to the WMT client that the server has reviewed the
            # state so it won't keep re-submitting the same request.
            if update_req.device_id:
                linked_device = session.query(Device).filter_by(id=update_req.device_id).first()
                if linked_device:
                    linked_device.info_reviewed_at = datetime.utcnow()

            flash('Request rejected.', 'warning')
    except Exception as e:
        logger.error(f'WMT reject request error: {e}')
        flash(f'Error rejecting request: {e}', 'error')
    return redirect(url_for('wmt_web.update_requests'))
|
||||
Reference in New Issue
Block a user