Initial commit — Server_Monitorizare_v2
This commit is contained in:
0
app/api/__init__.py
Normal file
0
app/api/__init__.py
Normal file
454
app/api/ansible.py
Normal file
454
app/api/ansible.py
Normal file
@@ -0,0 +1,454 @@
|
||||
"""
|
||||
Ansible and SSH management API endpoints
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
from app.services.ansible_service import AnsibleService
|
||||
from app.models import Device, AnsibleExecution
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
# Create blueprint
|
||||
ansible_bp = Blueprint('ansible', __name__, url_prefix='/api/ansible')
|
||||
|
||||
# Initialize service
|
||||
ansible_service = AnsibleService()
|
||||
|
||||
@ansible_bp.route('/inventory', methods=['GET'])
def get_inventory():
    """Return the current Ansible inventory in structured form."""
    try:
        inventory = ansible_service.get_inventory_data()
    except Exception as exc:
        logging.error(f"Error getting inventory: {exc}")
        return jsonify({'error': str(exc), 'success': False}), 500
    return jsonify({'success': True, 'inventory': inventory})
|
||||
|
||||
@ansible_bp.route('/inventory/raw', methods=['GET'])
def get_inventory_raw():
    """Return the inventory as raw YAML text."""
    try:
        inventory = ansible_service.get_inventory_data()
        raw = inventory.get('raw_yaml', '')
        return jsonify({'success': True, 'yaml': raw})
    except Exception as exc:
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/sync', methods=['POST'])
def sync_inventory():
    """Sync all active app devices into the monitoring_devices inventory group."""
    try:
        outcome = ansible_service.sync_devices_to_inventory()
    except Exception as exc:
        logging.error(f"Error syncing inventory: {exc}")
        return jsonify({'error': str(exc), 'success': False}), 500
    return jsonify(outcome), (200 if outcome.get('success') else 400)
|
||||
|
||||
@ansible_bp.route('/inventory/group/add', methods=['POST'])
def add_inventory_group():
    """Create a new group in the Ansible inventory."""
    try:
        payload = request.get_json() or {}
        name = payload.get('group_name', '').strip()
        if not name:
            return jsonify({'success': False, 'error': 'group_name is required'}), 400
        outcome = ansible_service.add_group_to_inventory(name)
        status = 200 if outcome.get('success') else 400
        return jsonify(outcome), status
    except Exception as exc:
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/group/remove', methods=['POST'])
def remove_inventory_group():
    """Delete a group from the Ansible inventory."""
    try:
        payload = request.get_json() or {}
        name = payload.get('group_name', '').strip()
        if not name:
            return jsonify({'success': False, 'error': 'group_name is required'}), 400
        outcome = ansible_service.remove_group_from_inventory(name)
        status = 200 if outcome.get('success') else 400
        return jsonify(outcome), status
    except Exception as exc:
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/host/add', methods=['POST'])
def add_inventory_host():
    """Add a host to an inventory group.

    Expected JSON: group, hostname, ip (required); ssh_user (default "pi"),
    ssh_port (default 22), use_key (default True), password (optional).

    Returns 400 for missing/invalid fields, 500 on unexpected errors,
    otherwise the service result with 200/400 depending on its success flag.
    """
    try:
        data = request.get_json()
        if not data:
            return jsonify({'success': False, 'error': 'JSON body required'}), 400

        group = data.get('group', '').strip()
        hostname = data.get('hostname', '').strip()
        ip = data.get('ip', '').strip()
        ssh_user = data.get('ssh_user', 'pi').strip() or 'pi'
        # Validate ssh_port explicitly: a malformed value is a client error
        # (400), not a server error — previously int() raised and fell through
        # to the generic 500 handler.
        try:
            ssh_port = int(data.get('ssh_port', 22))
        except (TypeError, ValueError):
            return jsonify({'success': False, 'error': 'ssh_port must be an integer'}), 400
        use_key = bool(data.get('use_key', True))
        password = data.get('password', None)

        if not group or not hostname or not ip:
            return jsonify({'success': False, 'error': 'group, hostname and ip are required'}), 400

        result = ansible_service.add_host_to_inventory(
            group=group, hostname=hostname, ip=ip,
            ssh_user=ssh_user, ssh_port=ssh_port,
            use_key=use_key, password=password
        )
        return jsonify(result), 200 if result.get('success') else 400
    except Exception as e:
        return jsonify({'error': str(e), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/host/remove', methods=['POST'])
def remove_inventory_host():
    """Delete a host entry from an inventory group."""
    try:
        payload = request.get_json() or {}
        group = payload.get('group', '').strip()
        hostname = payload.get('hostname', '').strip()
        if not group or not hostname:
            return jsonify({'success': False, 'error': 'group and hostname are required'}), 400
        outcome = ansible_service.remove_host_from_inventory(group=group, hostname=hostname)
        status = 200 if outcome.get('success') else 400
        return jsonify(outcome), status
    except Exception as exc:
        return jsonify({'error': str(exc), 'success': False}), 500
|
||||
|
||||
@ansible_bp.route('/inventory/refresh', methods=['POST'])
def refresh_inventory():
    """Refresh Ansible inventory from database (legacy alias for /sync)."""
    try:
        outcome = ansible_service.sync_devices_to_inventory()
    except Exception as exc:
        logging.error(f"Error refreshing inventory: {exc}")
        return jsonify({'error': str(exc), 'success': False}), 500
    status = 200 if outcome.get('success') else 400
    return jsonify(outcome), status
|
||||
|
||||
@ansible_bp.route('/playbooks', methods=['GET'])
def list_playbooks():
    """List playbook files on disk plus the built-in playbook catalog."""
    try:
        found = []
        playbook_dir = ansible_service.playbook_dir
        if playbook_dir.exists():
            found = [
                {
                    'name': entry.stem,
                    'filename': entry.name,
                    'path': str(entry),
                    'modified': datetime.fromtimestamp(entry.stat().st_mtime).isoformat(),
                }
                for entry in playbook_dir.glob('*.yml')
            ]

        # Built-in playbooks are always advertised, even when no files exist.
        builtin = [
            {
                'name': 'update_devices',
                'description': 'Update all packages on monitoring devices',
                'builtin': True,
            },
            {
                'name': 'restart_service',
                'description': 'Restart monitoring services on devices',
                'builtin': True,
            },
        ]

        return jsonify({
            'success': True,
            'playbooks': found,
            'builtin_playbooks': builtin,
        })
    except Exception as exc:
        logging.error(f"Error listing playbooks: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False
        }), 500
|
||||
|
||||
@ansible_bp.route('/execute', methods=['POST'])
def execute_playbook():
    """
    Execute Ansible playbook

    Expected JSON:
    {
        "playbook": "update_devices",
        "limit_hosts": ["device-01", "device-02"], # optional
        "extra_vars": {"key": "value"}, # optional
        "create_builtin": True # optional, create builtin playbook if needed
    }

    Returns 200 with execution_id/log_file when the run starts, 400 when the
    playbook name is missing, 500 when the service reports a failure.
    """
    try:
        data = request.get_json()

        if not data or not data.get('playbook'):
            return jsonify({
                'error': 'Playbook name is required',
                'success': False
            }), 400

        playbook_name = data['playbook']
        limit_hosts = data.get('limit_hosts')  # None means "all hosts"
        extra_vars = data.get('extra_vars', {})
        create_builtin = data.get('create_builtin', True)

        # Create builtin playbooks if they don't exist — must happen before
        # execute_playbook() so the file is present on disk for the run
        if create_builtin:
            if playbook_name == 'update_devices':
                ansible_service.create_update_playbook()
            elif playbook_name == 'restart_service':
                ansible_service.create_restart_service_playbook()

        # Add controller IP to extra vars for callbacks
        # NOTE(review): request.host may include a port ("1.2.3.4:5000") —
        # confirm the playbook callback URLs expect that form.
        extra_vars['ansible_controller_ip'] = request.host

        # Execute playbook; the service hands back an execution id and a log
        # file path immediately (presumably running in the background — see
        # AnsibleService.execute_playbook)
        result = ansible_service.execute_playbook(
            playbook_name=playbook_name,
            limit_hosts=limit_hosts,
            extra_vars=extra_vars
        )

        if result['success']:
            return jsonify({
                'success': True,
                'message': 'Playbook execution started',
                'execution_id': result['execution_id'],
                'log_file': result['log_file']
            })
        else:
            # Service-level failure (e.g. playbook could not be started)
            return jsonify(result), 500

    except Exception as e:
        logging.error(f"Error executing playbook: {e}")
        return jsonify({
            'error': str(e),
            'success': False
        }), 500
|
||||
|
||||
@ansible_bp.route('/executions', methods=['GET'])
def get_executions():
    """Get Ansible execution history.

    Query parameters:
        limit: maximum number of results (default 50, capped at 200)

    Returns 400 for a non-integer limit, 500 on unexpected errors.
    """
    try:
        # Validate limit explicitly: previously int() raised ValueError on a
        # malformed value and fell through to the generic 500 handler.
        try:
            limit = min(int(request.args.get('limit', 50)), 200)
        except ValueError:
            return jsonify({
                'error': 'limit must be an integer',
                'success': False
            }), 400

        executions = ansible_service.get_execution_history(limit=limit)

        return jsonify({
            'success': True,
            'executions': executions
        })
    except Exception as e:
        logging.error(f"Error getting executions: {e}")
        return jsonify({
            'error': str(e),
            'success': False
        }), 500
|
||||
|
||||
|
||||
@ansible_bp.route('/executions/<execution_id>/live', methods=['GET'])
def get_execution_live(execution_id):
    """Poll current log + status for a running or finished execution (UUID)."""
    result = ansible_service.get_live_execution(execution_id)
    status = 200 if result.get('success') else 404
    return jsonify(result), status
|
||||
|
||||
@ansible_bp.route('/executions/<int:execution_id>', methods=['GET'])
def get_execution_details(execution_id):
    """Get detailed execution information.

    Path parameter:
        execution_id: integer primary key of the AnsibleExecution row
        (the /executions/<uuid>/live endpoint handles UUID lookups).

    Returns the full execution record; when the on-disk Ansible log still
    exists its contents are included as 'full_log'. 404 if the id is unknown.
    """
    try:
        with get_db().get_session() as session:
            # NOTE(review): Query.get() is legacy SQLAlchemy API — still
            # works, but Session.get(AnsibleExecution, id) is the 1.4+ form.
            execution = session.query(AnsibleExecution).get(execution_id)

            if not execution:
                return jsonify({
                    'error': 'Execution not found',
                    'success': False
                }), 404

            execution_data = {
                'id': execution.id,
                'playbook_name': execution.playbook_name,
                # target_devices is stored as a JSON string; decode for the client
                'target_devices': json.loads(execution.target_devices) if execution.target_devices else [],
                'command_line': execution.command_line,
                'start_time': execution.start_time.isoformat() if execution.start_time else None,
                'end_time': execution.end_time.isoformat() if execution.end_time else None,
                'status': execution.status,
                'exit_code': execution.exit_code,
                'stdout_log': execution.stdout_log,
                'stderr_log': execution.stderr_log,
                'successful_hosts': execution.successful_hosts,
                'failed_hosts': execution.failed_hosts,
                'unreachable_hosts': execution.unreachable_hosts
            }

            # Read log file if it exists (it may have been rotated or deleted
            # since the run finished)
            if execution.ansible_log_file and os.path.exists(execution.ansible_log_file):
                with open(execution.ansible_log_file, 'r') as f:
                    execution_data['full_log'] = f.read()

            return jsonify({
                'success': True,
                'execution': execution_data
            })

    except Exception as e:
        logging.error(f"Error getting execution details: {e}")
        return jsonify({
            'error': str(e),
            'success': False
        }), 500
|
||||
|
||||
@ansible_bp.route('/ssh/test', methods=['POST'])
def test_ssh_connectivity():
    """
    Test SSH connectivity to devices

    Expected JSON:
    {
        "device_ips": ["192.168.1.100", "192.168.1.101"],
        "username": "pi" # optional, defaults to "pi"
    }

    A single IP is tested with test_ssh_connectivity(); multiple IPs go
    through bulk_ssh_test(). Both return a per-IP results mapping; the bulk
    path additionally includes a success/failure summary.
    """
    try:
        data = request.get_json()

        if not data or not data.get('device_ips'):
            return jsonify({
                'error': 'device_ips list is required',
                'success': False
            }), 400

        device_ips = data['device_ips']
        username = data.get('username', 'pi')

        # Test connectivity
        if len(device_ips) == 1:
            # Single device test
            result = ansible_service.test_ssh_connectivity(device_ips[0], username)
            return jsonify({
                'success': True,
                'results': {device_ips[0]: result}
            })
        else:
            # Bulk test
            # NOTE(review): the bulk path does not pass `username` to
            # bulk_ssh_test — confirm its default matches the single-device
            # behavior, otherwise a custom username is silently ignored here.
            results = ansible_service.bulk_ssh_test(device_ips)

            # Summary counts derived from each per-IP result's success flag
            successful = sum(1 for r in results.values() if r.get('success'))
            total = len(results)

            return jsonify({
                'success': True,
                'results': results,
                'summary': {
                    'successful': successful,
                    'failed': total - successful,
                    'total': total
                }
            })

    except Exception as e:
        logging.error(f"Error testing SSH connectivity: {e}")
        return jsonify({
            'error': str(e),
            'success': False
        }), 500
|
||||
|
||||
@ansible_bp.route('/ssh/keys/setup', methods=['POST'])
def setup_ssh_keys():
    """Generate/prepare the SSH key pair used for Ansible authentication."""
    try:
        outcome = ansible_service.setup_ssh_keys()
    except Exception as exc:
        logging.error(f"Error setting up SSH keys: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False
        }), 500
    return jsonify(outcome)
|
||||
|
||||
@ansible_bp.route('/ssh/keys/public', methods=['GET'])
def get_public_key():
    """Return the controller's public key so it can be installed on devices."""
    try:
        pub_path = ansible_service.ssh_key_path.with_suffix('.pub')

        if not pub_path.exists():
            return jsonify({
                'error': 'Public key not found. Run SSH setup first.',
                'success': False
            }), 404

        with open(pub_path, 'r') as fh:
            key_text = fh.read().strip()

        return jsonify({
            'success': True,
            'public_key': key_text,
            'key_path': str(pub_path),
        })
    except Exception as exc:
        logging.error(f"Error getting public key: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False
        }), 500
|
||||
|
||||
@ansible_bp.route('/devices/status', methods=['GET'])
def get_devices_status():
    """Return every device record in a form usable for Ansible operations."""
    try:
        with get_db().get_session() as session:
            device_data = [
                {
                    'id': dev.id,
                    'hostname': dev.hostname,
                    'device_ip': dev.device_ip,
                    'nume_masa': dev.nume_masa,
                    'status': dev.status,
                    'last_seen': dev.last_seen.isoformat() if dev.last_seen else None,
                    'device_type': dev.device_type,
                    'os_version': dev.os_version,
                    'location': dev.location,
                }
                for dev in session.query(Device).all()
            ]

            return jsonify({
                'success': True,
                'devices': device_data,
                'total_count': len(device_data),
            })

    except Exception as exc:
        logging.error(f"Error getting devices status: {exc}")
        return jsonify({
            'error': str(exc),
            'success': False
        }), 500
|
||||
|
||||
# Callback endpoints for Ansible playbooks
|
||||
# Callback endpoints for Ansible playbooks
@ansible_bp.route('/callback/update_complete', methods=['POST'])
def update_complete_callback():
    """Callback endpoint for update completion.

    Invoked by the update playbook; the payload is logged for auditing.
    """
    try:
        # silent=True + `or {}`: a missing or non-JSON body becomes an empty
        # dict instead of None/abort — previously data.get() raised
        # AttributeError and the callback answered 500 on malformed payloads.
        data = request.get_json(silent=True) or {}
        logging.info(f"Update completed for {data.get('hostname')}: {data}")

        # You could update device status, send notifications, etc.
        return jsonify({'success': True})
    except Exception as e:
        logging.error(f"Error in update callback: {e}")
        return jsonify({'error': str(e)}), 500
|
||||
|
||||
@ansible_bp.route('/callback/service_restarted', methods=['POST'])
def service_restart_callback():
    """Callback endpoint for service restart completion.

    Invoked by the restart_service playbook; the payload is logged.
    """
    try:
        # silent=True + `or {}`: tolerate missing/non-JSON bodies — previously
        # data.get() raised AttributeError and the callback answered 500.
        data = request.get_json(silent=True) or {}
        logging.info(f"Service restarted for {data.get('hostname')}: {data}")

        return jsonify({'success': True})
    except Exception as e:
        logging.error(f"Error in service restart callback: {e}")
        return jsonify({'error': str(e)}), 500
|
||||
373
app/api/logs.py
Normal file
373
app/api/logs.py
Normal file
@@ -0,0 +1,373 @@
|
||||
"""
|
||||
Enhanced API endpoints for logs with compression and file support
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
from werkzeug.utils import secure_filename
|
||||
import os
|
||||
import hashlib
|
||||
from datetime import datetime
|
||||
from app.services.log_service import LogCompressionService
|
||||
from app.services.file_service import FileUploadService
|
||||
from app.models import Device, LogEntry, FileUpload
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
# Create blueprint
|
||||
logs_bp = Blueprint('logs', __name__, url_prefix='/api/logs')
|
||||
|
||||
# Initialize services
|
||||
log_service = LogCompressionService()
|
||||
file_service = FileUploadService()
|
||||
|
||||
@logs_bp.route('/', methods=['POST'])
@logs_bp.route('/submit', methods=['POST'])
def submit_log():
    """
    Enhanced log submission with compression support

    Expected JSON:
    {
        "hostname": "device-01",
        "device_ip": "192.168.1.100",
        "nume_masa": "Masa-01",
        "log_message": "Card detected: ABC123",
        "severity": "info", # optional: debug, info, warning, error, critical
        "source_file": "/path/to/logfile.log", # optional
        "metadata": {} # optional additional metadata
    }

    Returns 201 with compression info on success, 400 for validation errors,
    500 when the compression service fails.
    """
    try:
        # Validate content type
        if not request.is_json:
            return jsonify({
                'error': 'Content-Type must be application/json',
                'success': False
            }), 400

        data = request.get_json()

        # Validate required fields
        required_fields = ['hostname', 'device_ip', 'nume_masa', 'log_message']
        missing_fields = [field for field in required_fields if not data.get(field)]

        if missing_fields:
            return jsonify({
                'error': f'Missing required fields: {", ".join(missing_fields)}',
                'success': False
            }), 400

        # Prepare device info; falls back to metadata for the optional fields
        # so older clients that nest them there keep working
        device_info = {
            'hostname': data['hostname'],
            'device_ip': data['device_ip'],
            'nume_masa': data['nume_masa'],
            # Optional – clients can send these to keep device records up to date
            'device_type': data.get('device_type') or (data.get('metadata') or {}).get('device_type'),
            'os_version': data.get('os_version') or (data.get('metadata') or {}).get('os_version'),
            'location': data.get('location') or (data.get('metadata') or {}).get('location'),
            'mac_address': data.get('mac_address') or (data.get('metadata') or {}).get('mac_address'),
        }

        # Process log with compression (service returns log_id, device_id and
        # a compression-details dict)
        result = log_service.process_log_message(
            device_info=device_info,
            message=data['log_message'],
            severity=data.get('severity', 'info')
        )

        if result['success']:
            # Prepare response with compression info
            response_data = {
                'success': True,
                'message': 'Log processed successfully',
                'log_id': result['log_id'],
                'device_id': result['device_id'],
                'compression_info': result['compression']
            }

            # Add alias info if template was used
            if result['compression'].get('used_template'):
                response_data['template_alias'] = result['compression']['template_alias']

            # For clients: suggest using alias in future requests
            if result['compression'].get('new_template'):
                response_data['suggestion'] = f"For similar messages, you can use template alias: {result['compression']['template_alias']}"

            return jsonify(response_data), 201
        else:
            return jsonify(result), 500

    except Exception as e:
        logging.error(f"Error in submit_log: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/template/<alias>', methods=['POST'])
def submit_templated_log(alias):
    """
    Submit log using template alias (smaller payload)

    Expected JSON:
    {
        "alias": "CD001",
        "variables": {"card_id": "ABC123"},
        "device_info": {
            "hostname": "device-01",
            "device_ip": "192.168.1.100",
            "nume_masa": "Masa-01"
        }
    }

    Args:
        alias: template alias taken from the URL path.

    Fix: Flask passes URL variables as keyword arguments, so the view must
    accept `alias` — the previous zero-argument signature raised TypeError on
    every request (request.view_args never got a chance to run).
    """
    try:
        data = request.get_json()

        # Validate required fields
        if not data.get('device_info'):
            return jsonify({
                'error': 'device_info is required',
                'success': False
            }), 400

        # Get template message by expanding the alias with the supplied variables
        variables = data.get('variables', {})
        full_message = log_service.get_message_by_alias(alias, variables)

        if not full_message:
            return jsonify({
                'error': f'Template alias {alias} not found',
                'success': False
            }), 404

        # Process as regular log
        result = log_service.process_log_message(
            device_info=data['device_info'],
            message=full_message,
            severity=data.get('severity', 'info')
        )

        return jsonify(result), 201 if result['success'] else 500

    except Exception as e:
        logging.error(f"Error in submit_templated_log: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/file', methods=['POST'])
def upload_log_file():
    """
    Upload log file for processing

    Expects multipart/form-data with:
    - file: log file
    - device_info: JSON string with device information

    Returns 201 on success, 400 for validation/parse errors, 500 otherwise.
    """
    try:
        # Check if file was uploaded
        if 'file' not in request.files:
            return jsonify({
                'error': 'No file uploaded',
                'success': False
            }), 400

        file = request.files['file']
        if file.filename == '':
            return jsonify({
                'error': 'No file selected',
                'success': False
            }), 400

        # Get device info
        device_info_str = request.form.get('device_info')
        if not device_info_str:
            return jsonify({
                'error': 'device_info is required',
                'success': False
            }), 400

        import json
        # Malformed JSON in the form field is a client error — report 400
        # instead of falling through to the generic 500 handler.
        try:
            device_info = json.loads(device_info_str)
        except json.JSONDecodeError:
            return jsonify({
                'error': 'device_info must be valid JSON',
                'success': False
            }), 400

        # Process file upload
        result = file_service.process_uploaded_file(file, device_info)

        return jsonify(result), 201 if result['success'] else 500

    except Exception as e:
        logging.error(f"Error in upload_log_file: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/query', methods=['GET'])
def query_logs():
    """
    Query logs with filters and pagination

    Query parameters:
    - device_id: Filter by device ID
    - hostname: Filter by hostname
    - severity: Filter by severity level
    - start_time: Start time (ISO format)
    - end_time: End time (ISO format)
    - limit: Number of results (default 100, max 1000)
    - offset: Offset for pagination (default 0)
    - include_template: Include resolved template messages (default true)

    Returns 400 for malformed filter/pagination values, 500 otherwise.
    """
    try:
        with get_db().get_session() as session:
            # Build query
            query = session.query(LogEntry).join(Device)

            # Parse and apply client-supplied filters; malformed numeric or
            # ISO-date values are client errors (400), not server errors —
            # previously int()/fromisoformat() raised and produced a 500.
            try:
                if request.args.get('device_id'):
                    query = query.filter(LogEntry.device_id == int(request.args.get('device_id')))

                if request.args.get('hostname'):
                    query = query.filter(Device.hostname == request.args.get('hostname'))

                if request.args.get('severity'):
                    query = query.filter(LogEntry.severity == request.args.get('severity'))

                if request.args.get('start_time'):
                    start_time = datetime.fromisoformat(request.args.get('start_time'))
                    query = query.filter(LogEntry.timestamp >= start_time)

                if request.args.get('end_time'):
                    end_time = datetime.fromisoformat(request.args.get('end_time'))
                    query = query.filter(LogEntry.timestamp <= end_time)

                # Pagination
                limit = min(int(request.args.get('limit', 100)), 1000)  # Max 1000
                offset = int(request.args.get('offset', 0))
            except ValueError as ve:
                return jsonify({
                    'error': f'Invalid query parameter: {ve}',
                    'success': False
                }), 400

            # Order by timestamp descending
            query = query.order_by(LogEntry.timestamp.desc())

            # Get total count before pagination so has_more can be computed
            total_count = query.count()

            # Apply pagination
            logs = query.limit(limit).offset(offset).all()

            # Format response
            include_template = request.args.get('include_template', 'true').lower() == 'true'
            log_data = []

            for log in logs:
                log_item = {
                    'id': log.id,
                    'device': {
                        'id': log.device.id,
                        'hostname': log.device.hostname,
                        'device_ip': log.device.device_ip,
                        'nume_masa': log.device.nume_masa
                    },
                    'timestamp': log.timestamp.isoformat(),
                    'severity': log.severity
                }

                # Prefer the resolved template form when requested and available
                if include_template and log.template:
                    log_item['message'] = log.resolved_message
                    log_item['template_alias'] = log.template.alias
                    log_item['template_category'] = log.template.category
                else:
                    log_item['message'] = log.full_message or log.resolved_message

                log_data.append(log_item)

            return jsonify({
                'success': True,
                'logs': log_data,
                'pagination': {
                    'total_count': total_count,
                    'limit': limit,
                    'offset': offset,
                    'has_more': offset + limit < total_count
                }
            })

    except Exception as e:
        logging.error(f"Error in query_logs: {e}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/stats', methods=['GET'])
def get_log_stats():
    """Report compression, device, and recent-activity statistics."""
    try:
        # Compression numbers come straight from the log service
        compression_stats = log_service.get_compression_stats()

        with get_db().get_session() as session:
            device_counts = {
                'active': session.query(Device).filter_by(status='active').count(),
                'total': session.query(Device).count(),
            }

            from datetime import datetime, timedelta
            cutoff = datetime.utcnow() - timedelta(hours=1)
            logs_last_hour = session.query(LogEntry).filter(
                LogEntry.timestamp >= cutoff
            ).count()

        return jsonify({
            'success': True,
            'compression': compression_stats,
            'devices': device_counts,
            'activity': {'logs_last_hour': logs_last_hour},
        })

    except Exception as exc:
        logging.error(f"Error in get_log_stats: {exc}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
|
||||
@logs_bp.route('/templates', methods=['GET'])
def get_templates():
    """List message templates/aliases, most heavily used first."""
    try:
        with get_db().get_session() as session:
            from app.models import MessageTemplate

            ordered = session.query(MessageTemplate).order_by(
                MessageTemplate.usage_count.desc()
            ).all()

            template_data = []
            for tpl in ordered:
                template_data.append({
                    'alias': tpl.alias,
                    'category': tpl.category,
                    'template_text': tpl.template_text,
                    'usage_count': tpl.usage_count,
                    'created_at': tpl.created_at.isoformat(),
                })

            return jsonify({
                'success': True,
                'templates': template_data,
                'total_count': len(template_data),
            })

    except Exception as exc:
        logging.error(f"Error in get_templates: {exc}")
        return jsonify({
            'error': 'Internal server error',
            'success': False
        }), 500
|
||||
170
app/api/wmt.py
Normal file
170
app/api/wmt.py
Normal file
@@ -0,0 +1,170 @@
|
||||
"""
|
||||
WMT (Workstation Management Terminal) configuration API
|
||||
Handles config distribution and device update requests from WMT clients.
|
||||
"""
|
||||
from flask import Blueprint, request, jsonify
|
||||
from datetime import datetime
|
||||
from app.models import WMTGlobalConfig, Device, WMTUpdateRequest
|
||||
from config.database_config import get_db
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
wmt_api_bp = Blueprint('wmt_api', __name__, url_prefix='/api/wmt')
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _get_or_create_global_config(session):
    """Return the single WMTGlobalConfig row, creating it with defaults if absent."""
    existing = session.query(WMTGlobalConfig).first()
    if existing is not None:
        return existing
    # No row yet — create one with model defaults and flush so it gets an id
    created = WMTGlobalConfig()
    session.add(created)
    session.flush()
    return created
|
||||
|
||||
|
||||
def _latest_config_ts(session, mac_address):
    """Return timestamps for global config and this device's admin-reviewed info."""
    epoch = datetime(1970, 1, 1)  # sentinel meaning "never updated"

    global_cfg = session.query(WMTGlobalConfig).first()
    global_ts = epoch
    if global_cfg and global_cfg.updated_at:
        global_ts = global_cfg.updated_at

    device = session.query(Device).filter_by(mac_address=mac_address).first()
    # info_reviewed_at is the authoritative device-level timestamp
    device_ts = epoch
    if device and device.info_reviewed_at:
        device_ts = device.info_reviewed_at

    return global_ts, device_ts, max(global_ts, device_ts)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Endpoints
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@wmt_api_bp.route('/config/timestamp', methods=['GET'])
def get_config_timestamp():
    """
    Returns the last-modified timestamps for global config and this device's config.
    Query param: mac=<mac_address>

    Response (keys corrected to match the actual payload below):
    {
        "global_updated_at": "2026-04-22T10:00:00",          // null if never set
        "device_info_reviewed_at": "2026-04-22T09:00:00",    // null if device unknown/unreviewed
        "latest_updated_at": "2026-04-22T10:00:00"
    }
    """
    # MACs are normalized to lowercase so lookups match stored values
    mac = request.args.get('mac', '').strip().lower()
    if not mac:
        return jsonify({'error': 'mac query parameter is required'}), 400

    try:
        with get_db().get_session() as session:
            global_ts, device_info_reviewed_ts, latest = _latest_config_ts(session, mac)

            # datetime(1970, 1, 1) is the helper's "never" sentinel — map it
            # to null in the API response
            return jsonify({
                'global_updated_at': global_ts.isoformat() if global_ts != datetime(1970, 1, 1) else None,
                'device_info_reviewed_at': device_info_reviewed_ts.isoformat() if device_info_reviewed_ts != datetime(1970, 1, 1) else None,
                'latest_updated_at': latest.isoformat(),
            }), 200
    except Exception as e:
        logger.error(f'Error getting WMT config timestamp: {e}')
        return jsonify({'error': str(e)}), 500
|
||||
|
||||
|
||||
@wmt_api_bp.route('/config/<mac_address>', methods=['GET'])
def get_device_config(mac_address):
    """
    Returns merged config (global settings + device-specific) for a given MAC.
    Used by WMT client to pull updated config at startup.

    Args:
        mac_address: device MAC from the URL path; normalized to lowercase.

    Response: merged dict consumable by the WMT config.txt writer. Unknown
    devices still receive the global settings with empty device fields.
    """
    mac = mac_address.strip().lower()
    try:
        with get_db().get_session() as session:
            # Creates the global-config row on first use
            global_cfg = _get_or_create_global_config(session)
            device = session.query(Device).filter_by(mac_address=mac).first()

            # Update last_seen if device is known
            # NOTE(review): assumes the session context manager commits on
            # exit, otherwise this write is lost — confirm in get_db()
            if device:
                device.last_seen = datetime.utcnow()

            _, device_ts, latest_ts = _latest_config_ts(session, mac)

            payload = {
                # Global settings
                'chrome_url': global_cfg.chrome_url,
                'chrome_local_url': global_cfg.chrome_local_url or '',
                'chrome_insecure_origin': global_cfg.chrome_insecure_origin,
                'card_api_base_url': global_cfg.card_api_base_url,
                'server_log_url': global_cfg.server_log_url,
                'internet_check_host': global_cfg.internet_check_host,
                'update_host': global_cfg.update_host,
                'update_user': global_cfg.update_user,
                # Device-specific settings (empty string if unknown)
                'device_name': device.device_name if device else '',
                'hostname': device.hostname if device else '',
                'device_ip': device.device_ip if device else '',
                'location': device.location if device else '',
                # Admin-review timestamp for device info (client stores in [device] section)
                'info_reviewed_at': device.info_reviewed_at.isoformat() if (device and device.info_reviewed_at) else '1970-01-01T00:00:00',
                # Sync metadata
                'config_updated_at': latest_ts.isoformat(),
            }
            return jsonify(payload), 200
    except Exception as e:
        logger.error(f'Error fetching WMT config for {mac}: {e}')
        return jsonify({'error': str(e)}), 500
|
||||
|
||||
|
||||
@wmt_api_bp.route('/config/update_request', methods=['POST'])
def submit_update_request():
    """
    WMT client sends current device info as an update request for admin approval.

    Expected JSON:
    {
        "mac_address": "b8:27:eb:aa:bb:cc",
        "device_name": "Masa-01",
        "hostname": "rpi-masa01",
        "device_ip": "192.168.1.100",
        "client_config_mtime": "2026-04-22T09:30:00" // optional
    }

    Returns 201 once the request row is queued; nothing is applied to the
    Device record here except a last_seen refresh.
    """
    if not request.is_json:
        return jsonify({'error': 'Content-Type must be application/json'}), 400

    data = request.get_json()
    # Normalize MAC to lowercase so it matches stored device records
    mac = (data.get('mac_address') or '').strip().lower()
    if not mac:
        return jsonify({'error': 'mac_address is required'}), 400

    try:
        with get_db().get_session() as session:
            device = session.query(Device).filter_by(mac_address=mac).first()

            # Requests from unknown MACs are accepted too (device_id stays None)
            req = WMTUpdateRequest(
                mac_address=mac,
                device_id=device.id if device else None,
                proposed_device_name=data.get('device_name'),
                proposed_hostname=data.get('hostname'),
                proposed_device_ip=data.get('device_ip'),
                client_config_mtime=data.get('client_config_mtime'),
                submitted_at=datetime.utcnow(),
                status='pending',
            )
            session.add(req)

            # Update device last_seen
            if device:
                device.last_seen = datetime.utcnow()

            logger.info(f'WMT update request received from {mac}')
            return jsonify({'status': 'received', 'message': 'Update request queued for admin review'}), 201
    except Exception as e:
        logger.error(f'Error saving WMT update request from {mac}: {e}')
        return jsonify({'error': str(e)}), 500
|
||||
Reference in New Issue
Block a user