Add configuration and utilities; update server with enhanced monitoring features
- Add config.py for environment configuration management
- Add utils.py with utility functions
- Add .env.example for environment variable reference
- Add routes_example.py as route reference
- Add login.html template for authentication
- Update server.py with enhancements
- Update all dashboard and log templates
- Move documentation to 'explanations and old code' directory
- Update database schema
routes_example.py (new file, 241 lines)
@@ -0,0 +1,241 @@
# Refactored Server Logs Route Module
# This shows the recommended structure for modularizing the application

from flask import Blueprint, request, render_template
from datetime import datetime
import sqlite3
import logging
from utils import (
    require_auth, log_request, error_response, success_response,
    APIError
)

logger = logging.getLogger(__name__)
logs_bp = Blueprint('logs', __name__, url_prefix='/logs')
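
# --- For reference: minimal sketches of the utils.py helpers imported above.
# --- Everything below is inferred from the call sites in this module; the
# --- real utils.py ships with this commit and may differ.
#
#   class APIError(Exception):
#       def __init__(self, message, status_code=400, details=None):
#           super().__init__(message)
#           self.message = message
#           self.status_code = status_code
#           self.details = details
#
#   def error_response(message, status_code=400, details=None):
#       payload = {'status': 'error', 'message': message}
#       if details:
#           payload['details'] = details
#       return jsonify(payload), status_code
#
#   def success_response(data=None, message='OK', status_code=200):
#       return jsonify({'status': 'success', 'message': message, 'data': data}), status_code
#
#   require_auth and log_request are assumed to be view decorators: the former
#   rejects unauthenticated requests, the latter logs method/path/remote address.
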
def get_db_connection(database_path):
    """Get database connection"""
    try:
        conn = sqlite3.connect(database_path)
        conn.row_factory = sqlite3.Row  # rows support access by column name
        return conn
    except sqlite3.Error as e:
        logger.error(f"Database connection error: {e}")
        raise APIError("Database connection failed", 500)
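
# --- For reference: the config accessor used by the views below. A sketch
# --- inferred purely from the attributes this module reads (DATABASE_PATH,
# --- DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE, BACKUP_ENABLED, BACKUP_DIR); the real
# --- config.py ships with this commit.
#
#   import os
#
#   class Config:
#       DATABASE_PATH = os.getenv('DATABASE_PATH', 'logs.db')
#       DEFAULT_PAGE_SIZE = int(os.getenv('DEFAULT_PAGE_SIZE', '50'))
#       MAX_PAGE_SIZE = int(os.getenv('MAX_PAGE_SIZE', '500'))
#       BACKUP_ENABLED = os.getenv('BACKUP_ENABLED', 'true').lower() == 'true'
#       BACKUP_DIR = os.getenv('BACKUP_DIR', 'backups')
#
#   def get_config():
#       return Config()
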
@logs_bp.route('', methods=['POST'])
@logs_bp.route('/log', methods=['POST'])
@require_auth
@log_request
def log_event():
    """
    Handle log submissions from devices

    Expected JSON:
    {
        "hostname": "device-name",
        "device_ip": "192.168.1.1",
        "nume_masa": "table-name",
        "log_message": "event description"
    }
    """
    try:
        # silent=True: return None on missing/malformed JSON instead of letting
        # Flask raise its own 400/415, so the APIError path below always runs
        data = request.get_json(silent=True)
        if not data:
            raise APIError("Invalid or missing JSON payload", 400)

        # Extract and validate fields
        hostname = data.get('hostname', '').strip()
        device_ip = data.get('device_ip', '').strip()
        nume_masa = data.get('nume_masa', '').strip()
        log_message = data.get('log_message', '').strip()

        # Validate required fields
        if not all([hostname, device_ip, nume_masa, log_message]):
            missing = [k for k in ['hostname', 'device_ip', 'nume_masa', 'log_message']
                       if not data.get(k, '').strip()]
            raise APIError("Missing required fields", 400, {'missing_fields': missing})

        # Validate field lengths (15 chars is the longest dotted-quad IPv4 address)
        if len(hostname) > 255 or len(device_ip) > 15 or len(nume_masa) > 255:
            raise APIError("Field length exceeds maximum", 400)

        # Save to database
        from config import get_config  # local import, e.g. to avoid circular imports
        config = get_config()

        conn = get_db_connection(config.DATABASE_PATH)
        try:
            cursor = conn.cursor()
            timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

            cursor.execute('''
                INSERT INTO logs (hostname, device_ip, nume_masa, timestamp, event_description)
                VALUES (?, ?, ?, ?, ?)
            ''', (hostname, device_ip, nume_masa, timestamp, log_message))

            conn.commit()
            logger.info(f"Log saved from {hostname} ({device_ip})")

            return success_response(
                {"log_id": cursor.lastrowid},
                "Log saved successfully",
                201
            )
        finally:
            conn.close()

    except APIError as e:
        return error_response(e.message, e.status_code, e.details)
    except sqlite3.Error as e:
        logger.error(f"Database error: {e}")
        return error_response("Database operation failed", 500)
    except Exception:
        logger.exception("Unexpected error in log_event")
        return error_response("Internal server error", 500)
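
# --- Example submission (sketch): how a device might call the endpoint above.
# --- The auth header is an assumption; the real scheme depends on require_auth
# --- in utils.py, which is not shown here.
#
#   import requests
#
#   resp = requests.post(
#       'http://localhost:5000/logs/log',
#       json={'hostname': 'device-name', 'device_ip': '192.168.1.1',
#             'nume_masa': 'table-name', 'log_message': 'event description'},
#       headers={'Authorization': 'Bearer <token>'},  # assumed auth scheme
#   )
#   # Expected: 201 with {"data": {"log_id": ...}, ...} on success
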
@logs_bp.route('/dashboard', methods=['GET'])
@log_request
def dashboard():
    """
    Display device logs dashboard
    """
    try:
        from config import get_config  # local import, e.g. to avoid circular imports
        config = get_config()

        # Clamp paging parameters to sane bounds
        page = max(request.args.get('page', 1, type=int), 1)
        per_page = min(
            max(request.args.get('per_page', config.DEFAULT_PAGE_SIZE, type=int), 1),
            config.MAX_PAGE_SIZE
        )

        conn = get_db_connection(config.DATABASE_PATH)
        try:
            cursor = conn.cursor()

            # Get total count (single-quoted literal: SQLite parses "SERVER" as an identifier first)
            cursor.execute("SELECT COUNT(*) FROM logs WHERE hostname != 'SERVER'")
            total = cursor.fetchone()[0]

            # Get paginated results
            offset = (page - 1) * per_page
            cursor.execute('''
                SELECT hostname, device_ip, nume_masa, timestamp, event_description
                FROM logs
                WHERE hostname != 'SERVER'
                ORDER BY timestamp DESC
                LIMIT ? OFFSET ?
            ''', (per_page, offset))

            logs = cursor.fetchall()

            return render_template(
                'dashboard.html',
                logs=logs,
                page=page,
                per_page=per_page,
                total=total,
                total_pages=(total + per_page - 1) // per_page
            )
        finally:
            conn.close()

    except APIError as e:
        # error_response already carries the status; don't append a second one
        return error_response(e.message, e.status_code)
    except Exception:
        logger.exception("Error in dashboard")
        return error_response("Failed to load dashboard", 500)
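
# --- Wiring (sketch): how server.py would mount this blueprint. The actual
# --- server.py changes are part of this commit but not shown here.
#
#   from routes_example import logs_bp
#   app.register_blueprint(logs_bp)
#
#   # The url_prefix makes the routes: POST /logs, POST /logs/log,
#   # GET /logs/dashboard, GET /logs/stats, POST /logs/clear
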
@logs_bp.route('/stats', methods=['GET'])
@log_request
def get_stats():
    """
    Get database statistics
    """
    try:
        from config import get_config  # local import, e.g. to avoid circular imports
        config = get_config()

        conn = get_db_connection(config.DATABASE_PATH)
        try:
            cursor = conn.cursor()

            # Get statistics
            cursor.execute('SELECT COUNT(*) FROM logs')
            total_logs = cursor.fetchone()[0]

            cursor.execute("SELECT COUNT(DISTINCT hostname) FROM logs WHERE hostname != 'SERVER'")
            unique_devices = cursor.fetchone()[0]

            # COUNT(*), not COUNT(DISTINCT hostname): there is only one 'SERVER'
            # hostname, so a distinct count would always return 0 or 1
            cursor.execute("SELECT COUNT(*) FROM logs WHERE hostname = 'SERVER'")
            server_events = cursor.fetchone()[0]

            cursor.execute('SELECT MIN(timestamp), MAX(timestamp) FROM logs')
            date_range = cursor.fetchone()

            return success_response({
                'total_logs': total_logs,
                'unique_devices': unique_devices,
                'server_events': server_events,
                'earliest_log': date_range[0],
                'latest_log': date_range[1]
            })
        finally:
            conn.close()

    except Exception:
        logger.exception("Error in get_stats")
        return error_response("Failed to retrieve statistics", 500)
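
# --- Illustrative /logs/stats payload, assuming the success_response envelope
# --- sketched near the top of this file (all values are made up):
#
#   {
#       "status": "success",
#       "message": "OK",
#       "data": {
#           "total_logs": 1234,
#           "unique_devices": 7,
#           "server_events": 42,
#           "earliest_log": "2024-01-01 00:00:00",
#           "latest_log": "2024-06-30 23:59:59"
#       }
#   }
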
@logs_bp.route('/clear', methods=['POST'])
@require_auth
@log_request
def clear_logs():
    """
    Clear all logs (requires authentication)
    """
    try:
        from config import get_config  # local import, e.g. to avoid circular imports
        config = get_config()

        # Backup before clearing
        if config.BACKUP_ENABLED:
            import os      # only needed on this code path
            import shutil

            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            backup_file = f'{config.BACKUP_DIR}/database_backup_{timestamp}.db'

            os.makedirs(config.BACKUP_DIR, exist_ok=True)
            shutil.copy2(config.DATABASE_PATH, backup_file)
            logger.info(f"Database backup created before clearing: {backup_file}")

        conn = get_db_connection(config.DATABASE_PATH)
        try:
            cursor = conn.cursor()

            cursor.execute('SELECT COUNT(*) FROM logs')
            log_count = cursor.fetchone()[0]

            cursor.execute('DELETE FROM logs')
            conn.commit()

            logger.info(f"Database cleared: {log_count} logs deleted")

            return success_response(
                {"deleted_count": log_count},
                "Database cleared successfully"
            )
        finally:
            conn.close()

    except Exception:
        logger.exception("Error in clear_logs")
        return error_response("Failed to clear database", 500)
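
# --- For reference: the logs table shape every query in this module assumes,
# --- inferred from the INSERT/SELECT statements above. The commit's actual
# --- schema update is authoritative and may differ.
#
#   CREATE TABLE IF NOT EXISTS logs (
#       id INTEGER PRIMARY KEY AUTOINCREMENT,
#       hostname TEXT NOT NULL,
#       device_ip TEXT NOT NULL,
#       nume_masa TEXT NOT NULL,
#       timestamp TEXT NOT NULL,
#       event_description TEXT NOT NULL
#   );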