Implement database connection pooling with context manager pattern

- Added DBUtils PooledDB for intelligent connection pooling
- Created db_pool.py with lazy-initialized connection pool (max 20 connections)
- Added db_connection_context() context manager for safe connection handling
- Refactored all 19 database operations to use context manager pattern
- Ensures proper connection cleanup and exception handling
- Prevents connection exhaustion on POST requests
- Added logging configuration for debugging

Changes:
- py_app/app/db_pool.py: New connection pool manager
- py_app/app/logging_config.py: Centralized logging
- py_app/app/__init__.py: Updated to use connection pool
- py_app/app/routes.py: Refactored all DB operations to use context manager
- py_app/app/settings.py: Updated settings handlers
- py_app/requirements.txt: Added DBUtils dependency

This solves the connection timeout issues experienced with the fgscan page.
This commit is contained in:
Quality App System
2026-01-22 22:07:06 +02:00
parent fd801ab78d
commit 64b67b2979
9 changed files with 1928 additions and 920 deletions

View File

@@ -1,10 +1,17 @@
from flask import Flask
from datetime import datetime
import os
def create_app():
app = Flask(__name__)
app.config['SECRET_KEY'] = 'your_secret_key'
# Initialize logging first
from app.logging_config import setup_logging
log_dir = os.path.join(app.instance_path, '..', 'logs')
logger = setup_logging(app=app, log_dir=log_dir)
logger.info("Flask app initialization started")
# Configure session persistence
from datetime import timedelta
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=7)
@@ -15,14 +22,21 @@ def create_app():
# Set max upload size to 10GB for large database backups
app.config['MAX_CONTENT_LENGTH'] = 10 * 1024 * 1024 * 1024 # 10GB
# Note: Database connection pool is lazily initialized on first use
# This is to avoid trying to read configuration before it's created
# during application startup. See app.db_pool.get_db_pool() for details.
logger.info("Database connection pool will be lazily initialized on first use")
# Application uses direct MariaDB connections via external_server.conf
# No SQLAlchemy ORM needed - all database operations use raw SQL
# Connection pooling via DBUtils prevents connection exhaustion
logger.info("Registering Flask blueprints...")
from app.routes import bp as main_bp, warehouse_bp
from app.daily_mirror import daily_mirror_bp
app.register_blueprint(main_bp, url_prefix='/')
app.register_blueprint(warehouse_bp, url_prefix='/warehouse')
app.register_blueprint(daily_mirror_bp)
logger.info("Blueprints registered successfully")
# Add 'now' function to Jinja2 globals
app.jinja_env.globals['now'] = datetime.now

122
py_app/app/db_pool.py Normal file
View File

@@ -0,0 +1,122 @@
"""
Database Connection Pool Manager for MariaDB
Provides connection pooling to prevent connection exhaustion
"""
import os
import mariadb
from dbutils.pooled_db import PooledDB
from flask import current_app
from app.logging_config import get_logger
logger = get_logger('db_pool')
# Global connection pool instance
_db_pool = None
_pool_initialized = False
def get_db_pool():
    """
    Get or create the shared database connection pool (lazy singleton).

    The pool is created on first use rather than at app startup so that the
    external_server.conf file can be written after the application boots.

    Returns:
        PooledDB: the shared connection pool instance.

    Raises:
        RuntimeError: if pool creation fails now, or failed on a previous
            call (previous failures are not retried).
    """
    global _db_pool, _pool_initialized
    logger.debug("get_db_pool() called")

    # Fast path: pool already built.
    if _db_pool is not None:
        logger.debug("Pool already initialized, returning existing pool")
        return _db_pool

    if _pool_initialized:
        # A previous attempt failed; fail fast instead of re-reading the
        # config on every request. NOTE(review): this means a transient DB
        # outage at first use disables the pool until restart — confirm
        # that is the intended behavior.
        logger.error("Pool initialization flag set but _db_pool is None - not retrying")
        raise RuntimeError("Database pool initialization failed previously")

    # NOTE(review): this lazy init is not guarded by a lock; two concurrent
    # first requests could each build a pool. Harmless but wasteful.
    try:
        logger.info("Initializing database connection pool...")

        # Read settings from the configuration file in the Flask instance dir.
        settings_file = os.path.join(current_app.instance_path, 'external_server.conf')
        logger.debug(f"Looking for config file: {settings_file}")
        if not os.path.exists(settings_file):
            raise FileNotFoundError(f"Database config file not found: {settings_file}")

        logger.debug("Config file found, parsing...")
        settings = {}
        with open(settings_file, 'r') as f:
            for line in f:
                line = line.strip()
                # Skip blank lines and comments.
                if not line or line.startswith('#'):
                    continue
                if '=' in line:
                    key, value = line.split('=', 1)
                    # Bug fix: strip surrounding whitespace so "key = value"
                    # lines are accepted as well as "key=value"; previously a
                    # padded key would fail the required_keys check below.
                    settings[key.strip()] = value.strip()

        logger.debug(f"Parsed config: host={settings.get('server_domain')}, db={settings.get('database_name')}, user={settings.get('username')}")

        # Validate all required settings are present before connecting.
        required_keys = ['username', 'password', 'server_domain', 'port', 'database_name']
        for key in required_keys:
            if key not in settings:
                raise ValueError(f"Missing database configuration: {key}")

        logger.info(f"Creating connection pool: max_connections=20, min_cached=3, max_cached=10, max_shared=5")
        # Create connection pool.
        _db_pool = PooledDB(
            creator=mariadb,    # DB-API 2 module used to open connections
            maxconnections=20,  # Max connections in pool
            mincached=3,        # Min idle connections
            maxcached=10,       # Max idle connections
            maxshared=5,        # Shared connections (only effective if the driver reports threadsafety > 1)
            blocking=True,      # Block if no connection available
            ping=1,             # Ping database to check connection health (1 = on demand)
            user=settings['username'],
            password=settings['password'],
            host=settings['server_domain'],
            port=int(settings['port']),
            database=settings['database_name'],
            autocommit=False    # Explicit commit for safety
        )
        _pool_initialized = True
        logger.info("✅ Database connection pool initialized successfully (max 20 connections)")
        return _db_pool
    except Exception as e:
        # Mark as attempted so subsequent calls fail fast (see above).
        _pool_initialized = True
        logger.error(f"FAILED to initialize database pool: {e}", exc_info=True)
        raise RuntimeError(f"Database pool initialization failed: {e}") from e
def get_db_connection():
    """
    Borrow a connection from the shared pool.

    Callers are responsible for releasing it: use it in a ``with`` block or
    ensure ``close()`` is called (which returns it to the pool).
    """
    logger.debug("get_db_connection() called")
    try:
        connection = get_db_pool().connection()
    except Exception as e:
        logger.error(f"Failed to get connection from pool: {e}", exc_info=True)
        raise
    logger.debug("Successfully obtained connection from pool")
    return connection
def close_db_pool():
    """
    Close all connections in the pool (called at app shutdown).

    Also clears the initialization flag so a later call to get_db_pool()
    can build a fresh pool instead of raising
    "Database pool initialization failed previously".
    """
    global _db_pool, _pool_initialized
    if _db_pool:
        logger.info("Closing database connection pool...")
        _db_pool.close()
        _db_pool = None
        # Bug fix: without resetting this flag, get_db_pool() would refuse
        # to re-initialize after a clean shutdown of the pool.
        _pool_initialized = False
        logger.info("✅ Database connection pool closed")
# That's it! The pool is lazily initialized on first connection.
# No other initialization needed.

View File

@@ -0,0 +1,142 @@
"""
Logging Configuration for Trasabilitate Application
Centralizes all logging setup for the application
"""
import logging
import logging.handlers
import os
import sys
from datetime import datetime
def _add_rotating_file_handler(target_logger, path, level, formatter,
                               max_bytes, backup_count):
    """Attach a size-rotating file handler to *target_logger*; return *path*."""
    handler = logging.handlers.RotatingFileHandler(
        path,
        maxBytes=max_bytes,
        backupCount=backup_count
    )
    handler.setLevel(level)
    handler.setFormatter(formatter)
    target_logger.addHandler(handler)
    return path


def setup_logging(app=None, log_dir='/srv/quality_app/logs'):
    """
    Configure comprehensive logging for the application.

    Installs:
      * a DEBUG-level rotating file handler on the root logger (all logs),
      * an ERROR-level rotating file handler on the root logger,
      * an INFO-level console handler on stdout (for Docker logs),
      * dedicated DEBUG-level rotating file handlers on the
        'trasabilitate.db', 'trasabilitate.routes' and
        'trasabilitate.settings' loggers (which still propagate to root,
        so their records also land in the main log).

    Log files are date-stamped (one set per day) and size-rotated.

    Args:
        app: Flask app instance (optional; not used by this function).
        log_dir: Directory to store log files (created if missing).

    Returns:
        logging.Logger: the configured root logger.
    """
    # Ensure log directory exists.
    os.makedirs(log_dir, exist_ok=True)

    # Formatters: detailed for files, simple for the console.
    detailed_formatter = logging.Formatter(
        '[%(asctime)s] [%(name)s] [%(levelname)s] %(filename)s:%(lineno)d - %(funcName)s() - %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
    simple_formatter = logging.Formatter(
        '[%(asctime)s] [%(levelname)s] %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )

    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)

    # Remove any existing handlers to avoid duplicates on re-initialization.
    for handler in root_logger.handlers[:]:
        root_logger.removeHandler(handler)

    stamp = datetime.now().strftime('%Y%m%d')
    ten_mb = 10 * 1024 * 1024
    five_mb = 5 * 1024 * 1024

    # Root logger: everything (DEBUG+) and an errors-only (ERROR+) file.
    all_log_file = _add_rotating_file_handler(
        root_logger, os.path.join(log_dir, f'application_{stamp}.log'),
        logging.DEBUG, detailed_formatter, ten_mb, 10)
    error_log_file = _add_rotating_file_handler(
        root_logger, os.path.join(log_dir, f'errors_{stamp}.log'),
        logging.ERROR, detailed_formatter, five_mb, 5)

    # Console handler - INFO and above (for Docker logs).
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(simple_formatter)
    root_logger.addHandler(console_handler)

    # Component loggers: (name suffix, file prefix, max size, backups).
    component_specs = [
        ('db', 'database', ten_mb, 10),
        ('routes', 'routes', ten_mb, 10),
        ('settings', 'settings', five_mb, 5),
    ]
    component_files = {}
    for suffix, prefix, max_bytes, backups in component_specs:
        comp_logger = logging.getLogger(f'trasabilitate.{suffix}')
        comp_logger.setLevel(logging.DEBUG)
        # Bug fix: clear stale handlers here too — previously only the root
        # logger was cleared, so re-running setup_logging() duplicated the
        # db/routes/settings file handlers (and hence every log record).
        for handler in comp_logger.handlers[:]:
            comp_logger.removeHandler(handler)
        component_files[suffix] = _add_rotating_file_handler(
            comp_logger, os.path.join(log_dir, f'{prefix}_{stamp}.log'),
            logging.DEBUG, detailed_formatter, max_bytes, backups)

    # Log initialization summary.
    root_logger.info("=" * 80)
    root_logger.info("Trasabilitate Application - Logging Initialized")
    root_logger.info("=" * 80)
    root_logger.info(f"Log directory: {log_dir}")
    root_logger.info(f"Main log file: {all_log_file}")
    root_logger.info(f"Error log file: {error_log_file}")
    root_logger.info(f"Database log file: {component_files['db']}")
    root_logger.info(f"Routes log file: {component_files['routes']}")
    root_logger.info(f"Settings log file: {component_files['settings']}")
    root_logger.info("=" * 80)
    return root_logger
def get_logger(name):
    """Return the application logger named ``trasabilitate.<name>``."""
    qualified_name = f'trasabilitate.{name}'
    return logging.getLogger(qualified_name)

File diff suppressed because it is too large Load Diff

View File

@@ -1,12 +1,37 @@
from flask import render_template, request, session, redirect, url_for, flash, current_app, jsonify
from .permissions import APP_PERMISSIONS, ROLE_HIERARCHY, ACTIONS, get_all_permissions, get_default_permissions_for_role
from .db_pool import get_db_connection
from .logging_config import get_logger
import mariadb
import os
import json
from contextlib import contextmanager
logger = get_logger('settings')
# Global permission cache to avoid repeated database queries
_permission_cache = {}
@contextmanager
def db_connection_context():
    """
    Context manager for database connections from the pool.

    Yields a pooled connection; commits on successful exit, rolls back on
    exception, and always returns the connection to the pool via close().

    Raises:
        Exception: whatever the wrapped database operation raised, after
            the transaction has been rolled back.
    """
    logger.debug("Acquiring database connection from pool (settings)")
    conn = get_db_connection()
    try:
        logger.debug("Database connection acquired successfully")
        yield conn
        # Bug fix: the pool is created with autocommit=False, so without an
        # explicit commit here successful writes would be silently discarded
        # unless every caller remembered to commit itself. (A redundant
        # commit after a caller's own commit is a no-op.)
        conn.commit()
    except Exception as e:
        logger.error(f"Error in settings database operation: {e}", exc_info=True)
        conn.rollback()
        raise  # bare raise preserves the original traceback
    finally:
        if conn:
            logger.debug("Closing database connection (settings)")
            conn.close()
def check_permission(permission_key, user_role=None):
    """
    Check if the current user (or specified role) has a specific permission.

    Args:
        permission_key: Key identifying the permission row to look up in
            the role_permissions table.
        user_role: Role to check; defaults to the role stored in the
            current session.

    Returns:
        bool: True if user has the permission, False otherwise
    """
    # NOTE(review): this span was diff residue containing both the old
    # (raw get_external_db_connection) and new (pooled context manager)
    # bodies interleaved; this is the reconstructed post-refactor version.
    logger.debug(f"Checking permission '{permission_key}' for role '{user_role or session.get('role')}'")
    if user_role is None:
        user_role = session.get('role')
    if not user_role:
        logger.warning(f"Cannot check permission - no role provided")
        return False

    # Superadmin always has all permissions
    if user_role == 'superadmin':
        logger.debug(f"Superadmin bypass - permission '{permission_key}' granted")
        return True

    # Check cache first to avoid a database round-trip per check.
    cache_key = f"{user_role}:{permission_key}"
    if cache_key in _permission_cache:
        logger.debug(f"Permission '{permission_key}' found in cache: {_permission_cache[cache_key]}")
        return _permission_cache[cache_key]

    try:
        logger.debug(f"Checking permission '{permission_key}' for role '{user_role}' in database")
        with db_connection_context() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                SELECT granted FROM role_permissions
                WHERE role = %s AND permission_key = %s
            """, (user_role, permission_key))
            result = cursor.fetchone()
            # Cache the result
            has_permission = bool(result and result[0])
            _permission_cache[cache_key] = has_permission
            logger.info(f"Permission '{permission_key}' for role '{user_role}': {has_permission}")
            return has_permission
    except Exception as e:
        # Fail closed: any database error is treated as "permission denied".
        logger.error(f"Error checking permission {permission_key} for role {user_role}: {e}", exc_info=True)
        return False
def clear_permission_cache():
@@ -226,31 +257,12 @@ def settings_handler():
# Deprecated shim for obtaining a database connection (now pool-backed)
def get_external_db_connection():
    """
    DEPRECATED: Use get_db_connection() from db_pool.py instead.

    Kept only for backward compatibility with older call sites. It no
    longer parses external_server.conf or opens a raw MariaDB connection
    per call; it simply returns a connection from the managed pool.
    """
    # NOTE(review): the original span was diff residue (old raw-connect
    # body followed by the new shim); this is the reconstructed new version.
    return get_db_connection()
# User management handlers
def create_user_handler():