Implement boxes management module with auto-numbered box creation

- Add boxes_crates database table with BIGINT IDs and 8-digit auto-numbered box_numbers
- Implement boxes CRUD operations (add, edit, update, delete, delete_multiple)
- Create boxes route handlers with POST actions for all operations
- Add boxes.html template with 3-panel layout matching warehouse locations module
- Implement barcode generation and printing with JsBarcode and QZ Tray integration
- Add browser print fallback when QZ Tray is not available
- Simplify the create-box form to a single button with auto-generated numbering (an illustrative numbering sketch follows the commit metadata below)
- Fix JavaScript null reference errors with proper element validation
- Convert tuple data to dictionaries for Jinja2 template compatibility
- Register boxes blueprint in Flask app initialization
Quality App Developer
2026-01-26 22:08:31 +02:00
parent 3c5a273a89
commit e1f3302c6b
37 changed files with 8429 additions and 66 deletions
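The auto-numbered box creation described above is implemented elsewhere in this commit and does not appear in the excerpts below. As a point of reference only, here is a minimal sketch of how an 8-digit box_number could be generated against the boxes_crates table; the helper name generate_next_box_number and the MAX()-based numbering scheme are assumptions, not the committed implementation.

# Hypothetical sketch -- not part of this diff.
from app.database import get_db

def generate_next_box_number():
    """Return the next zero-padded 8-digit box number, e.g. '00000042'."""
    conn = get_db()
    cursor = conn.cursor()
    try:
        # Assumes box_number is stored as a zero-padded string in boxes_crates.
        cursor.execute("SELECT MAX(CAST(box_number AS UNSIGNED)) FROM boxes_crates")
        row = cursor.fetchone()
        next_number = (row[0] or 0) + 1
        return str(next_number).zfill(8)
    finally:
        cursor.close()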

View File

@@ -0,0 +1,231 @@
"""
Settings Module - Log Explorer Helper
Provides functions to explore and manage application logs
"""
import os
from datetime import datetime
from pathlib import Path
import logging
logger = logging.getLogger(__name__)
def get_log_files():
"""Get list of all log files in the logs folder"""
try:
log_dir = './data/logs'
if not os.path.exists(log_dir):
return []
log_files = []
for filename in sorted(os.listdir(log_dir)):
filepath = os.path.join(log_dir, filename)
if os.path.isfile(filepath):
try:
stat_info = os.stat(filepath)
log_files.append({
'name': filename,
'size': stat_info.st_size,
'size_mb': round(stat_info.st_size / 1024 / 1024, 2),
'modified_at': datetime.fromtimestamp(stat_info.st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
'path': filepath
})
except Exception as e:
logger.error(f"Error getting stat info for {filename}: {e}")
continue
return sorted(log_files, key=lambda x: x['modified_at'], reverse=True)
except Exception as e:
logger.error(f"Error getting log files: {e}")
return []
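# Illustration only -- not part of the committed file. Each entry returned by
# get_log_files() is a plain dict, newest file first; the filename below is an
# assumed example:
#
#     for f in get_log_files():
#         print(f"{f['name']}: {f['size_mb']} MB, modified {f['modified_at']}")
#
#     # -> app.log: 0.42 MB, modified 2026-01-26 21:55:03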
def get_log_content(filename, lines=100):
"""Get content of a log file
Args:
filename: Name of the log file (without path)
lines: Number of lines to read from the end (None for all)
Returns:
Dictionary with file info and content
"""
try:
log_dir = './data/logs'
filepath = os.path.join(log_dir, filename)
# Security check - ensure filepath is within log_dir
if not os.path.abspath(filepath).startswith(os.path.abspath(log_dir)):
logger.error(f"Attempted to access file outside log directory: {filepath}")
return {
'success': False,
'error': 'Invalid file path',
'filename': filename
}
if not os.path.exists(filepath):
return {
'success': False,
'error': f'File not found: {filename}',
'filename': filename
}
# Read file content
try:
with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
if lines:
# Read all lines and get the last N lines
all_lines = f.readlines()
content_lines = all_lines[-lines:] if len(all_lines) > lines else all_lines
content = ''.join(content_lines)
total_lines = len(all_lines)
else:
content = f.read()
total_lines = len(content.splitlines())
stat_info = os.stat(filepath)
return {
'success': True,
'filename': filename,
'size': stat_info.st_size,
'size_mb': round(stat_info.st_size / 1024 / 1024, 2),
'modified_at': datetime.fromtimestamp(stat_info.st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
'content': content,
'total_lines': total_lines,
'displayed_lines': len(content.splitlines()),
'truncated': lines and total_lines > lines
}
except Exception as e:
return {
'success': False,
'error': f'Error reading file: {str(e)}',
'filename': filename
}
except Exception as e:
logger.error(f"Error getting log content for {filename}: {e}")
return {
'success': False,
'error': f'Error: {str(e)}',
'filename': filename
}
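# Illustration only -- not part of the committed file. Reading the last 200
# lines of an assumed example file 'app.log':
#
#     data = get_log_content('app.log', lines=200)
#     if data['success']:
#         print(data['content'])      # last 200 lines joined as one string
#         print(data['truncated'])    # truthy when the file holds more lines
#     else:
#         print(data['error'])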
def get_log_file_path(filename):
"""Get safe file path for download/save
Args:
filename: Name of the log file (without path)
Returns:
Full file path if valid, None otherwise
"""
try:
log_dir = './data/logs'
filepath = os.path.join(log_dir, filename)
# Security check - ensure filepath is within log_dir
if not os.path.abspath(filepath).startswith(os.path.abspath(log_dir)):
logger.error(f"Attempted to access file outside log directory: {filepath}")
return None
if not os.path.exists(filepath):
logger.error(f"File not found: {filepath}")
return None
return filepath
except Exception as e:
logger.error(f"Error getting log file path for {filename}: {e}")
return None
def get_log_statistics():
"""Get statistics about log files"""
try:
log_files = get_log_files()
if not log_files:
return {
'total_files': 0,
'total_size_mb': 0,
'oldest_log': None,
'newest_log': None
}
total_size = sum(f['size'] for f in log_files)
return {
'total_files': len(log_files),
'total_size_mb': round(total_size / 1024 / 1024, 2),
'oldest_log': log_files[-1]['modified_at'] if log_files else None,
'newest_log': log_files[0]['modified_at'] if log_files else None
}
except Exception as e:
logger.error(f"Error getting log statistics: {e}")
return {
'total_files': 0,
'total_size_mb': 0,
'oldest_log': None,
'newest_log': None
}
def search_in_logs(search_term, filename=None, max_results=50):
"""Search for a term in log files
Args:
search_term: Term to search for
filename: Optional specific file to search in
max_results: Maximum number of results to return
Returns:
List of matching lines with context
"""
try:
log_dir = './data/logs'
results = []
if filename:
# Search in specific file
filepath = os.path.join(log_dir, filename)
if not os.path.abspath(filepath).startswith(os.path.abspath(log_dir)):
return []
if os.path.exists(filepath):
try:
with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
for line_num, line in enumerate(f, 1):
if search_term.lower() in line.lower():
results.append({
'file': filename,
'line_num': line_num,
'line': line.strip()
})
if len(results) >= max_results:
break
except Exception as e:
logger.error(f"Error searching in {filename}: {e}")
else:
# Search in all log files
for log_file in get_log_files():
try:
with open(log_file['path'], 'r', encoding='utf-8', errors='ignore') as f:
for line_num, line in enumerate(f, 1):
if search_term.lower() in line.lower():
results.append({
'file': log_file['name'],
'line_num': line_num,
'line': line.strip()
})
if len(results) >= max_results:
break
if len(results) >= max_results:
break
except Exception as e:
logger.error(f"Error searching in {log_file['name']}: {e}")
return results
except Exception as e:
logger.error(f"Error searching logs: {e}")
return []
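For orientation, a hedged usage sketch of the search helper above; the search terms and the filename are assumed examples, not values taken from this commit.

# Sketch only: cross-file search for 'ERROR', capped at 20 matches.
from app.modules.settings.logs import search_in_logs

matches = search_in_logs('ERROR', max_results=20)
for m in matches:
    print(f"{m['file']}:{m['line_num']}  {m['line']}")

# Restricting the search to a single (assumed) file:
matches = search_in_logs('barcode', filename='app.log')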

View File

@@ -7,6 +7,8 @@ import hashlib
import secrets
from datetime import datetime, timedelta
from app.database import get_db
from app.modules.settings.stats import get_all_stats
from app.modules.settings.logs import get_log_files, get_log_content, get_log_file_path, get_log_statistics, search_in_logs
import subprocess
import os
import json
@@ -19,11 +21,30 @@ settings_bp = Blueprint('settings', __name__, url_prefix='/settings')
@settings_bp.route('/', methods=['GET'])
def settings_index():
"""Settings module main page"""
"""Settings module main page with app overview"""
if 'user_id' not in session:
return redirect(url_for('main.login'))
return render_template('modules/settings/index.html')
# Get all app statistics
try:
stats = get_all_stats()
except Exception as e:
logger.error(f"Error getting stats in settings_index: {e}", exc_info=True)
stats = {
'user_count': 0,
'database_size_mb': 0,
'logs_size_mb': 0,
'database_count': 0,
'backup_count': 0,
'printer_keys_count': 0,
'app_key_availability': {
'available': False,
'count': 0,
'status': 'Error loading data'
}
}
return render_template('modules/settings/index.html', stats=stats)
@settings_bp.route('/general', methods=['GET', 'POST'])
@@ -1254,3 +1275,100 @@ def toggle_backup_schedule(schedule_id):
except Exception as e:
return jsonify({'error': str(e)}), 500
# ============================================================================
# Log Explorer Routes
# ============================================================================
@settings_bp.route('/logs', methods=['GET'])
def logs_explorer():
"""Log explorer main page - list all log files"""
if 'user_id' not in session:
return redirect(url_for('main.login'))
try:
log_files = get_log_files()
log_stats = get_log_statistics()
return render_template('modules/settings/logs_explorer.html',
log_files=log_files,
log_stats=log_stats)
except Exception as e:
logger.error(f"Error loading logs explorer: {e}")
flash(f"Error loading logs: {str(e)}", 'error')
return render_template('modules/settings/logs_explorer.html',
log_files=[],
log_stats={})
@settings_bp.route('/logs/view/<filename>', methods=['GET'])
def view_log(filename):
"""View content of a specific log file"""
if 'user_id' not in session:
return redirect(url_for('main.login'))
try:
lines = request.args.get('lines', default=100, type=int)
log_data = get_log_content(filename, lines=lines)
if not log_data.get('success'):
flash(log_data.get('error', 'Error reading log file'), 'error')
return redirect(url_for('settings.logs_explorer'))
return render_template('modules/settings/view_log.html', log_data=log_data)
except Exception as e:
logger.error(f"Error viewing log {filename}: {e}")
flash(f"Error viewing log: {str(e)}", 'error')
return redirect(url_for('settings.logs_explorer'))
@settings_bp.route('/logs/download/<filename>', methods=['GET'])
def download_log(filename):
"""Download a log file"""
if 'user_id' not in session:
return redirect(url_for('main.login'))
try:
filepath = get_log_file_path(filename)
if not filepath:
flash('Invalid file or file not found', 'error')
return redirect(url_for('settings.logs_explorer'))
return send_file(
filepath,
as_attachment=True,
download_name=f"{filename}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
)
except Exception as e:
logger.error(f"Error downloading log {filename}: {e}")
flash(f"Error downloading log: {str(e)}", 'error')
return redirect(url_for('settings.logs_explorer'))
@settings_bp.route('/logs/search', methods=['GET'])
def search_logs():
"""Search for terms in log files"""
if 'user_id' not in session:
return redirect(url_for('main.login'))
try:
search_term = request.args.get('q', '').strip()
filename = request.args.get('file', default=None)
results = []
if search_term:
results = search_in_logs(search_term, filename=filename)
log_files = get_log_files()
return render_template('modules/settings/search_logs.html',
search_term=search_term,
results=results,
log_files=log_files,
selected_file=filename)
except Exception as e:
logger.error(f"Error searching logs: {e}")
flash(f"Error searching logs: {str(e)}", 'error')
return redirect(url_for('settings.logs_explorer'))
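A brief sketch of exercising the new log-explorer endpoints above from a client; the host, port, and filename are assumptions, and a logged-in session is required because every route redirects to the login page when 'user_id' is missing.

# Sketch only -- base URL and filename are assumed, not part of this commit.
import requests

client = requests.Session()
# ... authenticate first so the Flask session contains 'user_id' ...

base = 'http://localhost:5000/settings'
client.get(f'{base}/logs')                                        # list log files
client.get(f'{base}/logs/view/app.log', params={'lines': 500})    # tail a file
client.get(f'{base}/logs/search', params={'q': 'ERROR', 'file': 'app.log'})
client.get(f'{base}/logs/download/app.log')                       # attachment download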

View File

@@ -0,0 +1,247 @@
"""
Settings Module - App Statistics Helper
Provides functions to collect various app statistics for the overview
"""
import os
import pymysql
from datetime import datetime
from pathlib import Path
from app.database import get_db
import logging
logger = logging.getLogger(__name__)
def get_user_count():
"""Get total number of existing users"""
try:
conn = get_db()
cursor = conn.cursor()
cursor.execute("SELECT COUNT(*) as count FROM users WHERE is_active = 1")
result = cursor.fetchone()
cursor.close()
return result[0] if result else 0
except Exception as e:
logger.error(f"Error getting user count: {e}")
return 0
def get_database_size():
"""Get size of the database in MB"""
try:
conn = get_db()
cursor = conn.cursor()
try:
# Get database name from connection
cursor.execute("SELECT DATABASE()")
result = cursor.fetchone()
if not result:
cursor.close()
return 0
db_name = result[0]
# Get database size
query = f"""
SELECT ROUND(SUM(data_length + index_length) / 1024 / 1024, 2)
FROM information_schema.tables
WHERE table_schema = %s
"""
cursor.execute(query, (db_name,))
result = cursor.fetchone()
cursor.close()
return result[0] if result and result[0] else 0
except Exception as e:
cursor.close()
logger.error(f"Error executing database size query: {e}")
return 0
except Exception as e:
logger.error(f"Error getting database size: {e}")
return 0
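# Illustration only -- not part of the committed file. For an assumed schema
# named 'quality_app', the size query above amounts to:
#
#     SELECT ROUND(SUM(data_length + index_length) / 1024 / 1024, 2)
#     FROM information_schema.tables
#     WHERE table_schema = 'quality_app';
#
# which returns a single row such as (123.45,), i.e. the size in MB.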
def get_logs_size():
"""Get total size of log files in MB"""
try:
log_dir = './data/logs'
if not os.path.exists(log_dir):
return 0
total_size = 0
for filename in os.listdir(log_dir):
filepath = os.path.join(log_dir, filename)
if os.path.isfile(filepath):
total_size += os.path.getsize(filepath)
return round(total_size / 1024 / 1024, 2)
except Exception as e:
logger.error(f"Error getting logs size: {e}")
return 0
def get_database_count():
"""Get number of existing databases (user accessible)"""
try:
conn = get_db()
cursor = conn.cursor()
try:
cursor.execute("SHOW DATABASES")
result = cursor.fetchall()
cursor.close()
# Filter out system databases
if result:
excluded_dbs = {'information_schema', 'mysql', 'performance_schema', 'sys'}
user_dbs = [db for db in result if db[0] not in excluded_dbs]
return len(user_dbs)
return 0
except Exception as e:
cursor.close()
logger.error(f"Error executing show databases query: {e}")
return 0
except Exception as e:
logger.error(f"Error getting database count: {e}")
return 0
def get_backup_count():
"""Get number of scheduled backups for the database"""
try:
conn = get_db()
cursor = conn.cursor()
# Check if backups table exists
cursor.execute("""
SELECT COUNT(*) FROM information_schema.tables
WHERE table_schema = DATABASE() AND table_name = 'backup_schedules'
""")
if cursor.fetchone()[0] > 0:
cursor.execute("SELECT COUNT(*) FROM backup_schedules WHERE is_active = 1")
result = cursor.fetchone()
cursor.close()
return result[0] if result else 0
else:
cursor.close()
# Count backup files if no table exists
backup_dir = './data/backups'
if os.path.exists(backup_dir):
return len([f for f in os.listdir(backup_dir) if f.endswith('.sql')])
return 0
except Exception as e:
logger.error(f"Error getting backup count: {e}")
# Fallback to counting backup files
try:
backup_dir = './data/backups'
if os.path.exists(backup_dir):
return len([f for f in os.listdir(backup_dir) if f.endswith('.sql')])
except:
pass
return 0
def get_printer_keys_count():
"""Get number of keys for printers (pairing keys)"""
try:
conn = get_db()
cursor = conn.cursor()
try:
# Check if qz_pairing_keys table exists
cursor.execute("""
SELECT COUNT(*) FROM information_schema.tables
WHERE table_schema = DATABASE() AND table_name = 'qz_pairing_keys'
""")
if cursor.fetchone()[0] > 0:
cursor.execute("SELECT COUNT(*) FROM qz_pairing_keys")
result = cursor.fetchone()
cursor.close()
return result[0] if result else 0
else:
cursor.close()
return 0
except Exception as e:
cursor.close()
logger.error(f"Error executing printer keys query: {e}")
return 0
except Exception as e:
logger.error(f"Error getting printer keys count: {e}")
return 0
def check_app_key_availability():
"""Check app key availability"""
try:
conn = get_db()
cursor = conn.cursor()
try:
# Check if api_keys table exists
cursor.execute("""
SELECT COUNT(*) FROM information_schema.tables
WHERE table_schema = DATABASE() AND table_name = 'api_keys'
""")
if cursor.fetchone()[0] > 0:
cursor.execute("SELECT COUNT(*) FROM api_keys WHERE is_active = 1")
result = cursor.fetchone()
cursor.close()
count = result[0] if result else 0
return {
'available': count > 0,
'count': count,
'status': f'{count} active key(s)' if count > 0 else 'No active keys'
}
else:
cursor.close()
return {
'available': False,
'count': 0,
'status': 'API Keys table not found'
}
except Exception as e:
cursor.close()
logger.error(f"Error executing api_keys query: {e}")
return {
'available': False,
'count': 0,
'status': f'Error: {str(e)}'
}
except Exception as e:
logger.error(f"Error checking app key availability: {e}")
return {
'available': False,
'count': 0,
'status': f'Error: {str(e)}'
}
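# Illustration only -- not part of the committed file. The shapes this helper
# can return, based on the branches above:
#   {'available': True,  'count': 3, 'status': '3 active key(s)'}
#   {'available': False, 'count': 0, 'status': 'No active keys'}
#   {'available': False, 'count': 0, 'status': 'API Keys table not found'}
#   (plus the error variants whose 'status' starts with 'Error: ...')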
def get_all_stats():
"""Get all statistics for the overview"""
try:
return {
'user_count': get_user_count(),
'database_size_mb': get_database_size(),
'logs_size_mb': get_logs_size(),
'database_count': get_database_count(),
'backup_count': get_backup_count(),
'printer_keys_count': get_printer_keys_count(),
'app_key_availability': check_app_key_availability()
}
except Exception as e:
logger.error(f"Error getting all stats: {e}")
# Return defaults on error
return {
'user_count': 0,
'database_size_mb': 0,
'logs_size_mb': 0,
'database_count': 0,
'backup_count': 0,
'printer_keys_count': 0,
'app_key_availability': {
'available': False,
'count': 0,
'status': 'Error loading data'
}
}
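To connect this back to the routes.py change above, a short sketch of how the stats dictionary is consumed; the Jinja2 expressions are assumptions based on the keys returned by get_all_stats, not an excerpt from the index.html template.

# Sketch only: the settings_index route passes the dict straight to the template.
from app.modules.settings.stats import get_all_stats

stats = get_all_stats()
# stats['user_count'], stats['database_size_mb'], stats['logs_size_mb'],
# stats['database_count'], stats['backup_count'], stats['printer_keys_count'],
# stats['app_key_availability']['status']
#
# In modules/settings/index.html these could be rendered with expressions such as
# {{ stats.user_count }} or {{ stats.app_key_availability.status }}.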