Implement boxes management module with auto-numbered box creation
- Add boxes_crates database table with BIGINT IDs and 8-digit auto-numbered box_numbers (sketched below)
- Implement boxes CRUD operations (add, edit, update, delete, delete_multiple)
- Create boxes route handlers with POST actions for all operations
- Add boxes.html template with 3-panel layout matching warehouse locations module
- Implement barcode generation and printing with JsBarcode and QZ Tray integration
- Add browser print fallback for when QZ Tray is not available
- Simplify create box form to single button with auto-generation
- Fix JavaScript null reference errors with proper element validation
- Convert tuple data to dictionaries for Jinja2 template compatibility
- Register boxes blueprint in Flask app initialization
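As a rough illustration of the auto-numbering described in the first bullet, the sketch below derives an 8-digit, zero-padded box_number from the row's integer primary key. The table layout, helper name, and SQLite backend are assumptions for illustration only; the actual boxes module is not part of the diff shown here.

    # Hypothetical sketch only: schema and helper names are assumed, not taken from this commit.
    import sqlite3

    def create_box(conn: sqlite3.Connection) -> str:
        """Insert a box row and store its id zero-padded to 8 digits."""
        cur = conn.execute("INSERT INTO boxes_crates DEFAULT VALUES")
        box_number = f"{cur.lastrowid:08d}"  # e.g. 42 -> '00000042'
        conn.execute(
            "UPDATE boxes_crates SET box_number = ? WHERE id = ?",
            (box_number, cur.lastrowid),
        )
        conn.commit()
        return box_number

    if __name__ == "__main__":
        conn = sqlite3.connect(":memory:")
        conn.execute("CREATE TABLE boxes_crates (id INTEGER PRIMARY KEY, box_number TEXT)")
        print(create_box(conn))  # -> 00000001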
app/modules/settings/logs.py (new file, +231 lines)
@@ -0,0 +1,231 @@
"""
Settings Module - Log Explorer Helper
Provides functions to explore and manage application logs
"""
import os
from datetime import datetime
from pathlib import Path
import logging

logger = logging.getLogger(__name__)


def get_log_files():
    """Get list of all log files in the logs folder"""
    try:
        log_dir = './data/logs'
        if not os.path.exists(log_dir):
            return []

        log_files = []
        for filename in sorted(os.listdir(log_dir)):
            filepath = os.path.join(log_dir, filename)
            if os.path.isfile(filepath):
                try:
                    stat_info = os.stat(filepath)
                    log_files.append({
                        'name': filename,
                        'size': stat_info.st_size,
                        'size_mb': round(stat_info.st_size / 1024 / 1024, 2),
                        'modified_at': datetime.fromtimestamp(stat_info.st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
                        'path': filepath
                    })
                except Exception as e:
                    logger.error(f"Error getting stat info for {filename}: {e}")
                    continue

        return sorted(log_files, key=lambda x: x['modified_at'], reverse=True)
    except Exception as e:
        logger.error(f"Error getting log files: {e}")
        return []


def get_log_content(filename, lines=100):
    """Get content of a log file

    Args:
        filename: Name of the log file (without path)
        lines: Number of lines to read from the end (None for all)

    Returns:
        Dictionary with file info and content
    """
    try:
        log_dir = './data/logs'
        filepath = os.path.join(log_dir, filename)

        # Security check - ensure filepath is within log_dir
        if not os.path.abspath(filepath).startswith(os.path.abspath(log_dir)):
            logger.error(f"Attempted to access file outside log directory: {filepath}")
            return {
                'success': False,
                'error': 'Invalid file path',
                'filename': filename
            }

        if not os.path.exists(filepath):
            return {
                'success': False,
                'error': f'File not found: {filename}',
                'filename': filename
            }

        # Read file content
        try:
            with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
                if lines:
                    # Read all lines and get the last N lines
                    all_lines = f.readlines()
                    content_lines = all_lines[-lines:] if len(all_lines) > lines else all_lines
                    content = ''.join(content_lines)
                    total_lines = len(all_lines)
                else:
                    content = f.read()
                    total_lines = len(content.splitlines())

            stat_info = os.stat(filepath)

            return {
                'success': True,
                'filename': filename,
                'size': stat_info.st_size,
                'size_mb': round(stat_info.st_size / 1024 / 1024, 2),
                'modified_at': datetime.fromtimestamp(stat_info.st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
                'content': content,
                'total_lines': total_lines,
                'displayed_lines': len(content.splitlines()),
                'truncated': lines and total_lines > lines
            }
        except Exception as e:
            return {
                'success': False,
                'error': f'Error reading file: {str(e)}',
                'filename': filename
            }

    except Exception as e:
        logger.error(f"Error getting log content for {filename}: {e}")
        return {
            'success': False,
            'error': f'Error: {str(e)}',
            'filename': filename
        }


def get_log_file_path(filename):
    """Get safe file path for download/save

    Args:
        filename: Name of the log file (without path)

    Returns:
        Full file path if valid, None otherwise
    """
    try:
        log_dir = './data/logs'
        filepath = os.path.join(log_dir, filename)

        # Security check - ensure filepath is within log_dir
        if not os.path.abspath(filepath).startswith(os.path.abspath(log_dir)):
            logger.error(f"Attempted to access file outside log directory: {filepath}")
            return None

        if not os.path.exists(filepath):
            logger.error(f"File not found: {filepath}")
            return None

        return filepath
    except Exception as e:
        logger.error(f"Error getting log file path for {filename}: {e}")
        return None


def get_log_statistics():
    """Get statistics about log files"""
    try:
        log_files = get_log_files()

        if not log_files:
            return {
                'total_files': 0,
                'total_size_mb': 0,
                'oldest_log': None,
                'newest_log': None
            }

        total_size = sum(f['size'] for f in log_files)

        return {
            'total_files': len(log_files),
            'total_size_mb': round(total_size / 1024 / 1024, 2),
            'oldest_log': log_files[-1]['modified_at'] if log_files else None,
            'newest_log': log_files[0]['modified_at'] if log_files else None
        }
    except Exception as e:
        logger.error(f"Error getting log statistics: {e}")
        return {
            'total_files': 0,
            'total_size_mb': 0,
            'oldest_log': None,
            'newest_log': None
        }


def search_in_logs(search_term, filename=None, max_results=50):
    """Search for a term in log files

    Args:
        search_term: Term to search for
        filename: Optional specific file to search in
        max_results: Maximum number of results to return

    Returns:
        List of matching lines with context
    """
    try:
        log_dir = './data/logs'
        results = []

        if filename:
            # Search in specific file
            filepath = os.path.join(log_dir, filename)
            if not os.path.abspath(filepath).startswith(os.path.abspath(log_dir)):
                return []

            if os.path.exists(filepath):
                try:
                    with open(filepath, 'r', encoding='utf-8', errors='ignore') as f:
                        for line_num, line in enumerate(f, 1):
                            if search_term.lower() in line.lower():
                                results.append({
                                    'file': filename,
                                    'line_num': line_num,
                                    'line': line.strip()
                                })
                                if len(results) >= max_results:
                                    break
                except Exception as e:
                    logger.error(f"Error searching in {filename}: {e}")
        else:
            # Search in all log files
            for log_file in get_log_files():
                try:
                    with open(log_file['path'], 'r', encoding='utf-8', errors='ignore') as f:
                        for line_num, line in enumerate(f, 1):
                            if search_term.lower() in line.lower():
                                results.append({
                                    'file': log_file['name'],
                                    'line_num': line_num,
                                    'line': line.strip()
                                })
                                if len(results) >= max_results:
                                    break
                    if len(results) >= max_results:
                        break
                except Exception as e:
                    logger.error(f"Error searching in {log_file['name']}: {e}")

        return results
    except Exception as e:
        logger.error(f"Error searching logs: {e}")
        return []
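For orientation, here is a minimal usage sketch of how a settings blueprint could expose these helpers as routes. Only the imported functions and the module path app.modules.settings.logs come from the diff above; the blueprint name, URL rules, and template path are assumptions.

    # Hypothetical usage sketch: route and blueprint names are assumed, not part of this commit.
    from flask import Blueprint, abort, jsonify, render_template, request, send_file

    from app.modules.settings.logs import (
        get_log_content, get_log_file_path, get_log_files, get_log_statistics)

    settings_bp = Blueprint('settings', __name__, url_prefix='/settings')

    @settings_bp.route('/logs')
    def logs_index():
        # Render the list of log files plus summary statistics
        return render_template('settings/logs.html',
                               files=get_log_files(),
                               stats=get_log_statistics())

    @settings_bp.route('/logs/<filename>')
    def logs_view(filename):
        # Tail the selected file; ?lines=500 overrides the default of 100
        lines = request.args.get('lines', default=100, type=int)
        return jsonify(get_log_content(filename, lines=lines))

    @settings_bp.route('/logs/<filename>/download')
    def logs_download(filename):
        # get_log_file_path() already rejects paths outside the log directory
        filepath = get_log_file_path(filename)
        if filepath is None:
            abort(404)
        return send_file(filepath, as_attachment=True)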