Compare commits

4 Commits

Author SHA1 Message Date
ske087
c6e254c390 updated applications database 2025-11-22 22:26:23 +02:00
Quality System Admin
4d6bd537e3 updated files 2025-11-22 18:51:13 +02:00
ske087
5de2584b27 Fix FG quality page: Display OK for quality code 0, export CSV with 0 value 2025-11-13 04:26:46 +02:00
ske087
0d98c527c6 Fix config file parsing and improve backup/restore functionality
- Fix external_server.conf parsing to skip comment lines and empty lines
- Update routes.py get_db_connection() to handle comments
- Update settings.py get_external_db_connection() to handle comments
- Improve restore_backup() to use mariadb command instead of Python parsing
- Remove SQLite database creation (MariaDB only)
- Add environment detection for dump command (mariadb-dump vs mysqldump)
- Add conditional SSL flag based on Docker environment
- Fix database restore to handle MariaDB sandbox mode comments
2025-11-13 03:59:27 +02:00
11 changed files with 3846 additions and 73 deletions
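The last commit above also mentions environment detection for the dump command (mariadb-dump vs mysqldump) and a conditional SSL flag for Docker, but those hunks are not visible below. A minimal sketch of how such detection could look, assuming a config dict like the one the backup manager already uses; the /.dockerenv check, the --skip-ssl flag choice and the function name are assumptions, not the repository's actual code:

    import os
    import shutil

    def build_dump_command(config):
        # Hypothetical helper: prefer the MariaDB client when it is on PATH,
        # otherwise fall back to the MySQL-branded binary.
        dump_bin = 'mariadb-dump' if shutil.which('mariadb-dump') else 'mysqldump'
        cmd = [
            dump_bin,
            f"--host={config['host']}",
            f"--port={config['port']}",
            f"--user={config['user']}",
            f"--password={config['password']}",
        ]
        # Inside the Docker network the database usually has no TLS certificate,
        # so skip SSL there (assumption: the MariaDB client's --skip-ssl flag).
        if dump_bin == 'mariadb-dump' and os.path.exists('/.dockerenv'):
            cmd.append('--skip-ssl')
        cmd.append(config['database'])
        return cmd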

.gitignore vendored (4 changed lines)
View File

@@ -45,3 +45,7 @@ instance/external_server.conf
 *.backup2
 /logs
+/backups
+/config
+/data

View File

@@ -10,5 +10,113 @@
"size": 305632, "size": 305632,
"timestamp": "2025-11-06T03:00:00.179220", "timestamp": "2025-11-06T03:00:00.179220",
"database": "trasabilitate" "database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251107_030000.sql",
"size": 325353,
"timestamp": "2025-11-07T03:00:00.178234",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251108_030000.sql",
"size": 346471,
"timestamp": "2025-11-08T03:00:00.175266",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251109_030000.sql",
"size": 364071,
"timestamp": "2025-11-09T03:00:00.175309",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251110_030000.sql",
"size": 364071,
"timestamp": "2025-11-10T03:00:00.174557",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251111_030000.sql",
"size": 392102,
"timestamp": "2025-11-11T03:00:00.175496",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251112_030000.sql",
"size": 417468,
"timestamp": "2025-11-12T03:00:00.177699",
"database": "trasabilitate"
},
{
"filename": "data_only_trasabilitate_20251113_002851.sql",
"size": 435126,
"timestamp": "2025-11-13T00:28:51.949113",
"database": "trasabilitate"
},
{
"filename": "backup_trasabilitate_20251113_004522.sql",
"size": 455459,
"timestamp": "2025-11-13T00:45:22.992984",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251113_030000.sql",
"size": 435126,
"timestamp": "2025-11-13T03:00:00.187954",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251114_030000.sql",
"size": 458259,
"timestamp": "2025-11-14T03:00:00.179754",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251115_030000.sql",
"size": 484020,
"timestamp": "2025-11-15T03:00:00.181883",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251116_030000.sql",
"size": 494281,
"timestamp": "2025-11-16T03:00:00.179753",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251117_030000.sql",
"size": 494281,
"timestamp": "2025-11-17T03:00:00.181115",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251118_030000.sql",
"size": 536395,
"timestamp": "2025-11-18T03:00:00.183002",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251119_030000.sql",
"size": 539493,
"timestamp": "2025-11-19T03:00:00.182323",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251120_030000.sql",
"size": 539493,
"timestamp": "2025-11-20T03:00:00.182801",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251121_030000.sql",
"size": 539493,
"timestamp": "2025-11-21T03:00:00.183179",
"database": "trasabilitate"
},
{
"filename": "data_only_scheduled_20251122_030000.sql",
"size": 539493,
"timestamp": "2025-11-22T03:00:00.182628",
"database": "trasabilitate"
     }
 ]

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@@ -17,12 +17,57 @@ logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
 class DailyMirrorDatabase:
-    def __init__(self, host='localhost', user='trasabilitate', password='Initial01!', database='trasabilitate'):
-        self.host = host
-        self.user = user
-        self.password = password
-        self.database = database
+    def __init__(self, host=None, user=None, password=None, database=None):
+        """Initialize database connection parameters.
+        If not provided, will read from external_server.conf"""
+        # If parameters not provided, read from config file
+        if host is None or user is None or password is None or database is None:
+            config = self._read_db_config()
+            self.host = config.get('host', 'db')
+            self.user = config.get('user', 'trasabilitate')
+            self.password = config.get('password', 'Initial01!')
+            self.database = config.get('database', 'trasabilitate')
+        else:
+            self.host = host
+            self.user = user
+            self.password = password
+            self.database = database
         self.connection = None
 
+    def _read_db_config(self):
+        """Read database configuration from external_server.conf"""
+        try:
+            from flask import current_app
+            settings_file = os.path.join(current_app.instance_path, 'external_server.conf')
+            if os.path.exists(settings_file):
+                settings = {}
+                with open(settings_file, 'r') as f:
+                    for line in f:
+                        line = line.strip()
+                        # Skip empty lines and comments
+                        if not line or line.startswith('#'):
+                            continue
+                        if '=' in line:
+                            key, value = line.split('=', 1)
+                            settings[key] = value
+                return {
+                    'host': settings.get('server_domain', 'db'),
+                    'user': settings.get('username', 'trasabilitate'),
+                    'password': settings.get('password', 'Initial01!'),
+                    'database': settings.get('database_name', 'trasabilitate')
+                }
+        except Exception as e:
+            logger.warning(f"Could not read config file, using defaults: {e}")
+        # Fallback defaults for Docker environment
+        return {
+            'host': 'db',
+            'user': 'trasabilitate',
+            'password': 'Initial01!',
+            'database': 'trasabilitate'
+        }
+
     def connect(self):
         """Establish database connection"""
@@ -33,7 +78,7 @@ class DailyMirrorDatabase:
                 password=self.password,
                 database=self.database
             )
-            logger.info("Database connection established")
+            logger.info(f"Database connection established to {self.host}")
             return True
         except Exception as e:
             logger.error(f"Database connection failed: {e}")

View File

@@ -415,7 +415,7 @@ class DatabaseBackupManager:
 
     def restore_backup(self, filename):
         """
-        Restore database from a backup file
+        Restore database from a backup file using mariadb command
 
         Args:
             filename (str): Name of the backup file to restore
@@ -439,43 +439,71 @@ class DatabaseBackupManager:
                     'message': 'Backup file not found'
                 }
 
-            # Read SQL file and execute using Python mariadb library
-            import mariadb
+            # Read the SQL file
             with open(file_path, 'r') as f:
                 sql_content = f.read()
 
-            # Connect to database
-            conn = mariadb.connect(
-                user=self.config['user'],
-                password=self.config['password'],
-                host=self.config['host'],
-                port=int(self.config['port']),
-                database=self.config['database']
-            )
-            cursor = conn.cursor()
-
-            # Split SQL into statements and execute
-            statements = sql_content.split(';')
-            executed = 0
-            for statement in statements:
-                statement = statement.strip()
-                if statement:
-                    try:
-                        cursor.execute(statement)
-                        executed += 1
-                    except Exception as stmt_error:
-                        print(f"Warning executing statement: {stmt_error}")
-            conn.commit()
-            conn.close()
-
-            return {
-                'success': True,
-                'message': f'Database restored successfully from {filename} ({executed} statements executed)'
-            }
+            # Clean up SQL content to avoid DEFINER issues
+            import re
+
+            # Remove problematic MariaDB sandbox mode comment from first line
+            lines = sql_content.split('\n')
+            if lines and lines[0].startswith('/*M!999999'):
+                sql_content = '\n'.join(lines[1:])
+
+            # Remove DEFINER clauses that cause permission errors
+            # This regex matches DEFINER=`user`@`host` in various contexts
+            sql_content = re.sub(
+                r'/\*!50017 DEFINER=`[^`]+`@`[^`]+`\*/',
+                '',
+                sql_content
+            )
+            sql_content = re.sub(
+                r'/\*!50013 DEFINER=`[^`]+`@`[^`]+`\*/',
+                '',
+                sql_content
+            )
+            sql_content = re.sub(
+                r'DEFINER=`[^`]+`@`[^`]+`',
+                '',
+                sql_content
+            )
+
+            # Build mariadb restore command
+            cmd = [
+                'mariadb',
+                f"--host={self.config['host']}",
+                f"--port={self.config['port']}",
+                f"--user={self.config['user']}",
+                f"--password={self.config['password']}",
+            ]
+
+            # Add SSL args if needed (Docker environment)
+            cmd.extend(self._get_ssl_args())
+
+            # Add database name
+            cmd.append(self.config['database'])
+
+            # Execute restore with stdin
+            result = subprocess.run(
+                cmd,
+                input=sql_content,
+                capture_output=True,
+                text=True
+            )
+
+            if result.returncode == 0:
+                return {
+                    'success': True,
+                    'message': f'Database restored successfully from {filename}'
+                }
+            else:
+                error_msg = result.stderr
+                print(f"Restore error: {error_msg}")
+                return {
+                    'success': False,
+                    'message': f'Restore failed: {error_msg}'
+                }
         except Exception as e:
             print(f"Exception during restore: {e}")
@@ -512,6 +540,28 @@ class DatabaseBackupManager:
                     'message': 'Backup file not found'
                 }
 
+            # Read SQL content
+            with open(file_path, 'r') as f:
+                sql_content = f.read()
+
+            # Clean up SQL content to avoid DEFINER issues (in case data-only backup has them)
+            import re
+            sql_content = re.sub(
+                r'/\*!50017 DEFINER=`[^`]+`@`[^`]+`\*/',
+                '',
+                sql_content
+            )
+            sql_content = re.sub(
+                r'/\*!50013 DEFINER=`[^`]+`@`[^`]+`\*/',
+                '',
+                sql_content
+            )
+            sql_content = re.sub(
+                r'DEFINER=`[^`]+`@`[^`]+`',
+                '',
+                sql_content
+            )
+
             # First, disable foreign key checks and truncate all tables
             # This ensures clean data import without constraint violations
             try:
@@ -549,10 +599,7 @@ class DatabaseBackupManager:
                 print(f"Warning during table truncation: {e}")
                 # Continue anyway - the restore might still work
 
-            # Read and execute SQL file using Python mariadb library
-            with open(file_path, 'r') as f:
-                sql_content = f.read()
-
+            # Execute SQL using Python mariadb library
             conn = mariadb.connect(
                 user=self.config['user'],
                 password=self.config['password'],
@@ -575,8 +622,8 @@ class DatabaseBackupManager:
                         print(f"Warning executing statement: {stmt_error}")
             conn.commit()
 
-            result_success = True
-            result_returncode = 0
+            cursor.close()
+            conn.close()
 
             # Re-enable foreign key checks
             try:
@@ -595,16 +642,10 @@ class DatabaseBackupManager:
             except Exception as e:
                 print(f"Warning: Could not re-enable foreign key checks: {e}")
 
-            if result_success:
-                return {
-                    'success': True,
-                    'message': f'Data restored successfully from {filename}'
-                }
-            else:
-                return {
-                    'success': False,
-                    'message': f'Data restore failed'
-                }
+            return {
+                'success': True,
+                'message': f'Data restored successfully from {filename} ({executed} statements executed)'
+            }
         except Exception as e:
             print(f"Exception during data restore: {e}")

View File

@@ -680,14 +680,14 @@ def verify_database_setup():
         cursor.close()
         conn.close()
 
-        # Check SQLite database
-        instance_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../instance'))
-        sqlite_path = os.path.join(instance_folder, 'users.db')
-
-        if os.path.exists(sqlite_path):
-            print_success("SQLite database 'users.db' exists")
-        else:
-            print_error("SQLite database 'users.db' missing")
+        # SQLite check disabled - using MariaDB only
+        # instance_folder = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../instance'))
+        # sqlite_path = os.path.join(instance_folder, 'users.db')
+        #
+        # if os.path.exists(sqlite_path):
+        #     print_success("SQLite database 'users.db' exists")
+        # else:
+        #     print_error("SQLite database 'users.db' missing")
 
         return True
@@ -707,7 +707,7 @@ def main():
         create_warehouse_locations_table,
         create_permissions_tables,
         create_users_table_mariadb,
-        create_sqlite_tables,
+        # create_sqlite_tables,  # Disabled - using MariaDB only
         create_database_triggers,
         populate_permissions_data,
         update_external_config,
@@ -731,7 +731,7 @@ def main():
     print(f"📅 Completed at: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
     print("\n📋 Setup Summary:")
     print("   • MariaDB tables created with triggers")
-    print("   • SQLite user database initialized")
+    print("   • MariaDB users table initialized")
     print("   • Permissions system fully configured")
     print("   • Default superadmin user created (username: superadmin, password: superadmin123)")
     print("   • Configuration files updated")

View File

@@ -169,8 +169,13 @@ def get_db_connection():
     settings = {}
     with open(settings_file, 'r') as f:
         for line in f:
-            key, value = line.strip().split('=', 1)
-            settings[key] = value
+            line = line.strip()
+            # Skip empty lines and comments
+            if not line or line.startswith('#'):
+                continue
+            if '=' in line:
+                key, value = line.split('=', 1)
+                settings[key] = value
 
     # Create a database connection
     return mariadb.connect(

View File

@@ -214,8 +214,13 @@ def settings_handler():
     if os.path.exists(settings_file):
         with open(settings_file, 'r') as f:
             for line in f:
-                key, value = line.strip().split('=', 1)
-                external_settings[key] = value
+                line = line.strip()
+                # Skip empty lines and comments
+                if not line or line.startswith('#'):
+                    continue
+                if '=' in line:
+                    key, value = line.split('=', 1)
+                    external_settings[key] = value
 
     return render_template('settings.html', users=users, external_settings=external_settings, current_user={'role': session.get('role', '')})
@@ -230,8 +235,13 @@ def get_external_db_connection():
     settings = {}
     with open(settings_file, 'r') as f:
         for line in f:
-            key, value = line.strip().split('=', 1)
-            settings[key] = value
+            line = line.strip()
+            # Skip empty lines and comments
+            if not line or line.startswith('#'):
+                continue
+            if '=' in line:
+                key, value = line.split('=', 1)
+                settings[key] = value
 
     # Create a database connection
     return mariadb.connect(
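A standalone illustration of the parsing fix applied in routes.py, settings.py and the daily mirror module: the old one-liner raised ValueError on any comment or blank line in external_server.conf, while the new loop skips them:

    sample_conf = """# External database settings
    server_domain=db

    username=trasabilitate
    """

    settings = {}
    for line in sample_conf.splitlines():
        line = line.strip()
        if not line or line.startswith('#'):   # skip comments and blank lines
            continue
        if '=' in line:
            key, value = line.split('=', 1)
            settings[key] = value

    print(settings)  # {'server_domain': 'db', 'username': 'trasabilitate'}

    # Old behaviour, for comparison:
    #   key, value = "# External database settings".split('=', 1)
    #   -> ValueError: not enough values to unpack (expected 2, got 1)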

View File

@@ -354,12 +354,20 @@ window.addEventListener('DOMContentLoaded', function() {
             thead.innerHTML = '';
             tbody.innerHTML = '';
 
+            // Find the index of the quality code column
+            let qualityCodeIndex = -1;
+
             // Add headers
             if (data.headers) {
-                data.headers.forEach(header => {
+                data.headers.forEach((header, index) => {
                     const th = document.createElement('th');
                     th.textContent = header;
                     thead.appendChild(th);
+
+                    // Track the quality code column
+                    if (header === 'Defect Code' || header === 'Quality Code') {
+                        qualityCodeIndex = index;
+                    }
                 });
             }
@@ -367,9 +375,20 @@ window.addEventListener('DOMContentLoaded', function() {
             if (data.rows && data.rows.length > 0) {
                 data.rows.forEach(row => {
                     const tr = document.createElement('tr');
-                    row.forEach(cell => {
+                    row.forEach((cell, index) => {
                         const td = document.createElement('td');
-                        td.textContent = cell || '';
+
+                        // Special handling for quality code column
+                        if (index === qualityCodeIndex && (cell === 0 || cell === '0' || cell === '' || cell === null)) {
+                            td.textContent = 'OK';
+                            td.style.color = '#28a745'; // Green color for OK
+                            td.style.fontWeight = '600';
+                            td.setAttribute('data-csv-value', '0'); // Store original value for CSV
+                        } else {
+                            td.textContent = cell || '';
+                            td.setAttribute('data-csv-value', cell || ''); // Store original value
+                        }
                         tr.appendChild(td);
                     });
                     tbody.appendChild(tr);
@@ -420,11 +439,69 @@ window.addEventListener('DOMContentLoaded', function() {
             .then(response => response.json())
             .then(data => {
                 console.log('📊 FG Range data received:', data);
-                // Handle response similar to above
-                document.getElementById('report-title').textContent = `FG Date Range Report (${startDate} to ${endDate})`;
+
+                const table = document.getElementById('report-table');
+                const thead = table.querySelector('thead tr');
+                const tbody = table.querySelector('tbody');
+
+                // Clear existing content
+                thead.innerHTML = '';
+                tbody.innerHTML = '';
+
+                // Find the index of the quality code column
+                let qualityCodeIndex = -1;
+
+                // Add headers
+                if (data.headers) {
+                    data.headers.forEach((header, index) => {
+                        const th = document.createElement('th');
+                        th.textContent = header;
+                        thead.appendChild(th);
+
+                        // Track the quality code column
+                        if (header === 'Defect Code' || header === 'Quality Code') {
+                            qualityCodeIndex = index;
+                        }
+                    });
+                }
+
+                // Add rows
+                if (data.rows && data.rows.length > 0) {
+                    data.rows.forEach(row => {
+                        const tr = document.createElement('tr');
+                        row.forEach((cell, index) => {
+                            const td = document.createElement('td');
+
+                            // Special handling for quality code column
+                            if (index === qualityCodeIndex && (cell === 0 || cell === '0' || cell === '' || cell === null)) {
+                                td.textContent = 'OK';
+                                td.style.color = '#28a745'; // Green color for OK
+                                td.style.fontWeight = '600';
+                                td.setAttribute('data-csv-value', '0'); // Store original value for CSV
+                            } else {
+                                td.textContent = cell || '';
+                                td.setAttribute('data-csv-value', cell || ''); // Store original value
+                            }
+                            tr.appendChild(td);
+                        });
+                        tbody.appendChild(tr);
+                    });
+
+                    document.getElementById('report-title').textContent = `FG Date Range Report (${startDate} to ${endDate}) - ${data.rows.length} records`;
+                } else {
+                    const tr = document.createElement('tr');
+                    const td = document.createElement('td');
+                    td.colSpan = data.headers ? data.headers.length : 1;
+                    td.textContent = data.message || 'No FG data found';
+                    td.style.textAlign = 'center';
+                    tr.appendChild(td);
+                    tbody.appendChild(tr);
+                    document.getElementById('report-title').textContent = `FG Date Range Report (${startDate} to ${endDate}) - No data`;
+                }
             })
             .catch(error => {
                 console.error('❌ Error fetching FG range data:', error);
+                document.getElementById('report-title').textContent = 'Error loading FG data';
             });
 
             // Hide modal
// Hide modal // Hide modal

View File

@@ -1 +1 @@
-426552
+427002