"""
Daily Mirror Database Setup and Management
Quality Recticel Application
This script creates the database schema and provides utilities for
data import and Daily Mirror reporting functionality.
"""
import mariadb
import pandas as pd
import os
from datetime import datetime, timedelta
import logging
# Setup logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class DailyMirrorDatabase:

    def __init__(self, host='localhost', user='trasabilitate', password='Initial01!', database='trasabilitate'):
        self.host = host
        self.user = user
        self.password = password
        self.database = database
        self.connection = None
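
    # Note: the default arguments above embed real credentials in source;
    # overriding them via constructor arguments or environment variables is
    # preferable outside local development.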

    def connect(self):
        """Establish database connection"""
        try:
            self.connection = mariadb.connect(
                host=self.host,
                user=self.user,
                password=self.password,
                database=self.database
            )
            logger.info("Database connection established")
            return True
        except Exception as e:
            logger.error(f"Database connection failed: {e}")
            return False
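
    # MariaDB Connector/Python uses '?' (qmark) placeholders, as in the SQL
    # below, and per PEP 249 leaves autocommit off by default, which is why each
    # import method calls self.connection.commit() explicitly.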

    def disconnect(self):
        """Close database connection"""
        if self.connection:
            self.connection.close()
            logger.info("Database connection closed")

    def create_database_schema(self):
        """Create the Daily Mirror database schema"""
        try:
            cursor = self.connection.cursor()
            # Read and execute the schema file
            schema_file = os.path.join(os.path.dirname(__file__), 'daily_mirror_database_schema.sql')
            if not os.path.exists(schema_file):
                logger.error(f"Schema file not found: {schema_file}")
                return False
            with open(schema_file, 'r') as file:
                schema_sql = file.read()
            # Split by statements and execute each one
            statements = []
            current_statement = ""
            for line in schema_sql.split('\n'):
                line = line.strip()
                if line and not line.startswith('--'):
                    current_statement += line + " "
                    if line.endswith(';'):
                        statements.append(current_statement.strip())
                        current_statement = ""
            # Add any remaining statement
            if current_statement.strip():
                statements.append(current_statement.strip())
            for statement in statements:
                if statement and any(statement.upper().startswith(cmd) for cmd in ['CREATE', 'ALTER', 'DROP', 'INSERT']):
                    try:
                        cursor.execute(statement)
                        logger.info(f"Executed: {statement[:80]}...")
                    except Exception as e:
                        if "already exists" not in str(e).lower():
                            logger.warning(f"Error executing statement: {e}")
            self.connection.commit()
            logger.info("Database schema created successfully")
            return True
        except Exception as e:
            logger.error(f"Error creating database schema: {e}")
            return False
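
    # Caveat: the splitter above treats any line ending in ';' as a statement
    # boundary, so it will mis-split schema files containing semicolons inside
    # string literals or DELIMITER-based triggers/procedures. Plain CREATE TABLE
    # scripts, as assumed here, are handled fine.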

    def import_production_data(self, file_path):
        """Import production data from Excel file (Production orders Data sheet OR DataSheet)"""
        try:
            # Read from "Production orders Data" sheet (new format) or "DataSheet" (old format)
            df = None
            sheet_used = None
            # Try different engines (openpyxl for .xlsx, pyxlsb for .xlsb)
            engines_to_try = ['openpyxl', 'pyxlsb']
            # Try different sheet names (new format first, then old format)
            sheet_names_to_try = ['Production orders Data', 'DataSheet']
            for engine in engines_to_try:
                if df is not None:
                    break
                try:
                    logger.info(f"Trying to read Excel file with engine: {engine}")
                    excel_file = pd.ExcelFile(file_path, engine=engine)
                    logger.info(f"Available sheets: {excel_file.sheet_names}")
                    # Try each sheet name
                    for sheet_name in sheet_names_to_try:
                        if sheet_name in excel_file.sheet_names:
                            try:
                                logger.info(f"Reading sheet '{sheet_name}'")
                                df = pd.read_excel(file_path, sheet_name=sheet_name, engine=engine, header=0)
                                sheet_used = f"{sheet_name} (engine: {engine})"
                                logger.info(f"Successfully read from sheet: {sheet_used}")
                                break
                            except Exception as sheet_error:
                                logger.warning(f"Failed to read sheet '{sheet_name}': {sheet_error}")
                                continue
                    if df is not None:
                        break
                except Exception as e:
                    logger.warning(f"Failed with engine {engine}: {e}")
                    continue
            if df is None:
                raise Exception("Could not read Excel file. Please ensure it has a 'Production orders Data' or 'DataSheet' sheet.")
            logger.info(f"Loaded production data from {sheet_used}: {len(df)} rows, {len(df.columns)} columns")
            logger.info(f"First 5 column names: {list(df.columns)[:5]}")
            cursor = self.connection.cursor()
            success_count = 0
            created_count = 0
            updated_count = 0
            error_count = 0
            # Prepare insert statement with the new schema (37 columns)
            insert_sql = """
                INSERT INTO dm_production_orders (
                    production_order, production_order_line, line_number,
                    open_for_order_line, client_order_line,
                    customer_code, customer_name, article_code, article_description,
                    quantity_requested, unit_of_measure, delivery_date, opening_date,
                    closing_date, data_planificare, production_status,
                    machine_code, machine_type, machine_number,
                    end_of_quilting, end_of_sewing,
                    phase_t1_prepared, t1_operator_name, t1_registration_date,
                    phase_t2_cut, t2_operator_name, t2_registration_date,
                    phase_t3_sewing, t3_operator_name, t3_registration_date,
                    design_number, classification, model_description, model_lb2,
                    needle_position, needle_row, priority
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
                          ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ON DUPLICATE KEY UPDATE
                    open_for_order_line = VALUES(open_for_order_line),
                    client_order_line = VALUES(client_order_line),
                    customer_code = VALUES(customer_code),
                    customer_name = VALUES(customer_name),
                    article_code = VALUES(article_code),
                    article_description = VALUES(article_description),
                    quantity_requested = VALUES(quantity_requested),
                    delivery_date = VALUES(delivery_date),
                    production_status = VALUES(production_status),
                    machine_code = VALUES(machine_code),
                    end_of_quilting = VALUES(end_of_quilting),
                    end_of_sewing = VALUES(end_of_sewing),
                    phase_t1_prepared = VALUES(phase_t1_prepared),
                    t1_operator_name = VALUES(t1_operator_name),
                    t1_registration_date = VALUES(t1_registration_date),
                    phase_t2_cut = VALUES(phase_t2_cut),
                    t2_operator_name = VALUES(t2_operator_name),
                    t2_registration_date = VALUES(t2_registration_date),
                    phase_t3_sewing = VALUES(phase_t3_sewing),
                    t3_operator_name = VALUES(t3_operator_name),
                    t3_registration_date = VALUES(t3_registration_date),
                    updated_at = CURRENT_TIMESTAMP
            """
            # Helper functions to safely coerce cell values (defined once, before the loop)
            def safe_int(value, default=None):
                if pd.isna(value) or value == '':
                    return default
                try:
                    return int(float(value))
                except (ValueError, TypeError):
                    return default

            def safe_float(value, default=None):
                if pd.isna(value) or value == '':
                    return default
                try:
                    return float(value)
                except (ValueError, TypeError):
                    return default

            def safe_str(value, default=''):
                if pd.isna(value):
                    return default
                return str(value).strip()

            for index, row in df.iterrows():
                try:
                    # Create concatenated fields with dash separator
                    opened_for_order = safe_str(row.get('Opened for Order'))
                    linia = safe_str(row.get('Linia'))
                    open_for_order_line = f"{opened_for_order}-{linia}" if opened_for_order and linia else ''
                    com_achiz_client = safe_str(row.get('Com. Achiz. Client'))
                    nr_linie_com_client = safe_str(row.get('Nr. linie com. client'))
                    client_order_line = f"{com_achiz_client}-{nr_linie_com_client}" if com_achiz_client and nr_linie_com_client else ''
                    # The INSERT lists 37 columns but the original tuple supplied only
                    # 35 values (production_order_line and line_number were missing),
                    # which would fail on every row. They are reconstructed here by
                    # analogy with the dm_orders key; treating 'Linia' as the
                    # production line number is an assumption.
                    production_order = safe_str(row.get('Comanda Productie'))
                    line_number = safe_int(row.get('Linia'), 1)
                    production_order_line = f"{production_order}-{line_number}"
                    # Prepare data tuple (order must match the column list above)
                    data = (
                        production_order,                                    # production_order
                        production_order_line,                               # production_order_line (reconstructed)
                        line_number,                                         # line_number (assumed from 'Linia')
                        open_for_order_line,                                 # open_for_order_line (concatenated)
                        client_order_line,                                   # client_order_line (concatenated)
                        safe_str(row.get('Cod. Client')),                    # customer_code
                        safe_str(row.get('Customer Name')),                  # customer_name
                        safe_str(row.get('Cod Articol')),                    # article_code
                        safe_str(row.get('Descr. Articol.1')),               # article_description
                        safe_int(row.get('Cantitate Com. Prod.'), 0),        # quantity_requested
                        safe_str(row.get('U.M.')),                           # unit_of_measure
                        self._parse_date(row.get('SO Duedate')),             # delivery_date
                        self._parse_date(row.get('Data Deschiderii')),       # opening_date
                        self._parse_date(row.get('Data Inchiderii')),        # closing_date
                        self._parse_date(row.get('Data Planific.')),         # data_planificare
                        safe_str(row.get('Status')),                         # production_status
                        safe_str(row.get('Masina cusut')),                   # machine_code
                        safe_str(row.get('Tip masina')),                     # machine_type
                        safe_str(row.get('Machine Number')),                 # machine_number
                        self._parse_date(row.get('End of Quilting')),        # end_of_quilting
                        self._parse_date(row.get('End of Sewing')),          # end_of_sewing
                        safe_str(row.get('T2')),                             # phase_t1_prepared (using T2 column)
                        safe_str(row.get('Nume complet T2')),                # t1_operator_name
                        self._parse_datetime(row.get('Data inregistrare T2')),  # t1_registration_date
                        safe_str(row.get('T1')),                             # phase_t2_cut (using T1 column)
                        safe_str(row.get('Nume complet T1')),                # t2_operator_name
                        self._parse_datetime(row.get('Data inregistrare T1')),  # t2_registration_date
                        safe_str(row.get('T3')),                             # phase_t3_sewing
                        safe_str(row.get('Nume complet T3')),                # t3_operator_name
                        self._parse_datetime(row.get('Data inregistrare T3')),  # t3_registration_date
                        safe_int(row.get('Design number')),                  # design_number
                        safe_str(row.get('Clasificare')),                    # classification
                        safe_str(row.get('Descriere Model')),                # model_description
                        safe_str(row.get('Model Lb2')),                      # model_lb2
                        safe_float(row.get('Needle Position')),              # needle_position
                        safe_str(row.get('Needle row')),                     # needle_row
                        safe_int(row.get('Prioritate executie'), 0)          # priority
                    )
                    cursor.execute(insert_sql, data)
                    # With MariaDB/MySQL ON DUPLICATE KEY UPDATE:
                    #   rowcount == 1 -> INSERT (new row created)
                    #   rowcount == 2 -> UPDATE (existing row updated)
                    #   rowcount == 0 -> existing row left unchanged
                    if cursor.rowcount == 1:
                        created_count += 1
                    elif cursor.rowcount == 2:
                        updated_count += 1
                    success_count += 1
                except Exception as row_error:
                    logger.warning(f"Error processing row {index}: {row_error}")
                    # Log the first few values of the problematic row
                    try:
                        row_sample = {k: v for k, v in list(row.items())[:5]}
                        logger.warning(f"Row data sample: {row_sample}")
                    except Exception:
                        pass
                    error_count += 1
                    continue
            self.connection.commit()
            logger.info(f"Production data import completed: {success_count} successful ({created_count} created, {updated_count} updated), {error_count} failed")
            return {
                'success_count': success_count,
                'created_count': created_count,
                'updated_count': updated_count,
                'error_count': error_count,
                'total_rows': len(df)
            }
        except Exception as e:
            logger.error(f"Error importing production data: {e}")
            import traceback
            logger.error(traceback.format_exc())
            return None
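
    # On success import_production_data returns a dict of the form
    #   {'success_count': ..., 'created_count': ..., 'updated_count': ...,
    #    'error_count': ..., 'total_rows': ...}
    # and returns None when the workbook itself cannot be read, so callers
    # should check for None before indexing the result.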

    def import_orders_data(self, file_path):
        """Import orders data from Excel file with enhanced error handling and multi-line support"""
        try:
            # Ensure we have a database connection
            if not self.connection:
                self.connect()
                if not self.connection:
                    return {
                        'success_count': 0,
                        'error_count': 1,
                        'total_rows': 0,
                        'error_message': 'Could not establish database connection.'
                    }
            logger.info(f"Attempting to import orders data from: {file_path}")
            # Check if file exists
            if not os.path.exists(file_path):
                logger.error(f"Orders file not found: {file_path}")
                return {
                    'success_count': 0,
                    'error_count': 1,
                    'total_rows': 0,
                    'error_message': f'Orders file not found: {file_path}'
                }
            # Read from DataSheet - the correct sheet for orders data
            try:
                df = pd.read_excel(file_path, sheet_name='DataSheet', engine='openpyxl', header=0)
                logger.info(f"Successfully read orders data from DataSheet: {len(df)} rows, {len(df.columns)} columns")
                logger.info(f"Available columns: {list(df.columns)[:15]}...")
            except Exception as e:
                logger.error(f"Failed to read DataSheet from orders file: {e}")
                return {
                    'success_count': 0,
                    'error_count': 1,
                    'total_rows': 0,
                    'error_message': f'Could not read DataSheet from orders file: {e}'
                }
            cursor = self.connection.cursor()
            success_count = 0
            created_count = 0
            updated_count = 0
            error_count = 0
            # Prepare insert statement matching the actual table structure (21 columns)
            insert_sql = """
                INSERT INTO dm_orders (
                    order_line, order_id, line_number, customer_code, customer_name,
                    client_order_line, article_code, article_description,
                    quantity_requested, balance, unit_of_measure, delivery_date, order_date,
                    order_status, article_status, priority, product_group, production_order,
                    production_status, model, closed
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ON DUPLICATE KEY UPDATE
                    order_id = VALUES(order_id),
                    line_number = VALUES(line_number),
                    customer_code = VALUES(customer_code),
                    customer_name = VALUES(customer_name),
                    client_order_line = VALUES(client_order_line),
                    article_code = VALUES(article_code),
                    article_description = VALUES(article_description),
                    quantity_requested = VALUES(quantity_requested),
                    balance = VALUES(balance),
                    unit_of_measure = VALUES(unit_of_measure),
                    delivery_date = VALUES(delivery_date),
                    order_date = VALUES(order_date),
                    order_status = VALUES(order_status),
                    article_status = VALUES(article_status),
                    priority = VALUES(priority),
                    product_group = VALUES(product_group),
                    production_order = VALUES(production_order),
                    production_status = VALUES(production_status),
                    model = VALUES(model),
                    closed = VALUES(closed),
                    updated_at = CURRENT_TIMESTAMP
            """
            # Safe value helper functions
            def safe_str(value, default=''):
                if pd.isna(value):
                    return default
                return str(value).strip() if value != '' else default

            def safe_int(value, default=None):
                if pd.isna(value):
                    return default
                try:
                    if isinstance(value, str):
                        value = value.strip()
                        if value == '':
                            return default
                    return int(float(value))
                except (ValueError, TypeError):
                    return default

            def safe_float(value, default=None):
                if pd.isna(value):
                    return default
                try:
                    if isinstance(value, str):
                        value = value.strip()
                        if value == '':
                            return default
                    return float(value)
                except (ValueError, TypeError):
                    return default
            # Process each row with the new schema
            for index, row in df.iterrows():
                order_line = None  # pre-initialized so the error handler below never reports a stale value
                try:
                    # Create concatenated unique keys
                    order_id = safe_str(row.get('Comanda'), f'ORD_{index:06d}')
                    line_number = safe_int(row.get('Linie'), 1)
                    order_line = f"{order_id}-{line_number}"
                    # Create concatenated client order line
                    client_order = safe_str(row.get('Com. Achiz. Client'))
                    client_order_line_num = safe_str(row.get('Nr. linie com. client'))
                    client_order_line = f"{client_order}-{client_order_line_num}" if client_order and client_order_line_num else ''
                    # Map all fields from Excel to database (21 fields)
                    data = (
                        order_line,                                  # order_line (UNIQUE key: order_id-line_number)
                        order_id,                                    # order_id
                        line_number,                                 # line_number
                        safe_str(row.get('Cod. Client')),            # customer_code
                        safe_str(row.get('Customer Name')),          # customer_name
                        client_order_line,                           # client_order_line (concatenated)
                        safe_str(row.get('Cod Articol')),            # article_code
                        safe_str(row.get('Part Description')),       # article_description
                        safe_int(row.get('Cantitate')),              # quantity_requested
                        safe_float(row.get('Balanta')),              # balance
                        safe_str(row.get('U.M.')),                   # unit_of_measure
                        self._parse_date(row.get('Data livrare')),   # delivery_date
                        self._parse_date(row.get('Data Comenzii')),  # order_date
                        safe_str(row.get('Statut Comanda')),         # order_status
                        safe_str(row.get('Stare Articol')),          # article_status
                        safe_int(row.get('Prioritate')),             # priority
                        safe_str(row.get('Grup')),                   # product_group
                        safe_str(row.get('Comanda Productie')),      # production_order
                        safe_str(row.get('Stare CP')),               # production_status
                        safe_str(row.get('Model')),                  # model
                        safe_str(row.get('Inchis'))                  # closed
                    )
                    cursor.execute(insert_sql, data)
                    # Track created vs updated (rowcount semantics as in import_production_data)
                    if cursor.rowcount == 1:
                        created_count += 1
                    elif cursor.rowcount == 2:
                        updated_count += 1
                    success_count += 1
                except Exception as row_error:
                    logger.warning(f"Error processing row {index} (order_line: {order_line or 'unknown'}): {row_error}")
                    error_count += 1
                    continue
            self.connection.commit()
            logger.info(f"Orders import completed: {success_count} successful ({created_count} created, {updated_count} updated), {error_count} errors")
            return {
                'success_count': success_count,
                'created_count': created_count,
                'updated_count': updated_count,
                'error_count': error_count,
                'total_rows': len(df),
                'error_message': None if error_count == 0 else f'{error_count} rows failed to import'
            }
        except Exception as e:
            logger.error(f"Error importing orders data: {e}")
            import traceback
            logger.error(traceback.format_exc())
            return {
                'success_count': 0,
                'error_count': 1,
                'total_rows': 0,
                'error_message': str(e)
            }
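
    # dm_orders upserts on the order_line key (order_id-line_number), so
    # re-importing the same workbook updates existing rows in place instead of
    # duplicating them.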

    def import_delivery_data(self, file_path):
        """Import delivery data from Excel file with enhanced error handling"""
        try:
            # Ensure we have a database connection
            if not self.connection:
                self.connect()
                if not self.connection:
                    return {
                        'success_count': 0,
                        'error_count': 1,
                        'total_rows': 0,
                        'error_message': 'Could not establish database connection.'
                    }
            logger.info(f"Attempting to import delivery data from: {file_path}")
            # Check if file exists
            if not os.path.exists(file_path):
                logger.error(f"Delivery file not found: {file_path}")
                return {
                    'success_count': 0,
                    'error_count': 1,
                    'total_rows': 0,
                    'error_message': f'Delivery file not found: {file_path}'
                }
            # Try to get sheet names first
            try:
                excel_file = pd.ExcelFile(file_path)
                sheet_names = excel_file.sheet_names
                logger.info(f"Available sheets in delivery file: {sheet_names}")
            except Exception as e:
                logger.warning(f"Could not get sheet names: {e}")
                sheet_names = ['DataSheet', 'Sheet1']
            # Try multiple approaches to read the Excel file
            df = None
            sheet_used = None
            approaches = [
                ('openpyxl', 0, 'read_only'),
                ('openpyxl', 0, 'normal'),
                ('openpyxl', 1, 'normal'),
                ('xlrd', 0, 'normal') if file_path.endswith('.xls') else None,
                ('default', 0, 'normal')
            ]
            for approach in approaches:
                if approach is None:
                    continue
                engine, sheet_name, mode = approach
                try:
                    logger.info(f"Trying to read delivery data with engine: {engine}, sheet: {sheet_name}, mode: {mode}")
                    if engine == 'default':
                        df = pd.read_excel(file_path, sheet_name=sheet_name, header=0)
                    else:
                        # Note: pandas exposes no read-only switch on read_excel, so the
                        # 'read_only' and 'normal' attempts issue the identical call
                        df = pd.read_excel(file_path, sheet_name=sheet_name, engine=engine, header=0)
                    sheet_used = f"{engine} (sheet: {sheet_name}, mode: {mode})"
                    logger.info(f"Successfully read delivery data with: {sheet_used}")
                    break
                except Exception as e:
                    logger.warning(f"Failed with {engine}, sheet {sheet_name}, mode {mode}: {e}")
                    continue
            if df is None:
                logger.error("Could not read the delivery file with any method")
                return {
                    'success_count': 0,
                    'error_count': 1,
                    'total_rows': 0,
                    'error_message': 'Could not read the delivery Excel file. The file may have formatting issues or be corrupted.'
                }
            logger.info(f"Loaded delivery data from {sheet_used}: {len(df)} rows, {len(df.columns)} columns")
            logger.info(f"Available columns: {list(df.columns)[:10]}...")
            cursor = self.connection.cursor()
            success_count = 0
            created_count = 0
            updated_count = 0
            error_count = 0
            # Prepare insert statement for deliveries - a plain INSERT, so every
            # Excel row gets its own database row
            insert_sql = """
                INSERT INTO dm_deliveries (
                    shipment_id, order_id, client_order_line, customer_code, customer_name,
                    article_code, article_description, quantity_delivered,
                    shipment_date, delivery_date, delivery_status, total_value
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """
            # Safe value helper functions (hoisted out of the row loop so they are
            # defined once rather than once per row)
            def safe_str(value, default=''):
                if pd.isna(value):
                    return default
                return str(value).strip() if value != '' else default

            def safe_int(value, default=None):
                if pd.isna(value):
                    return default
                try:
                    if isinstance(value, str):
                        value = value.strip()
                        if value == '':
                            return default
                    return int(float(value))
                except (ValueError, TypeError):
                    return default

            def safe_float(value, default=None):
                if pd.isna(value):
                    return default
                try:
                    if isinstance(value, str):
                        value = value.strip()
                        if value == '':
                            return default
                    return float(value)
                except (ValueError, TypeError):
                    return default

            # Process each row with the actual column mapping and better null handling
            for index, row in df.iterrows():
                try:
                    # Create concatenated client order line: Com. Achiz. Client + "-" + Linie
                    client_order = safe_str(row.get('Com. Achiz. Client'))
                    linie = safe_str(row.get('Linie'))
                    client_order_line = f"{client_order}-{linie}" if client_order and linie else ''
                    # Map columns based on the actual Articole livrate_returnate format
                    data = (
                        safe_str(row.get('Document Number'), f'SH_{index:06d}'),  # shipment_id
                        safe_str(row.get('Comanda')),             # order_id
                        client_order_line,                        # client_order_line (concatenated)
                        safe_str(row.get('Cod. Client')),         # customer_code
                        safe_str(row.get('Nume client')),         # customer_name
                        safe_str(row.get('Cod Articol')),         # article_code
                        safe_str(row.get('Part Description')),    # article_description
                        safe_int(row.get('Cantitate')),           # quantity_delivered
                        self._parse_date(row.get('Data')),        # shipment_date
                        self._parse_date(row.get('Data')),        # delivery_date (same as shipment for now)
                        safe_str(row.get('Stare'), 'DELIVERED'),  # delivery_status
                        safe_float(row.get('Total Price'))        # total_value
                    )
                    cursor.execute(insert_sql, data)
                    # Track created rows (a plain INSERT always creates)
                    if cursor.rowcount == 1:
                        created_count += 1
                    success_count += 1
                except Exception as row_error:
                    logger.warning(f"Error processing delivery row {index}: {row_error}")
                    error_count += 1
                    continue
            self.connection.commit()
            logger.info(f"Delivery import completed: {success_count} successful, {error_count} errors")
            return {
                'success_count': success_count,
                'created_count': created_count,
                'updated_count': updated_count,
                'error_count': error_count,
                'total_rows': len(df),
                'error_message': None if error_count == 0 else f'{error_count} rows failed to import'
            }
        except Exception as e:
            logger.error(f"Error importing delivery data: {e}")
            return {
                'success_count': 0,
                'error_count': 1,
                'total_rows': 0,
                'error_message': str(e)
            }
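
    # Unlike the two importers above, dm_deliveries takes plain INSERTs with no
    # unique key, so re-importing the same file duplicates rows; call
    # clear_delivery() first when reloading a full extract.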

    def generate_daily_summary(self, report_date=None):
        """Generate daily summary for Daily Mirror reporting"""
        if not report_date:
            report_date = datetime.now().date()
        try:
            cursor = self.connection.cursor()
            # Check if summary already exists for this date
            cursor.execute("SELECT id FROM dm_daily_summary WHERE report_date = ?", (report_date,))
            existing = cursor.fetchone()
            # Get production metrics
            cursor.execute("""
                SELECT
                    COUNT(*) as total_orders,
                    SUM(quantity_requested) as total_quantity,
                    SUM(CASE WHEN production_status = 'Inchis' THEN 1 ELSE 0 END) as completed_orders,
                    SUM(CASE WHEN end_of_quilting IS NOT NULL THEN 1 ELSE 0 END) as quilting_done,
                    SUM(CASE WHEN end_of_sewing IS NOT NULL THEN 1 ELSE 0 END) as sewing_done,
                    COUNT(DISTINCT customer_code) as unique_customers
                FROM dm_production_orders
                WHERE DATE(data_planificare) = ?
            """, (report_date,))
            production_metrics = cursor.fetchone()
            # Get active operators count. Note: the CASE yields only each row's
            # first non-null operator, so an operator who appears solely as T2/T3
            # on rows where T1 is filled is not counted.
            cursor.execute("""
                SELECT COUNT(DISTINCT CASE
                    WHEN t1_operator_name IS NOT NULL THEN t1_operator_name
                    WHEN t2_operator_name IS NOT NULL THEN t2_operator_name
                    WHEN t3_operator_name IS NOT NULL THEN t3_operator_name
                END) as active_operators
                FROM dm_production_orders
                WHERE DATE(data_planificare) = ?
            """, (report_date,))
            operator_metrics = cursor.fetchone()
            active_operators = operator_metrics[0] or 0
            if existing:
                # Update existing summary
                update_sql = """
                    UPDATE dm_daily_summary SET
                        orders_quantity = ?, production_launched = ?, production_finished = ?,
                        quilting_completed = ?, sewing_completed = ?, unique_customers = ?,
                        active_operators = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE report_date = ?
                """
                cursor.execute(update_sql, (
                    production_metrics[1] or 0, production_metrics[0] or 0, production_metrics[2] or 0,
                    production_metrics[3] or 0, production_metrics[4] or 0, production_metrics[5] or 0,
                    active_operators, report_date
                ))
            else:
                # Insert new summary
                insert_sql = """
                    INSERT INTO dm_daily_summary (
                        report_date, orders_quantity, production_launched, production_finished,
                        quilting_completed, sewing_completed, unique_customers, active_operators
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                """
                cursor.execute(insert_sql, (
                    report_date, production_metrics[1] or 0, production_metrics[0] or 0, production_metrics[2] or 0,
                    production_metrics[3] or 0, production_metrics[4] or 0, production_metrics[5] or 0,
                    active_operators
                ))
            self.connection.commit()
            logger.info(f"Daily summary generated for {report_date}")
            return True
        except Exception as e:
            logger.error(f"Error generating daily summary: {e}")
            return False
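
    # Backfill sketch (assumes dm_production_orders is already populated for the
    # dates in question; the 7-day window is illustrative):
    #   db = DailyMirrorDatabase()
    #   if db.connect():
    #       for offset in range(7):
    #           db.generate_daily_summary(datetime.now().date() - timedelta(days=offset))
    #       db.disconnect()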

    def clear_production_orders(self):
        """Delete all rows from the Daily Mirror production orders table"""
        try:
            cursor = self.connection.cursor()
            cursor.execute("DELETE FROM dm_production_orders")
            self.connection.commit()
            logger.info("All production orders deleted from dm_production_orders table.")
            return True
        except Exception as e:
            logger.error(f"Error deleting production orders: {e}")
            return False

    def clear_orders(self):
        """Delete all rows from the Daily Mirror orders table"""
        try:
            cursor = self.connection.cursor()
            cursor.execute("DELETE FROM dm_orders")
            self.connection.commit()
            logger.info("All orders deleted from dm_orders table.")
            return True
        except Exception as e:
            logger.error(f"Error deleting orders: {e}")
            return False

    def clear_delivery(self):
        """Delete all rows from the Daily Mirror delivery table"""
        try:
            cursor = self.connection.cursor()
            cursor.execute("DELETE FROM dm_deliveries")
            self.connection.commit()
            logger.info("All delivery records deleted from dm_deliveries table.")
            return True
        except Exception as e:
            logger.error(f"Error deleting delivery records: {e}")
            return False

    def _parse_date(self, date_value):
        """Parse date with better null handling"""
        if pd.isna(date_value) or date_value == 'nan' or date_value is None or date_value == '':
            return None
        try:
            if isinstance(date_value, str):
                # Handle various date formats
                for fmt in ['%Y-%m-%d', '%d/%m/%Y', '%m/%d/%Y', '%d.%m.%Y']:
                    try:
                        return datetime.strptime(date_value, fmt).date()
                    except ValueError:
                        continue
            elif hasattr(date_value, 'date'):
                return date_value.date()
            elif isinstance(date_value, datetime):
                return date_value.date()
            return None  # If all parsing attempts fail
        except Exception as e:
            logger.warning(f"Error parsing date {date_value}: {e}")
            return None
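
    # Accepted string formats above: ISO '2025-10-27', day-first '27/10/2025' and
    # '27.10.2025', and month-first '10/27/2025'; anything unparseable returns
    # None, so bad dates import as NULL rather than aborting the row.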

    def _parse_datetime(self, datetime_value):
        """Parse datetime value from Excel"""
        if pd.isna(datetime_value):
            return None
        if isinstance(datetime_value, str) and datetime_value == '00:00:00':
            return None
        return datetime_value
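

def example_daily_import(production_file, orders_file, deliveries_file):
    """Illustrative end-to-end import flow. A sketch only: the three file-path
    arguments are hypothetical, and errors beyond connection failure are left
    to the individual importers to report."""
    db = DailyMirrorDatabase()
    if not db.connect():
        return
    try:
        db.import_production_data(production_file)  # upserts dm_production_orders
        db.import_orders_data(orders_file)          # upserts dm_orders
        db.clear_delivery()                         # dm_deliveries has no unique key
        db.import_delivery_data(deliveries_file)    # plain INSERT per Excel row
        db.generate_daily_summary()                 # refresh today's dm_daily_summary row
    finally:
        db.disconnect()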


def setup_daily_mirror_database():
    """Setup the Daily Mirror database schema"""
    db = DailyMirrorDatabase()
    if not db.connect():
        return False
    try:
        success = db.create_database_schema()
        if success:
            print("✅ Daily Mirror database schema created successfully!")
            # Generate sample daily summary for today
            db.generate_daily_summary()
        return success
    finally:
        db.disconnect()


if __name__ == "__main__":
    setup_daily_mirror_database()