updated structure in the table view

Author: Quality System Admin
Date:   2025-10-26 19:30:12 +02:00
Commit: d142129de6 (parent: 8cbede35d2)

9 changed files with 760 additions and 185 deletions


@@ -94,50 +94,52 @@ class DailyMirrorDatabase:
         return False
 
     def import_production_data(self, file_path):
-        """Import production data from Excel file (Comenzi Productie format)"""
+        """Import production data from Excel file (Production orders Data sheet OR DataSheet)"""
         try:
-            # The correct data is in the first sheet (DataSheet)
+            # Read from "Production orders Data" sheet (new format) or "DataSheet" (old format)
             df = None
             sheet_used = None
-            # Get available sheets
-            excel_file = pd.ExcelFile(file_path)
-            logger.info(f"Available sheets: {excel_file.sheet_names}")
+            # Try different engines (openpyxl for .xlsx, pyxlsb for .xlsb)
+            engines_to_try = ['openpyxl', 'pyxlsb']
-            # Try DataSheet first (where the actual production data is), then fallback options
-            sheet_attempts = [
-                ('DataSheet', 'openpyxl'),
-                ('DataSheet', 'xlrd'),
-                (0, 'openpyxl'),
-                (0, 'xlrd'),
-                ('Sheet1', 'openpyxl'),  # fallback to Sheet1 if DataSheet fails
-                (1, 'openpyxl')
-            ]
+            # Try different sheet names (new format first, then old format)
+            sheet_names_to_try = ['Production orders Data', 'DataSheet']
-            for sheet_name, engine in sheet_attempts:
-                try:
-                    logger.info(f"Trying to read sheet '{sheet_name}' with engine '{engine}'")
-                    df = pd.read_excel(file_path, sheet_name=sheet_name, engine=engine, header=0)
-                    sheet_used = f"{sheet_name} (engine: {engine})"
-                    logger.info(f"Successfully read from sheet: {sheet_used}")
+            for engine in engines_to_try:
+                if df is not None:
+                    break
+                try:
+                    logger.info(f"Trying to read Excel file with engine: {engine}")
+                    excel_file = pd.ExcelFile(file_path, engine=engine)
+                    logger.info(f"Available sheets: {excel_file.sheet_names}")
+                    # Try each sheet name
+                    for sheet_name in sheet_names_to_try:
+                        if sheet_name in excel_file.sheet_names:
+                            try:
+                                logger.info(f"Reading sheet '{sheet_name}'")
+                                df = pd.read_excel(file_path, sheet_name=sheet_name, engine=engine, header=0)
+                                sheet_used = f"{sheet_name} (engine: {engine})"
+                                logger.info(f"Successfully read from sheet: {sheet_used}")
+                                break
+                            except Exception as sheet_error:
+                                logger.warning(f"Failed to read sheet '{sheet_name}': {sheet_error}")
+                                continue
+                    if df is not None:
+                        break
                 except Exception as e:
-                    logger.warning(f"Failed to read sheet {sheet_name} with {engine}: {e}")
+                    logger.warning(f"Failed with engine {engine}: {e}")
                     continue
-            # If all engines fail on DataSheet, try a different approach
             if df is None:
-                try:
-                    logger.info("Trying alternative method: reading without specifying engine")
-                    df = pd.read_excel(file_path, sheet_name='DataSheet')
-                    sheet_used = "DataSheet (default engine)"
-                    logger.info("Successfully read with default engine")
-                except Exception as e:
-                    logger.error(f"Failed with default engine: {e}")
-                    raise Exception("Could not read the DataSheet from the Excel file. The file may be corrupted.")
+                raise Exception("Could not read Excel file. Please ensure it has a 'Production orders Data' or 'DataSheet' sheet.")
             logger.info(f"Loaded production data from {sheet_used}: {len(df)} rows, {len(df.columns)} columns")
             logger.info(f"Available columns: {list(df.columns)}")
             logger.info(f"First 5 column names: {list(df.columns)[:5]}")
             cursor = self.connection.cursor()
             success_count = 0
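
The hunk above replaces the hard-coded DataSheet lookup with a two-level fallback: try each Excel engine, and within a readable workbook try each known sheet name. Below is a minimal standalone sketch of that pattern, assuming only pandas plus the openpyxl/pyxlsb engine packages; the helper name and file path are illustrative, not part of the commit.

import logging

import pandas as pd

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def read_first_matching_sheet(file_path, sheet_names, engines=('openpyxl', 'pyxlsb')):
    """Return (DataFrame, description) for the first readable sheet, else (None, None)."""
    for engine in engines:  # openpyxl reads .xlsx, pyxlsb reads .xlsb
        try:
            book = pd.ExcelFile(file_path, engine=engine)
        except Exception as exc:
            logger.warning("Engine %s could not open %s: %s", engine, file_path, exc)
            continue
        for name in sheet_names:
            if name in book.sheet_names:
                # header=0 mirrors the read_excel call in the commit
                return book.parse(name, header=0), f"{name} (engine: {engine})"
    return None, None

# Illustrative call; the path is an assumption.
df, used = read_first_matching_sheet('export.xlsx', ['Production orders Data', 'DataSheet'])
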
@@ -145,60 +147,116 @@ class DailyMirrorDatabase:
             updated_count = 0
             error_count = 0
-            # Prepare insert statement
+            # Prepare insert statement with new schema
             insert_sql = """
                 INSERT INTO dm_production_orders (
-                    production_order, customer_code, client_order, article_code,
-                    article_description, quantity_requested, delivery_date, production_status,
-                    end_of_quilting, end_of_sewing, t1_status, t1_registration_date, t1_operator_name,
-                    t2_status, t2_registration_date, t2_operator_name, t3_status, t3_registration_date,
-                    t3_operator_name, machine_code, machine_type, classification, total_norm_time,
-                    data_deschiderii, model_lb2, data_planificare, machine_number, design_number, needle_position
-                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                    production_order, open_for_order_line, client_order_line,
+                    customer_code, customer_name, article_code, article_description,
+                    quantity_requested, unit_of_measure, delivery_date, opening_date,
+                    closing_date, data_planificare, production_status,
+                    machine_code, machine_type, machine_number,
+                    end_of_quilting, end_of_sewing,
+                    phase_t1_prepared, t1_operator_name, t1_registration_date,
+                    phase_t2_cut, t2_operator_name, t2_registration_date,
+                    phase_t3_sewing, t3_operator_name, t3_registration_date,
+                    design_number, classification, model_description, model_lb2,
+                    needle_position, needle_row, priority
+                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                 ON DUPLICATE KEY UPDATE
+                    open_for_order_line = VALUES(open_for_order_line),
+                    client_order_line = VALUES(client_order_line),
                     customer_code = VALUES(customer_code),
-                    client_order = VALUES(client_order),
+                    customer_name = VALUES(customer_name),
                     article_code = VALUES(article_code),
                     article_description = VALUES(article_description),
                     quantity_requested = VALUES(quantity_requested),
                     delivery_date = VALUES(delivery_date),
                     production_status = VALUES(production_status),
                     machine_code = VALUES(machine_code),
                     end_of_quilting = VALUES(end_of_quilting),
                     end_of_sewing = VALUES(end_of_sewing),
+                    phase_t1_prepared = VALUES(phase_t1_prepared),
                     t1_operator_name = VALUES(t1_operator_name),
                     t1_registration_date = VALUES(t1_registration_date),
+                    phase_t2_cut = VALUES(phase_t2_cut),
                     t2_operator_name = VALUES(t2_operator_name),
                     t2_registration_date = VALUES(t2_registration_date),
+                    phase_t3_sewing = VALUES(phase_t3_sewing),
                     t3_operator_name = VALUES(t3_operator_name),
                     t3_registration_date = VALUES(t3_registration_date),
                     updated_at = CURRENT_TIMESTAMP
             """
             for index, row in df.iterrows():
                 try:
+                    # Create concatenated fields with dash separator
+                    opened_for_order = str(row.get('Opened for Order', '')).strip() if pd.notna(row.get('Opened for Order')) else ''
+                    linia = str(row.get('Linia', '')).strip() if pd.notna(row.get('Linia')) else ''
+                    open_for_order_line = f"{opened_for_order}-{linia}" if opened_for_order and linia else ''
+                    com_achiz_client = str(row.get('Com. Achiz. Client', '')).strip() if pd.notna(row.get('Com. Achiz. Client')) else ''
+                    nr_linie_com_client = str(row.get('Nr. linie com. client', '')).strip() if pd.notna(row.get('Nr. linie com. client')) else ''
+                    client_order_line = f"{com_achiz_client}-{nr_linie_com_client}" if com_achiz_client and nr_linie_com_client else ''
+                    # Helper functions to safely coerce cell values
+                    def safe_int(value, default=None):
+                        if pd.isna(value) or value == '':
+                            return default
+                        try:
+                            return int(float(value))
+                        except (ValueError, TypeError):
+                            return default
+                    def safe_float(value, default=None):
+                        if pd.isna(value) or value == '':
+                            return default
+                        try:
+                            return float(value)
+                        except (ValueError, TypeError):
+                            return default
+                    def safe_str(value, default=''):
+                        if pd.isna(value):
+                            return default
+                        return str(value).strip()
                     # Prepare data tuple
                     data = (
-                        row.get('Comanda Productie', ''),
-                        row.get('Customer', ''),
-                        row.get('Comanda client', ''),
-                        row.get('Cod Articol', ''),
-                        row.get('Descriere', ''),
-                        row.get('Cantitate ceruta', 0),
-                        self._parse_date(row.get('Delivery date')),
-                        row.get('Status', ''),
-                        self._parse_datetime(row.get('End of Quilting')),
-                        self._parse_datetime(row.get('End of sewing')),
-                        row.get('T1', 0),
-                        self._parse_datetime(row.get('Data inregistrare T1')),
-                        row.get('Numele Complet T1', ''),
-                        row.get('T2', 0),
-                        self._parse_datetime(row.get('Data inregistrare T2')),
-                        row.get('Numele Complet T2', ''),
-                        row.get('T3', 0),
-                        self._parse_datetime(row.get('Data inregistrare T3')),
-                        row.get('Numele Complet T3', ''),
-                        row.get('Masina Cusut ', ''),
-                        row.get('Tip Masina', ''),
-                        row.get('Clasificare', ''),
-                        row.get('Timp normat total', 0),
-                        self._parse_date(row.get('Data Deschiderii')),
-                        row.get('Model Lb2', ''),
-                        self._parse_date(row.get('Data Planific.')),
-                        row.get('Numar masina', ''),
-                        row.get('Design nr', 0),
-                        row.get('Needle position', 0)
+                        safe_str(row.get('Comanda Productie')),  # production_order
+                        open_for_order_line,  # open_for_order_line (concatenated)
+                        client_order_line,  # client_order_line (concatenated)
+                        safe_str(row.get('Cod. Client')),  # customer_code
+                        safe_str(row.get('Customer Name')),  # customer_name
+                        safe_str(row.get('Cod Articol')),  # article_code
+                        safe_str(row.get('Descr. Articol.1')),  # article_description
+                        safe_int(row.get('Cantitate Com. Prod.'), 0),  # quantity_requested
+                        safe_str(row.get('U.M.')),  # unit_of_measure
+                        self._parse_date(row.get('SO Duedate')),  # delivery_date
+                        self._parse_date(row.get('Data Deschiderii')),  # opening_date
+                        self._parse_date(row.get('Data Inchiderii')),  # closing_date
+                        self._parse_date(row.get('Data Planific.')),  # data_planificare
+                        safe_str(row.get('Status')),  # production_status
+                        safe_str(row.get('Masina cusut')),  # machine_code
+                        safe_str(row.get('Tip masina')),  # machine_type
+                        safe_str(row.get('Machine Number')),  # machine_number
+                        self._parse_date(row.get('End of Quilting')),  # end_of_quilting
+                        self._parse_date(row.get('End of Sewing')),  # end_of_sewing
+                        safe_str(row.get('T2')),  # phase_t1_prepared (using T2 column)
+                        safe_str(row.get('Nume complet T2')),  # t1_operator_name
+                        self._parse_datetime(row.get('Data inregistrare T2')),  # t1_registration_date
+                        safe_str(row.get('T1')),  # phase_t2_cut (using T1 column)
+                        safe_str(row.get('Nume complet T1')),  # t2_operator_name
+                        self._parse_datetime(row.get('Data inregistrare T1')),  # t2_registration_date
+                        safe_str(row.get('T3')),  # phase_t3_sewing
+                        safe_str(row.get('Nume complet T3')),  # t3_operator_name
+                        self._parse_datetime(row.get('Data inregistrare T3')),  # t3_registration_date
+                        safe_int(row.get('Design number')),  # design_number
+                        safe_str(row.get('Clasificare')),  # classification
+                        safe_str(row.get('Descriere Model')),  # model_description
+                        safe_str(row.get('Model Lb2')),  # model_lb2
+                        safe_float(row.get('Needle Position')),  # needle_position
+                        safe_str(row.get('Needle row')),  # needle_row
+                        safe_int(row.get('Prioritate executie'), 0)  # priority
                     )
                     cursor.execute(insert_sql, data)
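
The statement above uses MySQL-style INSERT ... ON DUPLICATE KEY UPDATE, and under the MySQL/MariaDB convention the driver reports 1 affected row for a fresh insert and 2 for an update of an existing row. The code that maintains created_count and updated_count falls outside the visible hunks, so the following is only a sketch of how that classification is typically done:

def execute_upsert(cursor, insert_sql, data, counters):
    """Run one upsert and classify it via MySQL's affected-rows convention.

    Sketch under stated assumptions: a DB-API cursor whose rowcount is
    1 for an inserted row and 2 for an ON DUPLICATE KEY UPDATE hit.
    """
    cursor.execute(insert_sql, data)
    if cursor.rowcount == 2:
        counters['updated'] += 1
    else:
        counters['created'] += 1
    counters['success'] += 1
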
@@ -217,11 +275,17 @@ class DailyMirrorDatabase:
                 except Exception as row_error:
                     logger.warning(f"Error processing row {index}: {row_error}")
+                    # Log first few values of the problematic row
+                    try:
+                        row_sample = {k: v for k, v in list(row.items())[:5]}
+                        logger.warning(f"Row data sample: {row_sample}")
+                    except Exception:
+                        pass
                     error_count += 1
                     continue
             self.connection.commit()
-            logger.info(f"Production data import completed: {success_count} successful, {error_count} failed")
+            logger.info(f"Production data import completed: {success_count} successful ({created_count} created, {updated_count} updated), {error_count} failed")
             return {
                 'success_count': success_count,
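
Only the success_count key of the returned summary is visible before the hunk ends. A caller-side sketch under that assumption; the instance name and file path are hypothetical:

# Hypothetical usage: `db` is a connected DailyMirrorDatabase instance.
result = db.import_production_data('comenzi_productie.xlsx')
if result is None:
    print('Import failed; see the log for the full traceback.')
else:
    print(f"Imported {result['success_count']} rows")
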
@@ -233,6 +297,8 @@ class DailyMirrorDatabase:
         except Exception as e:
             logger.error(f"Error importing production data: {e}")
+            import traceback
+            logger.error(traceback.format_exc())
             return None
 
     def import_orders_data(self, file_path):
@@ -688,6 +754,30 @@ class DailyMirrorDatabase:
             logger.error(f"Error deleting production orders: {e}")
             return False
 
+    def clear_orders(self):
+        """Delete all rows from the Daily Mirror orders table"""
+        try:
+            cursor = self.connection.cursor()
+            cursor.execute("DELETE FROM dm_orders")
+            self.connection.commit()
+            logger.info("All orders deleted from dm_orders table.")
+            return True
+        except Exception as e:
+            logger.error(f"Error deleting orders: {e}")
+            return False
+
+    def clear_delivery(self):
+        """Delete all rows from the Daily Mirror delivery table"""
+        try:
+            cursor = self.connection.cursor()
+            cursor.execute("DELETE FROM dm_deliveries")
+            self.connection.commit()
+            logger.info("All delivery records deleted from dm_deliveries table.")
+            return True
+        except Exception as e:
+            logger.error(f"Error deleting delivery records: {e}")
+            return False
 
     def _parse_date(self, date_value):
         """Parse date with better null handling"""
         if pd.isna(date_value) or date_value == 'nan' or date_value is None or date_value == '':
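
The hunk cuts off right after the null guard, so the body of _parse_date is not shown. For orientation, a self-contained sketch of how such a parser typically continues, using pd.to_datetime; this is an assumption about the method's shape, not the commit's actual code:

import pandas as pd

def parse_date_sketch(date_value):
    """Illustrative stand-in for _parse_date: returns 'YYYY-MM-DD' or None."""
    if pd.isna(date_value) or date_value == 'nan' or date_value is None or date_value == '':
        return None
    try:
        return pd.to_datetime(date_value).strftime('%Y-%m-%d')
    except (ValueError, TypeError):
        return None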