Fix print_lost_labels compact styling and production data import
- Added compact table styling to the print_lost_labels page (smaller fonts, reduced padding)
- Fixed production data import missing fields (production_order_line, line_number)
- Added better error handling and logging for Excel file imports
- Skip empty rows in production data import
- Log all columns, and columns with data, for debugging
This commit is contained in:
@@ -10,6 +10,7 @@ import os
|
||||
import json
|
||||
import tempfile
|
||||
from datetime import datetime
|
||||
import pandas as pd
|
||||
|
||||
def get_db_connection():
|
||||
"""Get database connection using external server configuration"""
|
||||
@@ -73,8 +74,15 @@ def validate_order_row(row_data):
|
||||
data_livrare = row_data.get('data_livrare', '').strip()
|
||||
if data_livrare:
|
||||
try:
|
||||
# Try to parse common date formats
|
||||
for date_format in ['%Y-%m-%d', '%d/%m/%Y', '%m/%d/%Y', '%d.%m.%Y']:
|
||||
# Try to parse common date formats including Excel datetime format
|
||||
date_formats = [
|
||||
'%Y-%m-%d', # 2024-03-12
|
||||
'%Y-%m-%d %H:%M:%S', # 2024-03-12 00:00:00 (Excel format)
|
||||
'%d/%m/%Y', # 12/03/2024
|
||||
'%m/%d/%Y', # 03/12/2024
|
||||
'%d.%m.%Y' # 12.03.2024
|
||||
]
|
||||
for date_format in date_formats:
|
||||
try:
|
||||
datetime.strptime(data_livrare, date_format)
|
||||
break
|
||||
@@ -118,8 +126,15 @@ def add_order_to_database(order_data):
|
||||
data_livrare_str = order_data.get('data_livrare', '').strip()
|
||||
if data_livrare_str:
|
||||
try:
|
||||
# Try to parse common date formats and convert to YYYY-MM-DD
|
||||
for date_format in ['%Y-%m-%d', '%d/%m/%Y', '%m/%d/%Y', '%d.%m.%Y']:
|
||||
# Try to parse common date formats including Excel datetime and convert to YYYY-MM-DD
|
||||
date_formats = [
|
||||
'%Y-%m-%d', # 2024-03-12
|
||||
'%Y-%m-%d %H:%M:%S', # 2024-03-12 00:00:00 (Excel format)
|
||||
'%d/%m/%Y', # 12/03/2024
|
||||
'%m/%d/%Y', # 03/12/2024
|
||||
'%d.%m.%Y' # 12.03.2024
|
||||
]
|
||||
for date_format in date_formats:
|
||||
try:
|
||||
parsed_date = datetime.strptime(data_livrare_str, date_format)
|
||||
data_livrare_value = parsed_date.strftime('%Y-%m-%d')
|
||||
@@ -167,6 +182,141 @@ def add_order_to_database(order_data):
|
||||
except Exception as e:
|
||||
return False, f"Unexpected error: {str(e)}"
|
||||
|
||||
def process_excel_file(file_path):
    """
    Process an uploaded Excel file (.xlsx) and return parsed data with validation.

    Args:
        file_path: Filesystem path to the Excel workbook to parse.

    Returns:
        Tuple ``(orders_data, validation_errors, validation_warnings)`` where
        ``orders_data`` is a list of normalized row dicts (only rows that passed
        validation are included) and the other two are lists of
        ``"Row N: message"`` strings produced by ``validate_order_row``.
    """
    orders_data = []
    all_errors = []
    all_warnings = []

    try:
        # Read Excel file - try 'Sheet1' first (common data sheet), then fall
        # back to the first sheet, then 'DataSheet' as a last resort.
        # FIX: the original used bare `except:` here, which also swallows
        # KeyboardInterrupt/SystemExit; narrowed to Exception.
        try:
            df = pd.read_excel(file_path, sheet_name='Sheet1', engine='openpyxl')
        except Exception:
            try:
                df = pd.read_excel(file_path, sheet_name=0, engine='openpyxl')
            except Exception:
                # Last resort - try 'DataSheet'
                df = pd.read_excel(file_path, sheet_name='DataSheet', engine='openpyxl')

        # Column mapping for Excel files (case-insensitive).
        # Maps normalized Excel column names to database field names.
        column_mapping = {
            # Core order fields
            'comanda productie': 'comanda_productie',
            'comanda_productie': 'comanda_productie',
            'cod articol': 'cod_articol',
            'cod_articol': 'cod_articol',
            'descriere': 'descr_com_prod',
            'descr. com. prod': 'descr_com_prod',
            'descr com prod': 'descr_com_prod',
            'descr_com_prod': 'descr_com_prod',
            'description': 'descr_com_prod',
            'cantitate': 'cantitate',
            'cantitate ceruta': 'cantitate',
            'quantity': 'cantitate',
            'datalivrare': 'data_livrare',
            'data livrare': 'data_livrare',
            'data_livrare': 'data_livrare',
            'delivery date': 'data_livrare',
            'dimensiune': 'dimensiune',
            'dimension': 'dimensiune',

            # Customer and order info
            'customer': 'customer_name',
            'customer name': 'customer_name',
            'customer_name': 'customer_name',
            'comanda client': 'com_achiz_client',
            'com.achiz.client': 'com_achiz_client',
            'com achiz client': 'com_achiz_client',
            'com_achiz_client': 'com_achiz_client',
            'customer article number': 'customer_article_number',
            'customer_article_number': 'customer_article_number',

            # Status and dates
            'status': 'status',
            'end of quilting': 'end_of_quilting',
            'end of sewing': 'end_of_sewing',
            'data deschiderii': 'data_deschiderii',
            'data planific.': 'data_planific',
            'data planific': 'data_planific',

            # Machine and production info
            'masina cusut': 'masina_cusut',
            # NOTE(review): this trailing-space key can never match because
            # column names are stripped during normalization below; kept as a
            # harmless defensive entry.
            'masina cusut ': 'masina_cusut',
            'tip masina': 'tip_masina',
            'numar masina': 'numar_masina',
            'clasificare': 'clasificare',
            'timp normat total': 'timp_normat_total',

            # Quality control stages (T1, T2, T3)
            't1': 't1',
            'data inregistrare t1': 'data_inregistrare_t1',
            'numele complet t1': 'numele_complet_t1',
            't2': 't2',
            'data inregistrare t2': 'data_inregistrare_t2',
            'numele complet t2': 'numele_complet_t2',
            't3': 't3',
            'data inregistrare t3': 'data_inregistrare_t3',
            'numele complet t3': 'numele_complet_t3',

            # Design and model info
            'model lb2': 'model_lb2',
            'design nr': 'design_nr',
            'needle position': 'needle_position',

            # Line references
            'nr. linie com. client': 'nr_linie_com_client',
            'nr linie com client': 'nr_linie_com_client',
            'nr_linie_com_client': 'nr_linie_com_client',
            'line': 'line_number',
            'line_number': 'line_number',
            'open for order': 'open_for_order',
            'open_for_order': 'open_for_order'
        }

        # Normalize column names (lowercase, trimmed); unnamed/empty headers get
        # a positional placeholder so lookups below never see a falsy key.
        df.columns = [col.lower().strip() if col else f'col_{i}' for i, col in enumerate(df.columns)]

        # Process each row
        for idx, row in df.iterrows():
            # Skip rows where every cell is NaN (fully empty rows)
            if row.isna().all():
                continue

            # Build a normalized dict keyed by database field names; unmapped
            # columns fall back to a snake_case version of the header.
            normalized_row = {}
            for col_name in df.columns:
                # Columns were already normalized above; re-normalizing is a
                # harmless no-op kept for safety.
                col_key = col_name.lower().strip()
                mapped_key = column_mapping.get(col_key, col_key.replace(' ', '_').replace('.', ''))

                # Convert cell value to a stripped string; NaN becomes ''
                value = row[col_name]
                if pd.isna(value):
                    normalized_row[mapped_key] = ''
                else:
                    normalized_row[mapped_key] = str(value).strip()

            # Validate the row (validate_order_row is defined earlier in this file)
            errors, warnings = validate_order_row(normalized_row)

            if errors:
                # idx + 2: DataFrame index is 0-based and Excel row 1 is the header
                all_errors.extend([f"Row {idx + 2}: {error}" for error in errors])
            else:
                # Only add valid rows
                orders_data.append(normalized_row)

            if warnings:
                all_warnings.extend([f"Row {idx + 2}: {warning}" for warning in warnings])

    except Exception as e:
        # Surface any parse/read failure as a validation error rather than
        # crashing the upload handler.
        all_errors.append(f"Error processing Excel file: {str(e)}")

    return orders_data, all_errors, all_warnings
|
||||
|
||||
def process_csv_file(file_path):
|
||||
"""
|
||||
Process uploaded CSV file and return parsed data with validation
|
||||
@@ -268,7 +418,7 @@ def upload_orders_handler():
|
||||
if request.method == 'POST':
|
||||
# Handle file upload
|
||||
file = request.files.get('csv_file')
|
||||
if file and file.filename.endswith(('.csv', '.CSV')):
|
||||
if file and file.filename.endswith(('.csv', '.CSV', '.xlsx', '.XLSX', '.xls', '.XLS')):
|
||||
try:
|
||||
# Save uploaded file
|
||||
temp_path = os.path.join(temp_dir, file.filename)
|
||||
@@ -278,8 +428,11 @@ def upload_orders_handler():
|
||||
session['csv_filename'] = file.filename
|
||||
session['orders_csv_filepath'] = temp_path
|
||||
|
||||
# Process the CSV file
|
||||
orders_data, validation_errors, validation_warnings = process_csv_file(temp_path)
|
||||
# Process the file based on extension
|
||||
if file.filename.lower().endswith(('.xlsx', '.xls')):
|
||||
orders_data, validation_errors, validation_warnings = process_excel_file(temp_path)
|
||||
else:
|
||||
orders_data, validation_errors, validation_warnings = process_csv_file(temp_path)
|
||||
|
||||
# Store processed data in session
|
||||
session['orders_csv_data'] = orders_data
|
||||
|
||||
Reference in New Issue
Block a user