Fix print_lost_labels compact styling and production data import

- Added compact table styling to print_lost_labels page (smaller fonts, reduced padding)
- Fixed missing fields in the production data import (production_order_line, line_number)
- Added better error handling and logging for Excel file imports
- Skip empty rows in production data import
- Log all column names, and which columns contain data, for debugging
ske087
2025-11-26 21:59:03 +02:00
parent d3a0123acc
commit d070db0052
13 changed files with 649 additions and 1042 deletions

View File

@@ -184,7 +184,11 @@ class DailyMirrorDatabase:
raise Exception("Could not read Excel file. Please ensure it has a 'Production orders Data' or 'DataSheet' sheet.")
logger.info(f"Loaded production data from {sheet_used}: {len(df)} rows, {len(df.columns)} columns")
logger.info(f"First 5 column names: {list(df.columns)[:5]}")
logger.info(f"All column names: {list(df.columns)}")
# Log columns that have at least some non-null data
columns_with_data = [col for col in df.columns if df[col].notna().any()]
logger.info(f"Columns with data ({len(columns_with_data)}): {columns_with_data}")
cursor = self.connection.cursor()
success_count = 0
@@ -235,6 +239,10 @@ class DailyMirrorDatabase:
for index, row in df.iterrows():
try:
# Skip rows where production order is empty
if pd.isna(row.get('Comanda Productie')) or str(row.get('Comanda Productie')).strip() == '':
continue
# Create concatenated fields with dash separator
opened_for_order = str(row.get('Opened for Order', '')).strip() if pd.notna(row.get('Opened for Order')) else ''
linia = str(row.get('Linia', '')).strip() if pd.notna(row.get('Linia')) else ''
@@ -269,6 +277,8 @@ class DailyMirrorDatabase:
# Prepare data tuple
data = (
safe_str(row.get('Comanda Productie')), # production_order
safe_str(row.get('Opened for Order')), # production_order_line
safe_str(row.get('Linia')), # line_number
open_for_order_line, # open_for_order_line (concatenated)
client_order_line, # client_order_line (concatenated)
safe_str(row.get('Cod. Client')), # customer_code
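
Note: the empty-row guard and the column logging added above reduce to two small pandas checks. A minimal standalone sketch, assuming a DataFrame already loaded from the production sheet (the summarize_and_filter name is illustrative and not part of this commit):

import logging
import pandas as pd

logger = logging.getLogger(__name__)

def summarize_and_filter(df: pd.DataFrame) -> pd.DataFrame:
    """Log which columns actually carry data, then drop rows without a production order."""
    # Columns that have at least one non-null value (mirrors the logging added above)
    columns_with_data = [col for col in df.columns if df[col].notna().any()]
    logger.info("Columns with data (%d): %s", len(columns_with_data), columns_with_data)
    # Keep only rows whose 'Comanda Productie' is present and non-blank
    order_col = df['Comanda Productie'].astype(str).str.strip()
    return df[df['Comanda Productie'].notna() & (order_col != '')]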

View File

@@ -10,6 +10,7 @@ import os
import json
import tempfile
from datetime import datetime
import pandas as pd
def get_db_connection():
"""Get database connection using external server configuration"""
@@ -73,8 +74,15 @@ def validate_order_row(row_data):
data_livrare = row_data.get('data_livrare', '').strip()
if data_livrare:
try:
# Try to parse common date formats
for date_format in ['%Y-%m-%d', '%d/%m/%Y', '%m/%d/%Y', '%d.%m.%Y']:
# Try to parse common date formats including Excel datetime format
date_formats = [
'%Y-%m-%d', # 2024-03-12
'%Y-%m-%d %H:%M:%S', # 2024-03-12 00:00:00 (Excel format)
'%d/%m/%Y', # 12/03/2024
'%m/%d/%Y', # 03/12/2024
'%d.%m.%Y' # 12.03.2024
]
for date_format in date_formats:
try:
datetime.strptime(data_livrare, date_format)
break
@@ -118,8 +126,15 @@ def add_order_to_database(order_data):
data_livrare_str = order_data.get('data_livrare', '').strip()
if data_livrare_str:
try:
# Try to parse common date formats and convert to YYYY-MM-DD
for date_format in ['%Y-%m-%d', '%d/%m/%Y', '%m/%d/%Y', '%d.%m.%Y']:
# Try to parse common date formats including Excel datetime and convert to YYYY-MM-DD
date_formats = [
'%Y-%m-%d', # 2024-03-12
'%Y-%m-%d %H:%M:%S', # 2024-03-12 00:00:00 (Excel format)
'%d/%m/%Y', # 12/03/2024
'%m/%d/%Y', # 03/12/2024
'%d.%m.%Y' # 12.03.2024
]
for date_format in date_formats:
try:
parsed_date = datetime.strptime(data_livrare_str, date_format)
data_livrare_value = parsed_date.strftime('%Y-%m-%d')
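
Note: the same format list now appears in both validate_order_row and add_order_to_database. Purely as an illustration (not a helper introduced by this commit), the repeated parsing could be expressed once:

from datetime import datetime

DATE_FORMATS = [
    '%Y-%m-%d',           # 2024-03-12
    '%Y-%m-%d %H:%M:%S',  # 2024-03-12 00:00:00 (Excel datetime)
    '%d/%m/%Y',           # 12/03/2024
    '%m/%d/%Y',           # 03/12/2024
    '%d.%m.%Y',           # 12.03.2024
]

def normalize_delivery_date(raw: str):
    """Return the date as YYYY-MM-DD, or None when no known format matches."""
    raw = raw.strip()
    for fmt in DATE_FORMATS:
        try:
            return datetime.strptime(raw, fmt).strftime('%Y-%m-%d')
        except ValueError:
            continue
    return None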
@@ -167,6 +182,141 @@ def add_order_to_database(order_data):
except Exception as e:
return False, f"Unexpected error: {str(e)}"
def process_excel_file(file_path):
"""
Process uploaded Excel file (.xlsx) and return parsed data with validation
Returns: (orders_data: list, validation_errors: list, validation_warnings: list)
"""
orders_data = []
all_errors = []
all_warnings = []
try:
# Read Excel file - try 'Sheet1' first (common data sheet), then fallback to first sheet
try:
df = pd.read_excel(file_path, sheet_name='Sheet1', engine='openpyxl')
except:
try:
df = pd.read_excel(file_path, sheet_name=0, engine='openpyxl')
except:
# Last resort - try 'DataSheet'
df = pd.read_excel(file_path, sheet_name='DataSheet', engine='openpyxl')
# Column mapping for Excel files (case-insensitive)
# Maps Excel column names to database field names
column_mapping = {
# Core order fields
'comanda productie': 'comanda_productie',
'comanda_productie': 'comanda_productie',
'cod articol': 'cod_articol',
'cod_articol': 'cod_articol',
'descriere': 'descr_com_prod',
'descr. com. prod': 'descr_com_prod',
'descr com prod': 'descr_com_prod',
'descr_com_prod': 'descr_com_prod',
'description': 'descr_com_prod',
'cantitate': 'cantitate',
'cantitate ceruta': 'cantitate',
'quantity': 'cantitate',
'datalivrare': 'data_livrare',
'data livrare': 'data_livrare',
'data_livrare': 'data_livrare',
'delivery date': 'data_livrare',
'dimensiune': 'dimensiune',
'dimension': 'dimensiune',
# Customer and order info
'customer': 'customer_name',
'customer name': 'customer_name',
'customer_name': 'customer_name',
'comanda client': 'com_achiz_client',
'com.achiz.client': 'com_achiz_client',
'com achiz client': 'com_achiz_client',
'com_achiz_client': 'com_achiz_client',
'customer article number': 'customer_article_number',
'customer_article_number': 'customer_article_number',
# Status and dates
'status': 'status',
'end of quilting': 'end_of_quilting',
'end of sewing': 'end_of_sewing',
'data deschiderii': 'data_deschiderii',
'data planific.': 'data_planific',
'data planific': 'data_planific',
# Machine and production info
'masina cusut': 'masina_cusut',
'masina cusut ': 'masina_cusut', # Note trailing space in Excel
'tip masina': 'tip_masina',
'numar masina': 'numar_masina',
'clasificare': 'clasificare',
'timp normat total': 'timp_normat_total',
# Quality control stages (T1, T2, T3)
't1': 't1',
'data inregistrare t1': 'data_inregistrare_t1',
'numele complet t1': 'numele_complet_t1',
't2': 't2',
'data inregistrare t2': 'data_inregistrare_t2',
'numele complet t2': 'numele_complet_t2',
't3': 't3',
'data inregistrare t3': 'data_inregistrare_t3',
'numele complet t3': 'numele_complet_t3',
# Design and model info
'model lb2': 'model_lb2',
'design nr': 'design_nr',
'needle position': 'needle_position',
# Line references
'nr. linie com. client': 'nr_linie_com_client',
'nr linie com client': 'nr_linie_com_client',
'nr_linie_com_client': 'nr_linie_com_client',
'line': 'line_number',
'line_number': 'line_number',
'open for order': 'open_for_order',
'open_for_order': 'open_for_order'
}
# Normalize column names
df.columns = [col.lower().strip() if col else f'col_{i}' for i, col in enumerate(df.columns)]
# Process each row
for idx, row in df.iterrows():
# Skip empty rows
if row.isna().all():
continue
# Create normalized row data
normalized_row = {}
for col_name in df.columns:
col_key = col_name.lower().strip()
mapped_key = column_mapping.get(col_key, col_key.replace(' ', '_').replace('.', ''))
# Get value and convert to string, handle NaN
value = row[col_name]
if pd.isna(value):
normalized_row[mapped_key] = ''
else:
normalized_row[mapped_key] = str(value).strip()
# Validate the row
errors, warnings = validate_order_row(normalized_row)
if errors:
all_errors.extend([f"Row {idx + 2}: {error}" for error in errors])
else:
# Only add valid rows
orders_data.append(normalized_row)
if warnings:
all_warnings.extend([f"Row {idx + 2}: {warning}" for warning in warnings])
except Exception as e:
all_errors.append(f"Error processing Excel file: {str(e)}")
return orders_data, all_errors, all_warnings
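
Note: process_excel_file above does three things: pick a readable sheet, normalize headers through column_mapping, and stringify cell values. A condensed sketch of the first two steps, assuming the same column_mapping dict (unknown headers fall back to a snake_case form, as in the code above):

import pandas as pd

def read_orders_sheet(file_path: str) -> pd.DataFrame:
    """Try 'Sheet1', then the first sheet, then 'DataSheet', as the import does."""
    for sheet in ('Sheet1', 0, 'DataSheet'):
        try:
            return pd.read_excel(file_path, sheet_name=sheet, engine='openpyxl')
        except Exception:
            continue
    raise ValueError(f"Could not read any known sheet from {file_path}")

def normalize_rows(df: pd.DataFrame, column_mapping: dict) -> list:
    """Map lower-cased headers to database field names and stringify cell values."""
    df.columns = [str(c).lower().strip() for c in df.columns]
    rows = []
    for _, row in df.iterrows():
        if row.isna().all():
            continue  # skip fully empty rows
        normalized = {}
        for col in df.columns:
            key = column_mapping.get(col, col.replace(' ', '_').replace('.', ''))
            value = row[col]
            normalized[key] = '' if pd.isna(value) else str(value).strip()
        rows.append(normalized)
    return rows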
def process_csv_file(file_path):
"""
Process uploaded CSV file and return parsed data with validation
@@ -268,7 +418,7 @@ def upload_orders_handler():
if request.method == 'POST':
# Handle file upload
file = request.files.get('csv_file')
if file and file.filename.endswith(('.csv', '.CSV')):
if file and file.filename.endswith(('.csv', '.CSV', '.xlsx', '.XLSX', '.xls', '.XLS')):
try:
# Save uploaded file
temp_path = os.path.join(temp_dir, file.filename)
@@ -278,8 +428,11 @@ def upload_orders_handler():
session['csv_filename'] = file.filename
session['orders_csv_filepath'] = temp_path
# Process the CSV file
orders_data, validation_errors, validation_warnings = process_csv_file(temp_path)
# Process the file based on extension
if file.filename.lower().endswith(('.xlsx', '.xls')):
orders_data, validation_errors, validation_warnings = process_excel_file(temp_path)
else:
orders_data, validation_errors, validation_warnings = process_csv_file(temp_path)
# Store processed data in session
session['orders_csv_data'] = orders_data
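
Note: the upload handler now accepts .csv, .xlsx and .xls and branches on the filename; a compact view of that dispatch (the process_upload wrapper is illustrative, while the two parser functions are the ones defined in this file):

def process_upload(temp_path, filename):
    """Route a saved upload to the Excel or CSV parser based on its extension."""
    if filename.lower().endswith(('.xlsx', '.xls')):
        return process_excel_file(temp_path)   # returns (orders_data, errors, warnings)
    return process_csv_file(temp_path)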

View File

@@ -7,8 +7,13 @@ def get_db_connection():
settings = {}
with open(settings_file, 'r') as f:
for line in f:
key, value = line.strip().split('=', 1)
settings[key] = value
line = line.strip()
# Skip empty lines and comments
if not line or line.startswith('#'):
continue
if '=' in line:
key, value = line.split('=', 1)
settings[key] = value
return mariadb.connect(
user=settings['username'],
password=settings['password'],
@@ -23,6 +28,10 @@ def get_unprinted_orders_data(limit=100):
Returns list of order dictionaries where printed_labels != 1
"""
try:
import sys
sys.stderr.write(f"DEBUG print_module: get_unprinted_orders_data called with limit={limit}\n")
sys.stderr.flush()
conn = get_db_connection()
cursor = conn.cursor()
@@ -30,8 +39,14 @@ def get_unprinted_orders_data(limit=100):
cursor.execute("SHOW COLUMNS FROM order_for_labels LIKE 'printed_labels'")
column_exists = cursor.fetchone()
sys.stderr.write(f"DEBUG print_module: printed_labels column exists={bool(column_exists)}\n")
sys.stderr.flush()
if column_exists:
# Use printed_labels column
sys.stderr.write(f"DEBUG print_module: Executing query with printed_labels != 1\n")
sys.stderr.flush()
cursor.execute("""
SELECT id, comanda_productie, cod_articol, descr_com_prod, cantitate,
com_achiz_client, nr_linie_com_client, customer_name,
@@ -43,6 +58,9 @@ def get_unprinted_orders_data(limit=100):
LIMIT %s
""", (limit,))
else:
sys.stderr.write(f"DEBUG print_module: Executing fallback query (no printed_labels column)\n")
sys.stderr.flush()
# Fallback: get all orders if no printed_labels column
cursor.execute("""
SELECT id, comanda_productie, cod_articol, descr_com_prod, cantitate,
@@ -55,7 +73,21 @@ def get_unprinted_orders_data(limit=100):
""", (limit,))
orders = []
for row in cursor.fetchall():
rows = cursor.fetchall()
sys.stderr.write(f"DEBUG print_module: Query returned {len(rows)} rows\n")
sys.stderr.flush()
# Also write to file for debugging
try:
with open('/app/print_module_debug.log', 'w') as f:
f.write(f"Query returned {len(rows)} rows\n")
f.write(f"Column exists: {column_exists}\n")
if rows:
f.write(f"First row: {rows[0]}\n")
except:
pass
for row in rows:
if column_exists:
orders.append({
'id': row[0],
@@ -100,6 +132,21 @@ def get_unprinted_orders_data(limit=100):
return orders
except Exception as e:
import sys
import traceback
error_trace = traceback.format_exc()
sys.stderr.write(f"ERROR in get_unprinted_orders_data: {e}\n{error_trace}\n")
sys.stderr.flush()
# Write to file
try:
with open('/app/print_module_error.log', 'w') as f:
f.write(f"ERROR: {e}\n")
f.write(f"Traceback:\n{error_trace}\n")
except:
pass
print(f"Error retrieving unprinted orders: {e}")
return []
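
Note: most of the lines added in this file follow one pattern: write diagnostics to stderr and, best-effort, to a log file, without ever letting logging break the query path. A small sketch of that pattern (the helper name and default path are placeholders, not code from this commit):

import sys
import traceback

def log_exception(exc: Exception, log_path: str = '/app/print_module_error.log') -> None:
    """Write the exception and traceback to stderr and, best-effort, to a file.

    Call from inside an except block so format_exc() sees the active exception.
    """
    trace = traceback.format_exc()
    sys.stderr.write(f"ERROR: {exc}\n{trace}\n")
    sys.stderr.flush()
    try:
        with open(log_path, 'w') as f:
            f.write(f"ERROR: {exc}\n")
            f.write(f"Traceback:\n{trace}\n")
    except OSError:
        pass  # diagnostics must never break the request path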

View File

@@ -1717,45 +1717,203 @@ def etichete():
@requires_labels_module
def upload_data():
if request.method == 'POST':
import sys
sys.stdout.flush()
# Write to file to ensure we can see it
try:
with open('/app/request.log', 'a') as f:
from datetime import datetime
f.write(f"\n{'='*80}\n")
f.write(f"POST REQUEST at {datetime.now()}\n")
f.write(f"Form data: {dict(request.form)}\n")
f.write(f"Files: {list(request.files.keys())}\n")
except:
pass
sys.stderr.write(f"DEBUG: POST request received for upload_data\n")
sys.stderr.flush()
action = request.form.get('action', 'preview')
sys.stderr.write(f"DEBUG: Action = {action}\n")
sys.stderr.flush()
if action == 'preview':
# Handle file upload and show preview
print(f"DEBUG: Processing preview action")
print(f"DEBUG: Files in request: {list(request.files.keys())}")
if 'file' not in request.files:
print(f"DEBUG: No file in request.files")
flash('No file selected', 'error')
return redirect(request.url)
file = request.files['file']
print(f"DEBUG: File received: {file.filename}")
if file.filename == '':
print(f"DEBUG: Empty filename")
flash('No file selected', 'error')
return redirect(request.url)
if file and file.filename.lower().endswith('.csv'):
filename_lower = file.filename.lower()
print(f"DEBUG: Filename lowercase: {filename_lower}")
# Handle both CSV and Excel files
if file and (filename_lower.endswith('.csv') or filename_lower.endswith('.xlsx') or filename_lower.endswith('.xls')):
try:
# Read CSV file
import csv
import io
# Read the file content
stream = io.StringIO(file.stream.read().decode("UTF8"), newline=None)
csv_input = csv.DictReader(stream)
# Convert to list for preview
preview_data = []
headers = []
for i, row in enumerate(csv_input):
if i == 0:
headers = list(row.keys())
if i < 10: # Show only first 10 rows for preview
preview_data.append(row)
else:
break
if filename_lower.endswith('.csv'):
# Read CSV file
import csv
import io
# Read the file content
stream = io.StringIO(file.stream.read().decode("UTF8"), newline=None)
csv_input = csv.DictReader(stream)
# Define the fields that are stored in the database
database_fields = [
'comanda_productie', 'cod_articol', 'descr_com_prod', 'cantitate',
'data_livrare', 'dimensiune', 'com_achiz_client', 'nr_linie_com_client',
'customer_name', 'customer_article_number', 'open_for_order', 'line_number'
]
# Convert to list for preview
all_rows = []
for i, row in enumerate(csv_input):
all_rows.append(row)
if i == 0:
# Get all available fields from CSV
all_fields = list(row.keys())
# Filter to only database fields
headers = [field for field in database_fields if field in all_fields or
any(field.lower() == k.lower().replace(' ', '_').replace('.', '') for k in all_fields)]
if i < 10: # Show only first 10 rows for preview
# Filter row to only show database fields
filtered_row = {k: v for k, v in row.items() if k.lower().replace(' ', '_').replace('.', '') in database_fields}
preview_data.append(filtered_row)
# If no headers were set, use all available
if not headers and all_rows:
headers = list(all_rows[0].keys())
preview_data = all_rows[:10]
# Store the full file content in a temp file instead of session
import uuid
upload_id = str(uuid.uuid4())
temp_data_file = f'/tmp/upload_{upload_id}.csv'
file.stream.seek(0) # Reset file pointer
with open(temp_data_file, 'wb') as f:
f.write(file.stream.read())
# Store only the file reference in session
session['upload_id'] = upload_id
session['csv_filename'] = file.filename
session['file_type'] = 'csv'
session.modified = True
# Store the full file content in session for later processing
file.stream.seek(0) # Reset file pointer
session['csv_content'] = file.stream.read().decode("UTF8")
session['csv_filename'] = file.filename
else: # Excel file
print(f"DEBUG: Processing Excel file: {file.filename}")
import sys
sys.stderr.write(f"DEBUG: Processing Excel file: {file.filename}\n")
sys.stderr.flush()
import os
import tempfile
from app.order_labels import process_excel_file
# Save uploaded file temporarily
temp_file = tempfile.NamedTemporaryFile(delete=False, suffix=os.path.splitext(file.filename)[1])
print(f"DEBUG: Created temp file: {temp_file.name}")
sys.stderr.write(f"DEBUG: Created temp file: {temp_file.name}\n")
sys.stderr.flush()
file.save(temp_file.name)
temp_file.close()
print(f"DEBUG: Saved file to temp location")
sys.stderr.write(f"DEBUG: Saved file to temp location\n")
sys.stderr.flush()
# Process Excel file
print(f"DEBUG: Calling process_excel_file()")
orders_data, errors, warnings = process_excel_file(temp_file.name)
print(f"DEBUG: Process complete - orders: {len(orders_data)}, errors: {len(errors)}, warnings: {len(warnings)}")
# Clean up temp file
os.unlink(temp_file.name)
if errors:
for error in errors[:10]:
flash(error, 'error')
if len(errors) > 10:
flash(f'... and {len(errors) - 10} more errors', 'error')
if warnings:
for warning in warnings[:5]:
flash(warning, 'warning')
if not orders_data:
import sys
sys.stderr.write(f"ERROR: No valid orders data found. Errors: {len(errors)}\n")
sys.stderr.flush()
# Write to file
try:
with open('/app/upload_error.log', 'a') as f:
from datetime import datetime
f.write(f"\n{'='*80}\n")
f.write(f"NO VALID DATA at {datetime.now()}\n")
f.write(f"File: {file.filename}\n")
f.write(f"Errors ({len(errors)}):\n")
for err in errors[:20]:
f.write(f" - {err}\n")
f.write(f"Warnings ({len(warnings)}):\n")
for warn in warnings[:20]:
f.write(f" - {warn}\n")
except:
pass
flash('No valid data found in Excel file', 'error')
return redirect(request.url)
# Get headers from first row - only show fields that will be stored in database
if orders_data:
# Define the fields that are stored in the database
database_fields = [
'comanda_productie', 'cod_articol', 'descr_com_prod', 'cantitate',
'data_livrare', 'dimensiune', 'com_achiz_client', 'nr_linie_com_client',
'customer_name', 'customer_article_number', 'open_for_order', 'line_number'
]
# Filter headers to only include database fields that exist in data
all_fields = list(orders_data[0].keys())
headers = [field for field in database_fields if field in all_fields]
# Filter preview data to only show database fields
preview_data = []
for order in orders_data[:10]:
filtered_order = {k: v for k, v in order.items() if k in headers}
preview_data.append(filtered_order)
# Store data in a temporary file instead of session (session is too small for large datasets)
import json
import uuid
upload_id = str(uuid.uuid4())
temp_data_file = f'/tmp/upload_{upload_id}.json'
with open(temp_data_file, 'w') as f:
json.dump(orders_data, f)
# Store only the file reference in session
session['upload_id'] = upload_id
session['csv_filename'] = file.filename
session['file_type'] = 'excel'
session.modified = True
return render_template('upload_orders.html',
preview_data=preview_data,
@@ -1764,27 +1922,112 @@ def upload_data():
filename=file.filename)
except Exception as e:
flash(f'Error reading CSV file: {str(e)}', 'error')
import traceback
import sys
error_trace = traceback.format_exc()
print(f"ERROR processing file: {error_trace}")
sys.stderr.write(f"ERROR processing file: {error_trace}\n")
sys.stderr.flush()
# Also write to a file
try:
with open('/app/upload_error.log', 'a') as f:
from datetime import datetime
f.write(f"\n{'='*80}\n")
f.write(f"ERROR at {datetime.now()}\n")
f.write(f"File: {file.filename if file else 'unknown'}\n")
f.write(f"Error: {str(e)}\n")
f.write(f"Traceback:\n{error_trace}\n")
except:
pass
flash(f'Error reading file: {str(e)}', 'error')
return redirect(request.url)
else:
flash('Please upload a CSV file', 'error')
flash('Please upload a CSV or Excel file (.csv, .xlsx, .xls)', 'error')
return redirect(request.url)
elif action == 'save':
# Save the data to database
if 'csv_content' not in session:
import sys
sys.stderr.write("DEBUG: Save action triggered\n")
sys.stderr.flush()
# Log to file immediately
try:
with open('/app/save_check.log', 'a') as f:
from datetime import datetime
f.write(f"\n{'='*80}\n")
f.write(f"SAVE ACTION at {datetime.now()}\n")
f.write(f"Session keys: {list(session.keys())}\n")
f.write(f"Session file_type: {session.get('file_type', 'NOT SET')}\n")
f.write(f"Has csv_content: {'csv_content' in session}\n")
f.write(f"Has orders_data: {'orders_data' in session}\n")
except Exception as log_err:
sys.stderr.write(f"Error writing log: {log_err}\n")
file_type = session.get('file_type')
upload_id = session.get('upload_id')
sys.stderr.write(f"DEBUG: File type = {file_type}, upload_id = {upload_id}\n")
sys.stderr.flush()
if not file_type or not upload_id:
sys.stderr.write("DEBUG: Missing file_type or upload_id in session\n")
sys.stderr.flush()
try:
with open('/app/save_check.log', 'a') as f:
f.write("ERROR: Missing upload data in session - redirecting\n")
except:
pass
flash('No data to save. Please upload a file first.', 'error')
return redirect(request.url)
try:
import csv
import io
print(f"DEBUG: Starting {file_type.upper()} upload processing...")
sys.stderr.write(f"DEBUG: Starting {file_type.upper()} upload processing...\n")
sys.stderr.flush()
print(f"DEBUG: Starting CSV upload processing...")
# Log to file
try:
with open('/app/save_debug.log', 'a') as f:
from datetime import datetime
f.write(f"\n{'='*80}\n")
f.write(f"SAVE START at {datetime.now()}\n")
f.write(f"File type: {file_type}\n")
f.write(f"Session keys: {list(session.keys())}\n")
except:
pass
# Read the CSV content from session
stream = io.StringIO(session['csv_content'], newline=None)
csv_input = csv.DictReader(stream)
# Get orders data from temp file
import json
temp_data_file = f'/tmp/upload_{upload_id}.{"json" if file_type == "excel" else "csv"}'
if file_type == 'excel':
with open(temp_data_file, 'r') as f:
orders_list = json.load(f)
# Log
try:
with open('/app/save_debug.log', 'a') as f:
f.write(f"Loaded {len(orders_list)} orders from temp file (Excel)\n")
except:
pass
else:
# Read the CSV content from temp file
import csv
with open(temp_data_file, 'r') as f:
csv_input = csv.DictReader(f)
orders_list = list(csv_input)
# Log
try:
with open('/app/save_debug.log', 'a') as f:
f.write(f"Loaded {len(orders_list)} orders from temp file (CSV)\n")
except:
pass
# Connect to database
conn = get_db_connection()
@@ -1794,10 +2037,10 @@ def upload_data():
error_count = 0
errors = []
print(f"DEBUG: Connected to database, processing rows...")
print(f"DEBUG: Connected to database, processing {len(orders_list)} rows...")
# Process each row
for index, row in enumerate(csv_input):
for index, row in enumerate(orders_list):
try:
print(f"DEBUG: Processing row {index + 1}: {row}")
@@ -1824,10 +2067,18 @@ def upload_data():
# Convert empty string to None for date field
if data_livrare:
try:
# Parse date from various formats (9/23/2023, 23/9/2023, 2023-09-23, etc.)
# Parse date from various formats including Excel datetime format
from datetime import datetime
# Try different date formats
date_formats = ['%m/%d/%Y', '%d/%m/%Y', '%Y-%m-%d', '%m-%d-%Y', '%d-%m-%Y']
date_formats = [
'%Y-%m-%d', # 2024-03-12
'%Y-%m-%d %H:%M:%S', # 2024-03-12 00:00:00 (Excel format)
'%m/%d/%Y', # 03/12/2024
'%d/%m/%Y', # 12/03/2024
'%m-%d-%Y', # 03-12-2024
'%d-%m-%Y', # 12-03-2024
'%d.%m.%Y' # 12.03.2024
]
parsed_date = None
for fmt in date_formats:
try:
@@ -1897,9 +2148,39 @@ def upload_data():
print(f"DEBUG: Committed {inserted_count} records to database")
# Clear session data
session.pop('csv_content', None)
# Log the result
import sys
try:
with open('/app/upload_success.log', 'a') as f:
from datetime import datetime
f.write(f"\n{'='*80}\n")
f.write(f"UPLOAD COMPLETED at {datetime.now()}\n")
f.write(f"File type: {file_type}\n")
f.write(f"Total rows processed: {len(orders_list)}\n")
f.write(f"Successfully inserted: {inserted_count}\n")
f.write(f"Errors: {error_count}\n")
if errors:
f.write(f"First 10 errors:\n")
for err in errors[:10]:
f.write(f" - {err}\n")
except:
pass
sys.stderr.write(f"DEBUG: Upload complete - inserted {inserted_count}, errors {error_count}\n")
sys.stderr.flush()
# Clear session data and remove temp file
import os
temp_file_path = f'/tmp/upload_{upload_id}.{"json" if file_type == "excel" else "csv"}'
try:
if os.path.exists(temp_file_path):
os.unlink(temp_file_path)
except:
pass
session.pop('upload_id', None)
session.pop('csv_filename', None)
session.pop('file_type', None)
# Show results
if error_count > 0:
@@ -1912,6 +2193,24 @@ def upload_data():
flash(f'Successfully uploaded {inserted_count} orders for labels', 'success')
except Exception as e:
import sys
import traceback
error_trace = traceback.format_exc()
# Log the error
try:
with open('/app/upload_error.log', 'a') as f:
from datetime import datetime
f.write(f"\n{'='*80}\n")
f.write(f"SAVE ERROR at {datetime.now()}\n")
f.write(f"Error: {str(e)}\n")
f.write(f"Traceback:\n{error_trace}\n")
except:
pass
sys.stderr.write(f"ERROR in save: {error_trace}\n")
sys.stderr.flush()
flash(f'Error processing data: {str(e)}', 'error')
return redirect(url_for('main.upload_data'))
@@ -3092,15 +3391,46 @@ def get_unprinted_orders():
# return jsonify({'error': 'Access denied. Required roles: superadmin, warehouse_manager, etichete'}), 403
try:
print("DEBUG: Calling get_unprinted_orders_data()")
import sys
sys.stderr.write("DEBUG: Calling get_unprinted_orders_data()\n")
sys.stderr.flush()
data = get_unprinted_orders_data()
print(f"DEBUG: Retrieved {len(data)} orders")
sys.stderr.write(f"DEBUG: Retrieved {len(data)} orders\n")
sys.stderr.flush()
# Write to file
try:
with open('/app/unprinted_debug.log', 'w') as f:
from datetime import datetime
f.write(f"DEBUG at {datetime.now()}\n")
f.write(f"Retrieved {len(data)} orders\n")
if data:
f.write(f"First order: {data[0]}\n")
except:
pass
return jsonify(data)
except Exception as e:
print(f"DEBUG: Error in get_unprinted_orders: {e}")
import sys
import traceback
traceback.print_exc()
error_trace = traceback.format_exc()
sys.stderr.write(f"DEBUG: Error in get_unprinted_orders: {e}\n{error_trace}\n")
sys.stderr.flush()
# Write to file
try:
with open('/app/unprinted_debug.log', 'w') as f:
from datetime import datetime
f.write(f"ERROR at {datetime.now()}\n")
f.write(f"Error: {e}\n")
f.write(f"Traceback:\n{error_trace}\n")
except:
pass
return jsonify({'error': str(e)}), 500
@bp.route('/generate_labels_pdf/<int:order_id>', methods=['POST'])
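
Note: stripped of the logging, the preview/save flow above reduces to one pattern: persist the parsed upload to a temp file keyed by a UUID, keep only that key in the Flask session, and reload plus clean up on save. A minimal sketch of the pattern (stash_upload and pop_upload are illustrative names, not functions from this commit):

import json
import os
import uuid

def stash_upload(orders_data: list, session: dict, filename: str) -> None:
    """Write parsed rows to /tmp and keep only a small reference in the session."""
    upload_id = str(uuid.uuid4())
    with open(f'/tmp/upload_{upload_id}.json', 'w') as f:
        json.dump(orders_data, f)
    session['upload_id'] = upload_id
    session['csv_filename'] = filename
    session['file_type'] = 'excel'

def pop_upload(session: dict) -> list:
    """Reload the stashed rows on save, then remove the temp file and the session keys."""
    upload_id = session.pop('upload_id', None)
    session.pop('csv_filename', None)
    session.pop('file_type', None)
    if not upload_id:
        return []
    path = f'/tmp/upload_{upload_id}.json'
    try:
        with open(path, 'r') as f:
            return json.load(f)
    finally:
        if os.path.exists(path):
            os.unlink(path)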

View File

@@ -2,6 +2,50 @@
{% block head %}
<!-- Print Module CSS is now loaded via base.html for all printing pages -->
<style>
/* Compact table styling for print_lost_labels page */
.print-lost-labels-compact .scan-table.print-module-table {
font-size: 10px;
}
.print-lost-labels-compact .scan-table.print-module-table thead th {
font-size: 10px;
padding: 6px 8px;
line-height: 1.2;
}
.print-lost-labels-compact .scan-table.print-module-table tbody td {
font-size: 9px;
padding: 4px 6px;
line-height: 1.3;
}
/* Keep important data slightly larger and bold */
.print-lost-labels-compact .scan-table.print-module-table tbody td:nth-child(2) {
font-size: 10px;
font-weight: 600;
}
/* Make numbers more compact */
.print-lost-labels-compact .scan-table.print-module-table tbody td:nth-child(5),
.print-lost-labels-compact .scan-table.print-module-table tbody td:nth-child(9),
.print-lost-labels-compact .scan-table.print-module-table tbody td:nth-child(13) {
font-size: 9px;
}
/* Reduce row height */
.print-lost-labels-compact .scan-table.print-module-table tbody tr {
height: auto;
min-height: 24px;
}
/* Adjust header title */
.print-lost-labels-compact .card.scan-table-card h3 {
font-size: 16px;
padding: 8px 0;
margin-bottom: 8px;
}
</style>
{% endblock %}
{% block content %}
@@ -13,7 +57,7 @@
</div>
<!-- ROW 1: Search Card (full width) -->
<div class="scan-container lost-labels">
<div class="scan-container lost-labels print-lost-labels-compact">
<div class="card search-card">
<div style="display: flex; align-items: center; gap: 15px; flex-wrap: wrap;">
<label for="search-input" style="font-weight: bold; white-space: nowrap;">Search Order (CP...):</label>

View File

@@ -94,21 +94,25 @@ table.view-orders-table.scan-table tbody tr:hover td {
{% else %}
<!-- Show file upload -->
<input type="hidden" name="action" value="preview">
<label for="file">Choose CSV file:</label>
<input type="file" name="file" accept=".csv" required><br>
<label for="file">Choose CSV or Excel file:</label>
<input type="file" name="file" accept=".csv,.xlsx,.xls" required><br>
<button type="submit" class="btn">Upload & Preview</button>
<!-- CSV Format Information -->
<!-- File Format Information -->
<div style="margin-top: 20px; padding: 15px; background-color: var(--app-card-bg, #2a3441); border-radius: 5px; border-left: 4px solid var(--app-accent-color, #007bff); color: var(--app-text-color, #ffffff);">
<h5 style="margin-top: 0; color: var(--app-accent-color, #007bff);">Expected CSV Format</h5>
<p style="margin-bottom: 10px; color: var(--app-text-color, #ffffff);">Your CSV file should contain columns such as:</p>
<h5 style="margin-top: 0; color: var(--app-accent-color, #007bff);">Expected File Format</h5>
<p style="margin-bottom: 10px; color: var(--app-text-color, #ffffff);">Supported file types: <strong>CSV (.csv)</strong> and <strong>Excel (.xlsx, .xls)</strong></p>
<p style="margin-bottom: 10px; color: var(--app-text-color, #ffffff);">Your file should contain columns such as:</p>
<ul style="margin-bottom: 10px; color: var(--app-text-color, #ffffff);">
<li><strong>order_number</strong> - The order/production number</li>
<li><strong>quantity</strong> - Number of items</li>
<li><strong>warehouse_location</strong> - Storage location</li>
<li><strong>Comanda Productie</strong> - Production order number</li>
<li><strong>Cod Articol</strong> - Article code</li>
<li><strong>Descr. Com. Prod</strong> - Description</li>
<li><strong>Cantitate</strong> - Quantity</li>
<li><strong>Data Livrare</strong> - Delivery date</li>
<li><strong>Customer Name</strong> - Customer name</li>
</ul>
<p style="color: var(--app-secondary-text, #b8c5d1); font-size: 14px; margin-bottom: 0;">
Column names are case-insensitive and can have variations like "Order Number", "Quantity", "Location", etc.
Column names are case-insensitive and can have variations. For Excel files, the first sheet with data will be used.
</p>
</div>
{% endif %}