Fix print_lost_labels compact styling and production data import
- Added compact table styling to the print_lost_labels page (smaller fonts, reduced padding)
- Fixed fields missing from the production data import (production_order_line, line_number)
- Added better error handling and logging for Excel file imports
- Skip empty rows in the production data import
- Log all columns, and the columns that contain data, for debugging
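The empty-row skip and the column logging live in process_excel_file (app/order_labels.py), which this diff does not show. A minimal sketch of the intended behavior, assuming an openpyxl reader; every name below is invented for illustration, not quoted from the commit:

# Sketch only: the real process_excel_file in app/order_labels.py is not
# shown in this diff; the reader, column handling and return shape are assumed.
from openpyxl import load_workbook

def sketch_process_excel_file(path):
    wb = load_workbook(path, read_only=True, data_only=True)
    ws = wb.active
    rows = ws.iter_rows(values_only=True)
    headers = [str(h).strip() if h is not None else '' for h in next(rows)]
    print(f"DEBUG: All columns: {headers}")  # log every column for debugging

    orders, columns_with_data = [], set()
    for row in rows:
        # Skip rows where every cell is empty or whitespace
        if all(v is None or str(v).strip() == '' for v in row):
            continue
        record = dict(zip(headers, row))
        columns_with_data.update(k for k, v in record.items() if v not in (None, ''))
        orders.append(record)
    print(f"DEBUG: Columns with data: {sorted(columns_with_data)}")
    return orders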
@@ -1717,45 +1717,203 @@ def etichete():
 @requires_labels_module
 def upload_data():
     if request.method == 'POST':
+        import sys
+        sys.stdout.flush()
+
+        # Write to file to ensure we can see it
+        try:
+            with open('/app/request.log', 'a') as f:
+                from datetime import datetime
+                f.write(f"\n{'='*80}\n")
+                f.write(f"POST REQUEST at {datetime.now()}\n")
+                f.write(f"Form data: {dict(request.form)}\n")
+                f.write(f"Files: {list(request.files.keys())}\n")
+        except:
+            pass
+
+        sys.stderr.write(f"DEBUG: POST request received for upload_data\n")
+        sys.stderr.flush()
         action = request.form.get('action', 'preview')
+        sys.stderr.write(f"DEBUG: Action = {action}\n")
+        sys.stderr.flush()

         if action == 'preview':
             # Handle file upload and show preview
+            print(f"DEBUG: Processing preview action")
+            print(f"DEBUG: Files in request: {list(request.files.keys())}")
+
             if 'file' not in request.files:
+                print(f"DEBUG: No file in request.files")
                 flash('No file selected', 'error')
                 return redirect(request.url)

             file = request.files['file']
+            print(f"DEBUG: File received: {file.filename}")

             if file.filename == '':
+                print(f"DEBUG: Empty filename")
                 flash('No file selected', 'error')
                 return redirect(request.url)

-            if file and file.filename.lower().endswith('.csv'):
+            filename_lower = file.filename.lower()
+            print(f"DEBUG: Filename lowercase: {filename_lower}")
+
+            # Handle both CSV and Excel files
+            if file and (filename_lower.endswith('.csv') or filename_lower.endswith('.xlsx') or filename_lower.endswith('.xls')):
                 try:
-                    # Read CSV file
-                    import csv
-                    import io
-
-                    # Read the file content
-                    stream = io.StringIO(file.stream.read().decode("UTF8"), newline=None)
-                    csv_input = csv.DictReader(stream)

                     # Convert to list for preview
                     preview_data = []
                     headers = []

-                    for i, row in enumerate(csv_input):
-                        if i == 0:
-                            headers = list(row.keys())
-                        if i < 10:  # Show only first 10 rows for preview
-                            preview_data.append(row)
-                        else:
-                            break
+                    if filename_lower.endswith('.csv'):
+                        # Read CSV file
+                        import csv
+                        import io
+
+                        # Read the file content
+                        stream = io.StringIO(file.stream.read().decode("UTF8"), newline=None)
+                        csv_input = csv.DictReader(stream)
+
+                        # Define the fields that are stored in the database
+                        database_fields = [
+                            'comanda_productie', 'cod_articol', 'descr_com_prod', 'cantitate',
+                            'data_livrare', 'dimensiune', 'com_achiz_client', 'nr_linie_com_client',
+                            'customer_name', 'customer_article_number', 'open_for_order', 'line_number'
+                        ]
+
+                        # Convert to list for preview
+                        all_rows = []
+                        for i, row in enumerate(csv_input):
+                            all_rows.append(row)
+                            if i == 0:
+                                # Get all available fields from CSV
+                                all_fields = list(row.keys())
+                                # Filter to only database fields
+                                headers = [field for field in database_fields if field in all_fields or
+                                           any(field.lower() == k.lower().replace(' ', '_').replace('.', '') for k in all_fields)]
+                            if i < 10:  # Show only first 10 rows for preview
+                                # Filter row to only show database fields
+                                filtered_row = {k: v for k, v in row.items() if k.lower().replace(' ', '_').replace('.', '') in database_fields}
+                                preview_data.append(filtered_row)
+
+                        # If no headers were set, use all available
+                        if not headers and all_rows:
+                            headers = list(all_rows[0].keys())
+                            preview_data = all_rows[:10]
+
+                        # Store the full file content in a temp file instead of session
+                        import uuid
+
+                        upload_id = str(uuid.uuid4())
+                        temp_data_file = f'/tmp/upload_{upload_id}.csv'
+
+                        file.stream.seek(0)  # Reset file pointer
+                        with open(temp_data_file, 'wb') as f:
+                            f.write(file.stream.read())
+
+                        # Store only the file reference in session
+                        session['upload_id'] = upload_id
+                        session['csv_filename'] = file.filename
+                        session['file_type'] = 'csv'
+                        session.modified = True
-                    # Store the full file content in session for later processing
-                    file.stream.seek(0)  # Reset file pointer
-                    session['csv_content'] = file.stream.read().decode("UTF8")
-                    session['csv_filename'] = file.filename
+                    else: # Excel file
+                        print(f"DEBUG: Processing Excel file: {file.filename}")
+                        import sys
+                        sys.stderr.write(f"DEBUG: Processing Excel file: {file.filename}\n")
+                        sys.stderr.flush()
+
+                        import os
+                        import tempfile
+                        from app.order_labels import process_excel_file
+
+                        # Save uploaded file temporarily
+                        temp_file = tempfile.NamedTemporaryFile(delete=False, suffix=os.path.splitext(file.filename)[1])
+                        print(f"DEBUG: Created temp file: {temp_file.name}")
+                        sys.stderr.write(f"DEBUG: Created temp file: {temp_file.name}\n")
+                        sys.stderr.flush()
+
+                        file.save(temp_file.name)
+                        temp_file.close()
+                        print(f"DEBUG: Saved file to temp location")
+                        sys.stderr.write(f"DEBUG: Saved file to temp location\n")
+                        sys.stderr.flush()
+
+                        # Process Excel file
+                        print(f"DEBUG: Calling process_excel_file()")
+                        orders_data, errors, warnings = process_excel_file(temp_file.name)
+                        print(f"DEBUG: Process complete - orders: {len(orders_data)}, errors: {len(errors)}, warnings: {len(warnings)}")
+
+                        # Clean up temp file
+                        os.unlink(temp_file.name)
+
+                        if errors:
+                            for error in errors[:10]:
+                                flash(error, 'error')
+                            if len(errors) > 10:
+                                flash(f'... and {len(errors) - 10} more errors', 'error')
+
+                        if warnings:
+                            for warning in warnings[:5]:
+                                flash(warning, 'warning')
+
+                        if not orders_data:
+                            import sys
+                            sys.stderr.write(f"ERROR: No valid orders data found. Errors: {len(errors)}\n")
+                            sys.stderr.flush()
+
+                            # Write to file
+                            try:
+                                with open('/app/upload_error.log', 'a') as f:
+                                    from datetime import datetime
+                                    f.write(f"\n{'='*80}\n")
+                                    f.write(f"NO VALID DATA at {datetime.now()}\n")
+                                    f.write(f"File: {file.filename}\n")
+                                    f.write(f"Errors ({len(errors)}):\n")
+                                    for err in errors[:20]:
+                                        f.write(f" - {err}\n")
+                                    f.write(f"Warnings ({len(warnings)}):\n")
+                                    for warn in warnings[:20]:
+                                        f.write(f" - {warn}\n")
+                            except:
+                                pass
+
+                            flash('No valid data found in Excel file', 'error')
+                            return redirect(request.url)
+
+                        # Get headers from first row - only show fields that will be stored in database
+                        if orders_data:
+                            # Define the fields that are stored in the database
+                            database_fields = [
+                                'comanda_productie', 'cod_articol', 'descr_com_prod', 'cantitate',
+                                'data_livrare', 'dimensiune', 'com_achiz_client', 'nr_linie_com_client',
+                                'customer_name', 'customer_article_number', 'open_for_order', 'line_number'
+                            ]
+
+                            # Filter headers to only include database fields that exist in data
+                            all_fields = list(orders_data[0].keys())
+                            headers = [field for field in database_fields if field in all_fields]
+
+                            # Filter preview data to only show database fields
+                            preview_data = []
+                            for order in orders_data[:10]:
+                                filtered_order = {k: v for k, v in order.items() if k in headers}
+                                preview_data.append(filtered_order)
+
+                        # Store data in a temporary file instead of session (session is too small for large datasets)
+                        import json
+                        import uuid
+
+                        upload_id = str(uuid.uuid4())
+                        temp_data_file = f'/tmp/upload_{upload_id}.json'
+
+                        with open(temp_data_file, 'w') as f:
+                            json.dump(orders_data, f)
+
+                        # Store only the file reference in session
+                        session['upload_id'] = upload_id
+                        session['csv_filename'] = file.filename
+                        session['file_type'] = 'excel'
+                        session.modified = True
+
                     return render_template('upload_orders.html',
                                            preview_data=preview_data,
@@ -1764,27 +1922,112 @@ def upload_data():
                                            filename=file.filename)

                 except Exception as e:
-                    flash(f'Error reading CSV file: {str(e)}', 'error')
+                    import traceback
+                    import sys
+                    error_trace = traceback.format_exc()
+                    print(f"ERROR processing file: {error_trace}")
+                    sys.stderr.write(f"ERROR processing file: {error_trace}\n")
+                    sys.stderr.flush()
+
+                    # Also write to a file
+                    try:
+                        with open('/app/upload_error.log', 'a') as f:
+                            from datetime import datetime
+                            f.write(f"\n{'='*80}\n")
+                            f.write(f"ERROR at {datetime.now()}\n")
+                            f.write(f"File: {file.filename if file else 'unknown'}\n")
+                            f.write(f"Error: {str(e)}\n")
+                            f.write(f"Traceback:\n{error_trace}\n")
+                    except:
+                        pass
+
+                    flash(f'Error reading file: {str(e)}', 'error')
                     return redirect(request.url)
             else:
-                flash('Please upload a CSV file', 'error')
+                flash('Please upload a CSV or Excel file (.csv, .xlsx, .xls)', 'error')
                 return redirect(request.url)

         elif action == 'save':
             # Save the data to database
-            if 'csv_content' not in session:
+            import sys
+            sys.stderr.write("DEBUG: Save action triggered\n")
+            sys.stderr.flush()
+
+            # Log to file immediately
+            try:
+                with open('/app/save_check.log', 'a') as f:
+                    from datetime import datetime
+                    f.write(f"\n{'='*80}\n")
+                    f.write(f"SAVE ACTION at {datetime.now()}\n")
+                    f.write(f"Session keys: {list(session.keys())}\n")
+                    f.write(f"Session file_type: {session.get('file_type', 'NOT SET')}\n")
+                    f.write(f"Has csv_content: {'csv_content' in session}\n")
+                    f.write(f"Has orders_data: {'orders_data' in session}\n")
+            except Exception as log_err:
+                sys.stderr.write(f"Error writing log: {log_err}\n")
+
+            file_type = session.get('file_type')
+            upload_id = session.get('upload_id')
+
+            sys.stderr.write(f"DEBUG: File type = {file_type}, upload_id = {upload_id}\n")
+            sys.stderr.flush()
+
+            if not file_type or not upload_id:
+                sys.stderr.write("DEBUG: Missing file_type or upload_id in session\n")
+                sys.stderr.flush()
+
+                try:
+                    with open('/app/save_check.log', 'a') as f:
+                        f.write("ERROR: Missing upload data in session - redirecting\n")
+                except:
+                    pass
+
                 flash('No data to save. Please upload a file first.', 'error')
                 return redirect(request.url)

             try:
-                import csv
-                import io
-                print(f"DEBUG: Starting CSV upload processing...")
+                print(f"DEBUG: Starting {file_type.upper()} upload processing...")
+                sys.stderr.write(f"DEBUG: Starting {file_type.upper()} upload processing...\n")
+                sys.stderr.flush()
+
+                # Log to file
+                try:
+                    with open('/app/save_debug.log', 'a') as f:
+                        from datetime import datetime
+                        f.write(f"\n{'='*80}\n")
+                        f.write(f"SAVE START at {datetime.now()}\n")
+                        f.write(f"File type: {file_type}\n")
+                        f.write(f"Session keys: {list(session.keys())}\n")
+                except:
+                    pass

-                # Read the CSV content from session
-                stream = io.StringIO(session['csv_content'], newline=None)
-                csv_input = csv.DictReader(stream)
+                # Get orders data from temp file
+                import json
+                temp_data_file = f'/tmp/upload_{upload_id}.{"json" if file_type == "excel" else "csv"}'
+
+                if file_type == 'excel':
+                    with open(temp_data_file, 'r') as f:
+                        orders_list = json.load(f)
+
+                    # Log
+                    try:
+                        with open('/app/save_debug.log', 'a') as f:
+                            f.write(f"Loaded {len(orders_list)} orders from temp file (Excel)\n")
+                    except:
+                        pass
+                else:
+                    # Read the CSV content from temp file
+                    import csv
+                    with open(temp_data_file, 'r') as f:
+                        csv_input = csv.DictReader(f)
+                        orders_list = list(csv_input)
+
+                    # Log
+                    try:
+                        with open('/app/save_debug.log', 'a') as f:
+                            f.write(f"Loaded {len(orders_list)} orders from temp file (CSV)\n")
+                    except:
+                        pass

                 # Connect to database
                 conn = get_db_connection()
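The preview branch stages the parsed rows on disk under a random id and keeps only that id in the session; the save branch above rehydrates them. A minimal sketch of that round trip, with helper names invented for illustration:

# Illustration of the staging pattern used above, not the committed code.
import json
import uuid

def stage_upload(orders_data, session):
    # Preview step: park parsed rows in /tmp, keep only a reference in session.
    upload_id = str(uuid.uuid4())
    with open(f'/tmp/upload_{upload_id}.json', 'w') as f:
        json.dump(orders_data, f)
    session['upload_id'] = upload_id  # small enough for a cookie session
    session['file_type'] = 'excel'

def load_staged_upload(session):
    # Save step: rehydrate the rows the preview step staged.
    with open(f'/tmp/upload_{session["upload_id"]}.json') as f:
        return json.load(f)

Flask's default session is a client-side cookie capped at roughly 4 KB, which is why the full file content moved out of it.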
@@ -1794,10 +2037,10 @@ def upload_data():
                 error_count = 0
                 errors = []

-                print(f"DEBUG: Connected to database, processing rows...")
+                print(f"DEBUG: Connected to database, processing {len(orders_list)} rows...")

                 # Process each row
-                for index, row in enumerate(csv_input):
+                for index, row in enumerate(orders_list):
                     try:
                         print(f"DEBUG: Processing row {index + 1}: {row}")
@@ -1824,10 +2067,18 @@ def upload_data():
                         # Convert empty string to None for date field
                         if data_livrare:
                             try:
-                                # Parse date from various formats (9/23/2023, 23/9/2023, 2023-09-23, etc.)
+                                # Parse date from various formats including Excel datetime format
                                 from datetime import datetime
                                 # Try different date formats
-                                date_formats = ['%m/%d/%Y', '%d/%m/%Y', '%Y-%m-%d', '%m-%d-%Y', '%d-%m-%Y']
+                                date_formats = [
+                                    '%Y-%m-%d',           # 2024-03-12
+                                    '%Y-%m-%d %H:%M:%S',  # 2024-03-12 00:00:00 (Excel format)
+                                    '%m/%d/%Y',           # 03/12/2024
+                                    '%d/%m/%Y',           # 12/03/2024
+                                    '%m-%d-%Y',           # 03-12-2024
+                                    '%d-%m-%Y',           # 12-03-2024
+                                    '%d.%m.%Y'            # 12.03.2024
+                                ]
                                 parsed_date = None
                                 for fmt in date_formats:
                                     try:
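The body of the innermost try is cut off at the hunk boundary; the usual shape of a try-each-format parse, reconstructed here for reference (everything past the truncation point is an assumption, not quoted from the commit):

# Assumed shape of the truncated parse loop, not the committed code.
from datetime import datetime

def parse_delivery_date(data_livrare, date_formats):
    parsed_date = None
    for fmt in date_formats:
        try:
            parsed_date = datetime.strptime(str(data_livrare).strip(), fmt)
            break  # first matching format wins
        except ValueError:
            continue  # wrong format, try the next one
    # Order matters for ambiguous dates: 03/12/2024 matches %m/%d/%Y
    # before %d/%m/%Y ever gets a chance.
    return parsed_date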
@@ -1897,9 +2148,39 @@ def upload_data():
                 print(f"DEBUG: Committed {inserted_count} records to database")

-                # Clear session data
-                session.pop('csv_content', None)
+                # Log the result
+                import sys
+                try:
+                    with open('/app/upload_success.log', 'a') as f:
+                        from datetime import datetime
+                        f.write(f"\n{'='*80}\n")
+                        f.write(f"UPLOAD COMPLETED at {datetime.now()}\n")
+                        f.write(f"File type: {file_type}\n")
+                        f.write(f"Total rows processed: {len(orders_list)}\n")
+                        f.write(f"Successfully inserted: {inserted_count}\n")
+                        f.write(f"Errors: {error_count}\n")
+                        if errors:
+                            f.write(f"First 10 errors:\n")
+                            for err in errors[:10]:
+                                f.write(f" - {err}\n")
+                except:
+                    pass
+
+                sys.stderr.write(f"DEBUG: Upload complete - inserted {inserted_count}, errors {error_count}\n")
+                sys.stderr.flush()
+
+                # Clear session data and remove temp file
+                import os
+                temp_file_path = f'/tmp/upload_{upload_id}.{"json" if file_type == "excel" else "csv"}'
+                try:
+                    if os.path.exists(temp_file_path):
+                        os.unlink(temp_file_path)
+                except:
+                    pass
+
+                session.pop('upload_id', None)
                 session.pop('csv_filename', None)
+                session.pop('file_type', None)

                 # Show results
                 if error_count > 0:
@@ -1912,6 +2193,24 @@ def upload_data():
                     flash(f'Successfully uploaded {inserted_count} orders for labels', 'success')

             except Exception as e:
+                import sys
+                import traceback
+                error_trace = traceback.format_exc()
+
+                # Log the error
+                try:
+                    with open('/app/upload_error.log', 'a') as f:
+                        from datetime import datetime
+                        f.write(f"\n{'='*80}\n")
+                        f.write(f"SAVE ERROR at {datetime.now()}\n")
+                        f.write(f"Error: {str(e)}\n")
+                        f.write(f"Traceback:\n{error_trace}\n")
+                except:
+                    pass
+
+                sys.stderr.write(f"ERROR in save: {error_trace}\n")
+                sys.stderr.flush()
+
                 flash(f'Error processing data: {str(e)}', 'error')

         return redirect(url_for('main.upload_data'))
@@ -3092,15 +3391,46 @@ def get_unprinted_orders():
     # return jsonify({'error': 'Access denied. Required roles: superadmin, warehouse_manager, etichete'}), 403

     try:
         print("DEBUG: Calling get_unprinted_orders_data()")
+        import sys
+        sys.stderr.write("DEBUG: Calling get_unprinted_orders_data()\n")
+        sys.stderr.flush()
+
         data = get_unprinted_orders_data()
         print(f"DEBUG: Retrieved {len(data)} orders")
+
+        sys.stderr.write(f"DEBUG: Retrieved {len(data)} orders\n")
+        sys.stderr.flush()
+
+        # Write to file
+        try:
+            with open('/app/unprinted_debug.log', 'w') as f:
+                from datetime import datetime
+                f.write(f"DEBUG at {datetime.now()}\n")
+                f.write(f"Retrieved {len(data)} orders\n")
+                if data:
+                    f.write(f"First order: {data[0]}\n")
+        except:
+            pass
+
         return jsonify(data)

     except Exception as e:
         print(f"DEBUG: Error in get_unprinted_orders: {e}")
+        import sys
         import traceback
         traceback.print_exc()
+        error_trace = traceback.format_exc()
+
+        sys.stderr.write(f"DEBUG: Error in get_unprinted_orders: {e}\n{error_trace}\n")
+        sys.stderr.flush()
+
+        # Write to file
+        try:
+            with open('/app/unprinted_debug.log', 'w') as f:
+                from datetime import datetime
+                f.write(f"ERROR at {datetime.now()}\n")
+                f.write(f"Error: {e}\n")
+                f.write(f"Traceback:\n{error_trace}\n")
+        except:
+            pass
+
         return jsonify({'error': str(e)}), 500


 @bp.route('/generate_labels_pdf/<int:order_id>', methods=['POST'])
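The open-a-log-file, write, swallow-all-errors block recurs throughout this diff; a follow-up could fold it into a single helper, sketched here as a hypothetical refactor (not part of this change):

# Hypothetical consolidation of the repeated logging blocks above.
import sys
from datetime import datetime

def debug_log(path, *lines):
    # Append timestamped lines to a log file; never raise from logging.
    try:
        with open(path, 'a') as f:
            f.write(f"\n{'='*80}\n")
            f.write(f"{datetime.now()}\n")
            for line in lines:
                f.write(f"{line}\n")
    except Exception as e:
        sys.stderr.write(f"Error writing log: {e}\n")

# Usage, mirroring the blocks above:
# debug_log('/app/save_check.log', 'SAVE ACTION',
#           f"Session keys: {list(session.keys())}")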