Initial commit: add compliance_checks table, per-check metadata on assets, and compliance audit trail

This commit is contained in:
2026-04-24 07:14:27 +03:00
commit e63b486ec2
58 changed files with 6468 additions and 0 deletions

5
app/services/__init__.py Normal file
View File

@@ -0,0 +1,5 @@
# Re-export the service-layer public API so callers can simply do
# `from app.services import parse_users_csv`, etc.
from app.services.csv_service import parse_users_csv
from app.services.ldap_service import LDAPService
from app.services.pdf_service import generate_paperwork_pdf
__all__ = ['parse_users_csv', 'LDAPService', 'generate_paperwork_pdf']

View File

@@ -0,0 +1,60 @@
import csv
import io
# Maps our field names to possible CSV column header aliases (case-insensitive)
FIELD_ALIASES = {
    'windows_id': ['windows_id', 'employeeid', 'employee_id', 'wid', 'id', 'user_id', 'samaccountname'],
    'first_name': ['first_name', 'firstname', 'givenname', 'given_name', 'prenom'],
    'last_name': ['last_name', 'lastname', 'surname', 'sn', 'family_name', 'nom'],
    'email': ['email', 'mail', 'email_address', 'emailaddress'],
    'department': ['department', 'dept', 'division'],
    'job_title': ['job_title', 'title', 'position', 'jobtitle', 'job_position'],
    'phone': ['phone', 'telephone', 'mobile', 'phonenumber', 'telephonenumber'],
    'location': ['location', 'office', 'site', 'physicaldeliveryofficename'],
}


def parse_users_csv(file_stream):
    """
    Parse a CSV file stream and return ``(users_list, errors_list)``.

    Accepts BOM-prefixed UTF-8 or plain UTF-8.  Column headers are matched
    case-insensitively against FIELD_ALIASES; unrecognised columns are
    ignored and unmapped fields come back as empty strings.  Rows without
    a windows_id are skipped and reported in the errors list.
    """
    try:
        text = file_stream.read().decode('utf-8-sig')
    except (UnicodeDecodeError, AttributeError):
        return [], ['Could not decode file. Please use UTF-8 encoding.']

    reader = csv.DictReader(io.StringIO(text))
    if not reader.fieldnames:
        return [], ['CSV file is empty or has no header row.']

    # normalised header -> original CSV header (a later duplicate header wins,
    # matching dict-comprehension overwrite semantics)
    normalised = {}
    for header in reader.fieldnames:
        normalised[header.lower().strip().replace(' ', '_')] = header

    # our field name -> actual CSV header; the first matching alias wins
    column_for = {}
    for field, aliases in FIELD_ALIASES.items():
        matched = next((normalised[a] for a in aliases if a in normalised), None)
        if matched is not None:
            column_for[field] = matched

    users, errors = [], []
    for row_num, row in enumerate(reader, start=2):
        record = {
            field: ((row.get(column_for[field], '') or '').strip()
                    if field in column_for else '')
            for field in FIELD_ALIASES
        }
        if not record['windows_id']:
            errors.append(f'Row {row_num}: missing windows_id — skipped.')
            continue
        users.append(record)
    return users, errors

View File

@@ -0,0 +1,164 @@
"""
Dell asset lookup service.
Two modes (chosen automatically):
1. **No credentials** Returns a partial pre-fill (brand=Dell, OS default, service_tag).
Model and warranty must be filled in manually; a link to Dell's support page is provided.
Dell's public website is protected by Akamai and cannot be scraped reliably.
2. **TechDirect API** Full data including model, warranty dates, serial number.
Register free at https://tdm.dell.com → API Services → Create an API key pair.
Set DELL_CLIENT_ID and DELL_CLIENT_SECRET in your .env file.
"""
import logging
from datetime import datetime
import requests
from flask import current_app
# Module-level logger, named after this module so output is attributable.
log = logging.getLogger(__name__)
# OAuth2 token endpoint for Dell TechDirect (client-credentials grant).
_TOKEN_URL = "https://apigtwb2c.us.dell.com/auth/oauth/v2/token"
# Asset-entitlement lookup endpoint (model, warranty, ship date; API v5).
_ASSET_URL = "https://apigtwb2c.us.dell.com/PROD/sbil/eapi/v5/asset-entitlements"
# Human-facing Dell support page; {tag} is interpolated with the service tag.
_SUPPORT_PAGE = "https://www.dell.com/support/home/en-us/product-support/servicetag/{tag}/overview"
_TYPE_MAP = [
(["latitude", "inspiron", "xps", "vostro", "precision 5", "precision 7"], "Laptop"),
(["optiplex", "precision tower", "precision 3", "precision 9",
"optiplex micro", "optiplex small"], "Desktop"),
(["poweredge", "server"], "Server"),
(["wyse", "thin client"], "Other"),
(["monitor", "display", "screen", "s24", "s27", "p24", "u27"], "Monitor"),
]
def _detect_type(description: str) -> str:
desc = description.lower()
for keywords, asset_type in _TYPE_MAP:
if any(kw in desc for kw in keywords):
return asset_type
return "Laptop" # sensible default for Dell business hardware
# ------------------------------------------------------------------
# Partial pre-fill (no credentials)
# ------------------------------------------------------------------
def _partial_prefill(tag: str) -> dict:
    """
    Build the minimal pre-fill dict available without querying Dell.

    Only the tag, brand and OS default are known; model/warranty stay empty
    and a support-page URL is included so the user can look them up manually.
    """
    return dict(
        service_tag=tag,
        serial_number="",
        brand="Dell",
        model="",
        asset_type="Laptop",
        operating_system="Windows 11 Pro",
        warranty_expiry="",
        purchase_date="",
        source="partial",
        support_url=_SUPPORT_PAGE.format(tag=tag),
    )
# ------------------------------------------------------------------
# Official TechDirect API (requires credentials)
# ------------------------------------------------------------------
def _get_token(client_id: str, client_secret: str) -> str:
    """Fetch an OAuth2 bearer token from Dell TechDirect.

    Raises requests.HTTPError on a non-2xx response and the usual requests
    exceptions on network failure.
    """
    payload = {
        "grant_type": "client_credentials",
        "client_id": client_id,
        "client_secret": client_secret,
    }
    response = requests.post(_TOKEN_URL, data=payload, timeout=10)
    response.raise_for_status()
    return response.json()["access_token"]
def _lookup_api(tag: str, client_id: str, client_secret: str) -> dict:
    """
    Query the Dell TechDirect asset-entitlement API for one service tag.

    Returns a pre-fill dict (same keys as _partial_prefill) with model,
    serial number, latest warranty end date and ship date filled in.
    Wraps every failure (token, network, empty response) in RuntimeError
    so callers only have one exception type to handle.
    """
    try:
        token = _get_token(client_id, client_secret)
    except Exception as exc:
        raise RuntimeError(f"Failed to obtain Dell API token: {exc}") from exc
    try:
        resp = requests.get(
            _ASSET_URL,
            params={"servicetags": tag},
            headers={"Authorization": f"Bearer {token}"},
            timeout=15,
        )
        resp.raise_for_status()
        data = resp.json()
    except Exception as exc:
        raise RuntimeError(f"Dell API request failed: {exc}") from exc
    if not data:
        raise RuntimeError(f"No data returned for service tag '{tag}'.")
    # The endpoint normally returns a list (one element per tag); tolerate a
    # bare object as well.
    item = data[0] if isinstance(data, list) else data
    system_desc = item.get("productLineDescription") or item.get("systemDescription") or ""
    model = system_desc.strip()
    serial_number = (item.get("serviceTag") or tag).upper()
    # Use the furthest-out entitlement end date as the warranty expiry.
    warranty_expiry = None
    for ent in (item.get("entitlements") or []):
        end_str = ent.get("endDate") or ""
        if end_str:
            try:
                # Dates arrive as ISO timestamps; only the date part matters.
                dt = datetime.fromisoformat(end_str[:10])
                if warranty_expiry is None or dt > warranty_expiry:
                    warranty_expiry = dt
            except ValueError:
                pass  # ignore unparseable entitlement dates
    # No purchase date is exposed; the ship date is the closest proxy.
    ship_date = item.get("shipDate") or ""
    purchase_date = None
    if ship_date:
        try:
            purchase_date = datetime.fromisoformat(ship_date[:10])
        except ValueError:
            pass  # leave purchase_date empty on a malformed ship date
    return {
        "service_tag": tag,
        "serial_number": serial_number,
        "brand": "Dell",
        "model": model,
        "asset_type": _detect_type(f"{system_desc} {item.get('productFamily', '')}"),
        "operating_system": "Windows 11 Pro",
        "warranty_expiry": warranty_expiry.strftime("%Y-%m-%d") if warranty_expiry else "",
        "purchase_date": purchase_date.strftime("%Y-%m-%d") if purchase_date else "",
        "source": "techdirect_api",
        "support_url": _SUPPORT_PAGE.format(tag=tag),
    }
# ------------------------------------------------------------------
# Public entry point
# ------------------------------------------------------------------
def lookup_service_tag(service_tag: str) -> dict:
    """
    Look up a Dell service tag.

    Uses TechDirect API when DELL_CLIENT_ID + DELL_CLIENT_SECRET are configured;
    otherwise returns a partial pre-fill with a link to Dell's support page.
    """
    tag = service_tag.strip().upper()
    cfg = current_app.config
    client_id = cfg.get("DELL_CLIENT_ID", "")
    client_secret = cfg.get("DELL_CLIENT_SECRET", "")
    if not (client_id and client_secret):
        log.debug("Dell lookup returning partial pre-fill for %s (no API credentials)", tag)
        return _partial_prefill(tag)
    log.debug("Dell lookup via TechDirect API for %s", tag)
    return _lookup_api(tag, client_id, client_secret)

View File

@@ -0,0 +1,84 @@
from ldap3 import Server, Connection, ALL, SUBTREE
from flask import current_app
class LDAPService:
    """Wraps ldap3 to sync users from Active Directory."""

    # AD attributes requested for every user entry.
    ATTRIBUTES = [
        'employeeID', 'sAMAccountName', 'givenName', 'sn',
        'mail', 'department', 'title', 'telephoneNumber',
        'distinguishedName', 'physicalDeliveryOfficeName',
        'userAccountControl',
    ]

    def _connect(self):
        """Bind to the configured LDAP server and return an open Connection.

        auto_bind performs the bind immediately, so this raises an ldap3
        exception if the server is unreachable or credentials are rejected.
        """
        cfg = current_app.config
        server = Server(
            cfg['LDAP_SERVER'],
            port=cfg['LDAP_PORT'],
            use_ssl=cfg['LDAP_USE_SSL'],
            get_info=ALL,
        )
        conn = Connection(
            server,
            user=cfg['LDAP_BIND_USER'],
            password=cfg['LDAP_BIND_PASSWORD'],
            auto_bind=True,
        )
        return conn

    def sync_users(self):
        """
        Query AD and return a list of dicts ready to be upserted into the
        User model. Raises an exception if the connection fails.

        The connection is always unbound, even when the search or entry
        processing raises — previously a failure leaked the connection.
        """
        cfg = current_app.config
        conn = self._connect()
        try:
            conn.search(
                search_base=cfg['LDAP_BASE_DN'],
                search_filter=cfg['LDAP_USER_SEARCH_FILTER'],
                search_scope=SUBTREE,
                attributes=self.ATTRIBUTES,
            )
            wid_attr = cfg['LDAP_WINDOWS_ID_ATTR']
            users = []
            for entry in conn.entries:
                # Resolve windows_id from the configured attribute, fall back
                # to sAMAccountName; skip entries with no identifier at all.
                raw_wid = str(getattr(entry, wid_attr, '') or '')
                if not raw_wid:
                    raw_wid = str(entry.sAMAccountName or '')
                if not raw_wid:
                    continue
                # userAccountControl bit 2 (ACCOUNTDISABLE) = disabled account.
                uac = 0
                try:
                    uac = int(str(entry.userAccountControl or 0))
                except (ValueError, TypeError):
                    pass  # attribute missing/non-numeric: treat as enabled
                is_active = not bool(uac & 2)
                users.append({
                    'windows_id': raw_wid.strip(),
                    'first_name': str(entry.givenName or '').strip(),
                    'last_name': str(entry.sn or '').strip(),
                    'email': str(entry.mail or '').strip(),
                    'department': str(entry.department or '').strip(),
                    'job_title': str(entry.title or '').strip(),
                    'phone': str(entry.telephoneNumber or '').strip(),
                    'location': str(entry.physicalDeliveryOfficeName or '').strip(),
                    'ldap_dn': str(entry.distinguishedName or '').strip(),
                    'is_active': is_active,
                })
            return users
        finally:
            conn.unbind()

    def test_connection(self):
        """Returns True if a bind succeeds, raises on failure."""
        conn = self._connect()
        conn.unbind()
        return True

238
app/services/pdf_service.py Normal file
View File

@@ -0,0 +1,238 @@
import os
import json
from datetime import datetime
from reportlab.lib.pagesizes import A4
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.units import cm
from reportlab.lib import colors
from reportlab.lib.enums import TA_CENTER, TA_LEFT, TA_RIGHT
from reportlab.platypus import (
SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, HRFlowable
)
def _styles():
    """Return the custom ParagraphStyle palette used across the paperwork PDFs."""
    base = getSampleStyleSheet()
    navy = colors.HexColor('#1a3a5c')
    return {
        'title': ParagraphStyle(
            'DocTitle', parent=base['Title'],
            fontSize=16, spaceAfter=6, alignment=TA_CENTER, textColor=navy,
        ),
        'subtitle': ParagraphStyle(
            'Subtitle', parent=base['Normal'],
            fontSize=10, spaceAfter=4, alignment=TA_CENTER,
            textColor=colors.HexColor('#555555'),
        ),
        'section': ParagraphStyle(
            'Section', parent=base['Heading2'],
            fontSize=11, spaceBefore=12, spaceAfter=4,
            textColor=navy, borderPad=2,
        ),
        'normal': base['Normal'],
        'small': ParagraphStyle(
            'Small', parent=base['Normal'], fontSize=8, textColor=colors.grey,
        ),
        'footer': ParagraphStyle(
            'Footer', parent=base['Normal'],
            fontSize=8, alignment=TA_CENTER, textColor=colors.grey,
        ),
        'signature_label': ParagraphStyle(
            'SigLabel', parent=base['Normal'], fontSize=9, alignment=TA_CENTER,
        ),
        'right': ParagraphStyle(
            'Right', parent=base['Normal'], alignment=TA_RIGHT,
        ),
    }
def _header_table(company_name, company_address, doc_type_label, doc_id, created_at, styles):
    """Two-column header: company identity left, document metadata right."""
    date_text = created_at.strftime("%d/%m/%Y") if created_at else ""
    left = Table(
        [
            [Paragraph(f'<b>{company_name}</b>', styles['normal'])],
            [Paragraph(company_address or '', styles['small'])],
        ],
        colWidths=[9 * cm],
    )
    right = Table(
        [
            [Paragraph(f'<b>{doc_type_label}</b>', styles['right'])],
            [Paragraph(f'Doc #: {doc_id}', styles['right'])],
            [Paragraph(f'Date: {date_text}', styles['right'])],
        ],
        colWidths=[8 * cm],
    )
    outer = Table([[left, right]], colWidths=[9 * cm, 8 * cm])
    outer.setStyle(TableStyle([('VALIGN', (0, 0), (-1, -1), 'TOP')]))
    return outer
def _field_table(rows, styles):
    """Render (label, value) pairs as a two-column grid.

    rows: list of (label, value) tuples.

    Values come from user/asset data and are XML-escaped before being handed
    to Paragraph — reportlab parses paragraph text as mini-XML, so a bare
    '&' or '<' in e.g. a department name would otherwise abort PDF
    generation.  Labels are trusted internal constants and keep their
    intentional <b>...</b> markup.
    """
    from xml.sax.saxutils import escape

    data = [[Paragraph(f'<b>{label}</b>', styles['normal']),
             Paragraph(escape(str(value or '')), styles['normal'])]
            for label, value in rows]
    t = Table(data, colWidths=[5 * cm, 12 * cm])
    t.setStyle(TableStyle([
        ('BACKGROUND', (0, 0), (0, -1), colors.HexColor('#eaf0f8')),
        ('GRID', (0, 0), (-1, -1), 0.4, colors.HexColor('#cccccc')),
        ('VALIGN', (0, 0), (-1, -1), 'TOP'),
        ('TOPPADDING', (0, 0), (-1, -1), 4),
        ('BOTTOMPADDING', (0, 0), (-1, -1), 4),
        ('LEFTPADDING', (0, 0), (-1, -1), 6),
    ]))
    return t
def _signature_block(styles):
    """Side-by-side signature areas for the issuing admin and the employee."""
    label_style = styles['signature_label']
    rows = [
        [Paragraph('Issued by (IT Dept.)', label_style),
         Paragraph('Received by (Employee)', label_style)],
        [Spacer(1, 1.5 * cm), Spacer(1, 1.5 * cm)],
        [HRFlowable(width='95%'), HRFlowable(width='95%')],
        [Paragraph('Name / Signature / Date', label_style),
         Paragraph('Name / Signature / Date', label_style)],
    ]
    block = Table(rows, colWidths=[8.5 * cm, 8.5 * cm])
    block.setStyle(TableStyle([
        ('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
        ('ALIGN', (0, 0), (-1, -1), 'CENTER'),
    ]))
    return block
def generate_paperwork_pdf(doc, app):
    """
    Generate a PDF for a Paperwork document and save it to PDF_FOLDER.

    Builds a platypus "story" (header, title, employee/asset/assignment
    sections, optional extra fields and notes, type-specific clauses,
    signature block, footer) and renders it to
    ``<app root>/../PDF_FOLDER/doc_<id>_<timestamp>.pdf``.

    Returns the filename (not the full path).
    """
    # Paragraph parses its text as mini-XML, so user-entered title/notes must
    # be escaped or a bare '&'/'<' would abort the build.
    from xml.sax.saxutils import escape

    pdf_dir = os.path.join(app.root_path, '..', app.config['PDF_FOLDER'])
    os.makedirs(pdf_dir, exist_ok=True)
    filename = f'doc_{doc.id}_{datetime.utcnow().strftime("%Y%m%d_%H%M%S")}.pdf'
    filepath = os.path.join(pdf_dir, filename)
    page_doc = SimpleDocTemplate(
        filepath, pagesize=A4,
        rightMargin=2 * cm, leftMargin=2 * cm,
        topMargin=2 * cm, bottomMargin=2.5 * cm,
        title=doc.title,
    )
    styles = _styles()
    company_name = app.config.get('COMPANY_NAME', '')
    company_address = app.config.get('COMPANY_ADDRESS', '')
    user = doc.user
    asset = doc.asset
    assignment = doc.assignment
    # Load extra template fields (JSON blob; keys stored with a 'td_' prefix
    # are turned into human-readable labels).
    extra_fields = {}
    if doc.template_data:
        try:
            raw = json.loads(doc.template_data)
            extra_fields = {k.replace('td_', '').replace('_', ' ').title(): v
                            for k, v in raw.items()}
        except (json.JSONDecodeError, TypeError):
            pass  # malformed template_data: render the PDF without extras
    story = []
    # ── Header ──────────────────────────────────────────────────────────────
    story.append(_header_table(company_name, company_address,
                               doc.doc_type_label, doc.id, doc.created_at, styles))
    story.append(Spacer(1, 0.3 * cm))
    story.append(HRFlowable(width='100%', thickness=1.5, color=colors.HexColor('#1a3a5c')))
    story.append(Spacer(1, 0.4 * cm))
    # ── Title ────────────────────────────────────────────────────────────────
    story.append(Paragraph(escape(doc.title or ''), styles['title']))
    story.append(Spacer(1, 0.5 * cm))
    # ── User section ─────────────────────────────────────────────────────────
    story.append(Paragraph('Employee Information', styles['section']))
    user_rows = [
        ('Windows ID', user.windows_id),
        ('Full Name', user.display_name),
        ('Email', user.display_email),
        ('Department', user.department or ''),
        ('Job Title', user.job_title or ''),
        ('Location', user.location or ''),
    ]
    story.append(_field_table(user_rows, styles))
    story.append(Spacer(1, 0.4 * cm))
    # ── Asset section ─────────────────────────────────────────────────────────
    if asset:
        story.append(Paragraph('Asset Information', styles['section']))
        asset_rows = [
            ('Asset Type', asset.asset_type),
            ('Brand / Model', f'{asset.brand or ""} {asset.model or ""}'.strip() or ''),
            ('Serial Number', asset.serial_number),
            ('Service Tag', asset.service_tag or ''),
            ('Asset Tag', asset.asset_tag or ''),
            ('Operating System', asset.operating_system or ''),
        ]
        if asset.ram_gb:
            asset_rows.append(('RAM', f'{asset.ram_gb} GB'))
        if asset.storage_gb:
            asset_rows.append(('Storage', f'{asset.storage_gb} GB'))
        story.append(_field_table(asset_rows, styles))
        story.append(Spacer(1, 0.4 * cm))
    # ── Assignment section ────────────────────────────────────────────────────
    if assignment:
        story.append(Paragraph('Assignment Details', styles['section']))
        assign_rows = [
            ('Assigned Date', str(assignment.assigned_date) if assignment.assigned_date else ''),
            ('Returned Date', str(assignment.returned_date) if assignment.returned_date else 'Currently assigned'),
        ]
        story.append(_field_table(assign_rows, styles))
        story.append(Spacer(1, 0.4 * cm))
    # ── Extra / custom fields ─────────────────────────────────────────────────
    if extra_fields:
        story.append(Paragraph('Additional Information', styles['section']))
        story.append(_field_table(list(extra_fields.items()), styles))
        story.append(Spacer(1, 0.4 * cm))
    # ── Notes ─────────────────────────────────────────────────────────────────
    if doc.notes:
        story.append(Paragraph('Notes', styles['section']))
        story.append(Paragraph(escape(doc.notes), styles['normal']))
        story.append(Spacer(1, 0.4 * cm))
    # Type-specific clauses (fixed internal text, safe to pass unescaped)
    if doc.document_type == 'assignment':
        story.append(Paragraph('Terms & Conditions', styles['section']))
        clause = (
            'By signing below the employee acknowledges receipt of the above equipment in good '
            'working condition and agrees to: (1) use it solely for company business, '
            '(2) report any damage or loss immediately to the IT department, and '
            '(3) return it upon request or at the end of employment.'
        )
        story.append(Paragraph(clause, styles['normal']))
        story.append(Spacer(1, 0.4 * cm))
    if doc.document_type == 'return':
        story.append(Paragraph('Return Confirmation', styles['section']))
        clause = (
            'By signing below both parties confirm that the above equipment has been returned '
            'to the IT department and has been inspected for completeness and condition.'
        )
        story.append(Paragraph(clause, styles['normal']))
        story.append(Spacer(1, 0.4 * cm))
    # ── Signatures ────────────────────────────────────────────────────────────
    story.append(Paragraph('Signatures', styles['section']))
    story.append(Spacer(1, 0.3 * cm))
    story.append(_signature_block(styles))
    story.append(Spacer(1, 0.5 * cm))
    # ── Footer ────────────────────────────────────────────────────────────────
    story.append(HRFlowable(width='100%', thickness=0.5, color=colors.grey))
    story.append(Spacer(1, 0.2 * cm))
    story.append(Paragraph(
        f'Generated by IT Asset Management System · {datetime.utcnow().strftime("%d/%m/%Y %H:%M")} UTC',
        styles['footer'],
    ))
    page_doc.build(story)
    return filename

View File

@@ -0,0 +1,221 @@
"""
Template service: fill Word (.docx) templates and generate output files.
Variables available in templates (use {{ variable_name }} syntax):
User:
{{ user_name }} Full name (or [MASKED] after PII erasure)
{{ user_email }}
{{ user_phone }}
{{ user_department }}
{{ user_job_title }}
{{ user_location }}
{{ user_windows_id }} Always present — survives masking
{{ user_employee_id }} Same as windows_id (alias)
Asset:
{{ asset_serial }} Serial number
{{ asset_service_tag }}
{{ asset_tag }} Internal asset tag
{{ asset_brand }}
{{ asset_model }}
{{ asset_type }} e.g. Laptop / Desktop
{{ asset_os }}
{{ asset_warranty_expiry }}
{{ asset_location }}
Assignment:
{{ assignment_date }}
{{ assignment_id }}
{{ return_date }}
Document / company:
{{ document_date }} Date of generation (dd/mm/yyyy)
{{ document_number }} Paperwork record ID
{{ company_name }}
{{ company_address }}
{{ admin_name }} Logged-in admin who generated the doc
PII masking:
When User.mask() is called, all Paperwork records that were generated
from a template have their merge_vars updated (PII keys replaced with
[MASKED]) and the .docx/.pdf files are regenerated.
"""
import json
import os
import re
from datetime import datetime
from docxtpl import DocxTemplate
from flask import current_app
# PII variable keys — their values are replaced with [MASKED] when a user
# is masked (see mask_variables()).
PII_VARS = {'user_name', 'user_email', 'user_phone'}
# Variables that survive masking (non-PII identifiers and org metadata).
SAFE_VARS = {
    'user_department', 'user_job_title', 'user_location',
    'user_windows_id', 'user_employee_id',
}
def build_context(user, asset=None, assignment=None, paperwork=None, app=None):
    """
    Build the Jinja2 context dict from ORM objects.

    Every template variable is always present (empty string when the
    related object is missing) so templates never hit an undefined name.
    Used both at generation time and when re-rendering after masking.
    """
    if app is None:
        app = current_app._get_current_object()

    def _fmt(d):
        # dd/mm/yyyy, or '' when the date is unset
        return d.strftime('%d/%m/%Y') if d else ''

    ctx = {
        # User — the display_* accessors honour PII masking
        'user_name': user.display_name,
        'user_email': user.display_email,
        'user_phone': user.display_phone,
        'user_department': user.department or '',
        'user_job_title': user.job_title or '',
        'user_location': user.location or '',
        'user_windows_id': user.windows_id or '',
        'user_employee_id': user.windows_id or '',  # alias of windows_id
    }
    # Asset / assignment placeholders: always present, '' until filled below.
    for key in ('asset_serial', 'asset_service_tag', 'asset_tag', 'asset_brand',
                'asset_model', 'asset_type', 'asset_os', 'asset_warranty_expiry',
                'asset_location', 'assignment_date', 'assignment_id', 'return_date'):
        ctx[key] = ''
    # Document / company metadata
    ctx['document_date'] = datetime.utcnow().strftime('%d/%m/%Y')
    ctx['document_number'] = str(paperwork.id) if paperwork else ''
    ctx['company_name'] = app.config.get('COMPANY_NAME', '')
    ctx['company_address'] = app.config.get('COMPANY_ADDRESS', '')
    ctx['admin_name'] = ''
    if asset:
        ctx['asset_serial'] = asset.serial_number or ''
        ctx['asset_service_tag'] = asset.service_tag or ''
        ctx['asset_tag'] = asset.asset_tag or ''
        ctx['asset_brand'] = asset.brand or ''
        ctx['asset_model'] = asset.model or ''
        ctx['asset_type'] = asset.asset_type or ''
        ctx['asset_os'] = asset.operating_system or ''
        ctx['asset_warranty_expiry'] = _fmt(asset.warranty_expiry)
        ctx['asset_location'] = asset.location or ''
    if assignment:
        ctx['assignment_date'] = _fmt(assignment.assigned_date)
        ctx['assignment_id'] = str(assignment.id)
        ctx['return_date'] = _fmt(assignment.returned_date)
    return ctx
def _docx_path(app, filename):
folder = os.path.join(app.root_path, '..', app.config.get('DOCX_FOLDER', 'docx_output'))
os.makedirs(folder, exist_ok=True)
return os.path.join(folder, filename)
def _template_path(app, filename):
folder = os.path.join(app.root_path, '..', app.config.get('TEMPLATE_FOLDER', 'doc_templates'))
return os.path.join(folder, filename)
def extract_variables(template_path):
    """
    Parse a .docx file and return all unique Jinja2 variable names found
    in the document text ({{ var_name }} syntax).

    Prefers docxtpl's own introspection; if that fails for any reason the
    file is scanned as a plain zip archive of XML parts.
    """
    try:
        return sorted(DocxTemplate(template_path).get_undeclared_template_variables())
    except Exception:
        pass
    # Fallback: open as zip and regex-scan every XML member for {{ ... }}.
    import zipfile
    found = set()
    try:
        with zipfile.ZipFile(template_path, 'r') as archive:
            for member in archive.namelist():
                if not member.endswith('.xml'):
                    continue
                xml_text = archive.read(member).decode('utf-8', errors='ignore')
                found.update(re.findall(r'\{\{\s*(\w+)\s*\}\}', xml_text))
    except Exception:
        pass  # unreadable/corrupt archive: report no variables
    return sorted(found)
def render_template_to_docx(template_filepath, context, output_filename):
    """
    Fill a .docx template with context values and save to DOCX_FOLDER.
    Returns the saved filename.
    """
    app = current_app._get_current_object()
    document = DocxTemplate(template_filepath)
    document.render(context)
    document.save(_docx_path(app, output_filename))
    return output_filename
def regenerate_for_paperwork(paperwork, app=None):
    """
    Re-render the .docx for an existing Paperwork record using its stored
    merge_vars. Called after PII masking to overwrite files with sanitised data.

    If the record was generated from a template, the .docx is regenerated;
    the PDF is regenerated via pdf_service whenever one already exists.
    Failures are logged rather than raised.

    Returns (docx_filename, pdf_filename) — either may be None.
    """
    from app.services.pdf_service import generate_paperwork_pdf
    if app is None:
        app = current_app._get_current_object()

    docx_name = None
    pdf_name = None

    # Step 1: re-render the template-based .docx, if this record has one.
    if paperwork.template_id and paperwork.template:
        source = _template_path(app, paperwork.template.filename)
        if os.path.exists(source):
            target = paperwork.docx_filename or f'doc_{paperwork.id}.docx'
            try:
                render_template_to_docx(source, paperwork.get_merge_vars(), target)
                docx_name = target
            except Exception as exc:
                app.logger.error('Template re-render failed for paperwork %s: %s',
                                 paperwork.id, exc)
    # Step 2: regenerate the PDF when one exists (reads current model state).
    if paperwork.pdf_filename:
        try:
            pdf_name = generate_paperwork_pdf(paperwork, app)
        except Exception as exc:
            app.logger.error('PDF re-render failed for paperwork %s: %s',
                             paperwork.id, exc)
    return docx_name, pdf_name
def mask_variables(merge_vars: dict) -> dict:
    """Return a copy of merge_vars with PII values replaced by [MASKED]."""
    return {
        key: ('[MASKED]' if key in PII_VARS else value)
        for key, value in merge_vars.items()
    }