Initial commit: add compliance_checks table, per-check metadata on assets, and compliance audit trail
This commit is contained in:

app/services/csv_service.py — new file, 60 lines added

@@ -0,0 +1,60 @@
|
||||
import csv
|
||||
import io
|
||||
|
||||
|
||||
# Maps our field names to possible CSV column header aliases.
# Matching is case-insensitive and whitespace-tolerant: headers are
# lower-cased, stripped, and internal spaces become underscores before
# being compared against these aliases (so "First Name" matches
# 'first_name'). Aliases cover common HR/AD export conventions
# (e.g. sAMAccountName, givenName, sn) plus some French labels.
FIELD_ALIASES = {
    'windows_id': ['windows_id', 'employeeid', 'employee_id', 'wid', 'id', 'user_id', 'samaccountname'],
    'first_name': ['first_name', 'firstname', 'givenname', 'given_name', 'prenom'],
    'last_name': ['last_name', 'lastname', 'surname', 'sn', 'family_name', 'nom'],
    'email': ['email', 'mail', 'email_address', 'emailaddress'],
    'department': ['department', 'dept', 'division'],
    'job_title': ['job_title', 'title', 'position', 'jobtitle', 'job_position'],
    'phone': ['phone', 'telephone', 'mobile', 'phonenumber', 'telephonenumber'],
    'location': ['location', 'office', 'site', 'physicaldeliveryofficename'],
}


def parse_users_csv(file_stream):
    """
    Parse a CSV file stream and return (users_list, errors_list).

    Accepts BOM-prefixed UTF-8 or plain UTF-8. Column headers are
    matched case-insensitively against FIELD_ALIASES.

    Args:
        file_stream: binary file-like object whose ``read()`` returns
            bytes (e.g. an uploaded file).

    Returns:
        Tuple ``(users, errors)``. ``users`` is a list of dicts keyed by
        the canonical field names of FIELD_ALIASES; unmapped columns
        yield ''. ``errors`` is a list of human-readable messages. Rows
        with no windows_id are skipped and reported in ``errors``; file-
        level failures (bad encoding, no header) return ``([], [msg])``.
    """
    try:
        # 'utf-8-sig' transparently strips a leading BOM if present.
        content = file_stream.read().decode('utf-8-sig')
    except (UnicodeDecodeError, AttributeError):
        # AttributeError covers objects without .read()/bytes semantics.
        return [], ['Could not decode file. Please use UTF-8 encoding.']

    reader = csv.DictReader(io.StringIO(content))

    if not reader.fieldnames:
        return [], ['CSV file is empty or has no header row.']

    col_map = _map_columns(reader.fieldnames)

    errors = []
    users = []

    try:
        # start=2: row 1 is the header, so the first data row is row 2.
        for row_num, row in enumerate(reader, start=2):
            user = {
                field: _cell(row, col_map.get(field))
                for field in FIELD_ALIASES
            }

            # windows_id is the mandatory key for a user record.
            if not user['windows_id']:
                errors.append(f'Row {row_num}: missing windows_id — skipped.')
                continue

            users.append(user)
    except csv.Error as exc:
        # Malformed CSV mid-file (NUL bytes, broken quoting): keep the
        # rows parsed so far and report the failure instead of crashing.
        errors.append(f'CSV parsing stopped: {exc}')

    return users, errors


def _map_columns(fieldnames):
    """Map each canonical field name to its actual CSV header, if any.

    Headers are normalised (lower-cased, stripped, spaces -> '_') before
    lookup in FIELD_ALIASES; the first alias that matches wins.
    """
    norm_headers = {}
    for header in fieldnames:
        key = header.lower().strip().replace(' ', '_')
        # First occurrence wins so a duplicate column later in the file
        # cannot silently shadow an earlier one.
        norm_headers.setdefault(key, header)

    col_map = {}  # our_field -> actual_csv_header
    for field, aliases in FIELD_ALIASES.items():
        for alias in aliases:
            if alias in norm_headers:
                col_map[field] = norm_headers[alias]
                break
    return col_map


def _cell(row, csv_col):
    """Return the stripped cell value for *csv_col*, or '' if unmapped/absent."""
    if not csv_col:
        return ''
    # `or ''` guards against None values DictReader emits for short rows.
    return (row.get(csv_col, '') or '').strip()
|
||||
Reference in New Issue
Block a user