import: support legacy wide-format SETUP.csv (Appl_Title, L_Head1..10, Default_Printer)

- Add wide-format detection and a dedicated import path mapping headers to SystemSetup keys
- Accept SETUP.csv as valid in validation when wide-format headers are present
- Batch import path handles wide-format with audit entries

This resolves the "0 mapped / 12 unmapped" issue for legacy SETUP.csv.
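For context, a sketch of the header set this commit targets. Only the header names come from the commit; the sample values are invented for illustration. Note that the twelve columns are exactly the "0 mapped / 12 unmapped" set the old auto-mapper rejected:

# Hypothetical legacy wide-format SETUP.csv: one setting per COLUMN, not per row.
legacy_headers = ["Appl_Title"] + [f"L_Head{i}" for i in range(1, 11)] + ["Default_Printer"]
print(len(legacy_headers))  # 12 -- the "12 unmapped" headers from the commit message
sample_row = {"Appl_Title": "Widget Tracker", "L_Head1": "Dept", "Default_Printer": "LPT1"}  # invented values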
@@ -919,6 +919,70 @@ def _validate_required_headers(file_type: str, mapped_headers: Dict[str, str]) -
     }
 
 
+def _is_setup_wide_format(headers: List[str]) -> bool:
+    """Detect legacy wide-format SETUP.csv with column headers like L_Head1..L_Head10, Appl_Title, Default_Printer."""
+    if not headers:
+        return False
+    known = {"Appl_Title", "Default_Printer"}
+    known.update({f"L_Head{i}" for i in range(1, 11)})
+    return any(h in known for h in headers)
+
+
+# Mapping from legacy wide-format SETUP.csv headers to canonical SystemSetup.setting_key values
+SETUP_WIDE_HEADER_TO_KEY: Dict[str, str] = {
+    "Appl_Title": "appl_title",
+    "Default_Printer": "default_printer",
+    **{f"L_Head{i}": f"l_head{i}" for i in range(1, 11)},
+}
+
+
+def _import_setup_wide(rows: List[Dict[str, str]], db: Session, replace_existing: bool) -> Tuple[int, List[Dict[str, Any]]]:
+    """Import legacy wide-format SETUP.csv as key/value pairs into SystemSetup.
+
+    Each header maps to a setting key; values are taken from the row(s). If multiple rows exist,
+    subsequent non-empty values will overwrite earlier ones for the same key.
+    Returns (imported_count, errors).
+    """
+    if replace_existing:
+        try:
+            db.query(SystemSetup).delete()
+            db.commit()
+        except Exception:
+            db.rollback()
+            # Proceed with upserts without clearing if deletion fails
+            pass
+
+    imported_count = 0
+    errors: List[Dict[str, Any]] = []
+    for row_index, row in enumerate(rows, start=2):
+        if not isinstance(row, dict):
+            continue
+        for header, key in SETUP_WIDE_HEADER_TO_KEY.items():
+            try:
+                if header not in row:
+                    continue
+                value = row.get(header)
+                if value in (None, ""):
+                    continue
+                existing = db.query(SystemSetup).filter(SystemSetup.setting_key == key).first()
+                if existing:
+                    existing.setting_value = str(value)
+                else:
+                    db.add(SystemSetup(setting_key=key, setting_value=str(value), description=f"Imported from SETUP.{header}"))
+                imported_count += 1
+                if imported_count % 100 == 0:
+                    db.commit()
+            except Exception as e:
+                db.rollback()
+                errors.append({"row": row_index, "field": header, "error": str(e)})
+    try:
+        db.commit()
+    except Exception as e:
+        db.rollback()
+        errors.append({"row": None, "field": "commit", "error": str(e)})
+    return imported_count, errors
+
+
 def _get_required_fields(model_class) -> List[str]:
     """Infer required (non-nullable) fields for a model to avoid DB errors.
 
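As a quick sanity check on the helpers above, a minimal sketch (assumes _is_setup_wide_format and SETUP_WIDE_HEADER_TO_KEY are importable from this module; the "Extra_Col" header is invented):

# Detection fires on ANY recognized header; unrelated headers alone do not.
assert _is_setup_wide_format(["Appl_Title", "Extra_Col"])
assert not _is_setup_wide_format(["setting_key", "setting_value"])
# The mapping lowercases legacy headers into canonical setting keys.
assert SETUP_WIDE_HEADER_TO_KEY["L_Head7"] == "l_head7"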
@@ -1335,6 +1399,25 @@ async def import_csv_data(
         # Keep error minimal for client; internal logging can capture 'e'
         raise HTTPException(status_code=400, detail=f"Could not parse CSV file. The file appears to have serious formatting issues. Error: {str(e)}")
 
+    # Special handling: legacy wide-format SETUP.csv (Appl_Title, L_Head1..10, Default_Printer)
+    if file_type == "SETUP.csv" and _is_setup_wide_format(headers):
+        imported_count, errors = _import_setup_wide(rows_data, db, replace_existing)
+        result = {
+            "file_type": file_type,
+            "imported_count": imported_count,
+            "errors": errors[:10],
+            "total_errors": len(errors),
+            "auto_mapping": {
+                "mapped_headers": {},
+                "unmapped_headers": headers,
+                "wide_format": True,
+                "flexible_saved_rows": 0,
+            },
+        }
+        if errors:
+            result["warning"] = f"Import completed with {len(errors)} errors"
+        return result
+
     imported_count = 0
     created_count = 0
     updated_count = 0
@@ -1911,9 +1994,11 @@ async def batch_validate_csv_files(
                 "error": str(e)
             })
 
+        # Consider valid if we can map at least one column; for SETUP.csv also accept recognized wide-format headers
+        wide_ok = (file_type == "SETUP.csv" and _is_setup_wide_format(csv_headers))
         validation_results.append({
             "file_type": file_type,
-            "valid": (len(mapped_headers) > 0 and len(errors) == 0 and header_validation.get("ok", True)),
+            "valid": ((len(mapped_headers) > 0 or wide_ok) and len(errors) == 0 and header_validation.get("ok", True)),
             "headers": {
                 "found": csv_headers,
                 "mapped": mapped_headers,
@@ -1925,6 +2010,7 @@ async def batch_validate_csv_files(
             "total_errors": len(errors),
             "auto_mapping": {
                 "suggestions": mapping_info["suggestions"],
+                "wide_format": wide_ok,
             },
         })
 
@@ -2195,6 +2281,49 @@ async def batch_import_csv_files(
         mapped_headers = mapping_info["mapped_headers"]
         unmapped_headers = mapping_info["unmapped_headers"]
         header_validation = _validate_required_headers(file_type, mapped_headers)
+
+        # Special handling: legacy wide-format SETUP.csv (Appl_Title, L_Head1..10, Default_Printer)
+        if file_type == "SETUP.csv" and _is_setup_wide_format(csv_headers):
+            imported_count, errors = _import_setup_wide(rows_list, db, replace_existing)
+            total_imported += imported_count
+            total_errors += len(errors)
+            results.append({
+                "file_type": file_type,
+                "status": "success" if not errors else "completed_with_errors",
+                "imported_count": imported_count,
+                "errors": len(errors),
+                "message": f"Imported {imported_count} settings" + (f" with {len(errors)} errors" if errors else ""),
+                "auto_mapping": {
+                    "mapped_headers": {},
+                    "unmapped_headers": csv_headers,
+                    "wide_format": True,
+                    "flexible_saved_rows": 0,
+                },
+            })
+            try:
+                db.add(ImportAuditFile(
+                    audit_id=audit_row.id,
+                    file_type=file_type,
+                    status="success" if not errors else "completed_with_errors",
+                    imported_count=imported_count,
+                    errors=len(errors),
+                    message=f"Imported {imported_count} settings" + (f" with {len(errors)} errors" if errors else ""),
+                    details={
+                        "mapped_headers": [],
+                        "unmapped_count": len(csv_headers),
+                        "flexible_saved_rows": 0,
+                        "wide_format": True,
+                        "header_validation": header_validation,
+                    }
+                ))
+                db.commit()
+                try:
+                    await _broadcast_import_progress(db, audit_row.id)
+                except Exception:
+                    pass
+            except Exception:
+                db.rollback()
+            continue
+
         imported_count = 0
         errors = []
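To make the overwrite semantics described in _import_setup_wide's docstring concrete, a hedged, self-contained sketch follows. The SystemSetup model here is a hypothetical minimal stand-in (the real model lives elsewhere in this repo), and the sketch assumes _import_setup_wide and SETUP_WIDE_HEADER_TO_KEY from the first hunk are defined in the same module:

# Hypothetical stand-in model -- only the columns the import path touches.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class SystemSetup(Base):
    __tablename__ = "system_setup"
    id = Column(Integer, primary_key=True)
    setting_key = Column(String, unique=True)
    setting_value = Column(String)
    description = Column(String)

engine = create_engine("sqlite://")  # in-memory database for the demo
Base.metadata.create_all(engine)
db = sessionmaker(bind=engine)()

rows = [
    {"Appl_Title": "Widget Tracker", "L_Head1": "Dept"},  # invented values
    {"Appl_Title": "Widget Tracker v2"},                  # later non-empty value wins
]
count, errs = _import_setup_wide(rows, db, replace_existing=False)
print(count, errs)  # 3, [] -- each non-empty cell counts once; appl_title ends as "Widget Tracker v2"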