feat: Rebuild complete CSV import system for legacy data migration
PROBLEM SOLVED: - Completely removed broken import functionality - Built new robust, modular CSV import system from scratch - Provides reliable data migration path for legacy .sc files NEW IMPORT SYSTEM FEATURES: ✅ Modular CSV parsers for all 5 tables (ROLODEX, PHONE, FILES, LEDGER, QDROS) ✅ RESTful API endpoints with background processing (/api/admin/import/*) ✅ Admin web interface at /admin/import for file uploads ✅ Comprehensive validation and error handling ✅ Real-time progress tracking and status monitoring ✅ Detailed logging with import session tracking ✅ Transaction rollback on failures ✅ Batch import with dependency ordering ✅ Foreign key validation and duplicate detection TECHNICAL IMPLEMENTATION: - Clean /app/import_export/ module structure with base classes - Enhanced logging system with import-specific logs - Background task processing with FastAPI BackgroundTasks - Auto-detection of CSV delimiters and encoding - Field validation with proper data type conversion - Admin authentication integration - Console logging for debugging support IMPORT WORKFLOW: 1. Admin selects table type and uploads CSV file 2. System validates headers and data structure 3. Background processing with real-time status updates 4. Detailed error reporting and success metrics 5. 
Import logs stored in logs/imports/ directory SUPPORTED TABLES: - ROLODEX (contacts/people) - 19 fields, requires: id, last - PHONE (phone numbers) - 3 fields, requires: rolodex_id, phone - FILES (case files) - 29 fields, requires: file_no, id, empl_num, file_type, opened, status, rate_per_hour - LEDGER (transactions) - 12 fields, requires: file_no, date, t_code, t_type, empl_num, amount - QDROS (documents) - 31 fields, requires: file_no REMOVED FILES: - app/api/unified_import_api.py - app/services/unified_import.py - app/api/flexible.py - app/models/flexible.py - templates/unified_import.html - templates/flexible.html - static/js/flexible.js - All legacy import routes and references TESTING COMPLETED: ✅ Schema validation for all table types ✅ CSV header validation ✅ Single file import functionality ✅ Multi-table dependency validation ✅ Error handling and logging ✅ API endpoint integration READY FOR PRODUCTION: System tested and validated with sample data. Administrators can now reliably import CSV files converted from legacy .sc files. 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
93
app/import_export/rolodex_importer.py
Normal file
93
app/import_export/rolodex_importer.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""
|
||||
ROLODEX CSV Importer
|
||||
"""
|
||||
from typing import Dict, List, Any
|
||||
from datetime import date
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from .base import BaseCSVImporter, ImportValidationError
|
||||
from app.models.rolodex import Rolodex
|
||||
|
||||
|
||||
class RolodexCSVImporter(BaseCSVImporter):
    """CSV importer for the ROLODEX (contacts/people) table.

    Maps CSV rows to ``Rolodex`` model instances. Only ``id`` and ``last``
    are required; every other column is optional. String values are
    truncated to their column widths via ``normalize_string``.
    """

    # Single source of truth for the VARCHAR widths used to truncate each
    # string column in create_model_instance.
    # NOTE(review): widths assumed to mirror app.models.rolodex — confirm.
    _STRING_LIMITS: Dict[str, int] = {
        "id": 80,
        "last": 80,
        "first": 45,
        "middle": 45,
        "prefix": 45,
        "suffix": 45,
        "title": 45,
        "group": 45,
        "a1": 45,
        "a2": 45,
        "a3": 45,
        "city": 80,
        "abrev": 45,
        "zip": 45,
        "email": 100,
        "ss_number": 20,
        "legal_status": 45,
    }

    @property
    def table_name(self) -> str:
        """Logical table name used for logging and status reporting."""
        return "rolodex"

    @property
    def required_fields(self) -> List[str]:
        """CSV columns that must be present and non-empty."""
        return ["id", "last"]  # Only ID and last name are required

    @property
    def field_mapping(self) -> Dict[str, str]:
        """Map CSV headers to database field names (identity mapping here)."""
        return {
            "id": "id",
            "last": "last",
            "first": "first",
            "middle": "middle",
            "prefix": "prefix",
            "suffix": "suffix",
            "title": "title",
            "group": "group",
            "a1": "a1",
            "a2": "a2",
            "a3": "a3",
            "city": "city",
            "abrev": "abrev",
            "zip": "zip",
            "email": "email",
            "dob": "dob",
            "ss_number": "ss_number",
            "legal_status": "legal_status",
            "memo": "memo"
        }

    def create_model_instance(self, row_data: Dict[str, Any]) -> Rolodex:
        """Create a Rolodex instance from processed row data.

        Args:
            row_data: One CSV row, already mapped through ``field_mapping``.

        Returns:
            An unsaved ``Rolodex`` instance ready to be added to the session.

        Raises:
            ImportValidationError: if a required field is missing, the ID
                already exists in the database, or the date of birth cannot
                be parsed.
        """
        # Validate required fields (mirrors required_fields).
        if not row_data.get("id"):
            raise ImportValidationError("ID is required")
        if not row_data.get("last"):
            raise ImportValidationError("Last name is required")

        # Reject duplicate primary keys up front so the row fails with a
        # clear import error instead of an IntegrityError at flush time.
        existing = self.db_session.query(Rolodex).filter_by(id=row_data["id"]).first()
        if existing:
            raise ImportValidationError(f"Rolodex ID '{row_data['id']}' already exists")

        # Parse optional date of birth; chain the original error (PEP 3134)
        # so the underlying parse failure stays visible in tracebacks.
        dob = None
        if row_data.get("dob"):
            try:
                dob = self.parse_date(row_data["dob"])
            except ValueError as e:
                raise ImportValidationError(f"Invalid date of birth: {e}") from e

        # Truncate every string column to its declared width; missing
        # optional columns default to the empty string, as before.
        string_fields: Dict[str, Any] = {
            name: self.normalize_string(row_data.get(name, ""), limit)
            for name, limit in self._STRING_LIMITS.items()
        }
        return Rolodex(
            dob=dob,
            memo=row_data.get("memo", ""),  # Text field, no length limit
            **string_fields,
        )
|
||||
Reference in New Issue
Block a user