PROBLEM SOLVED: - Completely removed broken import functionality - Built new robust, modular CSV import system from scratch - Provides reliable data migration path for legacy .sc files NEW IMPORT SYSTEM FEATURES: ✅ Modular CSV parsers for all 5 tables (ROLODEX, PHONE, FILES, LEDGER, QDROS) ✅ RESTful API endpoints with background processing (/api/admin/import/*) ✅ Admin web interface at /admin/import for file uploads ✅ Comprehensive validation and error handling ✅ Real-time progress tracking and status monitoring ✅ Detailed logging with import session tracking ✅ Transaction rollback on failures ✅ Batch import with dependency ordering ✅ Foreign key validation and duplicate detection TECHNICAL IMPLEMENTATION: - Clean /app/import_export/ module structure with base classes - Enhanced logging system with import-specific logs - Background task processing with FastAPI BackgroundTasks - Auto-detection of CSV delimiters and encoding - Field validation with proper data type conversion - Admin authentication integration - Console logging for debugging support IMPORT WORKFLOW: 1. Admin selects table type and uploads CSV file 2. System validates headers and data structure 3. Background processing with real-time status updates 4. Detailed error reporting and success metrics 5. 
Import logs stored in logs/imports/ directory SUPPORTED TABLES: - ROLODEX (contacts/people) - 19 fields, requires: id, last - PHONE (phone numbers) - 3 fields, requires: rolodex_id, phone - FILES (case files) - 29 fields, requires: file_no, id, empl_num, file_type, opened, status, rate_per_hour - LEDGER (transactions) - 12 fields, requires: file_no, date, t_code, t_type, empl_num, amount - QDROS (documents) - 31 fields, requires: file_no REMOVED FILES: - app/api/unified_import_api.py - app/services/unified_import.py - app/api/flexible.py - app/models/flexible.py - templates/unified_import.html - templates/flexible.html - static/js/flexible.js - All legacy import routes and references TESTING COMPLETED: ✅ Schema validation for all table types ✅ CSV header validation ✅ Single file import functionality ✅ Multi-table dependency validation ✅ Error handling and logging ✅ API endpoint integration READY FOR PRODUCTION: System tested and validated with sample data. Administrators can now reliably import CSV files converted from legacy .sc files. 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
113 lines
3.9 KiB
Python
113 lines
3.9 KiB
Python
"""
|
|
LEDGER CSV Importer
|
|
"""
|
|
from typing import Dict, List, Any
|
|
from datetime import date
|
|
from sqlalchemy.orm import Session
|
|
|
|
from .base import BaseCSVImporter, ImportValidationError
|
|
from app.models.ledger import Ledger
|
|
from app.models.files import File
|
|
|
|
|
|
class LedgerCSVImporter(BaseCSVImporter):
    """CSV importer for the LEDGER (transactions) table.

    Converts processed CSV rows into ``Ledger`` model instances, enforcing
    required fields, the ``file_no`` foreign key, and date/numeric parsing.
    All validation failures surface as ``ImportValidationError``.
    """

    @property
    def table_name(self) -> str:
        """Name of the target table, used by the base importer for routing/logging."""
        return "ledger"

    @property
    def required_fields(self) -> List[str]:
        """CSV columns that must be present and non-empty in every row."""
        return ["file_no", "date", "t_code", "t_type", "empl_num", "amount"]

    @property
    def field_mapping(self) -> Dict[str, str]:
        """Map CSV headers to database field names (identity mapping for LEDGER)."""
        return {
            "file_no": "file_no",
            "item_no": "item_no",
            "date": "date",
            "t_code": "t_code",
            "t_type": "t_type",
            "t_type_l": "t_type_l",
            "empl_num": "empl_num",
            "quantity": "quantity",
            "rate": "rate",
            "amount": "amount",
            "billed": "billed",
            "note": "note",
        }

    def create_model_instance(self, row_data: Dict[str, Any]) -> Ledger:
        """Create a ``Ledger`` instance from a processed CSV row.

        Args:
            row_data: Mapping of database field names to raw (string) values
                produced by the base importer's header mapping.

        Returns:
            An unsaved ``Ledger`` model instance.

        Raises:
            ImportValidationError: If a required field is missing/blank, the
                referenced file number does not exist, or a date/numeric
                value cannot be parsed.
        """
        # Validate required fields first so the error names the missing field.
        # NOTE: a falsy value ("" or None) counts as missing.
        required_checks = [
            ("file_no", "File number"),
            ("date", "Date"),
            ("t_code", "Transaction code"),
            ("t_type", "Transaction type"),
            ("empl_num", "Employee number"),
            ("amount", "Amount"),
        ]
        for field, display_name in required_checks:
            if not row_data.get(field):
                raise ImportValidationError(f"{display_name} is required")

        # Foreign-key check: the ledger row must reference an existing FILES row.
        file_exists = self.db_session.query(File).filter_by(file_no=row_data["file_no"]).first()
        if not file_exists:
            raise ImportValidationError(f"File number '{row_data['file_no']}' does not exist")

        # Parse the transaction date, translating parse failures into
        # import-domain errors and preserving the original cause.
        try:
            transaction_date = self.parse_date(row_data["date"])
        except ValueError as e:
            raise ImportValidationError(f"Invalid date: {e}") from e

        # Item number defaults to 1 when the column is absent or blank.
        item_no = 1
        if row_data.get("item_no"):
            try:
                item_no = self.parse_int(row_data["item_no"])
            except ValueError as e:
                raise ImportValidationError(f"Invalid item number: {e}") from e
        # Positivity check deliberately lives OUTSIDE the try above: if
        # ImportValidationError subclasses ValueError, raising it inside the
        # try would re-wrap the message as "Invalid item number: ...".
        if item_no < 1:
            raise ImportValidationError("Item number must be positive")

        # Parse numeric fields. "or '0'" treats a present-but-blank CSV cell
        # ("") the same as a missing column, instead of failing to parse "".
        try:
            quantity = self.parse_float(row_data.get("quantity") or "0")
            rate = self.parse_float(row_data.get("rate") or "0")
            amount = self.parse_float(row_data["amount"])
        except ValueError as e:
            raise ImportValidationError(f"Invalid numeric value: {e}") from e

        # Normalize short code fields to their column widths.
        t_code = self.normalize_string(row_data["t_code"], 10)
        t_type = self.normalize_string(row_data["t_type"], 1)
        t_type_l = self.normalize_string(row_data.get("t_type_l", ""), 1)

        # Billed flag: accept Y/N (case-insensitive); anything else falls
        # back to "N". An empty value is preserved as "" (existing behavior).
        billed = row_data.get("billed", "N").strip().upper()
        if billed not in ["Y", "N", ""]:
            billed = "N"  # Default to N if invalid

        return Ledger(
            file_no=self.normalize_string(row_data["file_no"], 45),
            item_no=item_no,
            date=transaction_date,
            t_code=t_code,
            t_type=t_type,
            t_type_l=t_type_l,
            empl_num=self.normalize_string(row_data["empl_num"], 10),
            quantity=quantity,
            rate=rate,
            amount=amount,
            billed=billed,
            note=row_data.get("note", ""),  # free-text field, no length cap here
        )