Implement comprehensive CSV import system for legacy database migration

- Added 5 new legacy models to app/models.py (FileType, FileNots, RolexV, FVarLkup, RVarLkup)
- Created app/import_legacy.py with import functions for all legacy tables:
  * Reference tables: TRNSTYPE, TRNSLKUP, FOOTERS, FILESTAT, EMPLOYEE, GRUPLKUP, FILETYPE, FVARLKUP, RVARLKUP
  * Core tables: ROLODEX, PHONE, ROLEX_V, FILES, FILES_R, FILES_V, FILENOTS, LEDGER, DEPOSITS, PAYMENTS
  * Specialized: PLANINFO, QDROS, PENSIONS and all pension-related tables
- Created app/sync_legacy_to_modern.py with sync functions to populate modern models from legacy data
- Updated admin routes in app/main.py:
  * Extended process_csv_import to support all new import types
  * Added /admin/sync endpoint for syncing legacy to modern models
  * Updated get_import_type_from_filename to recognize all CSV file patterns
- Enhanced app/templates/admin.html with:
  * Import Order Guide showing recommended import sequence
  * Sync to Modern Models section with confirmation dialog
  * Sync results display with detailed per-table statistics
  * Updated supported file formats list
- All import functions use batch processing (500 rows), proper error handling, and structured logging (see the sketch below)
- Sync functions maintain foreign key integrity and skip orphaned records with warnings
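
The app/import_legacy.py diff itself is suppressed further down because of its size, so here is a minimal sketch of the batching, error-handling, and logging pattern the bullets above describe. It is illustrative only: the function name, CSV column name, and import paths are assumptions, not the committed code.

import csv
from typing import Any, Dict

import structlog
from sqlalchemy.orm import Session

from app.models import FileType   # assumed import path

logger = structlog.get_logger(__name__)
BATCH_SIZE = 500


def import_filetype(db: Session, file_path: str) -> Dict[str, Any]:
    """Hypothetical FILETYPE importer illustrating the shared pattern."""
    result = {'success': 0, 'errors': [], 'skipped': 0}
    batch = []
    with open(file_path, newline='', encoding='utf-8', errors='replace') as f:
        # start=2 so reported row numbers match the CSV file (row 1 is the header)
        for row_num, row in enumerate(csv.DictReader(f), start=2):
            try:
                file_type = (row.get('FILE_TYPE') or '').strip()  # column name assumed
                if not file_type:
                    result['skipped'] += 1
                    continue
                batch.append(FileType(file_type=file_type))
                if len(batch) >= BATCH_SIZE:
                    db.bulk_save_objects(batch)
                    db.commit()
                    result['success'] += len(batch)
                    batch = []
            except Exception as e:
                result['errors'].append(f"Row {row_num}: {e}")
                result['skipped'] += 1
    if batch:
        db.bulk_save_objects(batch)
        db.commit()
        result['success'] += len(batch)
    logger.info("import_filetype_complete", **result)
    return result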
This commit is contained in: HotSwapp
2025-10-08 09:41:38 -05:00
parent 2efbf14940
commit 4030dbd88e
6 changed files with 2545 additions and 38 deletions

app/import_legacy.py (new file, 1615 lines)

File diff suppressed because it is too large.

app/main.py
@@ -49,6 +49,8 @@ from .schemas import (
     FilesListResponse,
     LedgerListResponse,
 )
+from . import import_legacy
+from . import sync_legacy_to_modern

 # Load environment variables
 load_dotenv()
@@ -237,45 +239,77 @@ app.mount("/static", StaticFiles(directory="static"), name="static")
 def get_import_type_from_filename(filename: str) -> str:
     """
-    Determine import type based on filename pattern.
+    Determine import type based on filename pattern for legacy CSV files.

     Args:
         filename: Name of the uploaded CSV file

     Returns:
-        Import type string (client, phone, case, transaction, document, payment)
+        Import type string matching the import function keys
     """
     filename_upper = filename.upper()

     # Strip extension and normalize
     base = filename_upper.rsplit('.', 1)[0]

-    # Support files saved with explicit type prefixes (e.g., CLIENT_<uuid>.csv)
-    if base.startswith('CLIENT_'):
-        return 'client'
-    if base.startswith('PHONE_'):
-        return 'phone'
-    if base.startswith('CASE_'):
-        return 'case'
-    if base.startswith('TRANSACTION_'):
-        return 'transaction'
-    if base.startswith('DOCUMENT_'):
-        return 'document'
-    if base.startswith('PAYMENT_'):
-        return 'payment'
-
-    # Legacy/real file name patterns
-    if base.startswith('ROLODEX') or base.startswith('ROLEX') or 'ROLODEX' in base or 'ROLEX' in base:
-        return 'client'
-    if base.startswith('PHONE') or 'PHONE' in base:
-        return 'phone'
-    if base.startswith('FILES') or base.startswith('FILE') or 'FILES' in base:
-        return 'case'
-    if base.startswith('LEDGER') or 'LEDGER' in base or base.startswith('TRNSACTN') or 'TRNSACTN' in base:
-        return 'transaction'
-    if base.startswith('QDROS') or base.startswith('QDRO') or 'QDRO' in base:
-        return 'document'
-    if base.startswith('PAYMENTS') or base.startswith('DEPOSITS') or 'PAYMENT' in base or 'DEPOSIT' in base:
-        return 'payment'
+    # Reference tables
+    if 'TRNSTYPE' in base:
+        return 'trnstype'
+    if 'TRNSLKUP' in base:
+        return 'trnslkup'
+    if 'FOOTER' in base:
+        return 'footers'
+    if 'FILESTAT' in base:
+        return 'filestat'
+    if 'EMPLOYEE' in base:
+        return 'employee'
+    if 'GRUPLKUP' in base or 'GROUPLKUP' in base:
+        return 'gruplkup'
+    if 'FILETYPE' in base:
+        return 'filetype'
+    if 'FVARLKUP' in base:
+        return 'fvarlkup'
+    if 'RVARLKUP' in base:
+        return 'rvarlkup'
+
+    # Core data tables
+    if 'ROLEX_V' in base or 'ROLEXV' in base:
+        return 'rolex_v'
+    if 'ROLODEX' in base or 'ROLEX' in base:
+        return 'rolodex'
+    if 'FILES_R' in base or 'FILESR' in base:
+        return 'files_r'
+    if 'FILES_V' in base or 'FILESV' in base:
+        return 'files_v'
+    if 'FILENOTS' in base or 'FILE_NOTS' in base:
+        return 'filenots'
+    if 'FILES' in base or 'FILE' in base:
+        return 'files'
+    if 'PHONE' in base:
+        return 'phone'
+    if 'LEDGER' in base:
+        return 'ledger'
+    if 'DEPOSITS' in base or 'DEPOSIT' in base:
+        return 'deposits'
+    if 'PAYMENTS' in base or 'PAYMENT' in base:
+        return 'payments'
+
+    # Specialized tables
+    if 'PLANINFO' in base or 'PLAN_INFO' in base:
+        return 'planinfo'
+    if 'QDROS' in base or 'QDRO' in base:
+        return 'qdros'
+    if 'MARRIAGE' in base:
+        return 'pension_marriage'
+    if 'DEATH' in base:
+        return 'pension_death'
+    if 'SCHEDULE' in base:
+        return 'pension_schedule'
+    if 'SEPARATE' in base:
+        return 'pension_separate'
+    if 'RESULTS' in base:
+        return 'pension_results'
+    if 'PENSIONS' in base or 'PENSION' in base:
+        return 'pensions'

     raise ValueError(f"Unknown file type for filename: {filename}")
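
For reference, a short illustrative snippet of how the new substring matcher resolves a few sample filenames (the filenames are made up; the function is assumed importable from app.main):

# Illustrative filenames only; assumes the function above is importable.
for name in ["TRNSTYPE.csv", "ROLEX_V.CSV", "FILES_R_2024.csv", "deposits_export.csv"]:
    print(name, "->", get_import_type_from_filename(name))
# TRNSTYPE.csv -> trnstype
# ROLEX_V.CSV -> rolex_v
# FILES_R_2024.csv -> files_r
# deposits_export.csv -> deposits

Because matching is ordered, the more specific patterns (FILESTAT, FILETYPE, FILES_R, FILES_V, FILENOTS) are checked before the bare FILES/FILE fallback.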
@@ -874,23 +908,49 @@ def import_payments_data(db: Session, file_path: str) -> Dict[str, Any]:
 def process_csv_import(db: Session, import_type: str, file_path: str) -> Dict[str, Any]:
     """
-    Process CSV import based on type.
+    Process CSV import based on type using legacy import functions.

     Args:
         db: Database session
-        import_type: Type of import (client, phone, case, transaction, document, payment)
+        import_type: Type of import
         file_path: Path to CSV file

     Returns:
         Dict with import results
     """
     import_functions = {
-        'client': import_rolodex_data,
-        'phone': import_phone_data,
-        'case': import_files_data,
-        'transaction': import_ledger_data,
-        'document': import_qdros_data,
-        'payment': import_payments_data
+        # Reference tables (import first)
+        'trnstype': import_legacy.import_trnstype,
+        'trnslkup': import_legacy.import_trnslkup,
+        'footers': import_legacy.import_footers,
+        'filestat': import_legacy.import_filestat,
+        'employee': import_legacy.import_employee,
+        'gruplkup': import_legacy.import_gruplkup,
+        'filetype': import_legacy.import_filetype,
+        'fvarlkup': import_legacy.import_fvarlkup,
+        'rvarlkup': import_legacy.import_rvarlkup,
+        # Core data tables
+        'rolodex': import_legacy.import_rolodex,
+        'phone': import_legacy.import_phone,
+        'rolex_v': import_legacy.import_rolex_v,
+        'files': import_legacy.import_files,
+        'files_r': import_legacy.import_files_r,
+        'files_v': import_legacy.import_files_v,
+        'filenots': import_legacy.import_filenots,
+        'ledger': import_legacy.import_ledger,
+        'deposits': import_legacy.import_deposits,
+        'payments': import_legacy.import_payments,
+        # Specialized tables
+        'planinfo': import_legacy.import_planinfo,
+        'qdros': import_legacy.import_qdros,
+        'pensions': import_legacy.import_pensions,
+        'pension_marriage': import_legacy.import_pension_marriage,
+        'pension_death': import_legacy.import_pension_death,
+        'pension_schedule': import_legacy.import_pension_schedule,
+        'pension_separate': import_legacy.import_pension_separate,
+        'pension_results': import_legacy.import_pension_results,
     }

     import_func = import_functions.get(import_type)
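
A minimal sketch of how these two helpers are meant to compose during an upload (variable names below are placeholders, not code from this commit):

# Placeholder names; `db` is an open SQLAlchemy Session and `saved_path` is
# wherever the uploaded CSV was written on disk.
filename = "LEDGER.csv"
saved_path = "/tmp/uploads/LEDGER.csv"
import_type = get_import_type_from_filename(filename)    # -> 'ledger'
result = process_csv_import(db, import_type, saved_path)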
@@ -1566,7 +1626,17 @@ async def admin_import_data(
         return RedirectResponse(url="/login", status_code=302)

     # Validate data type
-    valid_types = ['client', 'phone', 'case', 'transaction', 'document', 'payment']
+    valid_types = [
+        # Reference tables
+        'trnstype', 'trnslkup', 'footers', 'filestat', 'employee',
+        'gruplkup', 'filetype', 'fvarlkup', 'rvarlkup',
+        # Core data tables
+        'rolodex', 'phone', 'rolex_v', 'files', 'files_r', 'files_v',
+        'filenots', 'ledger', 'deposits', 'payments',
+        # Specialized tables
+        'planinfo', 'qdros', 'pensions', 'pension_marriage',
+        'pension_death', 'pension_schedule', 'pension_separate', 'pension_results'
+    ]
     if data_type not in valid_types:
         return templates.TemplateResponse("admin.html", {
             "request": request,
@@ -1670,6 +1740,69 @@ async def admin_import_data(
     })


+@app.post("/admin/sync")
+async def admin_sync_data(
+    request: Request,
+    db: Session = Depends(get_db)
+):
+    """
+    Sync legacy database models to modern application models.
+
+    This route triggers the sync process to populate the simplified
+    modern models (Client, Phone, Case, Transaction, Payment, Document)
+    from the comprehensive legacy models.
+    """
+    # Check authentication
+    user = get_current_user_from_session(request.session)
+    if not user:
+        return RedirectResponse(url="/login", status_code=302)
+
+    # Get form data for confirmation
+    form = await request.form()
+    clear_existing = form.get("clear_existing") == "true"
+
+    try:
+        logger.info(
+            "admin_sync_starting",
+            clear_existing=clear_existing,
+            username=user.username
+        )
+
+        # Run all sync functions
+        results = sync_legacy_to_modern.sync_all(db, clear_existing=clear_existing)
+
+        # Calculate totals
+        total_synced = sum(r['success'] for r in results.values() if r)
+        total_skipped = sum(r['skipped'] for r in results.values() if r)
+        total_errors = sum(len(r['errors']) for r in results.values() if r)
+
+        logger.info(
+            "admin_sync_complete",
+            total_synced=total_synced,
+            total_skipped=total_skipped,
+            total_errors=total_errors,
+            username=user.username
+        )
+
+        return templates.TemplateResponse("admin.html", {
+            "request": request,
+            "user": user,
+            "sync_results": results,
+            "total_synced": total_synced,
+            "total_skipped": total_skipped,
+            "total_sync_errors": total_errors,
+            "show_sync_results": True
+        })
+
+    except Exception as e:
+        logger.error("admin_sync_failed", error=str(e), username=user.username)
+        return templates.TemplateResponse("admin.html", {
+            "request": request,
+            "user": user,
+            "error": f"Sync failed: {str(e)}"
+        })
+
+
 @app.get("/admin")
 async def admin_panel(request: Request, db: Session = Depends(get_db)):
     """

app/models.py

@@ -683,3 +683,68 @@ class PensionSeparate(Base):
     __table_args__ = (
         ForeignKeyConstraint(["file_no", "version"], ["pensions.file_no", "pensions.version"], ondelete="CASCADE"),
     )
+
+
+class FileType(Base):
+    """FILETYPE reference table for file/case types."""
+    __tablename__ = "filetype"
+
+    file_type = Column(String, primary_key=True)
+
+    def __repr__(self):
+        return f"<FileType(file_type='{self.file_type}')>"
+
+
+class FileNots(Base):
+    """FILENOTS table for file memos/notes."""
+    __tablename__ = "filenots"
+
+    file_no = Column(String, ForeignKey("files.file_no", ondelete="CASCADE"), primary_key=True)
+    memo_date = Column(Date, primary_key=True)
+    memo_note = Column(Text)
+
+    __table_args__ = (
+        Index("ix_filenots_file_no", "file_no"),
+    )
+
+    def __repr__(self):
+        return f"<FileNots(file_no='{self.file_no}', date='{self.memo_date}')>"
+
+
+class RolexV(Base):
+    """ROLEX_V variables per rolodex entry."""
+    __tablename__ = "rolex_v"
+
+    id = Column(String, ForeignKey("rolodex.id", ondelete="CASCADE"), primary_key=True)
+    identifier = Column(String, primary_key=True)
+    response = Column(Text)
+
+    __table_args__ = (
+        Index("ix_rolex_v_id", "id"),
+    )
+
+    def __repr__(self):
+        return f"<RolexV(id='{self.id}', identifier='{self.identifier}')>"
+
+
+class FVarLkup(Base):
+    """FVARLKUP file variable lookup table."""
+    __tablename__ = "fvarlkup"
+
+    identifier = Column(String, primary_key=True)
+    query = Column(Text)
+    response = Column(Text)
+
+    def __repr__(self):
+        return f"<FVarLkup(identifier='{self.identifier}')>"
+
+
+class RVarLkup(Base):
+    """RVARLKUP rolodex variable lookup table."""
+    __tablename__ = "rvarlkup"
+
+    identifier = Column(String, primary_key=True)
+    query = Column(Text)
+
+    def __repr__(self):
+        return f"<RVarLkup(identifier='{self.identifier}')>"

app/sync_legacy_to_modern.py

@@ -0,0 +1,528 @@
"""
Sync functions to populate modern models from legacy database tables.
This module provides functions to migrate data from the comprehensive legacy
schema to the simplified modern application models.
"""
from typing import Dict, Any
from sqlalchemy.orm import Session
from sqlalchemy.exc import IntegrityError
import structlog
from .models import (
# Legacy models
Rolodex, LegacyPhone, LegacyFile, Ledger, LegacyPayment, Qdros,
# Modern models
Client, Phone, Case, Transaction, Payment, Document
)
logger = structlog.get_logger(__name__)
BATCH_SIZE = 500
def sync_clients(db: Session, clear_existing: bool = False) -> Dict[str, Any]:
    """
    Sync Rolodex → Client.

    Maps legacy rolodex entries to modern simplified client records.
    """
    result = {'success': 0, 'errors': [], 'skipped': 0}

    try:
        # Optionally clear existing modern client data
        if clear_existing:
            logger.info("sync_clients_clearing_existing")
            db.query(Client).delete()
            db.commit()

        # Query all rolodex entries
        rolodex_entries = db.query(Rolodex).all()
        logger.info("sync_clients_processing", count=len(rolodex_entries))

        batch = []
        for rolex in rolodex_entries:
            try:
                # Build complete address from A1, A2, A3
                address_parts = [
                    rolex.a1 or '',
                    rolex.a2 or '',
                    rolex.a3 or ''
                ]
                address = ', '.join(filter(None, address_parts))

                # Create modern client record
                client = Client(
                    rolodex_id=rolex.id,
                    last_name=rolex.last,
                    first_name=rolex.first,
                    middle_initial=rolex.middle,
                    company=rolex.title,  # Using title as company name
                    address=address if address else None,
                    city=rolex.city,
                    state=rolex.abrev,
                    zip_code=rolex.zip
                )
                batch.append(client)

                if len(batch) >= BATCH_SIZE:
                    db.bulk_save_objects(batch)
                    db.commit()
                    result['success'] += len(batch)
                    batch = []

            except Exception as e:
                result['errors'].append(f"Rolodex ID {rolex.id}: {str(e)}")
                result['skipped'] += 1

        # Save remaining batch
        if batch:
            db.bulk_save_objects(batch)
            db.commit()
            result['success'] += len(batch)

        logger.info("sync_clients_complete", **result)

    except Exception as e:
        db.rollback()
        result['errors'].append(f"Fatal error: {str(e)}")
        logger.error("sync_clients_failed", error=str(e))

    return result
def sync_phones(db: Session, clear_existing: bool = False) -> Dict[str, Any]:
    """
    Sync LegacyPhone → Phone.

    Links phone numbers to modern client records via rolodex_id.
    """
    result = {'success': 0, 'errors': [], 'skipped': 0}

    try:
        # Optionally clear existing phone data
        if clear_existing:
            logger.info("sync_phones_clearing_existing")
            db.query(Phone).delete()
            db.commit()

        # Build lookup map: rolodex_id → client.id
        clients = db.query(Client).all()
        rolodex_to_client = {c.rolodex_id: c.id for c in clients}
        logger.info("sync_phones_client_map", client_count=len(rolodex_to_client))

        # Query all legacy phones
        legacy_phones = db.query(LegacyPhone).all()
        logger.info("sync_phones_processing", count=len(legacy_phones))

        batch = []
        for lphone in legacy_phones:
            try:
                # Find corresponding modern client
                client_id = rolodex_to_client.get(lphone.id)
                if not client_id:
                    result['errors'].append(f"No client found for rolodex ID: {lphone.id}")
                    result['skipped'] += 1
                    continue

                # Create modern phone record
                phone = Phone(
                    client_id=client_id,
                    phone_type=lphone.location if lphone.location else 'unknown',
                    phone_number=lphone.phone,
                    extension=None
                )
                batch.append(phone)

                if len(batch) >= BATCH_SIZE:
                    db.bulk_save_objects(batch)
                    db.commit()
                    result['success'] += len(batch)
                    batch = []

            except Exception as e:
                result['errors'].append(f"Phone {lphone.id}/{lphone.phone}: {str(e)}")
                result['skipped'] += 1

        # Save remaining batch
        if batch:
            db.bulk_save_objects(batch)
            db.commit()
            result['success'] += len(batch)

        logger.info("sync_phones_complete", **result)

    except Exception as e:
        db.rollback()
        result['errors'].append(f"Fatal error: {str(e)}")
        logger.error("sync_phones_failed", error=str(e))

    return result
def sync_cases(db: Session, clear_existing: bool = False) -> Dict[str, Any]:
    """
    Sync LegacyFile → Case.

    Converts legacy file cabinet entries to modern case records.
    """
    result = {'success': 0, 'errors': [], 'skipped': 0}

    try:
        # Optionally clear existing case data
        if clear_existing:
            logger.info("sync_cases_clearing_existing")
            db.query(Case).delete()
            db.commit()

        # Build lookup map: rolodex_id → client.id
        clients = db.query(Client).all()
        rolodex_to_client = {c.rolodex_id: c.id for c in clients}
        logger.info("sync_cases_client_map", client_count=len(rolodex_to_client))

        # Query all legacy files
        legacy_files = db.query(LegacyFile).all()
        logger.info("sync_cases_processing", count=len(legacy_files))

        batch = []
        for lfile in legacy_files:
            try:
                # Find corresponding modern client
                client_id = rolodex_to_client.get(lfile.id)
                if not client_id:
                    result['errors'].append(f"No client found for rolodex ID: {lfile.id} (file {lfile.file_no})")
                    result['skipped'] += 1
                    continue

                # Map legacy status to modern status
                status = 'active'
                if lfile.closed:
                    status = 'closed'
                elif lfile.status and 'inactive' in lfile.status.lower():
                    status = 'inactive'

                # Create modern case record
                case = Case(
                    file_no=lfile.file_no,
                    client_id=client_id,
                    status=status,
                    case_type=lfile.file_type,
                    description=lfile.regarding,
                    open_date=lfile.opened,
                    close_date=lfile.closed
                )
                batch.append(case)

                if len(batch) >= BATCH_SIZE:
                    db.bulk_save_objects(batch)
                    db.commit()
                    result['success'] += len(batch)
                    batch = []

            except Exception as e:
                result['errors'].append(f"File {lfile.file_no}: {str(e)}")
                result['skipped'] += 1

        # Save remaining batch
        if batch:
            db.bulk_save_objects(batch)
            db.commit()
            result['success'] += len(batch)

        logger.info("sync_cases_complete", **result)

    except Exception as e:
        db.rollback()
        result['errors'].append(f"Fatal error: {str(e)}")
        logger.error("sync_cases_failed", error=str(e))

    return result
def sync_transactions(db: Session, clear_existing: bool = False) -> Dict[str, Any]:
    """
    Sync Ledger → Transaction.

    Converts legacy ledger entries to modern transaction records.
    """
    result = {'success': 0, 'errors': [], 'skipped': 0}

    try:
        # Optionally clear existing transaction data
        if clear_existing:
            logger.info("sync_transactions_clearing_existing")
            db.query(Transaction).delete()
            db.commit()

        # Build lookup map: file_no → case.id
        cases = db.query(Case).all()
        file_no_to_case = {c.file_no: c.id for c in cases}
        logger.info("sync_transactions_case_map", case_count=len(file_no_to_case))

        # Query all ledger entries
        ledger_entries = db.query(Ledger).all()
        logger.info("sync_transactions_processing", count=len(ledger_entries))

        batch = []
        for ledger in ledger_entries:
            try:
                # Find corresponding modern case
                case_id = file_no_to_case.get(ledger.file_no)
                if not case_id:
                    result['errors'].append(f"No case found for file: {ledger.file_no}")
                    result['skipped'] += 1
                    continue

                # Create modern transaction record with all ledger fields
                transaction = Transaction(
                    case_id=case_id,
                    transaction_date=ledger.date,
                    transaction_type=ledger.t_type,
                    amount=float(ledger.amount) if ledger.amount else None,
                    description=ledger.note,
                    reference=str(ledger.item_no) if ledger.item_no else None,
                    # Ledger-specific fields
                    item_no=ledger.item_no,
                    employee_number=ledger.empl_num,
                    t_code=ledger.t_code,
                    t_type_l=ledger.t_type_l,
                    quantity=float(ledger.quantity) if ledger.quantity else None,
                    rate=float(ledger.rate) if ledger.rate else None,
                    billed=ledger.billed
                )
                batch.append(transaction)

                if len(batch) >= BATCH_SIZE:
                    db.bulk_save_objects(batch)
                    db.commit()
                    result['success'] += len(batch)
                    batch = []

            except Exception as e:
                result['errors'].append(f"Ledger {ledger.file_no}/{ledger.item_no}: {str(e)}")
                result['skipped'] += 1

        # Save remaining batch
        if batch:
            db.bulk_save_objects(batch)
            db.commit()
            result['success'] += len(batch)

        logger.info("sync_transactions_complete", **result)

    except Exception as e:
        db.rollback()
        result['errors'].append(f"Fatal error: {str(e)}")
        logger.error("sync_transactions_failed", error=str(e))

    return result
def sync_payments(db: Session, clear_existing: bool = False) -> Dict[str, Any]:
    """
    Sync LegacyPayment → Payment.

    Converts legacy payment entries to modern payment records.
    """
    result = {'success': 0, 'errors': [], 'skipped': 0}

    try:
        # Optionally clear existing payment data
        if clear_existing:
            logger.info("sync_payments_clearing_existing")
            db.query(Payment).delete()
            db.commit()

        # Build lookup map: file_no → case.id
        cases = db.query(Case).all()
        file_no_to_case = {c.file_no: c.id for c in cases}
        logger.info("sync_payments_case_map", case_count=len(file_no_to_case))

        # Query all legacy payments
        legacy_payments = db.query(LegacyPayment).all()
        logger.info("sync_payments_processing", count=len(legacy_payments))

        batch = []
        for lpay in legacy_payments:
            try:
                # Find corresponding modern case
                if not lpay.file_no:
                    result['skipped'] += 1
                    continue

                case_id = file_no_to_case.get(lpay.file_no)
                if not case_id:
                    result['errors'].append(f"No case found for file: {lpay.file_no}")
                    result['skipped'] += 1
                    continue

                # Create modern payment record
                payment = Payment(
                    case_id=case_id,
                    payment_date=lpay.deposit_date,
                    payment_type='deposit',  # Legacy doesn't distinguish
                    amount=float(lpay.amount) if lpay.amount else None,
                    description=lpay.note if lpay.note else lpay.regarding,
                    check_number=None  # Not in legacy PAYMENTS table
                )
                batch.append(payment)

                if len(batch) >= BATCH_SIZE:
                    db.bulk_save_objects(batch)
                    db.commit()
                    result['success'] += len(batch)
                    batch = []

            except Exception as e:
                result['errors'].append(f"Payment {lpay.id}: {str(e)}")
                result['skipped'] += 1

        # Save remaining batch
        if batch:
            db.bulk_save_objects(batch)
            db.commit()
            result['success'] += len(batch)

        logger.info("sync_payments_complete", **result)

    except Exception as e:
        db.rollback()
        result['errors'].append(f"Fatal error: {str(e)}")
        logger.error("sync_payments_failed", error=str(e))

    return result
def sync_documents(db: Session, clear_existing: bool = False) -> Dict[str, Any]:
    """
    Sync Qdros → Document.

    Converts QDRO entries to modern document records.
    """
    result = {'success': 0, 'errors': [], 'skipped': 0}

    try:
        # Optionally clear existing document data
        if clear_existing:
            logger.info("sync_documents_clearing_existing")
            db.query(Document).delete()
            db.commit()

        # Build lookup map: file_no → case.id
        cases = db.query(Case).all()
        file_no_to_case = {c.file_no: c.id for c in cases}
        logger.info("sync_documents_case_map", case_count=len(file_no_to_case))

        # Query all QDRO entries
        qdros = db.query(Qdros).all()
        logger.info("sync_documents_processing", count=len(qdros))

        batch = []
        for qdro in qdros:
            try:
                # Find corresponding modern case
                case_id = file_no_to_case.get(qdro.file_no)
                if not case_id:
                    result['errors'].append(f"No case found for file: {qdro.file_no}")
                    result['skipped'] += 1
                    continue

                # Build description from QDRO fields
                desc_parts = []
                if qdro.case_type:
                    desc_parts.append(f"Type: {qdro.case_type}")
                if qdro.case_number:
                    desc_parts.append(f"Case#: {qdro.case_number}")
                if qdro.plan_id:
                    desc_parts.append(f"Plan: {qdro.plan_id}")
                description = '; '.join(desc_parts) if desc_parts else None

                # Create modern document record
                document = Document(
                    case_id=case_id,
                    document_type='QDRO',
                    file_name=qdro.form_name,
                    file_path=None,  # Legacy doesn't have file paths
                    description=description,
                    uploaded_date=qdro.draft_out if qdro.draft_out else qdro.judgment_date
                )
                batch.append(document)

                if len(batch) >= BATCH_SIZE:
                    db.bulk_save_objects(batch)
                    db.commit()
                    result['success'] += len(batch)
                    batch = []

            except Exception as e:
                result['errors'].append(f"QDRO {qdro.file_no}/{qdro.version}: {str(e)}")
                result['skipped'] += 1

        # Save remaining batch
        if batch:
            db.bulk_save_objects(batch)
            db.commit()
            result['success'] += len(batch)

        logger.info("sync_documents_complete", **result)

    except Exception as e:
        db.rollback()
        result['errors'].append(f"Fatal error: {str(e)}")
        logger.error("sync_documents_failed", error=str(e))

    return result
def sync_all(db: Session, clear_existing: bool = False) -> Dict[str, Any]:
    """
    Run all sync functions in proper order.

    Order matters due to foreign key dependencies:
    1. Clients (no dependencies)
    2. Phones (depends on Clients)
    3. Cases (depends on Clients)
    4. Transactions (depends on Cases)
    5. Payments (depends on Cases)
    6. Documents (depends on Cases)
    """
    results = {
        'clients': None,
        'phones': None,
        'cases': None,
        'transactions': None,
        'payments': None,
        'documents': None
    }

    logger.info("sync_all_starting", clear_existing=clear_existing)

    try:
        results['clients'] = sync_clients(db, clear_existing)
        logger.info("sync_all_clients_done", success=results['clients']['success'])

        results['phones'] = sync_phones(db, clear_existing)
        logger.info("sync_all_phones_done", success=results['phones']['success'])

        results['cases'] = sync_cases(db, clear_existing)
        logger.info("sync_all_cases_done", success=results['cases']['success'])

        results['transactions'] = sync_transactions(db, clear_existing)
        logger.info("sync_all_transactions_done", success=results['transactions']['success'])

        results['payments'] = sync_payments(db, clear_existing)
        logger.info("sync_all_payments_done", success=results['payments']['success'])

        results['documents'] = sync_documents(db, clear_existing)
        logger.info("sync_all_documents_done", success=results['documents']['success'])

        logger.info("sync_all_complete")

    except Exception as e:
        logger.error("sync_all_failed", error=str(e))
        raise

    return results
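
A minimal sketch of running the full sync from a one-off script rather than the /admin/sync route. The SessionLocal session factory location is an assumption about the project layout.

from app.database import SessionLocal   # assumed session factory location
from app import sync_legacy_to_modern

db = SessionLocal()
try:
    results = sync_legacy_to_modern.sync_all(db, clear_existing=False)
    for table, r in results.items():
        if r:
            print(f"{table}: {r['success']} synced, {r['skipped']} skipped, {len(r['errors'])} errors")
finally:
    db.close()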

app/templates/admin.html

@@ -49,7 +49,8 @@
 </label>
 <input type="file" class="form-control" id="files" name="files" multiple accept=".csv">
 <div class="form-text">
-    Supported formats: ROLODEX*.csv, PHONE*.csv, FILES*.csv, LEDGER*.csv, QDROS*.csv, PAYMENTS*.csv
+    <strong>Supported formats:</strong> ROLODEX, PHONE, FILES, LEDGER, PAYMENTS, DEPOSITS, QDROS, PENSIONS, PLANINFO,
+    TRNSTYPE, TRNSLKUP, FOOTERS, FILESTAT, EMPLOYEE, GRUPLKUP, FILETYPE, and all related tables (*.csv)
 </div>
 </div>
 <button type="submit" class="btn btn-primary">
@@ -136,6 +137,156 @@
 </div>
 {% endif %}

+<!-- Import Order Guide -->
+<div class="card mb-4">
+    <div class="card-header bg-info text-white">
+        <h5 class="mb-0">
+            <i class="bi bi-list-ol me-2"></i>Import Order Guide
+        </h5>
+    </div>
+    <div class="card-body">
+        <p class="mb-3">For best results, import tables in this recommended order:</p>
+        <div class="row">
+            <div class="col-md-6">
+                <h6 class="text-primary"><i class="bi bi-1-circle me-2"></i>Reference Tables (Import First)</h6>
+                <ul class="list-unstyled ms-3">
+                    <li><i class="bi bi-arrow-right me-2"></i>TRNSTYPE</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>TRNSLKUP</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>FOOTERS</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>FILESTAT</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>EMPLOYEE</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>GRUPLKUP</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>FILETYPE</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>FVARLKUP, RVARLKUP</li>
+                </ul>
+            </div>
+            <div class="col-md-6">
+                <h6 class="text-success"><i class="bi bi-2-circle me-2"></i>Core Data Tables</h6>
+                <ul class="list-unstyled ms-3">
+                    <li><i class="bi bi-arrow-right me-2"></i>ROLODEX</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>PHONE, ROLEX_V</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>FILES (+ FILES_R, FILES_V, FILENOTS)</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>LEDGER</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>DEPOSITS, PAYMENTS</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>PLANINFO</li>
+                    <li><i class="bi bi-arrow-right me-2"></i>QDROS, PENSIONS (+ related tables)</li>
+                </ul>
+            </div>
+        </div>
+        <div class="alert alert-warning mt-3 mb-0">
+            <i class="bi bi-exclamation-triangle me-2"></i>
+            <strong>Important:</strong> Reference tables must be imported before core data to avoid foreign key errors.
+        </div>
+    </div>
+</div>
+
+<!-- Sync to Modern Models -->
+<div class="card mb-4">
+    <div class="card-header bg-success text-white">
+        <h5 class="mb-0">
+            <i class="bi bi-arrow-repeat me-2"></i>Sync to Modern Models
+        </h5>
+    </div>
+    <div class="card-body">
+        <p>After importing legacy CSV data, sync it to the simplified modern application models (Client, Phone, Case, Transaction, Payment, Document).</p>
+        <form action="/admin/sync" method="post" id="syncForm">
+            <div class="mb-3">
+                <div class="form-check">
+                    <input class="form-check-input" type="checkbox" id="clearExisting" name="clear_existing" value="true">
+                    <label class="form-check-label" for="clearExisting">
+                        <strong>Clear existing modern data before sync</strong>
+                        <br>
+                        <small class="text-muted">Warning: This will delete all current Client, Phone, Case, Transaction, Payment, and Document records!</small>
+                    </label>
+                </div>
+            </div>
+            <button type="button" class="btn btn-success" onclick="confirmSync()">
+                <i class="bi bi-arrow-repeat me-2"></i>Start Sync Process
+            </button>
+        </form>
+    </div>
+</div>
+
+<!-- Sync Results -->
+{% if show_sync_results and sync_results %}
+<div class="card mb-4">
+    <div class="card-header bg-success text-white">
+        <h5 class="mb-0">
+            <i class="bi bi-check-circle me-2"></i>Sync Results
+        </h5>
+    </div>
+    <div class="card-body">
+        <div class="row mb-3">
+            <div class="col-md-3">
+                <div class="card bg-light">
+                    <div class="card-body text-center">
+                        <h3 class="mb-0 text-success">{{ total_synced or 0 }}</h3>
+                        <small class="text-muted">Records Synced</small>
+                    </div>
+                </div>
+            </div>
+            <div class="col-md-3">
+                <div class="card bg-light">
+                    <div class="card-body text-center">
+                        <h3 class="mb-0 text-warning">{{ total_skipped or 0 }}</h3>
+                        <small class="text-muted">Records Skipped</small>
+                    </div>
+                </div>
+            </div>
+            <div class="col-md-3">
+                <div class="card bg-light">
+                    <div class="card-body text-center">
+                        <h3 class="mb-0 text-danger">{{ total_sync_errors or 0 }}</h3>
+                        <small class="text-muted">Errors</small>
+                    </div>
+                </div>
+            </div>
+        </div>
+        <h6 class="mb-3">Detailed Results by Table:</h6>
+        <div class="table-responsive">
+            <table class="table table-sm table-bordered">
+                <thead>
+                    <tr>
+                        <th>Modern Table</th>
+                        <th>Synced</th>
+                        <th>Skipped</th>
+                        <th>Errors</th>
+                    </tr>
+                </thead>
+                <tbody>
+                    {% for table_name, result in sync_results.items() %}
+                    <tr>
+                        <td><strong>{{ table_name.title() }}</strong></td>
+                        <td class="text-success">{{ result.success }}</td>
+                        <td class="text-warning">{{ result.skipped }}</td>
+                        <td class="text-danger">{{ result.errors|length }}</td>
+                    </tr>
+                    {% if result.errors %}
+                    <tr>
+                        <td colspan="4">
+                            <details>
+                                <summary class="text-danger">View Errors ({{ result.errors|length }})</summary>
+                                <ul class="mt-2 mb-0">
+                                    {% for error in result.errors[:10] %}
+                                    <li><small>{{ error }}</small></li>
+                                    {% endfor %}
+                                    {% if result.errors|length > 10 %}
+                                    <li><small><em>... and {{ result.errors|length - 10 }} more errors</em></small></li>
+                                    {% endif %}
+                                </ul>
+                            </details>
+                        </td>
+                    </tr>
+                    {% endif %}
+                    {% endfor %}
+                </tbody>
+            </table>
+        </div>
+    </div>
+</div>
+{% endif %}
+
 <!-- Import Section -->
 <div class="card mb-4">
     <div class="card-header bg-warning">
@@ -425,5 +576,20 @@ document.addEventListener('DOMContentLoaded', function() {
         }
     });
 });
+
+// Sync confirmation function
+function confirmSync() {
+    const clearCheckbox = document.getElementById('clearExisting');
+    const clearExisting = clearCheckbox.checked;
+
+    let message = "Are you sure you want to sync legacy data to modern models?";
+    if (clearExisting) {
+        message += "\n\n⚠ WARNING: This will DELETE all existing Client, Phone, Case, Transaction, Payment, and Document records before syncing!";
+    }
+
+    if (confirm(message)) {
+        document.getElementById('syncForm').submit();
+    }
+}
 </script>
 {% endblock %}

delphi.db (binary file; diff not shown)