coming together
This commit is contained in:
@@ -7,9 +7,10 @@ from .rolodex import Rolodex, Phone
|
||||
from .files import File
|
||||
from .ledger import Ledger
|
||||
from .qdro import QDRO
|
||||
from .audit import AuditLog, LoginAttempt
|
||||
from .audit import AuditLog, LoginAttempt, ImportAudit, ImportAuditFile
|
||||
from .auth import RefreshToken
|
||||
from .additional import Deposit, Payment, FileNote, FormVariable, ReportVariable, Document
|
||||
from .flexible import FlexibleImport
|
||||
from .support import SupportTicket, TicketResponse, TicketStatus, TicketPriority, TicketCategory
|
||||
from .pensions import (
|
||||
Pension, PensionSchedule, MarriageHistory, DeathBenefit,
|
||||
@@ -23,8 +24,8 @@ from .lookups import (
|
||||
|
||||
__all__ = [
|
||||
"BaseModel", "User", "Rolodex", "Phone", "File", "Ledger", "QDRO",
|
||||
"AuditLog", "LoginAttempt", "RefreshToken",
|
||||
"Deposit", "Payment", "FileNote", "FormVariable", "ReportVariable", "Document",
|
||||
"AuditLog", "LoginAttempt", "ImportAudit", "ImportAuditFile", "RefreshToken",
|
||||
"Deposit", "Payment", "FileNote", "FormVariable", "ReportVariable", "Document", "FlexibleImport",
|
||||
"SupportTicket", "TicketResponse", "TicketStatus", "TicketPriority", "TicketCategory",
|
||||
"Pension", "PensionSchedule", "MarriageHistory", "DeathBenefit",
|
||||
"SeparationAgreement", "LifeTable", "NumberTable",
|
||||
|
||||
@@ -46,4 +46,57 @@ class LoginAttempt(BaseModel):
|
||||
failure_reason = Column(String(200), nullable=True) # Reason for failure
|
||||
|
||||
def __repr__(self):
|
||||
return f"<LoginAttempt(username='{self.username}', success={bool(self.success)}, timestamp='{self.timestamp}')>"
|
||||
return f"<LoginAttempt(username='{self.username}', success={bool(self.success)}, timestamp='{self.timestamp}')>"
|
||||
|
||||
|
||||
class ImportAudit(BaseModel):
    """
    Records each batch CSV upload run with metrics and outcome.

    One row is created per import run; per-file results hang off the
    ``files`` relationship (``ImportAuditFile``).
    """
    __tablename__ = "import_audit"

    id = Column(Integer, primary_key=True, autoincrement=True, index=True)
    # NOTE(review): datetime.utcnow stores naive UTC timestamps; deprecated in
    # Python 3.12+ — confirm whether the project plans to move to aware datetimes.
    started_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)
    # Remains NULL while the run is still in progress.
    finished_at = Column(DateTime, nullable=True, index=True)
    status = Column(String(30), nullable=False, default="running", index=True)  # running|success|completed_with_errors|failed

    # Aggregate counters for the run; all default to 0 at row creation.
    total_files = Column(Integer, nullable=False, default=0)
    successful_files = Column(Integer, nullable=False, default=0)
    failed_files = Column(Integer, nullable=False, default=0)
    total_imported = Column(Integer, nullable=False, default=0)
    total_errors = Column(Integer, nullable=False, default=0)

    # Who triggered the run. Both nullable — presumably to survive user
    # deletion and to allow system-initiated runs; verify against callers.
    initiated_by_user_id = Column(Integer, ForeignKey("users.id"), nullable=True)
    initiated_by_username = Column(String(100), nullable=True)

    # Short human-readable outcome summary.
    message = Column(String(255), nullable=True)
    details = Column(JSON, nullable=True)  # optional, compact summary payload

    user = relationship("User")
    # Deleting an audit row removes its per-file children (delete-orphan cascade).
    files = relationship("ImportAuditFile", back_populates="audit", cascade="all, delete-orphan")

    def __repr__(self):
        # Compact debug form: status plus success/total file counts and row metrics.
        return (
            f"<ImportAudit(id={self.id}, status='{self.status}', files={self.successful_files}/{self.total_files}, "
            f"imported={self.total_imported}, errors={self.total_errors})>"
        )
|
||||
|
||||
|
||||
class ImportAuditFile(BaseModel):
    """Per-file result for a given batch import run.

    Child of ``ImportAudit`` (one row per processed file in a run).
    """
    __tablename__ = "import_audit_files"

    id = Column(Integer, primary_key=True, autoincrement=True, index=True)
    # Parent run; DB-level ON DELETE CASCADE mirrors the ORM cascade on
    # ImportAudit.files.
    audit_id = Column(Integer, ForeignKey("import_audit.id", ondelete="CASCADE"), nullable=False, index=True)
    # Which CSV this row describes (e.g. a file-type key used by the importer).
    file_type = Column(String(64), nullable=False, index=True)
    # Per-file outcome; no default — presumably set when the file finishes.
    # NOTE(review): value set looks like the parent's status vocabulary — confirm.
    status = Column(String(30), nullable=False, index=True)
    imported_count = Column(Integer, nullable=False, default=0)
    errors = Column(Integer, nullable=False, default=0)
    # Short human-readable summary and optional structured detail payload.
    message = Column(String(255), nullable=True)
    details = Column(JSON, nullable=True)
    # NOTE(review): naive UTC via datetime.utcnow (deprecated in 3.12+).
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False, index=True)

    audit = relationship("ImportAudit", back_populates="files")

    def __repr__(self):
        return f"<ImportAuditFile(audit_id={self.audit_id}, file='{self.file_type}', status='{self.status}', imported={self.imported_count}, errors={self.errors})>"
|
||||
37 lines — app/models/flexible.py (new file)
@@ -0,0 +1,37 @@
|
||||
"""
|
||||
Flexible storage for unmapped CSV columns during import
|
||||
"""
|
||||
from sqlalchemy import Column, Integer, String
|
||||
from sqlalchemy.types import JSON
|
||||
|
||||
from app.models.base import BaseModel
|
||||
|
||||
|
||||
class FlexibleImport(BaseModel):
    """Stores per-row extra/unmapped data for any import, without persisting mapping patterns.

    Each row captures the CSV columns that did not map onto the target
    model, plus an optional pointer back to the record that was created.
    """

    __tablename__ = "flexible_imports"

    id = Column(Integer, primary_key=True, autoincrement=True)

    # The CSV filename used by the importer (e.g., "FILES.csv" or arbitrary names in flexible mode)
    file_type = Column(String(120), nullable=False, index=True)

    # The SQLAlchemy model table this extra data is associated with (if any)
    target_table = Column(String(120), nullable=True, index=True)

    # Optional link to the primary record created in the target table.
    # Stored as a (field name, stringified value) pair rather than a real FK,
    # since the target table varies per row.
    primary_key_field = Column(String(120), nullable=True)
    primary_key_value = Column(String(255), nullable=True, index=True)

    # Extra unmapped columns from the CSV row
    extra_data = Column(JSON, nullable=False)

    def __repr__(self) -> str:  # pragma: no cover - repr utility
        return (
            f"<FlexibleImport(id={self.id}, file_type='{self.file_type}', "
            f"target_table='{self.target_table}', pk_field='{self.primary_key_field}', "
            f"pk_value='{self.primary_key_value}')>"
        )
|
||||
|
||||
|
||||
Reference in New Issue
Block a user