Improve Rolodex imports, display, and add repair script
tests/test_import_data.py (new file, 130 lines added)
@@ -0,0 +1,130 @@
"""Tests for Rolodex and Files CSV import helpers."""

from __future__ import annotations

from pathlib import Path

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from app.main import import_files_data, import_rolodex_data
from app.models import Base, Case, Client


def _make_engine(db_path: Path):
    engine = create_engine(f"sqlite:///{db_path}")
    Base.metadata.create_all(engine)
    return engine


@pytest.fixture()
def session(tmp_path):
    engine = _make_engine(tmp_path / "import-tests.db")
    SessionLocal = sessionmaker(bind=engine)
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
        engine.dispose()


def _write_csv(path: Path, content: str) -> Path:
    path.write_text(content, encoding="utf-8")
    return path


def test_import_rolodex_captures_email_and_memo(session, tmp_path):
    csv_path = _write_csv(
        tmp_path / "ROLODEX.csv",
        "Id,Prefix,First,Middle,Last,Suffix,Title,A1,A2,A3,City,Abrev,St,Zip,Email,DOB,SS#,Legal_Status,Group,Memo\n"
        "1001,,Ada,,Lovelace,,Countess,123 Main,,,London,,UK,12345,ada@example.com,12/10/1815,XXX-XX-1111,Active,VIP,Top client notes\n"
        "1002,,Alan,,Turing,,,43 Park,,Bletchley,,UK,67890,,06/23/1912,XXX-XX-2222,Active,VIP, \n",
    )

    result = import_rolodex_data(session, str(csv_path))

    assert result["success"] == 2
    assert result["email_imported"] == 1
    assert result["email_missing"] == 1
    assert result["memo_imported"] == 1
    assert result["memo_missing"] == 1
    assert not result["errors"]

    clients = {c.rolodex_id: c for c in session.query(Client).all()}
    assert clients["1001"].email == "ada@example.com"
    assert clients["1001"].memo == "Top client notes"
    assert clients["1002"].email is None
    assert clients["1002"].memo is None


def test_import_rolodex_handles_duplicates(session, tmp_path):
    csv_path = _write_csv(
        tmp_path / "ROLODEX.csv",
        "Id,Prefix,First,Middle,Last,Suffix,Title,A1,A2,A3,City,Abrev,St,Zip,Email,DOB,SS#,Legal_Status,Group,Memo\n"
        "3001,,Grace,,Hopper,,,,,Arlington,VA,Virginia,22202,grace@example.com,12/09/1906,XXX-XX-3333,Active,VIP,Notes\n"
        "3001,,Grace,,Hopper,,,,,Arlington,VA,Virginia,22202,grace@example.com,12/09/1906,XXX-XX-3333,Active,VIP,Duplicate\n",
    )

    result = import_rolodex_data(session, str(csv_path))

    assert result["success"] == 1
    assert result["skipped_duplicates"] == 1
    assert len(result["errors"]) == 0


def test_import_files_links_clients_and_detects_missing(session, tmp_path):
    _write_csv(
        tmp_path / "ROLODEX.csv",
        "Id,Prefix,First,Middle,Last,Suffix,Title,A1,A2,A3,City,Abrev,St,Zip,Email,DOB,SS#,Legal_Status,Group,Memo\n"
        "2001,,Grace,,Hopper,,,,,Arlington,VA,Virginia,22202,grace@example.com,12/09/1906,XXX-XX-3333,Active,VIP,Notes\n",
    )
    import_rolodex_data(session, str(tmp_path / "ROLODEX.csv"))

    files_path = _write_csv(
        tmp_path / "FILES.csv",
        "File_No,Id,File_Type,Regarding,Opened,Closed,Empl_Num,Rate_Per_Hour,Status,Footer_Code,Opposing,Hours,Hours_P,Trust_Bal,Trust_Bal_P,Hourly_Fees,Hourly_Fees_P,Flat_Fees,Flat_Fees_P,Disbursements,Disbursements_P,Credit_Bal,Credit_Bal_P,Total_Charges,Total_Charges_P,Amount_Owing,Amount_Owing_P,Transferable,Memo\n"
        "F-001,2001,Divorce,Important matter,01/01/2020,,E1,175,Open,F1,,,0,0,0,0,0,0,0,0,0,0,0,0,0,0,No,Initial legacy memo\n"
        "F-002,9999,Divorce,Missing client,01/01/2020,,E1,175,Open,F1,,,0,0,0,0,0,0,0,0,0,0,0,0,0,0,No,Should be skipped\n",
    )

    result = import_files_data(session, str(files_path))

    assert result["success"] == 1
    assert result["client_linked"] == 1
    assert result["client_missing"] == 1
    assert result["errors"], "Expected missing client to produce an error"

    case = session.query(Case).filter(Case.file_no == "F-001").one()
    assert case.client is not None
    assert case.client.rolodex_id == "2001"

    missing_case = session.query(Case).filter(Case.file_no == "F-002").first()
    assert missing_case is None


def test_import_files_respects_duplicates(session, tmp_path):
    _write_csv(
        tmp_path / "ROLODEX.csv",
        "Id,Prefix,First,Middle,Last,Suffix,Title,A1,A2,A3,City,Abrev,St,Zip,Email,DOB,SS#,Legal_Status,Group,Memo\n"
        "4001,,Marie,,Curie,,,,,Paris,,France,75000,marie@example.com,11/07/1867,XXX-XX-4444,Active,VIP,Notes\n",
    )
    import_rolodex_data(session, str(tmp_path / "ROLODEX.csv"))

    files_csv = _write_csv(
        tmp_path / "FILES.csv",
        "File_No,Id,File_Type,Regarding,Opened,Closed,Empl_Num,Rate_Per_Hour,Status,Footer_Code,Opposing,Hours,Hours_P,Trust_Bal,Trust_Bal_P,Hourly_Fees,Hourly_Fees_P,Flat_Fees,Flat_Fees_P,Disbursements,Disbursements_P,Credit_Bal,Credit_Bal_P,Total_Charges,Total_Charges_P,Amount_Owing,Amount_Owing_P,Transferable,Memo\n"
        "F-100,4001,Divorce,Legacy matter,01/01/2020,,E1,175,Open,F1,,,0,0,0,0,0,0,0,0,0,0,0,0,0,0,No,First import\n"
        "F-100,4001,Divorce,Legacy matter,01/01/2020,,E1,175,Open,F1,,,0,0,0,0,0,0,0,0,0,0,0,0,0,0,No,Duplicate row\n",
    )

    first_result = import_files_data(session, str(files_csv))
    assert first_result["success"] == 1
    assert first_result["skipped_duplicates"] == 0

    second_result = import_files_data(session, str(files_csv))
    assert second_result["success"] == 0
    assert second_result["skipped_duplicates"] == 1
    assert not session.query(Case).filter(Case.file_no == "F-100").all()[1:]