From 84c3dac83a41fdcbbabe8ff92109385cfc286794 Mon Sep 17 00:00:00 2001
From: HotSwapp <47397945+HotSwapp@users.noreply.github.com>
Date: Mon, 13 Oct 2025 15:00:13 -0500
Subject: [PATCH] Improve Rolodex imports, display, and add repair script
---
app/main.py | 103 +++++++++++++++++++++++--
app/templates/rolodex_view.html | 28 ++++--
delphi.db | Bin 37187584 -> 37187584 bytes
scripts/fix_case_links.py | 97 +++++++++++++++++++++++
static/js/custom.js | 12 ++-
tests/test_import_data.py | 133 ++++++++++++++++++++++++++++++++
6 files changed, 355 insertions(+), 18 deletions(-)
create mode 100644 scripts/fix_case_links.py
create mode 100644 tests/test_import_data.py
diff --git a/app/main.py b/app/main.py
index b0c44b3..7794179 100644
--- a/app/main.py
+++ b/app/main.py
@@ -29,7 +29,7 @@ import structlog
from structlog import contextvars as structlog_contextvars
from .database import create_tables, get_db, get_database_url
-from .models import User, Case, Client, Phone, Transaction, Document, Payment, ImportLog, Qdros
+from .models import User, Case, Client, Phone, Transaction, Document, Payment, ImportLog, Qdros, LegacyFile
from .auth import authenticate_user, get_current_user_from_session
from .reporting import (
build_phone_book_pdf,
@@ -578,7 +578,13 @@ def import_rolodex_data(db: Session, file_path: str) -> Dict[str, Any]:
result = {
'success': 0,
'errors': [],
- 'total_rows': 0
+ 'total_rows': 0,
+ 'memo_imported': 0,
+ 'memo_missing': 0,
+ 'email_imported': 0,
+ 'email_missing': 0,
+ 'skipped_duplicates': 0,
+ 'encoding_used': None,
}
expected_fields = {
@@ -606,6 +612,7 @@ def import_rolodex_data(db: Session, file_path: str) -> Dict[str, Any]:
try:
f, used_encoding = open_text_with_fallbacks(file_path)
+ result['encoding_used'] = used_encoding
with f as file:
reader = csv.DictReader(file)
@@ -630,7 +637,13 @@ def import_rolodex_data(db: Session, file_path: str) -> Dict[str, Any]:
# Check for existing client
existing = db.query(Client).filter(Client.rolodex_id == rolodex_id).first()
if existing:
- result['errors'].append(f"Row {row_num}: Client with ID '{rolodex_id}' already exists")
+ result['skipped_duplicates'] += 1
+ logger.warning(
+ "rolodex_import_duplicate",
+ row=row_num,
+ rolodex_id=rolodex_id,
+ file=file_path,
+ )
continue
# Parse DOB (YYYY-MM-DD or MM/DD/YY variants)
@@ -645,6 +658,21 @@ def import_rolodex_data(db: Session, file_path: str) -> Dict[str, Any]:
except ValueError:
continue
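+ # DictReader fills missing trailing fields with None, so guard before
+ # stripping; blank strings then normalise to None as well.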
+ email_val = (row.get('Email') or '').strip() or None
+ memo_val = (row.get('Memo') or '').strip() or None
+
+ if email_val:
+ result['email_imported'] += 1
+ else:
+ result['email_missing'] += 1
+
+ if memo_val:
+ result['memo_imported'] += 1
+ else:
+ result['memo_missing'] += 1
+
client = Client(
rolodex_id=rolodex_id,
prefix=row.get('Prefix', '').strip() or None,
@@ -659,20 +687,41 @@ def import_rolodex_data(db: Session, file_path: str) -> Dict[str, Any]:
state=(row.get('Abrev', '').strip() or row.get('St', '').strip() or None),
zip_code=row.get('Zip', '').strip() or None,
group=row.get('Group', '').strip() or None,
- email=row.get('Email', '').strip() or None,
+ email=email_val,
dob=dob_val,
ssn=row.get('SS#', '').strip() or None,
legal_status=row.get('Legal_Status', '').strip() or None,
- memo=row.get('Memo', '').strip() or None,
+ memo=memo_val,
)
db.add(client)
result['success'] += 1
+ logger.info(
+ "rolodex_import_row",
+ row=row_num,
+ rolodex_id=rolodex_id,
+ email_present=bool(email_val),
+ memo_present=bool(memo_val),
+ )
+
except Exception as e:
result['errors'].append(f"Row {row_num}: {str(e)}")
db.commit()
+ logger.info(
+ "rolodex_import_complete",
+ file=file_path,
+ encoding=used_encoding,
+ total_rows=result['total_rows'],
+ success=result['success'],
+ memo_imported=result['memo_imported'],
+ memo_missing=result['memo_missing'],
+ email_imported=result['email_imported'],
+ email_missing=result['email_missing'],
+ skipped_duplicates=result['skipped_duplicates'],
+ errors=len(result['errors']),
+ )
except Exception as e:
logger.error("rolodex_import_failed", file=file_path, error=str(e))
@@ -757,7 +806,11 @@ def import_files_data(db: Session, file_path: str) -> Dict[str, Any]:
result = {
'success': 0,
'errors': [],
- 'total_rows': 0
+ 'total_rows': 0,
+ 'client_linked': 0,
+ 'client_missing': 0,
+ 'encoding_used': None,
+ 'skipped_duplicates': 0,
}
expected_fields = {
@@ -795,6 +848,7 @@ def import_files_data(db: Session, file_path: str) -> Dict[str, Any]:
f = None
try:
f, used_encoding = open_text_with_fallbacks(file_path)
+ result['encoding_used'] = used_encoding
reader = csv.DictReader(f)
headers = reader.fieldnames or []
@@ -816,7 +870,13 @@ def import_files_data(db: Session, file_path: str) -> Dict[str, Any]:
# Check for existing case
existing = db.query(Case).filter(Case.file_no == file_no).first()
if existing:
- result['errors'].append(f"Row {row_num}: Case with file number '{file_no}' already exists")
+ result['skipped_duplicates'] += 1
+ logger.warning(
+ "files_import_duplicate",
+ row=row_num,
+ file_no=file_no,
+ file=file_path,
+ )
continue
# Find client by ID
@@ -825,8 +885,16 @@ def import_files_data(db: Session, file_path: str) -> Dict[str, Any]:
if client_id:
client = db.query(Client).filter(Client.rolodex_id == client_id).first()
if not client:
+ result['client_missing'] += 1
+ logger.warning(
+ "files_import_missing_client",
+ row=row_num,
+ file_no=file_no,
+ legacy_client_id=client_id,
+ )
result['errors'].append(f"Row {row_num}: Client with ID '{client_id}' not found")
continue
+ result['client_linked'] += 1
case = Case(
file_no=file_no,
@@ -835,16 +903,35 @@ def import_files_data(db: Session, file_path: str) -> Dict[str, Any]:
case_type=row.get('File_Type', '').strip() or None,
description=row.get('Regarding', '').strip() or None,
open_date=parse_date(row.get('Opened', '')),
- close_date=parse_date(row.get('Closed', ''))
+ close_date=parse_date(row.get('Closed', '')),
)
db.add(case)
result['success'] += 1
+ logger.info(
+ "files_import_row",
+ row=row_num,
+ file_no=file_no,
+ client_id=client.id if client else None,
+ status=case.status,
+ )
+
except Exception as e:
result['errors'].append(f"Row {row_num}: {str(e)}")
db.commit()
+ logger.info(
+ "files_import_complete",
+ file=file_path,
+ encoding=used_encoding,
+ total_rows=result['total_rows'],
+ success=result['success'],
+ client_linked=result['client_linked'],
+ client_missing=result['client_missing'],
+ skipped_duplicates=result['skipped_duplicates'],
+ errors=len(result['errors']),
+ )
except Exception as e:
result['errors'].append(f"Import failed: {str(e)}")
diff --git a/app/templates/rolodex_view.html b/app/templates/rolodex_view.html
index 9dac4fa..2fb2d27 100644
--- a/app/templates/rolodex_view.html
+++ b/app/templates/rolodex_view.html
@@ -59,7 +59,13 @@
<th>Email</th>
<td>{{ client.email or '' }}</td>
<th>Memo / Notes</th>
- <td>{{ client.memo or '' }}</td>
+ <td>
+ {% if client.memo %}
+ {{ client.memo }}
+ {% else %}
+ No notes available
+ {% endif %}
+ </td>
@@ -171,8 +183,12 @@
- {% if client.cases and client.cases|length > 0 %}
- {% for c in client.cases %}
+ {# Sort undated cases last: sorting None against dates raises TypeError. #}
+ {% set dated_cases = client.cases | rejectattr('open_date', 'none') | sort(attribute='open_date', reverse=True) | list %}
+ {% set undated_cases = client.cases | selectattr('open_date', 'none') | list %}
+ {% set sorted_cases = dated_cases + undated_cases %}
+ {% if sorted_cases %}
+ {% for c in sorted_cases %}
{% if status_filter == 'all' or (status_filter == 'open' and (c.status != 'closed')) or (status_filter == 'closed' and c.status == 'closed') %}
<td>{{ c.file_no }}</td>
@@ -184,7 +197,7 @@
Open
{% endif %}
- <td>{{ c.open_date.strftime('%Y-%m-%d') if c.open_date else '' }}</td>
+ <td>{{ c.open_date.strftime('%Y-%m-%d') if c.open_date else '—' }}</td>
@@ -197,7 +210,7 @@
{% endif %}
{% endfor %}
{% else %}
- <td>No related cases.</td>
+ <td>No related cases linked.</td>
{% endif %}
diff --git a/delphi.db b/delphi.db
index 33698af89762bb62eb2a0f8397e2c257f7bba419..347ce1cf9489b5aa3cf617d1d7bd11dca1451721 100644
GIT binary patch
delta 2885
zcmZY9d3?`x9KiAYW;SLUb2G=xIfiXEl1g)wQ0}9snYJ-D*S1i~lrsulqSH~1BtmkO
z?u!nTqmW#=%h5%p*Q-Bx^yvF|yq=HG=%A}d
z=In%KfprrGtQ`=TpWcx3WrI2fl@5wp7?mE>G4g!m;)t;kN#QHPdxn<@o>6jTi7sK6
zLKlbj37L~tlu|Q%V(XBwN-;5ksl8SoiO3F&A2BK`J7-L6@3ii1JEV1QkenEw5Sy2i
znVFlFJ}e_I>&ix|8~KIV{~IpIDagG#mXzpR+@Mjz3rEEL_u{O)jGWvnr;<}s8O%%>
zJ}|ajTF>^}JAc^KO>dH$H7FxDJ-;BMpfLZ+&cBZ_Cw6?GYVn-R`0)5u%TESYpASA(
za!UHJ00mPq3Zdc@N?}xjoC&8=6hV;`MWv|>MN?TSN9CykRisK(nW|7#ilJ&$ooY}`
ziltf0xPK|QG#^`<`5mu{y!s2}yGJLxV;
zrva2f1LuCdRq)qe|
zy-l0x9eS7Eqxb0p`j9@NkLeTols=;^w3R-mFX&79inh^q+Cg8_H?)&>(Qev9-_m!q
zm-f+q`koHZ4|I?Y(P275N9jj8M#t#{oupHAn$FNq^fR5Mb9A0A&@c2W{YJmjA9Rr}
z(Vz4e{Y{sr^X<qzGEpYUWSJsUWtu!J(`AM{B9F>UnI*I3F?n2`kSFCSnIlij
zT$v}&$g}dCJTEWEi!xsp$U<2pFUiZYSeD3ASthT@a(Pu&$ZN7vR>|wKTGq%~c|+F8
zo3dUu$VS;DZ^_%TS>BO%{K9CRPBl%c9kx%6_*&rI>_BaS4?$DIp~#TuMoVL`syDmNF78Wu=^y
zmkLr*DoJIjB2^_us!4UJAvGmdYDt{bmO4^b>PdZRAo0>r8cBjQmL}3v5+zBJrJ1Bi
Osx%MY%tI|qZu(qiDJ#C(dLTz&aUfeDW=>4U=ws16qvNA)MP)>mi`W}6DzG>rA$(7GYFK>ef#6NHrUX*P
zPpOlBy>CdRP?izP7hVks<>+?l*ZPIDUU@TkCwkbWo*@*ZOcY9C6i%5bf+8u3vXJ|;
zQVeCI>=a8mC@1Bj+?0p%Qa;L01t^XRQXwi#MW`qhqv8}#C8#8oqS91`%2GKhPYG0k
zDpDn?Oo>#5lBgPp?{b$Wxk
z)0@+xEue+8h(4z;
zXfb8b5?V^jXgRH*mGmX8qSds9*3wt>HLaudw1GC#CfZC}Xe(`_?eq=ppq;dfcGI`C
zhrXll=?D6e_R>DuPY38A9iqeZ6a7p_=qMed<8*?4p_6oqPSY9sm42h&=?^+f=jc3L
zpo?^gF4GmdO4sN*-JqNFC*7jI=x@4BcjzwNqxdHZ8Yf|5x>B}~F4vqVUw
zL`fEjmaGyZ*(AHfN)E{>xg@vbk-U;m@=F1UlY&x63QG|wD#fI@#7hY&DW#;el##Mh
zPRdJyRFH~NNh(XCRFNd9D%GUA)R3C;h}4otK*O0v8xousq8BCpD8(nY#T
zH+fy&knZxP^pO9qN-t+`$=lLf-jNjPBdO9?-j#mRUk1oPc~1sOnhcir;KgES3yeB1>hNESD9sQofW`vRc;2TKP)8mUXgTHpoWVB%5W6Y?W=YUA~bW
zvQu`+ZuwUB$anI+{2)KdUfC!6<$xTNLvmPtlAq;>9F=2oTu#U@a#BvoX*nam%5Ue--Qb{UHqEwM2sVdc^dT=J+Uoh9d=RIKO
diff --git a/scripts/fix_case_links.py b/scripts/fix_case_links.py
new file mode 100644
index 0000000..4e25a62
--- /dev/null
+++ b/scripts/fix_case_links.py
@@ -0,0 +1,97 @@
+"""Utility script to repair orphaned Case records.
+
+This script attempts to link Case records to Client entries based on legacy
+identifiers. It should be executed inside the running Docker container.
+
+Usage:
+ docker compose exec delphi-db python scripts/fix_case_links.py
+
+Safety:
+ - Prints a summary before making changes.
+ - Requires confirmation unless run with --yes.
+"""
+
+from __future__ import annotations
+
+import argparse
+from collections import defaultdict
+
+from sqlalchemy.orm import joinedload
+
+from app.database import SessionLocal
+from app.models import Case, Client
+
+
+def build_client_maps(session) -> tuple[dict[str, int], dict[str, list[int]]]:
+ rolodex_to_client: dict[str, int] = {}
+ file_to_client: dict[str, list[int]] = defaultdict(list)
+
+ for client in session.query(Client).options(joinedload(Client.cases)).all():
+ if client.rolodex_id:
+ rolodex_to_client[client.rolodex_id] = client.id
+ for case in client.cases:
+ file_to_client[case.file_no].append(client.id)
+
+ return rolodex_to_client, file_to_client
+
+
+def find_orphaned_cases(session):
+ return session.query(Case).filter(Case.client_id.is_(None)).all()
+
+
+def main(confirm: bool) -> int:
+ session = SessionLocal()
+ try:
+ rolodex_map, file_map = build_client_maps(session)
+ orphans = find_orphaned_cases(session)
+
+ if not orphans:
+ print("No orphaned cases found. 🎉")
+ return 0
+
+ assignments: list[tuple[Case, int]] = []
+ for case in orphans:
+ candidate_client_id = None
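+ # Match in order of preference: another already-linked case sharing this
+ # file number, else a legacy row whose file-number column held the
+ # client's rolodex ID. Orphans matching neither are left untouched.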
+ if case.file_no in file_map and file_map[case.file_no]:
+ candidate_client_id = file_map[case.file_no][0]
+ elif case.file_no in rolodex_map:
+ candidate_client_id = rolodex_map[case.file_no]
+
+ if candidate_client_id:
+ assignments.append((case, candidate_client_id))
+
+ if not assignments:
+ print("No matching clients found for orphaned cases. Nothing to do.")
+ return 1
+
+ print("Orphaned cases detected:")
+ for case, client_id in assignments:
+ print(f" Case {case.file_no} (id={case.id}) → Client id {client_id}")
+
+ if not confirm:
+ response = input("Apply these fixes? [y/N]: ").strip().lower()
+ if response not in {"y", "yes"}:
+ print("Aborting with no changes.")
+ return 1
+
+ updated = 0
+ for case, client_id in assignments:
+ case.client_id = client_id
+ updated += 1
+
+ session.commit()
+ print(f"Updated {updated} cases with matching clients.")
+ return 0
+ finally:
+ session.close()
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(description="Repair orphaned Case links.")
+ parser.add_argument("--yes", action="store_true", help="Apply changes without confirmation")
+ args = parser.parse_args()
+ raise SystemExit(main(confirm=args.yes))
+
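After running the repair, a quick spot-check that the orphan count actually dropped might look like this (assuming `SessionLocal` is a standard `sessionmaker` factory on SQLAlchemy 1.4+):

```python
from app.database import SessionLocal
from app.models import Case

# Count cases still lacking a client link after the repair pass.
with SessionLocal() as session:
    remaining = session.query(Case).filter(Case.client_id.is_(None)).count()
    print(f"Orphaned cases remaining: {remaining}")
```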
diff --git a/static/js/custom.js b/static/js/custom.js
index 117dfcc..d0b32bc 100644
--- a/static/js/custom.js
+++ b/static/js/custom.js
@@ -2,10 +2,14 @@
document.addEventListener('DOMContentLoaded', function() {
// Initialize tooltips if any
- var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]'));
- var tooltipList = tooltipTriggerList.map(function (tooltipTriggerEl) {
- return new bootstrap.Tooltip(tooltipTriggerEl);
- });
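+ // Guard: pages that don't load the Bootstrap bundle have no `bootstrap`
+ // global, so skip tooltip wiring instead of throwing at startup.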
+ if (window.bootstrap && bootstrap.Tooltip) {
+ var tooltipTriggerList = [].slice.call(document.querySelectorAll('[data-bs-toggle="tooltip"]'));
+ tooltipTriggerList.forEach(function (tooltipTriggerEl) {
+ new bootstrap.Tooltip(tooltipTriggerEl);
+ });
+ }
// Auto-hide alerts after 5 seconds
var alerts = document.querySelectorAll('.alert:not(.alert-permanent)');
diff --git a/tests/test_import_data.py b/tests/test_import_data.py
new file mode 100644
index 0000000..0cb519d
--- /dev/null
+++ b/tests/test_import_data.py
@@ -0,0 +1,133 @@
+"""Tests for Rolodex and Files CSV import helpers."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+from sqlalchemy import create_engine
+from sqlalchemy.orm import sessionmaker
+
+from app.main import import_files_data, import_rolodex_data
+from app.models import Base, Case, Client
+
+
+def _make_engine(db_path: Path):
+ engine = create_engine(f"sqlite:///{db_path}")
+ Base.metadata.create_all(engine)
+ return engine
+
+
+@pytest.fixture()
+def session(tmp_path):
+ engine = _make_engine(tmp_path / "import-tests.db")
+ SessionLocal = sessionmaker(bind=engine)
+ db = SessionLocal()
+ try:
+ yield db
+ finally:
+ db.close()
+ engine.dispose()
+
+
+def _write_csv(path: Path, content: str) -> Path:
+ path.write_text(content, encoding="utf-8")
+ return path
+
+
+def test_import_rolodex_captures_email_and_memo(session, tmp_path):
+ csv_path = _write_csv(
+ tmp_path / "ROLODEX.csv",
+ "Id,Prefix,First,Middle,Last,Suffix,Title,A1,A2,A3,City,Abrev,St,Zip,Email,DOB,SS#,Legal_Status,Group,Memo\n"
+ "1001,,Ada,,Lovelace,,Countess,123 Main,,,London,,UK,12345,ada@example.com,12/10/1815,XXX-XX-1111,Active,VIP,Top client notes\n"
+ "1002,,Alan,,Turing,,,43 Park,,Bletchley,,UK,67890,,06/23/1912,XXX-XX-2222,Active,VIP, \n",
+ )
+
+ result = import_rolodex_data(session, str(csv_path))
+
+ assert result["success"] == 2
+ assert result["email_imported"] == 1
+ assert result["email_missing"] == 1
+ assert result["memo_imported"] == 1
+ assert result["memo_missing"] == 1
+ assert not result["errors"]
+
+ clients = {c.rolodex_id: c for c in session.query(Client).all()}
+ assert clients["1001"].email == "ada@example.com"
+ assert clients["1001"].memo == "Top client notes"
+ assert clients["1002"].email is None
+ assert clients["1002"].memo is None
+
+
+def test_import_rolodex_handles_duplicates(session, tmp_path):
+ csv_path = _write_csv(
+ tmp_path / "ROLODEX.csv",
+ "Id,Prefix,First,Middle,Last,Suffix,Title,A1,A2,A3,City,Abrev,St,Zip,Email,DOB,SS#,Legal_Status,Group,Memo\n"
+ "3001,,Grace,,Hopper,,,,,Arlington,VA,Virginia,22202,grace@example.com,12/09/1906,XXX-XX-3333,Active,VIP,Notes\n"
+ "3001,,Grace,,Hopper,,,,,Arlington,VA,Virginia,22202,grace@example.com,12/09/1906,XXX-XX-3333,Active,VIP,Duplicate\n",
+ )
+
+ result = import_rolodex_data(session, str(csv_path))
+
+ assert result["success"] == 1
+ assert result["skipped_duplicates"] == 1
+ assert len(result["errors"]) == 0
+
+
+def test_import_files_links_clients_and_detects_missing(session, tmp_path):
+ _write_csv(
+ tmp_path / "ROLODEX.csv",
+ "Id,Prefix,First,Middle,Last,Suffix,Title,A1,A2,A3,City,Abrev,St,Zip,Email,DOB,SS#,Legal_Status,Group,Memo\n"
+ "2001,,Grace,,Hopper,,,,,Arlington,VA,Virginia,22202,grace@example.com,12/09/1906,XXX-XX-3333,Active,VIP,Notes\n",
+ )
+ import_rolodex_data(session, str(tmp_path / "ROLODEX.csv"))
+
+ files_path = _write_csv(
+ tmp_path / "FILES.csv",
+ "File_No,Id,File_Type,Regarding,Opened,Closed,Empl_Num,Rate_Per_Hour,Status,Footer_Code,Opposing,Hours,Hours_P,Trust_Bal,Trust_Bal_P,Hourly_Fees,Hourly_Fees_P,Flat_Fees,Flat_Fees_P,Disbursements,Disbursements_P,Credit_Bal,Credit_Bal_P,Total_Charges,Total_Charges_P,Amount_Owing,Amount_Owing_P,Transferable,Memo\n"
+ "F-001,2001,Divorce,Important matter,01/01/2020,,E1,175,Open,F1,,,0,0,0,0,0,0,0,0,0,0,0,0,0,0,No,Initial legacy memo\n"
+ "F-002,9999,Divorce,Missing client,01/01/2020,,E1,175,Open,F1,,,0,0,0,0,0,0,0,0,0,0,0,0,0,0,No,Should be skipped\n",
+ )
+
+ result = import_files_data(session, str(files_path))
+
+ assert result["success"] == 1
+ assert result["client_linked"] == 1
+ assert result["client_missing"] == 1
+ assert result["errors"], "Expected missing client to produce an error"
+
+ case = session.query(Case).filter(Case.file_no == "F-001").one()
+ assert case.client is not None
+ assert case.client.rolodex_id == "2001"
+
+ missing_case = session.query(Case).filter(Case.file_no == "F-002").first()
+ assert missing_case is None
+
+
+def test_import_files_respects_duplicates(session, tmp_path):
+ _write_csv(
+ tmp_path / "ROLODEX.csv",
+ "Id,Prefix,First,Middle,Last,Suffix,Title,A1,A2,A3,City,Abrev,St,Zip,Email,DOB,SS#,Legal_Status,Group,Memo\n"
+ "4001,,Marie,,Curie,,,,,Paris,,France,75000,marie@example.com,11/07/1867,XXX-XX-4444,Active,VIP,Notes\n",
+ )
+ import_rolodex_data(session, str(tmp_path / "ROLODEX.csv"))
+
+ files_csv = _write_csv(
+ tmp_path / "FILES.csv",
+ "File_No,Id,File_Type,Regarding,Opened,Closed,Empl_Num,Rate_Per_Hour,Status,Footer_Code,Opposing,Hours,Hours_P,Trust_Bal,Trust_Bal_P,Hourly_Fees,Hourly_Fees_P,Flat_Fees,Flat_Fees_P,Disbursements,Disbursements_P,Credit_Bal,Credit_Bal_P,Total_Charges,Total_Charges_P,Amount_Owing,Amount_Owing_P,Transferable,Memo\n"
+ "F-100,4001,Divorce,Legacy matter,01/01/2020,,E1,175,Open,F1,,,0,0,0,0,0,0,0,0,0,0,0,0,0,0,No,First import\n"
+ "F-100,4001,Divorce,Legacy matter,01/01/2020,,E1,175,Open,F1,,,0,0,0,0,0,0,0,0,0,0,0,0,0,0,No,Duplicate row\n",
+ )
+
+ first_result = import_files_data(session, str(files_csv))
+ assert first_result["success"] == 1
+ assert first_result["skipped_duplicates"] == 0
+
+ second_result = import_files_data(session, str(files_csv))
+ assert second_result["success"] == 0
+ assert second_result["skipped_duplicates"] == 1
+ assert not session.query(Case).filter(Case.file_no == "F-100").all()[1:]
+
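If the file-backed SQLite fixture ever becomes a bottleneck, the same tests can run against an in-memory engine; `StaticPool` pins a single connection so the schema created by `Base.metadata.create_all` survives across sessions. A sketch of that alternative fixture (names here are illustrative):

```python
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import StaticPool

from app.models import Base


@pytest.fixture()
def memory_session():
    # One shared in-memory connection; without StaticPool each new
    # connection would see an empty database.
    engine = create_engine(
        "sqlite://",
        connect_args={"check_same_thread": False},
        poolclass=StaticPool,
    )
    Base.metadata.create_all(engine)
    db = sessionmaker(bind=engine)()
    try:
        yield db
    finally:
        db.close()
        engine.dispose()
```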